From 1deb45c8ba76e3ec60808830f6df2b9cdf8dca77 Mon Sep 17 00:00:00 2001 From: Simon Prickett Date: Fri, 24 Apr 2020 17:49:38 -0700 Subject: [PATCH 1/2] Adds programming challenge solutions. --- .../impl/redis/capacity_dao_redis_impl.js | 2 +- src/daos/impl/redis/feed_dao_redis_impl.js | 4 +-- src/daos/impl/redis/metric_dao_redis_impl.js | 6 ++-- src/daos/impl/redis/site_dao_redis_impl.js | 23 +++++++++++-- .../impl/redis/site_geo_dao_redis_impl.js | 13 ++++++-- .../impl/redis/sitestats_dao_redis_impl.js | 15 +++++++-- .../sliding_ratelimiter_dao_redis_impl.js | 33 +++++++++++++++---- src/daos/site_dao.js | 4 +-- tests/capacity_dao_redis_impl.test.js | 2 +- tests/feed_dao_redis_impl.test.js | 6 ++-- tests/metric_dao_redis_impl.test.js | 6 ++-- tests/site_dao_redis_impl.test.js | 4 +-- tests/site_geo_dao_redis_impl.test.js | 2 +- ...sliding_ratelimiter_dao_redis_impl.test.js | 6 ++-- 14 files changed, 91 insertions(+), 35 deletions(-) diff --git a/src/daos/impl/redis/capacity_dao_redis_impl.js b/src/daos/impl/redis/capacity_dao_redis_impl.js index 58a8e33..35a16d5 100644 --- a/src/daos/impl/redis/capacity_dao_redis_impl.js +++ b/src/daos/impl/redis/capacity_dao_redis_impl.js @@ -67,7 +67,7 @@ const getRank = async (siteId) => { // START Challenge #4 const client = redis.getClient(); - const result = await client.zrankAsync( + const result = await client.zrevrankAsync( keyGenerator.getCapacityRankingKey(), `${siteId}`, ); diff --git a/src/daos/impl/redis/feed_dao_redis_impl.js b/src/daos/impl/redis/feed_dao_redis_impl.js index 253bd40..bbc0aaa 100644 --- a/src/daos/impl/redis/feed_dao_redis_impl.js +++ b/src/daos/impl/redis/feed_dao_redis_impl.js @@ -1,10 +1,8 @@ const redis = require('./redis_client'); const keyGenerator = require('./redis_key_generator'); -/* eslint-disable no-unused-vars */ const globalMaxFeedLength = 10000; const siteMaxFeedLength = 2440; -/* eslint-enable */ /** * Takes an object and returns an array whose elements are alternating @@ -125,6 +123,8 @@ const insert = async (meterReading) => { const pipeline = client.batch(); // START Challenge #6 + pipeline.xadd(keyGenerator.getGlobalFeedKey(), 'MAXLEN', '~', globalMaxFeedLength, '*', ...fields); + pipeline.xadd(keyGenerator.getFeedKey(meterReading.siteId), 'MAXLEN', '~', siteMaxFeedLength, '*', ...fields); // END Challenge #6 await pipeline.execAsync(); diff --git a/src/daos/impl/redis/metric_dao_redis_impl.js b/src/daos/impl/redis/metric_dao_redis_impl.js index e3bb12a..1b77ace 100644 --- a/src/daos/impl/redis/metric_dao_redis_impl.js +++ b/src/daos/impl/redis/metric_dao_redis_impl.js @@ -41,7 +41,6 @@ const extractMeasurementMinute = (measurementMinute) => { }; }; -/* eslint-disable no-unused-vars */ /** * Insert a metric into the database for a given solar site ID. * This function uses a sorted set to store the metric. @@ -59,9 +58,12 @@ const insertMetric = async (siteId, metricValue, metricName, timestamp) => { const minuteOfDay = timeUtils.getMinuteOfDay(timestamp); // START Challenge #2 + + await client.zaddAsync(metricKey, minuteOfDay, formatMeasurementMinute(metricValue, minuteOfDay)); + await client.expireAsync(metricKey, metricExpirationSeconds); + // END Challenge #2 }; -/* eslint-enable */ /** * Get a set of metrics for a specific solar site on a given day. 
diff --git a/src/daos/impl/redis/site_dao_redis_impl.js b/src/daos/impl/redis/site_dao_redis_impl.js
index b3ae30f..1149c5e 100644
--- a/src/daos/impl/redis/site_dao_redis_impl.js
+++ b/src/daos/impl/redis/site_dao_redis_impl.js
@@ -88,7 +88,6 @@ const findById = async (id) => {
   return (siteHash === null ? siteHash : remap(siteHash));
 };
 
-/* eslint-disable arrow-body-style */
 /**
  * Get an array of all site objects.
  *
@@ -96,10 +95,28 @@ const findById = async (id) => {
  */
 const findAll = async () => {
   // START CHALLENGE #1
-  return [];
+  const client = redis.getClient();
+
+  const siteIds = await client.smembersAsync(keyGenerator.getSiteIDsKey());
+  const sites = [];
+
+  for (const siteId of siteIds) {
+    /* eslint-disable no-await-in-loop */
+    const siteHash = await client.hgetallAsync(siteId);
+    /* eslint-enable */
+
+    if (siteHash) {
+      // Call remap to remap the flat key/value representation
+      // from the Redis hash into the site domain object format,
+      // and convert any fields that are numeric from their Redis
+      // string representations.
+      sites.push(remap(siteHash));
+    }
+  }
+
+  return sites;
   // END CHALLENGE #1
 };
-/* eslint-enable */
 
 /* eslint-disable no-unused-vars */
 
diff --git a/src/daos/impl/redis/site_geo_dao_redis_impl.js b/src/daos/impl/redis/site_geo_dao_redis_impl.js
index f703e00..cda2a4a 100644
--- a/src/daos/impl/redis/site_geo_dao_redis_impl.js
+++ b/src/daos/impl/redis/site_geo_dao_redis_impl.js
@@ -178,9 +178,8 @@ const findByGeo = async (lat, lng, radius, radiusUnit) => {
  * @returns {Promise} - a Promise, resolving to an array of site objects.
  */
 const findByGeoWithExcessCapacity = async (lat, lng, radius, radiusUnit) => {
-  /* eslint-disable no-unreachable */
   // Challenge #5, remove the next line...
-  return [];
+  // return [];
 
   const client = redis.getClient();
 
@@ -205,6 +204,15 @@ const findByGeoWithExcessCapacity = async (lat, lng, radius, radiusUnit) => {
   const sitesInRadiusCapacitySortedSetKey = keyGenerator.getTemporaryKey();
 
   // START Challenge #5
+  setOperationsPipeline.zinterstore(
+    sitesInRadiusCapacitySortedSetKey,
+    2,
+    sitesInRadiusSortedSetKey,
+    keyGenerator.getCapacityRankingKey(),
+    'WEIGHTS',
+    0,
+    1,
+  );
   // END Challenge #5
 
   // Expire the temporary sorted sets after 30 seconds, so that we
@@ -248,7 +256,6 @@ const findByGeoWithExcessCapacity = async (lat, lng, radius, radiusUnit) => {
   }
 
   return sitesWithCapacity;
-  /* eslint-enable */
 };
 
 module.exports = {
diff --git a/src/daos/impl/redis/sitestats_dao_redis_impl.js b/src/daos/impl/redis/sitestats_dao_redis_impl.js
index 65df024..3b0a6f1 100644
--- a/src/daos/impl/redis/sitestats_dao_redis_impl.js
+++ b/src/daos/impl/redis/sitestats_dao_redis_impl.js
@@ -43,7 +43,6 @@ const findById = async (siteId, timestamp) => {
   return (response ? remap(response) : response);
 };
 
-/* eslint-disable no-unused-vars */
 /**
  * Updates the site stats for a specific site with the meter
  * reading data provided.
@@ -59,9 +58,19 @@ const updateOptimized = async (meterReading) => { await compareAndUpdateScript.load(); // START Challenge #3 + const transaction = client.multi(); + + transaction.hset(key, 'lastReportingTime', timeUtils.getCurrentTimestamp()); + transaction.hincrby(key, 'meterReadingCount', 1); + transaction.expire(key, weekSeconds); + + transaction.evalsha(compareAndUpdateScript.updateIfGreater(key, 'maxWhGenerated', meterReading.whGenerated)); + transaction.evalsha(compareAndUpdateScript.updateIfLess(key, 'minWhGenerated', meterReading.whGenerated)); + transaction.evalsha(compareAndUpdateScript.updateIfGreater(key, 'maxCapacity', meterReading.whGenerated - meterReading.whUsed)); + + await transaction.execAsync(); // END Challenge #3 }; -/* eslint-enable */ /* eslint-disable no-unused-vars */ /** @@ -106,5 +115,5 @@ const updateBasic = async (meterReading) => { module.exports = { findById, - update: updateBasic, // updateOptimized + update: updateOptimized, // updateBasic }; diff --git a/src/daos/impl/redis/sliding_ratelimiter_dao_redis_impl.js b/src/daos/impl/redis/sliding_ratelimiter_dao_redis_impl.js index 0a445c1..96497b2 100644 --- a/src/daos/impl/redis/sliding_ratelimiter_dao_redis_impl.js +++ b/src/daos/impl/redis/sliding_ratelimiter_dao_redis_impl.js @@ -1,21 +1,42 @@ const redis = require('./redis_client'); -/* eslint-disable no-unused-vars */ const keyGenerator = require('./redis_key_generator'); const timeUtils = require('../../../utils/time_utils'); -/* eslint-enable */ - -/* eslint-disable no-unused-vars */ // Challenge 7 const hitSlidingWindow = async (name, opts) => { const client = redis.getClient(); // START Challenge #7 - return -2; + const key = keyGenerator.getKey(`limiter:${opts.interval}:${name}:${opts.maxHits}`); + const now = timeUtils.getCurrentTimestampMillis(); + + const transaction = client.multi(); + + const member = `${now}-${Math.random()}`; + + transaction.zadd(key, now, member); + transaction.zremrangebyscore(key, 0, now - opts.interval); + transaction.zcard(key); + + const response = await transaction.execAsync(); + + const hits = parseInt(response[2], 10); + + let hitsRemaining; + + if (hits > opts.maxHits) { + // Too many hits. + hitsRemaining = -1; + } else { + // Return number of hits remaining. + hitsRemaining = opts.maxHits - hits; + } + + return hitsRemaining; + // END Challenge #7 }; -/* eslint-enable */ module.exports = { /** diff --git a/src/daos/site_dao.js b/src/daos/site_dao.js index c1e915c..9de4f21 100644 --- a/src/daos/site_dao.js +++ b/src/daos/site_dao.js @@ -1,7 +1,7 @@ const daoLoader = require('./daoloader'); -// Week 3, change this from 'site' to 'site_geo'. -const impl = daoLoader.loadDao('site'); +// Change 'site_geo' to 'site' to revert to using 'site_dao_redis_impl.js'. +const impl = daoLoader.loadDao('site_geo'); module.exports = { /** diff --git a/tests/capacity_dao_redis_impl.test.js b/tests/capacity_dao_redis_impl.test.js index d0f3256..4203c6b 100644 --- a/tests/capacity_dao_redis_impl.test.js +++ b/tests/capacity_dao_redis_impl.test.js @@ -116,7 +116,7 @@ test(`${testSuiteName}: getReport`, async () => { }); // This test is for Challenge #4. 
-test.skip(`${testSuiteName}: getRank`, async () => { +test(`${testSuiteName}: getRank`, async () => { // Create some data const entries = [ { diff --git a/tests/feed_dao_redis_impl.test.js b/tests/feed_dao_redis_impl.test.js index 460f4b6..4f28edf 100644 --- a/tests/feed_dao_redis_impl.test.js +++ b/tests/feed_dao_redis_impl.test.js @@ -84,19 +84,19 @@ const insertAndReadBackFromStream = async (siteId) => { }; // This test is for Challenge #6. -test.skip(`${testSuiteName}: insert and read back from global stream`, async () => { +test(`${testSuiteName}: insert and read back from global stream`, async () => { await insertAndReadBackFromStream(); }); // This test is for Challenge #6. -test.skip(`${testSuiteName}: read stream for site that does not exist`, async () => { +test(`${testSuiteName}: read stream for site that does not exist`, async () => { const meterReadings = await redisFeedDAO.getRecentForSite(-1, 100); expect(meterReadings.length).toBe(0); }); // This test is for Challenge #6. -test.skip(`${testSuiteName}: insert and read back from site specific stream`, async () => { +test(`${testSuiteName}: insert and read back from site specific stream`, async () => { await insertAndReadBackFromStream(998); }); diff --git a/tests/metric_dao_redis_impl.test.js b/tests/metric_dao_redis_impl.test.js index ed267e1..c32fa76 100644 --- a/tests/metric_dao_redis_impl.test.js +++ b/tests/metric_dao_redis_impl.test.js @@ -76,12 +76,12 @@ const testInsertAndRetrieve = async (limit) => { }; // This test is for Challenge #2. -test.skip(`${testSuiteName}: test 1 reading`, async () => testInsertAndRetrieve(1)); +test(`${testSuiteName}: test 1 reading`, async () => testInsertAndRetrieve(1)); // This test is for Challenge #2. -test.skip(`${testSuiteName}: test 1 day of readings`, async () => testInsertAndRetrieve(60 * 24)); +test(`${testSuiteName}: test 1 day of readings`, async () => testInsertAndRetrieve(60 * 24)); // This test is for Challenge #2. -test.skip(`${testSuiteName}: test multiple days of readings`, async () => testInsertAndRetrieve(60 * 70)); +test(`${testSuiteName}: test multiple days of readings`, async () => testInsertAndRetrieve(60 * 70)); /* eslint-enable */ diff --git a/tests/site_dao_redis_impl.test.js b/tests/site_dao_redis_impl.test.js index d6504cb..5d2ed94 100644 --- a/tests/site_dao_redis_impl.test.js +++ b/tests/site_dao_redis_impl.test.js @@ -151,7 +151,7 @@ test(`${testSuiteName}: findById with missing site`, async () => { }); // This test is for Challenge #1. -test.skip(`${testSuiteName}: findAll with multiple sites`, async () => { +test(`${testSuiteName}: findAll with multiple sites`, async () => { const sites = [{ id: 1, capacity: 4.5, @@ -204,7 +204,7 @@ test.skip(`${testSuiteName}: findAll with multiple sites`, async () => { }); // This test is for Challenge #1. -test.skip(`${testSuiteName}: findAll with empty sites`, async () => { +test(`${testSuiteName}: findAll with empty sites`, async () => { const sites = await redisSiteDAO.findAll(); expect(sites).toEqual([]); }); diff --git a/tests/site_geo_dao_redis_impl.test.js b/tests/site_geo_dao_redis_impl.test.js index 8f1a7d9..6dd64d5 100644 --- a/tests/site_geo_dao_redis_impl.test.js +++ b/tests/site_geo_dao_redis_impl.test.js @@ -270,7 +270,7 @@ test(`${testSuiteName}: findByGeo no results`, async () => { }); // This test is for Challenge #5. 
-test.skip(`${testSuiteName}: findByGeoWithExcessCapacity`, async () => { +test(`${testSuiteName}: findByGeoWithExcessCapacity`, async () => { const site1 = { id: 1, capacity: 4.5, diff --git a/tests/sliding_ratelimiter_dao_redis_impl.test.js b/tests/sliding_ratelimiter_dao_redis_impl.test.js index 91a9985..5f07714 100644 --- a/tests/sliding_ratelimiter_dao_redis_impl.test.js +++ b/tests/sliding_ratelimiter_dao_redis_impl.test.js @@ -50,7 +50,7 @@ const runSlidingWindowTests = async (name, limiterOpts, howMany) => { }; // Challenge 7. Remove '.skip' to enable test. -test.skip(`${testSuiteName}: hit (sliding window limit not exceeded)`, async () => { +test(`${testSuiteName}: hit (sliding window limit not exceeded)`, async () => { const results = await runSlidingWindowTests('testresource', { interval: 10000, maxHits: 5, @@ -60,7 +60,7 @@ test.skip(`${testSuiteName}: hit (sliding window limit not exceeded)`, async () }); // Challenge 7. Remove '.skip' to enable test. -test.skip(`${testSuiteName}: hit (sliding window limit exceeded)`, async () => { +test(`${testSuiteName}: hit (sliding window limit exceeded)`, async () => { let results = await runSlidingWindowTests('testresource2', { interval: 10000, maxHits: 5, @@ -77,7 +77,7 @@ test.skip(`${testSuiteName}: hit (sliding window limit exceeded)`, async () => { }); // Challenge 7. Remove '.skip' to enable test. -test.skip(`${testSuiteName}: hit (sliding window ensure window slides)`, async () => { +test(`${testSuiteName}: hit (sliding window ensure window slides)`, async () => { const sliderName = 'testresource4'; const sliderOpts = { interval: 2000, From dfd2fe443cbfef7c815f8386a0c3db2d5e58c088 Mon Sep 17 00:00:00 2001 From: Simon Prickett Date: Tue, 28 Apr 2020 14:41:31 -0700 Subject: [PATCH 2/2] Adds optional bonus challenge pipeline solution. --- src/daos/impl/redis/site_geo_dao_redis_impl.js | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/src/daos/impl/redis/site_geo_dao_redis_impl.js b/src/daos/impl/redis/site_geo_dao_redis_impl.js index cda2a4a..05998bc 100644 --- a/src/daos/impl/redis/site_geo_dao_redis_impl.js +++ b/src/daos/impl/redis/site_geo_dao_redis_impl.js @@ -114,20 +114,24 @@ const findAll = async () => { const siteIds = await client.zrangeAsync(keyGenerator.getSiteGeoKey(), 0, -1); const sites = []; - for (const siteId of siteIds) { - const siteKey = keyGenerator.getSiteHashKey(siteId); + // OPTIONAL BONUS CHALLENGE: Optimize with a pipeline. + if (siteIds.length > 0) { + const pipeline = client.batch(); - /* eslint-disable no-await-in-loop */ - const siteHash = await client.hgetallAsync(siteKey); - /* eslint-enable */ + for (const siteId of siteIds) { + pipeline.hgetall(keyGenerator.getSiteHashKey(siteId)); + } - if (siteHash) { + // Get all of the site hashes in a single round trip. + const siteHashes = await pipeline.execAsync(); + + for (const siteHash of siteHashes) { // Call remap to remap the flat key/value representation // from the Redis hash into the site domain object format. sites.push(remap(siteHash)); } } - + // END OPTIONAL BONUS CHALLENGE return sites; };
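The Challenge #7 sliding-window pattern introduced in this patch can also be tried outside the course codebase. The standalone sketch below is illustrative only and is not part of the patch: it assumes node_redis 3.x promisified with bluebird (the same approach the project's redis_client.js takes) and a local Redis server on the default port, and the key format, resource name, and limits are arbitrary demo values rather than anything taken from the repository.

// sliding_window_demo.js - standalone sketch of the Challenge #7 pattern.
const bluebird = require('bluebird');
const redis = require('redis');

// Promisify node_redis so that the ...Async command variants return Promises.
bluebird.promisifyAll(redis.RedisClient.prototype);
bluebird.promisifyAll(redis.Multi.prototype);

const client = redis.createClient();

// Returns the number of hits remaining in the window, or -1 if the
// limit has been exceeded.
const hitSlidingWindow = async (name, interval, maxHits) => {
  const key = `limiter:${interval}:${name}:${maxHits}`; // illustrative key format
  const now = Date.now();

  const transaction = client.multi();

  // A random suffix keeps members unique when two hits share a millisecond.
  transaction.zadd(key, now, `${now}-${Math.random()}`);
  // Drop members that have slid out of the window.
  transaction.zremrangebyscore(key, 0, now - interval);
  // Count the hits still inside the window.
  transaction.zcard(key);

  const responses = await transaction.execAsync();
  const hits = responses[2];

  return hits > maxHits ? -1 : maxHits - hits;
};

const demo = async () => {
  // Allow at most 5 hits per 10 second sliding window.
  for (let n = 1; n <= 7; n += 1) {
    /* eslint-disable no-await-in-loop */
    const remaining = await hitSlidingWindow('demoresource', 10000, 5);
    /* eslint-enable */

    console.log(remaining === -1
      ? `Hit ${n}: limit exceeded`
      : `Hit ${n}: ${remaining} hits remaining`);
  }

  client.quit();
};

demo();

Because the ZADD, ZREMRANGEBYSCORE and ZCARD commands run in a single MULTI/EXEC block, concurrent hits cannot interleave between trimming the window and counting it, which is the same reason the DAO solution above wraps them in client.multi().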