here's a simple (60 sloc), zero-dependency / zero-config, high-performance express-middleware for serving files that can broadcast a single fs.readFile() operation to multiple server-requests. the entire standalone program (including an emulated express-server and client-side stress-test) is under 200 sloc, and is written entirely with glue-code and recursive-callbacks (similar to how one would go about writing an awk program).
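for readers unfamiliar with that style, here's a minimal, hypothetical sketch (not taken from example.js) of the recursive-callback pattern used throughout it - a single onNext callback steps through a numbered switch-statement, and every async operation simply re-enters onNext when it finishes:

```js
// minimal sketch of the recursive-callback / modeNext pattern (illustrative only)
(function () {
    'use strict';
    var modeNext, onNext;
    onNext = function (error, data) {
        // on error, jump straight to the final (default) case
        modeNext = error
            ? Infinity
            : modeNext + 1;
        switch (modeNext) {
        case 1:
            // step 1 - kick off an async operation, passing onNext as its callback
            require('fs').readFile(__filename, onNext);
            break;
        case 2:
            // step 2 - use the result of step 1, then advance
            console.error('read ' + data.length + ' bytes');
            onNext();
            break;
        default:
            // final step - cleanup / error-handling
            if (error) {
                console.error(error);
            }
        }
    };
    modeNext = 0;
    onNext();
}());
```

example.js below applies this same shape to the file-serving middleware, the error-handling middleware, and the client stress-test.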
honestly, there's a surprising amount of high-level stuff you can get done in javascript using just glue-code, instead of resorting to complicated “reusable” classes / promises / generators.

```js
/*
 * example.js
 *
 * this zero-dependency example will demo a simple (60 sloc), high-performance express-middleware
 * that can broadcast a single fs.readFile() operation to multiple server-requests
 *
 * example usage:
 * $ node example.js
 *
 * example output:
 * [server] - listening on port 1337
 * [client] - hammering file-server for 1000 ms with client-request "http://localhost:1337/example.js"
 * [server] - broadcast 10083 bytes of data from task fs.readFile("example.js") to 352 server-requests in 229 ms
 * [server] - broadcast 10083 bytes of data from task fs.readFile("example.js") to 459 server-requests in 296 ms
 * [server] - broadcast 10083 bytes of data from task fs.readFile("example.js") to 642 server-requests in 299 ms
 * [server] - broadcast 10083 bytes of data from task fs.readFile("example.js") to 353 server-requests in 166 ms
 * [server] - broadcast 10083 bytes of data from task fs.readFile("example.js") to 1 server-requests in 1 ms
 * [server] - handled 1807 server-file-requests total
 * [client] - made 2100 client-requests total in 1022 ms
 * [client] - (1807 client-requests passed)
 * [client] - (293 client-requests failed)
 *
 * finished running stress-test
 * feel free to continue playing with this file-server
 * (e.g. $ curl http://localhost:1337/example.js)
 * or press "ctrl-c" to exit
 */

/*jslint
    bitwise: true,
    browser: true,
    maxerr: 4,
    maxlen: 200,
    node: true,
    nomen: true,
    regexp: true,
    stupid: true
*/

(function () {
    'use strict';
    var local;
    local = {};

    local.middlewareFileServer = function (request, response, nextMiddleware) {
    /*
     * this express-middleware will serve files in an optimized manner by
     * piggybacking multiple requests for the same file onto a single readFileTask
     * 1. if readFileTask with the given filename exists, then skip to step 4.
     * 2. if requested filename does not exist, then goto nextMiddleware
     * 3. if readFileTask with the given filename does not exist, then create it
     * 4. piggyback server-request onto readFileTask with the given filename
     * 5. broadcast data from readFileTask to all piggybacked server-requests
     * 6. cleanup readFileTask
     */
        var fileExists, filename, modeNext, onNext, timeStart;
        onNext = function (error, data) {
            modeNext += 1;
            switch (modeNext) {
            case 1:
                // init timeStart
                timeStart = Date.now();
                // init readFileTaskDict
                local.readFileTaskDict = local.readFileTaskDict || {};
                // init serverRequestsTotal
                local.serverRequestsTotal = local.serverRequestsTotal || 0;
                // get filename from request
                filename = require('url').parse(request.url).pathname;
                // security - prevent access to parent directory, e.g. ../users/john/.ssh/id_rsa
                filename = require('path').resolve('/', filename).slice(1);
                // init fileExists
                fileExists = true;
                // 1. if readFileTask with the given filename exists, then skip to step 4.
                if (local.readFileTaskDict[filename]) {
                    modeNext += 2;
                    onNext();
                    return;
                }
                local.readFileTaskDict[filename] = [];
                // note - fs.exists passes a boolean (not an error) as its callback's first argument
                require('fs').exists(filename, onNext);
                break;
            case 2:
                fileExists = error;
                // 2. if requested filename does not exist, then goto nextMiddleware
                if (!fileExists) {
                    modeNext = Infinity;
                }
                onNext();
                break;
            case 3:
                // 3. if readFileTask with the given filename does not exist, then create it
                require('fs').readFile(filename, function (error, data) {
                    modeNext = Infinity;
                    onNext(error, data);
                });
                onNext();
                break;
            case 4:
                // 4. piggyback server-request onto readFileTask with the given filename
                local.readFileTaskDict[filename].push(response);
                break;
            default:
                // 5. broadcast data from readFileTask to all piggybacked server-requests
                local.readFileTaskDict[filename].forEach(function (response) {
                    local.serverRequestsTotal += 1;
                    if (error) {
                        console.error(error);
                        response.statusCode = 500;
                        response.end('500 Internal Server Error');
                        return;
                    }
                    response.end(data);
                });
                console.error('[server] - broadcast ' + (data || '').length +
                    ' bytes of data from task fs.readFile(' + JSON.stringify(filename) + ') to ' +
                    local.readFileTaskDict[filename].length + ' server-requests in ' +
                    (Date.now() - timeStart) + ' ms');
                // 6. cleanup readFileTask
                delete local.readFileTaskDict[filename];
                if (!fileExists) {
                    nextMiddleware();
                }
            }
        };
        modeNext = 0;
        onNext();
    };

    local.middlewareError = function (error, request, response) {
    /*
     * this express-error-middleware will handle all unhandled requests
     */
        var message;
        // jslint-hack - prevent jslint from complaining about unused 'request' argument
        local.nop(request);
        message = '404 Not Found';
        response.statusCode = 404;
        if (error) {
            console.error(error);
            message = '500 Internal Server Error';
            response.statusCode = 500;
        }
        try {
            response.end(message);
        } catch (errorCaught) {
            console.error(errorCaught);
        }
    };

    local.nop = function () {
    /*
     * this function will do nothing
     */
        return;
    };

    local.runClientTest = function () {
    /*
     * this function will hammer the file-server with as many [client] file-requests as possible
     * in 1000 ms, and print the results afterwards
     */
        var clientHttpRequest, ii, modeNext, onNext, requestsPassed, requestsTotal,
            timeElapsed, timeStart, timerInterval, url;
        onNext = function () {
            modeNext += 1;
            switch (modeNext) {
            case 1:
                // [client] - run initialization code
                timeStart = Date.now();
                clientHttpRequest = function (url) {
                    require('http').request(require('url').parse(url), function (response) {
                        response
                            .on('data', local.nop)
                            .on('end', function () {
                                requestsPassed += 1;
                            });
                    })
                        .on('error', local.nop)
                        .end();
                };
                requestsPassed = 0;
                requestsTotal = 0;
                url = 'http://localhost:1337/' + require('path').basename(__filename);
                // [client] - hammer server with as many client-requests as possible in 1000 ms
                // [client] - using recursive setTimeout
                console.error('[client] - hammering file-server for 1000 ms with client-request ' +
                    JSON.stringify(url));
                onNext();
                break;
            case 2:
                setTimeout(function () {
                    for (ii = 0; ii < 100; ii += 1) {
                        clientHttpRequest(url);
                        requestsTotal += 1;
                    }
                    timeElapsed = Date.now() - timeStart;
                    // recurse / repeat this step for 1000 ms
                    if (timeElapsed < 1000) {
                        modeNext -= 1;
                    }
                    onNext();
                });
                break;
            case 3:
                // [client] - stop stress-test and wait 2000 ms
                // [client] - for server to finish processing requests
                timeElapsed = Date.now() - timeStart;
                clearInterval(timerInterval);
                setTimeout(onNext, 2000);
                break;
            case 4:
                // [server] - print result
                console.error('[server] - handled ' + local.serverRequestsTotal +
                    ' server-file-requests total');
                // [client] - print result
                console.error('[client] - made ' + requestsTotal + ' client-requests total in ' +
                    timeElapsed + ' ms');
                console.error('[client] - (' + requestsPassed + ' client-requests passed)');
                console.error('[client] - (' + (requestsTotal - requestsPassed) +
                    ' client-requests failed)');
                console.error('\nfinished running stress-test\n' +
                    'feel free to continue playing with this file-server\n' +
                    '(e.g. $ curl ' + url + ')\n' +
                    'or press "ctrl-c" to exit');
                break;
            }
        };
        modeNext = 0;
        onNext();
    };

    // [server] - create file-server with express-middlewares
    local.server = require('http').createServer(function (request, response) {
        var modeNextMiddleware, onNextMiddleware;
        onNextMiddleware = function (error) {
            if (error) {
                modeNextMiddleware = Infinity;
            }
            modeNextMiddleware += 1;
            switch (modeNextMiddleware) {
            case 1:
                local.middlewareFileServer(request, response, onNextMiddleware);
                break;
            default:
                local.middlewareError(error, request, response);
                break;
            }
        };
        modeNextMiddleware = 0;
        onNextMiddleware();
    });

    // [server] - listen on port 1337
    console.error('[server] - listening on port 1337');
    // [client] - run client test after server has started
    local.server.listen(1337, local.runClientTest);
}());
```

> On 24 Apr 2018, at 8:46 PM, Andrea Giammarchi <[email protected]> wrote:
>
> to be honest, I have solved already these cases through named promises and
> the broadcast micro library I've mentioned.
>
> ```js
> let shouldAsk = true;
> function askForExpensiveTask() {
>   if (shouldAsk) {
>     shouldAsk = false;
>     doExpensiveThing().then(r => broadcast.that('expensive-task', r));
>   }
>   return broadcast.when('expensive-task');
> }
> ```
>
> That gives me the ability to name any task I want and resolve those asking
> for such tasks whenever these are available.
>
> A further call to `broadcast.that('expensive-task', other)` would update the
> resolved value and notify those that setup `broadcast.all('expensive-task',
> callback)`, which you can also `.drop()` at any time.
>
> Yet, having a way to hook into these kind of flows in core would be great.
>
> Regards
>
>
> On Tue, Apr 24, 2018 at 12:40 PM, kai zhu <[email protected]> wrote:
>> I see a simple scenario like the following one:
>> user asks for a very expensive task clicking section A
>> while it's waiting for it, user changes idea clicking section B
>> both section A and section B needs that very expensive async call
>> drop "going to section A" info and put "go to section B" to that very same promise
>> whenever resolved, do that action
>> A caching mechanism to trigger only once such expensive operation would also
>> work, yet it's not possible to drop "go into A" and put "go into B”
>
> for your scenario, what you want is a cacheable background-task, where you
> can piggyback B onto the task initiated by A (e.g. common-but-expensive
> database-queries that might take 10-60 seconds to execute).
>
> its generally more trouble than its worth to micromanage such tasks with
> removeListeners or make them cancellable (maybe later on C wants to
> piggyback, even tho A and B are no longer interested). its easier
> implementation-wise to have the background-task run its course and save it to
> cache, and just have A ignore the results. the logic is that because this
> common-but-expensive task was recently called, it will likely be called again
> in the near-future, so let it run its course and cache the result.
> here's a real-world cacheable-task implementation for such a scenario, but it
> piggybacks the expensive gzipping of commonly-requested files, instead of
> database-queries [1] [2]
>
> [1] https://github.com/kaizhu256/node-utility2/blob/2018.1.13/lib.utility2.js#L4372
> - piggyback gzipping of files onto a cacheable-task
> [2] https://github.com/kaizhu256/node-utility2/blob/2018.1.13/lib.utility2.js#L5872
> - cacheable-task source-code
>
> ```js
> /*jslint
>     bitwise: true,
>     browser: true,
>     maxerr: 4,
>     maxlen: 100,
>     node: true,
>     nomen: true,
>     regexp: true,
>     stupid: true
> */
>
> local.middlewareAssetsCached = function (request, response, nextMiddleware) {
> /*
>  * this function will run the middleware that will serve cached gzipped-assets
>  * 1. if cache-hit for the gzipped-asset, then immediately serve it to response
>  * 2. run background-task (if not already) to re-gzip the asset and update cache
>  * 3. save re-gzipped-asset to cache
>  * 4. if cache-miss, then piggy-back onto the background-task
>  */
>     var options;
>     options = {};
>     local.onNext(options, function (error, data) {
>         options.result = options.result || local.assetsDict[request.urlParsed.pathname];
>         if (options.result === undefined) {
>             nextMiddleware(error);
>             return;
>         }
>         switch (options.modeNext) {
>         case 1:
>             // skip gzip
>             if (response.headersSent ||
>                     !(/\bgzip\b/).test(request.headers['accept-encoding'])) {
>                 options.modeNext += 1;
>                 options.onNext();
>                 return;
>             }
>             // gzip and cache result
>             local.taskCreateCached({
>                 cacheDict: 'middlewareAssetsCachedGzip',
>                 key: request.urlParsed.pathname
>             }, function (onError) {
>                 local.zlib.gzip(options.result, function (error, data) {
>                     onError(error, !error && data.toString('base64'));
>                 });
>             }, options.onNext);
>             break;
>         case 2:
>             // set gzip header
>             options.result = local.base64ToBuffer(data);
>             response.setHeader('Content-Encoding', 'gzip');
>             response.setHeader('Content-Length', options.result.length);
>             options.onNext();
>             break;
>         case 3:
>             local.middlewareCacheControlLastModified(request, response, options.onNext);
>             break;
>         case 4:
>             response.end(options.result);
>             break;
>         }
>     });
>     options.modeNext = 0;
>     options.onNext();
> };
>
> ...
>
> local.taskCreateCached = function (options, onTask, onError) {
> /*
>  * this function will
>  * 1. if cache-hit, then call onError with cacheValue
>  * 2. run onTask in background to update cache
>  * 3. save onTask's result to cache
>  * 4. if cache-miss, then call onError with onTask's result
>  */
>     local.onNext(options, function (error, data) {
>         switch (options.modeNext) {
>         // 1. if cache-hit, then call onError with cacheValue
>         case 1:
>             // read cacheValue from memory-cache
>             local.cacheDict[options.cacheDict] = local.cacheDict[options.cacheDict] || {};
>             options.cacheValue = local.cacheDict[options.cacheDict][options.key];
>             if (options.cacheValue) {
>                 // call onError with cacheValue
>                 options.modeCacheHit = true;
>                 onError(null, JSON.parse(options.cacheValue));
>                 if (!options.modeCacheUpdate) {
>                     break;
>                 }
>             }
>             // run background-task with lower priority for cache-hit
>             setTimeout(options.onNext, options.modeCacheHit && options.cacheTtl);
>             break;
>         // 2. run onTask in background to update cache
>         case 2:
>             local.taskCreate(options, onTask, options.onNext);
>             break;
>         default:
>             // 3. save onTask's result to cache
>             // JSON.stringify data to prevent side-effects on cache
>             options.cacheValue = JSON.stringify(data);
>             if (!error && options.cacheValue) {
>                 local.cacheDict[options.cacheDict][options.key] = options.cacheValue;
>             }
>             // 4. if cache-miss, then call onError with onTask's result
>             if (!options.modeCacheHit) {
>                 onError(error, options.cacheValue && JSON.parse(options.cacheValue));
>             }
>             (options.onCacheWrite || local.nop)();
>             break;
>         }
>     });
>     options.modeNext = 0;
>     options.onNext();
> };
> ```
>
>> On 24 Apr 2018, at 6:06 PM, Oliver Dunk <[email protected]> wrote:
>>
>> Based on feedback, I agree that a blanket `Promise.prototype.clear()` is a
>> bad idea. I don’t think that is worth pursuing.
>>
>> I still think that there is value in this, especially the adding and
>> removing of listeners you have reference to as Andrea’s PoC shows. Listeners
>> would prevent the chaining issue or alternatively I think it would
>> definitely be possible to decide on intuitive behaviour with the clear
>> mechanic. The benefit of `clear(reference)` over listeners is that it adds
>> less to the semantics.
>>
>> I think the proposed userland solutions are bigger than I would want for
>> something that I believe should be available by default, but I respect that
>> a lot of the people in this conversation are in a better position to make a
>> judgement about that than me.
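p.s. - a minimal, hypothetical sketch distilling the cacheable piggyback-task pattern from the code quoted above (names are illustrative only, not the actual utility2 or broadcast api):

```js
// sketch - share one in-flight expensive task between callers, and cache its result
'use strict';
var cacheDict, taskDict;
cacheDict = {};    // key -> cached result
taskDict = {};     // key -> callbacks piggybacked onto the in-flight task

function runCachedTask(key, doTask, onDone) {
    // 1. cache-hit - answer immediately
    if (cacheDict.hasOwnProperty(key)) {
        onDone(null, cacheDict[key]);
        return;
    }
    // 2. task already in-flight - piggyback this caller onto it
    if (taskDict[key]) {
        taskDict[key].push(onDone);
        return;
    }
    // 3. otherwise start the task once, and broadcast its result to all piggybacked callers
    taskDict[key] = [onDone];
    doTask(function (error, result) {
        if (!error) {
            // 4. save result to cache, so near-future callers get a cache-hit
            cacheDict[key] = result;
        }
        taskDict[key].forEach(function (callback) {
            callback(error, result);
        });
        delete taskDict[key];
    });
}

// example usage - caller A starts the task, caller B piggybacks onto the same task
function expensiveTask(onDone) {
    setTimeout(function () {
        onDone(null, 'expensive-result');
    }, 1000);
}
runCachedTask('expensive-task', expensiveTask, console.log);    // caller A
runCachedTask('expensive-task', expensiveTask, console.log);    // caller B
```

the point is simply that A, B, and any later C all share one in-flight task, and the cached result stays around for near-future callers instead of being cancelled when A loses interest.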

