Arlolra has uploaded a new change for review. (
https://gerrit.wikimedia.org/r/348190 )
Change subject: WIP: Separate command line arg parsing from job execution
......................................................................
WIP: Separate command line arg parsing from job execution
* This is all in service of T110961, where we'd like to call out to
compute workers from the HTTP API.
Change-Id: I1e751e9cb322d770f43c9d1a0cacc529062854eb
---
M bin/parse.js
A lib/parse.js
M npm-shrinkwrap.json
M package.json
4 files changed, 356 insertions(+), 300 deletions(-)
git pull ssh://gerrit.wikimedia.org:29418/mediawiki/services/parsoid
refs/changes/90/348190/1
diff --git a/bin/parse.js b/bin/parse.js
index f276220..042ecbe 100755
--- a/bin/parse.js
+++ b/bin/parse.js
@@ -6,23 +6,18 @@
'use strict';
require('../core-upgrade.js');
-var ParserEnv =
require('../lib/config/MWParserEnvironment.js').MWParserEnvironment;
-var ParsoidConfig = require('../lib/config/ParsoidConfig.js').ParsoidConfig;
-var TemplateRequest = require('../lib/mw/ApiRequest.js').TemplateRequest;
-var Util = require('../lib/utils/Util.js').Util;
-var DU = require('../lib/utils/DOMUtils.js').DOMUtils;
-var Promise = require('../lib/utils/promise.js');
var fs = require('fs');
var path = require('path');
var yargs = require('yargs');
var yaml = require('js-yaml');
+var workerFarm = require('worker-farm');
-process.on('SIGUSR2', function() {
- var heapdump = require('heapdump');
- console.error('SIGUSR2 received! Writing snapshot.');
- process.chdir('/tmp');
- heapdump.writeSnapshot();
-});
+var Util = require('../lib/utils/Util.js').Util;
+// var DU = require('../lib/utils/DOMUtils.js').DOMUtils;
+var Promise = require('../lib/utils/promise.js');
+
+// Meh ...
+var ParserEnv =
require('../lib/config/MWParserEnvironment.js').MWParserEnvironment;
var standardOpts = Util.addStandardOptions({
'wt2html': {
@@ -58,31 +53,6 @@
description: "Path to a config.yaml file. Use --config w/ no
argument to default to the server's config.yaml",
'default': false,
},
- 'prefix': {
- description: 'Which wiki prefix to use; e.g. "enwiki" for
English wikipedia, "eswiki" for Spanish, "mediawikiwiki" for mediawiki.org',
- 'boolean': false,
- 'default': null,
- },
- 'domain': {
- description: 'Which wiki to use; e.g. "en.wikipedia.org" for
English wikipedia, "es.wikipedia.org" for Spanish, "mediawiki.org" for
mediawiki.org',
- 'boolean': false,
- 'default': null,
- },
- 'page': {
- description: 'The page name, returned for {{PAGENAME}}. If no
input is given (ie. empty/stdin closed), it downloads and parses the page. This
should be the actual title of the article (that is, not including any
URL-encoding that might be necessary in wikitext).',
- 'boolean': false,
- 'default': ParserEnv.prototype.defaultPageName,
- },
- 'contentmodel': {
- description: 'The content model of the input. Defaults to
"wikitext" but extensions may support others (for example, "json").',
- 'boolean': false,
- 'default': null,
- },
- 'oldid': {
- description: 'Oldid of the given page.',
- 'boolean': false,
- 'default': null,
- },
'oldtext': {
description: 'The old page text for a selective-serialization
(see --selser)',
'boolean': false,
@@ -108,16 +78,6 @@
'boolean': false,
'default': false,
},
- 'contentVersion': {
- description: 'The acceptable content version.',
- 'boolean': false,
- 'default': ParserEnv.prototype.contentVersion,
- },
- 'pagebundle': {
- description: 'Output pagebundle JSON',
- 'boolean': true,
- 'default': false,
- },
'pbin': {
description: 'Input pagebundle JSON',
'boolean': false,
@@ -132,26 +92,6 @@
description: 'Output pagebundle JSON to file',
'boolean': false,
'default': false,
- },
- 'lint': {
- description: 'Parse with linter enabled',
- 'boolean': true,
- 'default': false,
- },
- 'scrubWikitext': {
- description: 'Apply wikitext scrubbing while serializing.',
- 'boolean': true,
- 'default': false,
- },
- 'nativeGallery': {
- description: 'Omit extsrc from gallery.',
- 'boolean': true,
- 'default': false,
- },
- 'loadWMF': {
- description: 'Use WMF mediawiki API config',
- 'boolean': true,
- 'default': true,
},
'offline': {
description: 'Shortcut to turn off various network fetches
during parse.',
@@ -168,6 +108,19 @@
'boolean': true,
'default': false,
},
+
+ // These are ParsoidConfig properties
+
+ 'linting': {
+ description: 'Parse with linter enabled',
+ 'boolean': true,
+ 'default': false,
+ },
+ 'loadWMF': {
+ description: 'Use WMF mediawiki API config',
+ 'boolean': true,
+ 'default': true,
+ },
'useBatchAPI': {
description: 'Turn on/off the API batching system',
// Since I picked a null default (to let the default config
setting be the default),
@@ -175,118 +128,173 @@
'boolean': false,
'default': null,
},
+
+ // These are MWParserEnvironment properties
+
+ 'prefix': {
+ description: 'Which wiki prefix to use; e.g. "enwiki" for
English wikipedia, "eswiki" for Spanish, "mediawikiwiki" for mediawiki.org',
+ 'boolean': false,
+ 'default': null,
+ },
+ 'domain': {
+ description: 'Which wiki to use; e.g. "en.wikipedia.org" for
English wikipedia, "es.wikipedia.org" for Spanish, "mediawiki.org" for
mediawiki.org',
+ 'boolean': false,
+ 'default': null,
+ },
+ 'oldid': {
+ description: 'Oldid of the given page.',
+ 'boolean': false,
+ 'default': null,
+ },
+ 'contentVersion': {
+ description: 'The acceptable content version.',
+ 'boolean': false,
+ 'default': ParserEnv.prototype.contentVersion,
+ },
+ 'pageName': {
+ description: 'The page name, returned for {{PAGENAME}}. If no
input is given (ie. empty/stdin closed), it downloads and parses the page. This
should be the actual title of the article (that is, not including any
URL-encoding that might be necessary in wikitext).',
+ 'boolean': false,
+ 'default': ParserEnv.prototype.defaultPageName,
+ },
+ 'pageBundle': {
+ description: 'Output pagebundle JSON',
+ 'boolean': true,
+ 'default': false,
+ },
+ 'scrubWikitext': {
+ description: 'Apply wikitext scrubbing while serializing.',
+ 'boolean': true,
+ 'default': false,
+ },
+ 'nativeGallery': {
+ description: 'Omit extsrc from gallery.',
+ 'boolean': true,
+ 'default': false,
+ },
+ 'contentmodel': {
+ description: 'The content model of the input. Defaults to
"wikitext" but extensions may support others (for example, "json").',
+ 'boolean': false,
+ 'default': null,
+ },
});
-var startsAtWikitext;
-var startsAtHTML = function(argv, env, input, pb) {
- var doc = DU.parseHTML(input);
- pb = pb || DU.extractPageBundle(doc);
- if (argv.selser) {
- pb = pb || DU.extractPageBundle(env.page.dom.ownerDocument);
- if (pb) {
- DU.applyPageBundle(env.page.dom.ownerDocument, pb);
+(function() {
+ var defaultModeStr = "Default conversion mode : --wt2html";
+
+ var opts = yargs.usage(
+ 'Usage: echo wikitext | $0 [options]\n\n' + defaultModeStr,
+ standardOpts
+ ).strict();
+
+ var argv = opts.parse(process.argv);
+
+ if (Util.booleanOption(argv.help)) {
+ opts.showHelp();
+ return;
+ }
+
+ var mode = ['selser', 'html2html', 'html2wt', 'wt2wt']
+ .find(function(m) { return argv[m]; }) || 'wt2html';
+
+ // if (!argv.wt2html) {
+ // if (argv.oldtextfile) {
+ // argv.oldtext = fs.readFileSync(argv.oldtextfile,
'utf8');
+ // }
+ // if (argv.oldhtmlfile) {
+ // env.page.dom = DU.parseHTML(
+ // fs.readFileSync(argv.oldhtmlfile, 'utf8')
+ // ).body;
+ // }
+ // if (argv.domdiff) {
+ // // FIXME: need to load diff markers from attributes
+ // env.page.domdiff = {
+ // isEmpty: false,
+ // dom: DU.ppToDOM(fs.readFileSync(argv.domdiff,
'utf8')),
+ // };
+ // throw new Error('this is broken');
+ // }
+ // env.setPageSrcInfo(argv.oldtext || null);
+ // }
+ //
+ // if (argv.selser && argv.oldtext === null) {
+ // throw new Error('Please provide original wikitext ' +
+ // '(--oldtext or --oldtextfile). Selser requires that.');
+ // }
+ //
+ // var pb;
+ // if (argv.pbin.length > 0) {
+ // pb = JSON.parse(argv.pbin);
+ // } else if (argv.pbinfile) {
+ // pb = JSON.parse(fs.readFileSync(argv.pbinfile, 'utf8'));
+ // }
+
+ var prefix = argv.prefix || null;
+ var domain = argv.domain || null;
+
+ if (argv.apiURL) {
+ prefix = 'customwiki';
+ domain = null;
+ } else if (!(prefix || domain)) {
+ domain = 'en.wikipedia.org';
+ }
+
+ var config = {
+ linting: argv.linting,
+ loadWMF: argv.loadWMF,
+ useBatchAPI: argv.useBatchAPI,
+ };
+
+ Util.setTemplatingAndProcessingFlags(config, argv);
+ Util.setDebuggingFlags(config, argv);
+
+ // Offline shortcut
+ if (argv.offline) {
+ config.fetchConfig = false;
+ config.fetchTemplates = false;
+ config.fetchImageInfo = false;
+ config.usephppreprocessor = false;
+ }
+
+ if (Util.booleanOption(argv.config)) {
+ var p = (typeof (argv.config) === 'string') ?
+ path.resolve('.', argv.config) :
+ path.resolve(__dirname, '../config.yaml');
+ // Assuming Parsoid is the first service in the list
+ config = yaml.load(fs.readFileSync(p, 'utf8')).services[0].conf;
+ }
+
+ if (config.localsettings) {
+ config.localsettings = path.resolve(__dirname,
config.localsettings);
+ }
+
+ var nock, dir, nocksFile;
+ if (argv.record || argv.replay) {
+ prefix = prefix || 'enwiki';
+ dir = path.resolve(__dirname, '../nocks/');
+ if (!fs.existsSync(dir)) {
+ fs.mkdirSync(dir);
+ }
+ dir = dir + '/' + prefix;
+ if (!fs.existsSync(dir)) {
+ fs.mkdirSync(dir);
+ }
+ nocksFile = dir + '/' + argv.page + '.js';
+ if (argv.record) {
+ nock = require('nock');
+ nock.recorder.rec({dont_print: true});
+ } else {
+ require(nocksFile);
}
}
- if (pb) {
- DU.applyPageBundle(doc, pb);
- }
- var handler = env.getContentHandler(argv.contentmodel);
- return handler.fromHTML(env, doc.body, argv.selser)
- .then(function(out) {
- if (argv.html2wt || argv.wt2wt) {
- return { trailingNL: true, out: out, env: env };
- } else {
- return startsAtWikitext(argv, env, out);
- }
- });
-};
-startsAtWikitext = function(argv, env, input) {
- env.setPageSrcInfo(input);
- var handler = env.getContentHandler(argv.contentmodel);
- return handler.toHTML(env)
- .then(function(doc) {
- if (argv.lint) {
- env.log("end/parse");
- }
- if (argv.wt2html || argv.html2html) {
- var out;
- if (argv.pboutfile) {
- var pb = DU.extractPageBundle(doc);
- fs.writeFileSync(argv.pboutfile,
JSON.stringify(pb), 'utf8');
- }
- if (argv.normalize) {
- out = DU.normalizeOut(doc.body, (argv.normalize
=== 'parsoid'));
- } else if (argv.document) {
- // used in Parsoid JS API, return document
- out = doc;
- } else {
- out = DU.toXML(doc);
- }
- return { trailingNL: true, out: out, env: env };
- } else {
- return startsAtHTML(argv, env, DU.toXML(doc));
- }
- });
-};
+ var environment = {
+ scrubWikitext: argv.scrubWikitext,
+ nativeGallery: argv.nativeGallery,
+ pageBundle: argv.pageBundle || argv.pboutfile,
+ };
-var parse = exports.parse = function(input, argv, parsoidConfig, prefix,
domain) {
- var env;
- return ParserEnv.getParserEnv(parsoidConfig, {
- prefix: prefix,
- domain: domain,
- pageName: argv.page,
- }).then(function(_env) {
- env = _env;
-
- // fetch templates from enwiki by default.
- if (argv.wgScriptPath) {
- env.conf.wiki.wgScriptPath = argv.wgScriptPath;
- }
-
- // Enable wikitext scrubbing
- env.scrubWikitext = argv.scrubWikitext;
-
- // Enable html editing galleries
- env.nativeGallery = argv.nativeGallery;
-
- // Sets ids on nodes and stores data-* attributes in a JSON blob
- env.pageBundle = argv.pagebundle || argv.pboutfile;
-
- // The content version to output
- if (argv.contentVersion) {
- env.setContentVersion(argv.contentVersion);
- }
-
- if (!argv.wt2html) {
- if (argv.oldtextfile) {
- argv.oldtext =
fs.readFileSync(argv.oldtextfile, 'utf8');
- }
- if (argv.oldhtmlfile) {
- env.page.dom = DU.parseHTML(
- fs.readFileSync(argv.oldhtmlfile,
'utf8')
- ).body;
- }
- if (argv.domdiff) {
- // FIXME: need to load diff markers from
attributes
- env.page.domdiff = {
- isEmpty: false,
- dom:
DU.ppToDOM(fs.readFileSync(argv.domdiff, 'utf8')),
- };
- throw new Error('this is broken');
- }
- env.setPageSrcInfo(argv.oldtext || null);
- }
-
- if (argv.selser && argv.oldtext === null) {
- throw new Error('Please provide original wikitext ' +
- '(--oldtext or --oldtextfile). Selser requires
that.');
- }
-
- if (typeof input === 'string') {
- return input;
- }
-
+ return Promise.resolve()
+ .then(function() {
if (argv.inputfile) {
// read input from the file, then process
var fileContents = fs.readFileSync(argv.inputfile,
'utf8');
@@ -311,143 +319,54 @@
stdin.on('end', function() {
resolve(inputChunks);
});
- }).then(function(inputChunks) {
+ })
+ .then(function(inputChunks) {
clearTimeout(stdinTimer);
// parse page if no input
if (inputChunks.length > 0) {
return inputChunks.join('');
} else if (argv.html2wt || argv.html2html) {
- env.log("fatal", "Pages start at wikitext.");
+ throw new Error('Pages start at wikitext.');
}
- var target = env.normalizeAndResolvePageTitle();
- return TemplateRequest
- .setPageSrcInfo(env, target, argv.oldid)
- .then(function() {
- // Preserve fetched contentmodel.
- argv.contentmodel = argv.contentmodel ||
-
env.page.meta.revision.contentmodel;
- return env.page.src;
- });
});
- }).then(function(str) {
- str = str.replace(/\r/g, '');
- if (argv.html2wt || argv.html2html) {
- var pb;
- if (argv.pbin.length > 0) {
- pb = JSON.parse(argv.pbin);
- } else if (argv.pbinfile) {
- pb = JSON.parse(fs.readFileSync(argv.pbinfile,
'utf8'));
- }
- return startsAtHTML(argv, env, str, pb);
- } else {
- return startsAtWikitext(argv, env, str);
- }
- });
-};
-
-if (require.main === module) {
- (function() {
- var defaultModeStr = "Default conversion mode : --wt2html";
-
- var opts = yargs.usage(
- 'Usage: echo wikitext | $0 [options]\n\n' +
defaultModeStr,
- standardOpts
- ).strict();
-
- var argv = opts.parse(process.argv);
-
- if (Util.booleanOption(argv.help)) {
- opts.showHelp();
- return;
+ })
+ .then(function(str) {
+ // Hmm ...
+ if (str !== undefined) {
+ str = str.replace(/\r/g, '');
}
- // Because selser builds on html2wt serialization,
- // the html2wt flag should be automatically set when selser is
set.
- if (argv.selser) {
- argv.html2wt = true;
- }
-
- // Default conversion mode
- if (!argv.html2wt && !argv.wt2wt && !argv.html2html) {
- argv.wt2html = true;
- }
-
- // Offline shortcut
- if (argv.offline) {
- argv.fetchConfig = false;
- argv.fetchTemplates = false;
- argv.fetchImageInfo = false;
- argv.usephppreprocessor = false;
- }
-
- var prefix = argv.prefix || null;
- var domain = argv.domain || null;
-
- if (argv.apiURL) {
- prefix = 'customwiki';
- domain = null;
- } else if (!(prefix || domain)) {
- domain = 'en.wikipedia.org';
- }
-
- var config = null;
- if (Util.booleanOption(argv.config)) {
- var p = (typeof (argv.config) === 'string') ?
- path.resolve('.', argv.config) :
- path.resolve(__dirname, '../config.yaml');
- // Assuming Parsoid is the first service in the list
- config = yaml.load(fs.readFileSync(p,
'utf8')).services[0].conf;
- }
-
- var setup = function(parsoidConfig) {
- parsoidConfig.loadWMF = argv.loadWMF;
- if (config && config.localsettings) {
- var local = require(path.resolve(__dirname,
config.localsettings));
- local.setup(parsoidConfig);
- }
- Util.setTemplatingAndProcessingFlags(parsoidConfig,
argv);
- Util.setDebuggingFlags(parsoidConfig, argv);
+ var farmOptions = {
+ maxConcurrentWorkers: 1,
+ maxConcurrentCallsPerWorker: 1,
+ maxCallTime: 2 * 60 * 1000,
+ // Crashes will retry, timeouts won't, as far as
testing showed,
+ // but it's documented differently. Anyways, we don't
want retries.
+ maxRetries: 0,
+ autoStart: true,
};
- var parsoidConfig = new ParsoidConfig({ setup: setup }, config);
+ var worker = workerFarm(farmOptions,
require.resolve('../lib/parse.js'));
+ var promiseWorker = Promise.promisify(worker);
- parsoidConfig.defaultWiki = prefix ? prefix :
- parsoidConfig.reverseMwApiMap.get(domain);
-
- var nock, dir, nocksFile;
- if (argv.record || argv.replay) {
- prefix = prefix || 'enwiki';
- dir = path.resolve(__dirname, '../nocks/');
- if (!fs.existsSync(dir)) {
- fs.mkdirSync(dir);
- }
- dir = dir + '/' + prefix;
- if (!fs.existsSync(dir)) {
- fs.mkdirSync(dir);
- }
- nocksFile = dir + '/' + argv.page + '.js';
- }
-
- if (argv.record) {
- nock = require('nock');
- nock.recorder.rec({dont_print: true});
- } else if (argv.replay) {
- try {
- require(nocksFile);
- } catch (e) {
- console.error('Exception ' + e + ' requiring '
+ nocksFile);
- console.error('Cannot replay!');
- return -1;
- }
- }
-
- return parse(null, argv, parsoidConfig, prefix,
domain).then(function(res) {
+ return promiseWorker({
+ str: str,
+ mode: mode,
+ config: config,
+ environment: environment,
+ domain: domain,
+ prefix: prefix,
+ pageName: argv.pageName,
+ // XXX
+ contentVersion: argv.contentVersion,
+ wgScriptPath: argv.wgScriptPath,
+ })
+ .then(function(out) {
var stdout = process.stdout;
- stdout.write(res.out);
- if (res.trailingNL && stdout.isTTY) {
+ stdout.write(out);
+ if (stdout.isTTY) {
stdout.write('\n');
}
-
if (argv.record) {
var nockCalls = nock.recorder.play();
var stream = fs.createWriteStream(nocksFile);
@@ -459,6 +378,8 @@
stream.end();
});
}
- }).done();
- }());
-}
+ workerFarm.end(worker);
+ });
+ })
+ .done();
+}());
diff --git a/lib/parse.js b/lib/parse.js
new file mode 100644
index 0000000..cb62cad
--- /dev/null
+++ b/lib/parse.js
@@ -0,0 +1,120 @@
+'use strict';
+require('../core-upgrade.js');
+
+var ParserEnv = require('./config/MWParserEnvironment.js').MWParserEnvironment;
+var ParsoidConfig = require('./config/ParsoidConfig.js').ParsoidConfig;
+var DU = require('./utils/DOMUtils.js').DOMUtils;
+
+var startsAtWikitext;
+var startsAtHTML = function(obj, env, str, pb) {
+ var argv = {};
+ var doc = DU.parseHTML(str);
+ pb = pb || DU.extractPageBundle(doc);
+ if (argv.selser) {
+ pb = pb || DU.extractPageBundle(env.page.dom.ownerDocument);
+ if (pb) {
+ DU.applyPageBundle(env.page.dom.ownerDocument, pb);
+ }
+ }
+ if (pb) {
+ DU.applyPageBundle(doc, pb);
+ }
+ var handler = env.getContentHandler(argv.contentmodel);
+ return handler.fromHTML(env, doc.body, argv.selser)
+ .then(function(out) {
+ if (['html2wt', 'wt2wt'].includes(obj.mode)) {
+ return out;
+ } else {
+ return startsAtWikitext(obj, env, out);
+ }
+ });
+};
+
+startsAtWikitext = function(obj, env, str) {
+ var argv = {};
+ env.setPageSrcInfo(str);
+ var handler = env.getContentHandler(argv.contentmodel);
+ return handler.toHTML(env)
+ .then(function(doc) {
+ if (argv.linting) {
+ env.log("end/parse");
+ }
+ if (['wt2html', 'html2html'].includes(obj.mode)) {
+ var out;
+ if (argv.pboutfile) {
+ // var pb = DU.extractPageBundle(doc);
+ // fs.writeFileSync(argv.pboutfile,
JSON.stringify(pb), 'utf8');
+ }
+ if (argv.normalize) {
+ out = DU.normalizeOut(doc.body, (argv.normalize
=== 'parsoid'));
+ } else {
+ out = DU.toXML(doc);
+ }
+ return out;
+ } else {
+ return startsAtHTML(obj, env, DU.toXML(doc));
+ }
+ });
+};
+
+/**
+ * Here is where things go.
+ *
+ * @param {Object} obj
+ * @param {String} obj.str The string to parse
+ * @param {String} obj.mode The mode to use
+ * @param {Object} obj.config Will be Object.assign'ed to ParsoidConfig
+ * @param {Object} obj.environment Will be Object.assign'ed to the env
+ * @param {String} obj.domain
+ * @param {String} [obj.prefix]
+ * @param {String} obj.pageName
+ * @param {Function} [cb] Optional callback
+ *
+ * @return {Promise}
+ */
+module.exports = function(obj, cb) {
+ var parsoidConfig;
+ if (parsoidConfig === undefined) {
+ var setup = function(pc) {
+ if (obj.config.localsettings) {
+ var local = require(obj.config.localsettings);
+ local.setup(pc);
+ }
+ };
+ parsoidConfig = new ParsoidConfig({ setup: setup }, obj.config);
+ }
+ return ParserEnv.getParserEnv(parsoidConfig, {
+ domain: obj.domain,
+ prefix: obj.prefix,
+ pageName: obj.pageName,
+ })
+ .then(function(env) {
+ Object.assign(env, obj.environment);
+
+ // Fetch templates from enwiki by default
+ if (obj.wgScriptPath) {
+ env.conf.wiki.wgScriptPath = obj.wgScriptPath;
+ }
+
+ // The content version to output
+ if (obj.contentVersion) {
+ env.setContentVersion(obj.contentVersion);
+ }
+
+ if (['html2wt', 'html2html'].includes(obj.mode)) {
+ return startsAtHTML(obj, env, obj.str /*, pb */);
+ } else {
+ // var target = env.normalizeAndResolvePageTitle();
+ // return TemplateRequest
+ // .setPageSrcInfo(env, target, argv.oldid)
+ // .then(function() {
+ // // Preserve fetched contentmodel.
+ // argv.contentmodel = argv.contentmodel ||
+ // env.page.meta.revision.contentmodel;
+ // return env.page.src;
+ // });
+ return startsAtWikitext(obj, env, obj.str);
+ }
+ })
+ .nodify(cb);
+};
diff --git a/npm-shrinkwrap.json b/npm-shrinkwrap.json
index 7db3193..4b17523 100644
--- a/npm-shrinkwrap.json
+++ b/npm-shrinkwrap.json
@@ -603,6 +603,11 @@
"from": "entities@>=1.1.1 <2.0.0",
"resolved": "https://registry.npmjs.org/entities/-/entities-1.1.1.tgz"
},
+ "errno": {
+ "version": "0.1.4",
+ "from": "errno@>=0.1.1 <0.2.0-0",
+ "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.4.tgz"
+ },
"error-ex": {
"version": "1.3.1",
"from": "error-ex@>=1.2.0 <2.0.0",
@@ -1936,6 +1941,11 @@
"from": "proxy-addr@>=1.1.3 <1.2.0",
"resolved":
"https://registry.npmjs.org/proxy-addr/-/proxy-addr-1.1.4.tgz"
},
+ "prr": {
+ "version": "0.0.0",
+ "from": "prr@>=0.0.0 <0.1.0",
+ "resolved": "https://registry.npmjs.org/prr/-/prr-0.0.0.tgz"
+ },
"punycode": {
"version": "1.4.1",
"from": "punycode@>=1.4.1 <2.0.0",
@@ -2423,6 +2433,11 @@
"from": "wordwrap@>=0.0.2 <0.1.0",
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz"
},
+ "worker-farm": {
+ "version": "1.3.1",
+ "from": "worker-farm@latest",
+ "resolved":
"https://registry.npmjs.org/worker-farm/-/worker-farm-1.3.1.tgz"
+ },
"wrap-ansi": {
"version": "2.1.0",
"from": "wrap-ansi@>=2.0.0 <3.0.0",
@@ -2442,8 +2457,7 @@
"xtend": {
"version": "4.0.1",
"from": "xtend@^4.0.0",
- "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz",
- "dev": true
+ "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz"
},
"y18n": {
"version": "3.2.1",
diff --git a/package.json b/package.json
index e9af930..da13cc3 100644
--- a/package.json
+++ b/package.json
@@ -28,6 +28,7 @@
"serve-favicon": "^2.4.2",
"service-runner": "^2.2.5",
"simplediff": "^0.1.1",
+ "worker-farm": "^1.3.1",
"yargs": "^6.6.0"
},
"devDependencies": {
--
To view, visit https://gerrit.wikimedia.org/r/348190
To unsubscribe, visit https://gerrit.wikimedia.org/r/settings
Gerrit-MessageType: newchange
Gerrit-Change-Id: I1e751e9cb322d770f43c9d1a0cacc529062854eb
Gerrit-PatchSet: 1
Gerrit-Project: mediawiki/services/parsoid
Gerrit-Branch: master
Gerrit-Owner: Arlolra <[email protected]>
_______________________________________________
MediaWiki-commits mailing list
[email protected]
https://lists.wikimedia.org/mailman/listinfo/mediawiki-commits