http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/.bin/uglifyjs
----------------------------------------------------------------------
diff --git a/node_modules/.bin/uglifyjs b/node_modules/.bin/uglifyjs
new file mode 100755
index 0000000..4768f76
--- /dev/null
+++ b/node_modules/.bin/uglifyjs
@@ -0,0 +1,560 @@
+#! /usr/bin/env node
+// -*- js -*-
+
+"use strict";
+
+var UglifyJS = require("../tools/node");
+var sys = require("util");
+var yargs = require("yargs");
+var fs = require("fs");
+var path = require("path");
+var async = require("async");
+var acorn;
+var ARGS = yargs
+    .usage("$0 input1.js [input2.js ...] [options]\n\
+Use a single dash to read input from the standard input.\
+\n\n\
+NOTE: by default there is no mangling/compression.\n\
+Without [options] it will simply parse input files and dump the AST\n\
+with whitespace and comments discarded.  To achieve compression and\n\
+mangling you need to use `-c` and `-m`.\
+")
+    .describe("source-map", "Specify an output file where to generate source 
map.")
+    .describe("source-map-root", "The path to the original source to be 
included in the source map.")
+    .describe("source-map-url", "The path to the source map to be added in //# 
sourceMappingURL.  Defaults to the value passed with --source-map.")
+    .describe("source-map-include-sources", "Pass this flag if you want to 
include the content of source files in the source map as sourcesContent 
property.")
+    .describe("in-source-map", "Input source map, useful if you're compressing 
JS that was generated from some other original code.")
+    .describe("screw-ie8", "Pass this flag if you don't care about full 
compliance with Internet Explorer 6-8 quirks (by default UglifyJS will try to 
be IE-proof).")
+    .describe("expr", "Parse a single expression, rather than a program (for 
parsing JSON)")
+    .describe("p", "Skip prefix for original filenames that appear in source 
maps. \
+For example -p 3 will drop 3 directories from file names and ensure they are 
relative paths. \
+You can also specify -p relative, which will make UglifyJS figure out itself 
the relative paths between original sources, \
+the source map and the output file.")
+    .describe("o", "Output file (default STDOUT).")
+    .describe("b", "Beautify output/specify output options.")
+    .describe("m", "Mangle names/pass mangler options.")
+    .describe("r", "Reserved names to exclude from mangling.")
+    .describe("c", "Enable compressor/pass compressor options. \
+Pass options like -c hoist_vars=false,if_return=false. \
+Use -c with no argument to use the default compression options.")
+    .describe("d", "Global definitions")
+    .describe("e", "Embed everything in a big function, with a configurable 
parameter/argument list.")
+
+    .describe("comments", "Preserve copyright comments in the output. \
+By default this works like Google Closure, keeping JSDoc-style comments that 
contain \"@license\" or \"@preserve\". \
+You can optionally pass one of the following arguments to this flag:\n\
+- \"all\" to keep all comments\n\
+- a valid JS regexp (needs to start with a slash) to keep only comments that 
match.\n\
+\
+Note that currently not *all* comments can be kept when compression is on, \
+because of dead code removal or cascading statements into sequences.")
+
+    .describe("preamble", "Preamble to prepend to the output.  You can use 
this to insert a \
+comment, for example for licensing information.  This will not be \
+parsed, but the source map will adjust for its presence.")
+
+    .describe("stats", "Display operations run time on STDERR.")
+    .describe("acorn", "Use Acorn for parsing.")
+    .describe("spidermonkey", "Assume input files are SpiderMonkey AST format 
(as JSON).")
+    .describe("self", "Build itself (UglifyJS2) as a library (implies 
--wrap=UglifyJS --export-all)")
+    .describe("wrap", "Embed everything in a big function, making the 
“exports” and “global” variables available. \
+You need to pass an argument to this option to specify the name that your 
module will take when included in, say, a browser.")
+    .describe("export-all", "Only used when --wrap, this tells UglifyJS to add 
code to automatically export all globals.")
+    .describe("lint", "Display some scope warnings")
+    .describe("v", "Verbose")
+    .describe("V", "Print version number and exit.")
+    .describe("noerr", "Don't throw an error for unknown options in -c, -b or 
-m.")
+    .describe("bare-returns", "Allow return outside of functions.  Useful when 
minifying CommonJS modules.")
+    .describe("keep-fnames", "Do not mangle/drop function names.  Useful for 
code relying on Function.prototype.name.")
+    .describe("quotes", "Quote style (0 - auto, 1 - single, 2 - double, 3 - 
original)")
+    .describe("reserved-file", "File containing reserved names")
+    .describe("reserve-domprops", "Make (most?) DOM properties reserved for 
--mangle-props")
+    .describe("mangle-props", "Mangle property names")
+    .describe("mangle-regex", "Only mangle property names matching the regex")
+    .describe("name-cache", "File to hold mangled names mappings")
+
+    .alias("p", "prefix")
+    .alias("o", "output")
+    .alias("v", "verbose")
+    .alias("b", "beautify")
+    .alias("m", "mangle")
+    .alias("c", "compress")
+    .alias("d", "define")
+    .alias("r", "reserved")
+    .alias("V", "version")
+    .alias("e", "enclose")
+    .alias("q", "quotes")
+
+    .string("source-map")
+    .string("source-map-root")
+    .string("source-map-url")
+    .string("b")
+    .string("beautify")
+    .string("m")
+    .string("mangle")
+    .string("c")
+    .string("compress")
+    .string("d")
+    .string("define")
+    .string("e")
+    .string("enclose")
+    .string("comments")
+    .string("wrap")
+    .string("p")
+    .string("prefix")
+    .string("name-cache")
+    .array("reserved-file")
+
+    .boolean("expr")
+    .boolean("source-map-include-sources")
+    .boolean("screw-ie8")
+    .boolean("export-all")
+    .boolean("self")
+    .boolean("v")
+    .boolean("verbose")
+    .boolean("stats")
+    .boolean("acorn")
+    .boolean("spidermonkey")
+    .boolean("lint")
+    .boolean("V")
+    .boolean("version")
+    .boolean("noerr")
+    .boolean("bare-returns")
+    .boolean("keep-fnames")
+    .boolean("mangle-props")
+    .boolean("reserve-domprops")
+
+    .wrap(80)
+
+    .argv
+;
+
+normalize(ARGS);
+
+if (ARGS.noerr) {
+    UglifyJS.DefaultsError.croak = function(msg, defs) {
+        print_error("WARN: " + msg);
+    };
+}
+
+if (ARGS.version || ARGS.V) {
+    var json = require("../package.json");
+    print(json.name + ' ' + json.version);
+    process.exit(0);
+}
+
+if (ARGS.ast_help) {
+    var desc = UglifyJS.describe_ast();
+    print(typeof desc == "string" ? desc : JSON.stringify(desc, null, 2));
+    process.exit(0);
+}
+
+if (ARGS.h || ARGS.help) {
+    print(yargs.help());
+    process.exit(0);
+}
+
+if (ARGS.acorn) {
+    acorn = require("acorn");
+}
+
+var COMPRESS = getOptions("c", true);
+var MANGLE = getOptions("m", true);
+var BEAUTIFY = getOptions("b", true);
+var RESERVED = null;
+
+if (ARGS.reserved_file) ARGS.reserved_file.forEach(function(filename){
+    RESERVED = UglifyJS.readReservedFile(filename, RESERVED);
+});
+
+if (ARGS.reserve_domprops) {
+    RESERVED = UglifyJS.readDefaultReservedFile(RESERVED);
+}
+
+if (ARGS.d) {
+    if (COMPRESS) COMPRESS.global_defs = getOptions("d");
+}
+
+if (ARGS.r) {
+    if (MANGLE) MANGLE.except = ARGS.r.replace(/^\s+|\s+$/g).split(/\s*,+\s*/);
+}
+
+if (RESERVED && MANGLE) {
+    if (!MANGLE.except) MANGLE.except = RESERVED.vars;
+    else MANGLE.except = MANGLE.except.concat(RESERVED.vars);
+}
+
+function readNameCache(key) {
+    return UglifyJS.readNameCache(ARGS.name_cache, key);
+}
+
+function writeNameCache(key, cache) {
+    return UglifyJS.writeNameCache(ARGS.name_cache, key, cache);
+}
+
+function extractRegex(str) {
+  if (/^\/.*\/[a-zA-Z]*$/.test(str)) {
+    var regex_pos = str.lastIndexOf("/");
+    return new RegExp(str.substr(1, regex_pos - 1), str.substr(regex_pos + 1));
+  } else {
+    throw new Error("Invalid regular expression: " + str);
+  }
+}
+
+if (ARGS.quotes === true) {
+    ARGS.quotes = 3;
+}
+
+var OUTPUT_OPTIONS = {
+    beautify    : BEAUTIFY ? true : false,
+    preamble    : ARGS.preamble || null,
+    quote_style : ARGS.quotes != null ? ARGS.quotes : 0
+};
+
+if (ARGS.screw_ie8) {
+    if (COMPRESS) COMPRESS.screw_ie8 = true;
+    if (MANGLE) MANGLE.screw_ie8 = true;
+    OUTPUT_OPTIONS.screw_ie8 = true;
+}
+
+if (ARGS.keep_fnames) {
+    if (COMPRESS) COMPRESS.keep_fnames = true;
+    if (MANGLE) MANGLE.keep_fnames = true;
+}
+
+if (BEAUTIFY)
+    UglifyJS.merge(OUTPUT_OPTIONS, BEAUTIFY);
+
+if (ARGS.comments != null) {
+    if (/^\/.*\/[a-zA-Z]*$/.test(ARGS.comments)) {
+        try {
+            OUTPUT_OPTIONS.comments = extractRegex(ARGS.comments);
+        } catch (e) {
+            print_error("ERROR: Invalid --comments: " + e.message);
+        }
+    } else if (ARGS.comments == "all") {
+        OUTPUT_OPTIONS.comments = true;
+    } else {
+        OUTPUT_OPTIONS.comments = function(node, comment) {
+            var text = comment.value;
+            var type = comment.type;
+            if (type == "comment2") {
+                // multiline comment
+                return /@preserve|@license|@cc_on/i.test(text);
+            }
+        }
+    }
+}
+
+var files = ARGS._.slice();
+
+if (ARGS.self) {
+    if (files.length > 0) {
+        print_error("WARN: Ignoring input files since --self was passed");
+    }
+    files = UglifyJS.FILES;
+    if (!ARGS.wrap) ARGS.wrap = "UglifyJS";
+    ARGS.export_all = true;
+}
+
+var ORIG_MAP = ARGS.in_source_map;
+
+if (ORIG_MAP) {
+    ORIG_MAP = JSON.parse(fs.readFileSync(ORIG_MAP));
+    if (files.length == 0) {
+        print_error("INFO: Using file from the input source map: " + 
ORIG_MAP.file);
+        files = [ ORIG_MAP.file ];
+    }
+    if (ARGS.source_map_root == null) {
+        ARGS.source_map_root = ORIG_MAP.sourceRoot;
+    }
+}
+
+if (files.length == 0) {
+    files = [ "-" ];
+}
+
+if (files.indexOf("-") >= 0 && ARGS.source_map) {
+    print_error("ERROR: Source map doesn't work with input from STDIN");
+    process.exit(1);
+}
+
+if (files.filter(function(el){ return el == "-" }).length > 1) {
+    print_error("ERROR: Can read a single file from STDIN (two or more dashes 
specified)");
+    process.exit(1);
+}
+
+var STATS = {};
+var OUTPUT_FILE = ARGS.o;
+var TOPLEVEL = null;
+var P_RELATIVE = ARGS.p && ARGS.p == "relative";
+var SOURCES_CONTENT = {};
+
+var SOURCE_MAP = ARGS.source_map ? UglifyJS.SourceMap({
+    file: P_RELATIVE ? path.relative(path.dirname(ARGS.source_map), 
OUTPUT_FILE) : OUTPUT_FILE,
+    root: ARGS.source_map_root,
+    orig: ORIG_MAP,
+}) : null;
+
+OUTPUT_OPTIONS.source_map = SOURCE_MAP;
+
+try {
+    var output = UglifyJS.OutputStream(OUTPUT_OPTIONS);
+    var compressor = COMPRESS && UglifyJS.Compressor(COMPRESS);
+} catch(ex) {
+    if (ex instanceof UglifyJS.DefaultsError) {
+        print_error(ex.msg);
+        print_error("Supported options:");
+        print_error(sys.inspect(ex.defs));
+        process.exit(1);
+    }
+}
+
+async.eachLimit(files, 1, function (file, cb) {
+    read_whole_file(file, function (err, code) {
+        if (err) {
+            print_error("ERROR: can't read file: " + file);
+            process.exit(1);
+        }
+        if (ARGS.p != null) {
+            if (P_RELATIVE) {
+                file = path.relative(path.dirname(ARGS.source_map), 
file).replace(/\\/g, '/');
+            } else {
+                var p = parseInt(ARGS.p, 10);
+                if (!isNaN(p)) {
+                    file = file.replace(/^\/+/, 
"").split(/\/+/).slice(ARGS.p).join("/");
+                }
+            }
+        }
+        SOURCES_CONTENT[file] = code;
+        time_it("parse", function(){
+            if (ARGS.spidermonkey) {
+                var program = JSON.parse(code);
+                if (!TOPLEVEL) TOPLEVEL = program;
+                else TOPLEVEL.body = TOPLEVEL.body.concat(program.body);
+            }
+            else if (ARGS.acorn) {
+                TOPLEVEL = acorn.parse(code, {
+                    locations     : true,
+                    sourceFile    : file,
+                    program       : TOPLEVEL
+                });
+            }
+            else {
+                try {
+                    TOPLEVEL = UglifyJS.parse(code, {
+                        filename     : file,
+                        toplevel     : TOPLEVEL,
+                        expression   : ARGS.expr,
+                        bare_returns : ARGS.bare_returns,
+                    });
+                } catch(ex) {
+                    if (ex instanceof UglifyJS.JS_Parse_Error) {
+                        print_error("Parse error at " + file + ":" + ex.line + 
"," + ex.col);
+                        print_error(ex.message);
+                        print_error(ex.stack);
+                        process.exit(1);
+                    }
+                    throw ex;
+                }
+            };
+        });
+        cb();
+    });
+}, function () {
+    if (ARGS.acorn || ARGS.spidermonkey) time_it("convert_ast", function(){
+        TOPLEVEL = UglifyJS.AST_Node.from_mozilla_ast(TOPLEVEL);
+    });
+
+    if (ARGS.wrap != null) {
+        TOPLEVEL = TOPLEVEL.wrap_commonjs(ARGS.wrap, ARGS.export_all);
+    }
+
+    if (ARGS.enclose != null) {
+        var arg_parameter_list = ARGS.enclose;
+        if (arg_parameter_list === true) {
+            arg_parameter_list = [];
+        }
+        else if (!(arg_parameter_list instanceof Array)) {
+            arg_parameter_list = [arg_parameter_list];
+        }
+        TOPLEVEL = TOPLEVEL.wrap_enclose(arg_parameter_list);
+    }
+
+    if (ARGS.mangle_props || ARGS.name_cache) (function(){
+        var reserved = RESERVED ? RESERVED.props : null;
+        var cache = readNameCache("props");
+        var regex;
+
+        try {
+          regex = ARGS.mangle_regex ? extractRegex(ARGS.mangle_regex) : null;
+        } catch (e) {
+            print_error("ERROR: Invalid --mangle-regex: " + e.message);
+            process.exit(1);
+        }
+
+        TOPLEVEL = UglifyJS.mangle_properties(TOPLEVEL, {
+            reserved   : reserved,
+            cache      : cache,
+            only_cache : !ARGS.mangle_props,
+            regex      : regex
+        });
+        writeNameCache("props", cache);
+    })();
+
+    var SCOPE_IS_NEEDED = COMPRESS || MANGLE || ARGS.lint;
+    var TL_CACHE = readNameCache("vars");
+
+    if (SCOPE_IS_NEEDED) {
+        time_it("scope", function(){
+            TOPLEVEL.figure_out_scope({ screw_ie8: ARGS.screw_ie8, cache: 
TL_CACHE });
+            if (ARGS.lint) {
+                TOPLEVEL.scope_warnings();
+            }
+        });
+    }
+
+    if (COMPRESS) {
+        time_it("squeeze", function(){
+            TOPLEVEL = TOPLEVEL.transform(compressor);
+        });
+    }
+
+    if (SCOPE_IS_NEEDED) {
+        time_it("scope", function(){
+            TOPLEVEL.figure_out_scope({ screw_ie8: ARGS.screw_ie8, cache: 
TL_CACHE });
+            if (MANGLE && !TL_CACHE) {
+                TOPLEVEL.compute_char_frequency(MANGLE);
+            }
+        });
+    }
+
+    if (MANGLE) time_it("mangle", function(){
+        MANGLE.cache = TL_CACHE;
+        TOPLEVEL.mangle_names(MANGLE);
+    });
+
+    writeNameCache("vars", TL_CACHE);
+
+    if (ARGS.source_map_include_sources) {
+        for (var file in SOURCES_CONTENT) {
+            if (SOURCES_CONTENT.hasOwnProperty(file)) {
+                SOURCE_MAP.get().setSourceContent(file, SOURCES_CONTENT[file]);
+            }
+        }
+    }
+
+    time_it("generate", function(){
+        TOPLEVEL.print(output);
+    });
+
+    output = output.get();
+
+    if (SOURCE_MAP) {
+        fs.writeFileSync(ARGS.source_map, SOURCE_MAP, "utf8");
+        var source_map_url = ARGS.source_map_url || (
+            P_RELATIVE
+                ? path.relative(path.dirname(OUTPUT_FILE), ARGS.source_map)
+                : ARGS.source_map
+        );
+        output += "\n//# sourceMappingURL=" + source_map_url;
+    }
+
+    if (OUTPUT_FILE) {
+        fs.writeFileSync(OUTPUT_FILE, output, "utf8");
+    } else {
+        print(output);
+    }
+
+    if (ARGS.stats) {
+        print_error(UglifyJS.string_template("Timing information (compressed 
{count} files):", {
+            count: files.length
+        }));
+        for (var i in STATS) if (STATS.hasOwnProperty(i)) {
+            print_error(UglifyJS.string_template("- {name}: {time}s", {
+                name: i,
+                time: (STATS[i] / 1000).toFixed(3)
+            }));
+        }
+    }
+});
+
+/* -----[ functions ]----- */
+
+function normalize(o) {
+    for (var i in o) if (o.hasOwnProperty(i) && /-/.test(i)) {
+        o[i.replace(/-/g, "_")] = o[i];
+        delete o[i];
+    }
+}
+
+function getOptions(x, constants) {
+    x = ARGS[x];
+    if (x == null) return null;
+    var ret = {};
+    if (x !== "") {
+        var ast;
+        try {
+            ast = UglifyJS.parse(x, { expression: true });
+        } catch(ex) {
+            if (ex instanceof UglifyJS.JS_Parse_Error) {
+                print_error("Error parsing arguments in: " + x);
+                process.exit(1);
+            }
+        }
+        ast.walk(new UglifyJS.TreeWalker(function(node){
+            if (node instanceof UglifyJS.AST_Seq) return; // descend
+            if (node instanceof UglifyJS.AST_Assign) {
+                var name = node.left.print_to_string({ beautify: false 
}).replace(/-/g, "_");
+                var value = node.right;
+                if (constants)
+                    value = new Function("return (" + value.print_to_string() 
+ ")")();
+                ret[name] = value;
+                return true;    // no descend
+            }
+            if (node instanceof UglifyJS.AST_Symbol || node instanceof 
UglifyJS.AST_Binary) {
+                var name = node.print_to_string({ beautify: false 
}).replace(/-/g, "_");
+                ret[name] = true;
+                return true;    // no descend
+            }
+            print_error(node.TYPE)
+            print_error("Error parsing arguments in: " + x);
+            process.exit(1);
+        }));
+    }
+    return ret;
+}
+
+function read_whole_file(filename, cb) {
+    if (filename == "-") {
+        var chunks = [];
+        process.stdin.setEncoding('utf-8');
+        process.stdin.on('data', function (chunk) {
+            chunks.push(chunk);
+        }).on('end', function () {
+            cb(null, chunks.join(""));
+        });
+        process.openStdin();
+    } else {
+        fs.readFile(filename, "utf-8", cb);
+    }
+}
+
+function time_it(name, cont) {
+    var t1 = new Date().getTime();
+    var ret = cont();
+    if (ARGS.stats) {
+        var spent = new Date().getTime() - t1;
+        if (STATS[name]) STATS[name] += spent;
+        else STATS[name] = spent;
+    }
+    return ret;
+}
+
+function print_error(msg) {
+    console.error("%s", msg);
+}
+
+function print(txt) {
+    console.log("%s", txt);
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/.bin/uuid
----------------------------------------------------------------------
diff --git a/node_modules/.bin/uuid b/node_modules/.bin/uuid
new file mode 100755
index 0000000..f732e99
--- /dev/null
+++ b/node_modules/.bin/uuid
@@ -0,0 +1,26 @@
+#!/usr/bin/env node
+
+var path = require('path');
+var uuid = require(path.join(__dirname, '..'));
+
+var arg = process.argv[2];
+
+if ('--help' === arg) {
+  console.log('\n  USAGE: uuid [version] [options]\n\n');
+  console.log('  options:\n');
+  console.log('  --help                     Display this message and exit\n');
+  process.exit(0);
+}
+
+if (null == arg) {
+  console.log(uuid());
+  process.exit(0);
+}
+
+if ('v1' !== arg && 'v4' !== arg) {
+  console.error('Version must be RFC4122 version 1 or version 4, denoted as 
"v1" or "v4"');
+  process.exit(1);
+}
+
+console.log(uuid[arg]());
+process.exit(0);

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/.bin/which
----------------------------------------------------------------------
diff --git a/node_modules/.bin/which b/node_modules/.bin/which
new file mode 100755
index 0000000..7cee372
--- /dev/null
+++ b/node_modules/.bin/which
@@ -0,0 +1,52 @@
+#!/usr/bin/env node
+var which = require("../")
+if (process.argv.length < 3)
+  usage()
+
+function usage () {
+  console.error('usage: which [-as] program ...')
+  process.exit(1)
+}
+
+var all = false
+var silent = false
+var dashdash = false
+var args = process.argv.slice(2).filter(function (arg) {
+  if (dashdash || !/^-/.test(arg))
+    return true
+
+  if (arg === '--') {
+    dashdash = true
+    return false
+  }
+
+  var flags = arg.substr(1).split('')
+  for (var f = 0; f < flags.length; f++) {
+    var flag = flags[f]
+    switch (flag) {
+      case 's':
+        silent = true
+        break
+      case 'a':
+        all = true
+        break
+      default:
+        console.error('which: illegal option -- ' + flag)
+        usage()
+    }
+  }
+  return false
+})
+
+process.exit(args.reduce(function (pv, current) {
+  try {
+    var f = which.sync(current, { all: all })
+    if (all)
+      f = f.join('\n')
+    if (!silent)
+      console.log(f)
+    return pv;
+  } catch (e) {
+    return 1;
+  }
+}, 0))

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/.bin/window-size
----------------------------------------------------------------------
diff --git a/node_modules/.bin/window-size b/node_modules/.bin/window-size
new file mode 100755
index 0000000..dd8d0e5
--- /dev/null
+++ b/node_modules/.bin/window-size
@@ -0,0 +1,30 @@
+#!/usr/bin/env node
+'use strict';
+var helpText = ['Usage',
+'  $ window-size',
+'',
+'Example',
+'  $ window-size',
+'  height: 40 ',
+'  width : 145',
+''].join('\n');
+
+function showSize () {
+  var size = require('./');
+  console.log('height: ' + size.height);
+  console.log('width : ' + size.width);
+}
+
+if (process.argv.length > 2) {
+  switch (process.argv[2]) {
+    case 'help':
+    case '--help':
+    case '-h':
+      console.log(helpText);
+      break;
+    default:
+      showSize();
+  }
+} else {
+  showSize();
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/.npmignore
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/.npmignore 
b/node_modules/JSONStream/.npmignore
new file mode 100644
index 0000000..a9a9d58
--- /dev/null
+++ b/node_modules/JSONStream/.npmignore
@@ -0,0 +1,2 @@
+node_modules/*
+node_modules

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/.travis.yml
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/.travis.yml 
b/node_modules/JSONStream/.travis.yml
new file mode 100644
index 0000000..5f30bb5
--- /dev/null
+++ b/node_modules/JSONStream/.travis.yml
@@ -0,0 +1,8 @@
+language: node_js
+node_js:
+  - 4
+  - 5
+  - 6
+sudo: false
+
+

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/LICENSE.APACHE2
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/LICENSE.APACHE2 
b/node_modules/JSONStream/LICENSE.APACHE2
new file mode 100644
index 0000000..6366c04
--- /dev/null
+++ b/node_modules/JSONStream/LICENSE.APACHE2
@@ -0,0 +1,15 @@
+Apache License, Version 2.0
+
+Copyright (c) 2011 Dominic Tarr
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/LICENSE.MIT
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/LICENSE.MIT 
b/node_modules/JSONStream/LICENSE.MIT
new file mode 100644
index 0000000..6eafbd7
--- /dev/null
+++ b/node_modules/JSONStream/LICENSE.MIT
@@ -0,0 +1,24 @@
+The MIT License
+
+Copyright (c) 2011 Dominic Tarr
+
+Permission is hereby granted, free of charge, 
+to any person obtaining a copy of this software and 
+associated documentation files (the "Software"), to 
+deal in the Software without restriction, including 
+without limitation the rights to use, copy, modify, 
+merge, publish, distribute, sublicense, and/or sell 
+copies of the Software, and to permit persons to whom 
+the Software is furnished to do so, 
+subject to the following conditions:
+
+The above copyright notice and this permission notice 
+shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR 
+ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/examples/all_docs.js
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/examples/all_docs.js 
b/node_modules/JSONStream/examples/all_docs.js
new file mode 100644
index 0000000..fa87fe5
--- /dev/null
+++ b/node_modules/JSONStream/examples/all_docs.js
@@ -0,0 +1,13 @@
+var request = require('request')
+  , JSONStream = require('JSONStream')
+  , es = require('event-stream')
+
+var parser = JSONStream.parse(['rows', true]) //emit parts that match this 
path (any element of the rows array)
+  , req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
+  , logger = es.mapSync(function (data) {  //create a stream that logs to 
stderr,
+    console.error(data)
+    return data  
+  })
+
+req.pipe(parser)
+parser.pipe(logger)

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/index.js
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/index.js b/node_modules/JSONStream/index.js
new file mode 100755
index 0000000..6d68a19
--- /dev/null
+++ b/node_modules/JSONStream/index.js
@@ -0,0 +1,253 @@
+#! /usr/bin/env node
+
+'use strict'
+
+var Parser = require('jsonparse')
+  , through = require('through')
+
+/*
+
+  the value of this.stack that creationix's jsonparse has is weird.
+
+  it makes this code ugly, but his problem is way harder that mine,
+  so i'll forgive him.
+
+*/
+
+exports.parse = function (path, map) {
+  var header, footer
+  var parser = new Parser()
+  var stream = through(function (chunk) {
+    if('string' === typeof chunk)
+      chunk = new Buffer(chunk)
+    parser.write(chunk)
+  },
+  function (data) {
+    if(data)
+      stream.write(data)
+    if (header)
+        stream.emit('header', header)
+    if (footer)
+      stream.emit('footer', footer)
+    stream.queue(null)
+  })
+
+  if('string' === typeof path)
+    path = path.split('.').map(function (e) {
+      if (e === '$*')
+        return {emitKey: true}
+      else if (e === '*')
+        return true
+      else if (e === '') // '..'.split('.') returns an empty string
+        return {recurse: true}
+      else
+        return e
+    })
+
+
+  var count = 0, _key
+  if(!path || !path.length)
+    path = null
+
+  parser.onValue = function (value) {
+    if (!this.root)
+      stream.root = value
+
+    if(! path) return
+
+    var i = 0 // iterates on path
+    var j  = 0 // iterates on stack
+    var emitKey = false;
+    var emitPath = false;
+    while (i < path.length) {
+      var key = path[i]
+      var c
+      j++
+
+      if (key && !key.recurse) {
+        c = (j === this.stack.length) ? this : this.stack[j]
+        if (!c) return
+        if (! check(key, c.key)) {
+          setHeaderFooter(c.key, value)
+          return
+        }
+        emitKey = !!key.emitKey;
+        emitPath = !!key.emitPath;
+        i++
+      } else {
+        i++
+        var nextKey = path[i]
+        if (! nextKey) return
+        while (true) {
+          c = (j === this.stack.length) ? this : this.stack[j]
+          if (!c) return
+          if (check(nextKey, c.key)) {
+            i++;
+            if (!Object.isFrozen(this.stack[j]))
+              this.stack[j].value = null
+            break
+          } else {
+            setHeaderFooter(c.key, value)
+          }
+          j++
+        }
+      }
+
+    }
+
+    // emit header
+    if (header) {
+      stream.emit('header', header);
+      header = false;
+    }
+    if (j !== this.stack.length) return
+
+    count ++
+    var actualPath = this.stack.slice(1).map(function(element) { return 
element.key }).concat([this.key])
+    var data = this.value[this.key]
+    if(null != data)
+      if(null != (data = map ? map(data, actualPath) : data)) {
+        if (emitKey || emitPath) {
+          data = { value: data };
+          if (emitKey)
+            data["key"] = this.key;
+          if (emitPath)
+            data["path"] = actualPath;
+        }
+
+        stream.queue(data)
+      }
+    delete this.value[this.key]
+    for(var k in this.stack)
+      if (!Object.isFrozen(this.stack[k]))
+        this.stack[k].value = null
+  }
+  parser._onToken = parser.onToken;
+
+  parser.onToken = function (token, value) {
+    parser._onToken(token, value);
+    if (this.stack.length === 0) {
+      if (stream.root) {
+        if(!path)
+          stream.queue(stream.root)
+        count = 0;
+        stream.root = null;
+      }
+    }
+  }
+
+  parser.onError = function (err) {
+    if(err.message.indexOf("at position") > -1)
+      err.message = "Invalid JSON (" + err.message + ")";
+    stream.emit('error', err)
+  }
+
+  return stream
+
+  function setHeaderFooter(key, value) {
+    // header has not been emitted yet
+    if (header !== false) {
+      header = header || {}
+      header[key] = value
+    }
+
+    // footer has not been emitted yet but header has
+    if (footer !== false && header === false) {
+      footer = footer || {}
+      footer[key] = value
+    }
+  }
+}
+
+function check (x, y) {
+  if ('string' === typeof x)
+    return y == x
+  else if (x && 'function' === typeof x.exec)
+    return x.exec(y)
+  else if ('boolean' === typeof x || 'object' === typeof x)
+    return x
+  else if ('function' === typeof x)
+    return x(y)
+  return false
+}
+
+exports.stringify = function (op, sep, cl, indent) {
+  indent = indent || 0
+  if (op === false){
+    op = ''
+    sep = '\n'
+    cl = ''
+  } else if (op == null) {
+
+    op = '[\n'
+    sep = '\n,\n'
+    cl = '\n]\n'
+
+  }
+
+  //else, what ever you like
+
+  var stream
+    , first = true
+    , anyData = false
+  stream = through(function (data) {
+    anyData = true
+    try {
+      var json = JSON.stringify(data, null, indent)
+    } catch (err) {
+      return stream.emit('error', err)
+    }
+    if(first) { first = false ; stream.queue(op + json)}
+    else stream.queue(sep + json)
+  },
+  function (data) {
+    if(!anyData)
+      stream.queue(op)
+    stream.queue(cl)
+    stream.queue(null)
+  })
+
+  return stream
+}
+
+exports.stringifyObject = function (op, sep, cl, indent) {
+  indent = indent || 0
+  if (op === false){
+    op = ''
+    sep = '\n'
+    cl = ''
+  } else if (op == null) {
+
+    op = '{\n'
+    sep = '\n,\n'
+    cl = '\n}\n'
+
+  }
+
+  //else, what ever you like
+
+  var first = true
+  var anyData = false
+  var stream = through(function (data) {
+    anyData = true
+    var json = JSON.stringify(data[0]) + ':' + JSON.stringify(data[1], null, 
indent)
+    if(first) { first = false ; this.queue(op + json)}
+    else this.queue(sep + json)
+  },
+  function (data) {
+    if(!anyData) this.queue(op)
+    this.queue(cl)
+
+    this.queue(null)
+  })
+
+  return stream
+}
+
+if(!module.parent && process.title !== 'browser') {
+  process.stdin
+    .pipe(exports.parse(process.argv[2]))
+    .pipe(exports.stringify('[', ',\n', ']\n', 2))
+    .pipe(process.stdout)
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/package.json
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/package.json 
b/node_modules/JSONStream/package.json
new file mode 100644
index 0000000..9f9f856
--- /dev/null
+++ b/node_modules/JSONStream/package.json
@@ -0,0 +1,113 @@
+{
+  "_args": [
+    [
+      {
+        "raw": "JSONStream@^1.0.7",
+        "scope": null,
+        "escapedName": "JSONStream",
+        "name": "JSONStream",
+        "rawSpec": "^1.0.7",
+        "spec": ">=1.0.7 <2.0.0",
+        "type": "range"
+      },
+      "/Users/yueguo/tmp/griffin-site/node_modules/warehouse"
+    ]
+  ],
+  "_from": "JSONStream@>=1.0.7 <2.0.0",
+  "_id": "[email protected]",
+  "_inCache": true,
+  "_installable": true,
+  "_location": "/JSONStream",
+  "_nodeVersion": "6.9.4",
+  "_npmOperationalInternal": {
+    "host": "packages-12-west.internal.npmjs.com",
+    "tmp": "tmp/JSONStream-1.3.1.tgz_1487992062630_0.4616028449963778"
+  },
+  "_npmUser": {
+    "name": "dominictarr",
+    "email": "[email protected]"
+  },
+  "_npmVersion": "3.10.10",
+  "_phantomChildren": {},
+  "_requested": {
+    "raw": "JSONStream@^1.0.7",
+    "scope": null,
+    "escapedName": "JSONStream",
+    "name": "JSONStream",
+    "rawSpec": "^1.0.7",
+    "spec": ">=1.0.7 <2.0.0",
+    "type": "range"
+  },
+  "_requiredBy": [
+    "/warehouse"
+  ],
+  "_resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.1.tgz";,
+  "_shasum": "707f761e01dae9e16f1bcf93703b78c70966579a",
+  "_shrinkwrap": null,
+  "_spec": "JSONStream@^1.0.7",
+  "_where": "/Users/yueguo/tmp/griffin-site/node_modules/warehouse",
+  "author": {
+    "name": "Dominic Tarr",
+    "email": "[email protected]",
+    "url": "http://bit.ly/dominictarr";
+  },
+  "bin": {
+    "JSONStream": "./index.js"
+  },
+  "bugs": {
+    "url": "https://github.com/dominictarr/JSONStream/issues";
+  },
+  "dependencies": {
+    "jsonparse": "^1.2.0",
+    "through": ">=2.2.7 <3"
+  },
+  "description": "rawStream.pipe(JSONStream.parse()).pipe(streamOfObjects)",
+  "devDependencies": {
+    "assertions": "~2.2.2",
+    "event-stream": "~0.7.0",
+    "it-is": "~1",
+    "render": "~0.1.1",
+    "tape": "~2.12.3",
+    "trees": "~0.0.3"
+  },
+  "directories": {},
+  "dist": {
+    "shasum": "707f761e01dae9e16f1bcf93703b78c70966579a",
+    "tarball": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.1.tgz";
+  },
+  "engines": {
+    "node": "*"
+  },
+  "gitHead": "71ab5610d272bb47e64957e2191df6662ee64a90",
+  "homepage": "http://github.com/dominictarr/JSONStream";,
+  "keywords": [
+    "json",
+    "stream",
+    "streaming",
+    "parser",
+    "async",
+    "parsing"
+  ],
+  "license": "(MIT OR Apache-2.0)",
+  "maintainers": [
+    {
+      "name": "dominictarr",
+      "email": "[email protected]"
+    },
+    {
+      "name": "doowb",
+      "email": "[email protected]"
+    }
+  ],
+  "name": "JSONStream",
+  "optionalDependencies": {},
+  "readme": "ERROR: No README data found!",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/dominictarr/JSONStream.git"
+  },
+  "scripts": {
+    "test": "set -e; for t in test/*.js; do echo '***' $t '***'; node $t; done"
+  },
+  "version": "1.3.1"
+}

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/readme.markdown
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/readme.markdown 
b/node_modules/JSONStream/readme.markdown
new file mode 100644
index 0000000..422c3df
--- /dev/null
+++ b/node_modules/JSONStream/readme.markdown
@@ -0,0 +1,207 @@
+# JSONStream
+
+streaming JSON.parse and stringify
+
+![](https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master)
+
+## install
+```npm install JSONStream```
+
+## example
+
+``` js
+
+var request = require('request')
+  , JSONStream = require('JSONStream')
+  , es = require('event-stream')
+
+request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
+  .pipe(JSONStream.parse('rows.*'))
+  .pipe(es.mapSync(function (data) {
+    console.error(data)
+    return data
+  }))
+```
+
+## JSONStream.parse(path)
+
+parse stream of values that match a path
+
+``` js
+  JSONStream.parse('rows.*.doc')
+```
+
+The `..` operator is the recursive descent operator from 
[JSONPath](http://goessner.net/articles/JsonPath/), which will match a child at 
any depth (see examples below).
+
+If your keys contain characters such as `.` or `*`, use an array instead:
+`['row', true, /^doc/]`.
+
+If you use an array, it may contain strings, `RegExp`s, booleans, and/or 
functions. The `..` operator 
is also available in array representation, using `{recurse: true}`.
+Any object that matches the path will be emitted as 'data' (and `pipe`d down 
stream).
+
+If `path` is empty or null, no 'data' events are emitted.
+
+If you want to have keys emitted, you can prefix your `*` operator with `$`: 
`obj.$*` - in this case the data passed to the stream is an object with a `key` 
holding the key and a `value` property holding the data.
+
+### Examples
+
+query a couchdb view:
+
+``` bash
+curl -sS localhost:5984/tests/_all_docs&include_docs=true
+```
+you will get something like this:
+
+``` js
+{"total_rows":129,"offset":0,"rows":[
+  { "id":"change1_0.6995461115147918"
+  , "key":"change1_0.6995461115147918"
+  , "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
+  , "doc":{
+      "_id":  "change1_0.6995461115147918"
+    , "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
+  },
+  { "id":"change2_0.6995461115147918"
+  , "key":"change2_0.6995461115147918"
+  , "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
+  , "doc":{
+      "_id":"change2_0.6995461115147918"
+    , "_rev":"1-13677d36b98c0c075145bb8975105153"
+    , "hello":2
+    }
+  },
+]}
+
+```
+
+we are probably most interested in the `rows.*.doc`
+
+create a `Stream` that parses the documents from the feed like this:
+
+``` js
+var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc
+
+stream.on('data', function(data) {
+  console.log('received:', data);
+});
+//emits anything from _before_ the first match
+stream.on('header', function (data) {
+  console.log('header:', data) // => {"total_rows":129,"offset":0}
+})
+
+```
+awesome!
+
+In case you want the contents of the doc emitted:
+
+``` js
+var stream = JSONStream.parse(['rows', true, 'doc', {emitKey: true}]) //rows, 
ANYTHING, doc, items in docs with keys
+
+stream.on('data', function(data) {
+  console.log('key:', data.key);
+  console.log('value:', data.value);
+});
+
+```
+
+You can also emit the path:
+
+``` js
+var stream = JSONStream.parse(['rows', true, 'doc', {emitPath: true}]) //rows, 
ANYTHING, doc, items in docs with keys
+
+stream.on('data', function(data) {
+  console.log('path:', data.path);
+  console.log('value:', data.value);
+});
+
+```
+
+### recursive patterns (..)
+
+`JSONStream.parse('docs..value')` 
+(or `JSONStream.parse(['docs', {recurse: true}, 'value'])` using an array)
+will emit every `value` object that is a child, grand-child, etc. of the 
+`docs` object. In this example, it will match exactly 5 times at various depth
+levels, emitting 0, 1, 2, 3 and 4 as results.
+
+```js
+{
+  "total": 5,
+  "docs": [
+    {
+      "key": {
+        "value": 0,
+        "some": "property"
+      }
+    },
+    {"value": 1},
+    {"value": 2},
+    {"blbl": [{}, {"a":0, "b":1, "value":3}, 10]},
+    {"value": 4}
+  ]
+}
+```
+
+## JSONStream.parse(pattern, map)
+
+provide a function that can be used to map or filter
+the json output. `map` is passed the value at that node of the pattern,
+if `map` return non-nullish (anything but `null` or `undefined`)
+that value will be emitted in the stream. If it returns a nullish value,
+nothing will be emitted.
+
+`JSONStream` also emits `'header'` and `'footer'` events,
+the `'header'` event contains anything in the output that was before
+the first match, and the `'footer'`, is anything after the last match.
+
+## JSONStream.stringify(open, sep, close)
+
+Create a writable stream.
+
+you may pass in custom `open`, `close`, and `separator` strings.
+But, by default, `JSONStream.stringify()` will create an array,
+(with default options `open='[\n', sep='\n,\n', close='\n]\n'`)
+
+If you call `JSONStream.stringify(false)`
+the elements will only be separated by a newline.
+
+If you only write one item this will be valid JSON.
+
+If you write many items,
+you can use a `RegExp` to split it into valid chunks.
+
+## JSONStream.stringifyObject(open, sep, close)
+
+Very much like `JSONStream.stringify`,
+but creates a writable stream for objects instead of arrays.
+
+Accordingly, `open='{\n', sep='\n,\n', close='\n}\n'`.
+
+When you `.write()` to the stream you must supply an array with `[ key, data ]`
+as the first argument.
+
+## unix tool
+
+query npm to see all the modules that browserify has ever depended on.
+
+``` bash
+curl https://registry.npmjs.org/browserify | JSONStream 
'versions.*.dependencies'
+```
+
+## numbers
+
+numbers will be emitted as numbers.
+huge numbers that cannot be represented in memory as javascript numbers will 
be emitted as strings.
+cf 
https://github.com/creationix/jsonparse/commit/044b268f01c4b8f97fb936fc85d3bcfba179e5bb
 for details.
+
+## Acknowledgements
+
+this module depends on https://github.com/creationix/jsonparse
+by Tim Caswell
+and also thanks to Florent Jaby for teaching me about parsing with:
+https://github.com/Floby/node-json-streams
+
+## license
+
+Dual-licensed under the MIT License or the Apache License, version 2.0
+

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/test/bool.js
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/test/bool.js 
b/node_modules/JSONStream/test/bool.js
new file mode 100644
index 0000000..6c386d6
--- /dev/null
+++ b/node_modules/JSONStream/test/bool.js
@@ -0,0 +1,41 @@
+
+// Round-trip test: stringify a batch of random objects/arrays through
+// JSONStream.stringify, parse them back with parse([true]), and assert the
+// output matches the input.
+var fs = require ('fs')
+  , join = require('path').join
+  , file = join(__dirname, 'fixtures','all_npm.json')
+  , JSONStream = require('../')
+  , it = require('it-is').style('colour')
+
+  // Produce either a random nested object or a fixed nested array,
+  // so both value shapes are exercised by the round trip.
+  function randomObj () {
+    return (
+      Math.random () < 0.4
+      ? {hello: 'eonuhckmqjk',
+          whatever: 236515,
+          lies: true,
+          nothing: [null],
+//          stuff: [Math.random(),Math.random(),Math.random()]
+        } 
+      : ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
+    )
+  }
+
+var expected =  []
+  , stringify = JSONStream.stringify()
+  , es = require('event-stream')
+  , stringified = ''
+  , called = 0
+  , count = 10
+  , ended = false
+  
+while (count --)
+  expected.push(randomObj())
+
+  // Pipe: array source -> stringify -> parse -> collect, then assert.
+  es.connect(
+    es.readArray(expected),
+    stringify,
+    JSONStream.parse([true]),
+    es.writeArray(function (err, lines) {
+    
+      it(lines).has(expected)
+      console.error('PASSED')
+    })
+  )

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/test/browser.js
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/test/browser.js 
b/node_modules/JSONStream/test/browser.js
new file mode 100644
index 0000000..3c28d49
--- /dev/null
+++ b/node_modules/JSONStream/test/browser.js
@@ -0,0 +1,18 @@
+// Browser-oriented smoke test: parse a small inline document with the
+// 'rows.*' pattern and check both row objects are emitted.
+var test = require('tape')
+var JSONStream = require('../')
+var testData = '{"rows":[{"hello":"world"}, {"foo": "bar"}]}'
+
+test('basic parsing', function (t) {
+  t.plan(2)
+  var parsed = JSONStream.parse("rows.*")
+  var parsedKeys = {}
+  parsed.on('data', function(match) {
+    // Record the first key of each emitted row so we can assert coverage.
+    parsedKeys[Object.keys(match)[0]] = true
+  })
+  parsed.on('end', function() {
+    t.equal(!!parsedKeys['hello'], true)
+    t.equal(!!parsedKeys['foo'], true)
+  })
+  parsed.write(testData)
+  parsed.end()
+})
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/test/destroy_missing.js
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/test/destroy_missing.js 
b/node_modules/JSONStream/test/destroy_missing.js
new file mode 100644
index 0000000..315fdc8
--- /dev/null
+++ b/node_modules/JSONStream/test/destroy_missing.js
@@ -0,0 +1,27 @@
+// Regression test: ending the socket mid-stream must still let the parser
+// emit 'end' (i.e. destroy() must not be missing on the parser stream).
+var fs = require ('fs');
+var net = require('net');
+var join = require('path').join;
+var file = join(__dirname, 'fixtures','all_npm.json');
+var JSONStream = require('../');
+
+
+var server = net.createServer(function(client) {
+    var parser = JSONStream.parse([]);
+    parser.on('end', function() {
+        console.log('close')
+        console.error('PASSED');
+        server.close();
+    });
+    client.pipe(parser);
+    var n = 4
+    // After 4 data chunks, end the client to simulate a truncated upload.
+    client.on('data', function () {
+      if(--n) return
+      client.end();
+    })
+});
+server.listen(9999);
+
+
+var client = net.connect({ port : 9999 }, function() {
+    fs.createReadStream(file).pipe(client).on('data', console.log) //.resume();
+});

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/test/disabled/doubledot1.js
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/test/disabled/doubledot1.js 
b/node_modules/JSONStream/test/disabled/doubledot1.js
new file mode 100644
index 0000000..78149b9
--- /dev/null
+++ b/node_modules/JSONStream/test/disabled/doubledot1.js
@@ -0,0 +1,29 @@
+// Test the recursive-descent '..' operator: 'rows..rev' should match the
+// rev value of every row in the fixture, at any depth.
+var fs = require ('fs')
+  , join = require('path').join
+  , file = join(__dirname, 'fixtures','all_npm.json')
+  , JSONStream = require('../')
+  , it = require('it-is')
+
+var expected = JSON.parse(fs.readFileSync(file))
+  , parser = JSONStream.parse('rows..rev')
+  , called = 0
+  , ended = false
+  , parsed = []
+
+fs.createReadStream(file).pipe(parser)
+  
+parser.on('data', function (data) {
+  called ++
+  parsed.push(data)
+})
+
+parser.on('end', function () {
+  ended = true
+})
+
+// Assertions run at process exit so the whole stream has been consumed.
+process.on('exit', function () {
+  it(called).equal(expected.rows.length)
+  for (var i = 0 ; i < expected.rows.length ; i++)
+    it(parsed[i]).deepEqual(expected.rows[i].value.rev)
+  console.error('PASSED')
+})

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/test/disabled/doubledot2.js
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/test/disabled/doubledot2.js 
b/node_modules/JSONStream/test/disabled/doubledot2.js
new file mode 100644
index 0000000..f99d881
--- /dev/null
+++ b/node_modules/JSONStream/test/disabled/doubledot2.js
@@ -0,0 +1,29 @@
+ // Test the array form of recursive descent ({recurse: true}): every
+ // 'value' under 'docs', at any depth, should be emitted (5 matches).
+ var fs = require ('fs')
+   , join = require('path').join
+   , file = join(__dirname, 'fixtures','depth.json')
+   , JSONStream = require('../')
+   , it = require('it-is')
+
+ var expected = JSON.parse(fs.readFileSync(file))
+   , parser = JSONStream.parse(['docs', {recurse: true}, 'value'])
+   , called = 0
+   , ended = false
+   , parsed = []
+
+ fs.createReadStream(file).pipe(parser)
+  
+ parser.on('data', function (data) {
+   called ++
+   parsed.push(data)
+ })
+
+ parser.on('end', function () {
+   ended = true
+ })
+
+ // Assertions run at process exit so the whole stream has been consumed.
+ process.on('exit', function () {
+   it(called).equal(5)
+   for (var i = 0 ; i < 5 ; i++)
+     it(parsed[i]).deepEqual(i)
+   console.error('PASSED')
+ })

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/test/empty.js
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/test/empty.js 
b/node_modules/JSONStream/test/empty.js
new file mode 100644
index 0000000..19e888c
--- /dev/null
+++ b/node_modules/JSONStream/test/empty.js
@@ -0,0 +1,44 @@
+// Test that an empty 'docs' array emits no 'data' events while a one-item
+// array emits exactly that item; both parsers must still emit 'end'.
+var JSONStream = require('../')
+  , stream = require('stream')
+  , it = require('it-is')
+
+var output = [ [], [] ]
+
+var parser1 = JSONStream.parse(['docs', /./])
+parser1.on('data', function(data) {
+  output[0].push(data)
+})
+
+var parser2 = JSONStream.parse(['docs', /./])
+parser2.on('data', function(data) {
+  output[1].push(data)
+})
+
+// Run the assertions only after BOTH parsers have ended.
+var pending = 2
+function onend () {
+  if (--pending > 0) return
+  it(output).deepEqual([
+    [], [{hello: 'world'}]
+  ])
+  console.error('PASSED')
+}
+parser1.on('end', onend)
+parser2.on('end', onend)
+
+// Minimal hand-rolled readable stream: write() re-emits as 'data'.
+function makeReadableStream() {
+  var readStream = new stream.Stream()
+  readStream.readable = true
+  readStream.write = function (data) { this.emit('data', data) }
+  readStream.end = function (data) { this.emit('end') }
+  return readStream
+}
+
+var emptyArray = makeReadableStream()
+emptyArray.pipe(parser1)
+emptyArray.write('{"docs":[]}')
+emptyArray.end()
+
+var objectArray = makeReadableStream()
+objectArray.pipe(parser2)
+objectArray.write('{"docs":[{"hello":"world"}]}')
+objectArray.end()

http://git-wip-us.apache.org/repos/asf/incubator-griffin-site/blob/4f8fa326/node_modules/JSONStream/test/error_contents.js
----------------------------------------------------------------------
diff --git a/node_modules/JSONStream/test/error_contents.js 
b/node_modules/JSONStream/test/error_contents.js
new file mode 100644
index 0000000..13c27ae
--- /dev/null
+++ b/node_modules/JSONStream/test/error_contents.js
@@ -0,0 +1,45 @@
+
+
+// Test that a document with no 'rows' match emits the pre-match content
+// (the error object) as a 'header' event, no 'data' and no 'footer'.
+var fs = require ('fs')
+  , join = require('path').join
+  , file = join(__dirname, 'fixtures','error.json')
+  , JSONStream = require('../')
+  , it = require('it-is')
+
+var expected = JSON.parse(fs.readFileSync(file))
+  , parser = JSONStream.parse(['rows'])
+  , called = 0
+  , headerCalled = 0
+  , footerCalled = 0
+  , ended = false
+  , parsed = []
+
+fs.createReadStream(file).pipe(parser)
+
+parser.on('header', function (data) {
+  headerCalled ++
+  it(data).deepEqual({
+    error: 'error_code',
+    message: 'this is an error message'
+  })
+})
+
+parser.on('footer', function (data) {
+  footerCalled ++
+})
+
+parser.on('data', function (data) {
+  called ++
+  parsed.push(data)
+})
+
+parser.on('end', function () {
+  ended = true
+})
+
+// Assertions run at process exit so the whole stream has been consumed.
+process.on('exit', function () {
+  it(called).equal(0)
+  it(headerCalled).equal(1)
+  it(footerCalled).equal(0)
+  console.error('PASSED')
+})

Reply via email to