Hello community,

Here is the log from the commit of package nodejs-read-package-json for
openSUSE:Factory, checked in at 2015-07-19 11:45:35.
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Comparing /work/SRC/openSUSE:Factory/nodejs-read-package-json (Old)
 and      /work/SRC/openSUSE:Factory/.nodejs-read-package-json.new (New)
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Package is "nodejs-read-package-json"

Changes:
--------
--- /work/SRC/openSUSE:Factory/nodejs-read-package-json/nodejs-read-package-json.changes    2015-04-27 13:02:31.000000000 +0200
+++ /work/SRC/openSUSE:Factory/.nodejs-read-package-json.new/nodejs-read-package-json.changes    2015-07-19 11:45:37.000000000 +0200
@@ -1,0 +2,5 @@
+Fri Apr 24 12:42:23 UTC 2015 - [email protected]
+
+- Update to version 2.0.0
+
+-------------------------------------------------------------------

Old:
----
  read-package-json-1.2.7.tgz

New:
----
  read-package-json-2.0.0.tgz

++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

Other differences:
------------------
++++++ nodejs-read-package-json.spec ++++++
--- /var/tmp/diff_new_pack.eFSWQO/_old  2015-07-19 11:45:38.000000000 +0200
+++ /var/tmp/diff_new_pack.eFSWQO/_new  2015-07-19 11:45:38.000000000 +0200
@@ -19,7 +19,7 @@
 %define base_name read-package-json
 
 Name:           nodejs-read-package-json
-Version:        1.2.7
+Version:        2.0.0
 Release:        0
 Summary:        Read package.json files
 License:        ISC

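For reviewers unfamiliar with the module: its public entry point is unchanged by
this update. Below is a minimal usage sketch (not part of this submission; the
file path and log function are placeholders), matching the
readJson(file, [log], [strict], cb) signature visible in the read-json.js diff
further down:

  var readJson = require('read-package-json')

  // file path, optional log function, optional strict flag, callback(er, data)
  readJson('/path/to/package.json', console.error, false, function (er, data) {
    if (er) {
      console.error('There was an error reading the file')
      return
    }
    console.log('the package data is', data)
  })
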
++++++ read-package-json-1.2.7.tgz -> read-package-json-2.0.0.tgz ++++++
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/package.json new/package/package.json
--- old/package/package.json    2014-08-29 02:29:03.000000000 +0200
+++ new/package/package.json    2015-04-07 06:21:12.000000000 +0200
@@ -1,6 +1,6 @@
 {
   "name": "read-package-json",
-  "version": "1.2.7",
+  "version": "2.0.0",
   "author": "Isaac Z. Schlueter <[email protected]> (http://blog.izs.me/)",
   "description": "The thing npm uses to read package.json files with semantics and defaults and validation",
   "repository": {
@@ -9,17 +9,16 @@
   },
   "main": "read-json.js",
   "scripts": {
-    "test": "tap test/*.js"
+    "test": "standard && tap test/*.js"
   },
   "dependencies": {
-    "github-url-from-git": "^1.3.0",
-    "github-url-from-username-repo": "~1.0.0",
-    "glob": "^4.0.2",
-    "lru-cache": "2",
-    "normalize-package-data": "^1.0.0"
+    "glob": "^5.0.3",
+    "json-parse-helpfulerror": "^1.0.2",
+    "normalize-package-data": "^2.0.0"
   },
   "devDependencies": {
-    "tap": "~0.2.5"
+    "standard": "^3.3.1",
+    "tap": "^0.7.1"
   },
   "optionalDependencies": {
     "graceful-fs": "2 || 3"
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/read-json.js new/package/read-json.js
--- old/package/read-json.js    2014-06-10 07:39:53.000000000 +0200
+++ new/package/read-json.js    2015-04-07 06:18:57.000000000 +0200
@@ -1,382 +1,383 @@
-// vim: set softtabstop=16 shiftwidth=16:
-
+var fs
 try {
-                var fs = require("graceful-fs")
+  fs = require('graceful-fs')
 } catch (er) {
-                var fs = require("fs")
+  fs = require('fs')
 }
 
+var path = require('path')
 
-module.exports = readJson
+var glob = require('glob')
+var normalizeData = require('normalize-package-data')
+var safeJSON = require('json-parse-helpfulerror')
 
-var LRU = require("lru-cache")
-readJson.cache = new LRU({max: 1000})
-var path = require("path")
-var glob = require("glob")
-var normalizeData = require("normalize-package-data")
+module.exports = readJson
 
 // put more stuff on here to customize.
 readJson.extraSet = [
-                gypfile,
-                serverjs,
-                scriptpath,
-                authors,
-                readme,
-                mans,
-                bins,
-                githead
+  gypfile,
+  serverjs,
+  scriptpath,
+  authors,
+  readme,
+  mans,
+  bins,
+  githead
 ]
 
 var typoWarned = {}
 
-
 function readJson (file, log_, strict_, cb_) {
-                var log, strict, cb
-                for (var i = 1; i < arguments.length - 1; i++) {
-                                if (typeof arguments[i] === 'boolean')
-                                                strict = arguments[i]
-                                else if (typeof arguments[i] === 'function')
-                                                log = arguments[i]
-                }
-                if (!log) log = function () {};
-                cb = arguments[ arguments.length - 1 ]
-
-                var c = readJson.cache.get(file)
-                if (c) {
-                                cb = cb.bind(null, null, c)
-                                return process.nextTick(cb);
-                }
-                cb = (function (orig) { return function (er, data) {
-                                if (data) readJson.cache.set(file, data);
-                                return orig(er, data)
-                } })(cb)
-                readJson_(file, log, strict, cb)
-}
+  var log, strict, cb
+  for (var i = 1; i < arguments.length - 1; i++) {
+    if (typeof arguments[i] === 'boolean') {
+      strict = arguments[i]
+    } else if (typeof arguments[i] === 'function') {
+      log = arguments[i]
+    }
+  }
 
+  if (!log) log = function () {}
+  cb = arguments[ arguments.length - 1 ]
 
-function readJson_ (file, log, strict, cb) {
-                fs.readFile(file, "utf8", function (er, d) {
-                                parseJson(file, er, d, log, strict, cb)
-                })
+  readJson_(file, log, strict, cb)
 }
 
-
-function stripBOM(content) {
-                // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
-                // because the buffer-to-string conversion in `fs.readFileSync()`
-                // translates it to FEFF, the UTF-16 BOM.
-                if (content.charCodeAt(0) === 0xFEFF) {
-                                content = content.slice(1);
-                }
-                return content;
+function readJson_ (file, log, strict, cb) {
+  fs.readFile(file, 'utf8', function (er, d) {
+    parseJson(file, er, d, log, strict, cb)
+  })
 }
 
+function stripBOM (content) {
+  // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
+  // because the buffer-to-string conversion in `fs.readFileSync()`
+  // translates it to FEFF, the UTF-16 BOM.
+  if (content.charCodeAt(0) === 0xFEFF) content = content.slice(1)
+  return content
+}
 
 function parseJson (file, er, d, log, strict, cb) {
-                if (er && er.code === "ENOENT") {
-                                indexjs(file, er, log, strict, cb)
-                                return
-                }
-                if (er) return cb(er);
-                try {
-                                d = JSON.parse(stripBOM(d))
-                } catch (er) {
-                                d = parseIndex(d)
-                                if (!d) return cb(parseError(er, file));
-                }
-                extras(file, d, log, strict, cb)
-}
+  if (er && er.code === 'ENOENT') return indexjs(file, er, log, strict, cb)
+  if (er) return cb(er)
 
+  try {
+    d = safeJSON.parse(stripBOM(d))
+  } catch (er) {
+    d = parseIndex(d)
+    if (!d) return cb(parseError(er, file))
+  }
 
-function indexjs (file, er, log, strict, cb) {
-                if (path.basename(file) === "index.js") {
-                                return cb(er);
-                }
-                var index = path.resolve(path.dirname(file), "index.js")
-                fs.readFile(index, "utf8", function (er2, d) {
-                                if (er2) return cb(er);
-                                d = parseIndex(d)
-                                if (!d) return cb(er);
-                                extras(file, d, log, strict, cb)
-                })
+  extras(file, d, log, strict, cb)
 }
 
+function indexjs (file, er, log, strict, cb) {
+  if (path.basename(file) === 'index.js') return cb(er)
+
+  var index = path.resolve(path.dirname(file), 'index.js')
+  fs.readFile(index, 'utf8', function (er2, d) {
+    if (er2) return cb(er)
+
+    d = parseIndex(d)
+    if (!d) return cb(er)
+
+    extras(file, d, log, strict, cb)
+  })
+}
 
 readJson.extras = extras
 function extras (file, data, log_, strict_, cb_) {
-                var log, strict, cb
-                for (var i = 2; i < arguments.length - 1; i++) {
-                                if (typeof arguments[i] === 'boolean')
-                                                strict = arguments[i]
-                                else if (typeof arguments[i] === 'function')
-                                                log = arguments[i]
-                }
-                if (!log) log = function () {};
-                cb = arguments[i]
-                var set = readJson.extraSet
-                var n = set.length
-                var errState = null
-                set.forEach(function (fn) {
-                                fn(file, data, then)
-                })
-                function then(er) {
-                                if (errState) return;
-                                if (er) return cb(errState = er);
-                                if (--n > 0) return;
-                                final(file, data, log, strict, cb);
-                }
+  var log, strict, cb
+  for (var i = 2; i < arguments.length - 1; i++) {
+    if (typeof arguments[i] === 'boolean') {
+      strict = arguments[i]
+    } else if (typeof arguments[i] === 'function') {
+      log = arguments[i]
+    }
+  }
+
+  if (!log) log = function () {}
+  cb = arguments[i]
+
+  var set = readJson.extraSet
+  var n = set.length
+  var errState = null
+  set.forEach(function (fn) {
+    fn(file, data, then)
+  })
+
+  function then (er) {
+    if (errState) return
+    if (er) return cb(errState = er)
+    if (--n > 0) return
+    final(file, data, log, strict, cb)
+  }
 }
 
 function scriptpath (file, data, cb) {
-                if (!data.scripts) return cb(null, data);
-                var k = Object.keys(data.scripts)
-                k.forEach(scriptpath_, data.scripts)
-                cb(null, data);
-}
-function scriptpath_(key) {
-                s = this[key]
-                // This is never allowed, and only causes problems
-                if (typeof s !== 'string')
-                                return delete this[key]
-                var spre = /^(\.[\/\\])?node_modules[\/\\].bin[\\\/]/
-                if (s.match(spre))
-                                this[key] = this[key].replace(spre, '')
+  if (!data.scripts) return cb(null, data)
+  var k = Object.keys(data.scripts)
+  k.forEach(scriptpath_, data.scripts)
+  cb(null, data)
+}
+
+function scriptpath_ (key) {
+  var s = this[key]
+  // This is never allowed, and only causes problems
+  if (typeof s !== 'string') return delete this[key]
+
+  var spre = /^(\.[\/\\])?node_modules[\/\\].bin[\\\/]/
+  if (s.match(spre)) {
+    this[key] = this[key].replace(spre, '')
+  }
 }
 
 function gypfile (file, data, cb) {
-                var dir = path.dirname(file)
-                var s = data.scripts || {}
-                if (s.install || s.preinstall)
-                                return cb(null, data);
-                glob("*.gyp", { cwd: dir }, function (er, files) {
-                                if (er) return cb(er);
-                                gypfile_(file, data, files, cb)
-                })
+  var dir = path.dirname(file)
+  var s = data.scripts || {}
+  if (s.install || s.preinstall) return cb(null, data)
+
+  glob('*.gyp', { cwd: dir }, function (er, files) {
+    if (er) return cb(er)
+    gypfile_(file, data, files, cb)
+  })
 }
 
 function gypfile_ (file, data, files, cb) {
-                if (!files.length) return cb(null, data);
-                var s = data.scripts || {}
-                s.install = "node-gyp rebuild"
-                data.scripts = s
-                data.gypfile = true
-                return cb(null, data);
+  if (!files.length) return cb(null, data)
+  var s = data.scripts || {}
+  s.install = 'node-gyp rebuild'
+  data.scripts = s
+  data.gypfile = true
+  return cb(null, data)
 }
 
 function serverjs (file, data, cb) {
-                var dir = path.dirname(file)
-                var s = data.scripts || {}
-                if (s.start) return cb(null, data)
-                glob("server.js", { cwd: dir }, function (er, files) {
-                                if (er) return cb(er);
-                                serverjs_(file, data, files, cb)
-                })
+  var dir = path.dirname(file)
+  var s = data.scripts || {}
+  if (s.start) return cb(null, data)
+  glob('server.js', { cwd: dir }, function (er, files) {
+    if (er) return cb(er)
+    serverjs_(file, data, files, cb)
+  })
 }
+
 function serverjs_ (file, data, files, cb) {
-                if (!files.length) return cb(null, data);
-                var s = data.scripts || {}
-                s.start = "node server.js"
-                data.scripts = s
-                return cb(null, data)
+  if (!files.length) return cb(null, data)
+  var s = data.scripts || {}
+  s.start = 'node server.js'
+  data.scripts = s
+  return cb(null, data)
 }
 
 function authors (file, data, cb) {
-                if (data.contributors) return cb(null, data);
-                var af = path.resolve(path.dirname(file), "AUTHORS")
-                fs.readFile(af, "utf8", function (er, ad) {
-                                // ignore error.  just checking it.
-                                if (er) return cb(null, data);
-                                authors_(file, data, ad, cb)
-                })
+  if (data.contributors) return cb(null, data)
+  var af = path.resolve(path.dirname(file), 'AUTHORS')
+  fs.readFile(af, 'utf8', function (er, ad) {
+    // ignore error.  just checking it.
+    if (er) return cb(null, data)
+    authors_(file, data, ad, cb)
+  })
 }
+
 function authors_ (file, data, ad, cb) {
-                ad = ad.split(/\r?\n/g).map(function (line) {
-                                return line.replace(/^\s*#.*$/, '').trim()
-                }).filter(function (line) {
-                                return line
-                })
-                data.contributors = ad
-                return cb(null, data)
-}
-
-var defDesc = "Unnamed repository; edit this file " +
-              "'description' to name the repository."
-function gitDescription (file, data, cb) {
-                if (data.description) return cb(null, data);
-                var dir = path.dirname(file)
-                // just cuz it'd be nice if this file mattered...
-                var gitDesc = path.resolve(dir, '.git/description')
-                fs.readFile(gitDesc, 'utf8', function (er, desc) {
-                                if (desc) desc = desc.trim()
-                                if (!er && desc !== defDesc)
-                                                data.description = desc
-                                return cb(null, data)
-                })
-}
-
-function readmeDescription (file, data) {
-                if (data.description) return cb(null, data);
-                var d = data.readme
-                if (!d) return;
-                // the first block of text before the first heading
-                // that isn't the first line heading
-                d = d.trim().split('\n')
-                for (var s = 0; d[s] && d[s].trim().match(/^(#|$)/); s ++);
-                var l = d.length
-                for (var e = s + 1; e < l && d[e].trim(); e ++);
-                data.description = d.slice(s, e).join(' ').trim()
+  ad = ad.split(/\r?\n/g).map(function (line) {
+    return line.replace(/^\s*#.*$/, '').trim()
+  }).filter(function (line) {
+    return line
+  })
+  data.contributors = ad
+  return cb(null, data)
 }
 
 function readme (file, data, cb) {
-                if (data.readme) return cb(null, data);
-                var dir = path.dirname(file)
-                var globOpts = { cwd: dir, nocase: true, mark: true }
-                glob("{README,README.*}", globOpts, function (er, files) {
-                                if (er) return cb(er);
-                                // don't accept directories.
-                                files = files.filter(function (file) {
-                                                return !file.match(/\/$/)
-                                })
-                                if (!files.length) return cb();
-                                var fn = preferMarkdownReadme(files)
-                                var rm = path.resolve(dir, fn)
-                                readme_(file, data, rm, cb)
-                })
-}
-function preferMarkdownReadme(files) {
-                var fallback = 0;
-                var re = /\.m?a?r?k?d?o?w?n?$/i
-                for (var i = 0; i < files.length; i++) {
-                                if (files[i].match(re))
-                                                return files[i]
-                                else if (files[i].match(/README$/))
-                                                fallback = i
-                }
-                // prefer README.md, followed by README; otherwise, return
-                // the first filename (which could be README)
-                return files[fallback];
-}
-function readme_(file, data, rm, cb) {
-                var rmfn = path.basename(rm);
-                fs.readFile(rm, "utf8", function (er, rm) {
-                                // maybe not readable, or something.
-                                if (er) return cb()
-                                data.readme = rm
-                                data.readmeFilename = rmfn
-                                return cb(er, data)
-                })
+  if (data.readme) return cb(null, data)
+  var dir = path.dirname(file)
+  var globOpts = { cwd: dir, nocase: true, mark: true }
+  glob('{README,README.*}', globOpts, function (er, files) {
+    if (er) return cb(er)
+    // don't accept directories.
+    files = files.filter(function (file) {
+      return !file.match(/\/$/)
+    })
+    if (!files.length) return cb()
+    var fn = preferMarkdownReadme(files)
+    var rm = path.resolve(dir, fn)
+    readme_(file, data, rm, cb)
+  })
+}
+
+function preferMarkdownReadme (files) {
+  var fallback = 0
+  var re = /\.m?a?r?k?d?o?w?n?$/i
+  for (var i = 0; i < files.length; i++) {
+    if (files[i].match(re)) {
+      return files[i]
+    } else if (files[i].match(/README$/)) {
+      fallback = i
+    }
+  }
+  // prefer README.md, followed by README; otherwise, return
+  // the first filename (which could be README)
+  return files[fallback]
+}
+
+function readme_ (file, data, rm, cb) {
+  var rmfn = path.basename(rm)
+  fs.readFile(rm, 'utf8', function (er, rm) {
+    // maybe not readable, or something.
+    if (er) return cb()
+    data.readme = rm
+    data.readmeFilename = rmfn
+    return cb(er, data)
+  })
 }
 
 function mans (file, data, cb) {
-                var m = data.directories && data.directories.man
-                if (data.man || !m) return cb(null, data);
-                m = path.resolve(path.dirname(file), m)
-                glob("**/*.[0-9]", { cwd: m }, function (er, mans) {
-                                if (er) return cb(er);
-                                mans_(file, data, mans, cb)
-                })
+  var m = data.directories && data.directories.man
+  if (data.man || !m) return cb(null, data)
+  m = path.resolve(path.dirname(file), m)
+  glob('**/*.[0-9]', { cwd: m }, function (er, mans) {
+    if (er) return cb(er)
+    mans_(file, data, mans, cb)
+  })
 }
+
 function mans_ (file, data, mans, cb) {
-                var m = data.directories && data.directories.man
-                data.man = mans.map(function (mf) {
-                                return path.resolve(path.dirname(file), m, mf)
-                })
-                return cb(null, data)
+  var m = data.directories && data.directories.man
+  data.man = mans.map(function (mf) {
+    return path.resolve(path.dirname(file), m, mf)
+  })
+  return cb(null, data)
 }
 
 function bins (file, data, cb) {
-                if (Array.isArray(data.bin)) {
-                                return bins_(file, data, data.bin, cb)
-                }
-                var m = data.directories && data.directories.bin
-                if (data.bin || !m) return cb(null, data);
-                m = path.resolve(path.dirname(file), m)
-                glob("**", { cwd: m }, function (er, bins) {
-                                if (er) return cb(er);
-                                bins_(file, data, bins, cb)
-                })
+  if (Array.isArray(data.bin)) return bins_(file, data, data.bin, cb)
+
+  var m = data.directories && data.directories.bin
+  if (data.bin || !m) return cb(null, data)
+
+  m = path.resolve(path.dirname(file), m)
+  glob('**', { cwd: m }, function (er, bins) {
+    if (er) return cb(er)
+    bins_(file, data, bins, cb)
+  })
 }
+
 function bins_ (file, data, bins, cb) {
-                var m = data.directories && data.directories.bin || '.'
-                data.bin = bins.reduce(function (acc, mf) {
-                                if (mf && mf.charAt(0) !== '.') {
-                                                var f = path.basename(mf)
-                                                acc[f] = path.join(m, mf)
-                                }
-                                return acc
-                }, {})
-                return cb(null, data)
+  var m = data.directories && data.directories.bin || '.'
+  data.bin = bins.reduce(function (acc, mf) {
+    if (mf && mf.charAt(0) !== '.') {
+      var f = path.basename(mf)
+      acc[f] = path.join(m, mf)
+    }
+    return acc
+  }, {})
+  return cb(null, data)
 }
 
 function githead (file, data, cb) {
-                if (data.gitHead) return cb(null, data);
-                var dir = path.dirname(file)
-                var head = path.resolve(dir, '.git/HEAD')
-                fs.readFile(head, 'utf8', function (er, head) {
-                                if (er) return cb(null, data);
-                                githead_(file, data, dir, head, cb)
-                })
+  if (data.gitHead) return cb(null, data)
+  var dir = path.dirname(file)
+  var head = path.resolve(dir, '.git/HEAD')
+  fs.readFile(head, 'utf8', function (er, head) {
+    if (er) return cb(null, data)
+    githead_(file, data, dir, head, cb)
+  })
 }
+
 function githead_ (file, data, dir, head, cb) {
-                if (!head.match(/^ref: /)) {
-                                data.gitHead = head.trim()
-                                return cb(null, data)
-                }
-                var headFile = head.replace(/^ref: /, '').trim()
-                headFile = path.resolve(dir, '.git', headFile)
-                fs.readFile(headFile, 'utf8', function (er, head) {
-                                if (er || !head) return cb(null, data)
-                                head = head.replace(/^ref: /, '').trim()
-                                data.gitHead = head
-                                return cb(null, data)
-                })
+  if (!head.match(/^ref: /)) {
+    data.gitHead = head.trim()
+    return cb(null, data)
+  }
+  var headFile = head.replace(/^ref: /, '').trim()
+  headFile = path.resolve(dir, '.git', headFile)
+  fs.readFile(headFile, 'utf8', function (er, head) {
+    if (er || !head) return cb(null, data)
+    head = head.replace(/^ref: /, '').trim()
+    data.gitHead = head
+    return cb(null, data)
+  })
+}
+
+/**
+ * Warn if the bin references don't point to anything.  This might be better in
+ * normalize-package-data if it had access to the file path.
+ */
+function checkBinReferences_ (file, data, warn, cb) {
+  if (!(data.bin instanceof Object)) return cb()
+
+  var keys = Object.keys(data.bin)
+  var keysLeft = keys.length
+  if (!keysLeft) return cb()
+
+  function handleExists (relName, result) {
+    keysLeft--
+    if (!result) warn('No bin file found at ' + relName)
+    if (!keysLeft) cb()
+  }
+
+  keys.forEach(function (key) {
+    var dirName = path.dirname(file)
+    var relName = data.bin[key]
+    var binPath = path.resolve(dirName, relName)
+    fs.exists(binPath, handleExists.bind(null, relName))
+  })
 }
 
 function final (file, data, log, strict, cb) {
-                var pId = makePackageId(data)
-                function warn(msg) {
-                                if (typoWarned[pId]) return;
-                                if (log) log("package.json", pId, msg);
-                }
-                try {
-                                normalizeData(data, warn, strict)
-                }
-                catch (error) {
-                                return cb(error)
-                }
-                typoWarned[pId] = true
-                readJson.cache.set(file, data)
-                cb(null, data)
+  var pId = makePackageId(data)
+
+  function warn (msg) {
+    if (typoWarned[pId]) return
+    if (log) log('package.json', pId, msg)
+  }
+
+  try {
+    normalizeData(data, warn, strict)
+  } catch (error) {
+    return cb(error)
+  }
+
+  checkBinReferences_(file, data, warn, function () {
+    typoWarned[pId] = true
+    cb(null, data)
+  })
 }
 
 function makePackageId (data) {
-                var name = cleanString(data.name)
-                var ver = cleanString(data.version)
-                return name + "@" + ver
+  var name = cleanString(data.name)
+  var ver = cleanString(data.version)
+  return name + '@' + ver
 }
-function cleanString(str) {
-                return (!str || typeof(str) !== "string") ? "" : str.trim()
+
+function cleanString (str) {
+  return (!str || typeof (str) !== 'string') ? '' : str.trim()
 }
 
 // /**package { "name": "foo", "version": "1.2.3", ... } **/
 function parseIndex (data) {
-                data = data.split(/^\/\*\*package(?:\s|$)/m)
-                if (data.length < 2) return null
-                data = data[1]
-                data = data.split(/\*\*\/$/m)
-                if (data.length < 2) return null
-                data = data[0]
-                data = data.replace(/^\s*\*/mg, "")
-                try {
-                                return JSON.parse(data)
-                } catch (er) {
-                                return null
-                }
+  data = data.split(/^\/\*\*package(?:\s|$)/m)
+
+  if (data.length < 2) return null
+  data = data[1]
+  data = data.split(/\*\*\/$/m)
+
+  if (data.length < 2) return null
+  data = data[0]
+  data = data.replace(/^\s*\*/mg, '')
+
+  try {
+    return safeJSON.parse(data)
+  } catch (er) {
+    return null
+  }
 }
 
 function parseError (ex, file) {
-                var e = new Error("Failed to parse json\n"+ex.message)
-                e.code = "EJSONPARSE"
-                e.file = file
-                return e
+  var e = new Error('Failed to parse json\n' + ex.message)
+  e.code = 'EJSONPARSE'
+  e.file = file
+  return e
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/test/basic.js new/package/test/basic.js
--- old/package/test/basic.js   2014-06-10 07:39:53.000000000 +0200
+++ new/package/test/basic.js   2015-04-07 06:18:57.000000000 +0200
@@ -1,45 +1,46 @@
-// vim: set softtabstop=16 shiftwidth=16:
-var tap = require("tap")
-var readJson = require("../")
-var path = require("path")
-var fs = require("fs")
-var readme = fs.readFileSync(path.resolve(__dirname, "../README.md"), "utf8")
-var package = require("../package.json")
+var fs = require('fs')
+var path = require('path')
+
+var tap = require('tap')
+
+var readJson = require('../')
+
+var readme = fs.readFileSync(path.resolve(__dirname, '../README.md'), 'utf8')
+var pkg = require('../package.json')
 var isGit
 try {
-                fs.readFileSync(path.resolve(__dirname, '../.git/HEAD'));
-                isGit = true
+  fs.readFileSync(path.resolve(__dirname, '../.git/HEAD'))
+  isGit = true
 } catch (e) {
-                isGit = false
+  isGit = false
 }
 
-console.error("basic test")
-tap.test("basic test", function (t) {
-                var p = path.resolve(__dirname, "../package.json")
-                readJson(p, function (er, data) {
-                                if (er) throw er;
-                                basic_(t, data)
-                })
+tap.test('basic test', function (t) {
+  var p = path.resolve(__dirname, '../package.json')
+  readJson(p, function (er, data) {
+    if (er) throw er
+    basic_(t, data)
+  })
 })
+
 function basic_ (t, data) {
-                t.ok(data)
-                t.equal(data.version, package.version)
-                t.equal(data._id, data.name + "@" + data.version)
-                t.equal(data.name, package.name)
-                t.type(data.author, "object")
-                t.equal(data.readme, readme)
-                t.deepEqual(data.scripts, package.scripts)
-                t.equal(data.main, package.main)
-                t.equal(data.readmeFilename, 'README.md')
-
-                if (isGit) t.similar(data.gitHead, /^[a-f0-9]{40}$/);
-
-                // optional deps are folded in.
-                t.deepEqual(data.optionalDependencies,
-                            package.optionalDependencies)
-                t.has(data.dependencies, package.optionalDependencies)
-                t.has(data.dependencies, package.dependencies)
+  t.ok(data)
+  t.equal(data.version, pkg.version)
+  t.equal(data._id, data.name + '@' + data.version)
+  t.equal(data.name, pkg.name)
+  t.type(data.author, 'object')
+  t.equal(data.readme, readme)
+  t.deepEqual(data.scripts, pkg.scripts)
+  t.equal(data.main, pkg.main)
+  t.equal(data.readmeFilename, 'README.md')
+
+  if (isGit) t.similar(data.gitHead, /^[a-f0-9]{40}$/)
+
+  // optional deps are folded in.
+  t.deepEqual(data.optionalDependencies, pkg.optionalDependencies)
+  t.has(data.dependencies, pkg.optionalDependencies)
+  t.has(data.dependencies, pkg.dependencies)
 
-                t.deepEqual(data.devDependencies, package.devDependencies)
-                t.end()
+  t.deepEqual(data.devDependencies, pkg.devDependencies)
+  t.end()
 }
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/test/bin.js new/package/test/bin.js
--- old/package/test/bin.js     1970-01-01 01:00:00.000000000 +0100
+++ new/package/test/bin.js     2015-04-07 06:18:57.000000000 +0200
@@ -0,0 +1,43 @@
+var path = require('path')
+
+var tap = require('tap')
+
+var readJson = require('../')
+
+var createWarningCollector = function () {
+  var warn = function (msg) {
+    warn.warnings.push(arguments)
+  }
+  warn.warnings = []
+  return warn
+}
+
+tap.test('Bin test', function (t) {
+  var p = path.resolve(__dirname, 'fixtures/bin.json')
+  var warn = createWarningCollector()
+  readJson(p, warn, function (er, data) {
+    t.equals(warn.warnings.length, 0)
+    t.deepEqual(data.bin, {'bin-test': './bin/echo'})
+    t.end()
+  })
+})
+
+tap.test('Bad bin test', function (t) {
+  var p = path.resolve(__dirname, 'fixtures/badbin.json')
+  var warn = createWarningCollector()
+  readJson(p, warn, function (er, data) {
+    t.equals(warn.warnings.length, 1)
+    t.equals(warn.warnings[0][2], 'No bin file found at ./bin/typo')
+    t.end()
+  })
+})
+
+tap.test('Empty bin test', function (t) {
+  var p = path.resolve(__dirname, 'fixtures/emptybin.json')
+  var warn = createWarningCollector()
+  readJson(p, warn, function (er, data) {
+    t.equals(warn.warnings.length, 0)
+    t.same(data.bin, {}, 'no mapping to bin because object was empty')
+    t.end()
+  })
+})
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/test/bom.js new/package/test/bom.js
--- old/package/test/bom.js     2014-06-10 07:39:53.000000000 +0200
+++ new/package/test/bom.js     2015-04-07 06:18:57.000000000 +0200
@@ -1,19 +1,16 @@
-// vim: set softtabstop=16 shiftwidth=16:
-var tap = require("tap")
-var readJson = require("../")
-var path = require("path")
-var fs = require("fs")
+var tap = require('tap')
+var readJson = require('../')
+var path = require('path')
 
-console.error("BOM test")
-tap.test("BOM test", function (t) {
-                var p = path.resolve(__dirname, "fixtures/bom.json")
-                readJson(p, function (er, data) {
-                                if (er) throw er;
-                                p = path.resolve(__dirname, "fixtures/nobom.json")
-                                readJson(p, function (er, data2) {
-                                                if (er) throw er;
-                                                t.deepEqual(data, data2)
-                                                t.end()
-                                })
-                })
+tap.test('BOM test', function (t) {
+  var p = path.resolve(__dirname, 'fixtures/bom.json')
+  readJson(p, function (er, data) {
+    if (er) throw er
+    p = path.resolve(__dirname, 'fixtures/nobom.json')
+    readJson(p, function (er, data2) {
+      if (er) throw er
+      t.deepEqual(data, data2)
+      t.end()
+    })
+  })
 })
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/test/fixtures/badbin.json new/package/test/fixtures/badbin.json
--- old/package/test/fixtures/badbin.json       1970-01-01 01:00:00.000000000 +0100
+++ new/package/test/fixtures/badbin.json       2015-04-07 06:18:57.000000000 +0200
@@ -0,0 +1,11 @@
+{
+  "name": "badbin-test",
+  "description": "my desc",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/npm/read-package-json.git"
+  },
+  "version": "0.0.1",
+  "readme": "hello world",
+  "bin": "./bin/typo"
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/test/fixtures/bin/echo new/package/test/fixtures/bin/echo
--- old/package/test/fixtures/bin/echo  1970-01-01 01:00:00.000000000 +0100
+++ new/package/test/fixtures/bin/echo  2015-04-07 06:18:57.000000000 +0200
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+echo "Hello world"
\ No newline at end of file
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/test/fixtures/bin.json new/package/test/fixtures/bin.json
--- old/package/test/fixtures/bin.json  1970-01-01 01:00:00.000000000 +0100
+++ new/package/test/fixtures/bin.json  2015-04-07 06:18:57.000000000 +0200
@@ -0,0 +1,11 @@
+{
+  "name": "bin-test",
+  "description": "my desc",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/npm/read-package-json.git"
+  },
+  "version": "0.0.1",
+  "readme": "hello world",
+  "bin": "./bin/echo"
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/test/fixtures/emptybin.json new/package/test/fixtures/emptybin.json
--- old/package/test/fixtures/emptybin.json     1970-01-01 01:00:00.000000000 +0100
+++ new/package/test/fixtures/emptybin.json     2015-04-07 06:18:57.000000000 +0200
@@ -0,0 +1,11 @@
+{
+  "name": "emptybin-test",
+  "description": "my desc",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/npm/read-package-json.git"
+  },
+  "version": "0.0.1",
+  "readme": "hello world",
+  "bin": {}
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/test/fixtures/erroneous.json new/package/test/fixtures/erroneous.json
--- old/package/test/fixtures/erroneous.json    1970-01-01 01:00:00.000000000 +0100
+++ new/package/test/fixtures/erroneous.json    2015-04-07 06:18:57.000000000 +0200
@@ -0,0 +1,4 @@
+{
+  'wrong': 'kind',
+  'of': 'quotes'
+}
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/test/helpful.js new/package/test/helpful.js
--- old/package/test/helpful.js 1970-01-01 01:00:00.000000000 +0100
+++ new/package/test/helpful.js 2015-04-07 06:18:57.000000000 +0200
@@ -0,0 +1,12 @@
+var tap = require('tap')
+var readJson = require('../')
+var path = require('path')
+var p = path.resolve(__dirname, 'fixtures/erroneous.json')
+
+tap.test('erroneous package data', function (t) {
+  readJson(p, function (er, data) {
+    t.ok(er instanceof Error)
+    t.ok(er.message.match(/Unexpected token '\\''/))
+    t.end()
+  })
+})
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/test/non-json.js new/package/test/non-json.js
--- old/package/test/non-json.js        2014-06-10 07:39:53.000000000 +0200
+++ new/package/test/non-json.js        2015-04-07 06:20:52.000000000 +0200
@@ -1,52 +1,57 @@
-// vim: set softtabstop=16 shiftwidth=16:
+var path = require('path')
+
 var tap = require('tap')
+
 var readJson = require('../')
-var path = require('path')
-var fs = require('fs')
-var expect =
-{ name: 'read-package-json',
+
+var expect = {
+  name: 'read-package-json',
   version: '0.1.1',
-  author: 
-   { name: 'Isaac Z. Schlueter',
-     email: '[email protected]',
-     url: 'http://blog.izs.me/' },
+  author: {
+    name: 'Isaac Z. Schlueter',
+    email: '[email protected]',
+    url: 'http://blog.izs.me/'
+  },
   description: 'The thing npm uses to read package.json files with semantics and defaults and validation',
-  repository: 
-   { type: 'git',
-     url: 'git://github.com/isaacs/read-package-json.git' },
-  bugs: {url: "https://github.com/isaacs/read-package-json/issues" },
+  repository: {
+    type: 'git',
+    url: 'git://github.com/isaacs/read-package-json.git'
+  },
+  bugs: {
+    url: 'https://github.com/isaacs/read-package-json/issues'
+  },
   main: 'read-json.js',
   scripts: { test: 'tap test/*.js' },
-  dependencies: 
-   { glob: '~3.1.9',
-     'lru-cache': '~1.1.0',
-     semver: '~1.0.14',
-     slide: '~1.1.3',
-     npmlog: '0',
-     'graceful-fs': '~1.1.8' },
+  dependencies: {
+    glob: '~3.1.9',
+    'lru-cache': '~1.1.0',
+    semver: '~1.0.14',
+    slide: '~1.1.3',
+    npmlog: '0',
+    'graceful-fs': '~1.1.8'
+  },
   devDependencies: { tap: '~0.2.5' },
- homepage: "https://github.com/isaacs/read-package-json",
- optionalDependencies: { npmlog: '0', 'graceful-fs': '~1.1.8' },
+  homepage: 'https://github.com/isaacs/read-package-json#readme',
+  optionalDependencies: { npmlog: '0', 'graceful-fs': '~1.1.8' },
   _id: 'read-package-json@0.1.1',
-  readme: 'ERROR: No README data found!' }
+  readme: 'ERROR: No README data found!'
+}
 
 tap.test('from css', function (t) {
-                var c = path.join(__dirname, 'fixtures', 'not-json.css')
-                readJson(c, function (er, d) {
-                                t.same(d, expect)
-                                t.end()
-                })
+  var c = path.join(__dirname, 'fixtures', 'not-json.css')
+  readJson(c, function (er, d) {
+    t.same(d, expect)
+    t.end()
+  })
 })
 
 tap.test('from js', function (t) {
-                readJson(__filename, function (er, d) {
-                                t.same(d, expect)
-                                t.end()
-                })
+  readJson(__filename, function (er, d) {
+    t.same(d, expect)
+    t.end()
+  })
 })
 
-
-
 /**package
 {
   "name": "read-package-json",
@@ -76,4 +81,3 @@
   }
 }
 **/
-
diff -urN '--exclude=CVS' '--exclude=.cvsignore' '--exclude=.svn' '--exclude=.svnignore' old/package/test/readmes.js new/package/test/readmes.js
--- old/package/test/readmes.js 2014-06-10 07:39:53.000000000 +0200
+++ new/package/test/readmes.js 2015-04-07 06:18:57.000000000 +0200
@@ -1,29 +1,28 @@
-// vim: set softtabstop=16 shiftwidth=16:
-var tap = require("tap")
-var readJson = require("../")
-var path = require("path")
-var fs = require("fs")
-var p = path.resolve(__dirname, "fixtures/readmes/package.json")
+var path = require('path')
+
+var tap = require('tap')
+var p = path.resolve(__dirname, 'fixtures/readmes/package.json')
+
+var readJson = require('../')
 
 var expect = {}
 var expect = {
-  "name" : "readmes",
-  "version" : "99.999.999999999",
-  "readme" : "*markdown*\n",
-  "readmeFilename" : "README.md",
-  "description" : "*markdown*",
-  "_id" : "readmes@99.999.999999999"
+  'name': 'readmes',
+  'version': '99.999.999999999',
+  'readme': '*markdown*\n',
+  'readmeFilename': 'README.md',
+  'description': '*markdown*',
+  '_id': 'readmes@99.999.999999999'
 }
 
-console.error("readme test")
-tap.test("readme test", function (t) {
-                readJson(p, function (er, data) {
-                                if (er) throw er;
-                                test(t, data)
-                })
+tap.test('readme test', function (t) {
+  readJson(p, function (er, data) {
+    t.ifError(er, 'read README without error')
+    test(t, data)
+  })
 })
 
-function test(t, data) {
-                t.deepEqual(data, expect)
-                t.end()
+function test (t, data) {
+  t.deepEqual(data, expect)
+  t.end()
 }

