1var fs = require('fs')
2
3var path = require('path')
4
5var { glob } = require('glob')
6var normalizeData = require('normalize-package-data')
7var safeJSON = require('json-parse-even-better-errors')
8var util = require('util')
9var normalizePackageBin = require('npm-normalize-package-bin')
10
module.exports = readJson

// put more stuff on here to customize.
// Each entry is an async augmenter with signature (file, data, cb) that
// fills in one derived field; extras() runs them all in parallel before
// the final normalization pass.
readJson.extraSet = [
  bundleDependencies,
  gypfile,
  serverjs,
  scriptpath,
  authors,
  readme,
  mans,
  bins,
  githead,
  fillTypes,
]

// Package ids (name@version) that have already produced warnings, so
// repeated reads of the same package don't warn twice.
var typoWarned = {}
// Fully-processed results keyed by the raw file contents; entries are
// deep clones, cloned again on the way out (see parseJson/extrasCached).
var cache = {}
29
// Read and parse a package.json file, apply the extraSet augmentations
// and normalization, then hand the result to `cb`.  The `log` (function)
// and `strict` (boolean) arguments are optional and may appear in either
// order; the callback is always the last argument.
function readJson (file, log_, strict_, cb_) {
  let log
  let strict
  for (let idx = 1; idx < arguments.length - 1; idx++) {
    const arg = arguments[idx]
    if (typeof arg === 'boolean') {
      strict = arg
    } else if (typeof arg === 'function') {
      log = arg
    }
  }

  log = log || function () {}
  const cb = arguments[arguments.length - 1]

  readJson_(file, log, strict, cb)
}
47
// Internal entry point: load the file contents and delegate to parseJson.
function readJson_ (file, log, strict, cb) {
  fs.readFile(file, 'utf8', (er, d) => parseJson(file, er, d, log, strict, cb))
}
53
// Drop a leading byte-order marker, if present.  fs.readFile's
// buffer-to-string conversion translates the UTF-8 BOM (EF BB BF) to
// U+FEFF, so a single charCode check covers it.
function stripBOM (content) {
  return content.charCodeAt(0) === 0xFEFF ? content.slice(1) : content
}
63
// Deep-copy a JSON-shaped value (plain objects, arrays, primitives).
// Used to keep cached results isolated from caller mutation.
function jsonClone (obj) {
  if (obj == null) {
    return obj
  }
  if (Array.isArray(obj)) {
    const copy = new Array(obj.length)
    for (const idx in obj) {
      copy[idx] = jsonClone(obj[idx])
    }
    return copy
  }
  if (typeof obj === 'object') {
    const copy = {}
    for (const key in obj) {
      copy[key] = jsonClone(obj[key])
    }
    return copy
  }
  return obj
}
83
// Handle the result of reading `file`: map ENOENT onto the index.js
// fallback (or ENOTDIR when the parent isn't a directory), serve a clone
// from cache when the exact same contents were seen before, otherwise
// parse the JSON (stripping any BOM and underscore-prefixed keys) and
// run the extras pipeline.
function parseJson (file, er, d, log, strict, cb) {
  if (er && er.code === 'ENOENT') {
    fs.stat(path.dirname(file), function (err, stat) {
      if (!err && stat && !stat.isDirectory()) {
        // ENOTDIR isn't used on Windows, but npm expects it.
        er = Object.create(er)
        er.code = 'ENOTDIR'
        cb(er)
      } else {
        indexjs(file, er, log, strict, cb)
      }
    })
    return
  }
  if (er) {
    return cb(er)
  }

  // Cache hit: identical raw contents were already processed.
  if (cache[d]) {
    return cb(null, jsonClone(cache[d]))
  }

  let data
  try {
    data = safeJSON(stripBOM(d))
    // Drop private, underscore-prefixed top-level fields.
    for (const key in data) {
      if (key.charAt(0) === '_') {
        delete data[key]
      }
    }
  } catch (jsonErr) {
    // Not valid JSON; maybe it's an index.js with a /**package ... **/ block.
    data = parseIndex(d)
    if (!data) {
      return cb(parseError(jsonErr, file))
    }
  }
  extrasCached(file, d, data, log, strict, cb)
}
122
// Run the extras pipeline and, on success, memoize a deep copy of the
// result keyed by the raw file contents `d`.
function extrasCached (file, d, data, log, strict, cb) {
  extras(file, data, log, strict, (err, extrasData) => {
    if (err) {
      return cb(err, extrasData)
    }
    cache[d] = jsonClone(extrasData)
    cb(null, extrasData)
  })
}
131
// Fallback for a missing package.json: look for an index.js next to it
// containing a /**package ... **/ comment block.  The original ENOENT
// error `er` is reported whenever the fallback can't produce data.
function indexjs (file, er, log, strict, cb) {
  if (path.basename(file) === 'index.js') {
    // Already looking at an index.js; nothing further to fall back to.
    return cb(er)
  }

  var index = path.resolve(path.dirname(file), 'index.js')
  fs.readFile(index, 'utf8', function (er2, d) {
    if (er2) {
      return cb(er)
    }

    if (cache[d]) {
      // Clone so callers can't mutate the shared cache entry
      // (matches the cache handling in parseJson).
      return cb(null, jsonClone(cache[d]))
    }

    var data = parseIndex(d)
    if (!data) {
      return cb(er)
    }

    extrasCached(file, d, data, log, strict, cb)
  })
}
155
readJson.extras = extras
// Apply every augmenter in readJson.extraSet to `data` (all in
// parallel), then run the final normalization step.  `log` (function)
// and `strict` (boolean) are optional and may appear in either order;
// the callback is always the last argument.
function extras (file, data, log_, strict_, cb_) {
  let log
  let strict
  for (let idx = 2; idx < arguments.length - 1; idx++) {
    const arg = arguments[idx]
    if (typeof arg === 'boolean') {
      strict = arg
    } else if (typeof arg === 'function') {
      log = arg
    }
  }

  log = log || function () {}
  const cb = arguments[arguments.length - 1]

  const set = readJson.extraSet
  let pending = set.length
  let errState = null

  // Shared completion callback: first error wins; final() runs once
  // every augmenter has reported back.
  function then (er) {
    if (errState) {
      return
    }
    if (er) {
      errState = er
      return cb(er)
    }
    pending--
    if (pending > 0) {
      return
    }
    final(file, data, log, strict, cb)
  }

  set.forEach(function (fn) {
    fn(file, data, then)
  })
}
192
// Clean up the scripts map: delete non-string entries and strip leading
// node_modules/.bin/ prefixes from script commands.
function scriptpath (file, data, cb) {
  const scripts = data.scripts
  if (!scripts) {
    return cb(null, data)
  }
  Object.keys(scripts).forEach(scriptpath_, scripts)
  cb(null, data)
}
201
// Normalize a single scripts[key] entry, with `this` bound to the
// scripts object.  Non-string values are deleted outright; string
// values have any leading "node_modules/.bin/" prefix removed.
function scriptpath_ (key) {
  var s = this[key]
  // This is never allowed, and only causes problems
  if (typeof s !== 'string') {
    return delete this[key]
  }

  // Dot escaped so only a literal ".bin" directory matches; the previous
  // unescaped dot also matched e.g. "node_modules/xbin/".
  var spre = /^(\.[/\\])?node_modules[/\\]\.bin[\\/]/
  if (s.match(spre)) {
    this[key] = this[key].replace(spre, '')
  }
}
214
// If the package has a *.gyp file but no (pre)install script and hasn't
// explicitly opted out with "gypfile": false, mark it as a gyp package.
function gypfile (file, data, cb) {
  const scripts = data.scripts || {}
  if (scripts.install || scripts.preinstall) {
    return cb(null, data)
  }
  if (data.gypfile === false) {
    return cb(null, data)
  }
  glob('*.gyp', { cwd: path.dirname(file) })
    .then(files => gypfile_(file, data, files, cb))
    .catch(er => cb(er))
}
229
// Given the *.gyp glob results, add the default node-gyp install script
// and gypfile flag when at least one gyp file exists.
function gypfile_ (file, data, files, cb) {
  if (files.length === 0) {
    return cb(null, data)
  }
  const scripts = data.scripts || {}
  scripts.install = 'node-gyp rebuild'
  data.scripts = scripts
  data.gypfile = true
  return cb(null, data)
}
240
// Default the start script to "node server.js" when a server.js file
// exists next to package.json and no start script was declared.
function serverjs (file, data, cb) {
  const scripts = data.scripts || {}
  if (scripts.start) {
    return cb(null, data)
  }
  const serverPath = path.join(path.dirname(file), 'server.js')
  fs.access(serverPath, (err) => {
    if (!err) {
      scripts.start = 'node server.js'
      data.scripts = scripts
    }
    return cb(null, data)
  })
}
255
// Fill in `contributors` from an AUTHORS file sitting next to
// package.json, unless contributors were already declared.
function authors (file, data, cb) {
  if (data.contributors) {
    return cb(null, data)
  }
  const authorsFile = path.resolve(path.dirname(file), 'AUTHORS')
  fs.readFile(authorsFile, 'utf8', (er, ad) => {
    // ignore error.  just checking it.
    if (er) {
      return cb(null, data)
    }
    authors_(file, data, ad, cb)
  })
}
269
// Parse AUTHORS file contents: one contributor per line, with blank
// lines and #-comment lines removed.
function authors_ (file, data, ad, cb) {
  const contributors = []
  for (const rawLine of ad.split(/\r?\n/g)) {
    const line = rawLine.replace(/^\s*#.*$/, '').trim()
    if (line) {
      contributors.push(line)
    }
  }
  data.contributors = contributors
  return cb(null, data)
}
279
// Load README contents into data.readme / data.readmeFilename, finding
// any README* file (case-insensitive) next to package.json and
// preferring markdown variants.
function readme (file, data, cb) {
  if (data.readme) {
    return cb(null, data)
  }
  const dir = path.dirname(file)
  const globOpts = { cwd: dir, nocase: true, mark: true }
  glob('{README,README.*}', globOpts)
    .then(files => {
      // don't accept directories (glob marks them with a trailing slash).
      const candidates = files.filter(f => !f.endsWith('/'))
      if (candidates.length === 0) {
        return cb()
      }
      const fn = preferMarkdownReadme(candidates)
      return readme_(file, data, path.resolve(dir, fn), cb)
    })
    .catch(er => cb(er))
}
301
// Pick the best README from the glob results: the first markdown-looking
// filename wins outright; otherwise an exact "README" is preferred;
// otherwise the first entry is returned.
function preferMarkdownReadme (files) {
  let fallback = 0
  const markdownRe = /\.m?a?r?k?d?o?w?n?$/i
  for (let i = 0; i < files.length; i++) {
    const name = files[i]
    if (markdownRe.test(name)) {
      return name
    }
    if (/README$/.test(name)) {
      fallback = i
    }
  }
  // prefer README.md, followed by README; otherwise, return
  // the first filename (which could be README)
  return files[fallback]
}
316
// Read the chosen README file into data.readme / data.readmeFilename;
// unreadable files are silently skipped.
function readme_ (file, data, rm, cb) {
  const rmfn = path.basename(rm)
  fs.readFile(rm, 'utf8', (er, rmData) => {
    // maybe not readable, or something.
    if (er) {
      return cb()
    }
    data.readme = rmData
    data.readmeFilename = rmfn
    return cb(er, data)
  })
}
329
// Expand directories.man into an explicit `man` array of paths relative
// to the package root, normalized to forward slashes.
function mans (file, data, cb) {
  const manDir = data.directories && data.directories.man
  if (data.man || !manDir) {
    return cb(null, data)
  }
  const dirname = path.dirname(file)
  const cwd = path.resolve(dirname, manDir)
  glob('**/*.[0-9]', { cwd })
    .then(mansGlob => {
      data.man = mansGlob.map(man =>
        path.relative(dirname, path.join(cwd, man)).split(path.sep).join('/')
      )
      return cb(null, data)
    })
    .catch(er => cb(er))
}
346
// Expand directories.bin into an explicit `bin` map, unless `bin` was
// already declared.
function bins (file, data, cb) {
  data = normalizePackageBin(data)

  const binDir = data.directories && data.directories.bin
  if (data.bin || !binDir) {
    return cb(null, data)
  }

  // The inner joins confine the bin directory to within the package root.
  const cwd = path.resolve(path.dirname(file), path.join('.', path.join('/', binDir)))
  glob('**', { cwd })
    .then(binsGlob => bins_(file, data, binsGlob, cb))
    .catch(er => cb(er))
}
360
// Build the bin map from the glob results: every non-hidden file maps
// its basename to its path under directories.bin.
function bins_ (file, data, binsGlob, cb) {
  const binDir = (data.directories && data.directories.bin) || '.'
  const bin = {}
  for (const mf of binsGlob) {
    if (mf && mf.charAt(0) !== '.') {
      bin[path.basename(mf)] = path.join(binDir, mf)
    }
  }
  data.bin = bin
  return cb(null, normalizePackageBin(data))
}
372
// Normalize the two accepted spellings of bundled dependencies onto
// `bundleDependencies`, expanding `true` to all dependency names and
// dropping `false` or any other non-array value.
function bundleDependencies (file, data, cb) {
  const bd = 'bundleDependencies'
  const bdd = 'bundledDependencies'
  // normalize key name
  if (data[bdd] !== undefined) {
    if (data[bd] === undefined) {
      data[bd] = data[bdd]
    }
    delete data[bdd]
  }
  const val = data[bd]
  if (val === false) {
    delete data[bd]
  } else if (val === true) {
    data[bd] = Object.keys(data.dependencies || {})
  } else if (val !== undefined && !Array.isArray(val)) {
    delete data[bd]
  }
  return cb(null, data)
}
392
// Populate data.gitHead from the nearest enclosing git repository,
// walking up the directory tree until a .git/HEAD file is found or the
// filesystem root is reached.  Note the recursion trick: `dir` (a
// directory) is passed as `file`, so path.dirname() steps exactly one
// level up on each retry.
function githead (file, data, cb) {
  if (data.gitHead) {
    return cb(null, data)
  }
  var dir = path.dirname(file)
  var head = path.resolve(dir, '.git/HEAD')
  fs.readFile(head, 'utf8', function (er, headData) {
    if (er) {
      var parent = path.dirname(dir)
      if (parent === dir) {
        // Reached the root without finding a repository; give up quietly.
        return cb(null, data)
      }
      // No .git/HEAD here; retry one directory up.
      return githead(dir, data, cb)
    }
    githead_(data, dir, headData, cb)
  })
}
410
// Resolve the commit hash for a git HEAD file's contents.  Three cases:
//   1. detached HEAD: the file contains the hash itself;
//   2. symbolic ref ("ref: refs/heads/..."): read that loose ref file;
//   3. loose ref missing (e.g. after `git pack-refs`): scan
//      .git/packed-refs for a "<40-hex-sha> <refname>" line.
// All failures are silent: data.gitHead is simply left unset.
function githead_ (data, dir, head, cb) {
  if (!head.match(/^ref: /)) {
    // Detached HEAD: the file holds the commit hash directly.
    data.gitHead = head.trim()
    return cb(null, data)
  }
  var headRef = head.replace(/^ref: /, '').trim()
  var headFile = path.resolve(dir, '.git', headRef)
  fs.readFile(headFile, 'utf8', function (er, headData) {
    if (er || !headData) {
      // Loose ref file not found; fall back to packed-refs.
      var packFile = path.resolve(dir, '.git/packed-refs')
      return fs.readFile(packFile, 'utf8', function (readFileErr, refs) {
        if (readFileErr || !refs) {
          // Can't resolve the ref at all; leave gitHead unset.
          return cb(null, data)
        }
        refs = refs.split('\n')
        for (var i = 0; i < refs.length; i++) {
          var match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
          if (match && match[2].trim() === headRef) {
            data.gitHead = match[1]
            break
          }
        }
        return cb(null, data)
      })
    }
    headData = headData.replace(/^ref: /, '').trim()
    data.gitHead = headData
    return cb(null, data)
  })
}
441
442/**
443 * Warn if the bin references don't point to anything.  This might be better in
444 * normalize-package-data if it had access to the file path.
445 */
446function checkBinReferences_ (file, data, warn, cb) {
447  if (!(data.bin instanceof Object)) {
448    return cb()
449  }
450
451  var keys = Object.keys(data.bin)
452  var keysLeft = keys.length
453  if (!keysLeft) {
454    return cb()
455  }
456
457  function handleExists (relName, result) {
458    keysLeft--
459    if (!result) {
460      warn('No bin file found at ' + relName)
461    }
462    if (!keysLeft) {
463      cb()
464    }
465  }
466
467  keys.forEach(function (key) {
468    var dirName = path.dirname(file)
469    var relName = data.bin[key]
470    /* istanbul ignore if - impossible, bins have been normalized */
471    if (typeof relName !== 'string') {
472      var msg = 'Bin filename for ' + key +
473        ' is not a string: ' + util.inspect(relName)
474      warn(msg)
475      delete data.bin[key]
476      handleExists(relName, true)
477      return
478    }
479    var binPath = path.resolve(dirName, relName)
480    fs.stat(binPath, (err) => handleExists(relName, !err))
481  })
482}
483
// Last stage of the pipeline: run normalize-package-data over the
// result, routing its warnings through `log` (deduplicated per
// name@version), then verify bin references before handing back data.
function final (file, data, log, strict, cb) {
  const pId = makePackageId(data)

  const warn = (msg) => {
    if (typoWarned[pId]) {
      return
    }
    if (log) {
      log('package.json', pId, msg)
    }
  }

  try {
    normalizeData(data, warn, strict)
  } catch (error) {
    return cb(error)
  }

  checkBinReferences_(file, data, warn, () => {
    // Only warn once per package id.
    typoWarned[pId] = true
    cb(null, data)
  })
}
507
// Default the `types` field to a .d.ts file matching `main`, when such
// a file exists and neither `types` nor `typings` was declared.
function fillTypes (file, data, cb) {
  const index = data.main || 'index.js'

  if (typeof index !== 'string') {
    return cb(new TypeError('The "main" attribute must be of type string.'))
  }

  // TODO exports is much more complicated than this in verbose format
  // We need to support for instance

  // "exports": {
  //   ".": [
  //     {
  //       "default": "./lib/npm.js"
  //     },
  //     "./lib/npm.js"
  //   ],
  //   "./package.json": "./package.json"
  // },
  // as well as conditional exports

  // if (data.exports && typeof data.exports === 'string') {
  //   index = data.exports
  // }

  // if (data.exports && data.exports['.']) {
  //   index = data.exports['.']
  //   if (typeof index !== 'string') {
  //   }
  // }

  // Strip the extension from `main` and look for a sibling .d.ts file.
  const baseNoExt = path.basename(index, path.extname(index))
  const extless = path.join(path.dirname(index), baseNoExt)
  const dts = `./${extless}.d.ts`
  const dtsPath = path.join(path.dirname(file), dts)
  const hasDTSFields = 'types' in data || 'typings' in data
  if (!hasDTSFields && fs.existsSync(dtsPath)) {
    // Normalize to forward slashes for cross-platform metadata.
    data.types = dts.split(path.sep).join('/')
  }

  cb(null, data)
}
550
// Build a "name@version" identifier, used for warning deduplication.
function makePackageId (data) {
  return cleanString(data.name) + '@' + cleanString(data.version)
}

// Return a trimmed string, or '' for anything that isn't a truthy string.
function cleanString (str) {
  if (!str || typeof str !== 'string') {
    return ''
  }
  return str.trim()
}
560
// /**package { "name": "foo", "version": "1.2.3", ... } **/
// Extract and parse a /**package ... **/ comment block from an index.js
// source string.  Returns the parsed object, or null when no well-formed
// block is present.
function parseIndex (data) {
  const opened = data.split(/^\/\*\*package(?:\s|$)/m)
  if (opened.length < 2) {
    return null
  }

  const closed = opened[1].split(/\*\*\/$/m)
  if (closed.length < 2) {
    return null
  }

  // Strip leading "*" decorations from each comment line.
  const json = closed[0].replace(/^\s*\*/mg, '')

  try {
    return safeJSON(json)
  } catch (er) {
    return null
  }
}
583
// Wrap a JSON parse failure in an npm-style EJSONPARSE error that
// carries the offending file path.
function parseError (ex, file) {
  const e = new Error('Failed to parse json\n' + ex.message)
  e.code = 'EJSONPARSE'
  e.path = file
  return e
}
590