var fs = require('fs')
var path = require('path')
var { glob } = require('glob')
var normalizeData = require('normalize-package-data')
var safeJSON = require('json-parse-even-better-errors')
var util = require('util')
var normalizePackageBin = require('npm-normalize-package-bin')
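
// Usage sketch (illustrative only; the file path and handler below are
// hypothetical). The log function and strict flag are both optional; the
// callback is always the last argument:
//
//   readJson('/path/to/package.json', console.error, false, function (er, data) {
//     if (er) {
//       return console.error('There was an error reading the file')
//     }
//     console.log('the package data is', data)
//   })
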
module.exports = readJson
// put more stuff on here to customize.
readJson.extraSet = [
  bundleDependencies,
  gypfile,
  serverjs,
  scriptpath,
  authors,
  readme,
  mans,
  bins,
  githead,
  fillTypes,
]

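// Each entry above is an async "extra" of the form fn(file, data, cb);
// extras() runs them all in parallel over the same data object and calls
// final() once every one has called back.
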
var typoWarned = {}
var cache = {}
function readJson (file, log_, strict_, cb_) {
  var log, strict, cb
  for (var i = 1; i < arguments.length - 1; i++) {
    if (typeof arguments[i] === 'boolean') {
      strict = arguments[i]
    } else if (typeof arguments[i] === 'function') {
      log = arguments[i]
    }
  }

  if (!log) {
    log = function () {}
  }
  cb = arguments[arguments.length - 1]

  readJson_(file, log, strict, cb)
}

function readJson_ (file, log, strict, cb) {
  fs.readFile(file, 'utf8', function (er, d) {
    parseJson(file, er, d, log, strict, cb)
  })
}

function stripBOM (content) {
  // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
  // because the buffer-to-string conversion in `fs.readFileSync()`
  // translates it to FEFF, the UTF-16 BOM.
  if (content.charCodeAt(0) === 0xFEFF) {
    content = content.slice(1)
  }
  return content
}

function jsonClone (obj) {
  if (obj == null) {
    return obj
  } else if (Array.isArray(obj)) {
    var newarr = new Array(obj.length)
    for (var ii in obj) {
      newarr[ii] = jsonClone(obj[ii])
    }
    return newarr
  } else if (typeof obj === 'object') {
    var newobj = {}
    for (var kk in obj) {
      newobj[kk] = jsonClone(obj[kk])
    }
    return newobj
  } else {
    return obj
  }
}

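// Note: parsed results are cached below, keyed by the raw file contents, and
// jsonClone deep-copies entries going into that cache so that later mutations
// by callers cannot corrupt it.
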
function parseJson (file, er, d, log, strict, cb) {
  if (er && er.code === 'ENOENT') {
    return fs.stat(path.dirname(file), function (err, stat) {
      if (!err && stat && !stat.isDirectory()) {
        // ENOTDIR isn't used on Windows, but npm expects it.
        er = Object.create(er)
        er.code = 'ENOTDIR'
        return cb(er)
      } else {
        return indexjs(file, er, log, strict, cb)
      }
    })
  }

  if (er) {
    return cb(er)
  }

  if (cache[d]) {
    return cb(null, jsonClone(cache[d]))
  }

  var data
  try {
    data = safeJSON(stripBOM(d))
    for (var key in data) {
      if (/^_/.test(key)) {
        delete data[key]
      }
    }
  } catch (jsonErr) {
    data = parseIndex(d)
    if (!data) {
      return cb(parseError(jsonErr, file))
    }
  }
  extrasCached(file, d, data, log, strict, cb)
}

function extrasCached (file, d, data, log, strict, cb) {
  extras(file, data, log, strict, function (err, extrasData) {
    if (!err) {
      cache[d] = jsonClone(extrasData)
    }
    cb(err, extrasData)
  })
}

function indexjs (file, er, log, strict, cb) {
  if (path.basename(file) === 'index.js') {
    return cb(er)
  }

  var index = path.resolve(path.dirname(file), 'index.js')
  fs.readFile(index, 'utf8', function (er2, d) {
    if (er2) {
      return cb(er)
    }

    if (cache[d]) {
      return cb(null, cache[d])
    }

    var data = parseIndex(d)
    if (!data) {
      return cb(er)
    }

    extrasCached(file, d, data, log, strict, cb)
  })
}

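// If package.json is missing, indexjs() falls back to index.js in the same
// directory and looks for an embedded /**package { ... } **/ comment blob,
// which parseIndex() (near the bottom of this file) extracts and parses.
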
readJson.extras = extras
function extras (file, data, log_, strict_, cb_) {
  var log, strict, cb
  for (var i = 2; i < arguments.length - 1; i++) {
    if (typeof arguments[i] === 'boolean') {
      strict = arguments[i]
    } else if (typeof arguments[i] === 'function') {
      log = arguments[i]
    }
  }

  if (!log) {
    log = function () {}
  }
  cb = arguments[i]

  var set = readJson.extraSet
  var n = set.length
  var errState = null
  set.forEach(function (fn) {
    fn(file, data, then)
  })

  function then (er) {
    if (errState) {
      return
    }
    if (er) {
      return cb(errState = er)
    }
    if (--n > 0) {
      return
    }
    final(file, data, log, strict, cb)
  }
}

function scriptpath (file, data, cb) {
  if (!data.scripts) {
    return cb(null, data)
  }
  var k = Object.keys(data.scripts)
  k.forEach(scriptpath_, data.scripts)
  cb(null, data)
}

function scriptpath_ (key) {
  var s = this[key]
  // This is never allowed, and only causes problems
  if (typeof s !== 'string') {
    return delete this[key]
  }

  var spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
  if (s.match(spre)) {
    this[key] = this[key].replace(spre, '')
  }
}

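// Example of the rewrite above (hypothetical script value):
//
//   "start": "./node_modules/.bin/http-server"  ->  "start": "http-server"
//
// npm already puts node_modules/.bin on the PATH when running scripts, so
// the prefix is redundant.
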
function gypfile (file, data, cb) {
  var dir = path.dirname(file)
  var s = data.scripts || {}
  if (s.install || s.preinstall) {
    return cb(null, data)
  }
  if (data.gypfile === false) {
    return cb(null, data)
  }
  glob('*.gyp', { cwd: dir })
    .then(files => gypfile_(file, data, files, cb))
    .catch(er => cb(er))
}

function gypfile_ (file, data, files, cb) {
  if (!files.length) {
    return cb(null, data)
  }
  var s = data.scripts || {}
  s.install = 'node-gyp rebuild'
  data.scripts = s
  data.gypfile = true
  return cb(null, data)
}

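// Example: a package shipping a binding.gyp (or any *.gyp file) and declaring
// no install/preinstall script picks up the implicit native-build step, as if
// its package.json contained:
//
//   "scripts": { "install": "node-gyp rebuild" },
//   "gypfile": true
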
function serverjs (file, data, cb) {
  var dir = path.dirname(file)
  var s = data.scripts || {}
  if (s.start) {
    return cb(null, data)
  }
  fs.access(path.join(dir, 'server.js'), (err) => {
    if (!err) {
      s.start = 'node server.js'
      data.scripts = s
    }
    return cb(null, data)
  })
}

function authors (file, data, cb) {
  if (data.contributors) {
    return cb(null, data)
  }
  var af = path.resolve(path.dirname(file), 'AUTHORS')
  fs.readFile(af, 'utf8', function (er, ad) {
    // ignore error. just checking it.
    if (er) {
      return cb(null, data)
    }
    authors_(file, data, ad, cb)
  })
}

function authors_ (file, data, ad, cb) {
  ad = ad.split(/\r?\n/g).map(function (line) {
    return line.replace(/^\s*#.*$/, '').trim()
  }).filter(function (line) {
    return line
  })
  data.contributors = ad
  return cb(null, data)
}

function readme (file, data, cb) {
  if (data.readme) {
    return cb(null, data)
  }

  var dir = path.dirname(file)
  var globOpts = { cwd: dir, nocase: true, mark: true }
  glob('{README,README.*}', globOpts)
    .then(files => {
      // don't accept directories.
      files = files.filter(function (filtered) {
        return !filtered.match(/\/$/)
      })
      if (!files.length) {
        return cb()
      }
      var fn = preferMarkdownReadme(files)
      var rm = path.resolve(dir, fn)
      return readme_(file, data, rm, cb)
    })
    .catch(er => cb(er))
}

function preferMarkdownReadme (files) {
  var fallback = 0
  var re = /\.m?a?r?k?d?o?w?n?$/i
  for (var i = 0; i < files.length; i++) {
    if (files[i].match(re)) {
      return files[i]
    } else if (files[i].match(/README$/)) {
      fallback = i
    }
  }
  // prefer README.md, followed by README; otherwise, return
  // the first filename (which could be README)
  return files[fallback]
}

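// For example, preferMarkdownReadme(['README', 'README.md']) returns
// 'README.md': the deliberately loose /\.m?a?r?k?d?o?w?n?$/i pattern matches
// .md, .markdown, and similar variants, while a bare README is only kept as
// the fallback.
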
function readme_ (file, data, rm, cb) {
  var rmfn = path.basename(rm)
  fs.readFile(rm, 'utf8', function (er, rmData) {
    // maybe not readable, or something.
    if (er) {
      return cb()
    }
    data.readme = rmData
    data.readmeFilename = rmfn
    return cb(er, data)
  })
}

function mans (file, data, cb) {
  let cwd = data.directories && data.directories.man
  if (data.man || !cwd) {
    return cb(null, data)
  }
  const dirname = path.dirname(file)
  cwd = path.resolve(path.dirname(file), cwd)
  glob('**/*.[0-9]', { cwd })
    .then(mansGlob => {
      data.man = mansGlob.map(man =>
        path.relative(dirname, path.join(cwd, man)).split(path.sep).join('/')
      )
      return cb(null, data)
    })
    .catch(er => cb(er))
}

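// Example (hypothetical file names): with "directories": { "man": "./man" }
// and man/foo.1 plus man/bar.3 on disk, this fills in
// data.man = ['man/foo.1', 'man/bar.3'] using package-root-relative,
// forward-slash paths; the glob only picks up section-numbered pages.
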
function bins (file, data, cb) {
  data = normalizePackageBin(data)

  var m = data.directories && data.directories.bin
  if (data.bin || !m) {
    return cb(null, data)
  }

  m = path.resolve(path.dirname(file), path.join('.', path.join('/', m)))
  glob('**', { cwd: m })
    .then(binsGlob => bins_(file, data, binsGlob, cb))
    .catch(er => cb(er))
}

function bins_ (file, data, binsGlob, cb) {
  var m = (data.directories && data.directories.bin) || '.'
  data.bin = binsGlob.reduce(function (acc, mf) {
    if (mf && mf.charAt(0) !== '.') {
      var f = path.basename(mf)
      acc[f] = path.join(m, mf)
    }
    return acc
  }, {})
  return cb(null, normalizePackageBin(data))
}

function bundleDependencies (file, data, cb) {
  var bd = 'bundleDependencies'
  var bdd = 'bundledDependencies'
  // normalize key name
  if (data[bdd] !== undefined) {
    if (data[bd] === undefined) {
      data[bd] = data[bdd]
    }
    delete data[bdd]
  }
  if (data[bd] === false) {
    delete data[bd]
  } else if (data[bd] === true) {
    data[bd] = Object.keys(data.dependencies || {})
  } else if (data[bd] !== undefined && !Array.isArray(data[bd])) {
    delete data[bd]
  }
  return cb(null, data)
}

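// Normalization summary for the function above:
//   "bundledDependencies": [...]  -> renamed to "bundleDependencies"
//   "bundleDependencies": true    -> replaced with Object.keys(dependencies)
//   "bundleDependencies": false   -> removed
//   any other non-array value     -> removed
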
function githead (file, data, cb) {
  if (data.gitHead) {
    return cb(null, data)
  }
  var dir = path.dirname(file)
  var head = path.resolve(dir, '.git/HEAD')
  fs.readFile(head, 'utf8', function (er, headData) {
    if (er) {
      var parent = path.dirname(dir)
      if (parent === dir) {
        return cb(null, data)
      }
      return githead(dir, data, cb)
    }
    githead_(data, dir, headData, cb)
  })
}

function githead_ (data, dir, head, cb) {
  if (!head.match(/^ref: /)) {
    data.gitHead = head.trim()
    return cb(null, data)
  }

  var headRef = head.replace(/^ref: /, '').trim()
  var headFile = path.resolve(dir, '.git', headRef)

  fs.readFile(headFile, 'utf8', function (er, headData) {
    if (er || !headData) {
      var packFile = path.resolve(dir, '.git/packed-refs')
      return fs.readFile(packFile, 'utf8', function (readFileErr, refs) {
        if (readFileErr || !refs) {
          return cb(null, data)
        }
        refs = refs.split('\n')
        for (var i = 0; i < refs.length; i++) {
          var match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
          if (match && match[2].trim() === headRef) {
            data.gitHead = match[1]
            break
          }
        }
        return cb(null, data)
      })
    }
    headData = headData.replace(/^ref: /, '').trim()
    data.gitHead = headData
    return cb(null, data)
  })
}

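// Resolution order above: read .git/HEAD; if it is a symbolic ref
// ("ref: ..."), follow it to the loose ref file, and failing that scan
// .git/packed-refs for a matching "<40-hex-sha> <refname>" line. If no .git
// directory exists here, walk up the directory tree and retry.
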
/**
 * Warn if the bin references don't point to anything. This might be better in
 * normalize-package-data if it had access to the file path.
 */
function checkBinReferences_ (file, data, warn, cb) {
  if (!(data.bin instanceof Object)) {
    return cb()
  }

  var keys = Object.keys(data.bin)
  var keysLeft = keys.length
  if (!keysLeft) {
    return cb()
  }

  function handleExists (relName, result) {
    keysLeft--
    if (!result) {
      warn('No bin file found at ' + relName)
    }
    if (!keysLeft) {
      cb()
    }
  }

  keys.forEach(function (key) {
    var dirName = path.dirname(file)
    var relName = data.bin[key]
    /* istanbul ignore if - impossible, bins have been normalized */
    if (typeof relName !== 'string') {
      var msg = 'Bin filename for ' + key +
        ' is not a string: ' + util.inspect(relName)
      warn(msg)
      delete data.bin[key]
      handleExists(relName, true)
      return
    }

    var binPath = path.resolve(dirName, relName)
    fs.stat(binPath, (err) => handleExists(relName, !err))
  })
}

function final (file, data, log, strict, cb) {
  var pId = makePackageId(data)

  function warn (msg) {
    if (typoWarned[pId]) {
      return
    }
    if (log) {
      log('package.json', pId, msg)
    }
  }

  try {
    normalizeData(data, warn, strict)
  } catch (error) {
    return cb(error)
  }

  checkBinReferences_(file, data, warn, function () {
    typoWarned[pId] = true
    cb(null, data)
  })
}

function fillTypes (file, data, cb) {
  var index = data.main || 'index.js'

  if (typeof index !== 'string') {
    return cb(new TypeError('The "main" attribute must be of type string.'))
  }

  // TODO exports is much more complicated than this in verbose format
  // We need to support for instance
  //
  // "exports": {
  //   ".": [
  //     {
  //       "default": "./lib/npm.js"
  //     },
  //     "./lib/npm.js"
  //   ],
  //   "./package.json": "./package.json"
  // },
  // as well as conditional exports
  //
  // if (data.exports && typeof data.exports === 'string') {
  //   index = data.exports
  // }
  //
  // if (data.exports && data.exports['.']) {
  //   index = data.exports['.']
  //   if (typeof index !== 'string') {
  //   }
  // }

  var extless =
    path.join(path.dirname(index), path.basename(index, path.extname(index)))
  var dts = `./${extless}.d.ts`
  var dtsPath = path.join(path.dirname(file), dts)
  var hasDTSFields = 'types' in data || 'typings' in data
  if (!hasDTSFields && fs.existsSync(dtsPath)) {
    data.types = dts.split(path.sep).join('/')
  }

  cb(null, data)
}

function makePackageId (data) {
  var name = cleanString(data.name)
  var ver = cleanString(data.version)
  return name + '@' + ver
}

function cleanString (str) {
  return (!str || typeof (str) !== 'string') ? '' : str.trim()
}

// /**package { "name": "foo", "version": "1.2.3", ... } **/
function parseIndex (data) {
  data = data.split(/^\/\*\*package(?:\s|$)/m)

  if (data.length < 2) {
    return null
  }
  data = data[1]
  data = data.split(/\*\*\/$/m)

  if (data.length < 2) {
    return null
  }
  data = data[0]
  data = data.replace(/^\s*\*/mg, '')

  try {
    return safeJSON(data)
  } catch (er) {
    return null
  }
}

function parseError (ex, file) {
  var e = new Error('Failed to parse json\n' + ex.message)
  e.code = 'EJSONPARSE'
  e.path = file
  return e
}