1
0
Fork 0
mirror of https://github.com/shimataro/ssh-key-action.git synced 2025-06-19 22:52:10 +10:00

* first action! (#1)

This commit is contained in:
shimataro 2019-09-18 20:39:54 +09:00 committed by GitHub
parent 8deacc95b1
commit ace1e6a69a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3750 changed files with 1155519 additions and 0 deletions

89
node_modules/pacote/lib/extract-stream.js generated vendored Normal file
View file

@ -0,0 +1,89 @@
'use strict'
const Minipass = require('minipass')
const path = require('path')
const tar = require('tar')
module.exports = extractStream
module.exports._computeMode = computeMode
// Minipass transform that buffers a package.json body and, on end(),
// appends `_resolved`, `_integrity`, and `_from` just before the final `}`.
class Transformer extends Minipass {
  constructor (spec, opts) {
    super()
    this.spec = spec
    this.opts = opts
    this.str = ''
  }

  write (data) {
    // Accumulate everything; the actual output happens in end().
    this.str += data
    return true
  }

  end () {
    // Build the injected fields once, then splice them in over the
    // closing brace (and any trailing whitespace).
    const extras = [
      `\n,"_resolved": ${JSON.stringify(this.opts.resolved || '')}`,
      `\n,"_integrity": ${JSON.stringify(this.opts.integrity || '')}`,
      `\n,"_from": ${JSON.stringify(this.spec.toString())}`
    ].join('')
    super.write(this.str.replace(/}\s*$/, `${extras}\n}`))
    return super.end()
  }
}
// Union the entry's own mode with the configured mode, then clear any bits
// present in the umask (defaulting to no umask at all).
function computeMode (fileMode, optMode, umask) {
  const combined = fileMode | optMode
  const mask = umask || 0
  return combined & ~mask
}
// Returns a tar `transform` callback: only the root package.json entry gets
// a Transformer; every other entry returns undefined (pass through as-is).
function pkgJsonTransform (spec, opts) {
  return entry =>
    entry.path === 'package.json'
      ? new Transformer(spec, opts)
      : undefined
}
// Extract a package tarball stream into `dest`, normalizing entry modes and
// applying npm's .gitignore -> .npmignore rename rule. Returns the tar.x
// stream so callers can pipe into it.
function extractStream (spec, dest, opts) {
  opts = opts || {}
  // Paths of .npmignore files already seen, so a later .gitignore rename
  // never clobbers a real .npmignore in the same directory.
  const sawIgnores = new Set()
  return tar.x({
    cwd: dest,
    // Drop all link-type entries (symlinks/hardlinks).
    filter: (name, entry) => !entry.header.type.match(/^.*link$/i),
    // Strip the top-level directory ("package/") from entry paths.
    strip: 1,
    onwarn: msg => opts.log && opts.log.warn('tar', msg),
    uid: opts.uid,
    gid: opts.gid,
    umask: opts.umask,
    // Only rewrite package.json when we actually know the resolved URL.
    transform: opts.resolved && pkgJsonTransform(spec, opts),
    onentry (entry) {
      // Normalize modes with the configured file/dir modes and umask.
      if (entry.type.toLowerCase() === 'file') {
        entry.mode = computeMode(entry.mode, opts.fmode, opts.umask)
      } else if (entry.type.toLowerCase() === 'directory') {
        entry.mode = computeMode(entry.mode, opts.dmode, opts.umask)
      } else {
        entry.mode = computeMode(entry.mode, 0, opts.umask)
      }
      // Note: This mirrors logic in the fs read operations that are
      // employed during tarball creation, in the fstream-npm module.
      // It is duplicated here to handle tarballs that are created
      // using other means, such as system tar or git archive.
      if (entry.type.toLowerCase() === 'file') {
        const base = path.basename(entry.path)
        if (base === '.npmignore') {
          sawIgnores.add(entry.path)
        } else if (base === '.gitignore') {
          const npmignore = entry.path.replace(/\.gitignore$/, '.npmignore')
          if (!sawIgnores.has(npmignore)) {
            // Rename, may be clobbered later.
            entry.path = npmignore
          }
        }
      }
    }
  })
}

82
node_modules/pacote/lib/fetch.js generated vendored Normal file
View file

@ -0,0 +1,82 @@
'use strict'
const duck = require('protoduck')
const Fetcher = duck.define(['spec', 'opts', 'manifest'], {
packument: ['spec', 'opts'],
manifest: ['spec', 'opts'],
tarball: ['spec', 'opts'],
fromManifest: ['manifest', 'spec', 'opts'],
clearMemoized () {}
}, { name: 'Fetcher' })
module.exports = Fetcher
module.exports.packument = packument
// Fetch the packument for `spec` via its type-specific fetcher.
function packument (spec, opts) {
  return getFetcher(spec.type).packument(spec, opts)
}
module.exports.manifest = manifest
// Fetch the manifest for `spec` via its type-specific fetcher.
function manifest (spec, opts) {
  return getFetcher(spec.type).manifest(spec, opts)
}
module.exports.tarball = tarball
// Fetch the tarball stream for `spec` via its type-specific fetcher.
function tarball (spec, opts) {
  const fetcher = getFetcher(spec.type)
  return fetcher.tarball(spec, opts)
}
module.exports.fromManifest = fromManifest
// Fetch a tarball stream from an already-known manifest.
function fromManifest (manifest, spec, opts) {
  const fetcher = getFetcher(spec.type)
  return fetcher.fromManifest(manifest, spec, opts)
}
const fetchers = {}
module.exports.clearMemoized = clearMemoized
// Flush the memoization caches of every fetcher loaded so far.
function clearMemoized () {
  for (const key of Object.keys(fetchers)) {
    fetchers[key].clearMemoized()
  }
}
// Lazily require and memoize (in `fetchers`) the fetcher module for a spec
// type. Throws for unknown types. The switch is deliberately spelled out
// instead of using a dynamic require path.
function getFetcher (type) {
  if (!fetchers[type]) {
    // This is spelled out both to prevent sketchy stuff and to make life
    // easier for bundlers/preprocessors.
    switch (type) {
      case 'alias':
        fetchers[type] = require('./fetchers/alias')
        break
      case 'directory':
        fetchers[type] = require('./fetchers/directory')
        break
      case 'file':
        fetchers[type] = require('./fetchers/file')
        break
      case 'git':
        fetchers[type] = require('./fetchers/git')
        break
      case 'hosted':
        fetchers[type] = require('./fetchers/hosted')
        break
      case 'range':
        fetchers[type] = require('./fetchers/range')
        break
      case 'remote':
        fetchers[type] = require('./fetchers/remote')
        break
      case 'tag':
        fetchers[type] = require('./fetchers/tag')
        break
      case 'version':
        fetchers[type] = require('./fetchers/version')
        break
      default:
        throw new Error(`Invalid dependency type requested: ${type}`)
    }
  }
  return fetchers[type]
}

24
node_modules/pacote/lib/fetchers/alias.js generated vendored Normal file
View file

@ -0,0 +1,24 @@
'use strict'
const Fetcher = require('../fetch')
const fetchRegistry = require('./registry')
// `alias` fetcher: alias specs delegate every operation to the registry
// fetcher, using the aliased `subSpec` in place of the alias itself.
// Renamed the local const from `fetchRemote` (a copy-paste from remote.js)
// to `fetchAlias` for clarity; module interface is unchanged.
const fetchAlias = module.exports = Object.create(null)
Fetcher.impl(fetchAlias, {
  packument (spec, opts) {
    return fetchRegistry.packument(spec.subSpec, opts)
  },
  manifest (spec, opts) {
    return fetchRegistry.manifest(spec.subSpec, opts)
  },
  tarball (spec, opts) {
    return fetchRegistry.tarball(spec.subSpec, opts)
  },
  fromManifest (manifest, spec, opts) {
    return fetchRegistry.fromManifest(manifest, spec.subSpec, opts)
  }
})

88
node_modules/pacote/lib/fetchers/directory.js generated vendored Normal file
View file

@ -0,0 +1,88 @@
'use strict'
const BB = require('bluebird')
const Fetcher = require('../fetch')
const glob = BB.promisify(require('glob'))
const packDir = require('../util/pack-dir')
const readJson = require('../util/read-json')
const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const through = require('mississippi').through
const readFileAsync = BB.promisify(require('fs').readFile)
// `directory` fetcher: packages that live in a local directory on disk.
const fetchDirectory = module.exports = Object.create(null)
Fetcher.impl(fetchDirectory, {
  // Synthesize a one-version packument from the local manifest: the single
  // version is tagged `latest` and timestamped with the current time.
  packument (spec, opts) {
    return this.manifest(spec, opts).then(manifest => {
      return Object.assign({}, manifest, {
        'dist-tags': {
          'latest': manifest.version
        },
        time: {
          [manifest.version]: (new Date()).toISOString()
        },
        versions: {
          [manifest.version]: manifest
        }
      })
    })
  },
  // `directory` manifests come from the actual manifest/lockfile data.
  manifest (spec, opts) {
    const pkgPath = path.join(spec.fetchSpec, 'package.json')
    const srPath = path.join(spec.fetchSpec, 'npm-shrinkwrap.json')
    return BB.join(
      // Bluebird predicate catch: only ENOENT is intercepted here, and it is
      // re-thrown with a more specific code.
      readFileAsync(pkgPath).then(readJson).catch({ code: 'ENOENT' }, err => {
        err.code = 'ENOPACKAGEJSON'
        throw err
      }),
      // A missing shrinkwrap is not an error; it just resolves to null.
      readFileAsync(srPath).then(readJson).catch({ code: 'ENOENT' }, () => null),
      (pkg, sr) => {
        pkg._shrinkwrap = sr
        pkg._hasShrinkwrap = !!sr
        pkg._resolved = spec.fetchSpec
        pkg._integrity = false // Don't auto-calculate integrity
        pkg._shasum = false // Don't auto-calculate shasum either
        return pkg
      }
    ).then(pkg => {
      // When no explicit `bin` exists, expand `directories.bin` into a bin
      // map by globbing the directory (dotfiles excluded).
      if (!pkg.bin && pkg.directories && pkg.directories.bin) {
        const dirBin = pkg.directories.bin
        return glob(path.join(spec.fetchSpec, dirBin, '/**'), { nodir: true }).then(matches => {
          matches.forEach(filePath => {
            const relative = path.relative(spec.fetchSpec, filePath)
            if (relative && relative[0] !== '.') {
              if (!pkg.bin) { pkg.bin = {} }
              pkg.bin[path.basename(relative)] = relative
            }
          })
        }).then(() => pkg)
      } else {
        return pkg
      }
    })
  },
  // As of npm@5, the npm installer doesn't pack + install directories: it just
  // creates symlinks. This code is here because `npm pack` still needs the
  // ability to create a tarball from a local directory.
  tarball (spec, opts) {
    const stream = through()
    this.manifest(spec, opts).then(mani => {
      return pipe(this.fromManifest(mani, spec, opts), stream)
    }).catch(err => stream.emit('error', err))
    return stream
  },
  // `directory` tarballs are generated in a very similar way to git tarballs.
  fromManifest (manifest, spec, opts) {
    const stream = through()
    packDir(manifest, manifest._resolved, manifest._resolved, stream, opts).catch(err => {
      stream.emit('error', err)
    })
    return stream
  }
})

78
node_modules/pacote/lib/fetchers/file.js generated vendored Normal file
View file

@ -0,0 +1,78 @@
'use strict'
const BB = require('bluebird')
const cacache = require('cacache')
const Fetcher = require('../fetch')
const fs = require('fs')
const pipe = BB.promisify(require('mississippi').pipe)
const through = require('mississippi').through
const readFileAsync = BB.promisify(fs.readFile)
const statAsync = BB.promisify(fs.stat)
const MAX_BULK_SIZE = 2 * 1024 * 1024 // 2MB
// `file` packages refer to local tarball files.
const fetchFile = module.exports = Object.create(null)
Fetcher.impl(fetchFile, {
  packument (spec, opts) {
    return BB.reject(new Error('Not implemented yet'))
  },
  manifest (spec, opts) {
    // We can't do much here. `finalizeManifest` will take care of
    // calling `tarball` to fill out all the necessary details.
    return BB.resolve(null)
  },
  // All the heavy lifting for `file` packages is done here.
  // They're never cached. We just read straight out of the file.
  // TODO - maybe they *should* be cached?
  tarball (spec, opts) {
    const src = spec._resolved || spec.fetchSpec
    const stream = through()
    statAsync(src).then(stat => {
      if (spec._resolved) { stream.emit('manifest', spec) }
      if (stat.size <= MAX_BULK_SIZE) {
        // Small file: read it in one shot instead of streaming.
        return readFileAsync(src).then(data => {
          if (opts.cache) {
            // Also stash the tarball bytes in cacache, keyed by source path.
            // NOTE(review): this key is "pacote:tarball:file:<src>" but the
            // streaming branch below uses "pacote:tarball:<src>" — looks
            // inconsistent; confirm against cacache consumers.
            return cacache.put(
              opts.cache, `pacote:tarball:file:${src}`, data, {
                integrity: opts.integrity
              }
            ).then(integrity => ({ data, integrity }))
          } else {
            return { data }
          }
        }).then(info => {
          if (info.integrity) { stream.emit('integrity', info.integrity) }
          stream.write(info.data, () => {
            stream.end()
          })
        })
      } else {
        // Large file: stream through the cache first (when configured),
        // then pipe the file into the output stream.
        let integrity
        const cacheWriter = !opts.cache
          ? BB.resolve(null)
          : (pipe(
            fs.createReadStream(src),
            cacache.put.stream(opts.cache, `pacote:tarball:${src}`, {
              integrity: opts.integrity
            }).on('integrity', d => { integrity = d })
          ))
        return cacheWriter.then(() => {
          if (integrity) { stream.emit('integrity', integrity) }
          return pipe(fs.createReadStream(src), stream)
        })
      }
    }).catch(err => stream.emit('error', err))
    return stream
  },
  fromManifest (manifest, spec, opts) {
    return this.tarball(manifest || spec, opts)
  }
})

178
node_modules/pacote/lib/fetchers/git.js generated vendored Normal file
View file

@ -0,0 +1,178 @@
'use strict'
const BB = require('bluebird')
const cacache = require('cacache')
const cacheKey = require('../util/cache-key')
const Fetcher = require('../fetch')
const git = require('../util/git')
const mkdirp = BB.promisify(require('mkdirp'))
const pickManifest = require('npm-pick-manifest')
const optCheck = require('../util/opt-check')
const osenv = require('osenv')
const packDir = require('../util/pack-dir')
const PassThrough = require('stream').PassThrough
const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const rimraf = BB.promisify(require('rimraf'))
const uniqueFilename = require('unique-filename')
// `git` dependencies are fetched from git repositories and packed up.
const fetchGit = module.exports = Object.create(null)
Fetcher.impl(fetchGit, {
  packument (spec, opts) {
    return BB.reject(new Error('Not implemented yet.'))
  },
  // Resolve the spec to a manifest. Hosted shortcut specs (e.g. "user/repo")
  // go through the fallback chain; everything else resolves directly.
  manifest (spec, opts) {
    opts = optCheck(opts)
    if (spec.hosted && spec.hosted.getDefaultRepresentation() === 'shortcut') {
      return hostedManifest(spec, opts)
    } else {
      // If it's not a shortcut, don't do fallbacks.
      return plainManifest(spec.fetchSpec, spec, opts)
    }
  },
  // Stream a packed tarball for the git package, re-emitting 'manifest' and
  // 'integrity' events on the returned stream.
  tarball (spec, opts) {
    opts = optCheck(opts)
    const stream = new PassThrough()
    this.manifest(spec, opts).then(manifest => {
      stream.emit('manifest', manifest)
      return pipe(
        this.fromManifest(
          manifest, spec, opts
        ).on('integrity', i => stream.emit('integrity', i)), stream
      )
    }).catch(err => stream.emit('error', err))
    return stream
  },
  // Stream the packed directory for `manifest`, preferring the cacache copy
  // and falling back to a fresh clone + pack on cache miss (or no cache).
  fromManifest (manifest, spec, opts) {
    opts = optCheck(opts)
    let streamError
    const stream = new PassThrough().on('error', e => { streamError = e })
    const cacheName = manifest._uniqueResolved || manifest._resolved || ''
    // Clone into a tmp dir, pin the resolved sha, and pack into `stream`.
    const packFresh = () => withTmp(opts, tmp => {
      if (streamError) { throw streamError }
      return cloneRepo(
        spec, manifest._repo, manifest._ref, manifest._rawRef, tmp, opts
      ).then(HEAD => {
        if (streamError) { throw streamError }
        // BUGFIX: was /(:?#.*)?$/ — a typo'd group ("(:?" instead of "(?:")
        // that could also swallow a ':' preceding the '#'. Now matches the
        // regex used in plainManifest().
        manifest._resolved = spec.saveSpec.replace(/(?:#.*)?$/, `#${HEAD}`)
        manifest._uniqueResolved = manifest._resolved
        return packDir(manifest, manifest._uniqueResolved, tmp, stream, opts)
      })
    }).catch(err => stream.emit('error', err))
    if (!opts.cache) {
      // BUGFIX: the original unconditionally ran `cacheStream.pipe(stream)`,
      // which threw a TypeError (`false.pipe`) when no cache was configured.
      packFresh()
      return stream
    }
    const cacheStream = cacache.get.stream(
      opts.cache, cacheKey('packed-dir', cacheName), opts
    ).on('integrity', i => stream.emit('integrity', i))
    cacheStream.pipe(stream)
    cacheStream.on('error', err => {
      if (err.code !== 'ENOENT') {
        return stream.emit('error', err)
      } else {
        // Cache miss: tell consumers to restart, then pack from a clone.
        stream.emit('reset')
        return packFresh()
      }
    })
    return stream
  }
})
// Resolve a hosted shortcut spec by trying its representations in order:
// git://, then https://, then ssh://. Each fallback only runs when the
// previous attempt failed AND the next representation exists.
function hostedManifest (spec, opts) {
  return BB.resolve(null).then(() => {
    const gitUrl = spec.hosted.git()
    if (!gitUrl) {
      throw new Error(`No git url for ${spec}`)
    }
    return plainManifest(gitUrl, spec, opts)
  }).catch(err => {
    const httpsUrl = spec.hosted.https()
    if (!httpsUrl) {
      throw err
    }
    return plainManifest(httpsUrl, spec, opts)
  }).catch(err => {
    const sshUrl = spec.hosted.sshurl()
    if (!sshUrl) {
      throw err
    }
    return plainManifest(sshUrl, spec, opts)
  })
}
// Build a minimal git manifest for `repo`. When the committish resolves to
// a known remote ref we pin `_resolved` to its sha; otherwise a full clone
// will be needed and `_resolved` is only set if the raw ref is already a
// 40-char commit sha.
function plainManifest (repo, spec, opts) {
  const rawRef = spec.gitCommittish || spec.gitRange
  return resolve(repo, spec, spec.name, opts).then(ref => {
    if (ref) {
      const resolved = spec.saveSpec.replace(/(?:#.*)?$/, `#${ref.sha}`)
      return {
        _repo: repo,
        _resolved: resolved,
        _spec: spec,
        _ref: ref,
        _rawRef: spec.gitCommittish || spec.gitRange,
        _uniqueResolved: resolved,
        _integrity: false,
        _shasum: false
      }
    }
    // We're SOL and need a full clone :(
    //
    // If we're confident enough that `rawRef` is a commit SHA,
    // then we can at least get `finalize-manifest` to cache its result.
    const resolved = spec.saveSpec.replace(/(?:#.*)?$/, rawRef ? `#${rawRef}` : '')
    const resolvedIfSha = rawRef && rawRef.match(/^[a-f0-9]{40}$/) && resolved
    return {
      _repo: repo,
      _rawRef: rawRef,
      _resolved: resolvedIfSha,
      _uniqueResolved: resolvedIfSha,
      _integrity: false,
      _shasum: false
    }
  })
}
// Resolve `spec`'s committish/range against the remote refs of `url`.
// Semver ranges pick a version via npm-pick-manifest; plain committishes
// are looked up by ref name or by sha.
function resolve (url, spec, name, opts) {
  const isSemver = !!spec.gitRange
  return git.revs(url, opts).then(remoteRefs => {
    if (isSemver) {
      return pickManifest({
        versions: remoteRefs.versions,
        'dist-tags': remoteRefs['dist-tags'],
        name: name
      }, spec.gitRange, opts)
    }
    if (!remoteRefs) {
      return null
    }
    return BB.resolve(
      remoteRefs.refs[spec.gitCommittish] ||
      remoteRefs.refs[remoteRefs.shas[spec.gitCommittish]]
    )
  })
}
// Run `cb` with a temporary directory, cleaning up afterwards.
function withTmp (opts, cb) {
  if (!opts.cache) {
    // No cache configured: create our own tmp dir and rimraf it when done
    // via bluebird's disposer/using resource management.
    const tmpDir = path.join(osenv.tmpdir(), 'pacote-git-tmp')
    const tmpName = uniqueFilename(tmpDir, 'git-clone')
    const tmp = mkdirp(tmpName).then(() => tmpName).disposer(rimraf)
    return BB.using(tmp, cb)
  }
  // cacache has a special facility for working in a tmp dir
  return cacache.tmp.withTmp(opts.cache, { tmpPrefix: 'git-clone' }, cb)
}
// Only certain whitelisted hosted gits support shallow cloning
const SHALLOW_HOSTS = new Set(['github', 'gist', 'gitlab', 'bitbucket'])
// Clone `repo` at `ref` into `tmp`. Uses a shallow clone when we have a
// concrete resolved ref and the host is known to support it.
function cloneRepo (spec, repo, resolvedRef, rawRef, tmp, opts) {
  const ref = resolvedRef ? resolvedRef.ref : rawRef
  const canShallow =
    resolvedRef && spec.hosted && SHALLOW_HOSTS.has(spec.hosted.type)
  return canShallow
    ? git.shallow(repo, ref, tmp, opts)
    : git.clone(repo, ref, tmp, opts)
}

3
node_modules/pacote/lib/fetchers/hosted.js generated vendored Normal file
View file

@ -0,0 +1,3 @@
'use strict'
module.exports = require('./git')

3
node_modules/pacote/lib/fetchers/range.js generated vendored Normal file
View file

@ -0,0 +1,3 @@
'use strict'
module.exports = require('./registry')

32
node_modules/pacote/lib/fetchers/registry/index.js generated vendored Normal file
View file

@ -0,0 +1,32 @@
'use strict'
const cacache = require('cacache')
const Fetcher = require('../../fetch')
const regManifest = require('./manifest')
const regPackument = require('./packument')
const regTarball = require('./tarball')
// Registry fetcher: a thin delegation layer over the manifest, packument,
// and tarball submodules.
const fetchRegistry = module.exports = Object.create(null)
Fetcher.impl(fetchRegistry, {
  packument (spec, opts) {
    return regPackument(spec, opts)
  },
  manifest (spec, opts) {
    return regManifest(spec, opts)
  },
  tarball (spec, opts) {
    return regTarball(spec, opts)
  },
  fromManifest (manifest, spec, opts) {
    return regTarball.fromManifest(manifest, spec, opts)
  },
  clearMemoized () {
    // Both cacache's in-memory layer and the packument LRU hold state.
    cacache.clearMemoized()
    regPackument.clearMemoized()
  }
})

81
node_modules/pacote/lib/fetchers/registry/manifest.js generated vendored Normal file
View file

@ -0,0 +1,81 @@
'use strict'
const fetch = require('npm-registry-fetch')
const fetchPackument = require('./packument')
const optCheck = require('../../util/opt-check')
const pickManifest = require('npm-pick-manifest')
const ssri = require('ssri')
module.exports = manifest
// Fetch the packument for `spec`, pick the matching version's manifest, and
// annotate it with registry-specific `_` fields.
function manifest (spec, opts) {
  opts = optCheck(opts)
  return getManifest(spec, opts)
    .then(rawManifest => annotateManifest(spec, rawManifest, opts))
}
// Fetch the packument and pick the manifest matching `spec.fetchSpec`.
// When the pick fails against stale cached data (ETARGET/E403), refetch
// with revalidation forced and try once more.
function getManifest (spec, opts) {
  opts = opts.concat({
    // enjoyBy (date-limited resolution) requires the full metadata doc.
    fullMetadata: opts.enjoyBy ? true : opts.fullMetadata
  })
  return fetchPackument(spec, opts).then(packument => {
    try {
      return pickManifest(packument, spec.fetchSpec, {
        defaultTag: opts.defaultTag,
        enjoyBy: opts.enjoyBy,
        includeDeprecated: opts.includeDeprecated
      })
    } catch (err) {
      // Only retry when the data came from cache and we're allowed online.
      if ((err.code === 'ETARGET' || err.code === 'E403') && packument._cached && !opts.offline) {
        opts.log.silly(
          'registry:manifest',
          `no matching version for ${spec.name}@${spec.fetchSpec} in the cache. Forcing revalidation.`
        )
        opts = opts.concat({
          preferOffline: false,
          preferOnline: true
        })
        return fetchPackument(spec, opts.concat({
          // Fetch full metadata in case ETARGET was due to corgi delay
          fullMetadata: true
        })).then(packument => {
          // NOTE(review): this retry omits `includeDeprecated`, unlike the
          // first pickManifest call above — confirm whether intentional.
          return pickManifest(packument, spec.fetchSpec, {
            defaultTag: opts.defaultTag,
            enjoyBy: opts.enjoyBy
          })
        })
      } else {
        throw err
      }
    }
  })
}
// Stamp registry-derived `_integrity`, `_shasum`, and `_resolved` fields
// onto `manifest` (mutating and returning it). Falls back to a guessed
// tarball URL, recorded as a warning, when `dist.tarball` is missing.
function annotateManifest (spec, manifest, opts) {
  const dist = manifest.dist
  const shasum = dist && dist.shasum
  manifest._integrity = dist && dist.integrity
  manifest._shasum = shasum
  if (!manifest._integrity && shasum) {
    // Use legacy dist.shasum field if available.
    manifest._integrity = ssri.fromHex(shasum, 'sha1').toString()
  }
  manifest._resolved = dist && dist.tarball
  if (!manifest._resolved) {
    _guessTarballUrl(spec, manifest, opts)
  }
  return manifest
}

// Fill in a conventional registry tarball URL and push a warning onto the
// manifest describing the missing dist.tarball field.
function _guessTarballUrl (spec, manifest, opts) {
  const registry = fetch.pickRegistry(spec, opts)
  const uri = registry.replace(/\/?$/, '/') + spec.escapedName
  const err = new Error(
    `Manifest for ${manifest.name}@${manifest.version} from ${uri} is missing a tarball url (pkg.dist.tarball). Guessing a default.`
  )
  err.code = 'ENOTARBALL'
  err.manifest = manifest
  if (!manifest._warnings) { manifest._warnings = [] }
  manifest._warnings.push(err.message)
  manifest._resolved =
    `${registry}/${manifest.name}/-/${manifest.name}-${manifest.version}.tgz`
}

92
node_modules/pacote/lib/fetchers/registry/packument.js generated vendored Normal file
View file

@ -0,0 +1,92 @@
'use strict'
const BB = require('bluebird')
const fetch = require('npm-registry-fetch')
const LRU = require('lru-cache')
const optCheck = require('../../util/opt-check')
// Corgis are cute. 🐕🐶
const CORGI_DOC = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
const JSON_DOC = 'application/json'
module.exports = packument
// Fetch the registry packument (the package metadata document) for `spec`.
function packument (spec, opts) {
  opts = optCheck(opts)
  const registry = fetch.pickRegistry(spec, opts)
  const packumentUri = registry.replace(/\/?$/, '/') + spec.escapedName
  return fetchPackument(packumentUri, registry, spec, opts)
}
// In-memory packument cache, bounded by total content-length and a short
// TTL so long-running processes pick up registry changes quickly.
const MEMO = new LRU({
  length: m => m._contentLength,
  max: 200 * 1024 * 1024, // 200MB
  maxAge: 30 * 1000 // 30s
})
module.exports.clearMemoized = clearMemoized
// Drop every memoized packument from the default LRU cache.
function clearMemoized () {
  MEMO.reset()
}
// Fetch (and memoize) a packument from `uri`. Requests the filtered "corgi"
// document unless full metadata was asked for, and retries with full
// metadata on E404 for registries that don't serve the corgi content type.
function fetchPackument (uri, registry, spec, opts) {
  const mem = pickMem(opts)
  const accept = opts.fullMetadata ? JSON_DOC : CORGI_DOC
  // The memo key includes the accept header: corgi and full docs differ.
  const memoKey = `${uri}~(${accept})`
  // NOTE(review): `mem.has` is assumed here; a caller-provided memoize
  // object exposing only get/set would throw — confirm expected contract.
  if (mem && !opts.preferOnline && mem.has(memoKey)) {
    return BB.resolve(mem.get(memoKey))
  }
  return fetch(uri, opts.concat({
    headers: {
      'pacote-req-type': 'packument',
      'pacote-pkg-id': `registry:${spec.name}`,
      accept
    },
    spec
  }, opts, {
    // Force integrity to null: we never check integrity hashes for manifests
    integrity: null
  })).then(res => res.json().then(packument => {
    // Record cache provenance and size for the LRU's length function.
    packument._cached = res.headers.has('x-local-cache')
    packument._contentLength = +res.headers.get('content-length')
    // NOTE - we need to call pickMem again because proxy
    // objects get reused!
    const mem = pickMem(opts)
    if (mem) {
      mem.set(memoKey, packument)
    }
    return packument
  })).catch(err => {
    if (err.code === 'E404' && !opts.fullMetadata) {
      return fetchPackument(uri, registry, spec, opts.concat({
        fullMetadata: true
      }))
    } else {
      throw err
    }
  })
}
// Minimal get/set adapter letting a plain object stand in for a Map-like
// memoization cache.
class ObjProxy {
  get (key) { return this.obj[key] }
  set (key, val) { this.obj[key] = val }
}
// This object is used synchronously and immediately, so
// we can safely reuse it instead of consing up new ones
const PROX = new ObjProxy()
// Choose the memoization cache for a request: the default LRU when none was
// provided, a caller-supplied Map-like as-is, a plain object wrapped in
// PROX, or null (no memoization) for anything else.
function pickMem (opts) {
  const memoize = opts && opts.memoize
  if (!memoize) {
    return MEMO
  }
  if (memoize.get && memoize.set) {
    return memoize
  }
  if (typeof memoize === 'object') {
    PROX.obj = memoize
    return PROX
  }
  return null
}

102
node_modules/pacote/lib/fetchers/registry/tarball.js generated vendored Normal file
View file

@ -0,0 +1,102 @@
'use strict'
const BB = require('bluebird')
const fetch = require('npm-registry-fetch')
const manifest = require('./manifest')
const optCheck = require('../../util/opt-check')
const PassThrough = require('stream').PassThrough
const ssri = require('ssri')
const url = require('url')
module.exports = tarball
// Stream a registry tarball for `spec`. When `opts.resolved` already points
// into the same registry, skip the manifest lookup entirely by synthesizing
// a minimal "fakeChild" manifest.
function tarball (spec, opts) {
  opts = optCheck(opts)
  const registry = fetch.pickRegistry(spec, opts)
  const stream = new PassThrough()
  let mani
  if (
    opts.resolved &&
    // spec.type === 'version' &&
    opts.resolved.indexOf(registry) === 0
  ) {
    // fakeChild is a shortcut to avoid looking up a manifest!
    mani = BB.resolve({
      name: spec.name,
      version: spec.fetchSpec,
      _integrity: opts.integrity,
      _resolved: opts.resolved,
      _fakeChild: true
    })
  } else {
    // We can't trust opts.resolved if it's going to a separate host.
    mani = manifest(spec, opts)
  }
  mani.then(mani => {
    // Synthesized manifests are placeholders; don't advertise them.
    !mani._fakeChild && stream.emit('manifest', mani)
    const fetchStream = fromManifest(mani, spec, opts).on(
      'integrity', i => stream.emit('integrity', i)
    )
    fetchStream.on('error', err => stream.emit('error', err))
    fetchStream.pipe(stream)
    return null
  }).catch(err => stream.emit('error', err))
  return stream
}
module.exports.fromManifest = fromManifest
// Stream the tarball at `manifest._resolved`, verifying integrity when
// known and re-emitting the cache's integrity value on the returned stream.
function fromManifest (manifest, spec, opts) {
  opts = optCheck(opts)
  if (spec.scope) { opts = opts.concat({ scope: spec.scope }) }
  const stream = new PassThrough()
  const registry = fetch.pickRegistry(spec, opts)
  const uri = getTarballUrl(spec, registry, manifest, opts)
  fetch(uri, opts.concat({
    headers: {
      'pacote-req-type': 'tarball',
      'pacote-pkg-id': `registry:${manifest.name}@${uri}`
    },
    integrity: manifest._integrity,
    algorithms: [
      // Hash with the algorithm we already trust, or sha1 for legacy data.
      manifest._integrity
        ? ssri.parse(manifest._integrity).pickAlgorithm()
        : 'sha1'
    ],
    spec
  }, opts))
    .then(res => {
      const hash = res.headers.get('x-local-cache-hash')
      if (hash) {
        stream.emit('integrity', decodeURIComponent(hash))
      }
      res.body.on('error', err => stream.emit('error', err))
      res.body.pipe(stream)
      return null
    })
    .catch(err => stream.emit('error', err))
  return stream
}
// Normalize the tarball URL's protocol (and port) to match the registry's
// when both live on the same host. See https://github.com/npm/npm/pull/9471
//
// TL;DR: Some alternative registries host tarballs on http and packuments
// on https, and vice-versa. There's also a case where people who can't use
// SSL to access the npm registry, for example, might use
// `--registry=http://registry.npmjs.org/`. In this case, we need to
// rewrite `tarball` to match the protocol.
//
function getTarballUrl (spec, registry, mani, opts) {
  const reg = url.parse(registry)
  const tarball = url.parse(mani._resolved)
  const sameHost = reg.hostname === tarball.hostname
  const sameProtocol = reg.protocol === tarball.protocol
  if (sameHost && !sameProtocol) {
    tarball.protocol = reg.protocol
    // Ports might be same host different protocol!
    if (reg.port !== tarball.port) {
      delete tarball.host
      tarball.port = reg.port
    }
    // `href` would override the mutated fields in url.format().
    delete tarball.href
  }
  return url.format(tarball)
}

34
node_modules/pacote/lib/fetchers/remote.js generated vendored Normal file
View file

@ -0,0 +1,34 @@
'use strict'
const BB = require('bluebird')
const Fetcher = require('../fetch')
const fetchRegistry = require('./registry')
// `remote` fetcher: arbitrary tarball URLs that live outside any registry.
const fetchRemote = module.exports = Object.create(null)
Fetcher.impl(fetchRemote, {
  packument (spec, opts) {
    return BB.reject(new Error('Not implemented yet'))
  },
  manifest (spec, opts) {
    // We can't get the manifest for a remote tarball until
    // we extract the tarball itself.
    // `finalize-manifest` takes care of this process of extracting
    // a manifest based on ./tarball.js
    return BB.resolve(null)
  },
  // Reuse the registry tarball streamer with a minimal fake manifest that
  // only carries the URL and (optional) expected integrity.
  tarball (spec, opts) {
    const uri = spec._resolved || spec.fetchSpec
    return fetchRegistry.fromManifest({
      _resolved: uri,
      _integrity: opts.integrity
    }, spec, opts)
  },
  fromManifest (manifest, spec, opts) {
    return this.tarball(manifest || spec, opts)
  }
})

3
node_modules/pacote/lib/fetchers/tag.js generated vendored Normal file
View file

@ -0,0 +1,3 @@
'use strict'
module.exports = require('./registry')

3
node_modules/pacote/lib/fetchers/version.js generated vendored Normal file
View file

@ -0,0 +1,3 @@
'use strict'
module.exports = require('./registry')

255
node_modules/pacote/lib/finalize-manifest.js generated vendored Normal file
View file

@ -0,0 +1,255 @@
'use strict'
const BB = require('bluebird')
const cacache = require('cacache')
const cacheKey = require('./util/cache-key')
const fetchFromManifest = require('./fetch').fromManifest
const finished = require('./util/finished')
const minimatch = require('minimatch')
const normalize = require('normalize-package-data')
const optCheck = require('./util/opt-check')
const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const ssri = require('ssri')
const tar = require('tar')
const readJson = require('./util/read-json')
// `finalizeManifest` takes as input the various kinds of manifests that
// manifest handlers ('lib/fetchers/*.js#manifest()') return, and makes sure
// they are:
//
// * filled out with any required data that the handler couldn't fill in
// * formatted consistently
// * cached so we don't have to repeat this work more than necessary
//
// The biggest thing this package might do is do a full tarball extraction in
// order to find missing bits of metadata required by the npm installer. For
// example, it will fill in `_shrinkwrap`, `_integrity`, and other details that
// the plain manifest handlers would require a tarball to fill out. If a
// handler returns everything necessary, this process is skipped.
//
// If we get to the tarball phase, the corresponding tarball handler for the
// requested type will be invoked and the entire tarball will be read from the
// stream.
//
module.exports = finalizeManifest
// Produce a complete, normalized Manifest for `pkg`/`spec`, filling any
// missing fields by extracting the tarball, and caching the finalized
// result in cacache when a stable key is available.
function finalizeManifest (pkg, spec, opts) {
  const key = finalKey(pkg, spec)
  opts = optCheck(opts)
  // Only consult the cache when the answer can't be stale or partial.
  const cachedManifest = (opts.cache && key && !opts.preferOnline && !opts.fullMetadata && !opts.enjoyBy)
    ? cacache.get.info(opts.cache, key, opts)
    : BB.resolve(null)
  return cachedManifest.then(cached => {
    if (cached && cached.metadata && cached.metadata.manifest) {
      return new Manifest(cached.metadata.manifest)
    } else {
      return tarballedProps(pkg, spec, opts).then(props => {
        return pkg && pkg.name
          ? new Manifest(pkg, props, opts.fullMetadata)
          : new Manifest(props, null, opts.fullMetadata)
      }).then(manifest => {
        // The key may only become computable once tarball props fill in
        // fields like `_integrity`.
        const cacheKey = key || finalKey(manifest, spec)
        if (!opts.cache || !cacheKey) {
          return manifest
        } else {
          // Store placeholder content ('.'); the manifest itself rides in
          // the cache entry's metadata.
          return cacache.put(
            opts.cache, cacheKey, '.', {
              metadata: {
                id: manifest._id,
                manifest,
                type: 'finalized-manifest'
              }
            }
          ).then(() => manifest)
        }
      })
    }
  })
}
module.exports.Manifest = Manifest
// Normalized manifest object used throughout pacote. Standard fields come
// from `pkg`; fields registries often omit (engines, bin, shrinkwrap,
// hashes) fall back to values extracted from the tarball (`fromTarball`).
function Manifest (pkg, fromTarball, fullMetadata) {
  fromTarball = fromTarball || {}
  if (fullMetadata) {
    // Full-metadata mode starts from a copy of every field on `pkg`.
    Object.assign(this, pkg)
  }
  this.name = pkg.name
  this.version = pkg.version
  this.engines = pkg.engines || fromTarball.engines
  this.cpu = pkg.cpu || fromTarball.cpu
  this.os = pkg.os || fromTarball.os
  this.dependencies = pkg.dependencies || {}
  this.optionalDependencies = pkg.optionalDependencies || {}
  this.devDependencies = pkg.devDependencies || {}
  // Accept both historical spellings of the bundled-deps field.
  const bundled = (
    pkg.bundledDependencies ||
    pkg.bundleDependencies ||
    false
  )
  this.bundleDependencies = bundled
  this.peerDependencies = pkg.peerDependencies || {}
  this.deprecated = pkg.deprecated || false
  // These depend entirely on each handler
  this._resolved = pkg._resolved
  // Not all handlers (or registries) provide these out of the box,
  // and if they don't, we need to extract and read the tarball ourselves.
  // These are details required by the installer.
  this._integrity = pkg._integrity || fromTarball._integrity || null
  this._shasum = pkg._shasum || fromTarball._shasum || null
  this._shrinkwrap = pkg._shrinkwrap || fromTarball._shrinkwrap || null
  this.bin = pkg.bin || fromTarball.bin || null
  if (this.bin && Array.isArray(this.bin)) {
    // Code yanked from read-package-json.
    const m = (pkg.directories && pkg.directories.bin) || '.'
    this.bin = this.bin.reduce((acc, mf) => {
      if (mf && mf.charAt(0) !== '.') {
        const f = path.basename(mf)
        acc[f] = path.join(m, mf)
      }
      return acc
    }, {})
  }
  this._id = null
  // TODO - freezing and inextensibility pending npm changes. See test suite.
  // Object.preventExtensions(this)
  normalize(this)
  // I don't want this why did you give it to me. Go away. 🔥🔥🔥🔥
  delete this.readme
  // Object.freeze(this)
}
// Some things aren't filled in by standard manifest fetching.
// If this function needs to do its work, it will grab the
// package tarball, extract it, and take whatever it needs
// from the stream.
// Fill in manifest fields the handler couldn't provide by fetching and
// scanning the package tarball. Resolves with an object of extra props
// (possibly empty when nothing is missing).
function tarballedProps (pkg, spec, opts) {
  // Work out exactly which pieces of metadata are missing from `pkg`.
  const needsShrinkwrap = (!pkg || (
    pkg._hasShrinkwrap !== false &&
    !pkg._shrinkwrap
  ))
  const needsBin = !!(!pkg || (
    !pkg.bin &&
    pkg.directories &&
    pkg.directories.bin
  ))
  // `_integrity: false` / `_shasum: false` mean "deliberately skipped".
  const needsIntegrity = !pkg || (!pkg._integrity && pkg._integrity !== false)
  const needsShasum = !pkg || (!pkg._shasum && pkg._shasum !== false)
  const needsHash = needsIntegrity || needsShasum
  const needsManifest = !pkg || !pkg.name
  const needsExtract = needsShrinkwrap || needsBin || needsManifest
  if (!needsShrinkwrap && !needsBin && !needsHash && !needsManifest) {
    return BB.resolve({})
  } else {
    opts = optCheck(opts)
    const tarStream = fetchFromManifest(pkg, spec, opts)
    const extracted = needsExtract && new tar.Parse()
    // All extractions run over a single pass of the tarball stream; each
    // BB.join argument is either a pending extraction or `false` (skipped).
    return BB.join(
      needsShrinkwrap && jsonFromStream('npm-shrinkwrap.json', extracted),
      needsManifest && jsonFromStream('package.json', extracted),
      needsBin && getPaths(extracted),
      needsHash && ssri.fromStream(tarStream, { algorithms: ['sha1', 'sha512'] }),
      needsExtract && pipe(tarStream, extracted),
      (sr, mani, paths, hash) => {
        if (needsManifest && !mani) {
          const err = new Error(`Non-registry package missing package.json: ${spec}.`)
          err.code = 'ENOPACKAGEJSON'
          throw err
        }
        const extraProps = mani || {}
        delete extraProps._resolved
        // drain out the rest of the tarball
        tarStream.resume()
        // if we have directories.bin, we need to collect any matching files
        // to add to bin
        if (paths && paths.length) {
          const dirBin = mani
            ? (mani && mani.directories && mani.directories.bin)
            : (pkg && pkg.directories && pkg.directories.bin)
          if (dirBin) {
            extraProps.bin = {}
            paths.forEach(filePath => {
              if (minimatch(filePath, dirBin + '/**')) {
                const relative = path.relative(dirBin, filePath)
                if (relative && relative[0] !== '.') {
                  extraProps.bin[path.basename(relative)] = path.join(dirBin, relative)
                }
              }
            })
          }
        }
        return Object.assign(extraProps, {
          _shrinkwrap: sr,
          _resolved: (mani && mani._resolved) ||
            (pkg && pkg._resolved) ||
            spec.fetchSpec,
          _integrity: needsIntegrity && hash && hash.sha512 && hash.sha512[0].toString(),
          _shasum: needsShasum && hash && hash.sha1 && hash.sha1[0].hexDigest()
        })
      }
    )
  }
}
// Pull `filename` out of a tar parse stream and resolve with its parsed
// JSON contents, or with no value if the entry never appears before close.
function jsonFromStream (filename, dataStream) {
  return BB.fromNode(cb => {
    dataStream.on('error', cb)
    // Stream closed without a match: resolves with undefined.
    dataStream.on('close', cb)
    dataStream.on('entry', entry => {
      // Strip the leading "package/" (or equivalent) path prefix.
      const filePath = entry.header.path.replace(/[^/]+\//, '')
      if (filePath !== filename) {
        entry.resume()
      } else {
        let data = ''
        entry.on('error', cb)
        finished(entry).then(() => {
          try {
            cb(null, readJson(data))
          } catch (err) {
            cb(err)
          }
        }, err => {
          cb(err)
        })
        entry.on('data', d => { data += d })
      }
    })
  })
}
function getPaths (dataStream) {
  // Walk every entry in the tarball and collect its path with the leading
  // top-level directory stripped; resolve with the full list on close.
  return BB.fromNode(cb => {
    const collected = []
    dataStream.on('error', cb)
    dataStream.on('close', () => cb(null, collected))
    dataStream.on('entry', entry => {
      // Drain the entry body -- only the header path matters here.
      entry.resume()
      collected.push(entry.header.path.replace(/[^/]+\//, ''))
    })
  })
}
function finalKey (pkg, spec) {
  // Cache key for a fully-resolved manifest.
  // git packages have a unique, identifiable id, but no tar sha
  if (pkg && pkg._uniqueResolved) {
    return cacheKey(`${spec.type}-manifest`, pkg._uniqueResolved)
  }
  // Registry-ish packages are keyed on resolved URL + integrity; falsy when
  // we don't have enough information to memoize.
  return (
    pkg && pkg._integrity &&
    cacheKey(
      `${spec.type}-manifest`,
      `${pkg._resolved}:${ssri.stringify(pkg._integrity)}`
    )
  )
}

6
node_modules/pacote/lib/util/cache-key.js generated vendored Normal file
View file

@ -0,0 +1,6 @@
'use strict'
module.exports = cacheKey
function cacheKey (type, identifier) {
  // Namespaced cache key, e.g. "pacote:registry-manifest:<identifier>".
  const segments = ['pacote', type, identifier]
  return segments.join(':')
}

17
node_modules/pacote/lib/util/finished.js generated vendored Normal file
View file

@ -0,0 +1,17 @@
'use strict'
const BB = require('bluebird')
module.exports = function (child, hasExitCode = false) {
return BB.fromNode(function (cb) {
child.on('error', cb)
child.on(hasExitCode ? 'close' : 'end', function (exitCode) {
if (exitCode === undefined || exitCode === 0) {
cb()
} else {
let err = new Error('exited with error code: ' + exitCode)
cb(err)
}
})
})
}

274
node_modules/pacote/lib/util/git.js generated vendored Normal file
View file

@ -0,0 +1,274 @@
'use strict'
const BB = require('bluebird')
const cp = require('child_process')
const execFileAsync = BB.promisify(cp.execFile, {
multiArgs: true
})
const finished = require('./finished')
const LRU = require('lru-cache')
const optCheck = require('./opt-check')
const osenv = require('osenv')
const path = require('path')
const pinflight = require('promise-inflight')
const promiseRetry = require('promise-retry')
const uniqueFilename = require('unique-filename')
const which = BB.promisify(require('which'))
const semver = require('semver')
// GIT_* environment variables that are allowed to pass through to spawned
// git processes; every other GIT_-prefixed variable is dropped by gitEnv().
const GOOD_ENV_VARS = new Set([
  'GIT_ASKPASS',
  'GIT_EXEC_PATH',
  'GIT_PROXY_COMMAND',
  'GIT_SSH',
  'GIT_SSH_COMMAND',
  'GIT_SSL_CAINFO',
  'GIT_SSL_NO_VERIFY'
])
// Substrings/patterns in git output that indicate a transient (retryable)
// network-ish failure, combined into a single alternation regex.
const GIT_TRANSIENT_ERRORS = [
  'remote error: Internal Server Error',
  'The remote end hung up unexpectedly',
  'Connection timed out',
  'Operation timed out',
  'Failed to connect to .* Timed out',
  'Connection reset by peer',
  'SSL_ERROR_SYSCALL',
  'The requested URL returned error: 503'
].join('|')
const GIT_TRANSIENT_ERROR_RE = new RegExp(GIT_TRANSIENT_ERRORS)
const GIT_TRANSIENT_ERROR_MAX_RETRY_NUMBER = 3
function shouldRetry (error, attemptNumber) {
  // Retry only transient failures, and only below the attempt cap.
  if (attemptNumber >= GIT_TRANSIENT_ERROR_MAX_RETRY_NUMBER) {
    return false
  }
  return GIT_TRANSIENT_ERROR_RE.test(error)
}
const GIT_ = 'GIT_'
let GITENV
function gitEnv () {
  // Build (once per process) the environment for spawned git commands:
  // non-interactive askpass, an empty template dir, and only whitelisted
  // GIT_* variables passed through from the parent environment.
  if (GITENV) { return GITENV }
  const tmpDir = path.join(osenv.tmpdir(), 'pacote-git-template-tmp')
  const templateDir = uniqueFilename(tmpDir, 'git-clone')
  GITENV = {
    GIT_ASKPASS: 'echo',
    GIT_TEMPLATE_DIR: templateDir
  }
  for (const key of Object.keys(process.env)) {
    if (GOOD_ENV_VARS.has(key) || !key.startsWith(GIT_)) {
      GITENV[key] = process.env[key]
    }
  }
  return GITENV
}
// Locate the git binary once at module load; checkGit() reports ENOGIT later
// if none was found, so requiring this module never throws.
let GITPATH
try {
  GITPATH = which.sync('git')
} catch (e) {}
module.exports.clone = fullClone
// Clone the full history of `repo` into `target` and check out `committish`.
// The mirror clone is written directly into target/.git, then `git init`
// turns `target` into a working tree for it so the checkout can populate it.
// Resolves to the sha of the checked-out HEAD.
function fullClone (repo, committish, target, opts) {
  opts = optCheck(opts)
  const gitArgs = ['clone', '--mirror', '-q', repo, path.join(target, '.git')]
  if (process.platform === 'win32') {
    // node_modules trees commonly exceed Windows' default path limit.
    gitArgs.push('--config', 'core.longpaths=true')
  }
  return execGit(gitArgs, { cwd: target }, opts).then(() => {
    return execGit(['init'], { cwd: target }, opts)
  }).then(() => {
    return execGit(['checkout', committish || 'HEAD'], { cwd: target }, opts)
  }).then(() => {
    return updateSubmodules(target, opts)
  }).then(() => headSha(target, opts))
}
module.exports.shallow = shallowClone
function shallowClone (repo, branch, target, opts) {
  // Fetch only the tip commit of `branch` (or the remote default branch)
  // into `target`, init submodules, and resolve to the checked-out HEAD sha.
  opts = optCheck(opts)
  const gitArgs = ['clone', '--depth=1', '-q']
  if (branch) {
    gitArgs.push('-b', branch)
  }
  gitArgs.push(repo, target)
  if (process.platform === 'win32') {
    // node_modules trees commonly exceed Windows' default path limit.
    gitArgs.push('--config', 'core.longpaths=true')
  }
  return execGit(gitArgs, { cwd: target }, opts)
    .then(() => updateSubmodules(target, opts))
    .then(() => headSha(target, opts))
}
function updateSubmodules (localRepo, opts) {
  // Recursively initialize and update all submodules of the cloned repo.
  const args = ['submodule', 'update', '-q', '--init', '--recursive']
  return execGit(args, { cwd: localRepo }, opts)
}
function headSha (repo, opts) {
  // Resolve to the trimmed sha of the repo's current HEAD.
  opts = optCheck(opts)
  return execGit(['rev-parse', '--revs-only', 'HEAD'], { cwd: repo }, opts)
    .spread(output => output.trim())
}
const CARET_BRACES = '^{}'
// Briefly cache ls-remote results; remote refs rarely change mid-install.
const REVS = new LRU({
  max: 100,
  maxAge: 5 * 60 * 1000
})
module.exports.revs = revs
// List remote refs for `repo` via `git ls-remote` and shape them into a
// packument-ish object: { versions, 'dist-tags', refs, shas }.  Results are
// LRU-cached and concurrent lookups for the same repo are deduplicated.
function revs (repo, opts) {
  opts = optCheck(opts)
  const cached = REVS.get(repo)
  if (cached) {
    return BB.resolve(cached)
  }
  return pinflight(`ls-remote:${repo}`, () => {
    return spawnGit(['ls-remote', '-h', '-t', repo], {
      env: gitEnv()
    }, opts).then((stdout) => {
      // Each ls-remote line is "<sha>\t<ref>".
      return stdout.split('\n').reduce((revs, line) => {
        const split = line.split(/\s+/, 2)
        if (split.length < 2) { return revs }
        const sha = split[0].trim()
        // Strip a leading "refs/<kind>/" to get the bare ref name.
        const ref = split[1].trim().match(/(?:refs\/[^/]+\/)?(.*)/)[1]
        if (!ref) { return revs } // ???
        if (ref.endsWith(CARET_BRACES)) { return revs } // refs/tags/x^{} crap
        const type = refType(line)
        const doc = { sha, ref, type }
        revs.refs[ref] = doc
        // We can check out shallow clones on specific SHAs if we have a ref
        if (revs.shas[sha]) {
          revs.shas[sha].push(ref)
        } else {
          revs.shas[sha] = [ref]
        }
        if (type === 'tag') {
          // Tags that look like (optionally v-prefixed) semver become versions.
          const match = ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/)
          if (match && semver.valid(match[1], true)) {
            revs.versions[semver.clean(match[1], true)] = doc
          }
        }
        return revs
      }, { versions: {}, 'dist-tags': {}, refs: {}, shas: {} })
    }, err => {
      err.message = `Error while executing:\n${GITPATH} ls-remote -h -t ${repo}\n\n${err.stderr}\n${err.message}`
      throw err
    }).then(revs => {
      if (revs.refs.HEAD) {
        const HEAD = revs.refs.HEAD
        // BUGFIX: iterate the version *docs*, not the key strings -- the old
        // code compared `key.sha` (always undefined on a string) against
        // HEAD.sha, so dist-tags were never populated.
        Object.keys(revs.versions).forEach(version => {
          const doc = revs.versions[version]
          if (doc.sha === HEAD.sha) {
            revs['dist-tags'].HEAD = doc
            if (!revs.refs.latest) {
              revs['dist-tags'].latest = revs.refs.HEAD
            }
          }
        })
      }
      REVS.set(repo, revs)
      return revs
    })
  })
}
module.exports._exec = execGit
function execGit (gitArgs, gitOpts, opts) {
  // Run a git command via execFile, retrying transient failures with npm's
  // standard fetch backoff settings (or a caller-supplied retry config).
  opts = optCheck(opts)
  const retryConfig = opts.retry != null ? opts.retry : {
    retries: opts['fetch-retries'],
    factor: opts['fetch-retry-factor'],
    maxTimeout: opts['fetch-retry-maxtimeout'],
    minTimeout: opts['fetch-retry-mintimeout']
  }
  return checkGit(opts).then(gitPath => {
    return promiseRetry((retry, attempt) => {
      if (attempt !== 1) {
        opts.log.silly('pacote', 'Retrying git command: ' + gitArgs.join(' ') + ' attempt # ' + attempt)
      }
      return execFileAsync(gitPath, gitArgs, mkOpts(gitOpts, opts)).catch((err) => {
        if (!shouldRetry(err, attempt)) {
          throw err
        }
        retry(err)
      })
    }, retryConfig)
  })
}
module.exports._spawn = spawnGit
// Like execGit, but uses cp.spawn and accumulates stdout/stderr manually,
// resolving with the full stdout text.  Used where output may be large
// (e.g. ls-remote).  Retry decisions are based on stderr *content*.
function spawnGit (gitArgs, gitOpts, opts) {
  opts = optCheck(opts)
  return checkGit(opts).then(gitPath => {
    return promiseRetry((retry, number) => {
      if (number !== 1) {
        opts.log.silly('pacote', 'Retrying git command: ' + gitArgs.join(' ') + ' attempt # ' + number)
      }
      const child = cp.spawn(gitPath, gitArgs, mkOpts(gitOpts, opts))
      let stdout = ''
      let stderr = ''
      child.stdout.on('data', d => { stdout += d })
      child.stderr.on('data', d => { stderr += d })
      return finished(child, true).catch(err => {
        // Transient-looking stderr output triggers a retry; otherwise attach
        // the captured stderr to the error for the caller's message.
        if (shouldRetry(stderr, number)) {
          retry(err)
        } else {
          err.stderr = stderr
          throw err
        }
      }).then(() => {
        return stdout
      })
    }, opts.retry)
  })
}
function mkOpts (_gitOpts, opts) {
  // Spawn options for git: the sanitized environment, optional numeric
  // uid/gid, then caller-supplied options (caller wins on conflicts).
  const gitOpts = {
    env: gitEnv()
  }
  const uid = +opts.uid
  if (uid && !isNaN(opts.uid)) {
    gitOpts.uid = uid
  }
  const gid = +opts.gid
  if (gid && !isNaN(opts.gid)) {
    gitOpts.gid = gid
  }
  return Object.assign(gitOpts, _gitOpts)
}
function checkGit (opts) {
  // Resolve to the git binary path: an explicitly configured one wins,
  // otherwise the one found on $PATH at module load; ENOGIT if neither.
  if (opts.git) {
    return BB.resolve(opts.git)
  }
  if (!GITPATH) {
    const err = new Error('No git binary found in $PATH')
    err.code = 'ENOGIT'
    return BB.reject(err)
  }
  return BB.resolve(GITPATH)
}
const REFS_TAGS = 'refs/tags/'
const REFS_HEADS = 'refs/heads/'
const HEAD = 'HEAD'
function refType (ref) {
  // Classify a raw `git ls-remote` line by the kind of ref it mentions.
  if (ref.indexOf(REFS_TAGS) !== -1) {
    return 'tag'
  }
  if (ref.indexOf(REFS_HEADS) !== -1) {
    return 'branch'
  }
  return ref.endsWith(HEAD) ? 'head' : 'other'
}

48
node_modules/pacote/lib/util/opt-check.js generated vendored Normal file
View file

@ -0,0 +1,48 @@
'use strict'
const figgyPudding = require('figgy-pudding')
const logger = require('./proclog.js')
// npm config keys carrying registry credentials, optionally prefixed with a
// registry host (e.g. "//registry.npmjs.org/:_authToken").
const AUTH_REGEX = /^(?:.*:)?(token|_authToken|username|_password|password|email|always-auth|_auth|otp)$/
// Scope-to-registry mappings like "@myorg:registry".
const SCOPE_REGISTRY_REGEX = /@.*:registry$/gi
// Canonical pacote options object (figgy-pudding): declares every option
// pacote reads, with kebab-case/camelCase aliases and defaults.
module.exports = figgyPudding({
  annotate: {},
  cache: {},
  defaultTag: 'tag',
  dirPacker: {},
  dmode: {},
  'enjoy-by': 'enjoyBy',
  enjoyBy: {},
  before: 'enjoyBy',
  fmode: {},
  'fetch-retries': { default: 2 },
  'fetch-retry-factor': { default: 10 },
  'fetch-retry-maxtimeout': { default: 60000 },
  'fetch-retry-mintimeout': { default: 10000 },
  fullMetadata: 'full-metadata',
  'full-metadata': { default: false },
  gid: {},
  git: {},
  includeDeprecated: { default: true },
  'include-deprecated': 'includeDeprecated',
  integrity: {},
  log: { default: logger },
  memoize: {},
  offline: {},
  preferOffline: 'prefer-offline',
  'prefer-offline': {},
  preferOnline: 'prefer-online',
  'prefer-online': {},
  registry: { default: 'https://registry.npmjs.org/' },
  resolved: {},
  retry: {},
  scope: {},
  tag: { default: 'latest' },
  uid: {},
  umask: {},
  where: {}
}, {
  // Let undeclared auth/scoped-registry keys pass through so registry
  // requests can pick up credentials from the config.
  other (key) {
    return key.match(AUTH_REGEX) || key.match(SCOPE_REGISTRY_REGEX)
  }
})

44
node_modules/pacote/lib/util/pack-dir.js generated vendored Normal file
View file

@ -0,0 +1,44 @@
'use strict'
const BB = require('bluebird')
const cacache = require('cacache')
const cacheKey = require('./cache-key')
const optCheck = require('./opt-check')
const packlist = require('npm-packlist')
const pipe = BB.promisify(require('mississippi').pipe)
const tar = require('tar')
module.exports = packDir
// Pack directory `dir` into a tarball streamed into `target`.  When a cache
// is configured the tarball is simultaneously written to the cache under
// `label`, and the cache's computed integrity is re-emitted on `target`.
function packDir (manifest, label, dir, target, opts) {
  opts = optCheck(opts)
  // Callers (e.g. npm's own pack/publish path) may supply a custom packer.
  const packer = opts.dirPacker
    ? BB.resolve(opts.dirPacker(manifest, dir))
    : mkPacker(dir)
  if (!opts.cache) {
    return packer.then(packer => pipe(packer, target))
  } else {
    const cacher = cacache.put.stream(
      opts.cache, cacheKey('packed-dir', label), opts
    ).on('integrity', i => {
      target.emit('integrity', i)
    })
    // NOTE(review): one packer stream is piped into both destinations; both
    // pipes are attached in the same tick so neither misses data -- confirm.
    return packer.then(packer => BB.all([
      pipe(packer, cacher),
      pipe(packer, target)
    ]))
  }
}
function mkPacker (dir) {
  // Default packer: npm-packlist decides the file set, tar.c produces a
  // gzipped, portable tarball with everything under a "package/" prefix.
  return packlist({ path: dir }).then(files => tar.c({
    cwd: dir,
    gzip: true,
    portable: true,
    prefix: 'package/'
  }, files))
}

23
node_modules/pacote/lib/util/proclog.js generated vendored Normal file
View file

@ -0,0 +1,23 @@
'use strict'
// npm log levels, in npm's conventional order.
const LEVELS = [
  'notice',
  'error',
  'warn',
  'info',
  'verbose',
  'http',
  'silly',
  'pause',
  'resume'
]
// Build one emit-based log method per level.
const logger = {}
LEVELS.forEach(level => {
  logger[level] = log(level)
})
module.exports = logger
function log (level) {
  // Emit an npm-style 'log' event on `process`; a host logger (like npmlog)
  // can subscribe to these without this module depending on it.
  return function (category, ...args) {
    return process.emit('log', level, category, ...args)
  }
}

15
node_modules/pacote/lib/util/read-json.js generated vendored Normal file
View file

@ -0,0 +1,15 @@
'use strict'
module.exports = function (content) {
// Code also yanked from read-package-json.
function stripBOM (content) {
content = content.toString()
// Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
// because the buffer-to-string conversion in `fs.readFileSync()`
// translates it to FEFF, the UTF-16 BOM.
if (content.charCodeAt(0) === 0xFEFF) return content.slice(1)
return content
}
return JSON.parse(stripBOM(content))
}

135
node_modules/pacote/lib/with-tarball-stream.js generated vendored Normal file
View file

@ -0,0 +1,135 @@
'use strict'
const BB = require('bluebird')
const cacache = require('cacache')
const fetch = require('./fetch.js')
const fs = require('fs')
const npa = require('npm-package-arg')
const optCheck = require('./util/opt-check.js')
const path = require('path')
const ssri = require('ssri')
const retry = require('promise-retry')
const statAsync = BB.promisify(fs.stat)
// Error codes that mean "the local copy is bad or missing -- try the next
// source" rather than "fail the whole operation".
const RETRIABLE_ERRORS = new Set(['ENOENT', 'EINTEGRITY', 'Z_DATA_ERROR'])
module.exports = withTarballStream
// Resolve `spec` to a tarball stream and hand it to `streamHandler`, trying
// the cheapest source first:
//   1. a `file:` `opts.resolved` shortcut (integrity-checked local tarball)
//   2. the cacache content store, addressed by `opts.integrity`
//   3. a network fetch via the manifest (retried once after cache cleanup)
// Resolves with whatever `streamHandler` resolves with.
function withTarballStream (spec, opts, streamHandler) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)
  // First, we check for a file: resolved shortcut
  const tryFile = (
    !opts.preferOnline &&
    opts.integrity &&
    opts.resolved &&
    opts.resolved.startsWith('file:')
  )
    ? BB.try(() => {
      // NOTE - this is a special shortcut! Packages installed as files do not
      // have a `resolved` field -- this specific case only occurs when you have,
      // say, a git dependency or a registry dependency that you've packaged into
      // a local file, and put that file: spec in the `resolved` field.
      opts.log.silly('pacote', `trying ${spec} by local file: ${opts.resolved}`)
      const file = path.resolve(opts.where || '.', opts.resolved.slice(5))
      return statAsync(file)
        .then(() => {
          const verifier = ssri.integrityStream({ integrity: opts.integrity })
          const stream = fs.createReadStream(file)
            .on('error', err => verifier.emit('error', err))
            .pipe(verifier)
          return streamHandler(stream)
        })
        .catch(err => {
          if (err.code === 'EINTEGRITY') {
            // (fixed: missing space after the filename in this message)
            opts.log.warn('pacote', `EINTEGRITY while extracting ${spec} from ${file}. You will have to recreate the file.`)
            opts.log.verbose('pacote', `EINTEGRITY for ${spec}: ${err.message}`)
          }
          throw err
        })
    })
    : BB.reject(Object.assign(new Error('no file!'), { code: 'ENOENT' }))
  const tryDigest = tryFile
    .catch(err => {
      if (
        opts.preferOnline ||
        !opts.cache ||
        !opts.integrity ||
        !RETRIABLE_ERRORS.has(err.code)
      ) {
        throw err
      } else {
        opts.log.silly('tarball', `trying ${spec} by hash: ${opts.integrity}`)
        const stream = cacache.get.stream.byDigest(
          opts.cache, opts.integrity, opts
        )
        // cacache streams can error before a consumer attaches a listener;
        // replay the first error to any later 'error' subscriber.
        stream.once('error', err => stream.on('newListener', (ev, l) => {
          if (ev === 'error') { l(err) }
        }))
        return streamHandler(stream)
          .catch(err => {
            if (err.code === 'EINTEGRITY' || err.code === 'Z_DATA_ERROR') {
              opts.log.warn('tarball', `cached data for ${spec} (${opts.integrity}) seems to be corrupted. Refreshing cache.`)
              return cleanUpCached(opts.cache, opts.integrity, opts)
                .then(() => { throw err })
            } else {
              throw err
            }
          })
      }
    })
  const trySpec = tryDigest
    .catch(err => {
      if (!RETRIABLE_ERRORS.has(err.code)) {
        // If it's not one of our retriable errors, bail out and give up.
        throw err
      } else {
        opts.log.silly(
          'tarball',
          `no local data for ${spec}. Extracting by manifest.`
        )
        return BB.resolve(retry((tryAgain, attemptNum) => {
          const tardata = fetch.tarball(spec, opts)
          // Record resolved/integrity info from the fetch so a retry (and
          // the file: shortcut next time) can use it.
          if (!opts.resolved) {
            tardata.on('manifest', m => {
              opts = opts.concat({ resolved: m._resolved })
            })
            tardata.on('integrity', i => {
              opts = opts.concat({ integrity: i })
            })
          }
          return BB.try(() => streamHandler(tardata))
            .catch(err => {
              // Retry once if we have a cache, to clear up any weird conditions.
              // Don't retry network errors, though -- make-fetch-happen has already
              // taken care of making sure we're all set on that front.
              if (opts.cache && err.code && !String(err.code).match(/^E\d{3}$/)) {
                if (err.code === 'EINTEGRITY' || err.code === 'Z_DATA_ERROR') {
                  opts.log.warn('tarball', `tarball data for ${spec} (${opts.integrity}) seems to be corrupted. Trying one more time.`)
                }
                return cleanUpCached(opts.cache, err.sri, opts)
                  .then(() => tryAgain(err))
              } else {
                throw err
              }
            })
        }, { retries: 1 }))
      }
    })
  return trySpec
    .catch(err => {
      if (err.code === 'EINTEGRITY') {
        err.message = `Verification failed while extracting ${spec}:\n${err.message}`
      }
      throw err
    })
}
function cleanUpCached (cache, sri, opts) {
  // Evict corrupted tarball data so the next attempt re-fetches it.
  return cacache.rm.content(cache, sri, opts)
}