1
0
Fork 0
mirror of https://github.com/shimataro/ssh-key-action.git synced 2025-06-19 22:52:10 +10:00

* first action! (#1)

This commit is contained in:
shimataro 2019-09-18 20:39:54 +09:00 committed by GitHub
parent 8deacc95b1
commit ace1e6a69a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3750 changed files with 1155519 additions and 0 deletions

1377
node_modules/pacote/CHANGELOG.md generated vendored Normal file

File diff suppressed because it is too large Load diff

21
node_modules/pacote/LICENSE generated vendored Normal file
View file

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) Kat Marchán, npm, Inc., and Contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.

288
node_modules/pacote/README.md generated vendored Normal file
View file

@ -0,0 +1,288 @@
# pacote [![npm version](https://img.shields.io/npm/v/pacote.svg)](https://npm.im/pacote) [![license](https://img.shields.io/npm/l/pacote.svg)](https://npm.im/pacote) [![Travis](https://img.shields.io/travis/npm/pacote.svg)](https://travis-ci.org/npm/pacote) [![AppVeyor](https://ci.appveyor.com/api/projects/status/github/npm/pacote?svg=true)](https://ci.appveyor.com/project/npm/pacote) [![Coverage Status](https://coveralls.io/repos/github/npm/pacote/badge.svg?branch=latest)](https://coveralls.io/github/npm/pacote?branch=latest)
[`pacote`](https://github.com/npm/pacote) is a Node.js library for downloading
[npm](https://npmjs.org)-compatible packages. It supports all package specifier
syntax that `npm install` and its ilk support. It transparently caches anything
needed to reduce excess operations, using [`cacache`](https://npm.im/cacache).
## Install
`$ npm install --save pacote`
## Table of Contents
* [Example](#example)
* [Features](#features)
* [Contributing](#contributing)
* [API](#api)
* [`manifest`](#manifest)
* [`packument`](#packument)
* [`extract`](#extract)
* [`tarball`](#tarball)
* [`tarball.stream`](#tarball-stream)
* [`tarball.toFile`](#tarball-to-file)
* ~~[`prefetch`](#prefetch)~~ (deprecated)
* [`clearMemoized`](#clearMemoized)
* [`options`](#options)
### Example
```javascript
const pacote = require('pacote')
pacote.manifest('pacote@^1').then(pkg => {
console.log('package manifest for registry pkg:', pkg)
// { "name": "pacote", "version": "1.0.0", ... }
})
pacote.extract('http://hi.com/pkg.tgz', './here').then(() => {
console.log('remote tarball contents extracted to ./here')
})
```
### Features
* Handles all package types [npm](https://npm.im/npm) does
* [high-performance, reliable, verified local cache](https://npm.im/cacache)
* offline mode
* authentication support (private git, private npm registries, etc)
* github, gitlab, and bitbucket-aware
* semver range support for git dependencies
### Contributing
The pacote team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.
### API
#### <a name="manifest"></a> `> pacote.manifest(spec, [opts])`
Fetches the *manifest* for a package. Manifest objects are similar and based
on the `package.json` for that package, but with pre-processed and limited
fields. The object has the following shape:
```javascript
{
"name": PkgName,
"version": SemverString,
"dependencies": { PkgName: SemverString },
"optionalDependencies": { PkgName: SemverString },
"devDependencies": { PkgName: SemverString },
"peerDependencies": { PkgName: SemverString },
"bundleDependencies": false || [PkgName],
"bin": { BinName: Path },
"_resolved": TarballSource, // different for each package type
"_integrity": SubresourceIntegrityHash,
"_shrinkwrap": null || ShrinkwrapJsonObj
}
```
Note that depending on the spec type, some additional fields might be present.
For example, packages from `registry.npmjs.org` have additional metadata
appended by the registry.
##### Example
```javascript
pacote.manifest('pacote@1.0.0').then(pkgJson => {
// fetched `package.json` data from the registry
})
```
#### <a name="packument"></a> `> pacote.packument(spec, [opts])`
Fetches the *packument* for a package. Packument objects are general metadata
about a project corresponding to registry metadata, and include version and
`dist-tag` information about a package's available versions, rather than a
specific version. It may include additional metadata not usually available
through the individual package metadata objects.
It generally looks something like this:
```javascript
{
"name": PkgName,
"dist-tags": {
'latest': VersionString,
[TagName]: VersionString,
...
},
"versions": {
[VersionString]: Manifest,
...
}
}
```
Note that depending on the spec type, some additional fields might be present.
For example, packages from `registry.npmjs.org` have additional metadata
appended by the registry.
##### Example
```javascript
pacote.packument('pacote').then(pkgJson => {
// fetched package versions metadata from the registry
})
```
#### <a name="extract"></a> `> pacote.extract(spec, destination, [opts])`
Extracts package data identified by `<spec>` into a directory named
`<destination>`, which will be created if it does not already exist.
If `opts.digest` is provided and the data it identifies is present in the cache,
`extract` will bypass most of its operations and go straight to extracting the
tarball.
##### Example
```javascript
pacote.extract('pacote@1.0.0', './woot', {
digest: 'deadbeef'
}).then(() => {
// Succeeds as long as `pacote@1.0.0` still exists somewhere. Network and
// other operations are bypassed entirely if `digest` is present in the cache.
})
```
#### <a name="tarball"></a> `> pacote.tarball(spec, [opts])`
Fetches package data identified by `<spec>` and returns the data as a buffer.
This API has two variants:
* `pacote.tarball.stream(spec, [opts])` - Same as `pacote.tarball`, except it returns a stream instead of a Promise.
* `pacote.tarball.toFile(spec, dest, [opts])` - Instead of returning data directly, data will be written directly to `dest`, and create any required directories along the way.
##### Example
```javascript
pacote.tarball('pacote@1.0.0', { cache: './my-cache' }).then(data => {
// data is the tarball data for pacote@1.0.0
})
```
#### <a name="tarball-stream"></a> `> pacote.tarball.stream(spec, [opts])`
Same as `pacote.tarball`, except it returns a stream instead of a Promise.
##### Example
```javascript
pacote.tarball.stream('pacote@1.0.0')
.pipe(fs.createWriteStream('./pacote-1.0.0.tgz'))
```
#### <a name="tarball-to-file"></a> `> pacote.tarball.toFile(spec, dest, [opts])`
Like `pacote.tarball`, but instead of returning data directly, data will be
written directly to `dest`, and create any required directories along the way.
##### Example
```javascript
pacote.tarball.toFile('pacote@1.0.0', './pacote-1.0.0.tgz')
.then(() => /* pacote tarball written directly to ./pacote-1.0.0.tgz */)
```
#### <a name="prefetch"></a> `> pacote.prefetch(spec, [opts])`
##### THIS API IS DEPRECATED. USE `pacote.tarball()` INSTEAD
Fetches package data identified by `<spec>`, usually for the purpose of warming
up the local package cache (with `opts.cache`). It does not return anything.
##### Example
```javascript
pacote.prefetch('pacote@1.0.0', { cache: './my-cache' }).then(() => {
// ./my-cache now has both the manifest and tarball for `pacote@1.0.0`.
})
```
#### <a name="clearMemoized"></a> `> pacote.clearMemoized()`
This utility function can be used to force pacote to release its references
to any memoized data in its various internal caches. It might help free
some memory.
```javascript
pacote.manifest(...).then(() => pacote.clearMemoized())
```
#### <a name="options"></a> `> options`
`pacote` accepts [the options for
`npm-registry-fetch`](https://npm.im/npm-registry-fetch#fetch-options) as-is,
with a couple of additional `pacote-specific` ones:
##### <a name="dirPacker"></a> `opts.dirPacker`
* Type: Function
* Default: Uses [`npm-packlist`](https://npm.im/npm-packlist) and [`tar`](https://npm.im/tar) to make a tarball.
Expects a function that takes a single argument, `dir`, and returns a
`ReadableStream` that outputs packaged tarball data. Used when creating tarballs
for package specs that are not already packaged, such as git and directory
dependencies. The default `opts.dirPacker` does not execute `prepare` scripts,
even though npm itself does.
##### <a name="opts-enjoy-by"></a> `opts.enjoy-by`
* Alias: `opts.enjoyBy`, `opts.before`
* Type: Date-able
* Default: undefined
If passed in, will be used while resolving to filter the versions for **registry
dependencies** such that versions published **after** `opts.enjoy-by` are not
considered -- as if they'd never been published.
##### <a name="opts-include-deprecated"></a> `opts.include-deprecated`
* Alias: `opts.includeDeprecated`
* Type: Boolean
* Default: false
If false, deprecated versions will be skipped when selecting from registry range
specifiers. If true, deprecations do not affect version selection.
##### <a name="opts-full-metadata"></a> `opts.full-metadata`
* Type: Boolean
* Default: false
If `true`, the full packument will be fetched when doing metadata requests. By
default, `pacote` only fetches the summarized packuments, also called "corgis".
##### <a name="opts-tag"></a> `opts.tag`
* Alias: `opts.defaultTag`
* Type: String
* Default: `'latest'`
Package version resolution tag. When processing registry spec ranges, this
option is used to determine what dist-tag to treat as "latest". For more details
about how `pacote` selects versions and how `tag` is involved, see [the
documentation for `npm-pick-manifest`](https://npm.im/npm-pick-manifest).
##### <a name="opts-resolved"></a> `opts.resolved`
* Type: String
* Default: null
When fetching tarballs, this option can be passed in to skip registry metadata
lookups when downloading tarballs. If the string is a `file:` URL, pacote will
try to read the referenced local file before attempting to do any further
lookups. This option does not bypass integrity checks when `opts.integrity` is
passed in.
##### <a name="opts-where"></a> `opts.where`
* Type: String
* Default: null
Passed as an argument to [`npm-package-arg`](https://npm.im/npm-package-arg)
when resolving `spec` arguments. Used to determine what path to resolve local
path specs relatively from.

99
node_modules/pacote/extract.js generated vendored Normal file
View file

@ -0,0 +1,99 @@
'use strict'
const BB = require('bluebird')
const extractStream = require('./lib/extract-stream.js')
const fs = require('fs')
const mkdirp = BB.promisify(require('mkdirp'))
const npa = require('npm-package-arg')
const optCheck = require('./lib/util/opt-check.js')
const path = require('path')
const rimraf = BB.promisify(require('rimraf'))
const withTarballStream = require('./lib/with-tarball-stream.js')
const inferOwner = require('infer-owner')
const chown = BB.promisify(require('chownr'))
const truncateAsync = BB.promisify(fs.truncate)
const readFileAsync = BB.promisify(fs.readFile)
const appendFileAsync = BB.promisify(fs.appendFile)
// you used to call me on my...
// Snapshot of the current process's uid/gid. process.getuid/getgid are
// unavailable on Windows, so fall back to undefined there (which disables
// the chown logic in tryExtract below).
const selfOwner = process.getuid ? {
  uid: process.getuid(),
  gid: process.getgid()
} : {
  uid: undefined,
  gid: undefined
}
module.exports = extract

// Extract the package identified by `spec` into the directory `dest`,
// creating it if needed. Returns a Promise that resolves once extraction
// (and the package.json annotation pass) completes.
//
// Throws synchronously on invalid arguments: git specs require `opts.cache`
// (cloning needs a working area), and `dest` must be a string path.
function extract (spec, dest, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)
  if (spec.type === 'git' && !opts.cache) {
    throw new TypeError('Extracting git packages requires a cache folder')
  }
  if (typeof dest !== 'string') {
    throw new TypeError('Extract requires a destination')
  }
  const startTime = Date.now()
  // Match the ownership of the (possibly pre-existing) destination tree so
  // files unpacked by a root process don't end up root-owned in a user dir.
  return inferOwner(dest).then(({ uid, gid }) => {
    opts = opts.concat({ uid, gid })
    return withTarballStream(spec, opts, stream => {
      return tryExtract(spec, stream, dest, opts)
    })
      .then(() => {
        // NOTE(review): this branch only runs when opts.resolved is falsy,
        // so the appended "_resolved" value is always the empty-string
        // fallback -- presumably intentional (the tar transform in
        // extract-stream handles the resolved case), but worth confirming.
        if (!opts.resolved) {
          const pjson = path.join(dest, 'package.json')
          // Rewrite package.json in place: read, truncate, then append the
          // original text with _resolved/_integrity/_from spliced in before
          // the final closing brace.
          return readFileAsync(pjson, 'utf8')
            .then(str => truncateAsync(pjson)
              .then(() => appendFileAsync(pjson, str.replace(
                /}\s*$/,
                `\n,"_resolved": ${
                  JSON.stringify(opts.resolved || '')
                }\n,"_integrity": ${
                  JSON.stringify(opts.integrity || '')
                }\n,"_from": ${
                  JSON.stringify(spec.toString())
                }\n}`
              ))))
        }
      })
      .then(() => opts.log.silly(
        'extract',
        `${spec} extracted to ${dest} (${Date.now() - startTime}ms)`
      ))
  })
}
// Wipe `dest`, recreate it, optionally chown it to the inferred owner, and
// pipe the tarball stream through the tar extractor into it.
function tryExtract (spec, tarStream, dest, opts) {
  return new BB((resolve, reject) => {
    // Fail fast if the source stream errors while we set up the target dir.
    tarStream.on('error', reject)
    rimraf(dest)
      .then(() => mkdirp(dest))
      .then((made) => {
        // respect the current ownership of unpack targets
        // but don't try to chown if we're not root.
        if (selfOwner.uid === 0 &&
          typeof selfOwner.gid === 'number' &&
          selfOwner.uid !== opts.uid && selfOwner.gid !== opts.gid) {
          return chown(made || dest, opts.uid, opts.gid)
        }
      })
      .then(() => {
        const xtractor = extractStream(spec, dest, opts)
        xtractor.on('error', reject)
        xtractor.on('close', resolve)
        tarStream.pipe(xtractor)
      })
      .catch(reject)
  })
    .catch(err => {
      // Make integrity failures actionable by naming the spec being extracted.
      if (err.code === 'EINTEGRITY') {
        err.message = `Verification failed while extracting ${spec}:\n${err.message}`
      }
      throw err
    })
}

10
node_modules/pacote/index.js generated vendored Normal file
View file

@ -0,0 +1,10 @@
'use strict'
module.exports = {
extract: require('./extract'),
manifest: require('./manifest'),
packument: require('./packument'),
prefetch: require('./prefetch'),
tarball: require('./tarball'),
clearMemoized: require('./lib/fetch').clearMemoized
}

89
node_modules/pacote/lib/extract-stream.js generated vendored Normal file
View file

@ -0,0 +1,89 @@
'use strict'
const Minipass = require('minipass')
const path = require('path')
const tar = require('tar')
// Public API: the extract-stream factory, plus the mode helper (exported
// under an underscore name for unit testing).
module.exports = extractStream
module.exports._computeMode = computeMode
// Buffers an entire package.json body and, at end-of-stream, splices the
// _resolved/_integrity/_from bookkeeping fields in just before the final `}`.
class Transformer extends Minipass {
  constructor (spec, opts) {
    super()
    this.spec = spec
    this.opts = opts
    this.buffered = ''
  }

  // Accumulate input instead of forwarding; nothing flows until end().
  write (chunk) {
    this.buffered += chunk
    return true
  }

  end () {
    const annotations =
      `\n,"_resolved": ${JSON.stringify(this.opts.resolved || '')}` +
      `\n,"_integrity": ${JSON.stringify(this.opts.integrity || '')}` +
      `\n,"_from": ${JSON.stringify(this.spec.toString())}\n}`
    super.write(this.buffered.replace(/}\s*$/, annotations))
    return super.end()
  }
}
// Combine a tar entry's mode with the configured fmode/dmode bits, then
// clear anything masked out by the umask (treated as 0 when falsy).
function computeMode (fileMode, optMode, umask) {
  const mask = umask || 0
  return (fileMode | optMode) & ~mask
}
// Build a tar `transform` hook that rewrites the root package.json entry
// through a Transformer; every other entry passes through untransformed
// (the hook returns undefined for it).
function pkgJsonTransform (spec, opts) {
  return entry =>
    entry.path === 'package.json' ? new Transformer(spec, opts) : undefined
}
// Build a tar extraction stream targeting `dest`. Hard/symlinks are filtered
// out, the top-level directory is stripped, and entry modes are normalized
// via computeMode. When opts.resolved is set, package.json is rewritten
// through pkgJsonTransform.
function extractStream (spec, dest, opts) {
  opts = opts || {}
  const sawIgnores = new Set()
  return tar.x({
    cwd: dest,
    // Drop any link-typed entries (matches header types like "SymbolicLink").
    filter: (name, entry) => !entry.header.type.match(/^.*link$/i),
    strip: 1,
    onwarn: msg => opts.log && opts.log.warn('tar', msg),
    uid: opts.uid,
    gid: opts.gid,
    umask: opts.umask,
    transform: opts.resolved && pkgJsonTransform(spec, opts),
    onentry (entry) {
      if (entry.type.toLowerCase() === 'file') {
        entry.mode = computeMode(entry.mode, opts.fmode, opts.umask)
      } else if (entry.type.toLowerCase() === 'directory') {
        entry.mode = computeMode(entry.mode, opts.dmode, opts.umask)
      } else {
        entry.mode = computeMode(entry.mode, 0, opts.umask)
      }
      // Note: This mirrors logic in the fs read operations that are
      // employed during tarball creation, in the fstream-npm module.
      // It is duplicated here to handle tarballs that are created
      // using other means, such as system tar or git archive.
      if (entry.type.toLowerCase() === 'file') {
        const base = path.basename(entry.path)
        if (base === '.npmignore') {
          sawIgnores.add(entry.path)
        } else if (base === '.gitignore') {
          // Promote .gitignore to .npmignore unless a real .npmignore was
          // already seen at the same path.
          const npmignore = entry.path.replace(/\.gitignore$/, '.npmignore')
          if (!sawIgnores.has(npmignore)) {
            // Rename, may be clobbered later.
            entry.path = npmignore
          }
        }
      }
    }
  })
}

82
node_modules/pacote/lib/fetch.js generated vendored Normal file
View file

@ -0,0 +1,82 @@
'use strict'
const duck = require('protoduck')
// Protocol definition (via protoduck) that every concrete fetcher must
// implement: packument/manifest/tarball lookups, fromManifest, and
// clearMemoized. Dispatch happens over the ['spec', 'opts', 'manifest']
// argument shape.
const Fetcher = duck.define(['spec', 'opts', 'manifest'], {
  packument: ['spec', 'opts'],
  manifest: ['spec', 'opts'],
  tarball: ['spec', 'opts'],
  fromManifest: ['manifest', 'spec', 'opts'],
  clearMemoized () {}
}, { name: 'Fetcher' })
module.exports = Fetcher
module.exports.packument = packument

// Look up the registry metadata document for `spec`, dispatching on the
// spec's type.
function packument (spec, opts) {
  return getFetcher(spec.type).packument(spec, opts)
}
module.exports.manifest = manifest

// Resolve `spec` to a single version's manifest via the type-specific fetcher.
function manifest (spec, opts) {
  return getFetcher(spec.type).manifest(spec, opts)
}
module.exports.tarball = tarball

// Fetch tarball data for `spec`, dispatching on the spec's type.
function tarball (spec, opts) {
  const fetcher = getFetcher(spec.type)
  return fetcher.tarball(spec, opts)
}
module.exports.fromManifest = fromManifest

// Fetch a tarball when a resolved manifest is already in hand.
function fromManifest (manifest, spec, opts) {
  const fetcher = getFetcher(spec.type)
  return fetcher.fromManifest(manifest, spec, opts)
}
const fetchers = {}
module.exports.clearMemoized = clearMemoized
function clearMemoized () {
Object.keys(fetchers).forEach(k => {
fetchers[k].clearMemoized()
})
}
// Lazily require and memoize the fetcher implementation for a given spec
// type. Throws for unknown types.
function getFetcher (type) {
  if (!fetchers[type]) {
    // This is spelled out both to prevent sketchy stuff and to make life
    // easier for bundlers/preprocessors.
    switch (type) {
      case 'alias':
        fetchers[type] = require('./fetchers/alias')
        break
      case 'directory':
        fetchers[type] = require('./fetchers/directory')
        break
      case 'file':
        fetchers[type] = require('./fetchers/file')
        break
      case 'git':
        fetchers[type] = require('./fetchers/git')
        break
      case 'hosted':
        fetchers[type] = require('./fetchers/hosted')
        break
      case 'range':
        fetchers[type] = require('./fetchers/range')
        break
      case 'remote':
        fetchers[type] = require('./fetchers/remote')
        break
      case 'tag':
        fetchers[type] = require('./fetchers/tag')
        break
      case 'version':
        fetchers[type] = require('./fetchers/version')
        break
      default:
        throw new Error(`Invalid dependency type requested: ${type}`)
    }
  }
  return fetchers[type]
}

24
node_modules/pacote/lib/fetchers/alias.js generated vendored Normal file
View file

@ -0,0 +1,24 @@
'use strict'

const Fetcher = require('../fetch')
const fetchRegistry = require('./registry')

// `alias` specs (e.g. `npm:other-pkg@^1`) delegate every operation to the
// registry fetcher, substituting the aliased sub-spec as the effective
// target. The local const was previously named `fetchRemote` -- an apparent
// copy-paste from the remote fetcher -- and is renamed here for accuracy;
// the exported object is unchanged.
const fetchAlias = module.exports = Object.create(null)

Fetcher.impl(fetchAlias, {
  packument (spec, opts) {
    return fetchRegistry.packument(spec.subSpec, opts)
  },
  manifest (spec, opts) {
    return fetchRegistry.manifest(spec.subSpec, opts)
  },
  tarball (spec, opts) {
    return fetchRegistry.tarball(spec.subSpec, opts)
  },
  fromManifest (manifest, spec, opts) {
    return fetchRegistry.fromManifest(manifest, spec.subSpec, opts)
  }
})

88
node_modules/pacote/lib/fetchers/directory.js generated vendored Normal file
View file

@ -0,0 +1,88 @@
'use strict'
const BB = require('bluebird')
const Fetcher = require('../fetch')
const glob = BB.promisify(require('glob'))
const packDir = require('../util/pack-dir')
const readJson = require('../util/read-json')
const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const through = require('mississippi').through
const readFileAsync = BB.promisify(require('fs').readFile)
// `directory` specs point at a package source tree on the local filesystem.
const fetchDirectory = module.exports = Object.create(null)
Fetcher.impl(fetchDirectory, {
  // Synthesize a one-version packument from the directory's manifest.
  packument (spec, opts) {
    return this.manifest(spec, opts).then(manifest => {
      return Object.assign({}, manifest, {
        'dist-tags': {
          'latest': manifest.version
        },
        time: {
          // Fabricated timestamp: a local directory has no publish history.
          [manifest.version]: (new Date()).toISOString()
        },
        versions: {
          [manifest.version]: manifest
        }
      })
    })
  },
  // `directory` manifests come from the actual manifest/lockfile data.
  manifest (spec, opts) {
    const pkgPath = path.join(spec.fetchSpec, 'package.json')
    const srPath = path.join(spec.fetchSpec, 'npm-shrinkwrap.json')
    return BB.join(
      // A missing package.json is upgraded to a clearer error code.
      readFileAsync(pkgPath).then(readJson).catch({ code: 'ENOENT' }, err => {
        err.code = 'ENOPACKAGEJSON'
        throw err
      }),
      // Shrinkwrap is optional; absence simply yields null.
      readFileAsync(srPath).then(readJson).catch({ code: 'ENOENT' }, () => null),
      (pkg, sr) => {
        pkg._shrinkwrap = sr
        pkg._hasShrinkwrap = !!sr
        pkg._resolved = spec.fetchSpec
        pkg._integrity = false // Don't auto-calculate integrity
        pkg._shasum = false // Don't auto-calculate shasum either
        return pkg
      }
    ).then(pkg => {
      // Honor directories.bin by mapping each file under it into pkg.bin,
      // mirroring what the registry does at publish time.
      if (!pkg.bin && pkg.directories && pkg.directories.bin) {
        const dirBin = pkg.directories.bin
        return glob(path.join(spec.fetchSpec, dirBin, '/**'), { nodir: true }).then(matches => {
          matches.forEach(filePath => {
            const relative = path.relative(spec.fetchSpec, filePath)
            // Skip anything outside the bin dir or starting with a dot.
            if (relative && relative[0] !== '.') {
              if (!pkg.bin) { pkg.bin = {} }
              pkg.bin[path.basename(relative)] = relative
            }
          })
        }).then(() => pkg)
      } else {
        return pkg
      }
    })
  },
  // As of npm@5, the npm installer doesn't pack + install directories: it just
  // creates symlinks. This code is here because `npm pack` still needs the
  // ability to create a tarball from a local directory.
  tarball (spec, opts) {
    const stream = through()
    this.manifest(spec, opts).then(mani => {
      return pipe(this.fromManifest(mani, spec, opts), stream)
    }).catch(err => stream.emit('error', err))
    return stream
  },
  // `directory` tarballs are generated in a very similar way to git tarballs.
  fromManifest (manifest, spec, opts) {
    const stream = through()
    packDir(manifest, manifest._resolved, manifest._resolved, stream, opts).catch(err => {
      stream.emit('error', err)
    })
    return stream
  }
})

78
node_modules/pacote/lib/fetchers/file.js generated vendored Normal file
View file

@ -0,0 +1,78 @@
'use strict'
const BB = require('bluebird')
const cacache = require('cacache')
const Fetcher = require('../fetch')
const fs = require('fs')
const pipe = BB.promisify(require('mississippi').pipe)
const through = require('mississippi').through
const readFileAsync = BB.promisify(fs.readFile)
const statAsync = BB.promisify(fs.stat)
// Threshold below which a tarball is read into memory in one shot rather
// than streamed.
const MAX_BULK_SIZE = 2 * 1024 * 1024 // 2MB
// `file` packages refer to local tarball files.
const fetchFile = module.exports = Object.create(null)
Fetcher.impl(fetchFile, {
  packument (spec, opts) {
    return BB.reject(new Error('Not implemented yet'))
  },
  manifest (spec, opts) {
    // We can't do much here. `finalizeManifest` will take care of
    // calling `tarball` to fill out all the necessary details.
    return BB.resolve(null)
  },
  // All the heavy lifting for `file` packages is done here.
  // They're never cached. We just read straight out of the file.
  // TODO - maybe they *should* be cached?
  tarball (spec, opts) {
    const src = spec._resolved || spec.fetchSpec
    const stream = through()
    statAsync(src).then(stat => {
      if (spec._resolved) { stream.emit('manifest', spec) }
      if (stat.size <= MAX_BULK_SIZE) {
        // YAY LET'S DO THING IN BULK
        // Small tarballs: slurp the whole file, optionally store it in
        // cacache, then emit it in a single write.
        return readFileAsync(src).then(data => {
          if (opts.cache) {
            return cacache.put(
              opts.cache, `pacote:tarball:file:${src}`, data, {
                integrity: opts.integrity
              }
            ).then(integrity => ({ data, integrity }))
          } else {
            return { data }
          }
        }).then(info => {
          if (info.integrity) { stream.emit('integrity', info.integrity) }
          stream.write(info.data, () => {
            stream.end()
          })
        })
      } else {
        // Large tarballs: stream into the cache first (if any), then stream
        // the file back out to the consumer.
        // NOTE(review): this cache key omits the `file:` segment used by the
        // bulk path above, so the same tarball is cached under different
        // keys depending on its size -- looks unintentional; confirm before
        // relying on cache hits across both paths.
        let integrity
        const cacheWriter = !opts.cache
          ? BB.resolve(null)
          : (pipe(
            fs.createReadStream(src),
            cacache.put.stream(opts.cache, `pacote:tarball:${src}`, {
              integrity: opts.integrity
            }).on('integrity', d => { integrity = d })
          ))
        return cacheWriter.then(() => {
          if (integrity) { stream.emit('integrity', integrity) }
          return pipe(fs.createReadStream(src), stream)
        })
      }
    }).catch(err => stream.emit('error', err))
    return stream
  },
  fromManifest (manifest, spec, opts) {
    return this.tarball(manifest || spec, opts)
  }
})

178
node_modules/pacote/lib/fetchers/git.js generated vendored Normal file
View file

@ -0,0 +1,178 @@
'use strict'
const BB = require('bluebird')
const cacache = require('cacache')
const cacheKey = require('../util/cache-key')
const Fetcher = require('../fetch')
const git = require('../util/git')
const mkdirp = BB.promisify(require('mkdirp'))
const pickManifest = require('npm-pick-manifest')
const optCheck = require('../util/opt-check')
const osenv = require('osenv')
const packDir = require('../util/pack-dir')
const PassThrough = require('stream').PassThrough
const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const rimraf = BB.promisify(require('rimraf'))
const uniqueFilename = require('unique-filename')
// `git` dependencies are fetched from git repositories and packed up.
const fetchGit = module.exports = Object.create(null)
Fetcher.impl(fetchGit, {
  packument (spec, opts) {
    return BB.reject(new Error('Not implemented yet.'))
  },
  manifest (spec, opts) {
    opts = optCheck(opts)
    // Shortcut specs (e.g. `user/repo`) get protocol fallbacks; explicit
    // urls are tried exactly as given.
    if (spec.hosted && spec.hosted.getDefaultRepresentation() === 'shortcut') {
      return hostedManifest(spec, opts)
    } else {
      // If it's not a shortcut, don't do fallbacks.
      return plainManifest(spec.fetchSpec, spec, opts)
    }
  },
  // Resolve the manifest first, announce it on the stream, then delegate
  // tarball production to fromManifest.
  tarball (spec, opts) {
    opts = optCheck(opts)
    const stream = new PassThrough()
    this.manifest(spec, opts).then(manifest => {
      stream.emit('manifest', manifest)
      return pipe(
        this.fromManifest(
          manifest, spec, opts
        ).on('integrity', i => stream.emit('integrity', i)), stream
      )
    }).catch(err => stream.emit('error', err))
    return stream
  },
  fromManifest (manifest, spec, opts) {
    opts = optCheck(opts)
    let streamError
    const stream = new PassThrough().on('error', e => { streamError = e })
    const cacheName = manifest._uniqueResolved || manifest._resolved || ''
    // NOTE(review): when opts.cache is falsy, cacheStream is `false` and the
    // .pipe()/.on() calls below would throw. In practice git extraction
    // requires a cache (enforced in extract.js), but confirm for direct
    // tarball callers.
    const cacheStream = (
      opts.cache &&
      cacache.get.stream(
        opts.cache, cacheKey('packed-dir', cacheName), opts
      ).on('integrity', i => stream.emit('integrity', i))
    )
    cacheStream.pipe(stream)
    cacheStream.on('error', err => {
      if (err.code !== 'ENOENT') {
        return stream.emit('error', err)
      } else {
        // Cache miss: clone the repo into a tmp dir and pack it up fresh.
        stream.emit('reset')
        return withTmp(opts, tmp => {
          if (streamError) { throw streamError }
          return cloneRepo(
            spec, manifest._repo, manifest._ref, manifest._rawRef, tmp, opts
          ).then(HEAD => {
            if (streamError) { throw streamError }
            // NOTE(review): `(:?#.*)` looks like a typo for the
            // non-capturing group `(?:#.*)` used elsewhere in this file;
            // behavior matches for ordinary urls, but verify.
            manifest._resolved = spec.saveSpec.replace(/(:?#.*)?$/, `#${HEAD}`)
            manifest._uniqueResolved = manifest._resolved
            return packDir(manifest, manifest._uniqueResolved, tmp, stream, opts)
          })
        }).catch(err => stream.emit('error', err))
      }
    })
    return stream
  }
})
// Resolve a manifest for a hosted-git shortcut spec (e.g. `user/repo`),
// falling back through the host's git://, then https://, then ssh urls.
function hostedManifest (spec, opts) {
  return BB.resolve(null).then(() => {
    if (!spec.hosted.git()) {
      throw new Error(`No git url for ${spec}`)
    }
    return plainManifest(spec.hosted.git(), spec, opts)
  }).catch(err => {
    // git:// failed (or was unavailable) -- try the https representation.
    if (!spec.hosted.https()) {
      throw err
    }
    return plainManifest(spec.hosted.https(), spec, opts)
  }).catch(err => {
    // Last resort: the ssh url.
    if (!spec.hosted.sshurl()) {
      throw err
    }
    return plainManifest(spec.hosted.sshurl(), spec, opts)
  })
}
// Resolve a git `spec` against a concrete `repo` url and build the minimal
// manifest-ish object the rest of pacote's git pipeline consumes. Returns a
// Promise for that object; `_resolved` is only set when the ref (or a
// 40-char sha rawRef) could be pinned.
function plainManifest (repo, spec, opts) {
  const rawRef = spec.gitCommittish || spec.gitRange
  return resolve(
    repo, spec, spec.name, opts
  ).then(ref => {
    if (ref) {
      // Remote ref resolved: pin the saved spec to the exact sha.
      const resolved = spec.saveSpec.replace(/(?:#.*)?$/, `#${ref.sha}`)
      return {
        _repo: repo,
        _resolved: resolved,
        _spec: spec,
        _ref: ref,
        // Fix: reuse the rawRef computed above instead of re-evaluating the
        // same `spec.gitCommittish || spec.gitRange` expression.
        _rawRef: rawRef,
        _uniqueResolved: resolved,
        _integrity: false,
        _shasum: false
      }
    } else {
      // We're SOL and need a full clone :(
      //
      // If we're confident enough that `rawRef` is a commit SHA,
      // then we can at least get `finalize-manifest` to cache its result.
      const resolved = spec.saveSpec.replace(/(?:#.*)?$/, rawRef ? `#${rawRef}` : '')
      return {
        _repo: repo,
        _rawRef: rawRef,
        _resolved: rawRef && rawRef.match(/^[a-f0-9]{40}$/) && resolved,
        _uniqueResolved: rawRef && rawRef.match(/^[a-f0-9]{40}$/) && resolved,
        _integrity: false,
        _shasum: false
      }
    }
  })
}
// List remote refs for `url` and resolve the spec's committish/range against
// them. Semver ranges are matched via npm-pick-manifest over the advertised
// version refs; plain committishes are looked up by ref name or sha.
function resolve (url, spec, name, opts) {
  const isSemver = !!spec.gitRange
  return git.revs(url, opts).then(remoteRefs => {
    return isSemver
      ? pickManifest({
        versions: remoteRefs.versions,
        'dist-tags': remoteRefs['dist-tags'],
        name: name
      }, spec.gitRange, opts)
      : remoteRefs
        ? BB.resolve(
          remoteRefs.refs[spec.gitCommittish] || remoteRefs.refs[remoteRefs.shas[spec.gitCommittish]]
        )
        : null
  })
}
// Run `cb` with a temporary working directory. With a cache configured,
// cacache manages tmp placement and cleanup; otherwise a unique directory
// under the OS tmpdir is created and removed via a bluebird disposer.
function withTmp (opts, cb) {
  if (opts.cache) {
    // cacache has a special facility for working in a tmp dir
    return cacache.tmp.withTmp(opts.cache, { tmpPrefix: 'git-clone' }, cb)
  } else {
    const tmpDir = path.join(osenv.tmpdir(), 'pacote-git-tmp')
    const tmpName = uniqueFilename(tmpDir, 'git-clone')
    const tmp = mkdirp(tmpName).then(() => tmpName).disposer(rimraf)
    return BB.using(tmp, cb)
  }
}
// Only certain whitelisted hosted gits support shallow cloning
const SHALLOW_HOSTS = new Set(['github', 'gist', 'gitlab', 'bitbucket'])
// Clone `repo` at the resolved (or raw) ref into `tmp`; use a cheap shallow
// clone when the ref is fully resolved and the host is known to allow it.
function cloneRepo (spec, repo, resolvedRef, rawRef, tmp, opts) {
  const ref = resolvedRef ? resolvedRef.ref : rawRef
  if (resolvedRef && spec.hosted && SHALLOW_HOSTS.has(spec.hosted.type)) {
    return git.shallow(repo, ref, tmp, opts)
  } else {
    return git.clone(repo, ref, tmp, opts)
  }
}

3
node_modules/pacote/lib/fetchers/hosted.js generated vendored Normal file
View file

@ -0,0 +1,3 @@
'use strict'
// `hosted` specs (github:user/repo, gitlab:..., etc.) are handled by the git
// fetcher wholesale; this module is just an alias for it.
module.exports = require('./git')

3
node_modules/pacote/lib/fetchers/range.js generated vendored Normal file
View file

@ -0,0 +1,3 @@
'use strict'
// `range` (semver) specs share the registry fetcher implementation.
module.exports = require('./registry')

32
node_modules/pacote/lib/fetchers/registry/index.js generated vendored Normal file
View file

@ -0,0 +1,32 @@
'use strict'
const cacache = require('cacache')
const Fetcher = require('../../fetch')
const regManifest = require('./manifest')
const regPackument = require('./packument')
const regTarball = require('./tarball')
// Registry-backed fetcher: a thin facade over the manifest/packument/tarball
// helper modules in this directory.
const fetchRegistry = module.exports = Object.create(null)
Fetcher.impl(fetchRegistry, {
  packument (spec, opts) {
    return regPackument(spec, opts)
  },
  manifest (spec, opts) {
    return regManifest(spec, opts)
  },
  tarball (spec, opts) {
    return regTarball(spec, opts)
  },
  fromManifest (manifest, spec, opts) {
    return regTarball.fromManifest(manifest, spec, opts)
  },
  clearMemoized () {
    // Drop both cacache's in-memory layer and the packument LRU.
    cacache.clearMemoized()
    regPackument.clearMemoized()
  }
})

81
node_modules/pacote/lib/fetchers/registry/manifest.js generated vendored Normal file
View file

@ -0,0 +1,81 @@
'use strict'
const fetch = require('npm-registry-fetch')
const fetchPackument = require('./packument')
const optCheck = require('../../util/opt-check')
const pickManifest = require('npm-pick-manifest')
const ssri = require('ssri')
module.exports = manifest

// Public entry point: resolve `spec` to one version's manifest and decorate
// it with pacote's internal bookkeeping fields.
function manifest (spec, opts) {
  const checkedOpts = optCheck(opts)
  return getManifest(spec, checkedOpts)
    .then(rawManifest => annotateManifest(spec, rawManifest, checkedOpts))
}
// Fetch the packument for `spec` and pick the single best-matching manifest
// out of it. On an ETARGET/E403 against cached data while online, force a
// revalidating refetch with full metadata and retry the pick once.
function getManifest (spec, opts) {
  opts = opts.concat({
    // enjoyBy filtering needs `time` info, which only full metadata carries.
    fullMetadata: opts.enjoyBy ? true : opts.fullMetadata
  })
  return fetchPackument(spec, opts).then(packument => {
    try {
      return pickManifest(packument, spec.fetchSpec, {
        defaultTag: opts.defaultTag,
        enjoyBy: opts.enjoyBy,
        includeDeprecated: opts.includeDeprecated
      })
    } catch (err) {
      if ((err.code === 'ETARGET' || err.code === 'E403') && packument._cached && !opts.offline) {
        opts.log.silly(
          'registry:manifest',
          `no matching version for ${spec.name}@${spec.fetchSpec} in the cache. Forcing revalidation.`
        )
        opts = opts.concat({
          preferOffline: false,
          preferOnline: true
        })
        return fetchPackument(spec, opts.concat({
          // Fetch full metadata in case ETARGET was due to corgi delay
          fullMetadata: true
        })).then(packument => {
          return pickManifest(packument, spec.fetchSpec, {
            defaultTag: opts.defaultTag,
            enjoyBy: opts.enjoyBy,
            // Bug fix: the retry previously dropped includeDeprecated, so a
            // revalidated pick could differ from the first attempt.
            includeDeprecated: opts.includeDeprecated
          })
        })
      } else {
        throw err
      }
    }
  })
}
// Decorate a registry manifest with the underscore-prefixed bookkeeping
// fields pacote consumers rely on (_integrity, _shasum, _resolved). When
// dist.tarball is missing, guess the conventional registry tarball URL and
// record a warning on the manifest.
function annotateManifest (spec, manifest, opts) {
  const dist = manifest.dist
  const shasum = dist && dist.shasum
  manifest._integrity = dist && dist.integrity
  manifest._shasum = shasum
  if (!manifest._integrity && shasum) {
    // Use legacy dist.shasum field if available.
    manifest._integrity = ssri.fromHex(shasum, 'sha1').toString()
  }
  manifest._resolved = dist && dist.tarball
  if (!manifest._resolved) {
    const registry = fetch.pickRegistry(spec, opts)
    const uri = registry.replace(/\/?$/, '/') + spec.escapedName
    const err = new Error(
      `Manifest for ${manifest.name}@${manifest.version} from ${uri} is missing a tarball url (pkg.dist.tarball). Guessing a default.`
    )
    err.code = 'ENOTARBALL'
    err.manifest = manifest
    if (!manifest._warnings) { manifest._warnings = [] }
    manifest._warnings.push(err.message)
    manifest._resolved =
      `${registry}/${manifest.name}/-/${manifest.name}-${manifest.version}.tgz`
  }
  return manifest
}

92
node_modules/pacote/lib/fetchers/registry/packument.js generated vendored Normal file
View file

@ -0,0 +1,92 @@
'use strict'
const BB = require('bluebird')
const fetch = require('npm-registry-fetch')
const LRU = require('lru-cache')
const optCheck = require('../../util/opt-check')
// Corgis are cute. 🐕🐶
const CORGI_DOC = 'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*'
const JSON_DOC = 'application/json'
module.exports = packument
// Fetches the packument (registry metadata document) for `spec` from
// whichever registry `spec`/`opts` select.
function packument (spec, opts) {
  const checked = optCheck(opts)
  const registry = fetch.pickRegistry(spec, checked)
  const packumentUri = registry.replace(/\/?$/, '/') + spec.escapedName
  return fetchPackument(packumentUri, registry, spec, checked)
}
// In-memory packument cache, bounded by total content-length (200MB)
// with a short TTL so freshly-published versions show up quickly.
const MEMO = new LRU({
  length: m => m._contentLength,
  max: 200 * 1024 * 1024, // 200MB
  maxAge: 30 * 1000 // 30s
})
module.exports.clearMemoized = clearMemoized
// Empties the in-memory packument cache.
function clearMemoized () {
  MEMO.reset()
}
// Fetches (and memoizes) the packument at `uri`, preferring the compact
// "corgi" media type unless opts.fullMetadata demands the full JSON doc.
// Falls back to full metadata on E404, since some registries don't
// understand the corgi accept header.
function fetchPackument (uri, registry, spec, opts) {
  const mem = pickMem(opts)
  // NOTE(review): ObjProxy-wrapped memoizers have no `.has` method —
  // presumably callers pass Map-likes or rely on MEMO; confirm.
  const accept = opts.fullMetadata ? JSON_DOC : CORGI_DOC
  // Key by uri + accept header: corgi and full docs are different payloads.
  const memoKey = `${uri}~(${accept})`
  if (mem && !opts.preferOnline && mem.has(memoKey)) {
    return BB.resolve(mem.get(memoKey))
  }
  return fetch(uri, opts.concat({
    headers: {
      'pacote-req-type': 'packument',
      'pacote-pkg-id': `registry:${spec.name}`,
      accept
    },
    spec
  }, opts, {
    // Force integrity to null: we never check integrity hashes for manifests
    integrity: null
  })).then(res => res.json().then(packument => {
    packument._cached = res.headers.has('x-local-cache')
    packument._contentLength = +res.headers.get('content-length')
    // NOTE - we need to call pickMem again because proxy
    // objects get reused!
    const mem = pickMem(opts)
    if (mem) {
      mem.set(memoKey, packument)
    }
    return packument
  })).catch(err => {
    // Registry didn't understand the corgi request: retry with full JSON.
    if (err.code === 'E404' && !opts.fullMetadata) {
      return fetchPackument(uri, registry, spec, opts.concat({
        fullMetadata: true
      }))
    } else {
      throw err
    }
  })
}
// Thin get/set adapter that lets a plain object stand in where a
// Map-like memoizer (with .get/.set methods) is expected.
class ObjProxy {
  get (key) {
    return this.obj[key]
  }

  set (key, value) {
    this.obj[key] = value
  }
}
// This object is used synchronously and immediately, so
// we can safely reuse it instead of consing up new ones
const PROX = new ObjProxy()
// Picks the memoization store for this request:
// * no opts, or falsy opts.memoize -> the shared module-level MEMO cache
//   (NOTE(review): `memoize: false` still uses the shared cache — confirm
//   that is intended rather than disabling memoization)
// * a Map-like (has .get and .set) -> used directly
// * any other object -> wrapped in the reusable ObjProxy adapter
// * any other truthy value -> null (no memoization)
function pickMem (opts) {
  if (!opts || !opts.memoize) {
    return MEMO
  } else if (opts.memoize.get && opts.memoize.set) {
    return opts.memoize
  } else if (typeof opts.memoize === 'object') {
    PROX.obj = opts.memoize
    return PROX
  } else {
    return null
  }
}

102
node_modules/pacote/lib/fetchers/registry/tarball.js generated vendored Normal file
View file

@ -0,0 +1,102 @@
'use strict'
const BB = require('bluebird')
const fetch = require('npm-registry-fetch')
const manifest = require('./manifest')
const optCheck = require('../../util/opt-check')
const PassThrough = require('stream').PassThrough
const ssri = require('ssri')
const url = require('url')
module.exports = tarball
// Streams the tarball for a registry `spec`. The returned PassThrough
// also emits 'manifest' and 'integrity' events as those become known.
// When opts.resolved already points into the chosen registry, skips the
// manifest lookup entirely via a minimal "fake child" manifest.
function tarball (spec, opts) {
  opts = optCheck(opts)
  const registry = fetch.pickRegistry(spec, opts)
  const stream = new PassThrough()
  let mani
  if (
    opts.resolved &&
    // spec.type === 'version' &&
    opts.resolved.indexOf(registry) === 0
  ) {
    // fakeChild is a shortcut to avoid looking up a manifest!
    mani = BB.resolve({
      name: spec.name,
      version: spec.fetchSpec,
      _integrity: opts.integrity,
      _resolved: opts.resolved,
      _fakeChild: true
    })
  } else {
    // We can't trust opts.resolved if it's going to a separate host.
    mani = manifest(spec, opts)
  }
  mani.then(mani => {
    // Fake children carry no real manifest data worth announcing.
    !mani._fakeChild && stream.emit('manifest', mani)
    const fetchStream = fromManifest(mani, spec, opts).on(
      'integrity', i => stream.emit('integrity', i)
    )
    fetchStream.on('error', err => stream.emit('error', err))
    fetchStream.pipe(stream)
    return null
  }).catch(err => stream.emit('error', err))
  return stream
}
module.exports.fromManifest = fromManifest
// Streams a tarball straight from a manifest's `_resolved` url,
// verifying against `_integrity` when present, and re-emitting the
// local-cache integrity hash as an 'integrity' event on the returned
// stream.
function fromManifest (manifest, spec, opts) {
  opts = optCheck(opts)
  if (spec.scope) { opts = opts.concat({ scope: spec.scope }) }
  const stream = new PassThrough()
  const registry = fetch.pickRegistry(spec, opts)
  const uri = getTarballUrl(spec, registry, manifest, opts)
  fetch(uri, opts.concat({
    headers: {
      'pacote-req-type': 'tarball',
      'pacote-pkg-id': `registry:${manifest.name}@${uri}`
    },
    integrity: manifest._integrity,
    algorithms: [
      manifest._integrity
        ? ssri.parse(manifest._integrity).pickAlgorithm()
        : 'sha1'
    ],
    spec
  }, opts))
    .then(res => {
      // The cache layer reports the hash it stored under via this header.
      const hash = res.headers.get('x-local-cache-hash')
      if (hash) {
        stream.emit('integrity', decodeURIComponent(hash))
      }
      res.body.on('error', err => stream.emit('error', err))
      res.body.pipe(stream)
      return null
    })
    .catch(err => stream.emit('error', err))
  return stream
}
// Computes the final tarball URL, forcing the tarball's protocol (and,
// when they differ, port) to match the registry's whenever both live on
// the same host.
//
// https://github.com/npm/npm/pull/9471
//
// TL;DR: Some alternative registries host tarballs on http and packuments
// on https, and vice-versa. There's also a case where people who can't use
// SSL to access the npm registry, for example, might use
// `--registry=http://registry.npmjs.org/`. In this case, we need to
// rewrite `tarball` to match the protocol.
function getTarballUrl (spec, registry, mani, opts) {
  const regUrl = url.parse(registry)
  const tarUrl = url.parse(mani._resolved)
  const sameHost = regUrl.hostname === tarUrl.hostname
  if (sameHost && regUrl.protocol !== tarUrl.protocol) {
    tarUrl.protocol = regUrl.protocol
    // Ports might be same host different protocol!
    if (regUrl.port !== tarUrl.port) {
      delete tarUrl.host
      tarUrl.port = regUrl.port
    }
    // Drop the stale precomputed href so url.format rebuilds it.
    delete tarUrl.href
  }
  return url.format(tarUrl)
}

34
node_modules/pacote/lib/fetchers/remote.js generated vendored Normal file
View file

@ -0,0 +1,34 @@
'use strict'
const BB = require('bluebird')
const Fetcher = require('../fetch')
const fetchRegistry = require('./registry')
const fetchRemote = module.exports = Object.create(null)
// Fetcher for "remote" (arbitrary tarball url) specs. Metadata can only
// be discovered by downloading the tarball itself, so packument/manifest
// are stubs and tarball delegates to the registry fetcher's stream.
Fetcher.impl(fetchRemote, {
  packument (spec, opts) {
    return BB.reject(new Error('Not implemented yet'))
  },
  manifest (spec, opts) {
    // We can't get the manifest for a remote tarball until
    // we extract the tarball itself.
    // `finalize-manifest` takes care of this process of extracting
    // a manifest based on ./tarball.js
    return BB.resolve(null)
  },
  tarball (spec, opts) {
    const uri = spec._resolved || spec.fetchSpec
    return fetchRegistry.fromManifest({
      _resolved: uri,
      _integrity: opts.integrity
    }, spec, opts)
  },
  fromManifest (manifest, spec, opts) {
    return this.tarball(manifest || spec, opts)
  }
})

3
node_modules/pacote/lib/fetchers/tag.js generated vendored Normal file
View file

@ -0,0 +1,3 @@
'use strict'
// `tag` specs (e.g. `pkg@latest`) are served by the registry fetcher.
module.exports = require('./registry')

3
node_modules/pacote/lib/fetchers/version.js generated vendored Normal file
View file

@ -0,0 +1,3 @@
'use strict'
// `version` specs (e.g. `pkg@1.2.3`) are served by the registry fetcher.
module.exports = require('./registry')

255
node_modules/pacote/lib/finalize-manifest.js generated vendored Normal file
View file

@ -0,0 +1,255 @@
'use strict'
const BB = require('bluebird')
const cacache = require('cacache')
const cacheKey = require('./util/cache-key')
const fetchFromManifest = require('./fetch').fromManifest
const finished = require('./util/finished')
const minimatch = require('minimatch')
const normalize = require('normalize-package-data')
const optCheck = require('./util/opt-check')
const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const ssri = require('ssri')
const tar = require('tar')
const readJson = require('./util/read-json')
// `finalizeManifest` takes as input the various kinds of manifests that
// manifest handlers ('lib/fetchers/*.js#manifest()') return, and makes sure
// they are:
//
// * filled out with any required data that the handler couldn't fill in
// * formatted consistently
// * cached so we don't have to repeat this work more than necessary
//
// The biggest thing this package might do is do a full tarball extraction in
// order to find missing bits of metadata required by the npm installer. For
// example, it will fill in `_shrinkwrap`, `_integrity`, and other details that
// the plain manifest handlers would require a tarball to fill out. If a
// handler returns everything necessary, this process is skipped.
//
// If we get to the tarball phase, the corresponding tarball handler for the
// requested type will be invoked and the entire tarball will be read from the
// stream.
//
module.exports = finalizeManifest
// Produces a fully-populated Manifest for `pkg`/`spec`, reusing a cached
// finalized manifest when possible and otherwise filling in missing
// fields (shrinkwrap, bin, integrity, ...) via tarballedProps(), then
// caching the result.
function finalizeManifest (pkg, spec, opts) {
  const key = finalKey(pkg, spec)
  opts = optCheck(opts)
  // Skip the cache for preferOnline/fullMetadata/enjoyBy requests, which
  // all need fresher or richer data than a finalized manifest carries.
  const cachedManifest = (opts.cache && key && !opts.preferOnline && !opts.fullMetadata && !opts.enjoyBy)
    ? cacache.get.info(opts.cache, key, opts)
    : BB.resolve(null)
  return cachedManifest.then(cached => {
    if (cached && cached.metadata && cached.metadata.manifest) {
      return new Manifest(cached.metadata.manifest)
    } else {
      return tarballedProps(pkg, spec, opts).then(props => {
        return pkg && pkg.name
          ? new Manifest(pkg, props, opts.fullMetadata)
          : new Manifest(props, null, opts.fullMetadata)
      }).then(manifest => {
        const cacheKey = key || finalKey(manifest, spec)
        if (!opts.cache || !cacheKey) {
          return manifest
        } else {
          // The manifest is stored as cacache *metadata*; the '.' content
          // payload is just a placeholder.
          return cacache.put(
            opts.cache, cacheKey, '.', {
              metadata: {
                id: manifest._id,
                manifest,
                type: 'finalized-manifest'
              }
            }
          ).then(() => manifest)
        }
      })
    }
  })
}
module.exports.Manifest = Manifest
// Normalized manifest record consumed by the installer. Merges the
// handler-provided `pkg` with `fromTarball` props recovered by
// tarballedProps(); `fullMetadata` additionally copies every raw field
// of `pkg` through before the normalized fields are assigned.
function Manifest (pkg, fromTarball, fullMetadata) {
  fromTarball = fromTarball || {}
  if (fullMetadata) {
    Object.assign(this, pkg)
  }
  this.name = pkg.name
  this.version = pkg.version
  this.engines = pkg.engines || fromTarball.engines
  this.cpu = pkg.cpu || fromTarball.cpu
  this.os = pkg.os || fromTarball.os
  this.dependencies = pkg.dependencies || {}
  this.optionalDependencies = pkg.optionalDependencies || {}
  this.devDependencies = pkg.devDependencies || {}
  // Accept both historical spellings of the bundled-deps field.
  const bundled = (
    pkg.bundledDependencies ||
    pkg.bundleDependencies ||
    false
  )
  this.bundleDependencies = bundled
  this.peerDependencies = pkg.peerDependencies || {}
  this.deprecated = pkg.deprecated || false
  // These depend entirely on each handler
  this._resolved = pkg._resolved
  // Not all handlers (or registries) provide these out of the box,
  // and if they don't, we need to extract and read the tarball ourselves.
  // These are details required by the installer.
  this._integrity = pkg._integrity || fromTarball._integrity || null
  this._shasum = pkg._shasum || fromTarball._shasum || null
  this._shrinkwrap = pkg._shrinkwrap || fromTarball._shrinkwrap || null
  this.bin = pkg.bin || fromTarball.bin || null
  if (this.bin && Array.isArray(this.bin)) {
    // Code yanked from read-package-json.
    // Array-form `bin`: map each listed file (skipping dotfiles) to an
    // entry named after its basename, rooted at directories.bin.
    const m = (pkg.directories && pkg.directories.bin) || '.'
    this.bin = this.bin.reduce((acc, mf) => {
      if (mf && mf.charAt(0) !== '.') {
        const f = path.basename(mf)
        acc[f] = path.join(m, mf)
      }
      return acc
    }, {})
  }
  this._id = null
  // TODO - freezing and inextensibility pending npm changes. See test suite.
  // Object.preventExtensions(this)
  normalize(this)
  // I don't want this why did you give it to me. Go away. 🔥🔥🔥🔥
  delete this.readme
  // Object.freeze(this)
}
// Some things aren't filled in by standard manifest fetching.
// If this function needs to do its work, it will grab the
// package tarball, extract it, and take whatever it needs
// from the stream.
// Recovers manifest props that only the tarball can provide: shrinkwrap,
// bin entries matched under directories.bin, integrity/shasum hashes,
// or the whole package.json for non-registry packages. Resolves {}
// immediately when `pkg` already carries everything needed. All readers
// below share one single-pass tarball stream.
function tarballedProps (pkg, spec, opts) {
  const needsShrinkwrap = (!pkg || (
    pkg._hasShrinkwrap !== false &&
    !pkg._shrinkwrap
  ))
  const needsBin = !!(!pkg || (
    !pkg.bin &&
    pkg.directories &&
    pkg.directories.bin
  ))
  const needsIntegrity = !pkg || (!pkg._integrity && pkg._integrity !== false)
  const needsShasum = !pkg || (!pkg._shasum && pkg._shasum !== false)
  const needsHash = needsIntegrity || needsShasum
  const needsManifest = !pkg || !pkg.name
  const needsExtract = needsShrinkwrap || needsBin || needsManifest
  if (!needsShrinkwrap && !needsBin && !needsHash && !needsManifest) {
    return BB.resolve({})
  } else {
    opts = optCheck(opts)
    const tarStream = fetchFromManifest(pkg, spec, opts)
    const extracted = needsExtract && new tar.Parse()
    // BB.join runs all producers concurrently; falsy members resolve to
    // their own falsy value, so each position below may be undefined.
    return BB.join(
      needsShrinkwrap && jsonFromStream('npm-shrinkwrap.json', extracted),
      needsManifest && jsonFromStream('package.json', extracted),
      needsBin && getPaths(extracted),
      needsHash && ssri.fromStream(tarStream, { algorithms: ['sha1', 'sha512'] }),
      needsExtract && pipe(tarStream, extracted),
      (sr, mani, paths, hash) => {
        if (needsManifest && !mani) {
          const err = new Error(`Non-registry package missing package.json: ${spec}.`)
          err.code = 'ENOPACKAGEJSON'
          throw err
        }
        const extraProps = mani || {}
        delete extraProps._resolved
        // drain out the rest of the tarball
        tarStream.resume()
        // if we have directories.bin, we need to collect any matching files
        // to add to bin
        if (paths && paths.length) {
          const dirBin = mani
            ? (mani && mani.directories && mani.directories.bin)
            : (pkg && pkg.directories && pkg.directories.bin)
          if (dirBin) {
            extraProps.bin = {}
            paths.forEach(filePath => {
              if (minimatch(filePath, dirBin + '/**')) {
                const relative = path.relative(dirBin, filePath)
                if (relative && relative[0] !== '.') {
                  extraProps.bin[path.basename(relative)] = path.join(dirBin, relative)
                }
              }
            })
          }
        }
        return Object.assign(extraProps, {
          _shrinkwrap: sr,
          _resolved: (mani && mani._resolved) ||
            (pkg && pkg._resolved) ||
            spec.fetchSpec,
          _integrity: needsIntegrity && hash && hash.sha512 && hash.sha512[0].toString(),
          _shasum: needsShasum && hash && hash.sha1 && hash.sha1[0].hexDigest()
        })
      }
    )
  }
}
// Scans a tar entry stream for `filename` (ignoring the top-level
// package directory prefix) and parses it as JSON. Resolves undefined
// if the stream closes without a match; non-matching entries are
// drained with entry.resume().
function jsonFromStream (filename, dataStream) {
  return BB.fromNode(cb => {
    dataStream.on('error', cb)
    dataStream.on('close', cb)
    dataStream.on('entry', entry => {
      // Strip the leading `package/`-style directory from the entry path.
      const filePath = entry.header.path.replace(/[^/]+\//, '')
      if (filePath !== filename) {
        entry.resume()
      } else {
        let data = ''
        entry.on('error', cb)
        finished(entry).then(() => {
          try {
            cb(null, readJson(data))
          } catch (err) {
            cb(err)
          }
        }, err => {
          cb(err)
        })
        entry.on('data', d => { data += d })
      }
    })
  })
}
// Collects every entry path in the tarball (with the top-level package
// directory prefix stripped), resolving once the stream closes.
function getPaths (dataStream) {
  return BB.fromNode(cb => {
    const collected = []
    dataStream.on('error', cb)
    dataStream.on('close', () => cb(null, collected))
    dataStream.on('entry', entry => {
      entry.resume()
      collected.push(entry.header.path.replace(/[^/]+\//, ''))
    })
  })
}
// Computes the cache key for a finalized manifest. Git packages key off
// their unique resolved id; anything else needs both a resolved url and
// an integrity hash, otherwise a falsy value is returned (no caching).
function finalKey (pkg, spec) {
  if (pkg && pkg._uniqueResolved) {
    // git packages have a unique, identifiable id, but no tar sha
    return cacheKey(`${spec.type}-manifest`, pkg._uniqueResolved)
  }
  if (!pkg || !pkg._integrity) {
    // Preserve the original falsy value (null/undefined/'' etc.)
    return pkg && pkg._integrity
  }
  return cacheKey(
    `${spec.type}-manifest`,
    `${pkg._resolved}:${ssri.stringify(pkg._integrity)}`
  )
}

6
node_modules/pacote/lib/util/cache-key.js generated vendored Normal file
View file

@ -0,0 +1,6 @@
'use strict'
module.exports = cacheKey
// Builds a namespaced cacache key of the form `pacote:<type>:<identifier>`.
function cacheKey (type, identifier) {
  return `pacote:${type}:${identifier}`
}

17
node_modules/pacote/lib/util/finished.js generated vendored Normal file
View file

@ -0,0 +1,17 @@
'use strict'
const BB = require('bluebird')
module.exports = function (child, hasExitCode = false) {
return BB.fromNode(function (cb) {
child.on('error', cb)
child.on(hasExitCode ? 'close' : 'end', function (exitCode) {
if (exitCode === undefined || exitCode === 0) {
cb()
} else {
let err = new Error('exited with error code: ' + exitCode)
cb(err)
}
})
})
}

274
node_modules/pacote/lib/util/git.js generated vendored Normal file
View file

@ -0,0 +1,274 @@
'use strict'
const BB = require('bluebird')
const cp = require('child_process')
const execFileAsync = BB.promisify(cp.execFile, {
multiArgs: true
})
const finished = require('./finished')
const LRU = require('lru-cache')
const optCheck = require('./opt-check')
const osenv = require('osenv')
const path = require('path')
const pinflight = require('promise-inflight')
const promiseRetry = require('promise-retry')
const uniqueFilename = require('unique-filename')
const which = BB.promisify(require('which'))
const semver = require('semver')
const GOOD_ENV_VARS = new Set([
'GIT_ASKPASS',
'GIT_EXEC_PATH',
'GIT_PROXY_COMMAND',
'GIT_SSH',
'GIT_SSH_COMMAND',
'GIT_SSL_CAINFO',
'GIT_SSL_NO_VERIFY'
])
// Error-output fragments that indicate a transient (retryable) git
// failure, joined into a single alternation regex.
const GIT_TRANSIENT_ERRORS = [
  'remote error: Internal Server Error',
  'The remote end hung up unexpectedly',
  'Connection timed out',
  'Operation timed out',
  'Failed to connect to .* Timed out',
  'Connection reset by peer',
  'SSL_ERROR_SYSCALL',
  'The requested URL returned error: 503'
].join('|')
const GIT_TRANSIENT_ERROR_RE = new RegExp(GIT_TRANSIENT_ERRORS)
const GIT_TRANSIENT_ERROR_MAX_RETRY_NUMBER = 3
// A failed git invocation is worth retrying when its output matches a
// known-transient pattern and we still have attempts left.
function shouldRetry (error, number) {
  if (number >= GIT_TRANSIENT_ERROR_MAX_RETRY_NUMBER) {
    return false
  }
  return GIT_TRANSIENT_ERROR_RE.test(error)
}
const GIT_ = 'GIT_'
let GITENV
// Builds (once per process) the environment for spawned git commands:
// GIT_ASKPASS=echo suppresses interactive password prompts, and
// GIT_TEMPLATE_DIR points at a unique per-process path (presumably so
// host templates/hooks don't apply — the dir is never created here;
// confirm). All other GIT_* vars are stripped except GOOD_ENV_VARS.
function gitEnv () {
  if (GITENV) { return GITENV }
  const tmpDir = path.join(osenv.tmpdir(), 'pacote-git-template-tmp')
  const tmpName = uniqueFilename(tmpDir, 'git-clone')
  GITENV = {
    GIT_ASKPASS: 'echo',
    GIT_TEMPLATE_DIR: tmpName
  }
  Object.keys(process.env).forEach(k => {
    if (GOOD_ENV_VARS.has(k) || !k.startsWith(GIT_)) {
      GITENV[k] = process.env[k]
    }
  })
  return GITENV
}
let GITPATH
try {
GITPATH = which.sync('git')
} catch (e) {}
module.exports.clone = fullClone
// Mirror-clones `repo` into `target/.git`, gives it a worktree with
// `git init`, checks out `committish` (or HEAD), updates submodules,
// and resolves to the checked-out HEAD SHA.
function fullClone (repo, committish, target, opts) {
  opts = optCheck(opts)
  const gitArgs = ['clone', '--mirror', '-q', repo, path.join(target, '.git')]
  if (process.platform === 'win32') {
    // Windows path-length limits bite on deeply nested repos.
    gitArgs.push('--config', 'core.longpaths=true')
  }
  return execGit(gitArgs, { cwd: target }, opts).then(() => {
    return execGit(['init'], { cwd: target }, opts)
  }).then(() => {
    return execGit(['checkout', committish || 'HEAD'], { cwd: target }, opts)
  }).then(() => {
    return updateSubmodules(target, opts)
  }).then(() => headSha(target, opts))
}
module.exports.shallow = shallowClone
// Depth-1 clones `repo` (optionally a specific branch/tag) into
// `target`, updates submodules, and resolves to the resulting HEAD SHA.
function shallowClone (repo, branch, target, opts) {
  opts = optCheck(opts)
  const gitArgs = ['clone', '--depth=1', '-q']
  if (branch) {
    gitArgs.push('-b', branch)
  }
  gitArgs.push(repo, target)
  if (process.platform === 'win32') {
    // Windows path-length limits bite on deeply nested repos.
    gitArgs.push('--config', 'core.longpaths=true')
  }
  return execGit(gitArgs, {
    cwd: target
  }, opts).then(() => {
    return updateSubmodules(target, opts)
  }).then(() => headSha(target, opts))
}
// Initializes and updates all submodules (recursively) in `localRepo`.
function updateSubmodules (localRepo, opts) {
  return execGit(
    ['submodule', 'update', '-q', '--init', '--recursive'],
    { cwd: localRepo },
    opts
  )
}
// Resolves the SHA that HEAD currently points to in `repo`.
function headSha (repo, opts) {
  const checked = optCheck(opts)
  return execGit(['rev-parse', '--revs-only', 'HEAD'], { cwd: repo }, checked)
    .spread(stdout => stdout.trim())
}
const CARET_BRACES = '^{}'
const REVS = new LRU({
max: 100,
maxAge: 5 * 60 * 1000
})
module.exports.revs = revs
// Lists remote refs (`git ls-remote -h -t`) for `repo` and organizes
// them into { versions, 'dist-tags', refs, shas }:
// * refs: ref name -> { sha, ref, type }
// * shas: sha -> [ref names] (lets shallow clones target specific SHAs)
// * versions: cleaned semver (from `vX.Y.Z`-style tags) -> ref doc
// * dist-tags: HEAD/latest aliases when HEAD matches a version tag
// Results are cached for 5 minutes (REVS) and deduped in-flight.
function revs (repo, opts) {
  opts = optCheck(opts)
  const cached = REVS.get(repo)
  if (cached) {
    return BB.resolve(cached)
  }
  return pinflight(`ls-remote:${repo}`, () => {
    return spawnGit(['ls-remote', '-h', '-t', repo], {
      env: gitEnv()
    }, opts).then((stdout) => {
      return stdout.split('\n').reduce((revs, line) => {
        const split = line.split(/\s+/, 2)
        if (split.length < 2) { return revs }
        const sha = split[0].trim()
        const ref = split[1].trim().match(/(?:refs\/[^/]+\/)?(.*)/)[1]
        if (!ref) { return revs } // ???
        if (ref.endsWith(CARET_BRACES)) { return revs } // refs/tags/x^{} crap
        const type = refType(line)
        const doc = { sha, ref, type }
        revs.refs[ref] = doc
        // We can check out shallow clones on specific SHAs if we have a ref
        if (revs.shas[sha]) {
          revs.shas[sha].push(ref)
        } else {
          revs.shas[sha] = [ref]
        }
        if (type === 'tag') {
          const match = ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/)
          if (match && semver.valid(match[1], true)) {
            revs.versions[semver.clean(match[1], true)] = doc
          }
        }
        return revs
      }, { versions: {}, 'dist-tags': {}, refs: {}, shas: {} })
    }, err => {
      err.message = `Error while executing:\n${GITPATH} ls-remote -h -t ${repo}\n\n${err.stderr}\n${err.message}`
      throw err
    }).then(revs => {
      if (revs.refs.HEAD) {
        const HEAD = revs.refs.HEAD
        Object.keys(revs.versions).forEach(v => {
          // BUG FIX: `v` is a version *string* key; the original compared
          // `v.sha` (always undefined), so dist-tags.HEAD/latest were
          // never populated. Compare the version's ref doc instead.
          if (revs.versions[v].sha === HEAD.sha) {
            revs['dist-tags'].HEAD = v
            if (!revs.refs.latest) {
              revs['dist-tags'].latest = revs.refs.HEAD
            }
          }
        })
      }
      REVS.set(repo, revs)
      return revs
    })
  })
}
module.exports._exec = execGit
// Runs git via execFile, retrying transient failures (shouldRetry).
// Retry policy comes from opts.retry when provided, otherwise from the
// fetch-retry-* options.
function execGit (gitArgs, gitOpts, opts) {
  opts = optCheck(opts)
  return checkGit(opts).then(gitPath => {
    return promiseRetry((retry, number) => {
      if (number !== 1) {
        opts.log.silly('pacote', 'Retrying git command: ' + gitArgs.join(' ') + ' attempt # ' + number)
      }
      return execFileAsync(gitPath, gitArgs, mkOpts(gitOpts, opts)).catch((err) => {
        // The multiArgs execFile error carries the output used for matching.
        if (shouldRetry(err, number)) {
          retry(err)
        } else {
          throw err
        }
      })
    }, opts.retry != null ? opts.retry : {
      retries: opts['fetch-retries'],
      factor: opts['fetch-retry-factor'],
      maxTimeout: opts['fetch-retry-maxtimeout'],
      minTimeout: opts['fetch-retry-mintimeout']
    })
  })
}
module.exports._spawn = spawnGit
// Like execGit, but uses cp.spawn and resolves with captured stdout.
// Retries are decided by matching captured *stderr* against the
// transient-error patterns; on final failure, stderr is attached to the
// rethrown error.
function spawnGit (gitArgs, gitOpts, opts) {
  opts = optCheck(opts)
  return checkGit(opts).then(gitPath => {
    return promiseRetry((retry, number) => {
      if (number !== 1) {
        opts.log.silly('pacote', 'Retrying git command: ' + gitArgs.join(' ') + ' attempt # ' + number)
      }
      const child = cp.spawn(gitPath, gitArgs, mkOpts(gitOpts, opts))
      let stdout = ''
      let stderr = ''
      child.stdout.on('data', d => { stdout += d })
      child.stderr.on('data', d => { stderr += d })
      return finished(child, true).catch(err => {
        if (shouldRetry(stderr, number)) {
          retry(err)
        } else {
          err.stderr = stderr
          throw err
        }
      }).then(() => {
        return stdout
      })
    }, opts.retry)
  })
}
// Builds child_process options for git: the computed git env, plus
// numeric uid/gid from opts. `_gitOpts` is assigned last and wins on
// conflicts.
function mkOpts (_gitOpts, opts) {
  const gitOpts = {
    env: gitEnv()
  }
  // NOTE(review): the `+opts.uid &&` truthiness check skips uid/gid 0
  // (root) — presumably intentional; confirm.
  if (+opts.uid && !isNaN(opts.uid)) {
    gitOpts.uid = +opts.uid
  }
  if (+opts.gid && !isNaN(opts.gid)) {
    gitOpts.gid = +opts.gid
  }
  Object.assign(gitOpts, _gitOpts)
  return gitOpts
}
// Resolves the git binary to invoke: an explicit opts.git override wins,
// otherwise the `git` found on $PATH at module load time. Rejects with
// code ENOGIT when neither is available.
function checkGit (opts) {
  if (opts.git) {
    return BB.resolve(opts.git)
  }
  if (GITPATH) {
    return BB.resolve(GITPATH)
  }
  const err = new Error('No git binary found in $PATH')
  err.code = 'ENOGIT'
  return BB.reject(err)
}
const REFS_TAGS = 'refs/tags/'
const REFS_HEADS = 'refs/heads/'
const HEAD = 'HEAD'
// Classifies an ls-remote line/ref as 'tag', 'branch', 'head', or 'other'
// based on the ref namespace it mentions.
function refType (ref) {
  if (ref.includes(REFS_TAGS)) {
    return 'tag'
  }
  if (ref.includes(REFS_HEADS)) {
    return 'branch'
  }
  return ref.endsWith(HEAD) ? 'head' : 'other'
}

48
node_modules/pacote/lib/util/opt-check.js generated vendored Normal file
View file

@ -0,0 +1,48 @@
'use strict'
const figgyPudding = require('figgy-pudding')
const logger = require('./proclog.js')
// Keys carrying registry credentials, optionally prefixed by a registry
// host (e.g. `//registry.npmjs.org/:_authToken`).
const AUTH_REGEX = /^(?:.*:)?(token|_authToken|username|_password|password|email|always-auth|_auth|otp)$/
// Scoped registry mappings like `@myorg:registry`.
const SCOPE_REGISTRY_REGEX = /@.*:registry$/gi
// figgy-pudding options whitelist shared by all of pacote. A string
// value declares an alias to another key; `{ default: x }` supplies a
// default; `{}` just allows the key through.
module.exports = figgyPudding({
  annotate: {},
  cache: {},
  defaultTag: 'tag',
  dirPacker: {},
  dmode: {},
  'enjoy-by': 'enjoyBy',
  enjoyBy: {},
  before: 'enjoyBy',
  fmode: {},
  'fetch-retries': { default: 2 },
  'fetch-retry-factor': { default: 10 },
  'fetch-retry-maxtimeout': { default: 60000 },
  'fetch-retry-mintimeout': { default: 10000 },
  fullMetadata: 'full-metadata',
  'full-metadata': { default: false },
  gid: {},
  git: {},
  includeDeprecated: { default: true },
  'include-deprecated': 'includeDeprecated',
  integrity: {},
  log: { default: logger },
  memoize: {},
  offline: {},
  preferOffline: 'prefer-offline',
  'prefer-offline': {},
  preferOnline: 'prefer-online',
  'prefer-online': {},
  registry: { default: 'https://registry.npmjs.org/' },
  resolved: {},
  retry: {},
  scope: {},
  tag: { default: 'latest' },
  uid: {},
  umask: {},
  where: {}
}, {
  // Also pass through any auth- or scoped-registry-looking keys.
  other (key) {
    return key.match(AUTH_REGEX) || key.match(SCOPE_REGISTRY_REGEX)
  }
})

44
node_modules/pacote/lib/util/pack-dir.js generated vendored Normal file
View file

@ -0,0 +1,44 @@
'use strict'
const BB = require('bluebird')
const cacache = require('cacache')
const cacheKey = require('./cache-key')
const optCheck = require('./opt-check')
const packlist = require('npm-packlist')
const pipe = BB.promisify(require('mississippi').pipe)
const tar = require('tar')
module.exports = packDir
// Packs directory `dir` into a tarball streamed into `target`. When a
// cache is configured, the tarball is simultaneously teed into cacache
// under `label`, and cacache's 'integrity' event is forwarded to
// `target`.
function packDir (manifest, label, dir, target, opts) {
  opts = optCheck(opts)
  // A caller-provided opts.dirPacker takes precedence over the built-in
  // packlist + tar packer.
  const packer = opts.dirPacker
    ? BB.resolve(opts.dirPacker(manifest, dir))
    : mkPacker(dir)
  if (!opts.cache) {
    return packer.then(packer => pipe(packer, target))
  } else {
    const cacher = cacache.put.stream(
      opts.cache, cacheKey('packed-dir', label), opts
    ).on('integrity', i => {
      target.emit('integrity', i)
    })
    return packer.then(packer => BB.all([
      pipe(packer, cacher),
      pipe(packer, target)
    ]))
  }
}
// Creates a gzipped tarball stream of `dir`, honoring npm's file
// selection rules (npm-packlist), with reproducible ("portable") output
// under the conventional `package/` prefix.
function mkPacker (dir) {
  return packlist({ path: dir }).then(files => tar.c({
    cwd: dir,
    gzip: true,
    portable: true,
    prefix: 'package/'
  }, files))
}

23
node_modules/pacote/lib/util/proclog.js generated vendored Normal file
View file

@ -0,0 +1,23 @@
'use strict'
// Recognized log levels.
const LEVELS = [
  'notice',
  'error',
  'warn',
  'info',
  'verbose',
  'http',
  'silly',
  'pause',
  'resume'
]
// Default logger: each level simply re-emits its arguments as a 'log'
// event on `process`, so an embedding CLI can listen and display them.
const logger = LEVELS.reduce((acc, level) => {
  acc[level] = (category, ...args) => process.emit('log', level, category, ...args)
  return acc
}, {})
module.exports = logger

15
node_modules/pacote/lib/util/read-json.js generated vendored Normal file
View file

@ -0,0 +1,15 @@
'use strict'
module.exports = function (content) {
// Code also yanked from read-package-json.
function stripBOM (content) {
content = content.toString()
// Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
// because the buffer-to-string conversion in `fs.readFileSync()`
// translates it to FEFF, the UTF-16 BOM.
if (content.charCodeAt(0) === 0xFEFF) return content.slice(1)
return content
}
return JSON.parse(stripBOM(content))
}

135
node_modules/pacote/lib/with-tarball-stream.js generated vendored Normal file
View file

@ -0,0 +1,135 @@
'use strict'
const BB = require('bluebird')
const cacache = require('cacache')
const fetch = require('./fetch.js')
const fs = require('fs')
const npa = require('npm-package-arg')
const optCheck = require('./util/opt-check.js')
const path = require('path')
const ssri = require('ssri')
const retry = require('promise-retry')
const statAsync = BB.promisify(fs.stat)
const RETRIABLE_ERRORS = new Set(['ENOENT', 'EINTEGRITY', 'Z_DATA_ERROR'])
module.exports = withTarballStream
// Resolves `spec` to a tarball stream and hands it to `streamHandler`,
// trying progressively more expensive sources:
//   1. a `file:`-resolved local shortcut (integrity-verified)
//   2. the cacache content store, by integrity hash
//   3. a full fetch by manifest (retried once on cache corruption)
// Corrupted cache entries (EINTEGRITY / Z_DATA_ERROR) are purged before
// falling through to the next source.
function withTarballStream (spec, opts, streamHandler) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)
  // First, we check for a file: resolved shortcut
  const tryFile = (
    !opts.preferOnline &&
    opts.integrity &&
    opts.resolved &&
    opts.resolved.startsWith('file:')
  )
    ? BB.try(() => {
      // NOTE - this is a special shortcut! Packages installed as files do not
      // have a `resolved` field -- this specific case only occurs when you have,
      // say, a git dependency or a registry dependency that you've packaged into
      // a local file, and put that file: spec in the `resolved` field.
      opts.log.silly('pacote', `trying ${spec} by local file: ${opts.resolved}`)
      const file = path.resolve(opts.where || '.', opts.resolved.substr(5))
      return statAsync(file)
        .then(() => {
          const verifier = ssri.integrityStream({ integrity: opts.integrity })
          const stream = fs.createReadStream(file)
            .on('error', err => verifier.emit('error', err))
            .pipe(verifier)
          return streamHandler(stream)
        })
        .catch(err => {
          if (err.code === 'EINTEGRITY') {
            // (fixed: the message previously lacked a space after the period)
            opts.log.warn('pacote', `EINTEGRITY while extracting ${spec} from ${file}. You will have to recreate the file.`)
            opts.log.verbose('pacote', `EINTEGRITY for ${spec}: ${err.message}`)
          }
          throw err
        })
    })
    : BB.reject(Object.assign(new Error('no file!'), { code: 'ENOENT' }))
  const tryDigest = tryFile
    .catch(err => {
      if (
        opts.preferOnline ||
        !opts.cache ||
        !opts.integrity ||
        !RETRIABLE_ERRORS.has(err.code)
      ) {
        throw err
      } else {
        opts.log.silly('tarball', `trying ${spec} by hash: ${opts.integrity}`)
        const stream = cacache.get.stream.byDigest(
          opts.cache, opts.integrity, opts
        )
        // Replay an early error to any late-attached 'error' listener so
        // the handler can't miss it.
        stream.once('error', err => stream.on('newListener', (ev, l) => {
          if (ev === 'error') { l(err) }
        }))
        return streamHandler(stream)
          .catch(err => {
            if (err.code === 'EINTEGRITY' || err.code === 'Z_DATA_ERROR') {
              opts.log.warn('tarball', `cached data for ${spec} (${opts.integrity}) seems to be corrupted. Refreshing cache.`)
              return cleanUpCached(opts.cache, opts.integrity, opts)
                .then(() => { throw err })
            } else {
              throw err
            }
          })
      }
    })
  const trySpec = tryDigest
    .catch(err => {
      if (!RETRIABLE_ERRORS.has(err.code)) {
        // If it's not one of our retriable errors, bail out and give up.
        throw err
      } else {
        opts.log.silly(
          'tarball',
          `no local data for ${spec}. Extracting by manifest.`
        )
        return BB.resolve(retry((tryAgain, attemptNum) => {
          const tardata = fetch.tarball(spec, opts)
          if (!opts.resolved) {
            tardata.on('manifest', m => {
              opts = opts.concat({ resolved: m._resolved })
            })
            tardata.on('integrity', i => {
              opts = opts.concat({ integrity: i })
            })
          }
          return BB.try(() => streamHandler(tardata))
            .catch(err => {
              // Retry once if we have a cache, to clear up any weird conditions.
              // Don't retry network errors, though -- make-fetch-happen has already
              // taken care of making sure we're all set on that front.
              if (opts.cache && err.code && !String(err.code).match(/^E\d{3}$/)) {
                if (err.code === 'EINTEGRITY' || err.code === 'Z_DATA_ERROR') {
                  opts.log.warn('tarball', `tarball data for ${spec} (${opts.integrity}) seems to be corrupted. Trying one more time.`)
                }
                return cleanUpCached(opts.cache, err.sri, opts)
                  .then(() => tryAgain(err))
              } else {
                throw err
              }
            })
        }, { retries: 1 }))
      }
    })
  return trySpec
    .catch(err => {
      if (err.code === 'EINTEGRITY') {
        err.message = `Verification failed while extracting ${spec}:\n${err.message}`
      }
      throw err
    })
}
// Removes cached tarball content (by integrity hash) so a corrupted
// entry will be re-fetched on the next attempt.
function cleanUpCached (cachePath, integrity, opts) {
  return cacache.rm.content(cachePath, integrity, opts)
}

38
node_modules/pacote/manifest.js generated vendored Normal file
View file

@ -0,0 +1,38 @@
'use strict'
const fetchManifest = require('./lib/fetch').manifest
const finalizeManifest = require('./lib/finalize-manifest')
const optCheck = require('./lib/util/opt-check')
const pinflight = require('promise-inflight')
const npa = require('npm-package-arg')
module.exports = manifest
// Public `pacote.manifest()` entry point: fetch + finalize a manifest
// for `spec`, deduping concurrent identical requests via promise-inflight.
function manifest (spec, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)
  // Inflight key: requests differing in any of these components may
  // legitimately resolve to different manifests.
  const label = [
    spec.name,
    spec.saveSpec || spec.fetchSpec,
    spec.type,
    opts.cache,
    opts.registry,
    opts.scope
  ].join(':')
  return pinflight(label, () => {
    const startTime = Date.now()
    return fetchManifest(spec, opts).then(rawManifest => {
      return finalizeManifest(rawManifest, spec, opts)
    }).then(manifest => {
      // opts.annotate adds npm's historical provenance fields.
      if (opts.annotate) {
        manifest._from = spec.saveSpec || spec.raw
        manifest._requested = spec
        manifest._spec = spec.raw
        manifest._where = opts.where
      }
      const elapsedTime = Date.now() - startTime
      opts.log.silly('pacote', `${spec.type} manifest for ${spec.name}@${spec.saveSpec || spec.fetchSpec} fetched in ${elapsedTime}ms`)
      return manifest
    })
  })
}

117
node_modules/pacote/package.json generated vendored Normal file
View file

@ -0,0 +1,117 @@
{
"_from": "pacote@^9.5.8",
"_id": "pacote@9.5.8",
"_inBundle": false,
"_integrity": "sha512-0Tl8Oi/K0Lo4MZmH0/6IsT3gpGf9eEAznLXEQPKgPq7FscnbUOyopnVpwXlnQdIbCUaojWy1Wd7VMyqfVsRrIw==",
"_location": "/pacote",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "pacote@^9.5.8",
"name": "pacote",
"escapedName": "pacote",
"rawSpec": "^9.5.8",
"saveSpec": null,
"fetchSpec": "^9.5.8"
},
"_requiredBy": [
"/npm-check-updates"
],
"_resolved": "https://registry.npmjs.org/pacote/-/pacote-9.5.8.tgz",
"_shasum": "23480efdc4fa74515855c9ecf39cf64078f99786",
"_spec": "pacote@^9.5.8",
"_where": "/home/shimataro/projects/actions/ssh-key-action/node_modules/npm-check-updates",
"author": {
"name": "Kat Marchán",
"email": "kzm@sykosomatic.org"
},
"bugs": {
"url": "https://github.com/npm/pacote/issues"
},
"bundleDependencies": false,
"contributors": [
{
"name": "Charlotte Spencer",
"email": "charlottelaspencer@gmail.com"
},
{
"name": "Rebecca Turner",
"email": "me@re-becca.org"
}
],
"dependencies": {
"bluebird": "^3.5.3",
"cacache": "^12.0.2",
"chownr": "^1.1.2",
"figgy-pudding": "^3.5.1",
"get-stream": "^4.1.0",
"glob": "^7.1.3",
"infer-owner": "^1.0.4",
"lru-cache": "^5.1.1",
"make-fetch-happen": "^5.0.0",
"minimatch": "^3.0.4",
"minipass": "^2.3.5",
"mississippi": "^3.0.0",
"mkdirp": "^0.5.1",
"normalize-package-data": "^2.4.0",
"npm-package-arg": "^6.1.0",
"npm-packlist": "^1.1.12",
"npm-pick-manifest": "^3.0.0",
"npm-registry-fetch": "^4.0.0",
"osenv": "^0.1.5",
"promise-inflight": "^1.0.1",
"promise-retry": "^1.1.1",
"protoduck": "^5.0.1",
"rimraf": "^2.6.2",
"safe-buffer": "^5.1.2",
"semver": "^5.6.0",
"ssri": "^6.0.1",
"tar": "^4.4.10",
"unique-filename": "^1.1.1",
"which": "^1.3.1"
},
"deprecated": false,
"description": "JavaScript package downloader",
"devDependencies": {
"nock": "^10.0.3",
"npmlog": "^4.1.2",
"nyc": "^14.1.1",
"require-inject": "^1.4.3",
"standard": "^12.0.1",
"standard-version": "^4.4.0",
"tacks": "^1.2.7",
"tap": "^12.7.0",
"tar-stream": "^1.6.2",
"weallbehave": "^1.2.0",
"weallcontribute": "^1.0.7"
},
"files": [
"*.js",
"lib"
],
"homepage": "https://github.com/npm/pacote#readme",
"keywords": [
"packages",
"npm",
"git"
],
"license": "MIT",
"main": "index.js",
"name": "pacote",
"repository": {
"type": "git",
"url": "git+https://github.com/npm/pacote.git"
},
"scripts": {
"postrelease": "npm publish && git push --follow-tags",
"prerelease": "npm t",
"pretest": "standard",
"release": "standard-version -s",
"test": "nyc --all -- tap -J test/*.js",
"test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
"update-coc": "weallbehave -o . && git add CODE_OF_CONDUCT.md && git commit -m 'docs(coc): updated CODE_OF_CONDUCT.md'",
"update-contrib": "weallcontribute -o . && git add CONTRIBUTING.md && git commit -m 'docs(contributing): updated CONTRIBUTING.md'"
},
"version": "9.5.8"
}

29
node_modules/pacote/packument.js generated vendored Normal file
View file

@ -0,0 +1,29 @@
'use strict'
const fetchPackument = require('./lib/fetch').packument
const optCheck = require('./lib/util/opt-check')
const pinflight = require('promise-inflight')
const npa = require('npm-package-arg')
module.exports = packument

/**
 * Fetch the packument (the full registry metadata document covering every
 * published version) for a package spec. Concurrent calls for the same
 * spec + configuration are coalesced through promise-inflight.
 *
 * @param {string|Object} spec - package spec (name@range, git URL, tarball, …)
 * @param {Object} [opts] - pacote options (cache, registry, scope, …)
 * @returns {Promise<Object>} the packument
 */
function packument (spec, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)

  // Deduplication key for in-flight coalescing.
  const label = [
    spec.name, spec.saveSpec || spec.fetchSpec, spec.type,
    opts.cache, opts.registry, opts.scope
  ].join(':')

  // Measured from the call site, so coalesced waiters report their own wait.
  const startTime = Date.now()
  return pinflight(label, () => fetchPackument(spec, opts)).then(p => {
    const elapsedTime = Date.now() - startTime
    opts.log.silly('pacote', `${spec.registry ? 'registry' : spec.type} packument for ${spec.name}@${spec.saveSpec || spec.fetchSpec} fetched in ${elapsedTime}ms`)
    return p
  })
}

64
node_modules/pacote/prefetch.js generated vendored Normal file
View file

@ -0,0 +1,64 @@
'use strict'
const BB = require('bluebird')
const cacache = require('cacache')
const finished = BB.promisify(require('mississippi').finished)
const optCheck = require('./lib/util/opt-check')
const npa = require('npm-package-arg')
module.exports = prefetch

/**
 * Deprecated: warm the cache for a package spec without extracting it.
 * Kept for backward compatibility; callers should use pacote.tarball().
 *
 * @param {string|Object} spec - package spec
 * @param {Object} [opts] - pacote options (cache, integrity, preferOnline, …)
 * @returns {Promise<Object>} info about what was (or wasn't) prefetched
 */
function prefetch (spec, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)
  opts.log.warn('prefetch', 'pacote.prefetch() is deprecated. Please use pacote.tarball() instead.')
  const startTime = Date.now()

  // Without a cache there is nothing useful to prefetch into.
  if (!opts.cache) {
    opts.log.info('prefetch', 'skipping prefetch: no cache provided')
    return BB.resolve({ spec })
  }

  // No usable integrity hash (or the caller insists on going online):
  // fall back to fetching via the manifest.
  if (!opts.integrity || opts.preferOnline) {
    opts.log.silly('prefetch', `no integrity hash provided for ${spec} - fetching by manifest`)
    return prefetchByManifest(startTime, spec, opts)
  }

  opts.log.silly('prefetch', 'checking if', opts.integrity, 'is already cached')
  return cacache.get.hasContent(opts.cache, opts.integrity).then(info => {
    if (!info) {
      return prefetchByManifest(startTime, spec, opts)
    }
    // Cache hit by digest: nothing to download.
    opts.log.silly('prefetch', `content already exists for ${spec} (${Date.now() - startTime}ms)`)
    return {
      spec,
      integrity: info.integrity,
      size: info.size,
      byDigest: true
    }
  })
}
// Loaded lazily so the fetch machinery is only pulled in when needed.
let fetch

/**
 * Stream the tarball for `spec` through the cache, discarding the bytes
 * themselves and collecting the manifest/integrity emitted along the way.
 *
 * @param {number} start - Date.now() timestamp when prefetch began
 * @param {Object} spec - parsed package spec (npm-package-arg result)
 * @param {Object} opts - pacote options
 * @returns {Promise<Object>} prefetch result info
 */
function prefetchByManifest (start, spec, opts) {
  let pkgManifest
  let sri
  return BB.resolve().then(() => {
    if (!fetch) {
      fetch = require('./lib/fetch')
    }
    const stream = fetch.tarball(spec, opts)
    if (!stream) { return }
    // Drain the stream; we only care about the side effect of caching it.
    stream.on('data', function () {})
    stream.on('manifest', m => { pkgManifest = m })
    stream.on('integrity', i => { sri = i })
    return finished(stream)
  }).then(() => {
    opts.log.silly('prefetch', `${spec} done in ${Date.now() - start}ms`)
    return {
      manifest: pkgManifest,
      spec,
      integrity: sri || (pkgManifest && pkgManifest._integrity),
      byDigest: false
    }
  })
}

67
node_modules/pacote/tarball.js generated vendored Normal file
View file

@ -0,0 +1,67 @@
'use strict'
const BB = require('bluebird')
const fs = require('fs')
const getStream = require('get-stream')
const mkdirp = BB.promisify(require('mkdirp'))
const npa = require('npm-package-arg')
const optCheck = require('./lib/util/opt-check.js')
const PassThrough = require('stream').PassThrough
const path = require('path')
const rimraf = BB.promisify(require('rimraf'))
const withTarballStream = require('./lib/with-tarball-stream.js')
module.exports = tarball

/**
 * Fetch a package tarball and resolve with its entire contents as a Buffer.
 *
 * @param {string|Object} spec - package spec
 * @param {Object} [opts] - pacote options
 * @returns {Promise<Buffer>} the tarball bytes
 */
function tarball (spec, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)
  const collect = stream => getStream.buffer(stream)
  return withTarballStream(spec, opts, collect)
}
module.exports.stream = tarballStream
/**
 * Fetch a package tarball and return it as a readable stream.
 *
 * withTarballStream may invoke its callback more than once (it retries on
 * cache corruption — see with-tarball-stream.js), but a PassThrough that has
 * already delivered bytes to the consumer cannot be transparently restarted.
 * The hasTouchedOutput/lastError bookkeeping below detects that case and
 * fails loudly instead of emitting a corrupted mix of two attempts.
 *
 * @param {string|Object} spec - package spec
 * @param {Object} [opts] - pacote options
 * @returns {stream.PassThrough} tarball byte stream; fetch failures are
 *   emitted as 'error' events on this stream
 */
function tarballStream (spec, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)
  const output = new PassThrough()
  // Set once any byte (or error) has reached `output`; from then on a retry
  // can no longer be hidden from the consumer.
  let hasTouchedOutput = false
  // Error from the previous attempt, rethrown if a retry arrives too late.
  let lastError = null
  withTarballStream(spec, opts, stream => {
    if (hasTouchedOutput && lastError) {
      // Retry after the consumer already saw data: surface the real failure.
      throw lastError
    } else if (hasTouchedOutput) {
      throw new Error('abort, abort!')
    } else {
      return new BB((resolve, reject) => {
        stream.on('error', reject)
        output.on('error', reject)
        // Second 'error' listener just records that the consumer was touched.
        output.on('error', () => { hasTouchedOutput = true })
        output.on('finish', resolve)
        stream.pipe(output)
        stream.once('data', () => { hasTouchedOutput = true })
      }).catch(err => {
        // Remember the failure so a later retry attempt can rethrow it above.
        lastError = err
        throw err
      })
    }
  })
  // NOTE(review): fetch failures are forwarded onto the returned stream
  // rather than rejecting — callers must attach an 'error' handler.
    .catch(err => output.emit('error', err))
  return output
}
module.exports.toFile = tarballToFile

/**
 * Fetch a package tarball and write it to `dest`, replacing any existing
 * file there. Parent directories are created as needed.
 *
 * @param {string|Object} spec - package spec
 * @param {string} dest - destination file path
 * @param {Object} [opts] - pacote options
 * @returns {Promise} resolves when the file has been fully written
 */
function tarballToFile (spec, dest, opts) {
  opts = optCheck(opts)
  spec = npa(spec, opts.where)

  // Remove any stale file first, then pipe the tarball bytes to disk.
  const writeTarball = stream => rimraf(dest)
    .then(() => new BB((resolve, reject) => {
      const writer = fs.createWriteStream(dest)
      stream.on('error', reject)
      writer.on('error', reject)
      writer.on('close', resolve)
      stream.pipe(writer)
    }))

  return mkdirp(path.dirname(dest))
    .then(() => withTarballStream(spec, opts, writeTarball))
}