mirror of https://github.com/shimataro/ssh-key-action.git
synced 2025-06-19 22:52:10 +10:00

* first action! (#1)

This commit is contained in:
  parent 8deacc95b1
  commit ace1e6a69a

3750 changed files with 1155519 additions and 0 deletions
node_modules/cacache/lib/content/path.js (generated, vendored, normal file, 26 lines added)

@@ -0,0 +1,26 @@
'use strict'

const contentVer = require('../../package.json')['cache-version'].content
const hashToSegments = require('../util/hash-to-segments')
const path = require('path')
const ssri = require('ssri')

// Current format of content file path:
//
// sha512-BaSE64Hex= ->
// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
//
module.exports = contentPath
function contentPath (cache, integrity) {
  const sri = ssri.parse(integrity, { single: true })
  // contentPath is the *strongest* algo given
  return path.join.apply(path, [
    contentDir(cache),
    sri.algorithm
  ].concat(hashToSegments(sri.hexDigest())))
}

module.exports._contentDir = contentDir
function contentDir (cache) {
  return path.join(cache, `content-v${contentVer}`)
}
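
For context, a minimal usage sketch (not part of the diff): assuming a script at the repository root plus a placeholder cache directory and integrity string, contentPath maps a digest onto the content-v2 layout described in the comment above.

// Usage sketch only; the cache directory and digest below are placeholders.
const contentPath = require('./node_modules/cacache/lib/content/path')

// Prints something like: /tmp/my-cache/content-v2/sha512/<2 hex chars>/<2 hex chars>/<rest of hex digest>
console.log(contentPath('/tmp/my-cache', 'sha512-deadbeefDEADBEEF'))
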
node_modules/cacache/lib/content/read.js (generated, vendored, normal file, 195 lines added)

@@ -0,0 +1,195 @@
'use strict'

const BB = require('bluebird')

const contentPath = require('./path')
const figgyPudding = require('figgy-pudding')
const fs = require('graceful-fs')
const PassThrough = require('stream').PassThrough
const pipe = BB.promisify(require('mississippi').pipe)
const ssri = require('ssri')
const Y = require('../util/y.js')

const lstatAsync = BB.promisify(fs.lstat)
const readFileAsync = BB.promisify(fs.readFile)

const ReadOpts = figgyPudding({
  size: {}
})

module.exports = read
function read (cache, integrity, opts) {
  opts = ReadOpts(opts)
  return withContentSri(cache, integrity, (cpath, sri) => {
    return readFileAsync(cpath, null).then(data => {
      if (typeof opts.size === 'number' && opts.size !== data.length) {
        throw sizeError(opts.size, data.length)
      } else if (ssri.checkData(data, sri)) {
        return data
      } else {
        throw integrityError(sri, cpath)
      }
    })
  })
}

module.exports.sync = readSync
function readSync (cache, integrity, opts) {
  opts = ReadOpts(opts)
  return withContentSriSync(cache, integrity, (cpath, sri) => {
    const data = fs.readFileSync(cpath)
    if (typeof opts.size === 'number' && opts.size !== data.length) {
      throw sizeError(opts.size, data.length)
    } else if (ssri.checkData(data, sri)) {
      return data
    } else {
      throw integrityError(sri, cpath)
    }
  })
}

module.exports.stream = readStream
module.exports.readStream = readStream
function readStream (cache, integrity, opts) {
  opts = ReadOpts(opts)
  const stream = new PassThrough()
  withContentSri(cache, integrity, (cpath, sri) => {
    return lstatAsync(cpath).then(stat => ({ cpath, sri, stat }))
  }).then(({ cpath, sri, stat }) => {
    return pipe(
      fs.createReadStream(cpath),
      ssri.integrityStream({
        integrity: sri,
        size: opts.size
      }),
      stream
    )
  }).catch(err => {
    stream.emit('error', err)
  })
  return stream
}

let copyFileAsync
if (fs.copyFile) {
  module.exports.copy = copy
  module.exports.copy.sync = copySync
  copyFileAsync = BB.promisify(fs.copyFile)
}

function copy (cache, integrity, dest, opts) {
  opts = ReadOpts(opts)
  return withContentSri(cache, integrity, (cpath, sri) => {
    return copyFileAsync(cpath, dest)
  })
}

function copySync (cache, integrity, dest, opts) {
  opts = ReadOpts(opts)
  return withContentSriSync(cache, integrity, (cpath, sri) => {
    return fs.copyFileSync(cpath, dest)
  })
}

module.exports.hasContent = hasContent
function hasContent (cache, integrity) {
  if (!integrity) { return BB.resolve(false) }
  return withContentSri(cache, integrity, (cpath, sri) => {
    return lstatAsync(cpath).then(stat => ({ size: stat.size, sri, stat }))
  }).catch(err => {
    if (err.code === 'ENOENT') { return false }
    if (err.code === 'EPERM') {
      if (process.platform !== 'win32') {
        throw err
      } else {
        return false
      }
    }
  })
}

module.exports.hasContent.sync = hasContentSync
function hasContentSync (cache, integrity) {
  if (!integrity) { return false }
  return withContentSriSync(cache, integrity, (cpath, sri) => {
    try {
      const stat = fs.lstatSync(cpath)
      return { size: stat.size, sri, stat }
    } catch (err) {
      if (err.code === 'ENOENT') { return false }
      if (err.code === 'EPERM') {
        if (process.platform !== 'win32') {
          throw err
        } else {
          return false
        }
      }
    }
  })
}

function withContentSri (cache, integrity, fn) {
  return BB.try(() => {
    const sri = ssri.parse(integrity)
    // If `integrity` has multiple entries, pick the first digest
    // with available local data.
    const algo = sri.pickAlgorithm()
    const digests = sri[algo]
    if (digests.length <= 1) {
      const cpath = contentPath(cache, digests[0])
      return fn(cpath, digests[0])
    } else {
      return BB.any(sri[sri.pickAlgorithm()].map(meta => {
        return withContentSri(cache, meta, fn)
      }, { concurrency: 1 }))
        .catch(err => {
          if ([].some.call(err, e => e.code === 'ENOENT')) {
            throw Object.assign(
              new Error('No matching content found for ' + sri.toString()),
              { code: 'ENOENT' }
            )
          } else {
            throw err[0]
          }
        })
    }
  })
}

function withContentSriSync (cache, integrity, fn) {
  const sri = ssri.parse(integrity)
  // If `integrity` has multiple entries, pick the first digest
  // with available local data.
  const algo = sri.pickAlgorithm()
  const digests = sri[algo]
  if (digests.length <= 1) {
    const cpath = contentPath(cache, digests[0])
    return fn(cpath, digests[0])
  } else {
    let lastErr = null
    for (const meta of sri[sri.pickAlgorithm()]) {
      try {
        return withContentSriSync(cache, meta, fn)
      } catch (err) {
        lastErr = err
      }
    }
    if (lastErr) { throw lastErr }
  }
}

function sizeError (expected, found) {
  var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
  err.expected = expected
  err.found = found
  err.code = 'EBADSIZE'
  return err
}

function integrityError (sri, path) {
  var err = new Error(Y`Integrity verification failed for ${sri} (${path})`)
  err.code = 'EINTEGRITY'
  err.sri = sri
  err.path = path
  return err
}
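
A short usage sketch (not part of the diff), again assuming a root-level script, a placeholder cache directory, and a digest of content that was previously written into that cache.

// Usage sketch only; the cache path and integrity value are placeholders.
const read = require('./node_modules/cacache/lib/content/read')

const cache = '/tmp/my-cache'
const integrity = 'sha512-BaSE64Hex='   // digest of previously written content

read.hasContent(cache, integrity).then(info => {
  if (!info) { return console.log('not in cache') }
  // read() re-hashes the file and rejects with EINTEGRITY on a mismatch
  return read(cache, integrity, { size: info.size }).then(data => {
    console.log('verified %d bytes', data.length)
  })
})
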
node_modules/cacache/lib/content/rm.js (generated, vendored, normal file, 21 lines added)

@@ -0,0 +1,21 @@
'use strict'

const BB = require('bluebird')

const contentPath = require('./path')
const hasContent = require('./read').hasContent
const rimraf = BB.promisify(require('rimraf'))

module.exports = rm
function rm (cache, integrity) {
  return hasContent(cache, integrity).then(content => {
    if (content) {
      const sri = content.sri
      if (sri) {
        return rimraf(contentPath(cache, sri)).then(() => true)
      }
    } else {
      return false
    }
  })
}
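
Usage sketch (not part of the diff), with placeholder paths and digest:

// Usage sketch only; the cache path and digest are placeholders.
const rm = require('./node_modules/cacache/lib/content/rm')

rm('/tmp/my-cache', 'sha512-BaSE64Hex=').then(removed => {
  // resolves true when the content existed and was deleted, false when it was absent
  console.log(removed ? 'removed' : 'nothing to remove')
})
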
node_modules/cacache/lib/content/write.js (generated, vendored, normal file, 164 lines added)

@@ -0,0 +1,164 @@
'use strict'

const BB = require('bluebird')

const contentPath = require('./path')
const fixOwner = require('../util/fix-owner')
const fs = require('graceful-fs')
const moveFile = require('../util/move-file')
const PassThrough = require('stream').PassThrough
const path = require('path')
const pipe = BB.promisify(require('mississippi').pipe)
const rimraf = BB.promisify(require('rimraf'))
const ssri = require('ssri')
const to = require('mississippi').to
const uniqueFilename = require('unique-filename')
const Y = require('../util/y.js')

const writeFileAsync = BB.promisify(fs.writeFile)

module.exports = write
function write (cache, data, opts) {
  opts = opts || {}
  if (opts.algorithms && opts.algorithms.length > 1) {
    throw new Error(
      Y`opts.algorithms only supports a single algorithm for now`
    )
  }
  if (typeof opts.size === 'number' && data.length !== opts.size) {
    return BB.reject(sizeError(opts.size, data.length))
  }
  const sri = ssri.fromData(data, {
    algorithms: opts.algorithms
  })
  if (opts.integrity && !ssri.checkData(data, opts.integrity, opts)) {
    return BB.reject(checksumError(opts.integrity, sri))
  }
  return BB.using(makeTmp(cache, opts), tmp => (
    writeFileAsync(
      tmp.target, data, { flag: 'wx' }
    ).then(() => (
      moveToDestination(tmp, cache, sri, opts)
    ))
  )).then(() => ({ integrity: sri, size: data.length }))
}

module.exports.stream = writeStream
function writeStream (cache, opts) {
  opts = opts || {}
  const inputStream = new PassThrough()
  let inputErr = false
  function errCheck () {
    if (inputErr) { throw inputErr }
  }

  let allDone
  const ret = to((c, n, cb) => {
    if (!allDone) {
      allDone = handleContent(inputStream, cache, opts, errCheck)
    }
    inputStream.write(c, n, cb)
  }, cb => {
    inputStream.end(() => {
      if (!allDone) {
        const e = new Error(Y`Cache input stream was empty`)
        e.code = 'ENODATA'
        return ret.emit('error', e)
      }
      allDone.then(res => {
        res.integrity && ret.emit('integrity', res.integrity)
        res.size !== null && ret.emit('size', res.size)
        cb()
      }, e => {
        ret.emit('error', e)
      })
    })
  })
  ret.once('error', e => {
    inputErr = e
  })
  return ret
}

function handleContent (inputStream, cache, opts, errCheck) {
  return BB.using(makeTmp(cache, opts), tmp => {
    errCheck()
    return pipeToTmp(
      inputStream, cache, tmp.target, opts, errCheck
    ).then(res => {
      return moveToDestination(
        tmp, cache, res.integrity, opts, errCheck
      ).then(() => res)
    })
  })
}

function pipeToTmp (inputStream, cache, tmpTarget, opts, errCheck) {
  return BB.resolve().then(() => {
    let integrity
    let size
    const hashStream = ssri.integrityStream({
      integrity: opts.integrity,
      algorithms: opts.algorithms,
      size: opts.size
    }).on('integrity', s => {
      integrity = s
    }).on('size', s => {
      size = s
    })
    const outStream = fs.createWriteStream(tmpTarget, {
      flags: 'wx'
    })
    errCheck()
    return pipe(inputStream, hashStream, outStream).then(() => {
      return { integrity, size }
    }).catch(err => {
      return rimraf(tmpTarget).then(() => { throw err })
    })
  })
}

function makeTmp (cache, opts) {
  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
  return fixOwner.mkdirfix(
    cache, path.dirname(tmpTarget)
  ).then(() => ({
    target: tmpTarget,
    moved: false
  })).disposer(tmp => (!tmp.moved && rimraf(tmp.target)))
}

function moveToDestination (tmp, cache, sri, opts, errCheck) {
  errCheck && errCheck()
  const destination = contentPath(cache, sri)
  const destDir = path.dirname(destination)

  return fixOwner.mkdirfix(
    cache, destDir
  ).then(() => {
    errCheck && errCheck()
    return moveFile(tmp.target, destination)
  }).then(() => {
    errCheck && errCheck()
    tmp.moved = true
    return fixOwner.chownr(cache, destination)
  })
}

function sizeError (expected, found) {
  var err = new Error(Y`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
  err.expected = expected
  err.found = found
  err.code = 'EBADSIZE'
  return err
}

function checksumError (expected, found) {
  var err = new Error(Y`Integrity check failed:
  Wanted: ${expected}
   Found: ${found}`)
  err.code = 'EINTEGRITY'
  err.expected = expected
  err.found = found
  return err
}
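
Closing usage sketch (not part of the diff), with a placeholder cache path and input file; it exercises both the buffer and the stream entry points shown above.

// Usage sketch only; the cache path and input file are placeholders.
const write = require('./node_modules/cacache/lib/content/write')
const fs = require('fs')

const cache = '/tmp/my-cache'

// Buffer form: resolves with the computed integrity and the byte count.
write(cache, Buffer.from('hello, cache'))
  .then(({ integrity, size }) => console.log(integrity.toString(), size))

// Stream form: emits 'integrity' and 'size' once the data has been
// hashed and moved into place.
const ws = write.stream(cache, {})
ws.on('integrity', i => console.log('stored as', i.toString()))
ws.on('error', err => console.error(err))
fs.createReadStream('./some-input-file').pipe(ws)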