1
0
Fork 0
mirror of https://github.com/shimataro/ssh-key-action.git synced 2025-06-19 22:52:10 +10:00

* first action! (#1)

This commit is contained in:
shimataro 2019-09-18 20:39:54 +09:00 committed by GitHub
parent 8deacc95b1
commit ace1e6a69a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
3750 changed files with 1155519 additions and 0 deletions

9
node_modules/got/license generated vendored Normal file
View file

@ -0,0 +1,9 @@
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

106
node_modules/got/package.json generated vendored Normal file
View file

@ -0,0 +1,106 @@
{
"_from": "got@^9.6.0",
"_id": "got@9.6.0",
"_inBundle": false,
"_integrity": "sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==",
"_location": "/got",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "got@^9.6.0",
"name": "got",
"escapedName": "got",
"rawSpec": "^9.6.0",
"saveSpec": null,
"fetchSpec": "^9.6.0"
},
"_requiredBy": [
"/package-json"
],
"_resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz",
"_shasum": "edf45e7d67f99545705de1f7bbeeeb121765ed85",
"_spec": "got@^9.6.0",
"_where": "/home/shimataro/projects/actions/ssh-key-action/node_modules/package-json",
"ava": {
"concurrency": 4
},
"browser": {
"decompress-response": false,
"electron": false
},
"bugs": {
"url": "https://github.com/sindresorhus/got/issues"
},
"bundleDependencies": false,
"dependencies": {
"@sindresorhus/is": "^0.14.0",
"@szmarczak/http-timer": "^1.1.2",
"cacheable-request": "^6.0.0",
"decompress-response": "^3.3.0",
"duplexer3": "^0.1.4",
"get-stream": "^4.1.0",
"lowercase-keys": "^1.0.1",
"mimic-response": "^1.0.1",
"p-cancelable": "^1.0.0",
"to-readable-stream": "^1.0.0",
"url-parse-lax": "^3.0.0"
},
"deprecated": false,
"description": "Simplified HTTP requests",
"devDependencies": {
"ava": "^1.1.0",
"coveralls": "^3.0.0",
"delay": "^4.1.0",
"form-data": "^2.3.3",
"get-port": "^4.0.0",
"np": "^3.1.0",
"nyc": "^13.1.0",
"p-event": "^2.1.0",
"pem": "^1.13.2",
"proxyquire": "^2.0.1",
"sinon": "^7.2.2",
"slow-stream": "0.0.4",
"tempfile": "^2.0.0",
"tempy": "^0.2.1",
"tough-cookie": "^3.0.0",
"xo": "^0.24.0"
},
"engines": {
"node": ">=8.6"
},
"files": [
"source"
],
"homepage": "https://github.com/sindresorhus/got#readme",
"keywords": [
"http",
"https",
"get",
"got",
"url",
"uri",
"request",
"util",
"utility",
"simple",
"curl",
"wget",
"fetch",
"net",
"network",
"electron"
],
"license": "MIT",
"main": "source",
"name": "got",
"repository": {
"type": "git",
"url": "git+https://github.com/sindresorhus/got.git"
},
"scripts": {
"release": "np",
"test": "xo && nyc ava"
},
"version": "9.6.0"
}

1237
node_modules/got/readme.md generated vendored Normal file

File diff suppressed because it is too large Load diff

108
node_modules/got/source/as-promise.js generated vendored Normal file
View file

@ -0,0 +1,108 @@
'use strict';
const EventEmitter = require('events');
const getStream = require('get-stream');
const is = require('@sindresorhus/is');
const PCancelable = require('p-cancelable');
const requestAsEventEmitter = require('./request-as-event-emitter');
const {HTTPError, ParseError, ReadError} = require('./errors');
const {options: mergeOptions} = require('./merge');
const {reNormalize} = require('./normalize-arguments');

// Runs a request in "promise mode": wraps the request event emitter in a
// cancelable promise that resolves with the response (body fully buffered)
// or rejects with a Got error. Lifecycle events are re-emitted through
// `proxy` so callers can still listen via `promise.on(...)`.
const asPromise = options => {
	const proxy = new EventEmitter();
	const promise = new PCancelable((resolve, reject, onCancel) => {
		const emitter = requestAsEventEmitter(options);

		// Canceling the promise aborts the underlying request.
		onCancel(emitter.abort);

		emitter.on('response', async response => {
			proxy.emit('response', response);

			// Buffer the whole body; a `null` encoding means "give me a Buffer".
			const stream = is.null(options.encoding) ? getStream.buffer(response) : getStream(response, options);

			let data;
			try {
				data = await stream;
			} catch (error) {
				reject(new ReadError(error, options));
				return;
			}

			// When redirects are followed, leftover 3xx responses count as errors too.
			const limitStatusCode = options.followRedirect ? 299 : 399;

			response.body = data;

			try {
				// `afterResponse` hooks may replace the response, optionally by
				// issuing a follow-up request through the second argument.
				for (const [index, hook] of Object.entries(options.hooks.afterResponse)) {
					// eslint-disable-next-line no-await-in-loop
					response = await hook(response, updatedOptions => {
						updatedOptions = reNormalize(mergeOptions(options, {
							...updatedOptions,
							retry: 0,
							throwHttpErrors: false
						}));

						// Remove any further hooks for that request, because we'll call them anyway.
						// The loop continues. We don't want duplicates (asPromise recursion).
						updatedOptions.hooks.afterResponse = options.hooks.afterResponse.slice(0, index);

						return asPromise(updatedOptions);
					});
				}
			} catch (error) {
				reject(error);
				return;
			}

			const {statusCode} = response;

			if (options.json && response.body) {
				try {
					response.body = JSON.parse(response.body);
				} catch (error) {
					// Only surface a parse failure for otherwise-successful
					// responses; for error statuses the HTTPError below is more useful.
					if (statusCode >= 200 && statusCode < 300) {
						const parseError = new ParseError(error, statusCode, options, data);
						Object.defineProperty(parseError, 'response', {value: response});
						reject(parseError);
						return;
					}
				}
			}

			if (statusCode !== 304 && (statusCode < 200 || statusCode > limitStatusCode)) {
				const error = new HTTPError(response, options);
				Object.defineProperty(error, 'response', {value: response});
				// Give the retry logic a chance first; only settle when it declines.
				if (emitter.retry(error) === false) {
					if (options.throwHttpErrors) {
						reject(error);
						return;
					}

					resolve(response);
				}

				return;
			}

			resolve(response);
		});

		emitter.once('error', reject);
		// Forward request lifecycle events to the proxy emitter.
		[
			'request',
			'redirect',
			'uploadProgress',
			'downloadProgress'
		].forEach(event => emitter.on(event, (...args) => proxy.emit(event, ...args)));
	});

	// Allow `got(...).on('event', fn)` chaining on the returned promise.
	promise.on = (name, fn) => {
		proxy.on(name, fn);
		return promise;
	};

	return promise;
};

module.exports = asPromise;

93
node_modules/got/source/as-stream.js generated vendored Normal file
View file

@ -0,0 +1,93 @@
'use strict';
const {PassThrough} = require('stream');
const duplexer3 = require('duplexer3');
const requestAsEventEmitter = require('./request-as-event-emitter');
const {HTTPError, ReadError} = require('./errors');

// Runs a request in "stream mode": returns a duplex stream whose writable
// side feeds the request body and whose readable side emits the response
// body. Retries are disabled in this mode (the request body stream may be
// consumed already).
module.exports = options => {
	const input = new PassThrough();
	const output = new PassThrough();
	const proxy = duplexer3(input, output);
	// Destinations piped to before the response arrives; they get the
	// response headers/status mirrored onto them once available.
	const piped = new Set();
	let isFinished = false;

	// Streams cannot be retried, so force the retry count to zero.
	options.retry.retries = () => 0;

	if (options.body) {
		proxy.write = () => {
			throw new Error('Got\'s stream is not writable when the `body` option is used');
		};
	}

	const emitter = requestAsEventEmitter(options, input);

	// Cancels the request
	proxy._destroy = emitter.abort;

	emitter.on('response', response => {
		const {statusCode} = response;

		response.on('error', error => {
			proxy.emit('error', new ReadError(error, options));
		});

		if (options.throwHttpErrors && statusCode !== 304 && (statusCode < 200 || statusCode > 299)) {
			proxy.emit('error', new HTTPError(response, options), null, response);
			return;
		}

		isFinished = true;

		response.pipe(output);

		// Mirror status code and headers onto piped server responses.
		for (const destination of piped) {
			if (destination.headersSent) {
				continue;
			}

			for (const [key, value] of Object.entries(response.headers)) {
				// Got gives *decompressed* data. Overriding `content-encoding` header would result in an error.
				// It's not possible to decompress already decompressed data, is it?
				const allowed = options.decompress ? key !== 'content-encoding' : true;
				if (allowed) {
					destination.setHeader(key, value);
				}
			}

			destination.statusCode = response.statusCode;
		}

		proxy.emit('response', response);
	});

	// Forward request lifecycle events from the internal emitter.
	[
		'error',
		'request',
		'redirect',
		'uploadProgress',
		'downloadProgress'
	].forEach(event => emitter.on(event, (...args) => proxy.emit(event, ...args)));

	const pipe = proxy.pipe.bind(proxy);
	const unpipe = proxy.unpipe.bind(proxy);

	// Track pipe targets that look like server responses (have `setHeader`)
	// so headers can be copied onto them when the response arrives.
	proxy.pipe = (destination, options) => {
		if (isFinished) {
			throw new Error('Failed to pipe. The response has been emitted already.');
		}

		const result = pipe(destination, options);

		if (Reflect.has(destination, 'setHeader')) {
			piped.add(destination);
		}

		return result;
	};

	proxy.unpipe = stream => {
		piped.delete(stream);
		return unpipe(stream);
	};

	return proxy;
};

79
node_modules/got/source/create.js generated vendored Normal file
View file

@ -0,0 +1,79 @@
'use strict';
const errors = require('./errors');
const asStream = require('./as-stream');
const asPromise = require('./as-promise');
const normalizeArguments = require('./normalize-arguments');
const merge = require('./merge');
const deepFreeze = require('./utils/deep-freeze');

// Dispatch to stream or promise mode based on normalized options.
const getPromiseOrStream = options => options.stream ? asStream(options) : asPromise(options);

// HTTP methods exposed as `got.<method>()` / `got.stream.<method>()` shortcuts.
const aliases = [
	'get',
	'post',
	'put',
	'patch',
	'head',
	'delete'
];

// Builds a `got` instance from a defaults object
// ({options, handler, mutableDefaults}).
const create = defaults => {
	// Work on a deep copy so the caller's defaults object is not mutated.
	defaults = merge({}, defaults);
	normalizeArguments.preNormalize(defaults.options);

	if (!defaults.handler) {
		// This can't be getPromiseOrStream, because when merging
		// the chain would stop at this point and no further handlers would be called.
		defaults.handler = (options, next) => next(options);
	}

	function got(url, options) {
		try {
			return defaults.handler(normalizeArguments(url, options, defaults), getPromiseOrStream);
		} catch (error) {
			// In stream mode there is no promise to reject, so rethrow synchronously.
			if (options && options.stream) {
				throw error;
			} else {
				return Promise.reject(error);
			}
		}
	}

	got.create = create;

	// Returns a new instance with `options` merged on top of this
	// instance's defaults.
	got.extend = options => {
		let mutableDefaults;
		if (options && Reflect.has(options, 'mutableDefaults')) {
			mutableDefaults = options.mutableDefaults;
			delete options.mutableDefaults;
		} else {
			mutableDefaults = defaults.mutableDefaults;
		}

		return create({
			options: merge.options(defaults.options, options),
			handler: defaults.handler,
			mutableDefaults
		});
	};

	// Combines several got instances (options and handler chains) into one.
	got.mergeInstances = (...args) => create(merge.instances(args));

	got.stream = (url, options) => got(url, {...options, stream: true});

	for (const method of aliases) {
		got[method] = (url, options) => got(url, {...options, method});
		got.stream[method] = (url, options) => got.stream(url, {...options, method});
	}

	// Expose error classes and the options merger on the instance itself.
	Object.assign(got, {...errors, mergeOptions: merge.options});
	// Freeze defaults unless the instance was created with `mutableDefaults: true`.
	Object.defineProperty(got, 'defaults', {
		value: defaults.mutableDefaults ? defaults : deepFreeze(defaults),
		writable: defaults.mutableDefaults,
		configurable: defaults.mutableDefaults,
		enumerable: true
	});

	return got;
};

module.exports = create;

107
node_modules/got/source/errors.js generated vendored Normal file
View file

@ -0,0 +1,107 @@
'use strict';
const urlLib = require('url');
const http = require('http');
const PCancelable = require('p-cancelable');
const is = require('@sindresorhus/is');

/**
 * Base class for every error Got produces.
 *
 * Copies the low-level error `code` (when present) and a snapshot of the
 * request target (host, method, path, …) onto the instance for debugging.
 */
class GotError extends Error {
	constructor(message, error, options) {
		super(message);
		Error.captureStackTrace(this, this.constructor);
		this.name = 'GotError';

		if (!is.undefined(error.code)) {
			this.code = error.code;
		}

		Object.assign(this, {
			host: options.host,
			hostname: options.hostname,
			method: options.method,
			path: options.path,
			socketPath: options.socketPath,
			protocol: options.protocol,
			url: options.href,
			gotOptions: options
		});
	}
}

// The cache layer failed.
class CacheError extends GotError {
	constructor(error, options) {
		super(error.message, error, options);
		this.name = 'CacheError';
	}
}

// The request itself failed before a response was received.
class RequestError extends GotError {
	constructor(error, options) {
		super(error.message, error, options);
		this.name = 'RequestError';
	}
}

// Reading the response body failed.
class ReadError extends GotError {
	constructor(error, options) {
		super(error.message, error, options);
		this.name = 'ReadError';
	}
}

// The response body could not be parsed; the message includes the
// formatted URL and the first 77 characters of the body.
class ParseError extends GotError {
	constructor(error, statusCode, options, data) {
		const excerpt = data.slice(0, 77);
		super(`${error.message} in "${urlLib.format(options)}": \n${excerpt}...`, error, options);
		this.name = 'ParseError';
		this.statusCode = statusCode;
		this.statusMessage = http.STATUS_CODES[this.statusCode];
	}
}

// The server answered with an unsuccessful status code.
class HTTPError extends GotError {
	constructor(response, options) {
		const {statusCode} = response;
		// Collapse any newlines in the server-provided status message,
		// falling back to the standard reason phrase.
		const statusMessage = response.statusMessage ?
			response.statusMessage.replace(/\r?\n/g, ' ').trim() :
			http.STATUS_CODES[statusCode];
		super(`Response code ${statusCode} (${statusMessage})`, {}, options);
		this.name = 'HTTPError';
		this.statusCode = statusCode;
		this.statusMessage = statusMessage;
		this.headers = response.headers;
		this.body = response.body;
	}
}

// Too many redirects were followed.
class MaxRedirectsError extends GotError {
	constructor(statusCode, redirectUrls, options) {
		super('Redirected 10 times. Aborting.', {}, options);
		this.name = 'MaxRedirectsError';
		this.statusCode = statusCode;
		this.statusMessage = http.STATUS_CODES[this.statusCode];
		this.redirectUrls = redirectUrls;
	}
}

// The URL used a protocol Got does not support.
class UnsupportedProtocolError extends GotError {
	constructor(options) {
		super(`Unsupported protocol "${options.protocol}"`, {}, options);
		this.name = 'UnsupportedProtocolError';
	}
}

// A configured timeout elapsed; always carries code ETIMEDOUT.
class TimeoutError extends GotError {
	constructor(error, options) {
		super(error.message, {code: 'ETIMEDOUT'}, options);
		this.name = 'TimeoutError';
		this.event = error.event;
	}
}

Object.assign(module.exports, {
	GotError,
	CacheError,
	RequestError,
	ReadError,
	ParseError,
	HTTPError,
	MaxRedirectsError,
	UnsupportedProtocolError,
	TimeoutError,
	CancelError: PCancelable.CancelError
});

31
node_modules/got/source/get-response.js generated vendored Normal file
View file

@ -0,0 +1,31 @@
'use strict';
const decompressResponse = require('decompress-response');
const is = require('@sindresorhus/is');
const mimicResponse = require('mimic-response');
const progress = require('./progress');

// Wires a raw HTTP response through a download-progress stream, optionally
// decompressing it, and emits the result as the `response` event.
module.exports = (response, options, emitter) => {
	// `null` when the server sent no (or an unparsable) content-length.
	const downloadBodySize = Number(response.headers['content-length']) || null;

	// Pass-through Transform that reports `downloadProgress` as data flows.
	const progressStream = progress.download(response, emitter, downloadBodySize);

	// Copy status/headers/etc. from the raw response onto the progress stream.
	mimicResponse(response, progressStream);

	const newResponse = options.decompress === true &&
		is.function(decompressResponse) &&
		options.method !== 'HEAD' ? decompressResponse(progressStream) : progressStream;

	// If the caller opted out of decompression, deliver compressed bodies as
	// raw bytes (Buffer) rather than a mangled string.
	if (!options.decompress && ['gzip', 'deflate'].includes(response.headers['content-encoding'])) {
		options.encoding = null;
	}

	emitter.emit('response', newResponse);

	// Initial zero-progress event so listeners always get at least one.
	emitter.emit('downloadProgress', {
		percent: 0,
		transferred: 0,
		total: downloadBodySize
	});

	response.pipe(progressStream);
};

60
node_modules/got/source/index.js generated vendored Normal file
View file

@ -0,0 +1,60 @@
'use strict';
const pkg = require('../package.json');
const create = require('./create');

// Defaults for the out-of-the-box `got` instance.
const defaults = {
	options: {
		retry: {
			retries: 2,
			// Only idempotent methods are retried by default.
			methods: [
				'GET',
				'PUT',
				'HEAD',
				'DELETE',
				'OPTIONS',
				'TRACE'
			],
			// Status codes treated as transient failures.
			statusCodes: [
				408,
				413,
				429,
				500,
				502,
				503,
				504
			],
			// Network-level error codes treated as transient failures.
			errorCodes: [
				'ETIMEDOUT',
				'ECONNRESET',
				'EADDRINUSE',
				'ECONNREFUSED',
				'EPIPE',
				'ENOTFOUND',
				'ENETUNREACH',
				'EAI_AGAIN'
			]
		},
		headers: {
			'user-agent': `${pkg.name}/${pkg.version} (https://github.com/sindresorhus/got)`
		},
		// Hook arrays start empty; users add hooks via options or instances.
		hooks: {
			beforeRequest: [],
			beforeRedirect: [],
			beforeRetry: [],
			afterResponse: []
		},
		decompress: true,
		throwHttpErrors: true,
		followRedirect: true,
		stream: false,
		form: false,
		json: false,
		cache: false,
		useElectronNet: false
	},
	mutableDefaults: false
};

// The default got instance (defaults are deep-frozen by `create`).
const got = create(defaults);

module.exports = got;

10
node_modules/got/source/known-hook-events.js generated vendored Normal file
View file

@ -0,0 +1,10 @@
'use strict';

// Names of every supported hook event. Order and contents are part of the
// public contract — `merge` and `normalize-arguments` iterate this list to
// build per-event hook arrays.
module.exports = [
	// Called with an error before it is emitted; may replace the error.
	'beforeError',
	// Called with raw options before normalization; must be synchronous.
	'init',
	'beforeRequest',
	// Called with the redirect options before a redirect is followed.
	'beforeRedirect',
	'beforeRetry',
	// Called with the response; may replace it (see as-promise).
	'afterResponse'
];

73
node_modules/got/source/merge.js generated vendored Normal file
View file

@ -0,0 +1,73 @@
'use strict';
const {URL} = require('url');
const is = require('@sindresorhus/is');
const knownHookEvents = require('./known-hook-events');

// Deeply merges `sources` into `target` (later sources win). Plain objects
// and arrays are cloned recursively, URL targets are resolved against
// string/URL sources, and `undefined` source values never overwrite.
const merge = (target, ...sources) => {
	for (const source of sources) {
		for (const [key, sourceValue] of Object.entries(source)) {
			if (is.undefined(sourceValue)) {
				continue;
			}

			const targetValue = target[key];
			if (is.urlInstance(targetValue) && (is.urlInstance(sourceValue) || is.string(sourceValue))) {
				// Resolve the new URL relative to the existing one.
				target[key] = new URL(sourceValue, targetValue);
			} else if (is.plainObject(sourceValue)) {
				if (is.plainObject(targetValue)) {
					target[key] = merge({}, targetValue, sourceValue);
				} else {
					target[key] = merge({}, sourceValue);
				}
			} else if (is.array(sourceValue)) {
				// Arrays replace the target value (as a copy), not concatenate.
				target[key] = merge([], sourceValue);
			} else {
				target[key] = sourceValue;
			}
		}
	}

	return target;
};

// Merges options objects. Unlike plain `merge`, hook arrays from all
// sources are concatenated per event instead of replaced.
const mergeOptions = (...sources) => {
	sources = sources.map(source => source || {});
	const merged = merge({}, ...sources);

	const hooks = {};
	for (const hook of knownHookEvents) {
		hooks[hook] = [];
	}

	for (const source of sources) {
		if (source.hooks) {
			for (const hook of knownHookEvents) {
				hooks[hook] = hooks[hook].concat(source.hooks[hook]);
			}
		}
	}

	merged.hooks = hooks;
	return merged;
};

// Combines several got instances into one defaults object whose handler
// chains each instance's handler in order.
const mergeInstances = (instances, methods) => {
	const handlers = instances.map(instance => instance.defaults.handler);
	const size = instances.length - 1;

	return {
		methods,
		options: mergeOptions(...instances.map(instance => instance.defaults.options)),
		handler: (options, next) => {
			let iteration = -1;
			// Each handler calls the next one; the last gets the real `next`.
			const iterate = options => handlers[++iteration](options, iteration === size ? next : iterate);

			return iterate(options);
		}
	};
};

module.exports = merge;
module.exports.options = mergeOptions;
module.exports.instances = mergeInstances;

265
node_modules/got/source/normalize-arguments.js generated vendored Normal file
View file

@ -0,0 +1,265 @@
'use strict';
const {URL, URLSearchParams} = require('url'); // TODO: Use the `URL` global when targeting Node.js 10
const urlLib = require('url');
const is = require('@sindresorhus/is');
const urlParseLax = require('url-parse-lax');
const lowercaseKeys = require('lowercase-keys');
const urlToOptions = require('./utils/url-to-options');
const isFormData = require('./utils/is-form-data');
const merge = require('./merge');
const knownHookEvents = require('./known-hook-events');

// Status codes whose `Retry-After` header is honored by the retry logic.
const retryAfterStatusCodes = new Set([413, 429, 503]);

// `preNormalize` handles static options (e.g. headers).
// For example, when you create a custom instance and make a request
// with no static changes, they won't be normalized again.
//
// `normalize` operates on dynamic options - they cannot be saved.
// For example, `body` is everytime different per request.
// When it's done normalizing the new options, it performs merge()
// on the prenormalized options and the normalized ones.
const preNormalize = (options, defaults) => {
	// Header names are case-insensitive; store them lowercased.
	if (is.nullOrUndefined(options.headers)) {
		options.headers = {};
	} else {
		options.headers = lowercaseKeys(options.headers);
	}

	// Ensure a trailing slash so `new URL(path, baseUrl)` resolves as expected.
	if (options.baseUrl && !options.baseUrl.toString().endsWith('/')) {
		options.baseUrl += '/';
	}

	// Streams deliver raw data; JSON parsing does not apply.
	if (options.stream) {
		options.json = false;
	}

	if (is.nullOrUndefined(options.hooks)) {
		options.hooks = {};
	} else if (!is.object(options.hooks)) {
		throw new TypeError(`Parameter \`hooks\` must be an object, not ${is(options.hooks)}`);
	}

	// Fill in missing hook arrays, inheriting from defaults when available.
	for (const event of knownHookEvents) {
		if (is.nullOrUndefined(options.hooks[event])) {
			if (defaults) {
				options.hooks[event] = [...defaults.hooks[event]];
			} else {
				options.hooks[event] = [];
			}
		}
	}

	// `timeout` (number or object) is stored internally as `gotTimeout`.
	if (is.number(options.timeout)) {
		options.gotTimeout = {request: options.timeout};
	} else if (is.object(options.timeout)) {
		options.gotTimeout = options.timeout;
	}

	delete options.timeout;

	// Normalize `retry` (false, number, or object) into a full retry object.
	const {retry} = options;
	options.retry = {
		retries: 0,
		methods: [],
		statusCodes: [],
		errorCodes: []
	};

	if (is.nonEmptyObject(defaults) && retry !== false) {
		options.retry = {...defaults.retry};
	}

	if (retry !== false) {
		if (is.number(retry)) {
			options.retry.retries = retry;
		} else {
			options.retry = {...options.retry, ...retry};
		}
	}

	// Cap `Retry-After` waits by the tightest configured timeout.
	if (options.gotTimeout) {
		options.retry.maxRetryAfter = Math.min(...[options.gotTimeout.request, options.gotTimeout.connection].filter(n => !is.nullOrUndefined(n)));
	}

	// Convert retry whitelists to Sets for O(1) lookups (methods uppercased).
	if (is.array(options.retry.methods)) {
		options.retry.methods = new Set(options.retry.methods.map(method => method.toUpperCase()));
	}

	if (is.array(options.retry.statusCodes)) {
		options.retry.statusCodes = new Set(options.retry.statusCodes);
	}

	if (is.array(options.retry.errorCodes)) {
		options.retry.errorCodes = new Set(options.retry.errorCodes);
	}

	return options;
};

// Normalizes per-request arguments (`url` plus dynamic options) into the
// final options object used to perform the request.
const normalize = (url, options, defaults) => {
	// `got({url, ...})` form: pull the URL out of the options object.
	if (is.plainObject(url)) {
		options = {...url, ...options};
		url = options.url || {};
		delete options.url;
	}

	if (defaults) {
		options = merge({}, defaults.options, options ? preNormalize(options, defaults.options) : {});
	} else {
		options = merge({}, preNormalize(options));
	}

	if (!is.string(url) && !is.object(url)) {
		throw new TypeError(`Parameter \`url\` must be a string or object, not ${is(url)}`);
	}

	if (is.string(url)) {
		if (options.baseUrl) {
			// A leading slash would make `new URL` drop the baseUrl path.
			if (url.toString().startsWith('/')) {
				url = url.toString().slice(1);
			}

			url = urlToOptions(new URL(url, options.baseUrl));
		} else {
			// `unix:` URLs get an `http://` prefix so they parse as HTTP.
			url = url.replace(/^unix:/, 'http://$&');
			url = urlParseLax(url);
		}
	} else if (is(url) === 'URL') {
		url = urlToOptions(url);
	}

	// Override both null/undefined with default protocol
	options = merge({path: ''}, url, {protocol: url.protocol || 'https:'}, options);

	// `init` hooks may mutate the options but must not return a promise.
	for (const hook of options.hooks.init) {
		const called = hook(options);
		if (is.promise(called)) {
			throw new TypeError('The `init` hook must be a synchronous function');
		}
	}

	// `baseUrl` has already been applied; freeze it against later writes.
	const {baseUrl} = options;
	Object.defineProperty(options, 'baseUrl', {
		set: () => {
			throw new Error('Failed to set baseUrl. Options are normalized already.');
		},
		get: () => baseUrl
	});

	// Serialize `query` (string / object / URLSearchParams) into the path.
	const {query} = options;
	if (is.nonEmptyString(query) || is.nonEmptyObject(query) || query instanceof URLSearchParams) {
		if (!is.string(query)) {
			options.query = (new URLSearchParams(query)).toString();
		}

		options.path = `${options.path.split('?')[0]}?${options.query}`;
		delete options.query;
	}

	// `unix:/socket/path:/request/path` → socketPath + path.
	if (options.hostname === 'unix') {
		const matches = /(.+?):(.+)/.exec(options.path);

		if (matches) {
			const [, socketPath, path] = matches;
			options = {
				...options,
				socketPath,
				path,
				host: null
			};
		}
	}

	// Drop headers explicitly set to null/undefined.
	const {headers} = options;
	for (const [key, value] of Object.entries(headers)) {
		if (is.nullOrUndefined(value)) {
			delete headers[key];
		}
	}

	if (options.json && is.undefined(headers.accept)) {
		headers.accept = 'application/json';
	}

	if (options.decompress && is.undefined(headers['accept-encoding'])) {
		headers['accept-encoding'] = 'gzip, deflate';
	}

	// Validate and serialize the body; the default method depends on whether
	// a body is present (GET without one, POST with one).
	const {body} = options;
	if (is.nullOrUndefined(body)) {
		options.method = options.method ? options.method.toUpperCase() : 'GET';
	} else {
		const isObject = is.object(body) && !is.buffer(body) && !is.nodeStream(body);
		if (!is.nodeStream(body) && !is.string(body) && !is.buffer(body) && !(options.form || options.json)) {
			throw new TypeError('The `body` option must be a stream.Readable, string or Buffer');
		}

		if (options.json && !(isObject || is.array(body))) {
			throw new TypeError('The `body` option must be an Object or Array when the `json` option is used');
		}

		if (options.form && !isObject) {
			throw new TypeError('The `body` option must be an Object when the `form` option is used');
		}

		if (isFormData(body)) {
			// Special case for https://github.com/form-data/form-data
			headers['content-type'] = headers['content-type'] || `multipart/form-data; boundary=${body.getBoundary()}`;
		} else if (options.form) {
			headers['content-type'] = headers['content-type'] || 'application/x-www-form-urlencoded';
			options.body = (new URLSearchParams(body)).toString();
		} else if (options.json) {
			headers['content-type'] = headers['content-type'] || 'application/json';
			options.body = JSON.stringify(body);
		}

		options.method = options.method ? options.method.toUpperCase() : 'POST';
	}

	// Turn a numeric `retries` into the backoff function used by the request
	// emitter: returns the delay in milliseconds, or 0 to stop retrying.
	if (!is.function(options.retry.retries)) {
		const {retries} = options.retry;

		options.retry.retries = (iteration, error) => {
			if (iteration > retries) {
				return 0;
			}

			// Only retry whitelisted error codes, or method+status combinations.
			if ((!error || !options.retry.errorCodes.has(error.code)) && (!options.retry.methods.has(error.method) || !options.retry.statusCodes.has(error.statusCode))) {
				return 0;
			}

			// Honor `Retry-After` (seconds or an HTTP date) when present.
			if (Reflect.has(error, 'headers') && Reflect.has(error.headers, 'retry-after') && retryAfterStatusCodes.has(error.statusCode)) {
				let after = Number(error.headers['retry-after']);
				if (is.nan(after)) {
					after = Date.parse(error.headers['retry-after']) - Date.now();
				} else {
					after *= 1000;
				}

				if (after > options.retry.maxRetryAfter) {
					return 0;
				}

				return after;
			}

			// 413 Payload Too Large without Retry-After is pointless to retry.
			if (error.statusCode === 413) {
				return 0;
			}

			// Exponential backoff with up to 100ms of jitter.
			const noise = Math.random() * 100;
			return ((2 ** (iteration - 1)) * 1000) + noise;
		};
	}

	return options;
};

// Re-normalizes already-normalized options (used after hooks modify them).
const reNormalize = options => normalize(urlLib.format(options), options);

module.exports = normalize;
module.exports.preNormalize = preNormalize;
module.exports.reNormalize = reNormalize;

96
node_modules/got/source/progress.js generated vendored Normal file
View file

@ -0,0 +1,96 @@
'use strict';
const {Transform} = require('stream');

module.exports = {
	// Returns a pass-through Transform that emits `downloadProgress` events
	// on `emitter` as response data flows through it.
	download(response, emitter, downloadBodySize) {
		let downloaded = 0;

		return new Transform({
			transform(chunk, encoding, callback) {
				downloaded += chunk.length;

				// When the total size is unknown, report 0 until flush.
				const percent = downloadBodySize ? downloaded / downloadBodySize : 0;

				// Let `flush()` be responsible for emitting the last event
				if (percent < 1) {
					emitter.emit('downloadProgress', {
						percent,
						transferred: downloaded,
						total: downloadBodySize
					});
				}

				callback(null, chunk);
			},

			flush(callback) {
				// Always finish with a `percent: 1` event.
				emitter.emit('downloadProgress', {
					percent: 1,
					transferred: downloaded,
					total: downloadBodySize
				});

				callback();
			}
		});
	},

	// Polls the request socket to emit `uploadProgress` events on `emitter`.
	upload(request, emitter, uploadBodySize) {
		// Polling interval in milliseconds.
		const uploadEventFrequency = 150;
		let uploaded = 0;
		let progressInterval;

		emitter.emit('uploadProgress', {
			percent: 0,
			transferred: 0,
			total: uploadBodySize
		});

		request.once('error', () => {
			clearInterval(progressInterval);
		});

		// The response arriving means the upload is complete.
		request.once('response', () => {
			clearInterval(progressInterval);

			emitter.emit('uploadProgress', {
				percent: 1,
				transferred: uploaded,
				total: uploadBodySize
			});
		});

		request.once('socket', socket => {
			const onSocketConnect = () => {
				// Progress is derived from `socket.bytesWritten`, minus the
				// size of the request headers already written.
				progressInterval = setInterval(() => {
					const lastUploaded = uploaded;
					/* istanbul ignore next: see #490 (occurs randomly!) */
					const headersSize = request._header ? Buffer.byteLength(request._header) : 0;
					uploaded = socket.bytesWritten - headersSize;

					// Don't emit events with unchanged progress and
					// prevent last event from being emitted, because
					// it's emitted when `response` is emitted
					if (uploaded === lastUploaded || uploaded === uploadBodySize) {
						return;
					}

					emitter.emit('uploadProgress', {
						percent: uploadBodySize ? uploaded / uploadBodySize : 0,
						transferred: uploaded,
						total: uploadBodySize
					});
				}, uploadEventFrequency);
			};

			/* istanbul ignore next: hard to test */
			if (socket.connecting) {
				socket.once('connect', onSocketConnect);
			} else if (socket.writable) {
				// The socket is being reused from pool,
				// so the connect event will not be emitted
				onSocketConnect();
			}
		});
	}
};

312
node_modules/got/source/request-as-event-emitter.js generated vendored Normal file
View file

@ -0,0 +1,312 @@
'use strict';
const {URL} = require('url'); // TODO: Use the `URL` global when targeting Node.js 10
const util = require('util');
const EventEmitter = require('events');
const http = require('http');
const https = require('https');
const urlLib = require('url');
const CacheableRequest = require('cacheable-request');
const toReadableStream = require('to-readable-stream');
const is = require('@sindresorhus/is');
const timer = require('@szmarczak/http-timer');
const timedOut = require('./utils/timed-out');
const getBodySize = require('./utils/get-body-size');
const getResponse = require('./get-response');
const progress = require('./progress');
const {CacheError, UnsupportedProtocolError, MaxRedirectsError, RequestError, TimeoutError} = require('./errors');
const urlToOptions = require('./utils/url-to-options');
const getMethodRedirectCodes = new Set([300, 301, 302, 303, 304, 305, 307, 308]);
const allMethodRedirectCodes = new Set([300, 303, 307, 308]);
module.exports = (options, input) => {
const emitter = new EventEmitter();
const redirects = [];
let currentRequest;
let requestUrl;
let redirectString;
let uploadBodySize;
let retryCount = 0;
let shouldAbort = false;
const setCookie = options.cookieJar ? util.promisify(options.cookieJar.setCookie.bind(options.cookieJar)) : null;
const getCookieString = options.cookieJar ? util.promisify(options.cookieJar.getCookieString.bind(options.cookieJar)) : null;
const agents = is.object(options.agent) ? options.agent : null;
const emitError = async error => {
try {
for (const hook of options.hooks.beforeError) {
// eslint-disable-next-line no-await-in-loop
error = await hook(error);
}
emitter.emit('error', error);
} catch (error2) {
emitter.emit('error', error2);
}
};
const get = async options => {
const currentUrl = redirectString || requestUrl;
if (options.protocol !== 'http:' && options.protocol !== 'https:') {
throw new UnsupportedProtocolError(options);
}
decodeURI(currentUrl);
let fn;
if (is.function(options.request)) {
fn = {request: options.request};
} else {
fn = options.protocol === 'https:' ? https : http;
}
if (agents) {
const protocolName = options.protocol === 'https:' ? 'https' : 'http';
options.agent = agents[protocolName] || options.agent;
}
/* istanbul ignore next: electron.net is broken */
if (options.useElectronNet && process.versions.electron) {
const r = ({x: require})['yx'.slice(1)]; // Trick webpack
const electron = r('electron');
fn = electron.net || electron.remote.net;
}
if (options.cookieJar) {
const cookieString = await getCookieString(currentUrl, {});
if (is.nonEmptyString(cookieString)) {
options.headers.cookie = cookieString;
}
}
let timings;
const handleResponse = async response => {
try {
/* istanbul ignore next: fixes https://github.com/electron/electron/blob/cbb460d47628a7a146adf4419ed48550a98b2923/lib/browser/api/net.js#L59-L65 */
if (options.useElectronNet) {
response = new Proxy(response, {
get: (target, name) => {
if (name === 'trailers' || name === 'rawTrailers') {
return [];
}
const value = target[name];
return is.function(value) ? value.bind(target) : value;
}
});
}
const {statusCode} = response;
response.url = currentUrl;
response.requestUrl = requestUrl;
response.retryCount = retryCount;
response.timings = timings;
response.redirectUrls = redirects;
response.request = {
gotOptions: options
};
const rawCookies = response.headers['set-cookie'];
if (options.cookieJar && rawCookies) {
await Promise.all(rawCookies.map(rawCookie => setCookie(rawCookie, response.url)));
}
if (options.followRedirect && 'location' in response.headers) {
if (allMethodRedirectCodes.has(statusCode) || (getMethodRedirectCodes.has(statusCode) && (options.method === 'GET' || options.method === 'HEAD'))) {
response.resume(); // We're being redirected, we don't care about the response.
if (statusCode === 303) {
// Server responded with "see other", indicating that the resource exists at another location,
// and the client should request it from that location via GET or HEAD.
options.method = 'GET';
}
if (redirects.length >= 10) {
throw new MaxRedirectsError(statusCode, redirects, options);
}
// Handles invalid URLs. See https://github.com/sindresorhus/got/issues/604
const redirectBuffer = Buffer.from(response.headers.location, 'binary').toString();
const redirectURL = new URL(redirectBuffer, currentUrl);
redirectString = redirectURL.toString();
redirects.push(redirectString);
const redirectOptions = {
...options,
...urlToOptions(redirectURL)
};
for (const hook of options.hooks.beforeRedirect) {
// eslint-disable-next-line no-await-in-loop
await hook(redirectOptions);
}
emitter.emit('redirect', response, redirectOptions);
await get(redirectOptions);
return;
}
}
getResponse(response, options, emitter);
} catch (error) {
emitError(error);
}
};
// Wires up a freshly created ClientRequest: error translation, phase timings,
// upload progress, timeouts, and finally writing the request body.
const handleRequest = request => {
	// An abort() call may have arrived before the request object existed.
	if (shouldAbort) {
		request.once('error', () => {});
		request.abort();
		return;
	}

	currentRequest = request;

	request.once('error', error => {
		// request.abort() produces a synthetic error we deliberately ignore.
		if (request.aborted) {
			return;
		}

		// Normalize low-level errors into got's error types.
		if (error instanceof timedOut.TimeoutError) {
			error = new TimeoutError(error, options);
		} else {
			error = new RequestError(error, options);
		}

		// Only surface the error when the retry logic declines to retry.
		if (emitter.retry(error) === false) {
			emitError(error);
		}
	});

	// Start collecting phase timings before any I/O happens.
	timings = timer(request);

	progress.upload(request, emitter, uploadBodySize);

	if (options.gotTimeout) {
		timedOut(request, options.gotTimeout, options);
	}

	emitter.emit('request', request);

	// 'upload-complete' drives the `send` and `response` timeout phases.
	const uploadComplete = () => {
		request.emit('upload-complete');
	};

	try {
		if (is.nodeStream(options.body)) {
			options.body.once('end', uploadComplete);
			options.body.pipe(request);
			// Clear the body so a retry does not re-pipe a consumed stream.
			options.body = undefined;
		} else if (options.body) {
			request.end(options.body, uploadComplete);
		} else if (input && (options.method === 'POST' || options.method === 'PUT' || options.method === 'PATCH')) {
			input.once('end', uploadComplete);
			input.pipe(request);
		} else {
			request.end(uploadComplete);
		}
	} catch (error) {
		emitError(new RequestError(error, options));
	}
};
if (options.cache) {
const cacheableRequest = new CacheableRequest(fn.request, options.cache);
const cacheRequest = cacheableRequest(options, handleResponse);
cacheRequest.once('error', error => {
if (error instanceof CacheableRequest.RequestError) {
emitError(new RequestError(error, options));
} else {
emitError(new CacheError(error, options));
}
});
cacheRequest.once('request', handleRequest);
} else {
// Catches errors thrown by calling fn.request(...)
try {
handleRequest(fn.request(options, handleResponse));
} catch (error) {
emitError(new RequestError(error, options));
}
}
};
// Decides whether to retry after `error`. Returns true when a retry was
// scheduled, false when retries are exhausted/disabled, and undefined when
// the retry-count callback itself threw (the error is emitted instead).
emitter.retry = error => {
	let backoff;

	try {
		backoff = options.retry.retries(++retryCount, error);
	} catch (error2) {
		emitError(error2);
		return;
	}

	if (!backoff) {
		return false;
	}

	// Runs after the backoff delay with a fresh copy of the options.
	const retry = async updatedOptions => {
		try {
			for (const hook of updatedOptions.hooks.beforeRetry) {
				// eslint-disable-next-line no-await-in-loop
				await hook(updatedOptions, error, retryCount);
			}

			await get(updatedOptions);
		} catch (error_) {
			emitError(error_);
		}
	};

	setTimeout(retry, backoff, {...options, forceRefresh: true});
	return true;
};
// Aborts the in-flight request, or flags the abort so that handleRequest
// cancels the request as soon as one is created.
emitter.abort = () => {
	if (!currentRequest) {
		shouldAbort = true;
		return;
	}

	// Swallow the synthetic error that request.abort() produces.
	currentRequest.once('error', () => {});
	currentRequest.abort();
};
// Kick off the request on the next tick so callers can attach listeners first.
setImmediate(async () => {
	try {
		// Convert buffer to stream to receive upload progress events (#322)
		const {body} = options;
		if (is.buffer(body)) {
			options.body = toReadableStream(body);
			uploadBodySize = body.length;
		} else {
			uploadBodySize = await getBodySize(options);
		}

		// Only set content-length when the caller supplied neither framing header.
		if (is.undefined(options.headers['content-length']) && is.undefined(options.headers['transfer-encoding'])) {
			// PUT gets an explicit content-length even for an empty body;
			// uploadBodySize === null means the size is unknowable (stream).
			if ((uploadBodySize > 0 || options.method === 'PUT') && !is.null(uploadBodySize)) {
				options.headers['content-length'] = uploadBodySize;
			}
		}

		// beforeRequest hooks may still mutate options (headers, body, ...).
		for (const hook of options.hooks.beforeRequest) {
			// eslint-disable-next-line no-await-in-loop
			await hook(options);
		}

		requestUrl = options.href || (new URL(options.path, urlLib.format(options))).toString();

		await get(options);
	} catch (error) {
		emitError(error);
	}
});
return emitter;
};

12
node_modules/got/source/utils/deep-freeze.js generated vendored Normal file
View file

@ -0,0 +1,12 @@
'use strict';
const is = require('@sindresorhus/is');
module.exports = function deepFreeze(object) {
for (const [key, value] of Object.entries(object)) {
if (is.plainObject(value) || is.array(value)) {
deepFreeze(object[key]);
}
}
return Object.freeze(object);
};

32
node_modules/got/source/utils/get-body-size.js generated vendored Normal file
View file

@ -0,0 +1,32 @@
'use strict';
const fs = require('fs');
const util = require('util');
const is = require('@sindresorhus/is');
const isFormData = require('./is-form-data');
module.exports = async options => {
const {body} = options;
if (options.headers['content-length']) {
return Number(options.headers['content-length']);
}
if (!body && !options.stream) {
return 0;
}
if (is.string(body)) {
return Buffer.byteLength(body);
}
if (isFormData(body)) {
return util.promisify(body.getLength.bind(body))();
}
if (body instanceof fs.ReadStream) {
const {size} = await util.promisify(fs.stat)(body.path);
return size;
}
return null;
};

4
node_modules/got/source/utils/is-form-data.js generated vendored Normal file
View file

@ -0,0 +1,4 @@
'use strict';
const is = require('@sindresorhus/is');
module.exports = body => is.nodeStream(body) && is.function(body.getBoundary);

160
node_modules/got/source/utils/timed-out.js generated vendored Normal file
View file

@ -0,0 +1,160 @@
'use strict';
const net = require('net');
/**
 * Error emitted when a configured timeout phase elapses.
 * Carries `code: 'ETIMEDOUT'` and the name of the awaited `event`
 * ('request', 'socket', 'lookup', 'connect', 'secureConnect', 'send',
 * 'response') so callers can tell which phase timed out.
 */
class TimeoutError extends Error {
	constructor(threshold, event) {
		const message = `Timeout awaiting '${event}' for ${threshold}ms`;
		super(message);
		Object.assign(this, {
			name: 'TimeoutError',
			code: 'ETIMEDOUT',
			event
		});
	}
}
// Marks a request that already has timeout handling attached (reentry guard).
const reentry = Symbol('reentry');
// Inert canceller handed out once scheduling new timeouts has been disabled.
const noop = () => {};
// Attaches cancellable timeout handlers to `request` for each configured
// phase. `delays` maps phase names ('request', 'socket', 'lookup', 'connect',
// 'secureConnect', 'send', 'response') to thresholds in milliseconds; each
// elapsed phase emits a TimeoutError and aborts the request.
module.exports = (request, delays, options) => {
	/* istanbul ignore next: this makes sure timed-out isn't called twice */
	if (request[reentry]) {
		return;
	}

	request[reentry] = true;

	let stopNewTimeouts = false;

	// Schedules `callback(delay, ...args)` after `delay` ms and returns a
	// canceller. Every canceller is also recorded in `cancelers` so that
	// cancelTimeouts() can clear everything at once.
	const addTimeout = (delay, callback, ...args) => {
		// An error had been thrown before. Going further would result in uncaught errors.
		// See https://github.com/sindresorhus/got/issues/631#issuecomment-435675051
		if (stopNewTimeouts) {
			return noop;
		}

		// Event loop order is timers, poll, immediates.
		// The timed event may emit during the current tick poll phase, so
		// defer calling the handler until the poll phase completes.
		let immediate;
		const timeout = setTimeout(() => {
			immediate = setImmediate(callback, delay, ...args);
			/* istanbul ignore next: added in node v9.7.0 */
			if (immediate.unref) {
				immediate.unref();
			}
		}, delay);

		/* istanbul ignore next: in order to support electron renderer */
		if (timeout.unref) {
			timeout.unref();
		}

		const cancel = () => {
			clearTimeout(timeout);
			clearImmediate(immediate);
		};

		cancelers.push(cancel);

		return cancel;
	};

	const {host, hostname} = options;

	// Emits a TimeoutError on the request and aborts it.
	const timeoutHandler = (delay, event) => {
		request.emit('error', new TimeoutError(delay, event));
		request.once('error', () => {}); // Ignore the `socket hung up` error made by request.abort()

		request.abort();
	};

	// Declared after addTimeout but only read once addTimeout runs, so the
	// temporal-dead-zone is never hit.
	const cancelers = [];
	// Clears all pending timers and blocks scheduling of new ones.
	const cancelTimeouts = () => {
		stopNewTimeouts = true;
		cancelers.forEach(cancelTimeout => cancelTimeout());
	};

	request.once('error', cancelTimeouts);
	request.once('response', response => {
		response.once('end', cancelTimeouts);
	});

	// 'request' phase: total time budget for the whole exchange.
	if (delays.request !== undefined) {
		addTimeout(delays.request, timeoutHandler, 'request');
	}

	// 'socket' phase: uses Node's built-in idle-socket timeout.
	if (delays.socket !== undefined) {
		const socketTimeoutHandler = () => {
			timeoutHandler(delays.socket, 'socket');
		};

		request.setTimeout(delays.socket, socketTimeoutHandler);

		// `request.setTimeout(0)` causes a memory leak.
		// We can just remove the listener and forget about the timer - it's unreffed.
		// See https://github.com/sindresorhus/got/issues/690
		cancelers.push(() => request.removeListener('timeout', socketTimeoutHandler));
	}

	// 'lookup' phase: skipped for IP literals and unix sockets - no DNS involved.
	if (delays.lookup !== undefined && !request.socketPath && !net.isIP(hostname || host)) {
		request.once('socket', socket => {
			/* istanbul ignore next: hard to test */
			if (socket.connecting) {
				const cancelTimeout = addTimeout(delays.lookup, timeoutHandler, 'lookup');
				socket.once('lookup', cancelTimeout);
			}
		});
	}

	// 'connect' phase: NOTE that timeConnect() starts the timer immediately
	// and RETURNS the canceller, which is then registered as the event handler.
	if (delays.connect !== undefined) {
		request.once('socket', socket => {
			/* istanbul ignore next: hard to test */
			if (socket.connecting) {
				const timeConnect = () => addTimeout(delays.connect, timeoutHandler, 'connect');

				if (request.socketPath || net.isIP(hostname || host)) {
					// No DNS lookup needed; time the connect right away.
					socket.once('connect', timeConnect());
				} else {
					// Only start timing the connect once the lookup succeeded.
					socket.once('lookup', error => {
						if (error === null) {
							socket.once('connect', timeConnect());
						}
					});
				}
			}
		});
	}

	// 'secureConnect' phase: from TCP connect until the TLS handshake completes.
	if (delays.secureConnect !== undefined && options.protocol === 'https:') {
		request.once('socket', socket => {
			/* istanbul ignore next: hard to test */
			if (socket.connecting) {
				socket.once('connect', () => {
					const cancelTimeout = addTimeout(delays.secureConnect, timeoutHandler, 'secureConnect');
					socket.once('secureConnect', cancelTimeout);
				});
			}
		});
	}

	// 'send' phase: timer starts on connect (same immediate-invocation pattern
	// as timeConnect) and is cancelled when the body has been fully written.
	if (delays.send !== undefined) {
		request.once('socket', socket => {
			const timeRequest = () => addTimeout(delays.send, timeoutHandler, 'send');
			/* istanbul ignore next: hard to test */
			if (socket.connecting) {
				socket.once('connect', () => {
					request.once('upload-complete', timeRequest());
				});
			} else {
				request.once('upload-complete', timeRequest());
			}
		});
	}

	// 'response' phase: from end-of-upload until response headers arrive.
	if (delays.response !== undefined) {
		request.once('upload-complete', () => {
			const cancelTimeout = addTimeout(delays.response, timeoutHandler, 'response');
			request.once('response', cancelTimeout);
		});
	}
};
module.exports.TimeoutError = TimeoutError;

25
node_modules/got/source/utils/url-to-options.js generated vendored Normal file
View file

@ -0,0 +1,25 @@
'use strict';
const is = require('@sindresorhus/is');
module.exports = url => {
const options = {
protocol: url.protocol,
hostname: url.hostname.startsWith('[') ? url.hostname.slice(1, -1) : url.hostname,
hash: url.hash,
search: url.search,
pathname: url.pathname,
href: url.href
};
if (is.string(url.port) && url.port.length > 0) {
options.port = Number(url.port);
}
if (url.username || url.password) {
options.auth = `${url.username}:${url.password}`;
}
options.path = is.null(url.search) ? url.pathname : `${url.pathname}${url.search}`;
return options;
};