
* first action! (#1)

shimataro 2019-09-18 20:39:54 +09:00 committed by GitHub
parent 8deacc95b1
commit ace1e6a69a
3750 changed files with 1155519 additions and 0 deletions

8
node_modules/JSONStream/.travis.yml generated vendored Normal file

@@ -0,0 +1,8 @@
language: node_js
node_js:
- 4
- 5
- 6
sudo: false

15
node_modules/JSONStream/LICENSE.APACHE2 generated vendored Normal file

@@ -0,0 +1,15 @@
Apache License, Version 2.0
Copyright (c) 2011 Dominic Tarr
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

24
node_modules/JSONStream/LICENSE.MIT generated vendored Normal file

@@ -0,0 +1,24 @@
The MIT License
Copyright (c) 2011 Dominic Tarr
Permission is hereby granted, free of charge,
to any person obtaining a copy of this software and
associated documentation files (the "Software"), to
deal in the Software without restriction, including
without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom
the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

12
node_modules/JSONStream/bin.js generated vendored Executable file

@@ -0,0 +1,12 @@
#! /usr/bin/env node
var JSONStream = require('./')
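// CLI mode: parse stdin with the path given as argv[2], then re-emit the
// matches as an indented JSON array on stdout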
if(!module.parent && process.title !== 'browser') {
process.stdin
.pipe(JSONStream.parse(process.argv[2]))
.pipe(JSONStream.stringify('[', ',\n', ']\n', 2))
.pipe(process.stdout)
}

13
node_modules/JSONStream/examples/all_docs.js generated vendored Normal file

@@ -0,0 +1,13 @@
var request = require('request')
, JSONStream = require('JSONStream')
, es = require('event-stream')
var parser = JSONStream.parse(['rows', true]) //emit parts that match this path (any element of the rows array)
, req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
, logger = es.mapSync(function (data) { //create a stream that logs to stderr,
console.error(data)
return data
})
req.pipe(parser)
parser.pipe(logger)

247
node_modules/JSONStream/index.js generated vendored Executable file

@@ -0,0 +1,247 @@
'use strict'
var Parser = require('jsonparse')
, through = require('through')
var bufferFrom = Buffer.from && Buffer.from !== Uint8Array.from
/*
the value of this.stack that creationix's jsonparse has is weird.
it makes this code ugly, but his problem is way harder than mine,
so i'll forgive him.
*/
exports.parse = function (path, map) {
var header, footer
var parser = new Parser()
var stream = through(function (chunk) {
if('string' === typeof chunk)
chunk = bufferFrom ? Buffer.from(chunk) : new Buffer(chunk)
parser.write(chunk)
},
function (data) {
if(data)
stream.write(data)
if (header)
stream.emit('header', header)
if (footer)
stream.emit('footer', footer)
stream.queue(null)
})
if('string' === typeof path)
path = path.split('.').map(function (e) {
if (e === '$*')
return {emitKey: true}
else if (e === '*')
return true
else if (e === '') // '..'.split('.') returns an empty string
return {recurse: true}
else
return e
})
var count = 0, _key
if(!path || !path.length)
path = null
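// jsonparse calls onValue for every complete JSON value; walk the
// configured path against the parser's stack to decide what to emit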
parser.onValue = function (value) {
if (!this.root)
stream.root = value
if(! path) return
var i = 0 // iterates on path
var j = 0 // iterates on stack
var emitKey = false;
var emitPath = false;
while (i < path.length) {
var key = path[i]
var c
j++
if (key && !key.recurse) {
c = (j === this.stack.length) ? this : this.stack[j]
if (!c) return
if (! check(key, c.key)) {
setHeaderFooter(c.key, value)
return
}
emitKey = !!key.emitKey;
emitPath = !!key.emitPath;
i++
} else {
i++
var nextKey = path[i]
if (! nextKey) return
while (true) {
c = (j === this.stack.length) ? this : this.stack[j]
if (!c) return
if (check(nextKey, c.key)) {
i++;
if (!Object.isFrozen(this.stack[j]))
this.stack[j].value = null
break
} else {
setHeaderFooter(c.key, value)
}
j++
}
}
}
// emit header
if (header) {
stream.emit('header', header);
header = false;
}
if (j !== this.stack.length) return
count ++
var actualPath = this.stack.slice(1).map(function(element) { return element.key }).concat([this.key])
var data = value
if(null != data)
if(null != (data = map ? map(data, actualPath) : data)) {
if (emitKey || emitPath) {
data = { value: data };
if (emitKey)
data["key"] = this.key;
if (emitPath)
data["path"] = actualPath;
}
stream.queue(data)
}
if (this.value) delete this.value[this.key]
for(var k in this.stack)
if (!Object.isFrozen(this.stack[k]))
this.stack[k].value = null
}
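// wrap onToken so that, once the parser unwinds back to the root,
// a pathless stream emits the entire parsed root value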
parser._onToken = parser.onToken;
parser.onToken = function (token, value) {
parser._onToken(token, value);
if (this.stack.length === 0) {
if (stream.root) {
if(!path)
stream.queue(stream.root)
count = 0;
stream.root = null;
}
}
}
parser.onError = function (err) {
if(err.message.indexOf("at position") > -1)
err.message = "Invalid JSON (" + err.message + ")";
stream.emit('error', err)
}
return stream
function setHeaderFooter(key, value) {
// header has not been emitted yet
if (header !== false) {
header = header || {}
header[key] = value
}
// footer has not been emitted yet but header has
if (footer !== false && header === false) {
footer = footer || {}
footer[key] = value
}
}
}
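// match one path component against a key: strings compare loosely (==),
// regexps use .exec, booleans/objects pass through as-is, functions act as predicates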
function check (x, y) {
if ('string' === typeof x)
return y == x
else if (x && 'function' === typeof x.exec)
return x.exec(y)
else if ('boolean' === typeof x || 'object' === typeof x)
return x
else if ('function' === typeof x)
return x(y)
return false
}
exports.stringify = function (op, sep, cl, indent) {
indent = indent || 0
if (op === false){
op = ''
sep = '\n'
cl = ''
} else if (op == null) {
op = '[\n'
sep = '\n,\n'
cl = '\n]\n'
}
// else, whatever you like
var stream
, first = true
, anyData = false
stream = through(function (data) {
anyData = true
try {
var json = JSON.stringify(data, null, indent)
} catch (err) {
return stream.emit('error', err)
}
if(first) { first = false ; stream.queue(op + json)}
else stream.queue(sep + json)
},
function (data) {
if(!anyData)
stream.queue(op)
stream.queue(cl)
stream.queue(null)
})
return stream
}
exports.stringifyObject = function (op, sep, cl, indent) {
indent = indent || 0
if (op === false){
op = ''
sep = '\n'
cl = ''
} else if (op == null) {
op = '{\n'
sep = '\n,\n'
cl = '\n}\n'
}
// else, whatever you like
var first = true
var anyData = false
var stream = through(function (data) {
anyData = true
var json = JSON.stringify(data[0]) + ':' + JSON.stringify(data[1], null, indent)
if(first) { first = false ; this.queue(op + json)}
else this.queue(sep + json)
},
function (data) {
if(!anyData) this.queue(op)
this.queue(cl)
this.queue(null)
})
return stream
}

74
node_modules/JSONStream/package.json generated vendored Normal file

@@ -0,0 +1,74 @@
{
"_from": "JSONStream@^1.3.4",
"_id": "JSONStream@1.3.5",
"_inBundle": false,
"_integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==",
"_location": "/JSONStream",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "JSONStream@^1.3.4",
"name": "JSONStream",
"escapedName": "JSONStream",
"rawSpec": "^1.3.4",
"saveSpec": null,
"fetchSpec": "^1.3.4"
},
"_requiredBy": [
"/npm-registry-fetch"
],
"_resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz",
"_shasum": "3208c1f08d3a4d99261ab64f92302bc15e111ca0",
"_spec": "JSONStream@^1.3.4",
"_where": "/home/shimataro/projects/actions/ssh-key-action/node_modules/npm-registry-fetch",
"author": {
"name": "Dominic Tarr",
"email": "dominic.tarr@gmail.com",
"url": "http://bit.ly/dominictarr"
},
"bin": {
"JSONStream": "./bin.js"
},
"bugs": {
"url": "https://github.com/dominictarr/JSONStream/issues"
},
"bundleDependencies": false,
"dependencies": {
"jsonparse": "^1.2.0",
"through": ">=2.2.7 <3"
},
"deprecated": false,
"description": "rawStream.pipe(JSONStream.parse()).pipe(streamOfObjects)",
"devDependencies": {
"assertions": "~2.2.2",
"event-stream": "~0.7.0",
"it-is": "~1",
"render": "~0.1.1",
"tape": "~2.12.3",
"trees": "~0.0.3"
},
"engines": {
"node": "*"
},
"homepage": "http://github.com/dominictarr/JSONStream",
"keywords": [
"json",
"stream",
"streaming",
"parser",
"async",
"parsing"
],
"license": "(MIT OR Apache-2.0)",
"name": "JSONStream",
"optionalDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/dominictarr/JSONStream.git"
},
"scripts": {
"test": "node test/run.js"
},
"version": "1.3.5"
}

207
node_modules/JSONStream/readme.markdown generated vendored Normal file

@@ -0,0 +1,207 @@
# JSONStream
streaming JSON.parse and stringify
![](https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master)
## install
```npm install JSONStream```
## example
``` js
var request = require('request')
, JSONStream = require('JSONStream')
, es = require('event-stream')
request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
.pipe(JSONStream.parse('rows.*'))
.pipe(es.mapSync(function (data) {
console.error(data)
return data
}))
```
## JSONStream.parse(path)
parse stream of values that match a path
``` js
JSONStream.parse('rows.*.doc')
```
The `..` operator is the recursive descent operator from [JSONPath](http://goessner.net/articles/JsonPath/), which will match a child at any depth (see examples below).
If your keys include characters such as `.` or `*`, use an array instead:
`['rows', true, /^doc/]`.
Array elements may be strings, `RegExp`s, booleans, and/or functions. The `..` operator is also available in array representation, using `{recurse: true}`.
Any object that matches the path will be emitted as 'data' (and `pipe`d down stream).
If `path` is empty or null, no 'data' events are emitted.
If you want keys emitted as well, prefix your `*` operator with `$`: `obj.$*`. In this case the data passed to the stream is an object with a `key` property holding the key and a `value` property holding the data.
### Examples
query a couchdb view:
``` bash
curl -sS 'localhost:5984/tests/_all_docs?include_docs=true'
```
you will get something like this:
``` js
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
},
]}
```
We are probably most interested in `rows.*.doc`.
Create a `Stream` that parses the documents from the feed like this:
``` js
var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc
stream.on('data', function(data) {
console.log('received:', data);
});
//emits anything from _before_ the first match
stream.on('header', function (data) {
console.log('header:', data) // => {"total_rows":129,"offset":0}
})
```
awesome!
In case you want the contents of the doc emitted key by key:
``` js
var stream = JSONStream.parse(['rows', true, 'doc', {emitKey: true}]) //rows, ANYTHING, doc, items in docs with keys
stream.on('data', function(data) {
console.log('key:', data.key);
console.log('value:', data.value);
});
```
You can also emit the path:
``` js
var stream = JSONStream.parse(['rows', true, 'doc', {emitPath: true}]) //rows, ANYTHING, doc, items in docs with keys
stream.on('data', function(data) {
console.log('path:', data.path);
console.log('value:', data.value);
});
```
### recursive patterns (..)
`JSONStream.parse('docs..value')`
(or `JSONStream.parse(['docs', {recurse: true}, 'value'])` using an array)
will emit every `value` object that is a child, grand-child, etc. of the
`docs` object. In this example, it will match exactly 5 times at various depth
levels, emitting 0, 1, 2, 3 and 4 as results.
```js
{
"total": 5,
"docs": [
{
"key": {
"value": 0,
"some": "property"
}
},
{"value": 1},
{"value": 2},
{"blbl": [{}, {"a":0, "b":1, "value":3}, 10]},
{"value": 4}
]
}
```
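A sketch of running that pattern over the document above (modeled on the bundled `test/doubledot2.js`; `doc` here stands for the object shown):
``` js
var JSONStream = require('JSONStream')

var stream = JSONStream.parse('docs..value')
stream.on('data', function (v) { console.log(v) }) // 0, 1, 2, 3, 4
stream.write(JSON.stringify(doc)) // `doc` is the document shown above
stream.end()
```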
## JSONStream.parse(pattern, map)
provide a function that can be used to map or filter
the json output. `map` is passed the value at that node of the pattern.
If `map` returns a non-nullish value (anything but `null` or `undefined`),
that value will be emitted in the stream. If it returns a nullish value,
nothing will be emitted.
`JSONStream` also emits `'header'` and `'footer'` events:
the `'header'` event contains anything in the output that came before
the first match, and the `'footer'` event contains anything after the last match.
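For example, a minimal sketch (modeled on the bundled `test/map.js`) that uses `map` as a filter by returning `null` for even numbers:
``` js
var JSONStream = require('JSONStream')

// keep only odd values; returning null drops the element
var stream = JSONStream.parse([true], function (e) { return e % 2 ? e : null })
stream.on('data', function (v) { console.log(v) }) // 1, 3, 5
stream.write(JSON.stringify([1, 2, 3, 4, 5, 6]))
stream.end()
```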
## JSONStream.stringify(open, sep, close)
Create a writable stream.
You may pass in custom `open`, `close`, and `separator` strings,
but by default `JSONStream.stringify()` will create an array
(with default options `open='[\n', sep='\n,\n', close='\n]\n'`).
If you call `JSONStream.stringify(false)`,
the elements will only be separated by a newline.
If you only write one item this will be valid JSON;
if you write many items,
you can use a `RegExp` to split it into valid chunks.
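A minimal sketch of serializing a few objects (the destination stream here is just illustrative):
``` js
var JSONStream = require('JSONStream')

// writes '[\n{...}\n,\n{...}\n]\n' to stdout
var stringify = JSONStream.stringify()
stringify.pipe(process.stdout)
stringify.write({hello: 'world'})
stringify.write({foo: 'bar'})
stringify.end() // flushes the closing ']'
```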
## JSONStream.stringifyObject(open, sep, close)
Very much like `JSONStream.stringify`,
but creates a writable stream for objects instead of arrays.
Accordingly, `open='{\n', sep='\n,\n', close='\n}\n'`.
When you `.write()` to the stream you must supply an array with `[ key, data ]`
as the first argument.
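A short sketch, again with an illustrative destination:
``` js
var JSONStream = require('JSONStream')

// each .write() takes a [key, value] pair; the result is one JSON object
var stringify = JSONStream.stringifyObject()
stringify.pipe(process.stdout)
stringify.write(['greeting', {hello: 'world'}])
stringify.write(['count', 42])
stringify.end() // => {"greeting":{"hello":"world"},"count":42} with newline separators
```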
## unix tool
query npm to see all the modules that browserify has ever depended on.
``` bash
curl https://registry.npmjs.org/browserify | JSONStream 'versions.*.dependencies'
```
## numbers
Numbers will be emitted as numbers.
Huge numbers that cannot be represented exactly as JavaScript numbers will be emitted as strings.
See https://github.com/creationix/jsonparse/commit/044b268f01c4b8f97fb936fc85d3bcfba179e5bb for details.
## Acknowledgements
this module depends on https://github.com/creationix/jsonparse
by Tim Caswell
and also thanks to Florent Jaby for teaching me about parsing with:
https://github.com/Floby/node-json-streams
## license
Dual-licensed under the MIT License or the Apache License, version 2.0

41
node_modules/JSONStream/test/bool.js generated vendored Normal file

@@ -0,0 +1,41 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
// stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
JSONStream.parse([true]),
es.writeArray(function (err, lines) {
it(lines).has(expected)
console.error('PASSED')
})
)

18
node_modules/JSONStream/test/browser.js generated vendored Normal file

@@ -0,0 +1,18 @@
var test = require('tape')
var JSONStream = require('../')
var testData = '{"rows":[{"hello":"world"}, {"foo": "bar"}]}'
test('basic parsing', function (t) {
t.plan(2)
var parsed = JSONStream.parse("rows.*")
var parsedKeys = {}
parsed.on('data', function(match) {
parsedKeys[Object.keys(match)[0]] = true
})
parsed.on('end', function() {
t.equal(!!parsedKeys['hello'], true)
t.equal(!!parsedKeys['foo'], true)
})
parsed.write(testData)
parsed.end()
})

27
node_modules/JSONStream/test/destroy_missing.js generated vendored Normal file

@@ -0,0 +1,27 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var JSONStream = require('../');
var server = net.createServer(function(client) {
var parser = JSONStream.parse([]);
parser.on('end', function() {
console.log('close')
console.error('PASSED');
server.close();
});
client.pipe(parser);
var n = 4
client.on('data', function () {
if(--n) return
client.end();
})
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
fs.createReadStream(file).pipe(client).on('data', console.log) //.resume();
});

29
node_modules/JSONStream/test/doubledot1.js generated vendored Normal file

@@ -0,0 +1,29 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse('rows..rev')
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
for (var i = 0 ; i < expected.rows.length ; i++)
it(parsed[i]).deepEqual(expected.rows[i].value.rev)
console.error('PASSED')
})

30
node_modules/JSONStream/test/doubledot2.js generated vendored Normal file

@@ -0,0 +1,30 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','depth.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['docs', {recurse: true}, 'value'])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
var expectedValues = [0, [1], {"a": 2}, "3", 4]
it(called).equal(expectedValues.length)
for (var i = 0 ; i < 5 ; i++)
it(parsed[i]).deepEqual(expectedValues[i])
console.error('PASSED')
})

44
node_modules/JSONStream/test/empty.js generated vendored Normal file

@@ -0,0 +1,44 @@
var JSONStream = require('../')
, stream = require('stream')
, it = require('it-is')
var output = [ [], [] ]
var parser1 = JSONStream.parse(['docs', /./])
parser1.on('data', function(data) {
output[0].push(data)
})
var parser2 = JSONStream.parse(['docs', /./])
parser2.on('data', function(data) {
output[1].push(data)
})
var pending = 2
function onend () {
if (--pending > 0) return
it(output).deepEqual([
[], [{hello: 'world'}]
])
console.error('PASSED')
}
parser1.on('end', onend)
parser2.on('end', onend)
function makeReadableStream() {
var readStream = new stream.Stream()
readStream.readable = true
readStream.write = function (data) { this.emit('data', data) }
readStream.end = function (data) { this.emit('end') }
return readStream
}
var emptyArray = makeReadableStream()
emptyArray.pipe(parser1)
emptyArray.write('{"docs":[]}')
emptyArray.end()
var objectArray = makeReadableStream()
objectArray.pipe(parser2)
objectArray.write('{"docs":[{"hello":"world"}]}')
objectArray.end()

45
node_modules/JSONStream/test/error_contents.js generated vendored Normal file

@@ -0,0 +1,45 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','error.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows'])
, called = 0
, headerCalled = 0
, footerCalled = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('header', function (data) {
headerCalled ++
it(data).deepEqual({
error: 'error_code',
message: 'this is an error message'
})
})
parser.on('footer', function (data) {
footerCalled ++
})
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(0)
it(headerCalled).equal(1)
it(footerCalled).equal(0)
console.error('PASSED')
})

4030
node_modules/JSONStream/test/fixtures/all_npm.json generated vendored Normal file

File diff suppressed because it is too large

18
node_modules/JSONStream/test/fixtures/couch_sample.json generated vendored Normal file

@@ -0,0 +1,18 @@
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
},
]}

15
node_modules/JSONStream/test/fixtures/depth.json generated vendored Normal file

@@ -0,0 +1,15 @@
{
"total": 5,
"docs": [
{
"key": {
"value": 0,
"some": "property"
}
},
{"value": [1]},
{"value": {"a":2}},
{"blbl": [{}, {"a":0, "b":1, "value":"3"}, 10]},
{"value": 4}
]
}

1
node_modules/JSONStream/test/fixtures/error.json generated vendored Normal file

@@ -0,0 +1 @@
{"error": "error_code", "message": "this is an error message"}

19
node_modules/JSONStream/test/fixtures/header_footer.json generated vendored Normal file

@@ -0,0 +1,19 @@
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
}
],
"foo": {"bar": "baz"}}

39
node_modules/JSONStream/test/fn.js generated vendored Normal file

@@ -0,0 +1,39 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
function fn (s) {
return !isNaN(parseInt(s, 10))
}
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', fn])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it.has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})

135
node_modules/JSONStream/test/gen.js generated vendored Normal file

@@ -0,0 +1,135 @@
return // don't run this test for now since tape is weird and broken on 0.10
var fs = require('fs')
var JSONStream = require('../')
var file = process.argv[2] || '/tmp/JSONStream-test-large.json'
var size = Number(process.argv[3] || 100000)
var tape = require('tape')
// if (process.title !== 'browser') {
tape('out of mem', function (t) {
t.plan(1)
//////////////////////////////////////////////////////
// Produces a random number between arg1 and arg2
//////////////////////////////////////////////////////
var randomNumber = function (min, max) {
var number = Math.floor(Math.random() * (max - min + 1) + min);
return number;
};
//////////////////////////////////////////////////////
// Produces a random string of a length between arg1 and arg2
//////////////////////////////////////////////////////
var randomString = function (min, max) {
// add several spaces to increase chances of creating 'words'
var chars = ' 0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
var result = '';
var randomLength = randomNumber(min, max);
for (var i = randomLength; i > 0; --i) {
result += chars[Math.round(Math.random() * (chars.length - 1))];
}
return result;
};
//////////////////////////////////////////////////////
// Produces a random JSON document, as a string
//////////////////////////////////////////////////////
var randomJsonDoc = function () {
var doc = {
"CrashOccurenceID": randomNumber(10000, 50000),
"CrashID": randomNumber(1000, 10000),
"SiteName": randomString(10, 25),
"MachineName": randomString(10, 25),
"Date": randomString(26, 26),
"ProcessDuration": randomString(18, 18),
"ThreadIdentityName": null,
"WindowsIdentityName": randomString(15, 40),
"OperatingSystemName": randomString(35, 65),
"DetailedExceptionInformation": randomString(100, 800)
};
doc = JSON.stringify(doc);
doc = doc.replace(/\,/g, ',\n'); // add new lines after each attribute
return doc;
};
//////////////////////////////////////////////////////
// generates test data
//////////////////////////////////////////////////////
var generateTestData = function (cb) {
console.log('generating large data file...');
var stream = fs.createWriteStream(file, {
encoding: 'utf8'
});
var i = 0;
var max = size;
var writing = false
var split = ',\n';
var doc = randomJsonDoc();
stream.write('[');
function write () {
if(writing) return
writing = true
while(++i < max) {
if(Math.random() < 0.001)
console.log('generate..', i + ' / ' + size)
if(!stream.write(doc + split)) {
writing = false
return stream.once('drain', write)
}
}
stream.write(doc + ']')
stream.end();
console.log('END')
}
write()
stream.on('close', cb)
};
//////////////////////////////////////////////////////
// Shows that parsing 100000 instances using JSONStream fails
//
// After several seconds, you will get this crash
// FATAL ERROR: JS Allocation failed - process out of memory
//////////////////////////////////////////////////////
var testJSONStreamParse_causesOutOfMem = function (done) {
var items = 0
console.log('parsing data files using JSONStream...');
var parser = JSONStream.parse([true]);
var stream = fs.createReadStream(file);
stream.pipe(parser);
parser.on('data', function (data) {
items++
if(Math.random() < 0.01) console.log(items, '...')
});
parser.on('end', function () {
t.equal(items, size)
});
};
//////////////////////////////////////////////////////
// main
//////////////////////////////////////////////////////
fs.stat(file, function (err, stat) {
console.log(stat)
if(err)
generateTestData(testJSONStreamParse_causesOutOfMem);
else
testJSONStreamParse_causesOutOfMem()
})
})
// }

55
node_modules/JSONStream/test/header_footer.js generated vendored Normal file

@@ -0,0 +1,55 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','header_footer.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/])
, called = 0
, headerCalled = 0
, footerCalled = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('header', function (data) {
headerCalled ++
it(data).deepEqual({
total_rows: 129,
offset: 0
})
})
parser.on('footer', function (data) {
footerCalled ++
it(data).deepEqual({
foo: { bar: 'baz' }
})
})
parser.on('data', function (data) {
called ++
it.has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
it(headerCalled).equal(1)
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(headerCalled).equal(1)
it(footerCalled).equal(1)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})

34
node_modules/JSONStream/test/issues.js generated vendored Normal file

@@ -0,0 +1,34 @@
var JSONStream = require('../');
var test = require('tape')
test('#66', function (t) {
var error = 0;
var stream = JSONStream
.parse()
.on('error', function (err) {
t.ok(err);
error++;
})
.on('end', function () {
t.ok(error === 1);
t.end();
});
stream.write('["foo":bar[');
stream.end();
});
test('#81 - failure to parse nested objects', function (t) {
var stream = JSONStream
.parse('.bar.foo')
.on('error', function (err) {
t.error(err);
})
.on('end', function () {
t.end();
});
stream.write('{"bar":{"foo":"baz"}}');
stream.end();
});

105
node_modules/JSONStream/test/keys.js generated vendored Normal file

@@ -0,0 +1,105 @@
var test = require('tape');
var fs = require ('fs');
var join = require('path').join;
var couch_sample_file = join(__dirname, 'fixtures','couch_sample.json');
var JSONStream = require('../');
var fixture = {
obj: {
one: 1,
two: 2,
three: 3
}
};
function assertFixtureKeys(stream, t) {
var keys = [];
var values = [];
stream.on('data', function(data) {
keys.push(data.key);
values.push(data.value);
});
stream.on('end', function() {
t.deepEqual(keys, ['one', 'two', 'three']);
t.deepEqual(values, [1,2,3]);
t.end();
});
stream.write(JSON.stringify(fixture));
stream.end();
}
test('keys via string', function(t) {
var stream = JSONStream.parse('obj.$*');
assertFixtureKeys(stream, t);
});
test('keys via array', function(t) {
var stream = JSONStream.parse(['obj',{emitKey: true}]);
assertFixtureKeys(stream, t);
});
test('path via array', function(t) {
var stream = JSONStream.parse(['obj',{emitPath: true}]);
var paths = [];
var values = [];
stream.on('data', function(data) {
console.log(JSON.stringify(data));
paths.push(data.path);
values.push(data.value);
});
stream.on('end', function() {
t.deepEqual(paths, [['obj', 'one'], ['obj', 'two'], ['obj', 'three']]);
t.deepEqual(values, [1,2,3]);
t.end();
});
stream.write(JSON.stringify(fixture));
stream.end();
});
test('advanced keys', function(t) {
var advanced = fs.readFileSync(couch_sample_file);
var stream = JSONStream.parse(['rows', true, 'doc', {emitKey: true}]);
var keys = [];
var values = [];
stream.on('data', function(data) {
keys.push(data.key);
values.push(data.value);
});
stream.on('end', function() {
t.deepEqual(keys, [
'_id', '_rev', 'hello',
'_id', '_rev', 'hello'
]);
t.deepEqual(values, [
"change1_0.6995461115147918", "1-e240bae28c7bb3667f02760f6398d508", 1,
"change2_0.6995461115147918", "1-13677d36b98c0c075145bb8975105153", 2
]);
t.end();
});
stream.write(advanced);
stream.end();
});
test('parent keys', function(t) {
var stream = JSONStream.parse('$*');
var d = null;
stream.on('data', function(data) {
if(d) t.fail('should only be called once');
d = data;
});
stream.on('end', function() {
t.deepEqual(d,{
key: 'obj',
value: fixture.obj
});
t.end();
});
stream.write(JSON.stringify(fixture));
stream.end();
})

40
node_modules/JSONStream/test/map.js generated vendored Normal file

@@ -0,0 +1,40 @@
var test = require('tape')
var JSONStream = require('../')
test('map function', function (t) {
var actual = []
var stream = JSONStream.parse([true], function (e) { return e*10 })
stream.on('data', function (v) { actual.push(v)})
stream.on('end', function () {
t.deepEqual(actual, [10,20,30,40,50,60])
t.end()
})
stream.write(JSON.stringify([1,2,3,4,5,6], null, 2))
stream.end()
})
test('filter function', function (t) {
var actual = []
var stream = JSONStream
.parse([true], function (e) { return e%2 ? e : null})
.on('data', function (v) { actual.push(v)})
.on('end', function () {
t.deepEqual(actual, [1,3,5])
t.end()
})
stream.write(JSON.stringify([1,2,3,4,5,6], null, 2))
stream.end()
})

36
node_modules/JSONStream/test/multiple_objects.js generated vendored Normal file

@@ -0,0 +1,36 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var it = require('it-is');
var JSONStream = require('../');
var str = fs.readFileSync(file);
var datas = {}
var server = net.createServer(function(client) {
var data_calls = 0;
var parser = JSONStream.parse(['rows', true, 'key']);
parser.on('data', function(data) {
++ data_calls;
datas[data] = (datas[data] || 0) + 1
it(data).typeof('string')
});
parser.on('end', function() {
console.log('END')
var min = Infinity
for (var d in datas)
min = min > datas[d] ? datas[d] : min
it(min).equal(3);
server.close();
});
client.pipe(parser);
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
var msgs = str + ' ' + str + '\n\n' + str
client.end(msgs);
});

29
node_modules/JSONStream/test/multiple_objects_error.js generated vendored Normal file

@@ -0,0 +1,29 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var it = require('it-is');
var JSONStream = require('../');
var str = fs.readFileSync(file);
var server = net.createServer(function(client) {
var data_calls = 0;
var parser = JSONStream.parse();
parser.on('error', function(err) {
console.log(err);
server.close();
});
parser.on('end', function() {
console.log('END');
server.close();
});
client.pipe(parser);
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
var msgs = str + '}';
client.end(msgs);
});

28
node_modules/JSONStream/test/null.js generated vendored Normal file

@@ -0,0 +1,28 @@
var JSONStream = require('../')
var data = [
{ID: 1, optional: null},
{ID: 2, optional: null},
{ID: 3, optional: 20},
{ID: 4, optional: null},
{ID: 5, optional: 'hello'},
{ID: 6, optional: null}
]
var test = require('tape')
test ('null properties', function (t) {
var actual = []
var stream =
JSONStream.parse('*.optional')
.on('data', function (v) { actual.push(v) })
.on('end', function () {
t.deepEqual(actual, [20, 'hello'])
t.end()
})
stream.write(JSON.stringify(data, null, 2))
stream.end()
})

29
node_modules/JSONStream/test/parsejson.js generated vendored Normal file

@@ -0,0 +1,29 @@
/*
sometimes jsonparse changes numbers slightly.
*/
var r = Math.random()
, Parser = require('jsonparse')
, p = new Parser()
, assert = require('assert')
, times = 20
, bufferFrom = Buffer.from && Buffer.from !== Uint8Array.from
, str
while (times --) {
assert.equal(JSON.parse(JSON.stringify(r)), r, 'core JSON')
p.onValue = function (v) {
console.error('parsed', v)
assert.equal(v,r)
}
console.error('correct', r)
str = JSON.stringify([r])
p.write (bufferFrom ? Buffer.from(str) : new Buffer(str))
}

13
node_modules/JSONStream/test/run.js generated vendored Normal file

@@ -0,0 +1,13 @@
var readdirSync = require('fs').readdirSync
var spawnSync = require('child_process').spawnSync
var extname = require('path').extname
var files = readdirSync(__dirname)
files.forEach(function(file){
if (extname(file) !== '.js' || file === 'run.js')
return
console.log(`*** ${file} ***`)
var result = spawnSync(process.argv0, [file], { stdio: 'inherit', cwd: __dirname} )
if (result.status !== 0)
process.exit(result.status)
})

41
node_modules/JSONStream/test/stringify.js generated vendored Normal file

@@ -0,0 +1,41 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
//JSONStream.parse([/./]),
es.writeArray(function (err, lines) {
it(JSON.parse(lines.join(''))).deepEqual(expected)
console.error('PASSED')
})
)

47
node_modules/JSONStream/test/stringify_object.js generated vendored Normal file

@@ -0,0 +1,47 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
, es = require('event-stream')
, pending = 10
, passed = true
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
for (var ix = 0; ix < pending; ix++) (function (count) {
var expected = {}
, stringify = JSONStream.stringifyObject()
es.connect(
stringify,
es.writeArray(function (err, lines) {
it(JSON.parse(lines.join(''))).deepEqual(expected)
if (--pending === 0) {
console.error('PASSED')
}
})
)
while (count --) {
var key = Math.random().toString(16).slice(2)
expected[key] = randomObj()
stringify.write([ key, expected[key] ])
}
process.nextTick(function () {
stringify.end()
})
})(ix)

35
node_modules/JSONStream/test/test.js generated vendored Normal file

@@ -0,0 +1,35 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it.has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})

29
node_modules/JSONStream/test/test2.js generated vendored Normal file

@@ -0,0 +1,29 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, '..','package.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse([])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it(data).deepEqual(expected)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(1)
console.error('PASSED')
})

41
node_modules/JSONStream/test/two-ways.js generated vendored Normal file

@@ -0,0 +1,41 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
// stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
JSONStream.parse([/./]),
es.writeArray(function (err, lines) {
it(lines).has(expected)
console.error('PASSED')
})
)