push all website files

Author: Jacob Levine
Date: 2019-01-06 13:14:45 -06:00
parent d7301e26c3
commit d2d5d4c04e
15662 changed files with 2166516 additions and 0 deletions

website/node_modules/npm/node_modules/.bin/JSONStream generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../JSONStream/bin.js" "$@"
ret=$?
else
node "$basedir/../JSONStream/bin.js" "$@"
ret=$?
fi
exit $ret


@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\JSONStream\bin.js" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\JSONStream\bin.js" %*
)

website/node_modules/npm/node_modules/.bin/errno generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../errno/cli.js" "$@"
ret=$?
else
node "$basedir/../errno/cli.js" "$@"
ret=$?
fi
exit $ret

website/node_modules/npm/node_modules/.bin/errno.cmd generated vendored Normal file

@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\errno\cli.js" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\errno\cli.js" %*
)

website/node_modules/npm/node_modules/.bin/is-ci generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../is-ci/bin.js" "$@"
ret=$?
else
node "$basedir/../is-ci/bin.js" "$@"
ret=$?
fi
exit $ret

website/node_modules/npm/node_modules/.bin/is-ci.cmd generated vendored Normal file

@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\is-ci\bin.js" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\is-ci\bin.js" %*
)

website/node_modules/npm/node_modules/.bin/mkdirp generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../mkdirp/bin/cmd.js" "$@"
ret=$?
else
node "$basedir/../mkdirp/bin/cmd.js" "$@"
ret=$?
fi
exit $ret


@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\mkdirp\bin\cmd.js" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\mkdirp\bin\cmd.js" %*
)

website/node_modules/npm/node_modules/.bin/node-gyp generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../node-gyp/bin/node-gyp.js" "$@"
ret=$?
else
node "$basedir/../node-gyp/bin/node-gyp.js" "$@"
ret=$?
fi
exit $ret


@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\node-gyp\bin\node-gyp.js" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\node-gyp\bin\node-gyp.js" %*
)

website/node_modules/npm/node_modules/.bin/nopt generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../nopt/bin/nopt.js" "$@"
ret=$?
else
node "$basedir/../nopt/bin/nopt.js" "$@"
ret=$?
fi
exit $ret

website/node_modules/npm/node_modules/.bin/nopt.cmd generated vendored Normal file

@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\nopt\bin\nopt.js" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\nopt\bin\nopt.js" %*
)

website/node_modules/npm/node_modules/.bin/opener generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../opener/bin/opener-bin.js" "$@"
ret=$?
else
node "$basedir/../opener/bin/opener-bin.js" "$@"
ret=$?
fi
exit $ret


@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\opener\bin\opener-bin.js" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\opener\bin\opener-bin.js" %*
)


@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../qrcode-terminal/bin/qrcode-terminal.js" "$@"
ret=$?
else
node "$basedir/../qrcode-terminal/bin/qrcode-terminal.js" "$@"
ret=$?
fi
exit $ret


@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\qrcode-terminal\bin\qrcode-terminal.js" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\qrcode-terminal\bin\qrcode-terminal.js" %*
)

website/node_modules/npm/node_modules/.bin/rc generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../rc/cli.js" "$@"
ret=$?
else
node "$basedir/../rc/cli.js" "$@"
ret=$?
fi
exit $ret

website/node_modules/npm/node_modules/.bin/rc.cmd generated vendored Normal file

@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\rc\cli.js" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\rc\cli.js" %*
)

website/node_modules/npm/node_modules/.bin/rimraf generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../rimraf/bin.js" "$@"
ret=$?
else
node "$basedir/../rimraf/bin.js" "$@"
ret=$?
fi
exit $ret


@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\rimraf\bin.js" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\rimraf\bin.js" %*
)

website/node_modules/npm/node_modules/.bin/semver generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../semver/bin/semver" "$@"
ret=$?
else
node "$basedir/../semver/bin/semver" "$@"
ret=$?
fi
exit $ret


@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\semver\bin\semver" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\semver\bin\semver" %*
)

website/node_modules/npm/node_modules/.bin/sshpk-conv generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../sshpk/bin/sshpk-conv" "$@"
ret=$?
else
node "$basedir/../sshpk/bin/sshpk-conv" "$@"
ret=$?
fi
exit $ret


@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\sshpk\bin\sshpk-conv" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\sshpk\bin\sshpk-conv" %*
)

website/node_modules/npm/node_modules/.bin/sshpk-sign generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../sshpk/bin/sshpk-sign" "$@"
ret=$?
else
node "$basedir/../sshpk/bin/sshpk-sign" "$@"
ret=$?
fi
exit $ret


@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\sshpk\bin\sshpk-sign" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\sshpk\bin\sshpk-sign" %*
)


@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../sshpk/bin/sshpk-verify" "$@"
ret=$?
else
node "$basedir/../sshpk/bin/sshpk-verify" "$@"
ret=$?
fi
exit $ret


@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\sshpk\bin\sshpk-verify" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\sshpk\bin\sshpk-verify" %*
)

website/node_modules/npm/node_modules/.bin/uuid generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../uuid/bin/uuid" "$@"
ret=$?
else
node "$basedir/../uuid/bin/uuid" "$@"
ret=$?
fi
exit $ret

website/node_modules/npm/node_modules/.bin/uuid.cmd generated vendored Normal file

@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\uuid\bin\uuid" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\uuid\bin\uuid" %*
)

website/node_modules/npm/node_modules/.bin/which generated vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')")
case `uname` in
*CYGWIN*) basedir=`cygpath -w "$basedir"`;;
esac
if [ -x "$basedir/node" ]; then
"$basedir/node" "$basedir/../which/bin/which" "$@"
ret=$?
else
node "$basedir/../which/bin/which" "$@"
ret=$?
fi
exit $ret

website/node_modules/npm/node_modules/.bin/which.cmd generated vendored Normal file

@@ -0,0 +1,7 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\..\which\bin\which" %*
) ELSE (
@SETLOCAL
@SET PATHEXT=%PATHEXT:;.JS;=;%
node "%~dp0\..\which\bin\which" %*
)


@@ -0,0 +1,8 @@
language: node_js
node_js:
- 4
- 5
- 6
sudo: false


@@ -0,0 +1,15 @@
Apache License, Version 2.0
Copyright (c) 2011 Dominic Tarr
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@@ -0,0 +1,24 @@
The MIT License
Copyright (c) 2011 Dominic Tarr
Permission is hereby granted, free of charge,
to any person obtaining a copy of this software and
associated documentation files (the "Software"), to
deal in the Software without restriction, including
without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom
the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.


@@ -0,0 +1,12 @@
#! /usr/bin/env node
var JSONStream = require('./')
if(!module.parent && process.title !== 'browser') {
process.stdin
.pipe(JSONStream.parse(process.argv[2]))
.pipe(JSONStream.stringify('[', ',\n', ']\n', 2))
.pipe(process.stdout)
}


@@ -0,0 +1,13 @@
var request = require('request')
, JSONStream = require('JSONStream')
, es = require('event-stream')
var parser = JSONStream.parse(['rows', true]) //emit parts that match this path (any element of the rows array)
, req = request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
, logger = es.mapSync(function (data) { //create a stream that logs to stderr,
console.error(data)
return data
})
req.pipe(parser)
parser.pipe(logger)


@@ -0,0 +1,245 @@
'use strict'
var Parser = require('jsonparse')
, through = require('through')
/*
the value of this.stack that creationix's jsonparse has is weird.
it makes this code ugly, but his problem is way harder that mine,
so i'll forgive him.
*/
exports.parse = function (path, map) {
var header, footer
var parser = new Parser()
var stream = through(function (chunk) {
if('string' === typeof chunk)
chunk = new Buffer(chunk)
parser.write(chunk)
},
function (data) {
if(data)
stream.write(data)
if (header)
stream.emit('header', header)
if (footer)
stream.emit('footer', footer)
stream.queue(null)
})
if('string' === typeof path)
path = path.split('.').map(function (e) {
if (e === '$*')
return {emitKey: true}
else if (e === '*')
return true
else if (e === '') // '..'.split('.') returns an empty string
return {recurse: true}
else
return e
})
var count = 0, _key
if(!path || !path.length)
path = null
parser.onValue = function (value) {
if (!this.root)
stream.root = value
if(! path) return
var i = 0 // iterates on path
var j = 0 // iterates on stack
var emitKey = false;
var emitPath = false;
while (i < path.length) {
var key = path[i]
var c
j++
if (key && !key.recurse) {
c = (j === this.stack.length) ? this : this.stack[j]
if (!c) return
if (! check(key, c.key)) {
setHeaderFooter(c.key, value)
return
}
emitKey = !!key.emitKey;
emitPath = !!key.emitPath;
i++
} else {
i++
var nextKey = path[i]
if (! nextKey) return
while (true) {
c = (j === this.stack.length) ? this : this.stack[j]
if (!c) return
if (check(nextKey, c.key)) {
i++;
if (!Object.isFrozen(this.stack[j]))
this.stack[j].value = null
break
} else {
setHeaderFooter(c.key, value)
}
j++
}
}
}
// emit header
if (header) {
stream.emit('header', header);
header = false;
}
if (j !== this.stack.length) return
count ++
var actualPath = this.stack.slice(1).map(function(element) { return element.key }).concat([this.key])
var data = value
if(null != data)
if(null != (data = map ? map(data, actualPath) : data)) {
if (emitKey || emitPath) {
data = { value: data };
if (emitKey)
data["key"] = this.key;
if (emitPath)
data["path"] = actualPath;
}
stream.queue(data)
}
if (this.value) delete this.value[this.key]
for(var k in this.stack)
if (!Object.isFrozen(this.stack[k]))
this.stack[k].value = null
}
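// wrap jsonparse's onToken: once the parse stack empties, a complete top-level
// value has been read, so emit the root (when no path was given) and reset state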
parser._onToken = parser.onToken;
parser.onToken = function (token, value) {
parser._onToken(token, value);
if (this.stack.length === 0) {
if (stream.root) {
if(!path)
stream.queue(stream.root)
count = 0;
stream.root = null;
}
}
}
parser.onError = function (err) {
if(err.message.indexOf("at position") > -1)
err.message = "Invalid JSON (" + err.message + ")";
stream.emit('error', err)
}
return stream
function setHeaderFooter(key, value) {
// header has not been emitted yet
if (header !== false) {
header = header || {}
header[key] = value
}
// footer has not been emitted yet but header has
if (footer !== false && header === false) {
footer = footer || {}
footer[key] = value
}
}
}
function check (x, y) {
if ('string' === typeof x)
return y == x
else if (x && 'function' === typeof x.exec)
return x.exec(y)
else if ('boolean' === typeof x || 'object' === typeof x)
return x
else if ('function' === typeof x)
return x(y)
return false
}
exports.stringify = function (op, sep, cl, indent) {
indent = indent || 0
if (op === false){
op = ''
sep = '\n'
cl = ''
} else if (op == null) {
op = '[\n'
sep = '\n,\n'
cl = '\n]\n'
}
//else, what ever you like
var stream
, first = true
, anyData = false
stream = through(function (data) {
anyData = true
try {
var json = JSON.stringify(data, null, indent)
} catch (err) {
return stream.emit('error', err)
}
if(first) { first = false ; stream.queue(op + json)}
else stream.queue(sep + json)
},
function (data) {
if(!anyData)
stream.queue(op)
stream.queue(cl)
stream.queue(null)
})
return stream
}
exports.stringifyObject = function (op, sep, cl, indent) {
indent = indent || 0
if (op === false){
op = ''
sep = '\n'
cl = ''
} else if (op == null) {
op = '{\n'
sep = '\n,\n'
cl = '\n}\n'
}
//else, what ever you like
var first = true
var anyData = false
var stream = through(function (data) {
anyData = true
var json = JSON.stringify(data[0]) + ':' + JSON.stringify(data[1], null, indent)
if(first) { first = false ; this.queue(op + json)}
else this.queue(sep + json)
},
function (data) {
if(!anyData) this.queue(op)
this.queue(cl)
this.queue(null)
})
return stream
}


@@ -0,0 +1,74 @@
{
"_from": "JSONStream@1.3.4",
"_id": "JSONStream@1.3.4",
"_inBundle": true,
"_integrity": "sha512-Y7vfi3I5oMOYIr+WxV8NZxDSwcbNgzdKYsTNInmycOq9bUYwGg9ryu57Wg5NLmCjqdFPNUmpMBo3kSJN9tCbXg==",
"_location": "/npm/JSONStream",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "JSONStream@1.3.4",
"name": "JSONStream",
"escapedName": "JSONStream",
"rawSpec": "1.3.4",
"saveSpec": null,
"fetchSpec": "1.3.4"
},
"_requiredBy": [
"/npm"
],
"_resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.4.tgz",
"_shasum": "615bb2adb0cd34c8f4c447b5f6512fa1d8f16a2e",
"_spec": "JSONStream@1.3.4",
"_where": "/Users/zkat/Documents/code/work/npm",
"author": {
"name": "Dominic Tarr",
"email": "dominic.tarr@gmail.com",
"url": "http://bit.ly/dominictarr"
},
"bin": {
"JSONStream": "./bin.js"
},
"bugs": {
"url": "https://github.com/dominictarr/JSONStream/issues"
},
"bundleDependencies": false,
"dependencies": {
"jsonparse": "^1.2.0",
"through": ">=2.2.7 <3"
},
"deprecated": false,
"description": "rawStream.pipe(JSONStream.parse()).pipe(streamOfObjects)",
"devDependencies": {
"assertions": "~2.2.2",
"event-stream": "~0.7.0",
"it-is": "~1",
"render": "~0.1.1",
"tape": "~2.12.3",
"trees": "~0.0.3"
},
"engines": {
"node": "*"
},
"homepage": "http://github.com/dominictarr/JSONStream",
"keywords": [
"json",
"stream",
"streaming",
"parser",
"async",
"parsing"
],
"license": "(MIT OR Apache-2.0)",
"name": "JSONStream",
"optionalDependencies": {},
"repository": {
"type": "git",
"url": "git://github.com/dominictarr/JSONStream.git"
},
"scripts": {
"test": "set -e; for t in test/*.js; do echo '***' $t '***'; node $t; done"
},
"version": "1.3.4"
}


@@ -0,0 +1,207 @@
# JSONStream
streaming JSON.parse and stringify
![](https://secure.travis-ci.org/dominictarr/JSONStream.png?branch=master)
## install
```npm install JSONStream```
## example
``` js
var request = require('request')
, JSONStream = require('JSONStream')
, es = require('event-stream')
request({url: 'http://isaacs.couchone.com/registry/_all_docs'})
.pipe(JSONStream.parse('rows.*'))
.pipe(es.mapSync(function (data) {
console.error(data)
return data
}))
```
## JSONStream.parse(path)
parse stream of values that match a path
``` js
JSONStream.parse('rows.*.doc')
```
The `..` operator is the recursive descent operator from [JSONPath](http://goessner.net/articles/JsonPath/), which will match a child at any depth (see examples below).
If your keys themselves contain `.` or `*` etc, use an array instead, e.g.
`['row', true, /^doc/]`.
In the array form, elements may be strings, `RegExp`s, booleans, and/or functions; the `..` operator is also available in the array representation, using `{recurse: true}`.
Any object that matches the path will be emitted as 'data' (and `pipe`d downstream).
If `path` is empty or null, no 'data' events are emitted.
If you want to have keys emitted, you can prefix your `*` operator with `$`: `obj.$*` - in this case the data passed to the stream is an object with a `key` holding the key and a `value` property holding the data.
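For instance, a minimal sketch of the array form (the `rows`/`doc` data shape here is only illustrative):
``` js
// match any element of `rows`, then any key beginning with "doc"
var stream = JSONStream.parse(['rows', true, /^doc/])
stream.on('data', function (data) {
  console.log('matched:', data)
})
```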
### Examples
query a couchdb view:
``` bash
curl -sS 'localhost:5984/tests/_all_docs?include_docs=true'
```
you will get something like this:
``` js
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
},
]}
```
we are probably most interested in `rows.*.doc`, so we
create a `Stream` that parses the documents out of the feed like this:
``` js
var stream = JSONStream.parse(['rows', true, 'doc']) //rows, ANYTHING, doc
stream.on('data', function(data) {
console.log('received:', data);
});
//emits anything from _before_ the first match
stream.on('header', function (data) {
console.log('header:', data) // => {"total_rows":129,"offset":0}
})
```
awesome!
In case you want the contents of each doc emitted key by key:
``` js
var stream = JSONStream.parse(['rows', true, 'doc', {emitKey: true}]) //rows, ANYTHING, doc, items in docs with keys
stream.on('data', function(data) {
console.log('key:', data.key);
console.log('value:', data.value);
});
```
You can also emit the path:
``` js
var stream = JSONStream.parse(['rows', true, 'doc', {emitPath: true}]) //rows, ANYTHING, doc, items in docs with keys
stream.on('data', function(data) {
console.log('path:', data.path);
console.log('value:', data.value);
});
```
### recursive patterns (..)
`JSONStream.parse('docs..value')`
(or `JSONStream.parse(['docs', {recurse: true}, 'value'])` using an array)
will emit every `value` object that is a child, grand-child, etc. of the
`docs` object. In this example, it will match exactly 5 times at various depth
levels, emitting 0, 1, 2, 3 and 4 as results.
```js
{
"total": 5,
"docs": [
{
"key": {
"value": 0,
"some": "property"
}
},
{"value": 1},
{"value": 2},
{"blbl": [{}, {"a":0, "b":1, "value":3}, 10]},
{"value": 4}
]
}
```
## JSONStream.parse(pattern, map)
provide a function that can be used to map or filter
the JSON output. `map` is passed the value at that node of the pattern;
if `map` returns a non-nullish value (anything but `null` or `undefined`),
that value will be emitted in the stream. If it returns a nullish value,
nothing will be emitted.
`JSONStream` also emits `'header'` and `'footer'` events:
the `'header'` event contains anything in the output that appeared before
the first match, and the `'footer'` event contains anything after the last match.
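A short sketch of both uses, mirroring this package's test suite (the input is assumed to be a top-level JSON array):
``` js
// map: emit each element of the array multiplied by 10
var mapped = JSONStream.parse([true], function (e) { return e * 10 })
// filter: emit only odd elements; returning null drops a value
var odds = JSONStream.parse([true], function (e) { return e % 2 ? e : null })
```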
## JSONStream.stringify(open, sep, close)
Create a writable stream.
You may pass in custom `open`, `close`, and `separator` strings,
but by default `JSONStream.stringify()` will create an array
(with default options `open='[\n', sep='\n,\n', close='\n]\n'`).
If you call `JSONStream.stringify(false)`,
the elements will only be separated by a newline.
If you only write one item this will be valid JSON;
if you write many items,
you can use a `RegExp` to split the output into valid chunks.
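For reference, this is what this package's own `bin.js` does: parse a path given on the command line, then re-stringify the matches as an indented array:
``` js
process.stdin
  .pipe(JSONStream.parse(process.argv[2]))
  .pipe(JSONStream.stringify('[', ',\n', ']\n', 2))
  .pipe(process.stdout)
```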
## JSONStream.stringifyObject(open, sep, close)
Very much like `JSONStream.stringify`,
but creates a writable stream for objects instead of arrays.
Accordingly, `open='{\n', sep='\n,\n', close='\n}\n'`.
When you `.write()` to the stream you must supply an array with `[ key, data ]`
as the first argument.
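A minimal sketch (the key and value here are arbitrary):
``` js
var stringify = JSONStream.stringifyObject()
stringify.pipe(process.stdout)
stringify.write([ 'greeting', { hello: 'world' } ])
stringify.end()
```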
## unix tool
query npm to see all the modules that browserify has ever depended on.
``` bash
curl https://registry.npmjs.org/browserify | JSONStream 'versions.*.dependencies'
```
## numbers
numbers will be emitted as numbers.
huge numbers that cannot be represented in memory as JavaScript numbers will be emitted as strings.
See https://github.com/creationix/jsonparse/commit/044b268f01c4b8f97fb936fc85d3bcfba179e5bb for details.
## Acknowledgements
this module depends on https://github.com/creationix/jsonparse
by Tim Caswell
and also thanks to Florent Jaby for teaching me about parsing with:
https://github.com/Floby/node-json-streams
## license
Dual-licensed under the MIT License or the Apache License, version 2.0


@@ -0,0 +1,41 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
// stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
JSONStream.parse([true]),
es.writeArray(function (err, lines) {
it(lines).has(expected)
console.error('PASSED')
})
)


@@ -0,0 +1,18 @@
var test = require('tape')
var JSONStream = require('../')
var testData = '{"rows":[{"hello":"world"}, {"foo": "bar"}]}'
test('basic parsing', function (t) {
t.plan(2)
var parsed = JSONStream.parse("rows.*")
var parsedKeys = {}
parsed.on('data', function(match) {
parsedKeys[Object.keys(match)[0]] = true
})
parsed.on('end', function() {
t.equal(!!parsedKeys['hello'], true)
t.equal(!!parsedKeys['foo'], true)
})
parsed.write(testData)
parsed.end()
})


@@ -0,0 +1,27 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var JSONStream = require('../');
var server = net.createServer(function(client) {
var parser = JSONStream.parse([]);
parser.on('end', function() {
console.log('close')
console.error('PASSED');
server.close();
});
client.pipe(parser);
var n = 4
client.on('data', function () {
if(--n) return
client.end();
})
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
fs.createReadStream(file).pipe(client).on('data', console.log) //.resume();
});


@@ -0,0 +1,29 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse('rows..rev')
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
for (var i = 0 ; i < expected.rows.length ; i++)
it(parsed[i]).deepEqual(expected.rows[i].value.rev)
console.error('PASSED')
})


@@ -0,0 +1,30 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','depth.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['docs', {recurse: true}, 'value'])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
var expectedValues = [0, [1], {"a": 2}, "3", 4]
it(called).equal(expectedValues.length)
for (var i = 0 ; i < 5 ; i++)
it(parsed[i]).deepEqual(expectedValues[i])
console.error('PASSED')
})


@@ -0,0 +1,44 @@
var JSONStream = require('../')
, stream = require('stream')
, it = require('it-is')
var output = [ [], [] ]
var parser1 = JSONStream.parse(['docs', /./])
parser1.on('data', function(data) {
output[0].push(data)
})
var parser2 = JSONStream.parse(['docs', /./])
parser2.on('data', function(data) {
output[1].push(data)
})
var pending = 2
function onend () {
if (--pending > 0) return
it(output).deepEqual([
[], [{hello: 'world'}]
])
console.error('PASSED')
}
parser1.on('end', onend)
parser2.on('end', onend)
function makeReadableStream() {
var readStream = new stream.Stream()
readStream.readable = true
readStream.write = function (data) { this.emit('data', data) }
readStream.end = function (data) { this.emit('end') }
return readStream
}
var emptyArray = makeReadableStream()
emptyArray.pipe(parser1)
emptyArray.write('{"docs":[]}')
emptyArray.end()
var objectArray = makeReadableStream()
objectArray.pipe(parser2)
objectArray.write('{"docs":[{"hello":"world"}]}')
objectArray.end()


@@ -0,0 +1,45 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','error.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows'])
, called = 0
, headerCalled = 0
, footerCalled = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('header', function (data) {
headerCalled ++
it(data).deepEqual({
error: 'error_code',
message: 'this is an error message'
})
})
parser.on('footer', function (data) {
footerCalled ++
})
parser.on('data', function (data) {
called ++
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(0)
it(headerCalled).equal(1)
it(footerCalled).equal(0)
console.error('PASSED')
})

File diff suppressed because it is too large


@@ -0,0 +1,18 @@
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
},
]}


@@ -0,0 +1,15 @@
{
"total": 5,
"docs": [
{
"key": {
"value": 0,
"some": "property"
}
},
{"value": [1]},
{"value": {"a":2}},
{"blbl": [{}, {"a":0, "b":1, "value":"3"}, 10]},
{"value": 4}
]
}


@@ -0,0 +1 @@
{"error": "error_code", "message": "this is an error message"}


@@ -0,0 +1,19 @@
{"total_rows":129,"offset":0,"rows":[
{ "id":"change1_0.6995461115147918"
, "key":"change1_0.6995461115147918"
, "value":{"rev":"1-e240bae28c7bb3667f02760f6398d508"}
, "doc":{
"_id": "change1_0.6995461115147918"
, "_rev": "1-e240bae28c7bb3667f02760f6398d508","hello":1}
},
{ "id":"change2_0.6995461115147918"
, "key":"change2_0.6995461115147918"
, "value":{"rev":"1-13677d36b98c0c075145bb8975105153"}
, "doc":{
"_id":"change2_0.6995461115147918"
, "_rev":"1-13677d36b98c0c075145bb8975105153"
, "hello":2
}
}
],
"foo": {"bar": "baz"}}


@@ -0,0 +1,39 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
function fn (s) {
return !isNaN(parseInt(s, 10))
}
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', fn])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it.has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})


@@ -0,0 +1,135 @@
return // don't run this test for now since tape is weird and broken on 0.10
var fs = require('fs')
var JSONStream = require('../')
var file = process.argv[2] || '/tmp/JSONStream-test-large.json'
var size = Number(process.argv[3] || 100000)
var tape = require('tape')
// if (process.title !== 'browser') {
tape('out of mem', function (t) {
t.plan(1)
//////////////////////////////////////////////////////
// Produces a random number between arg1 and arg2
//////////////////////////////////////////////////////
var randomNumber = function (min, max) {
var number = Math.floor(Math.random() * (max - min + 1) + min);
return number;
};
//////////////////////////////////////////////////////
// Produces a random string of a length between arg1 and arg2
//////////////////////////////////////////////////////
var randomString = function (min, max) {
// add several spaces to increase chances of creating 'words'
var chars = ' 0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ';
var result = '';
var randomLength = randomNumber(min, max);
for (var i = randomLength; i > 0; --i) {
result += chars[Math.round(Math.random() * (chars.length - 1))];
}
return result;
};
//////////////////////////////////////////////////////
// Produces a random JSON document, as a string
//////////////////////////////////////////////////////
var randomJsonDoc = function () {
var doc = {
"CrashOccurenceID": randomNumber(10000, 50000),
"CrashID": randomNumber(1000, 10000),
"SiteName": randomString(10, 25),
"MachineName": randomString(10, 25),
"Date": randomString(26, 26),
"ProcessDuration": randomString(18, 18),
"ThreadIdentityName": null,
"WindowsIdentityName": randomString(15, 40),
"OperatingSystemName": randomString(35, 65),
"DetailedExceptionInformation": randomString(100, 800)
};
doc = JSON.stringify(doc);
doc = doc.replace(/\,/g, ',\n'); // add new lines after each attribute
return doc;
};
//////////////////////////////////////////////////////
// generates test data
//////////////////////////////////////////////////////
var generateTestData = function (cb) {
console.log('generating large data file...');
var stream = fs.createWriteStream(file, {
encoding: 'utf8'
});
var i = 0;
var max = size;
var writing = false
var split = ',\n';
var doc = randomJsonDoc();
stream.write('[');
function write () {
if(writing) return
writing = true
while(++i < max) {
if(Math.random() < 0.001)
console.log('generate..', i + ' / ' + size)
if(!stream.write(doc + split)) {
writing = false
return stream.once('drain', write)
}
}
stream.write(doc + ']')
stream.end();
console.log('END')
}
write()
stream.on('close', cb)
};
//////////////////////////////////////////////////////
// Shows that parsing 100000 instances using JSONStream fails
//
// After several seconds, you will get this crash
// FATAL ERROR: JS Allocation failed - process out of memory
//////////////////////////////////////////////////////
var testJSONStreamParse_causesOutOfMem = function (done) {
var items = 0
console.log('parsing data files using JSONStream...');
var parser = JSONStream.parse([true]);
var stream = fs.createReadStream(file);
stream.pipe(parser);
parser.on('data', function (data) {
items++
if(Math.random() < 0.01) console.log(items, '...')
});
parser.on('end', function () {
t.equal(items, size)
});
};
//////////////////////////////////////////////////////
// main
//////////////////////////////////////////////////////
fs.stat(file, function (err, stat) {
console.log(stat)
if(err)
generateTestData(testJSONStreamParse_causesOutOfMem);
else
testJSONStreamParse_causesOutOfMem()
})
})
// }


@@ -0,0 +1,55 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','header_footer.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/])
, called = 0
, headerCalled = 0
, footerCalled = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('header', function (data) {
headerCalled ++
it(data).deepEqual({
total_rows: 129,
offset: 0
})
})
parser.on('footer', function (data) {
footerCalled ++
it(data).deepEqual({
foo: { bar: 'baz' }
})
})
parser.on('data', function (data) {
called ++
it.has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
it(headerCalled).equal(1)
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(headerCalled).equal(1)
it(footerCalled).equal(1)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})


@@ -0,0 +1,34 @@
var JSONStream = require('../');
var test = require('tape')
test('#66', function (t) {
var error = 0;
var stream = JSONStream
.parse()
.on('error', function (err) {
t.ok(err);
error++;
})
.on('end', function () {
t.ok(error === 1);
t.end();
});
stream.write('["foo":bar[');
stream.end();
});
test('#81 - failure to parse nested objects', function (t) {
var stream = JSONStream
.parse('.bar.foo')
.on('error', function (err) {
t.error(err);
})
.on('end', function () {
t.end();
});
stream.write('{"bar":{"foo":"baz"}}');
stream.end();
});


@@ -0,0 +1,105 @@
var test = require('tape');
var fs = require ('fs');
var join = require('path').join;
var couch_sample_file = join(__dirname, 'fixtures','couch_sample.json');
var JSONStream = require('../');
var fixture = {
obj: {
one: 1,
two: 2,
three: 3
}
};
function assertFixtureKeys(stream, t) {
var keys = [];
var values = [];
stream.on('data', function(data) {
keys.push(data.key);
values.push(data.value);
});
stream.on('end', function() {
t.deepEqual(keys, ['one', 'two', 'three']);
t.deepEqual(values, [1,2,3]);
t.end();
});
stream.write(JSON.stringify(fixture));
stream.end();
}
test('keys via string', function(t) {
var stream = JSONStream.parse('obj.$*');
assertFixtureKeys(stream, t);
});
test('keys via array', function(t) {
var stream = JSONStream.parse(['obj',{emitKey: true}]);
assertFixtureKeys(stream, t);
});
test('path via array', function(t) {
var stream = JSONStream.parse(['obj',{emitPath: true}]);
var paths = [];
var values = [];
stream.on('data', function(data) {
console.log(JSON.stringify(data));
paths.push(data.path);
values.push(data.value);
});
stream.on('end', function() {
t.deepEqual(paths, [['obj', 'one'], ['obj', 'two'], ['obj', 'three']]);
t.deepEqual(values, [1,2,3]);
t.end();
});
stream.write(JSON.stringify(fixture));
stream.end();
});
test('advanced keys', function(t) {
var advanced = fs.readFileSync(couch_sample_file);
var stream = JSONStream.parse(['rows', true, 'doc', {emitKey: true}]);
var keys = [];
var values = [];
stream.on('data', function(data) {
keys.push(data.key);
values.push(data.value);
});
stream.on('end', function() {
t.deepEqual(keys, [
'_id', '_rev', 'hello',
'_id', '_rev', 'hello'
]);
t.deepEqual(values, [
"change1_0.6995461115147918", "1-e240bae28c7bb3667f02760f6398d508", 1,
"change2_0.6995461115147918", "1-13677d36b98c0c075145bb8975105153", 2
]);
t.end();
});
stream.write(advanced);
stream.end();
});
test('parent keys', function(t) {
var stream = JSONStream.parse('$*');
var d = null;
stream.on('data', function(data) {
if(d) t.fail('should only be called once');
d = data;
});
stream.on('end', function() {
t.deepEqual(d,{
key: 'obj',
value: fixture.obj
});
t.end();
});
stream.write(JSON.stringify(fixture));
stream.end();
})


@@ -0,0 +1,40 @@
var test = require('tape')
var JSONStream = require('../')
test('map function', function (t) {
var actual = []
stream = JSONStream.parse([true], function (e) { return e*10 })
stream.on('data', function (v) { actual.push(v)})
stream.on('end', function () {
t.deepEqual(actual, [10,20,30,40,50,60])
t.end()
})
stream.write(JSON.stringify([1,2,3,4,5,6], null, 2))
stream.end()
})
test('filter function', function (t) {
var actual = []
stream = JSONStream
.parse([true], function (e) { return e%2 ? e : null})
.on('data', function (v) { actual.push(v)})
.on('end', function () {
t.deepEqual(actual, [1,3,5])
t.end()
})
stream.write(JSON.stringify([1,2,3,4,5,6], null, 2))
stream.end()
})


@@ -0,0 +1,36 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var it = require('it-is');
var JSONStream = require('../');
var str = fs.readFileSync(file);
var datas = {}
var server = net.createServer(function(client) {
var data_calls = 0;
var parser = JSONStream.parse(['rows', true, 'key']);
parser.on('data', function(data) {
++ data_calls;
datas[data] = (datas[data] || 0) + 1
it(data).typeof('string')
});
parser.on('end', function() {
console.log('END')
var min = Infinity
for (var d in datas)
min = min > datas[d] ? datas[d] : min
it(min).equal(3);
server.close();
});
client.pipe(parser);
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
var msgs = str + ' ' + str + '\n\n' + str
client.end(msgs);
});


@@ -0,0 +1,29 @@
var fs = require ('fs');
var net = require('net');
var join = require('path').join;
var file = join(__dirname, 'fixtures','all_npm.json');
var it = require('it-is');
var JSONStream = require('../');
var str = fs.readFileSync(file);
var server = net.createServer(function(client) {
var data_calls = 0;
var parser = JSONStream.parse();
parser.on('error', function(err) {
console.log(err);
server.close();
});
parser.on('end', function() {
console.log('END');
server.close();
});
client.pipe(parser);
});
server.listen(9999);
var client = net.connect({ port : 9999 }, function() {
var msgs = str + '}';
client.end(msgs);
});


@@ -0,0 +1,28 @@
var JSONStream = require('../')
var data = [
{ID: 1, optional: null},
{ID: 2, optional: null},
{ID: 3, optional: 20},
{ID: 4, optional: null},
{ID: 5, optional: 'hello'},
{ID: 6, optional: null}
]
var test = require('tape')
test ('null properties', function (t) {
var actual = []
var stream =
JSONStream.parse('*.optional')
.on('data', function (v) { actual.push(v) })
.on('end', function () {
t.deepEqual(actual, [20, 'hello'])
t.end()
})
stream.write(JSON.stringify(data, null, 2))
stream.end()
})


@@ -0,0 +1,25 @@
/*
sometimes jsonparse changes numbers slightly.
*/
var r = Math.random()
, Parser = require('jsonparse')
, p = new Parser()
, assert = require('assert')
, times = 20
while (times --) {
assert.equal(JSON.parse(JSON.stringify(r)), r, 'core JSON')
p.onValue = function (v) {
console.error('parsed', v)
assert.equal(v,r)
}
console.error('correct', r)
p.write (new Buffer(JSON.stringify([r])))
}


@@ -0,0 +1,41 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
//JSONStream.parse([/./]),
es.writeArray(function (err, lines) {
it(JSON.parse(lines.join(''))).deepEqual(expected)
console.error('PASSED')
})
)


@@ -0,0 +1,47 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
, es = require('event-stream')
, pending = 10
, passed = true
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
for (var ix = 0; ix < pending; ix++) (function (count) {
var expected = {}
, stringify = JSONStream.stringifyObject()
es.connect(
stringify,
es.writeArray(function (err, lines) {
it(JSON.parse(lines.join(''))).deepEqual(expected)
if (--pending === 0) {
console.error('PASSED')
}
})
)
while (count --) {
var key = Math.random().toString(16).slice(2)
expected[key] = randomObj()
stringify.write([ key, expected[key] ])
}
process.nextTick(function () {
stringify.end()
})
})(ix)


@@ -0,0 +1,35 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse(['rows', /\d+/ /*, 'value'*/])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it.has({
id: it.typeof('string'),
value: {rev: it.typeof('string')},
key:it.typeof('string')
})
parsed.push(data)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(expected.rows.length)
it(parsed).deepEqual(expected.rows)
console.error('PASSED')
})


@@ -0,0 +1,29 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, '..','package.json')
, JSONStream = require('../')
, it = require('it-is')
var expected = JSON.parse(fs.readFileSync(file))
, parser = JSONStream.parse([])
, called = 0
, ended = false
, parsed = []
fs.createReadStream(file).pipe(parser)
parser.on('data', function (data) {
called ++
it(data).deepEqual(expected)
})
parser.on('end', function () {
ended = true
})
process.on('exit', function () {
it(called).equal(1)
console.error('PASSED')
})


@@ -0,0 +1,41 @@
var fs = require ('fs')
, join = require('path').join
, file = join(__dirname, 'fixtures','all_npm.json')
, JSONStream = require('../')
, it = require('it-is').style('colour')
function randomObj () {
return (
Math.random () < 0.4
? {hello: 'eonuhckmqjk',
whatever: 236515,
lies: true,
nothing: [null],
// stuff: [Math.random(),Math.random(),Math.random()]
}
: ['AOREC', 'reoubaor', {ouec: 62642}, [[[], {}, 53]]]
)
}
var expected = []
, stringify = JSONStream.stringify()
, es = require('event-stream')
, stringified = ''
, called = 0
, count = 10
, ended = false
while (count --)
expected.push(randomObj())
es.connect(
es.readArray(expected),
stringify,
JSONStream.parse([/./]),
es.writeArray(function (err, lines) {
it(lines).has(expected)
console.error('PASSED')
})
)

website/node_modules/npm/node_modules/abbrev/LICENSE generated vendored Normal file

@@ -0,0 +1,46 @@
This software is dual-licensed under the ISC and MIT licenses.
You may use this software under EITHER of the following licenses.
----------
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
----------
Copyright Isaac Z. Schlueter and Contributors
All rights reserved.
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

website/node_modules/npm/node_modules/abbrev/README.md generated vendored Normal file

@@ -0,0 +1,23 @@
# abbrev-js
Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev).
Usage:
var abbrev = require("abbrev");
abbrev("foo", "fool", "folding", "flop");
// returns:
{ fl: 'flop'
, flo: 'flop'
, flop: 'flop'
, fol: 'folding'
, fold: 'folding'
, foldi: 'folding'
, foldin: 'folding'
, folding: 'folding'
, foo: 'foo'
, fool: 'fool'
}
This is handy for command-line scripts, or other cases where you want to be able to accept shorthands.
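For example, a small sketch of resolving a user-typed shorthand (the command names here are made up):
var commands = abbrev("start", "stop", "status");
commands["star"]; // 'start'
commands["sta"]; // undefined -- ambiguous between "start" and "status"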

website/node_modules/npm/node_modules/abbrev/abbrev.js generated vendored Normal file

@@ -0,0 +1,61 @@
module.exports = exports = abbrev.abbrev = abbrev
abbrev.monkeyPatch = monkeyPatch
function monkeyPatch () {
Object.defineProperty(Array.prototype, 'abbrev', {
value: function () { return abbrev(this) },
enumerable: false, configurable: true, writable: true
})
Object.defineProperty(Object.prototype, 'abbrev', {
value: function () { return abbrev(Object.keys(this)) },
enumerable: false, configurable: true, writable: true
})
}
function abbrev (list) {
if (arguments.length !== 1 || !Array.isArray(list)) {
list = Array.prototype.slice.call(arguments, 0)
}
for (var i = 0, l = list.length, args = [] ; i < l ; i ++) {
args[i] = typeof list[i] === "string" ? list[i] : String(list[i])
}
// sort them lexicographically, so that they're next to their nearest kin
args = args.sort(lexSort)
// walk through each, seeing how much it has in common with the next and previous
var abbrevs = {}
, prev = ""
for (var i = 0, l = args.length ; i < l ; i ++) {
var current = args[i]
, next = args[i + 1] || ""
, nextMatches = true
, prevMatches = true
if (current === next) continue
for (var j = 0, cl = current.length ; j < cl ; j ++) {
var curChar = current.charAt(j)
nextMatches = nextMatches && curChar === next.charAt(j)
prevMatches = prevMatches && curChar === prev.charAt(j)
if (!nextMatches && !prevMatches) {
j ++
break
}
}
prev = current
if (j === cl) {
abbrevs[current] = current
continue
}
for (var a = current.substr(0, j) ; j <= cl ; j ++) {
abbrevs[a] = current
a += current.charAt(j)
}
}
return abbrevs
}
function lexSort (a, b) {
return a === b ? 0 : a > b ? 1 : -1
}


@@ -0,0 +1,61 @@
{
"_args": [
[
"abbrev@1.1.1",
"/Users/rebecca/code/npm"
]
],
"_from": "abbrev@1.1.1",
"_id": "abbrev@1.1.1",
"_inBundle": true,
"_integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==",
"_location": "/npm/abbrev",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "abbrev@1.1.1",
"name": "abbrev",
"escapedName": "abbrev",
"rawSpec": "1.1.1",
"saveSpec": null,
"fetchSpec": "1.1.1"
},
"_requiredBy": [
"/npm",
"/npm/node-gyp/nopt",
"/npm/nopt"
],
"_resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz",
"_spec": "1.1.1",
"_where": "/Users/rebecca/code/npm",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me"
},
"bugs": {
"url": "https://github.com/isaacs/abbrev-js/issues"
},
"description": "Like ruby's abbrev module, but in js",
"devDependencies": {
"tap": "^10.1"
},
"files": [
"abbrev.js"
],
"homepage": "https://github.com/isaacs/abbrev-js#readme",
"license": "ISC",
"main": "abbrev.js",
"name": "abbrev",
"repository": {
"type": "git",
"url": "git+ssh://git@github.com/isaacs/abbrev-js.git"
},
"scripts": {
"postpublish": "git push origin --all; git push origin --tags",
"postversion": "npm publish",
"preversion": "npm test",
"test": "tap test.js --100"
},
"version": "1.1.1"
}


@@ -0,0 +1,23 @@
sudo: false
language: node_js
node_js:
- "4"
- "5"
- "6"
- "7"
- "8"
- "9"
install:
- PATH="`npm bin`:`npm bin -g`:$PATH"
# Install dependencies and build
- npm install
script:
# Output useful info for debugging
- node --version
- npm --version
# Run tests
- npm test


@@ -0,0 +1,113 @@
4.2.0 / 2018-01-15
==================
* Add support for returning an `http.Agent` instance
* Optimize promisifying logic
* Set `timeout` to null for proper cleanup
* Remove Node.js <= 0.11.3 special-casing from test case
4.1.2 / 2017-11-20
==================
* test Node 9 on Travis
* ensure that `https.get()` uses the patched `https.request()`
4.1.1 / 2017-07-20
==================
* Correct `https.request()` with a String (#9)
4.1.0 / 2017-06-26
==================
* mix in Agent options into Request options
* throw when nothing is returned from agent-base callback
* do not modify the options object for https requests
4.0.1 / 2017-06-13
==================
* add `this` context tests and fixes
4.0.0 / 2017-06-06
==================
* drop support for Node.js < 4
* drop old versions of Node.js from Travis-CI
* specify Node.js >= 4.0.0 in `engines.node`
* remove more old code
* remove "extend" dependency
* remove "semver" dependency
* make the Promise logic a bit cleaner
* add async function pseudo-example to README
* use direct return in README example
3.0.0 / 2017-06-02
==================
* drop support for Node.js v0.8 and v0.10
* add support for async, Promises, and direct return
* add a couple `options` test cases
* implement a `"timeout"` option
* rename main file to `index.js`
* test Node 8 on Travis
2.1.1 / 2017-05-30
==================
* Revert [`fe2162e`](https://github.com/TooTallNate/node-agent-base/commit/fe2162e0ba18123f5b301cba4de1e9dd74e437cd) and [`270bdc9`](https://github.com/TooTallNate/node-agent-base/commit/270bdc92eb8e3bd0444d1e5266e8e9390aeb3095) (fixes #7)
2.1.0 / 2017-05-26
==================
* unref is not supported for node < 0.9.1 (@pi0)
* add tests to dangling socket (@pi0)
* check unref() is supported (@pi0)
* fix dangling sockets problem (@pi0)
* add basic "ws" module tests
* make `Agent` be subclassable
* turn `addRequest()` into a named function
* test: Node.js v4 likes to call `cork` on the stream (#3, @tomhughes)
* travis: test node v4, v5, v6 and v7
2.0.1 / 2015-09-10
==================
* package: update "semver" to v5.0.1 for WebPack (#1, @vhpoet)
2.0.0 / 2015-07-10
==================
* refactor to patch Node.js core for more consistent `opts` values
* ensure that HTTP(s) default port numbers are always given
* test: use ssl-cert-snakeoil SSL certs
* test: add tests for arbitrary options
* README: add API section
* README: make the Agent HTTP/HTTPS generic in the example
* README: use SVG for Travis-CI badge
1.0.2 / 2015-06-27
==================
* agent: set `req._hadError` to true after emitting "error"
* package: update "mocha" to v2
* test: add artificial HTTP GET request test
* test: add artificial data events test
* test: fix artifical GET response test on node > v0.11.3
* test: use a real timeout for the async error test
1.0.1 / 2013-09-09
==================
* Fix passing an "error" object to the callback function on the first tick
1.0.0 / 2013-09-09
==================
* New API: now you pass a callback function directly
0.0.1 / 2013-07-09
==================
* Initial release


@@ -0,0 +1,145 @@
agent-base
==========
### Turn a function into an [`http.Agent`][http.Agent] instance
[![Build Status](https://travis-ci.org/TooTallNate/node-agent-base.svg?branch=master)](https://travis-ci.org/TooTallNate/node-agent-base)
This module provides an `http.Agent` generator. That is, you pass it an async
callback function, and it returns a new `http.Agent` instance that will invoke the
given callback function when sending outbound HTTP requests.
#### Some subclasses:
Here's some more interesting uses of `agent-base`.
Send a pull request to list yours!
* [`http-proxy-agent`][http-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTP endpoints
* [`https-proxy-agent`][https-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTPS endpoints
* [`pac-proxy-agent`][pac-proxy-agent]: A PAC file proxy `http.Agent` implementation for HTTP and HTTPS
* [`socks-proxy-agent`][socks-proxy-agent]: A SOCKS (v4a) proxy `http.Agent` implementation for HTTP and HTTPS
Installation
------------
Install with `npm`:
``` bash
$ npm install agent-base
```
Example
-------
Here's a minimal example that creates a new `net.Socket` connection to the server
for every HTTP request (i.e. the equivalent of the `agent: false` option):
```js
var net = require('net');
var tls = require('tls');
var url = require('url');
var http = require('http');
var agent = require('agent-base');
var endpoint = 'http://nodejs.org/api/';
var parsed = url.parse(endpoint);
// This is the important part!
parsed.agent = agent(function (req, opts) {
var socket;
// `secureEndpoint` is true when using the https module
if (opts.secureEndpoint) {
socket = tls.connect(opts);
} else {
socket = net.connect(opts);
}
return socket;
});
// Everything else works just like normal...
http.get(parsed, function (res) {
console.log('"response" event!', res.headers);
res.pipe(process.stdout);
});
```
Returning a Promise or using an `async` function is also supported:
```js
agent(async function (req, opts) {
await sleep(1000);
// etc…
});
```
Return another `http.Agent` instance to "pass through" the responsibility
for that HTTP request to that agent:
```js
agent(function (req, opts) {
return opts.secureEndpoint ? https.globalAgent : http.globalAgent;
});
```
API
---
## Agent(Function callback[, Object options]) → [http.Agent][]
Creates a base `http.Agent` that will execute the callback function `callback`
for every HTTP request that it is used as the `agent` for. The callback function
is responsible for creating a `stream.Duplex` instance of some kind that will be
used as the underlying socket in the HTTP request.
The `options` object accepts the following properties:
* `timeout` - Number - Timeout for the `callback()` function in milliseconds. Defaults to Infinity (optional).
The callback function should have the following signature:
### callback(http.ClientRequest req, Object options, Function cb) → undefined
The ClientRequest `req` can be accessed to read request headers, the path, etc.
The `options` object contains the options passed to the
`http.request()`/`https.request()` function call, and is formatted to be
directly passed to `net.connect()`/`tls.connect()`, or however else you want
a Socket to be created. Pass the created socket to the callback function
`cb`, and the HTTP request will proceed.
If the `https` module is used to invoke the HTTP request, then the
`secureEndpoint` property on `options` _will be set to `true`_.
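For illustration only (this sketch is not part of the original README), here is the
callback-style signature combined with the `timeout` option; the target host, port,
and the 5-second limit are arbitrary placeholder values:
```js
var net = require('net');
var tls = require('tls');
var http = require('http');
var agent = require('agent-base');

// Callback-style agent: hand the created socket to `cb(null, socket)`, or an
// Error as the first argument. If neither happens within 5 seconds, the
// request emits an "ETIMEOUT" error.
var myAgent = agent(function (req, opts, cb) {
  var socket = opts.secureEndpoint ? tls.connect(opts) : net.connect(opts);
  cb(null, socket);
}, { timeout: 5000 });

http.get({ host: '127.0.0.1', port: 8080, path: '/', agent: myAgent }, function (res) {
  console.log('status code:', res.statusCode);
});
```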
License
-------
(The MIT License)
Copyright (c) 2013 Nathan Rajlich &lt;nathan@tootallnate.net&gt;
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
[http-proxy-agent]: https://github.com/TooTallNate/node-http-proxy-agent
[https-proxy-agent]: https://github.com/TooTallNate/node-https-proxy-agent
[pac-proxy-agent]: https://github.com/TooTallNate/node-pac-proxy-agent
[socks-proxy-agent]: https://github.com/TooTallNate/node-socks-proxy-agent
[http.Agent]: https://nodejs.org/api/http.html#http_class_http_agent

View File

@@ -0,0 +1,160 @@
'use strict';
require('./patch-core');
const inherits = require('util').inherits;
const promisify = require('es6-promisify');
const EventEmitter = require('events').EventEmitter;
module.exports = Agent;
function isAgent(v) {
return v && typeof v.addRequest === 'function';
}
/**
* Base `http.Agent` implementation.
* No pooling/keep-alive is implemented by default.
*
* @param {Function} callback
* @api public
*/
function Agent(callback, _opts) {
if (!(this instanceof Agent)) {
return new Agent(callback, _opts);
}
EventEmitter.call(this);
// The callback gets promisified lazily if it has 3 parameters
// (i.e. it accepts a callback function as its last argument)
this._promisifiedCallback = false;
let opts = _opts;
if ('function' === typeof callback) {
this.callback = callback;
} else if (callback) {
opts = callback;
}
// timeout for the socket to be returned from the callback
this.timeout = (opts && opts.timeout) || null;
this.options = opts;
}
inherits(Agent, EventEmitter);
/**
* Override this function in your subclass!
*/
Agent.prototype.callback = function callback(req, opts) {
throw new Error(
'"agent-base" has no default implementation, you must subclass and override `callback()`'
);
};
/**
* Called by node-core's "_http_client.js" module when creating
* a new HTTP request with this Agent instance.
*
* @api public
*/
Agent.prototype.addRequest = function addRequest(req, _opts) {
const ownOpts = Object.assign({}, _opts);
// Set default `host` for HTTP to localhost
if (null == ownOpts.host) {
ownOpts.host = 'localhost';
}
// Set default `port` for HTTP if none was explicitly specified
if (null == ownOpts.port) {
ownOpts.port = ownOpts.secureEndpoint ? 443 : 80;
}
const opts = Object.assign({}, this.options, ownOpts);
if (opts.host && opts.path) {
// If both a `host` and `path` are specified then it's most likely the
// result of a `url.parse()` call... we need to remove the `path` portion so
// that `net.connect()` doesn't attempt to open that as a unix socket file.
delete opts.path;
}
delete opts.agent;
delete opts.hostname;
delete opts._defaultAgent;
delete opts.defaultPort;
delete opts.createConnection;
// Hint to use "Connection: close"
// XXX: non-documented `http` module API :(
req._last = true;
req.shouldKeepAlive = false;
// Create the `stream.Duplex` instance
let timeout;
let timedOut = false;
const timeoutMs = this.timeout;
function onerror(err) {
if (req._hadError) return;
req.emit('error', err);
// For safety. Some additional errors might fire later on,
// and we need to make sure we don't double-fire the "error" event.
req._hadError = true;
}
function ontimeout() {
timeout = null;
timedOut = true;
const err = new Error(
'A "socket" was not created for HTTP request before ' + timeoutMs + 'ms'
);
err.code = 'ETIMEOUT';
onerror(err);
}
function callbackError(err) {
if (timedOut) return;
if (timeout != null) {
clearTimeout(timeout);
timeout = null;
}
onerror(err);
}
function onsocket(socket) {
if (timedOut) return;
if (timeout != null) {
clearTimeout(timeout);
timeout = null;
}
if (isAgent(socket)) {
// `socket` is actually an http.Agent instance, so relinquish
// responsibility for this `req` to the Agent from here on
socket.addRequest(req, opts);
} else if (socket) {
req.onSocket(socket);
} else {
const err = new Error(
`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``
);
onerror(err);
}
}
if (!this._promisifiedCallback && this.callback.length >= 3) {
// Legacy callback function - convert to a Promise
this.callback = promisify(this.callback, this);
this._promisifiedCallback = true;
}
if (timeoutMs > 0) {
timeout = setTimeout(ontimeout, timeoutMs);
}
try {
Promise.resolve(this.callback(req, opts)).then(onsocket, callbackError);
} catch (err) {
Promise.reject(err).catch(callbackError);
}
};

View File

@@ -0,0 +1,68 @@
{
"_from": "agent-base@4",
"_id": "agent-base@4.2.0",
"_inBundle": true,
"_integrity": "sha512-c+R/U5X+2zz2+UCrCFv6odQzJdoqI+YecuhnAJLa1zYaMc13zPfwMwZrr91Pd1DYNo/yPRbiM4WVf9whgwFsIg==",
"_location": "/npm/agent-base",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "agent-base@4",
"name": "agent-base",
"escapedName": "agent-base",
"rawSpec": "4",
"saveSpec": null,
"fetchSpec": "4"
},
"_requiredBy": [
"/npm/http-proxy-agent",
"/npm/https-proxy-agent",
"/npm/npm-registry-fetch/socks-proxy-agent",
"/npm/socks-proxy-agent"
],
"_resolved": "https://registry.npmjs.org/agent-base/-/agent-base-4.2.0.tgz",
"_shasum": "9838b5c3392b962bad031e6a4c5e1024abec45ce",
"_spec": "agent-base@4",
"_where": "/Users/rebecca/code/npm/node_modules/http-proxy-agent",
"author": {
"name": "Nathan Rajlich",
"email": "nathan@tootallnate.net",
"url": "http://n8.io/"
},
"bugs": {
"url": "https://github.com/TooTallNate/node-agent-base/issues"
},
"bundleDependencies": false,
"dependencies": {
"es6-promisify": "^5.0.0"
},
"deprecated": false,
"description": "Turn a function into an `http.Agent` instance",
"devDependencies": {
"mocha": "^3.4.2",
"ws": "^3.0.0"
},
"engines": {
"node": ">= 4.0.0"
},
"homepage": "https://github.com/TooTallNate/node-agent-base#readme",
"keywords": [
"http",
"agent",
"base",
"barebones",
"https"
],
"license": "MIT",
"main": "./index.js",
"name": "agent-base",
"repository": {
"type": "git",
"url": "git://github.com/TooTallNate/node-agent-base.git"
},
"scripts": {
"test": "mocha --reporter spec"
},
"version": "4.2.0"
}

View File

@@ -0,0 +1,37 @@
'use strict';
const url = require('url');
const https = require('https');
/**
* This currently needs to be applied to all Node.js versions
* in order to determine if the `req` is an HTTP or HTTPS request.
*
* There is currently no PR attempting to move this property upstream.
*/
https.request = (function(request) {
return function(_options, cb) {
let options;
if (typeof _options === 'string') {
options = url.parse(_options);
} else {
options = Object.assign({}, _options);
}
if (null == options.port) {
options.port = 443;
}
options.secureEndpoint = true;
return request.call(https, options, cb);
};
})(https.request);
/**
* This is needed for Node.js >= 9.0.0 to make sure `https.get()` uses the
* patched `https.request()`.
*
* Ref: https://github.com/nodejs/node/commit/5118f31
*/
https.get = function(options, cb) {
const req = https.request(options, cb);
req.end();
return req;
};

View File

@@ -0,0 +1,15 @@
-----BEGIN RSA PRIVATE KEY-----
MIICWwIBAAKBgQCzURxIqzer0ACAbX/lHdsn4Gd9PLKrf7EeDYfIdV0HZKPD8WDr
bBx2/fBu0OW2sjnzv/SVZbJ0DAuPE/p0+eT0qb2qC10iz9iTD7ribd7gxhirVb8y
b3fBjXsxc8V8p4Ny1LcvNSqCjwUbJqdRogfoJeTiqPM58z5sNzuv5iq7iwIDAQAB
AoGAPMQy4olrP0UotlzlJ36bowLP70ffgHCwU+/f4NWs5fF78c3du0oSx1w820Dd
Z7E0JF8bgnlJJTxjumPZz0RUCugrEHBKJmzEz3cxF5E3+7NvteZcjKn9D67RrM5x
1/uSZ9cqKE9cYvY4fSuHx18diyZ4axR/wB1Pea2utjjDM+ECQQDb9ZbmmaWMiRpQ
5Up+loxP7BZNPsEVsm+DVJmEFbaFgGfncWBqSIqnPNjMwTwj0OigTwCAEGPkfRVW
T0pbYWCxAkEA0LK7SCTwzyDmhASUalk0x+3uCAA6ryFdwJf/wd8TRAvVOmkTEldX
uJ7ldLvfrONYO3v56uKTU/SoNdZYzKtO+wJAX2KM4ctXYy5BXztPpr2acz4qHa1N
Bh+vBAC34fOYhyQ76r3b1btHhWZ5jbFuZwm9F2erC94Ps5IaoqcX07DSwQJAPKGw
h2U0EPkd/3zVIZCJJQya+vgWFIs9EZcXVtvYXQyTBkVApTN66MhBIYjzkub5205J
bVQmOV37AKklY1DhwQJAA1wos0cYxro02edzatxd0DIR2r4qqOqLkw6BhYHhq6HJ
ZvIcQkHqdSXzdETFc01I1znDGGIrJHcnvKWgBPoEUg==
-----END RSA PRIVATE KEY-----

View File

@@ -0,0 +1,12 @@
-----BEGIN CERTIFICATE-----
MIIB1TCCAT4CCQDV5mPlzm9+izANBgkqhkiG9w0BAQUFADAvMS0wKwYDVQQDEyQ3
NTI3YmQ3Ny1hYjNlLTQ3NGItYWNlNy1lZWQ2MDUzOTMxZTcwHhcNMTUwNzA2MjI0
NTA3WhcNMjUwNzAzMjI0NTA3WjAvMS0wKwYDVQQDEyQ3NTI3YmQ3Ny1hYjNlLTQ3
NGItYWNlNy1lZWQ2MDUzOTMxZTcwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGB
ALNRHEirN6vQAIBtf+Ud2yfgZ308sqt/sR4Nh8h1XQdko8PxYOtsHHb98G7Q5bay
OfO/9JVlsnQMC48T+nT55PSpvaoLXSLP2JMPuuJt3uDGGKtVvzJvd8GNezFzxXyn
g3LUty81KoKPBRsmp1GiB+gl5OKo8znzPmw3O6/mKruLAgMBAAEwDQYJKoZIhvcN
AQEFBQADgYEACzoHUF8UV2Z6541Q2wKEA0UFUzmUjf/E1XwBO+1P15ZZ64uw34B4
1RwMPtAo9RY/PmICTWtNxWGxkzwb2JtDWtnxVER/lF8k2XcXPE76fxTHJF/BKk9J
QU8OTD1dd9gHCBviQB9TqntRZ5X7axjtuWjb2umY+owBYzAHZkp1HKI=
-----END CERTIFICATE-----

View File

@@ -0,0 +1,673 @@
/**
* Module dependencies.
*/
var fs = require('fs');
var url = require('url');
var net = require('net');
var tls = require('tls');
var http = require('http');
var https = require('https');
var WebSocket = require('ws');
var assert = require('assert');
var events = require('events');
var inherits = require('util').inherits;
var Agent = require('../');
var PassthroughAgent = Agent(function(req, opts) {
return opts.secureEndpoint ? https.globalAgent : http.globalAgent;
});
describe('Agent', function() {
describe('subclass', function() {
it('should be subclassable', function(done) {
function MyAgent() {
Agent.call(this);
}
inherits(MyAgent, Agent);
MyAgent.prototype.callback = function(req, opts, fn) {
assert.equal(req.path, '/foo');
assert.equal(req.getHeader('host'), '127.0.0.1:1234');
assert.equal(opts.secureEndpoint, true);
done();
};
var info = url.parse('https://127.0.0.1:1234/foo');
info.agent = new MyAgent();
https.get(info);
});
});
describe('options', function() {
it('should support an options Object as first argument', function() {
var agent = new Agent({ timeout: 1000 });
assert.equal(1000, agent.timeout);
});
it('should support an options Object as second argument', function() {
var agent = new Agent(function() {}, { timeout: 1000 });
assert.equal(1000, agent.timeout);
});
it('should be mixed in with HTTP request options', function(done) {
var agent = new Agent({
host: 'my-proxy.com',
port: 3128,
foo: 'bar'
});
agent.callback = function(req, opts, fn) {
assert.equal('bar', opts.foo);
assert.equal('a', opts.b);
// `host` and `port` are special-cases, and should always be
// overwritten in the request `opts` inside the agent-base callback
assert.equal('localhost', opts.host);
assert.equal(80, opts.port);
done();
};
var opts = {
b: 'a',
agent: agent
};
http.get(opts);
});
});
describe('`this` context', function() {
it('should be the Agent instance', function(done) {
var called = false;
var agent = new Agent();
agent.callback = function() {
called = true;
assert.equal(this, agent);
};
var info = url.parse('http://127.0.0.1/foo');
info.agent = agent;
var req = http.get(info);
req.on('error', function(err) {
assert(/no Duplex stream was returned/.test(err.message));
done();
});
});
it('should be the Agent instance with callback signature', function(done) {
var called = false;
var agent = new Agent();
agent.callback = function(req, opts, fn) {
called = true;
assert.equal(this, agent);
fn();
};
var info = url.parse('http://127.0.0.1/foo');
info.agent = agent;
var req = http.get(info);
req.on('error', function(err) {
assert(/no Duplex stream was returned/.test(err.message));
done();
});
});
});
describe('"error" event', function() {
it('should be invoked on `http.ClientRequest` instance if `callback()` has not been defined', function(
done
) {
var agent = new Agent();
var info = url.parse('http://127.0.0.1/foo');
info.agent = agent;
var req = http.get(info);
req.on('error', function(err) {
assert.equal(
'"agent-base" has no default implementation, you must subclass and override `callback()`',
err.message
);
done();
});
});
it('should be invoked on `http.ClientRequest` instance if Error passed to callback function on the first tick', function(
done
) {
var agent = new Agent(function(req, opts, fn) {
fn(new Error('is this caught?'));
});
var info = url.parse('http://127.0.0.1/foo');
info.agent = agent;
var req = http.get(info);
req.on('error', function(err) {
assert.equal('is this caught?', err.message);
done();
});
});
it('should be invoked on `http.ClientRequest` instance if Error passed to callback function after the first tick', function(
done
) {
var agent = new Agent(function(req, opts, fn) {
setTimeout(function() {
fn(new Error('is this caught?'));
}, 10);
});
var info = url.parse('http://127.0.0.1/foo');
info.agent = agent;
var req = http.get(info);
req.on('error', function(err) {
assert.equal('is this caught?', err.message);
done();
});
});
});
describe('artificial "streams"', function() {
it('should send a GET request', function(done) {
var stream = new events.EventEmitter();
// needed for the `http` module to call .write() on the stream
stream.writable = true;
stream.write = function(str) {
assert(0 == str.indexOf('GET / HTTP/1.1'));
done();
};
// needed for `http` module in Node.js 4
stream.cork = function() {};
var opts = {
method: 'GET',
host: '127.0.0.1',
path: '/',
port: 80,
agent: new Agent(function(req, opts, fn) {
fn(null, stream);
})
};
var req = http.request(opts);
req.end();
});
it('should receive a GET response', function(done) {
var stream = new events.EventEmitter();
var opts = {
method: 'GET',
host: '127.0.0.1',
path: '/',
port: 80,
agent: new Agent(function(req, opts, fn) {
fn(null, stream);
})
};
var req = http.request(opts, function(res) {
assert.equal('0.9', res.httpVersion);
assert.equal(111, res.statusCode);
assert.equal('bar', res.headers.foo);
done();
});
// have to wait for the "socket" event since `http.ClientRequest`
// doesn't *actually* attach the listeners to the "stream" until
// this happens
req.once('socket', function() {
var buf = new Buffer(
'HTTP/0.9 111\r\n' +
'Foo: bar\r\n' +
'Set-Cookie: 1\r\n' +
'Set-Cookie: 2\r\n\r\n'
);
stream.emit('data', buf);
});
req.end();
});
});
});
describe('"http" module', function() {
var server;
var port;
// setup test HTTP server
before(function(done) {
server = http.createServer();
server.listen(0, function() {
port = server.address().port;
done();
});
});
// shut down test HTTP server
after(function(done) {
server.once('close', function() {
done();
});
server.close();
});
it('should work for basic HTTP requests', function(done) {
var called = false;
var agent = new Agent(function(req, opts, fn) {
called = true;
var socket = net.connect(opts);
fn(null, socket);
});
// add HTTP server "request" listener
var gotReq = false;
server.once('request', function(req, res) {
gotReq = true;
res.setHeader('X-Foo', 'bar');
res.setHeader('X-Url', req.url);
res.end();
});
var info = url.parse('http://127.0.0.1:' + port + '/foo');
info.agent = agent;
http.get(info, function(res) {
assert.equal('bar', res.headers['x-foo']);
assert.equal('/foo', res.headers['x-url']);
assert(gotReq);
assert(called);
done();
});
});
it('should support direct return in `connect()`', function(done) {
var called = false;
var agent = new Agent(function(req, opts) {
called = true;
return net.connect(opts);
});
// add HTTP server "request" listener
var gotReq = false;
server.once('request', function(req, res) {
gotReq = true;
res.setHeader('X-Foo', 'bar');
res.setHeader('X-Url', req.url);
res.end();
});
var info = url.parse('http://127.0.0.1:' + port + '/foo');
info.agent = agent;
http.get(info, function(res) {
assert.equal('bar', res.headers['x-foo']);
assert.equal('/foo', res.headers['x-url']);
assert(gotReq);
assert(called);
done();
});
});
it('should support returning a Promise in `connect()`', function(done) {
var called = false;
var agent = new Agent(function(req, opts) {
return new Promise(function(resolve, reject) {
called = true;
resolve(net.connect(opts));
});
});
// add HTTP server "request" listener
var gotReq = false;
server.once('request', function(req, res) {
gotReq = true;
res.setHeader('X-Foo', 'bar');
res.setHeader('X-Url', req.url);
res.end();
});
var info = url.parse('http://127.0.0.1:' + port + '/foo');
info.agent = agent;
http.get(info, function(res) {
assert.equal('bar', res.headers['x-foo']);
assert.equal('/foo', res.headers['x-url']);
assert(gotReq);
assert(called);
done();
});
});
it('should set the `Connection: close` response header', function(done) {
var called = false;
var agent = new Agent(function(req, opts, fn) {
called = true;
var socket = net.connect(opts);
fn(null, socket);
});
// add HTTP server "request" listener
var gotReq = false;
server.once('request', function(req, res) {
gotReq = true;
res.setHeader('X-Url', req.url);
assert.equal('close', req.headers.connection);
res.end();
});
var info = url.parse('http://127.0.0.1:' + port + '/bar');
info.agent = agent;
http.get(info, function(res) {
assert.equal('/bar', res.headers['x-url']);
assert.equal('close', res.headers.connection);
assert(gotReq);
assert(called);
done();
});
});
it('should pass through options from `http.request()`', function(done) {
var agent = new Agent(function(req, opts, fn) {
assert.equal('google.com', opts.host);
assert.equal('bar', opts.foo);
done();
});
http.get({
host: 'google.com',
foo: 'bar',
agent: agent
});
});
it('should default to port 80', function(done) {
var agent = new Agent(function(req, opts, fn) {
assert.equal(80, opts.port);
done();
});
// (probably) not hitting a real HTTP server here,
// so no need to add a httpServer request listener
http.get({
host: '127.0.0.1',
path: '/foo',
agent: agent
});
});
it('should support the "timeout" option', function(done) {
// ensure we timeout after the "error" event had a chance to trigger
this.timeout(1000);
this.slow(800);
var agent = new Agent(
function(req, opts, fn) {
// this function will time out
},
{ timeout: 100 }
);
var opts = url.parse('http://nodejs.org');
opts.agent = agent;
var req = http.get(opts);
req.once('error', function(err) {
assert.equal('ETIMEOUT', err.code);
req.abort();
done();
});
});
describe('PassthroughAgent', function() {
it('should pass through to `http.globalAgent`', function(done) {
// add HTTP server "request" listener
var gotReq = false;
server.once('request', function(req, res) {
gotReq = true;
res.setHeader('X-Foo', 'bar');
res.setHeader('X-Url', req.url);
res.end();
});
var info = url.parse('http://127.0.0.1:' + port + '/foo');
info.agent = PassthroughAgent;
http.get(info, function(res) {
assert.equal('bar', res.headers['x-foo']);
assert.equal('/foo', res.headers['x-url']);
assert(gotReq);
done();
});
});
});
});
describe('"https" module', function() {
var server;
var port;
// setup test HTTPS server
before(function(done) {
var options = {
key: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.key'),
cert: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.pem')
};
server = https.createServer(options);
server.listen(0, function() {
port = server.address().port;
done();
});
});
// shut down test HTTP server
after(function(done) {
server.once('close', function() {
done();
});
server.close();
});
it('should not modify the passed in Options object', function(done) {
var called = false;
var agent = new Agent(function(req, opts, fn) {
called = true;
assert.equal(true, opts.secureEndpoint);
assert.equal(443, opts.port);
assert.equal('localhost', opts.host);
});
var opts = { agent: agent };
var req = https.request(opts);
assert.equal(true, called);
assert.equal(false, 'secureEndpoint' in opts);
assert.equal(false, 'port' in opts);
done();
});
it('should work with a String URL', function(done) {
var endpoint = 'https://127.0.0.1:' + port;
var req = https.get(endpoint);
// it's gonna error out since `rejectUnauthorized` is not being passed in
req.on('error', function(err) {
assert.equal(err.code, 'DEPTH_ZERO_SELF_SIGNED_CERT');
done();
});
});
it('should work for basic HTTPS requests', function(done) {
var called = false;
var agent = new Agent(function(req, opts, fn) {
called = true;
assert(opts.secureEndpoint);
var socket = tls.connect(opts);
fn(null, socket);
});
// add HTTPS server "request" listener
var gotReq = false;
server.once('request', function(req, res) {
gotReq = true;
res.setHeader('X-Foo', 'bar');
res.setHeader('X-Url', req.url);
res.end();
});
var info = url.parse('https://127.0.0.1:' + port + '/foo');
info.agent = agent;
info.rejectUnauthorized = false;
https.get(info, function(res) {
assert.equal('bar', res.headers['x-foo']);
assert.equal('/foo', res.headers['x-url']);
assert(gotReq);
assert(called);
done();
});
});
it('should pass through options from `https.request()`', function(done) {
var agent = new Agent(function(req, opts, fn) {
assert.equal('google.com', opts.host);
assert.equal('bar', opts.foo);
done();
});
https.get({
host: 'google.com',
foo: 'bar',
agent: agent
});
});
it('should default to port 443', function(done) {
var agent = new Agent(function(req, opts, fn) {
assert.equal(true, opts.secureEndpoint);
assert.equal(false, opts.rejectUnauthorized);
assert.equal(443, opts.port);
done();
});
// (probably) not hitting a real HTTPS server here,
// so no need to add a httpsServer request listener
https.get({
host: '127.0.0.1',
path: '/foo',
agent: agent,
rejectUnauthorized: false
});
});
describe('PassthroughAgent', function() {
it('should pass through to `https.globalAgent`', function(done) {
// add HTTP server "request" listener
var gotReq = false;
server.once('request', function(req, res) {
gotReq = true;
res.setHeader('X-Foo', 'bar');
res.setHeader('X-Url', req.url);
res.end();
});
var info = url.parse('https://127.0.0.1:' + port + '/foo');
info.agent = PassthroughAgent;
info.rejectUnauthorized = false;
https.get(info, function(res) {
assert.equal('bar', res.headers['x-foo']);
assert.equal('/foo', res.headers['x-url']);
assert(gotReq);
done();
});
});
});
});
describe('"ws" server', function() {
var wss;
var server;
var port;
// setup test HTTP server
before(function(done) {
server = http.createServer();
wss = new WebSocket.Server({ server: server });
server.listen(0, function() {
port = server.address().port;
done();
});
});
// shut down test HTTP server
after(function(done) {
server.once('close', function() {
done();
});
server.close();
});
it('should work for basic WebSocket connections', function(done) {
function onconnection(ws) {
ws.on('message', function(data) {
assert.equal('ping', data);
ws.send('pong');
});
}
wss.on('connection', onconnection);
var agent = new Agent(function(req, opts, fn) {
var socket = net.connect(opts);
fn(null, socket);
});
var client = new WebSocket('ws://127.0.0.1:' + port + '/', {
agent: agent
});
client.on('open', function() {
client.send('ping');
});
client.on('message', function(data) {
assert.equal('pong', data);
client.close();
wss.removeListener('connection', onconnection);
done();
});
});
});
describe('"wss" server', function() {
var wss;
var server;
var port;
// setup test HTTP server
before(function(done) {
var options = {
key: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.key'),
cert: fs.readFileSync(__dirname + '/ssl-cert-snakeoil.pem')
};
server = https.createServer(options);
wss = new WebSocket.Server({ server: server });
server.listen(0, function() {
port = server.address().port;
done();
});
});
// shut down test HTTP server
after(function(done) {
server.once('close', function() {
done();
});
server.close();
});
it('should work for secure WebSocket connections', function(done) {
function onconnection(ws) {
ws.on('message', function(data) {
assert.equal('ping', data);
ws.send('pong');
});
}
wss.on('connection', onconnection);
var agent = new Agent(function(req, opts, fn) {
var socket = tls.connect(opts);
fn(null, socket);
});
var client = new WebSocket('wss://127.0.0.1:' + port + '/', {
agent: agent,
rejectUnauthorized: false
});
client.on('open', function() {
client.send('ping');
});
client.on('message', function(data) {
assert.equal('pong', data);
client.close();
wss.removeListener('connection', onconnection);
done();
});
});
});

View File

@@ -0,0 +1,148 @@
3.4.1 / 2018-03-08
==================
**fixes**
* [[`4d3a3b1`](http://github.com/node-modules/agentkeepalive/commit/4d3a3b1f7b16595febbbd39eeed72b2663549014)] - fix: Handle ipv6 addresses in host-header correctly with TLS (#53) (Mattias Holmlund <<u376@m1.holmlund.se>>)
**others**
* [[`55a7a5c`](http://github.com/node-modules/agentkeepalive/commit/55a7a5cd33e97f9a8370083dcb041c5552f10ac9)] - test: stop timer after test end (fengmk2 <<fengmk2@gmail.com>>)
3.4.0 / 2018-02-27
==================
**features**
* [[`bc7cadb`](http://github.com/node-modules/agentkeepalive/commit/bc7cadb30ecd2071e2b341ac53ae1a2b8155c43d)] - feat: use socket custom freeSocketKeepAliveTimeout first (#59) (fengmk2 <<fengmk2@gmail.com>>)
**others**
* [[`138eda8`](http://github.com/node-modules/agentkeepalive/commit/138eda81e10b632aaa87bea0cb66d8667124c4e8)] - doc: fix `keepAliveMsecs` params description (#55) (Hongcai Deng <<admin@dhchouse.com>>)
3.3.0 / 2017-06-20
==================
* feat: add statusChanged getter (#51)
* chore: format License
3.2.0 / 2017-06-10
==================
* feat: add expiring active sockets
* test: add node 8 (#49)
3.1.0 / 2017-02-20
==================
* feat: timeout support humanize ms (#48)
3.0.0 / 2016-12-20
==================
* fix: emit agent socket close event
* test: add remove excess calls to removeSocket
* test: use egg-ci
* test: refactor test with eslint rules
* feat: merge _http_agent.js from 7.2.1
2.2.0 / 2016-06-26
==================
* feat: Add browser shim (noop) for isomorphic use. (#39)
* chore: add security check badge
2.1.1 / 2016-04-06
==================
* https: fix ssl socket leak when keepalive is used
* chore: remove circle ci image
2.1.0 / 2016-04-02
==================
* fix: opened sockets number overflow maxSockets
2.0.5 / 2016-03-16
==================
* fix: pick _evictSession to httpsAgent
2.0.4 / 2016-03-13
==================
* test: add Circle ci
* test: add appveyor ci build
* refactor: make sure only one error listener
* chore: use codecov
* fix: handle idle socket error
* test: run on more node versions
2.0.3 / 2015-08-03
==================
* fix: add default error handler to avoid Unhandled error event throw
2.0.2 / 2015-04-25
==================
* fix: remove socket from freeSockets on 'timeout' (@pmalouin)
2.0.1 / 2015-04-19
==================
* fix: add timeoutSocketCount to getCurrentStatus()
* feat(getCurrentStatus): add getCurrentStatus
2.0.0 / 2015-04-01
==================
* fix: socket.destroyed always be undefined on 0.10.x
* Make it compatible with node v0.10.x (@lattmann)
1.2.1 / 2015-03-23
==================
* patch from iojs: don't overwrite servername option
* patch commits from joyent/node
* add max sockets test case
* add nagle algorithm delayed link
1.2.0 / 2014-09-02
==================
* allow set keepAliveTimeout = 0
* support timeout on working socket. fixed #6
1.1.0 / 2014-08-28
==================
* add some socket counter for deep monitor
1.0.0 / 2014-08-13
==================
* update _http_agent, only support 0.11+, only support node 0.11.0+
0.2.2 / 2013-11-19
==================
* support node 0.8 and node 0.10
0.2.1 / 2013-11-08
==================
* fix socket does not timeout bug, it will hang on life, must use 0.2.x on node 0.11
0.2.0 / 2013-11-06
==================
* use keepalive agent on node 0.11+ impl
0.1.5 / 2013-06-24
==================
* support coveralls
* add node 0.10 test
* add 0.8.22 original https.js
* add original http.js module to diff
* update jscover
* mv pem to fixtures
* add https agent usage

View File

@@ -0,0 +1,248 @@
# agentkeepalive
[![NPM version][npm-image]][npm-url]
[![build status][travis-image]][travis-url]
[![Appveyor status][appveyor-image]][appveyor-url]
[![Test coverage][codecov-image]][codecov-url]
[![David deps][david-image]][david-url]
[![Known Vulnerabilities][snyk-image]][snyk-url]
[![npm download][download-image]][download-url]
[npm-image]: https://img.shields.io/npm/v/agentkeepalive.svg?style=flat
[npm-url]: https://npmjs.org/package/agentkeepalive
[travis-image]: https://img.shields.io/travis/node-modules/agentkeepalive.svg?style=flat
[travis-url]: https://travis-ci.org/node-modules/agentkeepalive
[appveyor-image]: https://ci.appveyor.com/api/projects/status/k7ct4s47di6m5uy2?svg=true
[appveyor-url]: https://ci.appveyor.com/project/fengmk2/agentkeepalive
[codecov-image]: https://codecov.io/gh/node-modules/agentkeepalive/branch/master/graph/badge.svg
[codecov-url]: https://codecov.io/gh/node-modules/agentkeepalive
[david-image]: https://img.shields.io/david/node-modules/agentkeepalive.svg?style=flat
[david-url]: https://david-dm.org/node-modules/agentkeepalive
[snyk-image]: https://snyk.io/test/npm/agentkeepalive/badge.svg?style=flat-square
[snyk-url]: https://snyk.io/test/npm/agentkeepalive
[download-image]: https://img.shields.io/npm/dm/agentkeepalive.svg?style=flat-square
[download-url]: https://npmjs.org/package/agentkeepalive
Node.js's missing keep-alive `http.Agent`. Supports both `http` and `https`.
## What's different from original `http.Agent`?
- `keepAlive=true` by default
- Disable Nagle's algorithm: `socket.setNoDelay(true)`
- Add a free socket timeout: avoid leaking sockets that sit inactive for a long time in the free-socket queue.
- Add an active socket timeout: avoid leaking sockets that sit inactive for a long time in the active-socket queue.
## Install
```bash
$ npm install agentkeepalive --save
```
## new Agent([options])
* `options` {Object} Set of configurable options to set on the agent.
Can have the following fields:
* `keepAlive` {Boolean} Keep sockets around in a pool to be used by
other requests in the future. Default = `true`.
* `keepAliveMsecs` {Number} When using the keepAlive option, specifies the initial delay
for TCP Keep-Alive packets. Ignored when the keepAlive option is false or undefined.
Default = `1000`. Only relevant if `keepAlive` is set to `true`.
* `freeSocketKeepAliveTimeout`: {Number} Sets the free socket to timeout
after `freeSocketKeepAliveTimeout` milliseconds of inactivity on the free socket.
Default is `15000`.
Only relevant if `keepAlive` is set to `true`.
* `timeout`: {Number} Sets the working socket to timeout
after `timeout` milliseconds of inactivity on the working socket.
Default is `freeSocketKeepAliveTimeout * 2`.
* `maxSockets` {Number} Maximum number of sockets to allow per
host. Default = `Infinity`.
* `maxFreeSockets` {Number} Maximum number of sockets to leave open
in a free state. Only relevant if `keepAlive` is set to `true`.
Default = `256`.
* `socketActiveTTL` {Number} Sets the active socket's time to live, even if it is in use.
If not set, the behaviour is unchanged (the socket is released only when free).
Default = `null`. A short configuration sketch follows this list.
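A minimal configuration sketch (not part of the original README; the numbers are only
illustrative) that wires the options above together, including the less commonly shown
`keepAliveMsecs` and `socketActiveTTL` fields:
```js
const Agent = require('agentkeepalive');

const keepaliveAgent = new Agent({
  keepAlive: true,                   // the default, shown here for clarity
  keepAliveMsecs: 1000,              // initial delay for TCP Keep-Alive packets
  freeSocketKeepAliveTimeout: 15000, // time out free sockets after 15s of inactivity
  timeout: 30000,                    // time out working sockets after 30s of inactivity
  maxSockets: 50,
  maxFreeSockets: 10,
  socketActiveTTL: 60000,            // do not keep reusing sockets older than 60s
});
```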
## Usage
```js
const http = require('http');
const Agent = require('agentkeepalive');
const keepaliveAgent = new Agent({
maxSockets: 100,
maxFreeSockets: 10,
timeout: 60000,
freeSocketKeepAliveTimeout: 30000, // free socket keepalive for 30 seconds
});
const options = {
host: 'cnodejs.org',
port: 80,
path: '/',
method: 'GET',
agent: keepaliveAgent,
};
const req = http.request(options, res => {
console.log('STATUS: ' + res.statusCode);
console.log('HEADERS: ' + JSON.stringify(res.headers));
res.setEncoding('utf8');
res.on('data', function (chunk) {
console.log('BODY: ' + chunk);
});
});
req.on('error', e => {
console.log('problem with request: ' + e.message);
});
req.end();
setTimeout(() => {
if (keepaliveAgent.statusChanged) {
console.log('[%s] agent status changed: %j', Date(), keepaliveAgent.getCurrentStatus());
}
}, 2000);
```
### `getter agent.statusChanged`
Whether any of the counters have changed since the last checkpoint.
### `agent.getCurrentStatus()`
`agent.getCurrentStatus()` will return an object showing the status of this agent:
```js
{
createSocketCount: 10,
closeSocketCount: 5,
timeoutSocketCount: 0,
requestCount: 5,
freeSockets: { 'localhost:57479:': 3 },
sockets: { 'localhost:57479:': 5 },
requests: {}
}
```
### Support `https`
```js
const https = require('https');
const HttpsAgent = require('agentkeepalive').HttpsAgent;
const keepaliveAgent = new HttpsAgent();
// https://www.google.com/search?q=nodejs&sugexp=chrome,mod=12&sourceid=chrome&ie=UTF-8
const options = {
host: 'www.google.com',
port: 443,
path: '/search?q=nodejs&sugexp=chrome,mod=12&sourceid=chrome&ie=UTF-8',
method: 'GET',
agent: keepaliveAgent,
};
const req = https.request(options, res => {
console.log('STATUS: ' + res.statusCode);
console.log('HEADERS: ' + JSON.stringify(res.headers));
res.setEncoding('utf8');
res.on('data', chunk => {
console.log('BODY: ' + chunk);
});
});
req.on('error', e => {
console.log('problem with request: ' + e.message);
});
req.end();
setTimeout(() => {
console.log('agent status: %j', keepaliveAgent.getCurrentStatus());
}, 2000);
```
## [Benchmark](https://github.com/node-modules/agentkeepalive/tree/master/benchmark)
run the benchmark:
```bash
cd benchmark
sh start.sh
```
Intel(R) Core(TM)2 Duo CPU P8600 @ 2.40GHz
node@v0.8.9
50 maxSockets, 60 concurrent, 1000 requests per concurrent, 5ms delay
Keep alive agent (30 seconds):
```js
Transactions: 60000 hits
Availability: 100.00 %
Elapsed time: 29.70 secs
Data transferred: 14.88 MB
Response time: 0.03 secs
Transaction rate: 2020.20 trans/sec
Throughput: 0.50 MB/sec
Concurrency: 59.84
Successful transactions: 60000
Failed transactions: 0
Longest transaction: 0.15
Shortest transaction: 0.01
```
Normal agent:
```js
Transactions: 60000 hits
Availability: 100.00 %
Elapsed time: 46.53 secs
Data transferred: 14.88 MB
Response time: 0.05 secs
Transaction rate: 1289.49 trans/sec
Throughput: 0.32 MB/sec
Concurrency: 59.81
Successful transactions: 60000
Failed transactions: 0
Longest transaction: 0.45
Shortest transaction: 0.00
```
Socket created:
```
[proxy.js:120000] keepalive, 50 created, 60000 requestFinished, 1200 req/socket, 0 requests, 0 sockets, 0 unusedSockets, 50 timeout
{" <10ms":662," <15ms":17825," <20ms":20552," <30ms":17646," <40ms":2315," <50ms":567," <100ms":377," <150ms":56," <200ms":0," >=200ms+":0}
----------------------------------------------------------------
[proxy.js:120000] normal , 53866 created, 84260 requestFinished, 1.56 req/socket, 0 requests, 0 sockets
{" <10ms":75," <15ms":1112," <20ms":10947," <30ms":32130," <40ms":8228," <50ms":3002," <100ms":4274," <150ms":181," <200ms":18," >=200ms+":33}
```
## License
```
(The MIT License)
Copyright(c) node-modules and other contributors.
Copyright(c) 2012 - 2015 fengmk2 <fengmk2@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
'Software'), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
```

View File

@@ -0,0 +1,5 @@
module.exports = noop;
module.exports.HttpsAgent = noop;
// Noop function for the browser, since native APIs don't use agents.
function noop () {}

View File

@@ -0,0 +1,4 @@
'use strict';
module.exports = require('./lib/agent');
module.exports.HttpsAgent = require('./lib/https_agent');

View File

@@ -0,0 +1,416 @@
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// patch from https://github.com/nodejs/node/blob/v7.2.1/lib/_http_agent.js
'use strict';
const net = require('net');
const util = require('util');
const EventEmitter = require('events');
const debug = util.debuglog('http');
// New Agent code.
// The largest departure from the previous implementation is that
// an Agent instance holds connections for a variable number of host:ports.
// Surprisingly, this is still API compatible as far as third parties are
// concerned. The only code that really notices the difference is the
// request object.
// Another departure is that all code related to HTTP parsing is in
// ClientRequest.onSocket(). The Agent is now *strictly*
// concerned with managing a connection pool.
function Agent(options) {
if (!(this instanceof Agent))
return new Agent(options);
EventEmitter.call(this);
var self = this;
self.defaultPort = 80;
self.protocol = 'http:';
self.options = util._extend({}, options);
// don't confuse net and make it think that we're connecting to a pipe
self.options.path = null;
self.requests = {};
self.sockets = {};
self.freeSockets = {};
self.keepAliveMsecs = self.options.keepAliveMsecs || 1000;
self.keepAlive = self.options.keepAlive || false;
self.maxSockets = self.options.maxSockets || Agent.defaultMaxSockets;
self.maxFreeSockets = self.options.maxFreeSockets || 256;
// [patch start]
// free keep-alive socket timeout. By default free sockets do not have a timeout.
self.freeSocketKeepAliveTimeout = self.options.freeSocketKeepAliveTimeout || 0;
// working socket timeout. By default working sockets do not have a timeout.
self.timeout = self.options.timeout || 0;
// the socket active time to live, even if it's in use
this.socketActiveTTL = this.options.socketActiveTTL || null;
// [patch end]
self.on('free', function(socket, options) {
var name = self.getName(options);
debug('agent.on(free)', name);
if (socket.writable &&
self.requests[name] && self.requests[name].length) {
// [patch start]
debug('continue handle next request');
// [patch end]
self.requests[name].shift().onSocket(socket);
if (self.requests[name].length === 0) {
// don't leak
delete self.requests[name];
}
} else {
// If there are no pending requests, then put it in
// the freeSockets pool, but only if we're allowed to do so.
var req = socket._httpMessage;
if (req &&
req.shouldKeepAlive &&
socket.writable &&
self.keepAlive) {
var freeSockets = self.freeSockets[name];
var freeLen = freeSockets ? freeSockets.length : 0;
var count = freeLen;
if (self.sockets[name])
count += self.sockets[name].length;
if (count > self.maxSockets || freeLen >= self.maxFreeSockets) {
socket.destroy();
} else {
freeSockets = freeSockets || [];
self.freeSockets[name] = freeSockets;
socket.setKeepAlive(true, self.keepAliveMsecs);
socket.unref();
socket._httpMessage = null;
self.removeSocket(socket, options);
freeSockets.push(socket);
// [patch start]
// Add a default error handler to avoid Unhandled 'error' event throw on idle socket
// https://github.com/node-modules/agentkeepalive/issues/25
// https://github.com/nodejs/node/pull/4482 (fixed in >= 4.4.0 and >= 5.4.0)
if (socket.listeners('error').length === 0) {
socket.once('error', freeSocketErrorListener);
}
// set free keepalive timer
// try to use socket custom freeSocketKeepAliveTimeout first
const freeSocketKeepAliveTimeout = socket.freeSocketKeepAliveTimeout || self.freeSocketKeepAliveTimeout;
socket.setTimeout(freeSocketKeepAliveTimeout);
debug(`push to free socket queue and wait for ${freeSocketKeepAliveTimeout}ms`);
// [patch end]
}
} else {
socket.destroy();
}
}
});
}
util.inherits(Agent, EventEmitter);
exports.Agent = Agent;
// [patch start]
function freeSocketErrorListener(err) {
var socket = this;
debug('SOCKET ERROR on FREE socket:', err.message, err.stack);
socket.destroy();
socket.emit('agentRemove');
}
// [patch end]
Agent.defaultMaxSockets = Infinity;
Agent.prototype.createConnection = net.createConnection;
// Get the key for a given set of request options
Agent.prototype.getName = function getName(options) {
var name = options.host || 'localhost';
name += ':';
if (options.port)
name += options.port;
name += ':';
if (options.localAddress)
name += options.localAddress;
// Pacify parallel/test-http-agent-getname by only appending
// the ':' when options.family is set.
if (options.family === 4 || options.family === 6)
name += ':' + options.family;
return name;
};
// [patch start]
function handleSocketCreation(req) {
return function(err, newSocket) {
if (err) {
process.nextTick(function() {
req.emit('error', err);
});
return;
}
req.onSocket(newSocket);
}
}
// [patch end]
Agent.prototype.addRequest = function addRequest(req, options, port/*legacy*/,
localAddress/*legacy*/) {
// Legacy API: addRequest(req, host, port, localAddress)
if (typeof options === 'string') {
options = {
host: options,
port,
localAddress
};
}
options = util._extend({}, options);
options = util._extend(options, this.options);
if (!options.servername)
options.servername = calculateServerName(options, req);
var name = this.getName(options);
if (!this.sockets[name]) {
this.sockets[name] = [];
}
var freeLen = this.freeSockets[name] ? this.freeSockets[name].length : 0;
var sockLen = freeLen + this.sockets[name].length;
if (freeLen) {
// we have a free socket, so use that.
var socket = this.freeSockets[name].shift();
debug('have free socket');
// [patch start]
// remove free socket error event handler
socket.removeListener('error', freeSocketErrorListener);
// restart the default timer
socket.setTimeout(this.timeout);
if (this.socketActiveTTL && Date.now() - socket.createdTime > this.socketActiveTTL) {
debug(`socket ${socket.createdTime} expired`);
socket.destroy();
return this.createSocket(req, options, handleSocketCreation(req));
}
// [patch end]
// don't leak
if (!this.freeSockets[name].length)
delete this.freeSockets[name];
socket.ref();
req.onSocket(socket);
this.sockets[name].push(socket);
} else if (sockLen < this.maxSockets) {
debug('call onSocket', sockLen, freeLen);
// If we are under maxSockets create a new one.
// [patch start]
this.createSocket(req, options, handleSocketCreation(req));
// [patch end]
} else {
debug('wait for socket');
// We are over limit so we'll add it to the queue.
if (!this.requests[name]) {
this.requests[name] = [];
}
this.requests[name].push(req);
}
};
Agent.prototype.createSocket = function createSocket(req, options, cb) {
var self = this;
options = util._extend({}, options);
options = util._extend(options, self.options);
if (!options.servername)
options.servername = calculateServerName(options, req);
var name = self.getName(options);
options._agentKey = name;
debug('createConnection', name, options);
options.encoding = null;
var called = false;
const newSocket = self.createConnection(options, oncreate);
// [patch start]
if (newSocket) {
oncreate(null, Object.assign(newSocket, { createdTime: Date.now() }));
}
// [patch end]
function oncreate(err, s) {
if (called)
return;
called = true;
if (err)
return cb(err);
if (!self.sockets[name]) {
self.sockets[name] = [];
}
self.sockets[name].push(s);
debug('sockets', name, self.sockets[name].length);
function onFree() {
self.emit('free', s, options);
}
s.on('free', onFree);
function onClose(err) {
debug('CLIENT socket onClose');
// This is the only place where sockets get removed from the Agent.
// If you want to remove a socket from the pool, just close it.
// All socket errors end in a close event anyway.
self.removeSocket(s, options);
// [patch start]
self.emit('close');
// [patch end]
}
s.on('close', onClose);
// [patch start]
// start socket timeout handler
function onTimeout() {
debug('CLIENT socket onTimeout');
s.destroy();
// Remove it from freeSockets immediately to prevent new requests from being sent through this socket.
self.removeSocket(s, options);
self.emit('timeout');
}
s.on('timeout', onTimeout);
// set the default timer
s.setTimeout(self.timeout);
// [patch end]
function onRemove() {
// We need this function for cases like HTTP 'upgrade'
// (defined by WebSockets) where we need to remove a socket from the
// pool because it'll be locked up indefinitely
debug('CLIENT socket onRemove');
self.removeSocket(s, options);
s.removeListener('close', onClose);
s.removeListener('free', onFree);
s.removeListener('agentRemove', onRemove);
// [patch start]
// remove socket timeout handler
s.setTimeout(0, onTimeout);
// [patch end]
}
s.on('agentRemove', onRemove);
cb(null, s);
}
};
function calculateServerName(options, req) {
let servername = options.host;
const hostHeader = req.getHeader('host');
if (hostHeader) {
// abc => abc
// abc:123 => abc
// [::1] => ::1
// [::1]:123 => ::1
if (hostHeader.startsWith('[')) {
const index = hostHeader.indexOf(']');
if (index === -1) {
// Leading '[', but no ']'. Need to do something...
servername = hostHeader;
} else {
servername = hostHeader.substr(1, index - 1);
}
} else {
servername = hostHeader.split(':', 1)[0];
}
}
return servername;
}
Agent.prototype.removeSocket = function removeSocket(s, options) {
var name = this.getName(options);
debug('removeSocket', name, 'writable:', s.writable);
var sets = [this.sockets];
// If the socket was destroyed, remove it from the free buffers too.
if (!s.writable)
sets.push(this.freeSockets);
for (var sk = 0; sk < sets.length; sk++) {
var sockets = sets[sk];
if (sockets[name]) {
var index = sockets[name].indexOf(s);
if (index !== -1) {
sockets[name].splice(index, 1);
// Don't leak
if (sockets[name].length === 0)
delete sockets[name];
}
}
}
// [patch start]
var freeLen = this.freeSockets[name] ? this.freeSockets[name].length : 0;
var sockLen = freeLen + (this.sockets[name] ? this.sockets[name].length : 0);
// [patch end]
if (this.requests[name] && this.requests[name].length && sockLen < this.maxSockets) {
debug('removeSocket, have a request, make a socket');
var req = this.requests[name][0];
// If we have pending requests and a socket gets closed make a new one
this.createSocket(req, options, function(err, newSocket) {
if (err) {
process.nextTick(function() {
req.emit('error', err);
});
return;
}
newSocket.emit('free');
});
}
};
Agent.prototype.destroy = function destroy() {
var sets = [this.freeSockets, this.sockets];
for (var s = 0; s < sets.length; s++) {
var set = sets[s];
var keys = Object.keys(set);
for (var v = 0; v < keys.length; v++) {
var setName = set[keys[v]];
for (var n = 0; n < setName.length; n++) {
setName[n].destroy();
}
}
}
};
exports.globalAgent = new Agent();

View File

@@ -0,0 +1,133 @@
/**
* refer:
* * @atimb "Real keep-alive HTTP agent": https://gist.github.com/2963672
* * https://github.com/joyent/node/blob/master/lib/http.js
* * https://github.com/joyent/node/blob/master/lib/https.js
* * https://github.com/joyent/node/blob/master/lib/_http_agent.js
*/
'use strict';
const OriginalAgent = require('./_http_agent').Agent;
const ms = require('humanize-ms');
class Agent extends OriginalAgent {
constructor(options) {
options = options || {};
options.keepAlive = options.keepAlive !== false;
// default is keep-alive and 15s free socket timeout
if (options.freeSocketKeepAliveTimeout === undefined) {
options.freeSocketKeepAliveTimeout = 15000;
}
// Legacy API: keepAliveTimeout should be renamed to `freeSocketKeepAliveTimeout`
if (options.keepAliveTimeout) {
options.freeSocketKeepAliveTimeout = options.keepAliveTimeout;
}
options.freeSocketKeepAliveTimeout = ms(options.freeSocketKeepAliveTimeout);
// Sets the socket to time out after `timeout` milliseconds of inactivity on the socket.
// By default it is double the free socket keepalive timeout.
if (options.timeout === undefined) {
options.timeout = options.freeSocketKeepAliveTimeout * 2;
// make sure socket default inactivity timeout >= 30s
if (options.timeout < 30000) {
options.timeout = 30000;
}
}
options.timeout = ms(options.timeout);
super(options);
this.createSocketCount = 0;
this.createSocketCountLastCheck = 0;
this.createSocketErrorCount = 0;
this.createSocketErrorCountLastCheck = 0;
this.closeSocketCount = 0;
this.closeSocketCountLastCheck = 0;
// socket error event count
this.errorSocketCount = 0;
this.errorSocketCountLastCheck = 0;
this.requestCount = 0;
this.requestCountLastCheck = 0;
this.timeoutSocketCount = 0;
this.timeoutSocketCountLastCheck = 0;
this.on('free', s => {
this.requestCount++;
// last enter free queue timestamp
s.lastFreeTime = Date.now();
});
this.on('timeout', () => {
this.timeoutSocketCount++;
});
this.on('close', () => {
this.closeSocketCount++;
});
this.on('error', () => {
this.errorSocketCount++;
});
}
createSocket(req, options, cb) {
super.createSocket(req, options, (err, socket) => {
if (err) {
this.createSocketErrorCount++;
return cb(err);
}
if (this.keepAlive) {
// Disable Nagle's algorithm: http://blog.caustik.com/2012/04/08/scaling-node-js-to-100k-concurrent-connections/
// https://fengmk2.com/benchmark/nagle-algorithm-delayed-ack-mock.html
socket.setNoDelay(true);
}
this.createSocketCount++;
cb(null, socket);
});
}
get statusChanged() {
const changed = this.createSocketCount !== this.createSocketCountLastCheck ||
this.createSocketErrorCount !== this.createSocketErrorCountLastCheck ||
this.closeSocketCount !== this.closeSocketCountLastCheck ||
this.errorSocketCount !== this.errorSocketCountLastCheck ||
this.timeoutSocketCount !== this.timeoutSocketCountLastCheck ||
this.requestCount !== this.requestCountLastCheck;
if (changed) {
this.createSocketCountLastCheck = this.createSocketCount;
this.createSocketErrorCountLastCheck = this.createSocketErrorCount;
this.closeSocketCountLastCheck = this.closeSocketCount;
this.errorSocketCountLastCheck = this.errorSocketCount;
this.timeoutSocketCountLastCheck = this.timeoutSocketCount;
this.requestCountLastCheck = this.requestCount;
}
return changed;
}
getCurrentStatus() {
return {
createSocketCount: this.createSocketCount,
createSocketErrorCount: this.createSocketErrorCount,
closeSocketCount: this.closeSocketCount,
errorSocketCount: this.errorSocketCount,
timeoutSocketCount: this.timeoutSocketCount,
requestCount: this.requestCount,
freeSockets: inspect(this.freeSockets),
sockets: inspect(this.sockets),
requests: inspect(this.requests),
};
}
}
module.exports = Agent;
function inspect(obj) {
const res = {};
for (const key in obj) {
res[key] = obj[key].length;
}
return res;
}

View File

@@ -0,0 +1,42 @@
/**
* Https Agent base on custom http agent
*/
'use strict';
const https = require('https');
const HttpAgent = require('./agent');
const OriginalHttpsAgent = https.Agent;
class HttpsAgent extends HttpAgent {
constructor(options) {
super(options);
this.defaultPort = 443;
this.protocol = 'https:';
this.maxCachedSessions = this.options.maxCachedSessions;
if (this.maxCachedSessions === undefined) {
this.maxCachedSessions = 100;
}
this._sessionCache = {
map: {},
list: [],
};
}
}
[
'createConnection',
'getName',
'_getSession',
'_cacheSession',
// https://github.com/nodejs/node/pull/4982
'_evictSession',
].forEach(function(method) {
if (typeof OriginalHttpsAgent.prototype[method] === 'function') {
HttpsAgent.prototype[method] = OriginalHttpsAgent.prototype[method];
}
});
module.exports = HttpsAgent;

View File

@@ -0,0 +1,83 @@
{
"_from": "agentkeepalive@^3.4.1",
"_id": "agentkeepalive@3.4.1",
"_inBundle": true,
"_integrity": "sha512-MPIwsZU9PP9kOrZpyu2042kYA8Fdt/AedQYkYXucHgF9QoD9dXVp0ypuGnHXSR0hTstBxdt85Xkh4JolYfK5wg==",
"_location": "/npm/agentkeepalive",
"_phantomChildren": {},
"_requested": {
"type": "range",
"registry": true,
"raw": "agentkeepalive@^3.4.1",
"name": "agentkeepalive",
"escapedName": "agentkeepalive",
"rawSpec": "^3.4.1",
"saveSpec": null,
"fetchSpec": "^3.4.1"
},
"_requiredBy": [
"/npm/make-fetch-happen",
"/npm/npm-registry-fetch/make-fetch-happen"
],
"_resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-3.4.1.tgz",
"_shasum": "aa95aebc3a749bca5ed53e3880a09f5235b48f0c",
"_spec": "agentkeepalive@^3.4.1",
"_where": "/Users/rebecca/code/npm/node_modules/make-fetch-happen",
"author": {
"name": "fengmk2",
"email": "fengmk2@gmail.com",
"url": "https://fengmk2.com"
},
"browser": "browser.js",
"bugs": {
"url": "https://github.com/node-modules/agentkeepalive/issues"
},
"bundleDependencies": false,
"ci": {
"version": "4.3.2, 4, 6, 8, 9"
},
"dependencies": {
"humanize-ms": "^1.2.1"
},
"deprecated": false,
"description": "Missing keepalive http.Agent",
"devDependencies": {
"autod": "^2.8.0",
"egg-bin": "^1.10.3",
"egg-ci": "^1.7.0",
"eslint": "^3.19.0",
"eslint-config-egg": "^4.2.0",
"pedding": "^1.1.0"
},
"engines": {
"node": ">= 4.0.0"
},
"files": [
"index.js",
"browser.js",
"lib"
],
"homepage": "https://github.com/node-modules/agentkeepalive#readme",
"keywords": [
"http",
"https",
"agent",
"keepalive",
"agentkeepalive"
],
"license": "MIT",
"main": "index.js",
"name": "agentkeepalive",
"repository": {
"type": "git",
"url": "git://github.com/node-modules/agentkeepalive.git"
},
"scripts": {
"autod": "autod",
"ci": "npm run lint && npm run cov",
"cov": "egg-bin cov",
"lint": "eslint lib test index.js",
"test": "egg-bin test"
},
"version": "3.4.1"
}

View File

@@ -0,0 +1,20 @@
var Ajv = require('ajv');
var ajv = new Ajv({allErrors: true});
var schema = {
"properties": {
"foo": { "type": "string" },
"bar": { "type": "number", "maximum": 3 }
}
};
var validate = ajv.compile(schema);
test({"foo": "abc", "bar": 2});
test({"foo": 2, "bar": 4});
function test(data) {
var valid = validate(data);
if (valid) console.log('Valid!');
else console.log('Invalid: ' + ajv.errorsText(validate.errors));
}

22
website/node_modules/npm/node_modules/ajv/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2015 Evgeny Poberezkin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

1327
website/node_modules/npm/node_modules/ajv/README.md generated vendored Normal file

File diff suppressed because it is too large

49
website/node_modules/npm/node_modules/ajv/lib/$data.js generated vendored Normal file
View File

@@ -0,0 +1,49 @@
'use strict';
var KEYWORDS = [
'multipleOf',
'maximum',
'exclusiveMaximum',
'minimum',
'exclusiveMinimum',
'maxLength',
'minLength',
'pattern',
'additionalItems',
'maxItems',
'minItems',
'uniqueItems',
'maxProperties',
'minProperties',
'required',
'additionalProperties',
'enum',
'format',
'const'
];
module.exports = function (metaSchema, keywordsJsonPointers) {
for (var i=0; i<keywordsJsonPointers.length; i++) {
metaSchema = JSON.parse(JSON.stringify(metaSchema));
var segments = keywordsJsonPointers[i].split('/');
var keywords = metaSchema;
var j;
for (j=1; j<segments.length; j++)
keywords = keywords[segments[j]];
for (j=0; j<KEYWORDS.length; j++) {
var key = KEYWORDS[j];
var schema = keywords[key];
if (schema) {
keywords[key] = {
anyOf: [
schema,
{ $ref: 'https://raw.githubusercontent.com/epoberezkin/ajv/master/lib/refs/$data.json#' }
]
};
}
}
}
return metaSchema;
};
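A small editorial sketch of what this transform produces: for a JSON pointer such as '/properties' (the value ajv.js passes below via META_SUPPORT_DATA), every keyword from KEYWORDS found at that pointer is wrapped in an anyOf together with the $data reference schema.

// Editor's sketch, not part of the vendored file; the relative require only
// resolves from inside this lib directory.
var $dataMetaSchema = require('./$data');

var metaSchema = {
  properties: {
    maximum: { type: 'number' }
  }
};

var patched = $dataMetaSchema(metaSchema, ['/properties']);
// patched.properties.maximum is now:
// { anyOf: [ { type: 'number' },
//            { $ref: 'https://raw.githubusercontent.com/epoberezkin/ajv/master/lib/refs/$data.json#' } ] }
console.log(JSON.stringify(patched.properties.maximum, null, 2));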

358
website/node_modules/npm/node_modules/ajv/lib/ajv.d.ts generated vendored Normal file
View File

@@ -0,0 +1,358 @@
declare var ajv: {
(options?: ajv.Options): ajv.Ajv;
new (options?: ajv.Options): ajv.Ajv;
ValidationError: ValidationError;
MissingRefError: MissingRefError;
$dataMetaSchema: object;
}
declare namespace ajv {
interface Ajv {
/**
* Validate data using schema
* Schema will be compiled and cached (using serialized JSON as key, [fast-json-stable-stringify](https://github.com/epoberezkin/fast-json-stable-stringify) is used to serialize by default).
* @param {string|object|Boolean} schemaKeyRef key, ref or schema object
* @param {Any} data to be validated
* @return {Boolean} validation result. Errors from the last validation will be available in `ajv.errors` (and also in compiled schema: `schema.errors`).
*/
validate(schemaKeyRef: object | string | boolean, data: any): boolean | Thenable<any>;
/**
* Create validating function for passed schema.
* @param {object|Boolean} schema schema object
* @return {Function} validating function
*/
compile(schema: object | boolean): ValidateFunction;
/**
* Creates validating function for passed schema with asynchronous loading of missing schemas.
* `loadSchema` option should be a function that accepts schema uri and node-style callback.
* @this Ajv
* @param {object|Boolean} schema schema object
* @param {Boolean} meta optional true to compile meta-schema; this parameter can be skipped
* @param {Function} callback optional node-style callback, it is always called with 2 parameters: error (or null) and validating function.
* @return {Thenable<ValidateFunction>} validating function
*/
compileAsync(schema: object | boolean, meta?: Boolean, callback?: (err: Error, validate: ValidateFunction) => any): Thenable<ValidateFunction>;
/**
* Adds schema to the instance.
* @param {object|Array} schema schema or array of schemas. If array is passed, `key` and other parameters will be ignored.
* @param {string} key Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.
* @return {Ajv} this for method chaining
*/
addSchema(schema: Array<object> | object, key?: string): Ajv;
/**
* Add schema that will be used to validate other schemas
* options in META_IGNORE_OPTIONS are always set to false
* @param {object} schema schema object
* @param {string} key optional schema key
* @return {Ajv} this for method chaining
*/
addMetaSchema(schema: object, key?: string): Ajv;
/**
* Validate schema
* @param {object|Boolean} schema schema to validate
* @return {Boolean} true if schema is valid
*/
validateSchema(schema: object | boolean): boolean;
/**
* Get compiled schema from the instance by `key` or `ref`.
* @param {string} keyRef `key` that was passed to `addSchema` or full schema reference (`schema.id` or resolved id).
* @return {Function} schema validating function (with property `schema`).
*/
getSchema(keyRef: string): ValidateFunction;
/**
* Remove cached schema(s).
* If no parameter is passed all schemas but meta-schemas are removed.
* If RegExp is passed all schemas with key/id matching pattern but meta-schemas are removed.
* Even if schema is referenced by other schemas it still can be removed as other schemas have local references.
* @param {string|object|RegExp|Boolean} schemaKeyRef key, ref, pattern to match key/ref or schema object
* @return {Ajv} this for method chaining
*/
removeSchema(schemaKeyRef?: object | string | RegExp | boolean): Ajv;
/**
* Add custom format
* @param {string} name format name
* @param {string|RegExp|Function} format string is converted to RegExp; function should return boolean (true when valid)
* @return {Ajv} this for method chaining
*/
addFormat(name: string, format: FormatValidator | FormatDefinition): Ajv;
/**
* Define custom keyword
* @this Ajv
* @param {string} keyword custom keyword, should be a valid identifier, should be different from all standard, custom and macro keywords.
* @param {object} definition keyword definition object with properties `type` (type(s) which the keyword applies to), `validate` or `compile`.
* @return {Ajv} this for method chaining
*/
addKeyword(keyword: string, definition: KeywordDefinition): Ajv;
/**
* Get keyword definition
* @this Ajv
* @param {string} keyword pre-defined or custom keyword.
* @return {object|Boolean} custom keyword definition, `true` if it is a predefined keyword, `false` otherwise.
*/
getKeyword(keyword: string): object | boolean;
/**
* Remove keyword
* @this Ajv
* @param {string} keyword pre-defined or custom keyword.
* @return {Ajv} this for method chaining
*/
removeKeyword(keyword: string): Ajv;
/**
* Convert array of error message objects to string
* @param {Array<object>} errors optional array of validation errors, if not passed errors from the instance are used.
* @param {object} options optional options with properties `separator` and `dataVar`.
* @return {string} human readable string with all errors descriptions
*/
errorsText(errors?: Array<ErrorObject>, options?: ErrorsTextOptions): string;
errors?: Array<ErrorObject>;
}
interface Thenable <R> {
then <U> (onFulfilled?: (value: R) => U | Thenable<U>, onRejected?: (error: any) => U | Thenable<U>): Thenable<U>;
}
interface ValidateFunction {
(
data: any,
dataPath?: string,
parentData?: object | Array<any>,
parentDataProperty?: string | number,
rootData?: object | Array<any>
): boolean | Thenable<any>;
schema?: object | boolean;
errors?: null | Array<ErrorObject>;
refs?: object;
refVal?: Array<any>;
root?: ValidateFunction | object;
$async?: true;
source?: object;
}
interface Options {
$data?: boolean;
allErrors?: boolean;
verbose?: boolean;
jsonPointers?: boolean;
uniqueItems?: boolean;
unicode?: boolean;
format?: string;
formats?: object;
unknownFormats?: true | string[] | 'ignore';
schemas?: Array<object> | object;
schemaId?: '$id' | 'id';
missingRefs?: true | 'ignore' | 'fail';
extendRefs?: true | 'ignore' | 'fail';
loadSchema?: (uri: string, cb?: (err: Error, schema: object) => void) => Thenable<object | boolean>;
removeAdditional?: boolean | 'all' | 'failing';
useDefaults?: boolean | 'shared';
coerceTypes?: boolean | 'array';
async?: boolean | string;
transpile?: string | ((code: string) => string);
meta?: boolean | object;
validateSchema?: boolean | 'log';
addUsedSchema?: boolean;
inlineRefs?: boolean | number;
passContext?: boolean;
loopRequired?: number;
ownProperties?: boolean;
multipleOfPrecision?: boolean | number;
errorDataPath?: string,
messages?: boolean;
sourceCode?: boolean;
processCode?: (code: string) => string;
cache?: object;
}
type FormatValidator = string | RegExp | ((data: string) => boolean | Thenable<any>);
interface FormatDefinition {
validate: FormatValidator;
compare: (data1: string, data2: string) => number;
async?: boolean;
}
interface KeywordDefinition {
type?: string | Array<string>;
async?: boolean;
$data?: boolean;
errors?: boolean | string;
metaSchema?: object;
// schema: false makes validate not expect a schema (ValidateFunction)
schema?: boolean;
modifying?: boolean;
valid?: boolean;
// one and only one of the following properties should be present
validate?: SchemaValidateFunction | ValidateFunction;
compile?: (schema: any, parentSchema: object, it: CompilationContext) => ValidateFunction;
macro?: (schema: any, parentSchema: object, it: CompilationContext) => object | boolean;
inline?: (it: CompilationContext, keyword: string, schema: any, parentSchema: object) => string;
}
interface CompilationContext {
level: number;
dataLevel: number;
schema: any;
schemaPath: string;
baseId: string;
async: boolean;
opts: Options;
formats: {
[index: string]: FormatDefinition | undefined;
};
compositeRule: boolean;
validate: (schema: object) => boolean;
util: {
copy(obj: any, target?: any): any;
toHash(source: string[]): { [index: string]: true | undefined };
equal(obj: any, target: any): boolean;
getProperty(str: string): string;
schemaHasRules(schema: object, rules: any): string;
escapeQuotes(str: string): string;
toQuotedString(str: string): string;
getData(jsonPointer: string, dataLevel: number, paths: string[]): string;
escapeJsonPointer(str: string): string;
unescapeJsonPointer(str: string): string;
escapeFragment(str: string): string;
unescapeFragment(str: string): string;
};
self: Ajv;
}
interface SchemaValidateFunction {
(
schema: any,
data: any,
parentSchema?: object,
dataPath?: string,
parentData?: object | Array<any>,
parentDataProperty?: string | number,
rootData?: object | Array<any>
): boolean | Thenable<any>;
errors?: Array<ErrorObject>;
}
interface ErrorsTextOptions {
separator?: string;
dataVar?: string;
}
interface ErrorObject {
keyword: string;
dataPath: string;
schemaPath: string;
params: ErrorParameters;
// Added to validation errors of propertyNames keyword schema
propertyName?: string;
// Excluded if messages set to false.
message?: string;
// These are added with the `verbose` option.
schema?: any;
parentSchema?: object;
data?: any;
}
type ErrorParameters = RefParams | LimitParams | AdditionalPropertiesParams |
DependenciesParams | FormatParams | ComparisonParams |
MultipleOfParams | PatternParams | RequiredParams |
TypeParams | UniqueItemsParams | CustomParams |
PatternGroupsParams | PatternRequiredParams |
PropertyNamesParams | SwitchParams | NoParams | EnumParams;
interface RefParams {
ref: string;
}
interface LimitParams {
limit: number;
}
interface AdditionalPropertiesParams {
additionalProperty: string;
}
interface DependenciesParams {
property: string;
missingProperty: string;
depsCount: number;
deps: string;
}
interface FormatParams {
format: string
}
interface ComparisonParams {
comparison: string;
limit: number | string;
exclusive: boolean;
}
interface MultipleOfParams {
multipleOf: number;
}
interface PatternParams {
pattern: string;
}
interface RequiredParams {
missingProperty: string;
}
interface TypeParams {
type: string;
}
interface UniqueItemsParams {
i: number;
j: number;
}
interface CustomParams {
keyword: string;
}
interface PatternGroupsParams {
reason: string;
limit: number;
pattern: string;
}
interface PatternRequiredParams {
missingPattern: string;
}
interface PropertyNamesParams {
propertyName: string;
}
interface SwitchParams {
caseIndex: number;
}
interface NoParams {}
interface EnumParams {
allowedValues: Array<any>;
}
}
declare class ValidationError extends Error {
constructor(errors: Array<ajv.ErrorObject>);
message: string;
errors: Array<ajv.ErrorObject>;
ajv: true;
validation: true;
}
declare class MissingRefError extends Error {
constructor(baseId: string, ref: string, message?: string);
static message: (baseId: string, ref: string) => string;
message: string;
missingRef: string;
missingSchema: string;
}
export = ajv;
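The typings above spell out the addSchema/getSchema round trip; a short JavaScript sketch of that flow (editorial, the schema id is made up for the example):

// Editor's sketch, not part of the vendored file.
var Ajv = require('ajv');
var ajv = new Ajv();

// Both the optional key and the schema's own $id become handles for getSchema.
ajv.addSchema({ $id: 'https://example.com/positive-int.json', type: 'integer', minimum: 1 }, 'positive-int');

var byKey = ajv.getSchema('positive-int');
var byId = ajv.getSchema('https://example.com/positive-int.json');

console.log(byKey(3));  // true
console.log(byId(-1));  // false; per the ValidateFunction interface, details land on byId.errors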

502
website/node_modules/npm/node_modules/ajv/lib/ajv.js generated vendored Normal file
View File

@@ -0,0 +1,502 @@
'use strict';
var compileSchema = require('./compile')
, resolve = require('./compile/resolve')
, Cache = require('./cache')
, SchemaObject = require('./compile/schema_obj')
, stableStringify = require('fast-json-stable-stringify')
, formats = require('./compile/formats')
, rules = require('./compile/rules')
, $dataMetaSchema = require('./$data')
, patternGroups = require('./patternGroups')
, util = require('./compile/util')
, co = require('co');
module.exports = Ajv;
Ajv.prototype.validate = validate;
Ajv.prototype.compile = compile;
Ajv.prototype.addSchema = addSchema;
Ajv.prototype.addMetaSchema = addMetaSchema;
Ajv.prototype.validateSchema = validateSchema;
Ajv.prototype.getSchema = getSchema;
Ajv.prototype.removeSchema = removeSchema;
Ajv.prototype.addFormat = addFormat;
Ajv.prototype.errorsText = errorsText;
Ajv.prototype._addSchema = _addSchema;
Ajv.prototype._compile = _compile;
Ajv.prototype.compileAsync = require('./compile/async');
var customKeyword = require('./keyword');
Ajv.prototype.addKeyword = customKeyword.add;
Ajv.prototype.getKeyword = customKeyword.get;
Ajv.prototype.removeKeyword = customKeyword.remove;
var errorClasses = require('./compile/error_classes');
Ajv.ValidationError = errorClasses.Validation;
Ajv.MissingRefError = errorClasses.MissingRef;
Ajv.$dataMetaSchema = $dataMetaSchema;
var META_SCHEMA_ID = 'http://json-schema.org/draft-06/schema';
var META_IGNORE_OPTIONS = [ 'removeAdditional', 'useDefaults', 'coerceTypes' ];
var META_SUPPORT_DATA = ['/properties'];
/**
* Creates validator instance.
* Usage: `Ajv(opts)`
* @param {Object} opts optional options
* @return {Object} ajv instance
*/
function Ajv(opts) {
if (!(this instanceof Ajv)) return new Ajv(opts);
opts = this._opts = util.copy(opts) || {};
setLogger(this);
this._schemas = {};
this._refs = {};
this._fragments = {};
this._formats = formats(opts.format);
var schemaUriFormat = this._schemaUriFormat = this._formats['uri-reference'];
this._schemaUriFormatFunc = function (str) { return schemaUriFormat.test(str); };
this._cache = opts.cache || new Cache;
this._loadingSchemas = {};
this._compilations = [];
this.RULES = rules();
this._getId = chooseGetId(opts);
opts.loopRequired = opts.loopRequired || Infinity;
if (opts.errorDataPath == 'property') opts._errorDataPathProperty = true;
if (opts.serialize === undefined) opts.serialize = stableStringify;
this._metaOpts = getMetaSchemaOptions(this);
if (opts.formats) addInitialFormats(this);
addDraft6MetaSchema(this);
if (typeof opts.meta == 'object') this.addMetaSchema(opts.meta);
addInitialSchemas(this);
if (opts.patternGroups) patternGroups(this);
}
/**
* Validate data using schema
* Schema will be compiled and cached (using serialized JSON as key; [fast-json-stable-stringify](https://github.com/epoberezkin/fast-json-stable-stringify) is used to serialize).
* @this Ajv
* @param {String|Object} schemaKeyRef key, ref or schema object
* @param {Any} data to be validated
* @return {Boolean} validation result. Errors from the last validation will be available in `ajv.errors` (and also in compiled schema: `schema.errors`).
*/
function validate(schemaKeyRef, data) {
var v;
if (typeof schemaKeyRef == 'string') {
v = this.getSchema(schemaKeyRef);
if (!v) throw new Error('no schema with key or ref "' + schemaKeyRef + '"');
} else {
var schemaObj = this._addSchema(schemaKeyRef);
v = schemaObj.validate || this._compile(schemaObj);
}
var valid = v(data);
if (v.$async === true)
return this._opts.async == '*' ? co(valid) : valid;
this.errors = v.errors;
return valid;
}
/**
* Create validating function for passed schema.
* @this Ajv
* @param {Object} schema schema object
* @param {Boolean} _meta true if schema is a meta-schema. Used internally to compile meta schemas of custom keywords.
* @return {Function} validating function
*/
function compile(schema, _meta) {
var schemaObj = this._addSchema(schema, undefined, _meta);
return schemaObj.validate || this._compile(schemaObj);
}
/**
* Adds schema to the instance.
* @this Ajv
* @param {Object|Array} schema schema or array of schemas. If array is passed, `key` and other parameters will be ignored.
* @param {String} key Optional schema key. Can be passed to `validate` method instead of schema object or id/ref. One schema per instance can have empty `id` and `key`.
* @param {Boolean} _skipValidation true to skip schema validation. Used internally, option validateSchema should be used instead.
* @param {Boolean} _meta true if schema is a meta-schema. Used internally, addMetaSchema should be used instead.
* @return {Ajv} this for method chaining
*/
function addSchema(schema, key, _skipValidation, _meta) {
if (Array.isArray(schema)){
for (var i=0; i<schema.length; i++) this.addSchema(schema[i], undefined, _skipValidation, _meta);
return this;
}
var id = this._getId(schema);
if (id !== undefined && typeof id != 'string')
throw new Error('schema id must be string');
key = resolve.normalizeId(key || id);
checkUnique(this, key);
this._schemas[key] = this._addSchema(schema, _skipValidation, _meta, true);
return this;
}
/**
* Add schema that will be used to validate other schemas
* options in META_IGNORE_OPTIONS are always set to false
* @this Ajv
* @param {Object} schema schema object
* @param {String} key optional schema key
* @param {Boolean} skipValidation true to skip schema validation, can be used to override validateSchema option for meta-schema
* @return {Ajv} this for method chaining
*/
function addMetaSchema(schema, key, skipValidation) {
this.addSchema(schema, key, skipValidation, true);
return this;
}
/**
* Validate schema
* @this Ajv
* @param {Object} schema schema to validate
* @param {Boolean} throwOrLogError pass true to throw (or log) an error if invalid
* @return {Boolean} true if schema is valid
*/
function validateSchema(schema, throwOrLogError) {
var $schema = schema.$schema;
if ($schema !== undefined && typeof $schema != 'string')
throw new Error('$schema must be a string');
$schema = $schema || this._opts.defaultMeta || defaultMeta(this);
if (!$schema) {
this.logger.warn('meta-schema not available');
this.errors = null;
return true;
}
var currentUriFormat = this._formats.uri;
this._formats.uri = typeof currentUriFormat == 'function'
? this._schemaUriFormatFunc
: this._schemaUriFormat;
var valid;
try { valid = this.validate($schema, schema); }
finally { this._formats.uri = currentUriFormat; }
if (!valid && throwOrLogError) {
var message = 'schema is invalid: ' + this.errorsText();
if (this._opts.validateSchema == 'log') this.logger.error(message);
else throw new Error(message);
}
return valid;
}
function defaultMeta(self) {
var meta = self._opts.meta;
self._opts.defaultMeta = typeof meta == 'object'
? self._getId(meta) || meta
: self.getSchema(META_SCHEMA_ID)
? META_SCHEMA_ID
: undefined;
return self._opts.defaultMeta;
}
/**
* Get compiled schema from the instance by `key` or `ref`.
* @this Ajv
* @param {String} keyRef `key` that was passed to `addSchema` or full schema reference (`schema.id` or resolved id).
* @return {Function} schema validating function (with property `schema`).
*/
function getSchema(keyRef) {
var schemaObj = _getSchemaObj(this, keyRef);
switch (typeof schemaObj) {
case 'object': return schemaObj.validate || this._compile(schemaObj);
case 'string': return this.getSchema(schemaObj);
case 'undefined': return _getSchemaFragment(this, keyRef);
}
}
function _getSchemaFragment(self, ref) {
var res = resolve.schema.call(self, { schema: {} }, ref);
if (res) {
var schema = res.schema
, root = res.root
, baseId = res.baseId;
var v = compileSchema.call(self, schema, root, undefined, baseId);
self._fragments[ref] = new SchemaObject({
ref: ref,
fragment: true,
schema: schema,
root: root,
baseId: baseId,
validate: v
});
return v;
}
}
function _getSchemaObj(self, keyRef) {
keyRef = resolve.normalizeId(keyRef);
return self._schemas[keyRef] || self._refs[keyRef] || self._fragments[keyRef];
}
/**
* Remove cached schema(s).
* If no parameter is passed all schemas but meta-schemas are removed.
* If RegExp is passed all schemas with key/id matching pattern but meta-schemas are removed.
* Even if schema is referenced by other schemas it still can be removed as other schemas have local references.
* @this Ajv
* @param {String|Object|RegExp} schemaKeyRef key, ref, pattern to match key/ref or schema object
* @return {Ajv} this for method chaining
*/
function removeSchema(schemaKeyRef) {
if (schemaKeyRef instanceof RegExp) {
_removeAllSchemas(this, this._schemas, schemaKeyRef);
_removeAllSchemas(this, this._refs, schemaKeyRef);
return this;
}
switch (typeof schemaKeyRef) {
case 'undefined':
_removeAllSchemas(this, this._schemas);
_removeAllSchemas(this, this._refs);
this._cache.clear();
return this;
case 'string':
var schemaObj = _getSchemaObj(this, schemaKeyRef);
if (schemaObj) this._cache.del(schemaObj.cacheKey);
delete this._schemas[schemaKeyRef];
delete this._refs[schemaKeyRef];
return this;
case 'object':
var serialize = this._opts.serialize;
var cacheKey = serialize ? serialize(schemaKeyRef) : schemaKeyRef;
this._cache.del(cacheKey);
var id = this._getId(schemaKeyRef);
if (id) {
id = resolve.normalizeId(id);
delete this._schemas[id];
delete this._refs[id];
}
}
return this;
}
function _removeAllSchemas(self, schemas, regex) {
for (var keyRef in schemas) {
var schemaObj = schemas[keyRef];
if (!schemaObj.meta && (!regex || regex.test(keyRef))) {
self._cache.del(schemaObj.cacheKey);
delete schemas[keyRef];
}
}
}
/* @this Ajv */
function _addSchema(schema, skipValidation, meta, shouldAddSchema) {
if (typeof schema != 'object' && typeof schema != 'boolean')
throw new Error('schema should be object or boolean');
var serialize = this._opts.serialize;
var cacheKey = serialize ? serialize(schema) : schema;
var cached = this._cache.get(cacheKey);
if (cached) return cached;
shouldAddSchema = shouldAddSchema || this._opts.addUsedSchema !== false;
var id = resolve.normalizeId(this._getId(schema));
if (id && shouldAddSchema) checkUnique(this, id);
var willValidate = this._opts.validateSchema !== false && !skipValidation;
var recursiveMeta;
if (willValidate && !(recursiveMeta = id && id == resolve.normalizeId(schema.$schema)))
this.validateSchema(schema, true);
var localRefs = resolve.ids.call(this, schema);
var schemaObj = new SchemaObject({
id: id,
schema: schema,
localRefs: localRefs,
cacheKey: cacheKey,
meta: meta
});
if (id[0] != '#' && shouldAddSchema) this._refs[id] = schemaObj;
this._cache.put(cacheKey, schemaObj);
if (willValidate && recursiveMeta) this.validateSchema(schema, true);
return schemaObj;
}
/* @this Ajv */
function _compile(schemaObj, root) {
if (schemaObj.compiling) {
schemaObj.validate = callValidate;
callValidate.schema = schemaObj.schema;
callValidate.errors = null;
callValidate.root = root ? root : callValidate;
if (schemaObj.schema.$async === true)
callValidate.$async = true;
return callValidate;
}
schemaObj.compiling = true;
var currentOpts;
if (schemaObj.meta) {
currentOpts = this._opts;
this._opts = this._metaOpts;
}
var v;
try { v = compileSchema.call(this, schemaObj.schema, root, schemaObj.localRefs); }
finally {
schemaObj.compiling = false;
if (schemaObj.meta) this._opts = currentOpts;
}
schemaObj.validate = v;
schemaObj.refs = v.refs;
schemaObj.refVal = v.refVal;
schemaObj.root = v.root;
return v;
function callValidate() {
var _validate = schemaObj.validate;
var result = _validate.apply(null, arguments);
callValidate.errors = _validate.errors;
return result;
}
}
function chooseGetId(opts) {
switch (opts.schemaId) {
case '$id': return _get$Id;
case 'id': return _getId;
default: return _get$IdOrId;
}
}
/* @this Ajv */
function _getId(schema) {
if (schema.$id) this.logger.warn('schema $id ignored', schema.$id);
return schema.id;
}
/* @this Ajv */
function _get$Id(schema) {
if (schema.id) this.logger.warn('schema id ignored', schema.id);
return schema.$id;
}
function _get$IdOrId(schema) {
if (schema.$id && schema.id && schema.$id != schema.id)
throw new Error('schema $id is different from id');
return schema.$id || schema.id;
}
/**
* Convert array of error message objects to string
* @this Ajv
* @param {Array<Object>} errors optional array of validation errors, if not passed errors from the instance are used.
* @param {Object} options optional options with properties `separator` and `dataVar`.
* @return {String} human readable string with all errors descriptions
*/
function errorsText(errors, options) {
errors = errors || this.errors;
if (!errors) return 'No errors';
options = options || {};
var separator = options.separator === undefined ? ', ' : options.separator;
var dataVar = options.dataVar === undefined ? 'data' : options.dataVar;
var text = '';
for (var i=0; i<errors.length; i++) {
var e = errors[i];
if (e) text += dataVar + e.dataPath + ' ' + e.message + separator;
}
return text.slice(0, -separator.length);
}
/**
* Add custom format
* @this Ajv
* @param {String} name format name
* @param {String|RegExp|Function} format string is converted to RegExp; function should return boolean (true when valid)
* @return {Ajv} this for method chaining
*/
function addFormat(name, format) {
if (typeof format == 'string') format = new RegExp(format);
this._formats[name] = format;
return this;
}
function addDraft6MetaSchema(self) {
var $dataSchema;
if (self._opts.$data) {
$dataSchema = require('./refs/$data.json');
self.addMetaSchema($dataSchema, $dataSchema.$id, true);
}
if (self._opts.meta === false) return;
var metaSchema = require('./refs/json-schema-draft-06.json');
if (self._opts.$data) metaSchema = $dataMetaSchema(metaSchema, META_SUPPORT_DATA);
self.addMetaSchema(metaSchema, META_SCHEMA_ID, true);
self._refs['http://json-schema.org/schema'] = META_SCHEMA_ID;
}
function addInitialSchemas(self) {
var optsSchemas = self._opts.schemas;
if (!optsSchemas) return;
if (Array.isArray(optsSchemas)) self.addSchema(optsSchemas);
else for (var key in optsSchemas) self.addSchema(optsSchemas[key], key);
}
function addInitialFormats(self) {
for (var name in self._opts.formats) {
var format = self._opts.formats[name];
self.addFormat(name, format);
}
}
function checkUnique(self, id) {
if (self._schemas[id] || self._refs[id])
throw new Error('schema with key or id "' + id + '" already exists');
}
function getMetaSchemaOptions(self) {
var metaOpts = util.copy(self._opts);
for (var i=0; i<META_IGNORE_OPTIONS.length; i++)
delete metaOpts[META_IGNORE_OPTIONS[i]];
return metaOpts;
}
function setLogger(self) {
var logger = self._opts.logger;
if (logger === false) {
self.logger = {log: noop, warn: noop, error: noop};
} else {
if (logger === undefined) logger = console;
if (!(typeof logger == 'object' && logger.log && logger.warn && logger.error))
throw new Error('logger must implement log, warn and error methods');
self.logger = logger;
}
}
function noop() {}
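To tie a few of the helpers above together — addFormat's string-to-RegExp conversion, errorsText's separator/dataVar options and the logger handling in setLogger — here is a short editorial sketch (the 'semver-ish' format name is invented for the example):

// Editor's sketch, not part of the vendored file.
var Ajv = require('ajv');

// logger: false installs the noop logger above; a custom logger must expose
// log, warn and error or the constructor throws.
var ajv = new Ajv({ allErrors: true, logger: false });

// addFormat: a plain string is converted to a RegExp by the function above.
ajv.addFormat('semver-ish', '^\\d+\\.\\d+\\.\\d+$');

var validate = ajv.compile({ type: 'string', format: 'semver-ish' });

if (!validate('1.2')) {
  // errorsText joins messages with `separator` and prefixes each with `dataVar`.
  console.log(ajv.errorsText(validate.errors, { dataVar: 'version', separator: ' | ' }));
}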

26
website/node_modules/npm/node_modules/ajv/lib/cache.js generated vendored Normal file
View File

@@ -0,0 +1,26 @@
'use strict';
var Cache = module.exports = function Cache() {
this._cache = {};
};
Cache.prototype.put = function Cache_put(key, value) {
this._cache[key] = value;
};
Cache.prototype.get = function Cache_get(key) {
return this._cache[key];
};
Cache.prototype.del = function Cache_del(key) {
delete this._cache[key];
};
Cache.prototype.clear = function Cache_clear() {
this._cache = {};
};
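Because the Ajv constructor earlier in this diff reads `this._cache = opts.cache || new Cache`, any object with the same put/get/del/clear surface can stand in for this default in-memory cache. An editorial sketch:

// Editor's sketch, not part of the vendored file.
var Ajv = require('ajv');

var hits = 0;
var customCache = {
  _store: Object.create(null),
  put: function (key, value) { this._store[key] = value; },
  get: function (key) { if (key in this._store) hits++; return this._store[key]; },
  del: function (key) { delete this._store[key]; },
  clear: function () { this._store = Object.create(null); }
};

var ajv = new Ajv({ cache: customCache });
var schema = { type: 'number' };
ajv.compile(schema);
ajv.compile(schema); // the repeat compile is served from the custom cache
console.log(hits >= 1); // true — the second compile found the entry the first one stored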

Some files were not shown because too many files have changed in this diff