Mirror of https://github.com/titanscouting/tra-analysis.git, synced 2025-07-27 13:18:49 +00:00
node_modules/http-parser-js/http-parser.js (446 lines, 12 KiB, JavaScript)
/*jshint node:true */

var assert = require('assert');

exports.HTTPParser = HTTPParser;
function HTTPParser(type) {
  assert.ok(type === HTTPParser.REQUEST || type === HTTPParser.RESPONSE);
  this.type = type;
  this.state = type + '_LINE';
  this.info = {
    headers: [],
    upgrade: false
  };
  this.trailers = [];
  this.line = '';
  this.isChunked = false;
  this.connection = '';
  this.headerSize = 0; // for preventing too big headers
  this.body_bytes = null;
  this.isUserCall = false;
  this.hadError = false;
}
HTTPParser.encoding = 'ascii';
HTTPParser.maxHeaderSize = 80 * 1024; // maxHeaderSize (in bytes) is configurable, but 80kb by default;
HTTPParser.REQUEST = 'REQUEST';
HTTPParser.RESPONSE = 'RESPONSE';
var kOnHeaders = HTTPParser.kOnHeaders = 0;
var kOnHeadersComplete = HTTPParser.kOnHeadersComplete = 1;
var kOnBody = HTTPParser.kOnBody = 2;
var kOnMessageComplete = HTTPParser.kOnMessageComplete = 3;

// Some handler stubs, needed for compatibility
HTTPParser.prototype[kOnHeaders] =
HTTPParser.prototype[kOnHeadersComplete] =
HTTPParser.prototype[kOnBody] =
HTTPParser.prototype[kOnMessageComplete] = function () {};

var compatMode0_12 = true;
Object.defineProperty(HTTPParser, 'kOnExecute', {
  get: function () {
    // hack for backward compatibility
    compatMode0_12 = false;
    return 4;
  }
});

var methods = exports.methods = HTTPParser.methods = [
  'DELETE',
  'GET',
  'HEAD',
  'POST',
  'PUT',
  'CONNECT',
  'OPTIONS',
  'TRACE',
  'COPY',
  'LOCK',
  'MKCOL',
  'MOVE',
  'PROPFIND',
  'PROPPATCH',
  'SEARCH',
  'UNLOCK',
  'BIND',
  'REBIND',
  'UNBIND',
  'ACL',
  'REPORT',
  'MKACTIVITY',
  'CHECKOUT',
  'MERGE',
  'M-SEARCH',
  'NOTIFY',
  'SUBSCRIBE',
  'UNSUBSCRIBE',
  'PATCH',
  'PURGE',
  'MKCALENDAR',
  'LINK',
  'UNLINK'
];
var method_connect = methods.indexOf('CONNECT');
HTTPParser.prototype.reinitialize = HTTPParser;
HTTPParser.prototype.close =
HTTPParser.prototype.pause =
HTTPParser.prototype.resume =
HTTPParser.prototype.free = function () {};
HTTPParser.prototype._compatMode0_11 = false;
HTTPParser.prototype.getAsyncId = function() { return 0; };

var headerState = {
  REQUEST_LINE: true,
  RESPONSE_LINE: true,
  HEADER: true
};
HTTPParser.prototype.execute = function (chunk, start, length) {
  if (!(this instanceof HTTPParser)) {
    throw new TypeError('not a HTTPParser');
  }

  // backward compat to node < 0.11.4
  // Note: the start and length params were removed in newer versions
  start = start || 0;
  length = typeof length === 'number' ? length : chunk.length;

  this.chunk = chunk;
  this.offset = start;
  var end = this.end = start + length;
  try {
    while (this.offset < end) {
      if (this[this.state]()) {
        break;
      }
    }
  } catch (err) {
    if (this.isUserCall) {
      throw err;
    }
    this.hadError = true;
    return err;
  }
  this.chunk = null;
  length = this.offset - start;
  if (headerState[this.state]) {
    this.headerSize += length;
    if (this.headerSize > HTTPParser.maxHeaderSize) {
      return new Error('max header size exceeded');
    }
  }
  return length;
};

var stateFinishAllowed = {
  REQUEST_LINE: true,
  RESPONSE_LINE: true,
  BODY_RAW: true
};
HTTPParser.prototype.finish = function () {
  if (this.hadError) {
    return;
  }
  if (!stateFinishAllowed[this.state]) {
    return new Error('invalid state for EOF');
  }
  if (this.state === 'BODY_RAW') {
    this.userCall()(this[kOnMessageComplete]());
  }
};

// These three methods are used for an internal speed optimization, and it also
// works if these are noops. Basically consume() asks us to read the bytes
// ourselves, but if we don't do it we get them through execute().
HTTPParser.prototype.consume =
HTTPParser.prototype.unconsume =
HTTPParser.prototype.getCurrentBuffer = function () {};

// For correct error handling - see HTTPParser#execute
// Usage: this.userCall()(userFunction('arg'));
HTTPParser.prototype.userCall = function () {
  this.isUserCall = true;
  var self = this;
  return function (ret) {
    self.isUserCall = false;
    return ret;
  };
};

HTTPParser.prototype.nextRequest = function () {
  this.userCall()(this[kOnMessageComplete]());
  this.reinitialize(this.type);
};

HTTPParser.prototype.consumeLine = function () {
  var end = this.end,
      chunk = this.chunk;
  for (var i = this.offset; i < end; i++) {
    if (chunk[i] === 0x0a) { // \n
      var line = this.line + chunk.toString(HTTPParser.encoding, this.offset, i);
      if (line.charAt(line.length - 1) === '\r') {
        line = line.substr(0, line.length - 1);
      }
      this.line = '';
      this.offset = i + 1;
      return line;
    }
  }
  // line split over multiple chunks
  this.line += chunk.toString(HTTPParser.encoding, this.offset, this.end);
  this.offset = this.end;
};

var headerExp = /^([^: \t]+):[ \t]*((?:.*[^ \t])|)/;
var headerContinueExp = /^[ \t]+(.*[^ \t])/;
HTTPParser.prototype.parseHeader = function (line, headers) {
  if (line.indexOf('\r') !== -1) {
    throw parseErrorCode('HPE_LF_EXPECTED');
  }

  var match = headerExp.exec(line);
  var k = match && match[1];
  if (k) { // skip empty string (malformed header)
    headers.push(k);
    headers.push(match[2]);
  } else {
    var matchContinue = headerContinueExp.exec(line);
    if (matchContinue && headers.length) {
      if (headers[headers.length - 1]) {
        headers[headers.length - 1] += ' ';
      }
      headers[headers.length - 1] += matchContinue[1];
    }
  }
};

var requestExp = /^([A-Z-]+) ([^ ]+) HTTP\/(\d)\.(\d)$/;
HTTPParser.prototype.REQUEST_LINE = function () {
  var line = this.consumeLine();
  if (!line) {
    return;
  }
  var match = requestExp.exec(line);
  if (match === null) {
    throw parseErrorCode('HPE_INVALID_CONSTANT');
  }
  this.info.method = this._compatMode0_11 ? match[1] : methods.indexOf(match[1]);
  if (this.info.method === -1) {
    throw new Error('invalid request method');
  }
  this.info.url = match[2];
  this.info.versionMajor = +match[3];
  this.info.versionMinor = +match[4];
  this.body_bytes = 0;
  this.state = 'HEADER';
};

var responseExp = /^HTTP\/(\d)\.(\d) (\d{3}) ?(.*)$/;
HTTPParser.prototype.RESPONSE_LINE = function () {
  var line = this.consumeLine();
  if (!line) {
    return;
  }
  var match = responseExp.exec(line);
  if (match === null) {
    throw parseErrorCode('HPE_INVALID_CONSTANT');
  }
  this.info.versionMajor = +match[1];
  this.info.versionMinor = +match[2];
  var statusCode = this.info.statusCode = +match[3];
  this.info.statusMessage = match[4];
  // Implied zero length.
  if ((statusCode / 100 | 0) === 1 || statusCode === 204 || statusCode === 304) {
    this.body_bytes = 0;
  }
  this.state = 'HEADER';
};

HTTPParser.prototype.shouldKeepAlive = function () {
  if (this.info.versionMajor > 0 && this.info.versionMinor > 0) {
    if (this.connection.indexOf('close') !== -1) {
      return false;
    }
  } else if (this.connection.indexOf('keep-alive') === -1) {
    return false;
  }
  if (this.body_bytes !== null || this.isChunked) { // || skipBody
    return true;
  }
  return false;
};

HTTPParser.prototype.HEADER = function () {
  var line = this.consumeLine();
  if (line === undefined) {
    return;
  }
  var info = this.info;
  if (line) {
    this.parseHeader(line, info.headers);
  } else {
    var headers = info.headers;
    var hasContentLength = false;
    var currentContentLengthValue;
    var hasUpgradeHeader = false;
    for (var i = 0; i < headers.length; i += 2) {
      switch (headers[i].toLowerCase()) {
        case 'transfer-encoding':
          this.isChunked = headers[i + 1].toLowerCase() === 'chunked';
          break;
        case 'content-length':
          currentContentLengthValue = +headers[i + 1];
          if (hasContentLength) {
            // Duplicate Content-Length headers with the same value are tolerated;
            // throw only if the values differ.
            // Known issues:
            // https://github.com/request/request/issues/2091#issuecomment-328715113
            // https://github.com/nodejs/node/issues/6517#issuecomment-216263771
            if (currentContentLengthValue !== this.body_bytes) {
              throw parseErrorCode('HPE_UNEXPECTED_CONTENT_LENGTH');
            }
          } else {
            hasContentLength = true;
            this.body_bytes = currentContentLengthValue;
          }
          break;
        case 'connection':
          this.connection += headers[i + 1].toLowerCase();
          break;
        case 'upgrade':
          hasUpgradeHeader = true;
          break;
      }
    }

    // If both isChunked and hasContentLength are set, isChunked wins.
    // This is required so the body is parsed using the chunked method, and matches
    // Chrome's behavior. We could, maybe, ignore them both (would get chunked
    // encoding into the body), and/or disable shouldKeepAlive to be more
    // resilient.
    if (this.isChunked && hasContentLength) {
      hasContentLength = false;
      this.body_bytes = null;
    }

    // Logic from https://github.com/nodejs/http-parser/blob/921d5585515a153fa00e411cf144280c59b41f90/http_parser.c#L1727-L1737
    // "For responses, "Upgrade: foo" and "Connection: upgrade" are
    // mandatory only when it is a 101 Switching Protocols response,
    // otherwise it is purely informational, to announce support."
    if (hasUpgradeHeader && this.connection.indexOf('upgrade') != -1) {
      info.upgrade = this.type === HTTPParser.REQUEST || info.statusCode === 101;
    } else {
      info.upgrade = info.method === method_connect;
    }

    info.shouldKeepAlive = this.shouldKeepAlive();
    // problem which also exists in original node: we should know skipBody before calling onHeadersComplete
    var skipBody;
    if (compatMode0_12) {
      skipBody = this.userCall()(this[kOnHeadersComplete](info));
    } else {
      skipBody = this.userCall()(this[kOnHeadersComplete](info.versionMajor,
        info.versionMinor, info.headers, info.method, info.url, info.statusCode,
        info.statusMessage, info.upgrade, info.shouldKeepAlive));
    }
    if (skipBody === 2) {
      this.nextRequest();
      return true;
    } else if (this.isChunked && !skipBody) {
      this.state = 'BODY_CHUNKHEAD';
    } else if (skipBody || this.body_bytes === 0) {
      this.nextRequest();
      // For older versions of node (v6.x and older?) that return skipBody=1 or skipBody=true,
      // this "return true;" is needed if it's an upgrade request.
      return info.upgrade;
    } else if (this.body_bytes === null) {
      this.state = 'BODY_RAW';
    } else {
      this.state = 'BODY_SIZED';
    }
  }
};

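// Worked example (illustrative): for a request that sends both framing headers,
//
//   POST /upload HTTP/1.1\r\n
//   Content-Length: 100\r\n
//   Transfer-Encoding: chunked\r\n
//   \r\n
//   5\r\nhello\r\n0\r\n\r\n
//
// the HEADER state above resets body_bytes to null and switches to BODY_CHUNKHEAD,
// so kOnBody receives the de-chunked "hello" and kOnMessageComplete fires after the
// terminating zero-size chunk.
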
HTTPParser.prototype.BODY_CHUNKHEAD = function () {
  var line = this.consumeLine();
  if (line === undefined) {
    return;
  }
  this.body_bytes = parseInt(line, 16);
  if (!this.body_bytes) {
    this.state = 'BODY_CHUNKTRAILERS';
  } else {
    this.state = 'BODY_CHUNK';
  }
};

HTTPParser.prototype.BODY_CHUNK = function () {
  var length = Math.min(this.end - this.offset, this.body_bytes);
  this.userCall()(this[kOnBody](this.chunk, this.offset, length));
  this.offset += length;
  this.body_bytes -= length;
  if (!this.body_bytes) {
    this.state = 'BODY_CHUNKEMPTYLINE';
  }
};

HTTPParser.prototype.BODY_CHUNKEMPTYLINE = function () {
  var line = this.consumeLine();
  if (line === undefined) {
    return;
  }
  assert.equal(line, '');
  this.state = 'BODY_CHUNKHEAD';
};

HTTPParser.prototype.BODY_CHUNKTRAILERS = function () {
  var line = this.consumeLine();
  if (line === undefined) {
    return;
  }
  if (line) {
    this.parseHeader(line, this.trailers);
  } else {
    if (this.trailers.length) {
      this.userCall()(this[kOnHeaders](this.trailers, ''));
    }
    this.nextRequest();
  }
};

HTTPParser.prototype.BODY_RAW = function () {
  var length = this.end - this.offset;
  this.userCall()(this[kOnBody](this.chunk, this.offset, length));
  this.offset = this.end;
};

HTTPParser.prototype.BODY_SIZED = function () {
  var length = Math.min(this.end - this.offset, this.body_bytes);
  this.userCall()(this[kOnBody](this.chunk, this.offset, length));
  this.offset += length;
  this.body_bytes -= length;
  if (!this.body_bytes) {
    this.nextRequest();
  }
};

// backward compat to node < 0.11.6
['Headers', 'HeadersComplete', 'Body', 'MessageComplete'].forEach(function (name) {
  var k = HTTPParser['kOn' + name];
  Object.defineProperty(HTTPParser.prototype, 'on' + name, {
    get: function () {
      return this[k];
    },
    set: function (to) {
      // hack for backward compatibility
      this._compatMode0_11 = true;
      method_connect = 'CONNECT';
      return (this[k] = to);
    }
  });
});

function parseErrorCode(code) {
  var err = new Error('Parse Error');
  err.code = code;
  return err;
}
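For orientation, here is a minimal usage sketch of the parser defined above, assuming the file is loaded as ./http-parser. The handler slots (kOnHeadersComplete, kOnBody, kOnMessageComplete), execute(), and finish() come straight from the code; the sample response buffer and the handler bodies are invented for illustration only.

// Minimal usage sketch (illustrative; the response bytes below are made up).
var HTTPParser = require('./http-parser').HTTPParser;

var parser = new HTTPParser(HTTPParser.RESPONSE);
var bodyChunks = [];

// In the default (0.12+) compatibility mode, kOnHeadersComplete receives one info object.
parser[HTTPParser.kOnHeadersComplete] = function (info) {
  console.log('HTTP/%d.%d %d %s', info.versionMajor, info.versionMinor,
    info.statusCode, info.statusMessage);
  console.log(info.headers); // flat [name, value, name, value, ...] array
};
parser[HTTPParser.kOnBody] = function (chunk, offset, length) {
  bodyChunks.push(chunk.slice(offset, offset + length));
};
parser[HTTPParser.kOnMessageComplete] = function () {
  console.log('body:', Buffer.concat(bodyChunks).toString());
};

var raw = Buffer.from(
  'HTTP/1.1 200 OK\r\n' +
  'Content-Length: 5\r\n' +
  '\r\n' +
  'hello');

var ret = parser.execute(raw); // returns bytes consumed, or an Error on parse failure
if (ret instanceof Error) throw ret;
parser.finish();               // signal EOF; returns an Error if called in an invalid state

Note that assigning handlers through the numeric kOn* slots keeps the parser in its default mode; using the onHeaders/onHeadersComplete/onBody/onMessageComplete accessors instead switches it into the pre-0.11.6 compatibility mode defined at the end of the file.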