Diffstat (limited to 'frontend-old/node_modules/http-parser-js/http-parser.js')
-rw-r--r--  frontend-old/node_modules/http-parser-js/http-parser.js  470
1 file changed, 470 insertions, 0 deletions
diff --git a/frontend-old/node_modules/http-parser-js/http-parser.js b/frontend-old/node_modules/http-parser-js/http-parser.js
new file mode 100644
index 0000000..3e98ab9
--- /dev/null
+++ b/frontend-old/node_modules/http-parser-js/http-parser.js
@@ -0,0 +1,470 @@
+/*jshint node:true */
+
+exports.HTTPParser = HTTPParser;
+function HTTPParser(type) {
+ if (type !== undefined && type !== HTTPParser.REQUEST && type !== HTTPParser.RESPONSE) {
+ throw new Error('type must be REQUEST or RESPONSE');
+ }
+ if (type === undefined) {
+ // Node v12+
+ } else {
+ this.initialize(type);
+ }
+ this.maxHeaderSize = HTTPParser.maxHeaderSize;
+}
+HTTPParser.prototype.initialize = function (type, async_resource) {
+ if (type !== HTTPParser.REQUEST && type !== HTTPParser.RESPONSE) {
+ throw new Error('type must be REQUEST or RESPONSE');
+ }
+ this.type = type;
+ this.state = type + '_LINE';
+ this.info = {
+ headers: [],
+ upgrade: false
+ };
+ this.trailers = [];
+ this.line = '';
+ this.isChunked = false;
+ this.connection = '';
+ this.headerSize = 0; // for preventing too big headers
+ this.body_bytes = null;
+ this.isUserCall = false;
+ this.hadError = false;
+};
+
+HTTPParser.encoding = 'ascii';
+HTTPParser.maxHeaderSize = 80 * 1024; // maxHeaderSize (in bytes) is configurable; 80 KiB by default.
+HTTPParser.REQUEST = 'REQUEST';
+HTTPParser.RESPONSE = 'RESPONSE';
+
+// Note: *not* starting with kOnHeaders=0 like the Node parser, because any
+// newly added constants (kOnTimeout in Node v12.19.0) will overwrite 0!
+var kOnHeaders = HTTPParser.kOnHeaders = 1;
+var kOnHeadersComplete = HTTPParser.kOnHeadersComplete = 2;
+var kOnBody = HTTPParser.kOnBody = 3;
+var kOnMessageComplete = HTTPParser.kOnMessageComplete = 4;
+
+// Some handler stubs, needed for compatibility
+HTTPParser.prototype[kOnHeaders] =
+HTTPParser.prototype[kOnHeadersComplete] =
+HTTPParser.prototype[kOnBody] =
+HTTPParser.prototype[kOnMessageComplete] = function () {};
+
+var compatMode0_12 = true;
+Object.defineProperty(HTTPParser, 'kOnExecute', {
+ get: function () {
+ // hack for backward compatibility
+ compatMode0_12 = false;
+ return 99;
+ }
+ });
+
+var methods = exports.methods = HTTPParser.methods = [
+ 'DELETE',
+ 'GET',
+ 'HEAD',
+ 'POST',
+ 'PUT',
+ 'CONNECT',
+ 'OPTIONS',
+ 'TRACE',
+ 'COPY',
+ 'LOCK',
+ 'MKCOL',
+ 'MOVE',
+ 'PROPFIND',
+ 'PROPPATCH',
+ 'SEARCH',
+ 'UNLOCK',
+ 'BIND',
+ 'REBIND',
+ 'UNBIND',
+ 'ACL',
+ 'REPORT',
+ 'MKACTIVITY',
+ 'CHECKOUT',
+ 'MERGE',
+ 'M-SEARCH',
+ 'NOTIFY',
+ 'SUBSCRIBE',
+ 'UNSUBSCRIBE',
+ 'PATCH',
+ 'PURGE',
+ 'MKCALENDAR',
+ 'LINK',
+ 'UNLINK',
+ 'SOURCE',
+];
+var method_connect = methods.indexOf('CONNECT');
+HTTPParser.prototype.reinitialize = HTTPParser;
+HTTPParser.prototype.close =
+HTTPParser.prototype.pause =
+HTTPParser.prototype.resume =
+HTTPParser.prototype.remove =
+HTTPParser.prototype.free = function () {};
+HTTPParser.prototype._compatMode0_11 = false;
+HTTPParser.prototype.getAsyncId = function() { return 0; };
+
+var headerState = {
+ REQUEST_LINE: true,
+ RESPONSE_LINE: true,
+ HEADER: true
+};
+HTTPParser.prototype.execute = function (chunk, start, length) {
+ if (!(this instanceof HTTPParser)) {
+ throw new TypeError('not a HTTPParser');
+ }
+
+ // backward compat to node < 0.11.4
+ // Note: the start and length params were removed in newer versions
+ start = start || 0;
+ length = typeof length === 'number' ? length : chunk.length;
+
+ this.chunk = chunk;
+ this.offset = start;
+ var end = this.end = start + length;
+ try {
+ while (this.offset < end) {
+ if (this[this.state]()) {
+ break;
+ }
+ }
+ } catch (err) {
+ if (this.isUserCall) {
+ throw err;
+ }
+ this.hadError = true;
+ return err;
+ }
+ this.chunk = null;
+ length = this.offset - start;
+ if (headerState[this.state]) {
+ this.headerSize += length;
+ if (this.headerSize > (this.maxHeaderSize||HTTPParser.maxHeaderSize)) {
+ return new Error('max header size exceeded');
+ }
+ }
+ return length;
+};
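A quick illustration of the return contract above (a sketch, not part of the diff): parse errors raised outside a user callback are caught and returned from execute() as an Error object rather than thrown, so callers are expected to check the return value. The require path assumes the package resolves to this vendored file, and the request bytes are deliberately invalid.

var HTTPParser = require('http-parser-js').HTTPParser;

var parser = new HTTPParser(HTTPParser.REQUEST);
var ret = parser.execute(Buffer.from('NOT A REQUEST LINE\r\n\r\n'));
if (ret instanceof Error) {
  console.error('parse failed:', ret.code); // e.g. HPE_INVALID_CONSTANT
} else {
  console.log('parsed ' + ret + ' bytes');
}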
+
+var stateFinishAllowed = {
+ REQUEST_LINE: true,
+ RESPONSE_LINE: true,
+ BODY_RAW: true
+};
+HTTPParser.prototype.finish = function () {
+ if (this.hadError) {
+ return;
+ }
+ if (!stateFinishAllowed[this.state]) {
+ return new Error('invalid state for EOF');
+ }
+ if (this.state === 'BODY_RAW') {
+ this.userCall()(this[kOnMessageComplete]());
+ }
+};
+
+// These three methods are used for an internal speed optimization, and it also
+// works if these are noops. Basically consume() asks us to read the bytes
+// ourselves, but if we don't do it we get them through execute().
+HTTPParser.prototype.consume =
+HTTPParser.prototype.unconsume =
+HTTPParser.prototype.getCurrentBuffer = function () {};
+
+// For correct error handling - see HTTPParser#execute
+// Usage: this.userCall()(userFunction('arg'));
+HTTPParser.prototype.userCall = function () {
+ this.isUserCall = true;
+ var self = this;
+ return function (ret) {
+ self.isUserCall = false;
+ return ret;
+ };
+};
+
+HTTPParser.prototype.nextRequest = function () {
+ this.userCall()(this[kOnMessageComplete]());
+ this.reinitialize(this.type);
+};
+
+HTTPParser.prototype.consumeLine = function () {
+ var end = this.end,
+ chunk = this.chunk;
+ for (var i = this.offset; i < end; i++) {
+ if (chunk[i] === 0x0a) { // \n
+ var line = this.line + chunk.toString(HTTPParser.encoding, this.offset, i);
+ if (line.charAt(line.length - 1) === '\r') {
+ line = line.substr(0, line.length - 1);
+ }
+ this.line = '';
+ this.offset = i + 1;
+ return line;
+ }
+ }
+ // line split over multiple chunks
+ this.line += chunk.toString(HTTPParser.encoding, this.offset, this.end);
+ this.offset = this.end;
+};
+
+var headerExp = /^([^: \t]+):[ \t]*((?:.*[^ \t])|)/;
+var headerContinueExp = /^[ \t]+(.*[^ \t])/;
+HTTPParser.prototype.parseHeader = function (line, headers) {
+ if (line.indexOf('\r') !== -1) {
+ throw parseErrorCode('HPE_LF_EXPECTED');
+ }
+
+ var match = headerExp.exec(line);
+ var k = match && match[1];
+ if (k) { // skip empty string (malformed header)
+ headers.push(k);
+ headers.push(match[2]);
+ } else {
+ var matchContinue = headerContinueExp.exec(line);
+ if (matchContinue && headers.length) {
+ if (headers[headers.length - 1]) {
+ headers[headers.length - 1] += ' ';
+ }
+ headers[headers.length - 1] += matchContinue[1];
+ }
+ }
+};
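As an aside (an illustrative snippet, not part of the file): parseHeader() accumulates headers into a flat [name, value, name, value, ...] array, and an obsolete folded continuation line is appended to the previous value. The header names and values below are made up.

var HTTPParser = require('http-parser-js').HTTPParser;
var parser = new HTTPParser(HTTPParser.REQUEST);

var headers = [];
parser.parseHeader('Host: example.com', headers);
parser.parseHeader('X-Long: first part', headers);
parser.parseHeader(' second part', headers); // leading whitespace marks a folded continuation
// headers is now ['Host', 'example.com', 'X-Long', 'first part second part']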
+
+var requestExp = /^([A-Z-]+) ([^ ]+) HTTP\/(\d)\.(\d)$/;
+HTTPParser.prototype.REQUEST_LINE = function () {
+ var line = this.consumeLine();
+ if (!line) {
+ return;
+ }
+ var match = requestExp.exec(line);
+ if (match === null) {
+ throw parseErrorCode('HPE_INVALID_CONSTANT');
+ }
+ this.info.method = this._compatMode0_11 ? match[1] : methods.indexOf(match[1]);
+ if (this.info.method === -1) {
+ throw new Error('invalid request method');
+ }
+ this.info.url = match[2];
+ this.info.versionMajor = +match[3];
+ this.info.versionMinor = +match[4];
+ this.body_bytes = 0;
+ this.state = 'HEADER';
+};
+
+var responseExp = /^HTTP\/(\d)\.(\d) (\d{3}) ?(.*)$/;
+HTTPParser.prototype.RESPONSE_LINE = function () {
+ var line = this.consumeLine();
+ if (!line) {
+ return;
+ }
+ var match = responseExp.exec(line);
+ if (match === null) {
+ throw parseErrorCode('HPE_INVALID_CONSTANT');
+ }
+ this.info.versionMajor = +match[1];
+ this.info.versionMinor = +match[2];
+ var statusCode = this.info.statusCode = +match[3];
+ this.info.statusMessage = match[4];
+ // Implied zero length.
+ if ((statusCode / 100 | 0) === 1 || statusCode === 204 || statusCode === 304) {
+ this.body_bytes = 0;
+ }
+ this.state = 'HEADER';
+};
+
+HTTPParser.prototype.shouldKeepAlive = function () {
+ if (this.info.versionMajor > 0 && this.info.versionMinor > 0) {
+ if (this.connection.indexOf('close') !== -1) {
+ return false;
+ }
+ } else if (this.connection.indexOf('keep-alive') === -1) {
+ return false;
+ }
+ if (this.body_bytes !== null || this.isChunked) { // || skipBody
+ return true;
+ }
+ return false;
+};
+
+HTTPParser.prototype.HEADER = function () {
+ var line = this.consumeLine();
+ if (line === undefined) {
+ return;
+ }
+ var info = this.info;
+ if (line) {
+ this.parseHeader(line, info.headers);
+ } else {
+ var headers = info.headers;
+ var hasContentLength = false;
+ var currentContentLengthValue;
+ var hasUpgradeHeader = false;
+ for (var i = 0; i < headers.length; i += 2) {
+ switch (headers[i].toLowerCase()) {
+ case 'transfer-encoding':
+ this.isChunked = headers[i + 1].toLowerCase() === 'chunked';
+ break;
+ case 'content-length':
+ currentContentLengthValue = +headers[i + 1];
+ if (hasContentLength) {
+ // Allow duplicate Content-Length headers with the same value.
+ // Throw an error only if the values differ.
+ // Known issues:
+ // https://github.com/request/request/issues/2091#issuecomment-328715113
+ // https://github.com/nodejs/node/issues/6517#issuecomment-216263771
+ if (currentContentLengthValue !== this.body_bytes) {
+ throw parseErrorCode('HPE_UNEXPECTED_CONTENT_LENGTH');
+ }
+ } else {
+ hasContentLength = true;
+ this.body_bytes = currentContentLengthValue;
+ }
+ break;
+ case 'connection':
+ this.connection += headers[i + 1].toLowerCase();
+ break;
+ case 'upgrade':
+ hasUpgradeHeader = true;
+ break;
+ }
+ }
+
+ // if both isChunked and hasContentLength, isChunked wins
+ // This is required so the body is parsed using the chunked method, and matches
+ // Chrome's behavior. We could, maybe, ignore them both (would get chunked
+ // encoding into the body), and/or disable shouldKeepAlive to be more
+ // resilient.
+ if (this.isChunked && hasContentLength) {
+ hasContentLength = false;
+ this.body_bytes = null;
+ }
+
+ // Logic from https://github.com/nodejs/http-parser/blob/921d5585515a153fa00e411cf144280c59b41f90/http_parser.c#L1727-L1737
+ // "For responses, "Upgrade: foo" and "Connection: upgrade" are
+ // mandatory only when it is a 101 Switching Protocols response,
+ // otherwise it is purely informational, to announce support."
+ if (hasUpgradeHeader && this.connection.indexOf('upgrade') != -1) {
+ info.upgrade = this.type === HTTPParser.REQUEST || info.statusCode === 101;
+ } else {
+ info.upgrade = info.method === method_connect;
+ }
+
+ if (this.isChunked && info.upgrade) {
+ this.isChunked = false;
+ }
+
+ info.shouldKeepAlive = this.shouldKeepAlive();
+ // Problem that also exists in the original node parser: ideally we would know skipBody before calling onHeadersComplete.
+ var skipBody;
+ if (compatMode0_12) {
+ skipBody = this.userCall()(this[kOnHeadersComplete](info));
+ } else {
+ skipBody = this.userCall()(this[kOnHeadersComplete](info.versionMajor,
+ info.versionMinor, info.headers, info.method, info.url, info.statusCode,
+ info.statusMessage, info.upgrade, info.shouldKeepAlive));
+ }
+ if (skipBody === 2) {
+ this.nextRequest();
+ return true;
+ } else if (this.isChunked && !skipBody) {
+ this.state = 'BODY_CHUNKHEAD';
+ } else if (skipBody || this.body_bytes === 0) {
+ this.nextRequest();
+ // For older versions of node (v6.x and older?) that return skipBody=1 or skipBody=true,
+ // we need this "return true;" if it's an upgrade request.
+ return info.upgrade;
+ } else if (this.body_bytes === null) {
+ this.state = 'BODY_RAW';
+ } else {
+ this.state = 'BODY_SIZED';
+ }
+ }
+};
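To make the compatMode0_12 branch above concrete, here is a hedged sketch of the two kOnHeadersComplete signatures. By default the handler receives a single info object; merely reading HTTPParser.kOnExecute (as newer Node internals do) flips the module to the positional form. The response bytes are illustrative.

var HTTPParser = require('http-parser-js').HTTPParser;
var parser = new HTTPParser(HTTPParser.RESPONSE);

// Default (compatMode0_12 === true): the handler receives the whole info object.
parser[HTTPParser.kOnHeadersComplete] = function (info) {
  console.log(info.statusCode, info.statusMessage, info.headers);
};
parser.execute(Buffer.from('HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n'));

// After HTTPParser.kOnExecute has been read somewhere, the handler is instead called as:
// function (versionMajor, versionMinor, headers, method, url, statusCode,
//           statusMessage, upgrade, shouldKeepAlive) { ... }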
+
+HTTPParser.prototype.BODY_CHUNKHEAD = function () {
+ var line = this.consumeLine();
+ if (line === undefined) {
+ return;
+ }
+ this.body_bytes = parseInt(line, 16);
+ if (!this.body_bytes) {
+ this.state = 'BODY_CHUNKTRAILERS';
+ } else {
+ this.state = 'BODY_CHUNK';
+ }
+};
+
+HTTPParser.prototype.BODY_CHUNK = function () {
+ var length = Math.min(this.end - this.offset, this.body_bytes);
+ // 0, length are for backwards compatibility. See: https://github.com/creationix/http-parser-js/pull/98
+ this.userCall()(this[kOnBody](this.chunk.slice(this.offset, this.offset + length), 0, length));
+ this.offset += length;
+ this.body_bytes -= length;
+ if (!this.body_bytes) {
+ this.state = 'BODY_CHUNKEMPTYLINE';
+ }
+};
+
+HTTPParser.prototype.BODY_CHUNKEMPTYLINE = function () {
+ var line = this.consumeLine();
+ if (line === undefined) {
+ return;
+ }
+ if (line !== '') {
+ throw new Error('Expected empty line');
+ }
+ this.state = 'BODY_CHUNKHEAD';
+};
+
+HTTPParser.prototype.BODY_CHUNKTRAILERS = function () {
+ var line = this.consumeLine();
+ if (line === undefined) {
+ return;
+ }
+ if (line) {
+ this.parseHeader(line, this.trailers);
+ } else {
+ if (this.trailers.length) {
+ this.userCall()(this[kOnHeaders](this.trailers, ''));
+ }
+ this.nextRequest();
+ }
+};
+
+HTTPParser.prototype.BODY_RAW = function () {
+ // 0, length are for backwards compatibility. See: https://github.com/creationix/http-parser-js/pull/98
+ this.userCall()(this[kOnBody](this.chunk.slice(this.offset, this.end), 0, this.end - this.offset));
+ this.offset = this.end;
+};
+
+HTTPParser.prototype.BODY_SIZED = function () {
+ var length = Math.min(this.end - this.offset, this.body_bytes);
+ // 0, length are for backwards compatibility. See: https://github.com/creationix/http-parser-js/pull/98
+ this.userCall()(this[kOnBody](this.chunk.slice(this.offset, this.offset + length), 0, length));
+ this.offset += length;
+ this.body_bytes -= length;
+ if (!this.body_bytes) {
+ this.nextRequest();
+ }
+};
+
+// backward compat to node < 0.11.6
+['Headers', 'HeadersComplete', 'Body', 'MessageComplete'].forEach(function (name) {
+ var k = HTTPParser['kOn' + name];
+ Object.defineProperty(HTTPParser.prototype, 'on' + name, {
+ get: function () {
+ return this[k];
+ },
+ set: function (to) {
+ // hack for backward compatibility
+ this._compatMode0_11 = true;
+ method_connect = 'CONNECT';
+ return (this[k] = to);
+ }
+ });
+});
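A hedged sketch of the legacy property style registered above: assigning parser.onHeadersComplete (instead of indexing with the kOn* constants) switches the instance into _compatMode0_11, so info.method arrives as the method string rather than an index into HTTPParser.methods. Host and path are placeholders.

var HTTPParser = require('http-parser-js').HTTPParser;
var parser = new HTTPParser(HTTPParser.REQUEST);

// node < 0.11.6 style; the setter enables _compatMode0_11 for this instance.
parser.onHeadersComplete = function (info) {
  console.log(info.method, info.url); // 'GET' '/legacy' (a string, not a method index)
};
parser.execute(Buffer.from('GET /legacy HTTP/1.1\r\nHost: example.com\r\n\r\n'));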
+
+function parseErrorCode(code) {
+ var err = new Error('Parse Error');
+ err.code = code;
+ return err;
+}
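Putting the pieces together, a minimal end-to-end sketch (assuming require('http-parser-js') resolves to this vendored file; the request bytes are made up) that feeds one complete request with a sized body through the parser:

var HTTPParser = require('http-parser-js').HTTPParser;

var parser = new HTTPParser(HTTPParser.REQUEST);
var body = '';

parser[HTTPParser.kOnHeadersComplete] = function (info) {
  // info.method is an index into HTTPParser.methods in the default compat mode
  console.log(HTTPParser.methods[info.method], info.url,
              'HTTP/' + info.versionMajor + '.' + info.versionMinor);
};
parser[HTTPParser.kOnBody] = function (chunk, offset, length) {
  body += chunk.toString('utf8', offset, offset + length);
};
parser[HTTPParser.kOnMessageComplete] = function () {
  console.log('body:', body); // 'hello'
};

parser.execute(Buffer.from(
  'POST /echo HTTP/1.1\r\n' +
  'Host: example.com\r\n' +
  'Content-Length: 5\r\n' +
  '\r\n' +
  'hello'
));
parser.finish(); // signal EOF; harmless here because the message already completed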