From 8a3e0c069747cb6a7e1889de92304856224fee72 Mon Sep 17 00:00:00 2001
From: Matteo Collina
Date: Fri, 14 Dec 2018 12:30:01 +0100
Subject: [PATCH 1/5] http: fix regression of binary upgrade response body

See: https://github.com/nodejs/node/issues/24958

PR-URL: https://github.com/nodejs/node/pull/25036
Reviewed-By: Myles Borins
---
 src/node_http_parser.cc | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/node_http_parser.cc b/src/node_http_parser.cc
index 713c7c577e173a..d634cba4d3af03 100644
--- a/src/node_http_parser.cc
+++ b/src/node_http_parser.cc
@@ -610,8 +610,6 @@ class Parser : public AsyncWrap {
 
     size_t nparsed = http_parser_execute(&parser_, &settings, data, len);
 
-    enum http_errno err = HTTP_PARSER_ERRNO(&parser_);
-
     Save();
 
     // Unassign the 'buffer_' variable
@@ -626,7 +624,9 @@ class Parser : public AsyncWrap {
     Local<Integer> nparsed_obj = Integer::New(env()->isolate(), nparsed);
     // If there was a parse error in one of the callbacks
     // TODO(bnoordhuis) What if there is an error on EOF?
-    if ((!parser_.upgrade && nparsed != len) || err != HPE_OK) {
+    if (!parser_.upgrade && nparsed != len) {
+      enum http_errno err = HTTP_PARSER_ERRNO(&parser_);
+
       Local<Value> e = Exception::Error(env()->parse_error_string());
       Local<Object> obj = e->ToObject(env()->isolate());
       obj->Set(env()->bytes_parsed_string(), nparsed_obj);

From 59f83d689641d5030743ee4f3e453e754843e188 Mon Sep 17 00:00:00 2001
From: cjihrig
Date: Thu, 29 Nov 2018 17:29:53 -0500
Subject: [PATCH 2/5] deps: cherry-pick http_parser_set_max_header_size
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This commit adds http_parser_set_max_header_size() to the http-parser
for overriding the compile time maximum HTTP header size.

Backport-PR-URL: https://github.com/nodejs/node/pull/25173
PR-URL: https://github.com/nodejs/node/pull/24811
Fixes: https://github.com/nodejs/node/issues/24692
Refs: https://github.com/nodejs/http-parser/pull/453
Reviewed-By: Anna Henningsen
Reviewed-By: Matteo Collina
Reviewed-By: Myles Borins
Reviewed-By: Michael Dawson
Reviewed-By: Сковорода Никита Андреевич
Reviewed-By: James M Snell
Reviewed-By: Jeremiah Senkpiel
---
 deps/http_parser/http_parser.c | 15 +++++++++++----
 deps/http_parser/http_parser.h |  3 +++
 2 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/deps/http_parser/http_parser.c b/deps/http_parser/http_parser.c
index 6522618671d09c..46764bced09478 100644
--- a/deps/http_parser/http_parser.c
+++ b/deps/http_parser/http_parser.c
@@ -25,6 +25,8 @@
 #include <string.h>
 #include <limits.h>
 
+static uint32_t max_header_size = HTTP_MAX_HEADER_SIZE;
+
 #ifndef ULLONG_MAX
 # define ULLONG_MAX ((uint64_t) -1) /* 2^64-1 */
 #endif
@@ -137,20 +139,20 @@ do { \
 } while (0)
 
 /* Don't allow the total size of the HTTP headers (including the status
- * line) to exceed HTTP_MAX_HEADER_SIZE. This check is here to protect
+ * line) to exceed max_header_size. This check is here to protect
  * embedders against denial-of-service attacks where the attacker feeds
  * us a never-ending header that the embedder keeps buffering.
  *
  * This check is arguably the responsibility of embedders but we're doing
  * it on the embedder's behalf because most won't bother and this way we
- * make the web a little safer. HTTP_MAX_HEADER_SIZE is still far bigger
+ * make the web a little safer. max_header_size is still far bigger
  * than any reasonable request or response so this should never affect
  * day-to-day operation.
  */
 #define COUNT_HEADER_SIZE(V)                                          \
 do {                                                                  \
   parser->nread += (V);                                               \
-  if (UNLIKELY(parser->nread > (HTTP_MAX_HEADER_SIZE))) {             \
+  if (UNLIKELY(parser->nread > max_header_size)) {                    \
     SET_ERRNO(HPE_HEADER_OVERFLOW);                                   \
     goto error;                                                       \
   }                                                                   \
@@ -1471,7 +1473,7 @@ size_t http_parser_execute (http_parser *parser,
       const char* p_lf;
       size_t limit = data + len - p;
 
-      limit = MIN(limit, HTTP_MAX_HEADER_SIZE);
+      limit = MIN(limit, max_header_size);
 
       p_cr = (const char*) memchr(p, CR, limit);
       p_lf = (const char*) memchr(p, LF, limit);
@@ -2437,3 +2439,8 @@ http_parser_version(void) {
          HTTP_PARSER_VERSION_MINOR * 0x00100 |
          HTTP_PARSER_VERSION_PATCH * 0x00001;
 }
+
+void
+http_parser_set_max_header_size(uint32_t size) {
+  max_header_size = size;
+}
diff --git a/deps/http_parser/http_parser.h b/deps/http_parser/http_parser.h
index 1fbf30e2b4740b..ea7bafef2c3178 100644
--- a/deps/http_parser/http_parser.h
+++ b/deps/http_parser/http_parser.h
@@ -427,6 +427,9 @@ void http_parser_pause(http_parser *parser, int paused);
 /* Checks if this is the final chunk of the body. */
 int http_body_is_final(const http_parser *parser);
 
+/* Change the maximum header size provided at compile time. */
+void http_parser_set_max_header_size(uint32_t size);
+
 #ifdef __cplusplus
 }
 #endif

From f233b160c9b8d5126b4e4845d1661c718d14d39f Mon Sep 17 00:00:00 2001
From: cjihrig
Date: Mon, 3 Dec 2018 12:27:46 -0500
Subject: [PATCH 3/5] cli: add --max-http-header-size flag
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Allow the maximum size of HTTP headers to be overridden from
the command line.

Backport-PR-URL: https://github.com/nodejs/node/pull/25173
co-authored-by: Matteo Collina
PR-URL: https://github.com/nodejs/node/pull/24811
Fixes: https://github.com/nodejs/node/issues/24692
Reviewed-By: Anna Henningsen
Reviewed-By: Myles Borins
Reviewed-By: Michael Dawson
Reviewed-By: Сковорода Никита Андреевич
Reviewed-By: James M Snell
Reviewed-By: Jeremiah Senkpiel
---
 doc/api/cli.md                                |   8 ++
 doc/node.1                                    |   4 +
 src/node.cc                                   |   7 ++
 src/node_config.cc                            |  13 +++
 src/node_http_parser.cc                       |   5 +
 src/node_internals.h                          |   3 +
 test/sequential/test-http-max-http-headers.js |  72 ++++++----
 .../test-set-http-max-http-headers.js         | 104 ++++++++++++++++++
 8 files changed, 192 insertions(+), 24 deletions(-)
 create mode 100644 test/sequential/test-set-http-max-http-headers.js

diff --git a/doc/api/cli.md b/doc/api/cli.md
index 5a943b3428c6ca..0765091106b12c 100644
--- a/doc/api/cli.md
+++ b/doc/api/cli.md
@@ -293,6 +293,13 @@ Indicate the end of node options. Pass the rest of the arguments to the script.
 If no script filename or eval/print script is supplied prior to this, then
 the next argument will be used as a script filename.
 
+### `--max-http-header-size=size`
+
+Specify the maximum size, in bytes, of HTTP headers. Defaults to 8KB.
+
 ## Environment Variables
 
 ### `NODE_DEBUG=module[,…]`
@@ -353,6 +360,7 @@ Node options that are allowed are:
 - `--debug-brk`
 - `--debug-port`
 - `--debug`
+- `--max-http-header-size`
 - `--no-deprecation`
 - `--no-warnings`
 - `--openssl-config`
diff --git a/doc/node.1 b/doc/node.1
index b36f176983787f..b5cdd08117c703 100644
--- a/doc/node.1
+++ b/doc/node.1
@@ -92,6 +92,10 @@ Open the REPL even if stdin does not appear to be a terminal.
 Preload the specified module at startup. Follows `require()`'s module resolution rules.
 \fImodule\fR may be either a path to a file, or a node module name.
 
+.TP
+.BR \-\-max\-http\-header-size \fI=size\fR
+Specify the maximum size of HTTP headers in bytes. Defaults to 8KB.
+
 .TP
 .BR \-\-no\-deprecation
 Silence deprecation warnings.
diff --git a/src/node.cc b/src/node.cc
index 1076157f807fbb..4041196d81d597 100644
--- a/src/node.cc
+++ b/src/node.cc
@@ -170,6 +170,8 @@ unsigned int reverted = 0;
 static std::string icu_data_dir;  // NOLINT(runtime/string)
 #endif
 
+uint64_t max_http_header_size = 8 * 1024;
+
 // used by C++ modules as well
 bool no_deprecation = false;
 
@@ -3731,6 +3733,8 @@ static void PrintHelp() {
          "  --trace-deprecation   show stack traces on deprecations\n"
          "  --throw-deprecation   throw an exception anytime a deprecated "
          "function is used\n"
+         "  --max-http-header-size Specify the maximum size of HTTP\n"
+         "                         headers in bytes. Defaults to 8KB.\n"
          "  --no-warnings          silence all process warnings\n"
          "  --napi-modules         load N-API modules (no-op - option kept for "
          " compatibility)\n"
@@ -3852,6 +3856,7 @@ static void CheckIfAllowedInEnv(const char* exe, bool is_env,
     "--pending-deprecation",
     "--no-warnings",
     "--napi-modules",
+    "--max-http-header-size",
     "--trace-warnings",
     "--redirect-warnings",
     "--trace-sync-io",
@@ -4010,6 +4015,8 @@ static void ParseArgs(int* argc,
       new_v8_argc += 1;
     } else if (strncmp(arg, "--v8-pool-size=", 15) == 0) {
       v8_thread_pool_size = atoi(arg + 15);
+    } else if (strncmp(arg, "--max-http-header-size=", 23) == 0) {
+      max_http_header_size = atoi(arg + 23);
 #if HAVE_OPENSSL
     } else if (strncmp(arg, "--tls-cipher-list=", 18) == 0) {
       default_cipher_list = arg + 18;
diff --git a/src/node_config.cc b/src/node_config.cc
index 4a397d1dcc6c2d..4746c775e12dfa 100644
--- a/src/node_config.cc
+++ b/src/node_config.cc
@@ -7,6 +7,7 @@ namespace node {
 
 using v8::Context;
 using v8::Local;
+using v8::Number;
 using v8::Object;
 using v8::ReadOnly;
 using v8::String;
@@ -24,6 +25,13 @@ using v8::Value;
         True(env->isolate()), ReadOnly).FromJust();                    \
   } while (0)
 
+#define READONLY_PROPERTY(obj, name, value)                              \
+  do {                                                                   \
+    obj->DefineOwnProperty(env->context(),                               \
+                           FIXED_ONE_BYTE_STRING(env->isolate(), name),  \
+                           value, ReadOnly).FromJust();                  \
+  } while (0)
+
 void InitConfig(Local<Object> target,
                 Local<Value> unused,
                 Local<Context> context) {
@@ -46,6 +54,11 @@ void InitConfig(Local<Object> target,
 
   if (config_expose_internals)
     READONLY_BOOLEAN_PROPERTY("exposeInternals");
+
+  READONLY_PROPERTY(target,
+                    "maxHTTPHeaderSize",
+                    Number::New(env->isolate(), max_http_header_size));
+
   if (!config_warning_file.empty()) {
     Local<String> name = OneByteString(env->isolate(), "warningFile");
     Local<String> value = String::NewFromUtf8(env->isolate(),
diff --git a/src/node_http_parser.cc b/src/node_http_parser.cc
index d634cba4d3af03..633e66aaef28a6 100644
--- a/src/node_http_parser.cc
+++ b/src/node_http_parser.cc
@@ -731,6 +731,9 @@ const struct http_parser_settings Parser::settings = {
   nullptr  // on_chunk_complete
 };
 
+void InitMaxHttpHeaderSizeOnce() {
+  http_parser_set_max_header_size(max_http_header_size);
+}
 
 void InitHttpParser(Local<Object> target,
                     Local<Value> unused,
                     Local<Context> context) {
@@ -775,6 +778,8 @@ void InitHttpParser(Local<Object> target,
   target->Set(FIXED_ONE_BYTE_STRING(env->isolate(), "HTTPParser"),
               t->GetFunction());
+
+  static uv_once_t init_once = UV_ONCE_INIT;
+  uv_once(&init_once, InitMaxHttpHeaderSizeOnce);
 }
 
 }  // namespace node
diff --git a/src/node_internals.h b/src/node_internals.h
index bfb9bf296d7285..9e6495028a0b41 100644
--- a/src/node_internals.h
+++ b/src/node_internals.h
@@ -56,6 +56,9 @@ extern bool config_expose_internals;
 // it to stderr.
 extern std::string config_warning_file;
 
+// Set in node.cc by ParseArgs when --max-http-header-size is used
+extern uint64_t max_http_header_size;
+
 // Forward declaration
 class Environment;
 
diff --git a/test/sequential/test-http-max-http-headers.js b/test/sequential/test-http-max-http-headers.js
index ae76142a4fd077..51d071f95a2e81 100644
--- a/test/sequential/test-http-max-http-headers.js
+++ b/test/sequential/test-http-max-http-headers.js
@@ -1,10 +1,17 @@
 'use strict';
+// Flags: --expose_internals
 
 const assert = require('assert');
 const common = require('../common');
 const http = require('http');
 const net = require('net');
 
-const MAX = 8 * 1024; // 8KB
+const MAX = +(process.argv[2] || 8 * 1024); // Command line option, or 8KB.
+
+assert(process.binding('config').maxHTTPHeaderSize,
+       'The option should exist on process.binding(\'config\')');
+
+console.log('pid is', process.pid);
+console.log('max header size is', process.binding('config').maxHTTPHeaderSize);
 
 // Verify that we cannot receive more than 8KB of headers.
 
@@ -28,19 +35,15 @@ function fillHeaders(headers, currentSize, valid = false) {
   headers += 'a'.repeat(MAX - headers.length - 3);
 
   // Generate valid headers
   if (valid) {
-    // TODO(mcollina): understand why -9 is needed instead of -1
-    headers = headers.slice(0, -9);
+    // TODO(mcollina): understand why -32 is needed instead of -1
+    headers = headers.slice(0, -32);
   }
   return headers + '\r\n\r\n';
 }
 
-const timeout = common.platformTimeout(10);
-
 function writeHeaders(socket, headers) {
   const array = [];
-
-  // this is off from 1024 so that \r\n does not get split
-  const chunkSize = 1000;
+  const chunkSize = 100;
   let last = 0;
 
@@ -55,19 +58,25 @@ function writeHeaders(socket, headers) {
   next();
 
   function next() {
-    if (socket.write(array.shift())) {
-      if (array.length === 0) {
-        socket.end();
-      } else {
-        setTimeout(next, timeout);
-      }
+    if (socket.destroyed) {
+      console.log('socket was destroyed early, data left to write:',
+                  array.join('').length);
+      return;
+    }
+
+    const chunk = array.shift();
+
+    if (chunk) {
+      console.log('writing chunk of size', chunk.length);
+      socket.write(chunk, next);
     } else {
-      socket.once('drain', next);
+      socket.end();
     }
   }
 }
 
 function test1() {
+  console.log('test1');
   let headers =
     'HTTP/1.1 200 OK\r\n' +
     'Content-Length: 0\r\n' +
@@ -82,6 +91,9 @@ function test1() {
       writeHeaders(sock, headers);
       sock.resume();
     });
+
+    // The socket might error but that's ok
+    sock.on('error', () => {});
   });
 
   server.listen(0, common.mustCall(() => {
@@ -90,17 +102,17 @@ function test1() {
     const client = net.connect(server.address().port);
 
    client.on('error', common.mustCall((err) => {
       assert.strictEqual(err.code, 'HPE_HEADER_OVERFLOW');
-      server.close();
-      setImmediate(test2);
+      server.close(test2);
     }));
   }));
 }
 
 const test2 = common.mustCall(() => {
+  console.log('test2');
   let headers =
     'GET / HTTP/1.1\r\n' +
     'Host: localhost\r\n' +
-    'Agent: node\r\n' +
+    'Agent: nod2\r\n' +
     'X-CRASH: ';
 
   // /, Host, localhost, Agent, node, X-CRASH, a...
@@ -109,7 +121,7 @@ const test2 = common.mustCall(() => {
 
   const server = http.createServer(common.mustNotCall());
 
-  server.on('clientError', common.mustCall((err) => {
+  server.once('clientError', common.mustCall((err) => {
     assert.strictEqual(err.code, 'HPE_HEADER_OVERFLOW');
   }));
 
@@ -121,34 +133,46 @@ const test2 = common.mustCall(() => {
     });
 
     finished(client, common.mustCall((err) => {
-      server.close();
-      setImmediate(test3);
+      server.close(test3);
     }));
   }));
 });
 
 const test3 = common.mustCall(() => {
+  console.log('test3');
   let headers =
     'GET / HTTP/1.1\r\n' +
     'Host: localhost\r\n' +
-    'Agent: node\r\n' +
+    'Agent: nod3\r\n' +
     'X-CRASH: ';
 
   // /, Host, localhost, Agent, node, X-CRASH, a...
   const currentSize = 1 + 4 + 9 + 5 + 4 + 7;
   headers = fillHeaders(headers, currentSize, true);
 
+  console.log('writing', headers.length);
+
   const server = http.createServer(common.mustCall((req, res) => {
-    res.end('hello world');
-    setImmediate(server.close.bind(server));
+    res.end('hello from test3 server');
+    server.close();
   }));
 
+  server.on('clientError', (err) => {
+    console.log(err.code);
+    if (err.code === 'HPE_HEADER_OVERFLOW') {
+      console.log(err.rawPacket.toString('hex'));
+    }
+  });
+
   server.on('clientError', common.mustNotCall());
 
   server.listen(0, common.mustCall(() => {
     const client = net.connect(server.address().port);
     client.on('connect', () => {
       writeHeaders(client, headers);
       client.resume();
     });
+
+    client.pipe(process.stdout);
   }));
 });
diff --git a/test/sequential/test-set-http-max-http-headers.js b/test/sequential/test-set-http-max-http-headers.js
new file mode 100644
index 00000000000000..7ec13f370784f8
--- /dev/null
+++ b/test/sequential/test-set-http-max-http-headers.js
@@ -0,0 +1,104 @@
+'use strict';
+
+const common = require('../common');
+const assert = require('assert');
+const { spawn } = require('child_process');
+const path = require('path');
+const testName = path.join(__dirname, 'test-http-max-http-headers.js');
+
+const timeout = common.platformTimeout(100);
+
+const tests = [];
+
+function test(fn) {
+  tests.push(fn);
+}
+
+test(function(cb) {
+  console.log('running subtest expecting failure');
+
+  // Validate that the test fails if the max header size is too small.
+  const args = ['--expose-internals',
+                '--max-http-header-size=1024',
+                testName];
+  const cp = spawn(process.execPath, args, { stdio: 'inherit' });
+
+  cp.on('close', common.mustCall((code, signal) => {
+    assert.strictEqual(code, 1);
+    assert.strictEqual(signal, null);
+    cb();
+  }));
+});
+
+test(function(cb) {
+  console.log('running subtest expecting success');
+
+  const env = Object.assign({}, process.env, {
+    NODE_DEBUG: 'http'
+  });
+
+  // Validate that the test fails if the max header size is too small.
+  // Validate that the test now passes if the same limit becomes large enough.
+  const args = ['--expose-internals',
+                '--max-http-header-size=1024',
+                testName,
+                '1024'];
+  const cp = spawn(process.execPath, args, {
+    env,
+    stdio: 'inherit'
+  });
+
+  cp.on('close', common.mustCall((code, signal) => {
+    assert.strictEqual(code, 0);
+    assert.strictEqual(signal, null);
+    cb();
+  }));
+});
+
+// Next, repeat the same checks using NODE_OPTIONS if it is supported.
+if (process.config.variables.node_without_node_options) {
+  const env = Object.assign({}, process.env, {
+    NODE_OPTIONS: '--max-http-header-size=1024'
+  });
+
+  test(function(cb) {
+    console.log('running subtest expecting failure');
+
+    // Validate that the test fails if the max header size is too small.
+    const args = ['--expose-internals', testName];
+    const cp = spawn(process.execPath, args, { env, stdio: 'inherit' });
+
+    cp.on('close', common.mustCall((code, signal) => {
+      assert.strictEqual(code, 1);
+      assert.strictEqual(signal, null);
+      cb();
+    }));
+  });
+
+  test(function(cb) {
+    // Validate that the test now passes if the same limit
+    // becomes large enough.
+    const args = ['--expose-internals', testName, '1024'];
+    const cp = spawn(process.execPath, args, { env, stdio: 'inherit' });
+
+    cp.on('close', common.mustCall((code, signal) => {
+      assert.strictEqual(code, 0);
+      assert.strictEqual(signal, null);
+      cb();
+    }));
+  });
+}
+
+function runTest() {
+  const fn = tests.shift();
+
+  if (!fn) {
+    return;
+  }
+
+  fn(() => {
+    setTimeout(runTest, timeout);
+  });
+}
+
+runTest();

From c0c4de71f0fb7b55804a9d2110dded0493fc7c3e Mon Sep 17 00:00:00 2001
From: cjihrig
Date: Wed, 5 Dec 2018 19:59:12 -0500
Subject: [PATCH 4/5] http: add maxHeaderSize property

This commit exposes the value of --max-http-header-size as a
property of the http module.

Backport-PR-URL: https://github.com/nodejs/node/pull/25218
PR-URL: https://github.com/nodejs/node/pull/24860
Reviewed-By: Richard Lau
Reviewed-By: Matteo Collina
Reviewed-By: Michael Dawson
Reviewed-By: Shelley Vohr
Reviewed-By: James M Snell
---
 doc/api/http.md                            | 11 +++++++++++
 lib/http.js                                | 15 +++++++++++++++
 test/parallel/test-http-max-header-size.js | 11 +++++++++++
 3 files changed, 37 insertions(+)
 create mode 100644 test/parallel/test-http-max-header-size.js

diff --git a/doc/api/http.md b/doc/api/http.md
index b631afc0570cae..e3b0b0af450751 100644
--- a/doc/api/http.md
+++ b/doc/api/http.md
@@ -1581,6 +1581,16 @@ added: v0.5.9
 Global instance of `Agent` which is used as the default for all HTTP client
 requests.
 
+## http.maxHeaderSize
+
+* {number}
+
+Read-only property specifying the maximum allowed size of HTTP headers in bytes.
+Defaults to 8KB. Configurable using the [`--max-http-header-size`][] CLI option.
+
 ## http.request(options[, callback])

 Specify the maximum size, in bytes, of HTTP headers. Defaults to 8KB.

diff --git a/doc/api/http.md b/doc/api/http.md
index e3b0b0af450751..3aad3cfdc95d6d 100644
--- a/doc/api/http.md
+++ b/doc/api/http.md
@@ -1583,7 +1583,7 @@ requests.
 
 ## http.maxHeaderSize
 
 * {number}
 
diff --git a/doc/changelogs/CHANGELOG_V6.md b/doc/changelogs/CHANGELOG_V6.md
index aeaaff1d072c0d..4c222b5c8776e2 100644
--- a/doc/changelogs/CHANGELOG_V6.md
+++ b/doc/changelogs/CHANGELOG_V6.md
@@ -7,6 +7,7 @@
 
+6.16.0<br/>
6.15.1
6.15.0
6.14.4
@@ -67,6 +68,27 @@
 [Node.js Long Term Support Plan](https://github.com/nodejs/LTS) and will be
 supported actively until April 2018 and maintained until April 2019.
 
+
+## 2018-12-26, Version 6.16.0 'Boron' (LTS), @MylesBorins
+
+The 6.15.0 security release introduced some unexpected breakages on the 6.x release line.
+This is a special release to fix a regression in the HTTP binary upgrade response body and add
+a missing CLI flag to adjust the max header size of the http parser.
+
+### Notable Changes
+
+* **cli**:
+  - add --max-http-header-size flag (cjihrig) [#24811](https://github.com/nodejs/node/pull/24811)
+* **http**:
+  - add maxHeaderSize property (cjihrig) [#24860](https://github.com/nodejs/node/pull/24860)
+
+### Commits
+
+* [[`f233b160c9`](https://github.com/nodejs/node/commit/f233b160c9)] - **(SEMVER-MINOR)** **cli**: add --max-http-header-size flag (cjihrig) [#24811](https://github.com/nodejs/node/pull/24811)
+* [[`59f83d6896`](https://github.com/nodejs/node/commit/59f83d6896)] - **(SEMVER-MINOR)** **deps**: cherry-pick http\_parser\_set\_max\_header\_size (cjihrig) [#24811](https://github.com/nodejs/node/pull/24811)
+* [[`c0c4de71f0`](https://github.com/nodejs/node/commit/c0c4de71f0)] - **(SEMVER-MINOR)** **http**: add maxHeaderSize property (cjihrig) [#24860](https://github.com/nodejs/node/pull/24860)
+* [[`8a3e0c0697`](https://github.com/nodejs/node/commit/8a3e0c0697)] - **http**: fix regression of binary upgrade response body (Matteo Collina) [#25036](https://github.com/nodejs/node/pull/25036)
+
 ## 2018-12-03, Version 6.15.1 'Boron' (LTS), @rvagg
 
diff --git a/src/node_version.h b/src/node_version.h
index cf983645b46909..3602772fe29f6f 100644
--- a/src/node_version.h
+++ b/src/node_version.h
@@ -2,13 +2,13 @@
 #define SRC_NODE_VERSION_H_
 
 #define NODE_MAJOR_VERSION 6
-#define NODE_MINOR_VERSION 15
-#define NODE_PATCH_VERSION 2
+#define NODE_MINOR_VERSION 16
+#define NODE_PATCH_VERSION 0
 
 #define NODE_VERSION_IS_LTS 1
 #define NODE_VERSION_LTS_CODENAME "Boron"
 
-#define NODE_VERSION_IS_RELEASE 0
+#define NODE_VERSION_IS_RELEASE 1
 
 #ifndef NODE_STRINGIFY
 #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n)
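
For quick reference, the following minimal sketch (not part of the patch series; the file name and the 16384 value are only illustrative) shows how the flag added in patch 3 and the property added in patch 4 fit together on a build that contains these commits:

  // check-max-header-size.js
  // Run with: node --max-http-header-size=16384 check-max-header-size.js
  'use strict';
  const http = require('http');

  // http.maxHeaderSize reports the per-process header limit: the value passed
  // to --max-http-header-size, or 8192 (the 8KB default) when the flag is absent.
  console.log('maxHeaderSize:', http.maxHeaderSize);

Run with the flag shown above, the script prints 16384; run without it, it prints 8192.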