Skip to content

Commit 6155a1f

Browse files
ShogunPanda and marco-ippolito
authored and committed
http: add maximum chunk extension size
PR-URL: nodejs-private/node-private#518 Fixes: https://hackerone.com/reports/2233486 Reviewed-By: Matteo Collina <matteo.collina@gmail.com> Reviewed-By: Marco Ippolito <marcoippolito54@gmail.com> Reviewed-By: Rafael Gonzaga <rafael.nunu@hotmail.com> CVE-ID: CVE-2024-22019
1 parent ef3eea2 commit 6155a1f

4 files changed

Lines changed: 171 additions & 4 deletions

File tree

doc/api/errors.md

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3124,6 +3124,18 @@ malconfigured clients, if more than 8 KiB of HTTP header data is received then
31243124
HTTP parsing will abort without a request or response object being created, and
31253125
an `Error` with this code will be emitted.
31263126

3127+
<a id="HPE_CHUNK_EXTENSIONS_OVERFLOW"></a>
3128+
3129+
### `HPE_CHUNK_EXTENSIONS_OVERFLOW`
3130+
3131+
<!-- YAML
3132+
added: REPLACEME
3133+
-->
3134+
3135+
Too much data was received for a chunk extensions. In order to protect against
3136+
malicious or malconfigured clients, if more than 16 KiB of data is received
3137+
then an `Error` with this code will be emitted.
3138+
31273139
<a id="HPE_UNEXPECTED_CONTENT_LENGTH"></a>
31283140

31293141
### `HPE_UNEXPECTED_CONTENT_LENGTH`

lib/_http_server.js

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -857,6 +857,11 @@ const requestHeaderFieldsTooLargeResponse = Buffer.from(
857857
'Connection: close\r\n\r\n', 'ascii',
858858
);
859859

860+
const requestChunkExtensionsTooLargeResponse = Buffer.from(
861+
`HTTP/1.1 413 ${STATUS_CODES[413]}\r\n` +
862+
'Connection: close\r\n\r\n', 'ascii',
863+
);
864+
860865
function socketOnError(e) {
861866
// Ignore further errors
862867
this.removeListener('error', socketOnError);
@@ -877,6 +882,9 @@ function socketOnError(e) {
877882
case 'HPE_HEADER_OVERFLOW':
878883
response = requestHeaderFieldsTooLargeResponse;
879884
break;
885+
case 'HPE_CHUNK_EXTENSIONS_OVERFLOW':
886+
response = requestChunkExtensionsTooLargeResponse;
887+
break;
880888
case 'ERR_HTTP_REQUEST_TIMEOUT':
881889
response = requestTimeoutResponse;
882890
break;

src/node_http_parser.cc

Lines changed: 20 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -79,6 +79,8 @@ const uint32_t kOnExecute = 5;
7979
const uint32_t kOnTimeout = 6;
8080
// Any more fields than this will be flushed into JS
8181
const size_t kMaxHeaderFieldsCount = 32;
82+
// Maximum size of chunk extensions
83+
const size_t kMaxChunkExtensionsSize = 16384;
8284

8385
const uint32_t kLenientNone = 0;
8486
const uint32_t kLenientHeaders = 1 << 0;
@@ -271,6 +273,7 @@ class Parser : public AsyncWrap, public StreamListener {
271273

272274
num_fields_ = num_values_ = 0;
273275
headers_completed_ = false;
276+
chunk_extensions_nread_ = 0;
274277
last_message_start_ = uv_hrtime();
275278
url_.Reset();
276279
status_message_.Reset();
@@ -526,9 +529,22 @@ class Parser : public AsyncWrap, public StreamListener {
526529
return 0;
527530
}
528531

529-
// Reset nread for the next chunk
532+
int on_chunk_extension(const char* at, size_t length) {
533+
chunk_extensions_nread_ += length;
534+
535+
if (chunk_extensions_nread_ > kMaxChunkExtensionsSize) {
536+
llhttp_set_error_reason(&parser_,
537+
"HPE_CHUNK_EXTENSIONS_OVERFLOW:Chunk extensions overflow");
538+
return HPE_USER;
539+
}
540+
541+
return 0;
542+
}
543+
544+
// Reset nread for the next chunk and also reset the extensions counter
530545
int on_chunk_header() {
531546
header_nread_ = 0;
547+
chunk_extensions_nread_ = 0;
532548
return 0;
533549
}
534550

@@ -1017,6 +1033,7 @@ class Parser : public AsyncWrap, public StreamListener {
10171033
bool headers_completed_ = false;
10181034
bool pending_pause_ = false;
10191035
uint64_t header_nread_ = 0;
1036+
uint64_t chunk_extensions_nread_ = 0;
10201037
uint64_t max_http_header_size_;
10211038
uint64_t last_message_start_;
10221039
ConnectionsList* connectionsList_;
@@ -1195,10 +1212,9 @@ const llhttp_settings_t Parser::settings = {
11951212
Proxy<DataCall, &Parser::on_header_value>::Raw,
11961213

11971214
// on_chunk_extension_name
1198-
nullptr,
1215+
Proxy<DataCall, &Parser::on_chunk_extension>::Raw,
11991216
// on_chunk_extension_value
1200-
nullptr,
1201-
1217+
Proxy<DataCall, &Parser::on_chunk_extension>::Raw,
12021218
Proxy<Call, &Parser::on_headers_complete>::Raw,
12031219
Proxy<DataCall, &Parser::on_body>::Raw,
12041220
Proxy<Call, &Parser::on_message_complete>::Raw,
Lines changed: 131 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,131 @@
1+
'use strict';
2+
3+
const common = require('../common');
4+
const http = require('http');
5+
const net = require('net');
6+
const assert = require('assert');
7+
8+
// Verify that chunk extensions are limited in size when sent all together.
9+
{
10+
const server = http.createServer((req, res) => {
11+
req.on('end', () => {
12+
res.writeHead(200, { 'Content-Type': 'text/plain' });
13+
res.end('bye');
14+
});
15+
16+
req.resume();
17+
});
18+
19+
server.listen(0, () => {
20+
const sock = net.connect(server.address().port);
21+
let data = '';
22+
23+
sock.on('data', (chunk) => data += chunk.toString('utf-8'));
24+
25+
sock.on('end', common.mustCall(function() {
26+
assert.strictEqual(data, 'HTTP/1.1 413 Payload Too Large\r\nConnection: close\r\n\r\n');
27+
server.close();
28+
}));
29+
30+
sock.end('' +
31+
'GET / HTTP/1.1\r\n' +
32+
'Host: localhost:8080\r\n' +
33+
'Transfer-Encoding: chunked\r\n\r\n' +
34+
'2;' + 'A'.repeat(20000) + '=bar\r\nAA\r\n' +
35+
'0\r\n\r\n'
36+
);
37+
});
38+
}
39+
40+
// Verify that chunk extensions are limited in size when sent in intervals.
41+
{
42+
const server = http.createServer((req, res) => {
43+
req.on('end', () => {
44+
res.writeHead(200, { 'Content-Type': 'text/plain' });
45+
res.end('bye');
46+
});
47+
48+
req.resume();
49+
});
50+
51+
server.listen(0, () => {
52+
const sock = net.connect(server.address().port);
53+
let remaining = 20000;
54+
let data = '';
55+
56+
const interval = setInterval(
57+
() => {
58+
if (remaining > 0) {
59+
sock.write('A'.repeat(1000));
60+
} else {
61+
sock.write('=bar\r\nAA\r\n0\r\n\r\n');
62+
clearInterval(interval);
63+
}
64+
65+
remaining -= 1000;
66+
},
67+
common.platformTimeout(20),
68+
).unref();
69+
70+
sock.on('data', (chunk) => data += chunk.toString('utf-8'));
71+
72+
sock.on('end', common.mustCall(function() {
73+
assert.strictEqual(data, 'HTTP/1.1 413 Payload Too Large\r\nConnection: close\r\n\r\n');
74+
server.close();
75+
}));
76+
77+
sock.write('' +
78+
'GET / HTTP/1.1\r\n' +
79+
'Host: localhost:8080\r\n' +
80+
'Transfer-Encoding: chunked\r\n\r\n' +
81+
'2;'
82+
);
83+
});
84+
}
85+
86+
// Verify the chunk extensions is correctly reset after a chunk
87+
{
88+
const server = http.createServer((req, res) => {
89+
req.on('end', () => {
90+
res.writeHead(200, { 'content-type': 'text/plain', 'connection': 'close', 'date': 'now' });
91+
res.end('bye');
92+
});
93+
94+
req.resume();
95+
});
96+
97+
server.listen(0, () => {
98+
const sock = net.connect(server.address().port);
99+
let data = '';
100+
101+
sock.on('data', (chunk) => data += chunk.toString('utf-8'));
102+
103+
sock.on('end', common.mustCall(function() {
104+
assert.strictEqual(
105+
data,
106+
'HTTP/1.1 200 OK\r\n' +
107+
'content-type: text/plain\r\n' +
108+
'connection: close\r\n' +
109+
'date: now\r\n' +
110+
'Transfer-Encoding: chunked\r\n' +
111+
'\r\n' +
112+
'3\r\n' +
113+
'bye\r\n' +
114+
'0\r\n' +
115+
'\r\n',
116+
);
117+
118+
server.close();
119+
}));
120+
121+
sock.end('' +
122+
'GET / HTTP/1.1\r\n' +
123+
'Host: localhost:8080\r\n' +
124+
'Transfer-Encoding: chunked\r\n\r\n' +
125+
'2;' + 'A'.repeat(10000) + '=bar\r\nAA\r\n' +
126+
'2;' + 'A'.repeat(10000) + '=bar\r\nAA\r\n' +
127+
'2;' + 'A'.repeat(10000) + '=bar\r\nAA\r\n' +
128+
'0\r\n\r\n'
129+
);
130+
});
131+
}

0 commit comments

Comments (0)