Skip to content

Commit d3aafea

Browse files
committed
fix: limit Content-Encoding chain to 5 to prevent resource exhaustion
A malicious server could send responses with thousands of Content-Encoding layers, causing high CPU usage and memory allocation when decompressing. This fix limits the number of content-encodings to 5, matching the approach used by urllib3 (GHSA-gm62-xv2j-4w53) and curl (CVE-2022-32206). Fixes CWE-770: Allocation of Resources Without Limits or Throttling Signed-off-by: Matteo Collina <hello@matteocollina.com>
1 parent f9c9185 commit d3aafea

File tree

3 files changed

+88
-12
lines changed

3 files changed

+88
-12
lines changed

README.md

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -329,7 +329,13 @@ const headers = await fetch(url, { method: 'HEAD' })
329329

330330
The [Fetch Standard](https://fetch.spec.whatwg.org) requires implementations to exclude certain headers from requests and responses. In browser environments, some headers are forbidden so the user agent remains in full control over them. In Undici, these constraints are removed to give more control to the user.
331331

332-
### `undici.upgrade([url, options]): Promise`
332+
#### Content-Encoding
333+
334+
* https://www.rfc-editor.org/rfc/rfc9110#field.content-encoding
335+
336+
Undici limits the number of `Content-Encoding` layers in a response to **5** to prevent resource exhaustion attacks. If a server responds with more than 5 content-encodings (e.g., `Content-Encoding: gzip, gzip, gzip, gzip, gzip, gzip`), the fetch will be rejected with an error. This limit matches the approach taken by [curl](https://curl.se/docs/CVE-2022-32206.html) and [urllib3](https://github.com/advisories/GHSA-gm62-xv2j-4w53).
337+
338+
#### `undici.upgrade([url, options]): Promise`
333339

334340
Upgrade to a different protocol. See [MDN - HTTP - Protocol upgrade mechanism](https://developer.mozilla.org/en-US/docs/Web/HTTP/Protocol_upgrade_mechanism) for more details.
335341

lib/web/fetch/index.js

Lines changed: 16 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -2111,21 +2111,13 @@ async function httpNetworkFetch (
21112111
return
21122112
}
21132113

2114-
/** @type {string[]} */
2115-
let codings = []
21162114
let location = ''
21172115

21182116
const headersList = new HeadersList()
21192117

21202118
for (let i = 0; i < rawHeaders.length; i += 2) {
21212119
headersList.append(bufferToLowerCasedHeaderName(rawHeaders[i]), rawHeaders[i + 1].toString('latin1'), true)
21222120
}
2123-
const contentEncoding = headersList.get('content-encoding', true)
2124-
if (contentEncoding) {
2125-
// https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
2126-
// "All content-coding values are case-insensitive..."
2127-
codings = contentEncoding.toLowerCase().split(',').map((x) => x.trim())
2128-
}
21292121
location = headersList.get('location', true)
21302122

21312123
this.body = new Readable({ read: resume })
@@ -2136,9 +2128,23 @@ async function httpNetworkFetch (
21362128
redirectStatusSet.has(status)
21372129

21382130
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
2139-
if (codings.length !== 0 && request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {
2131+
if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) {
2132+
// https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
2133+
const contentEncoding = headersList.get('content-encoding', true)
2134+
// "All content-coding values are case-insensitive..."
2135+
/** @type {string[]} */
2136+
const codings = contentEncoding ? contentEncoding.toLowerCase().split(',') : []
2137+
2138+
// Limit the number of content-encodings to prevent resource exhaustion.
2139+
// CVE fix similar to urllib3 (GHSA-gm62-xv2j-4w53) and curl (CVE-2022-32206).
2140+
const maxContentEncodings = 5
2141+
if (codings.length > maxContentEncodings) {
2142+
reject(new Error(`too many content-encodings in response: ${codings.length}, maximum allowed is ${maxContentEncodings}`))
2143+
return true
2144+
}
2145+
21402146
for (let i = codings.length - 1; i >= 0; --i) {
2141-
const coding = codings[i]
2147+
const coding = codings[i].trim()
21422148
// https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2
21432149
if (coding === 'x-gzip' || coding === 'gzip') {
21442150
decoders.push(zlib.createGunzip({

test/fetch/encoding.js

Lines changed: 65 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
'use strict'
22

3-
const { test } = require('node:test')
3+
const { test, describe, before, after } = require('node:test')
44
const assert = require('node:assert')
55
const { createServer } = require('node:http')
66
const { once } = require('node:events')
@@ -58,3 +58,67 @@ test('response decompression according to content-encoding should be handled in
5858

5959
assert.strictEqual(await response.text(), text)
6060
})
61+
62+
// Regression tests for the Content-Encoding chain limit.
// A malicious server could send thousands of Content-Encoding layers to
// exhaust CPU/memory during decompression; undici caps the chain at 5,
// mirroring urllib3 (GHSA-gm62-xv2j-4w53) and curl (CVE-2022-32206).
describe('content-encoding chain limit', () => {
  const MAX_CONTENT_ENCODINGS = 5

  let server
  before(async () => {
    // Test server that repeats the no-op 'identity' coding as many times as
    // the client requests via the x-encoding-count header (default: 1).
    server = createServer({
      noDelay: true
    }, (req, res) => {
      const encodingCount = parseInt(req.headers['x-encoding-count'] || '1', 10)
      const encodings = Array(encodingCount).fill('identity').join(', ')

      res.writeHead(200, {
        'Content-Encoding': encodings,
        'Content-Type': 'text/plain'
      })
      res.end('test')
    })
    await once(server.listen(0), 'listening')
  })

  after(() => {
    server.close()
  })

  test(`should allow exactly ${MAX_CONTENT_ENCODINGS} content-encodings`, async (t) => {
    const response = await fetch(`http://localhost:${server.address().port}`, {
      keepalive: false,
      headers: { 'x-encoding-count': String(MAX_CONTENT_ENCODINGS) }
    })

    assert.strictEqual(response.status, 200)
    // identity encoding is a no-op, so the body should be passed through
    assert.strictEqual(await response.text(), 'test')
  })

  test(`should reject more than ${MAX_CONTENT_ENCODINGS} content-encodings`, async (t) => {
    await assert.rejects(
      fetch(`http://localhost:${server.address().port}`, {
        keepalive: false,
        headers: { 'x-encoding-count': String(MAX_CONTENT_ENCODINGS + 1) }
      }),
      (err) => {
        // Guard message access with ?. as well: a rejection without a cause
        // (or a cause lacking a message) must fail the predicate via the
        // assertion, not crash the validator with a TypeError.
        assert.ok(err.cause?.message?.includes('content-encoding'))
        return true
      }
    )
  })

  test('should reject excessive content-encoding chains', async (t) => {
    await assert.rejects(
      fetch(`http://localhost:${server.address().port}`, {
        keepalive: false,
        headers: { 'x-encoding-count': '100' }
      }),
      (err) => {
        assert.ok(err.cause?.message?.includes('content-encoding'))
        return true
      }
    )
  })
})

0 commit comments

Comments
 (0)