Skip to content

Commit 2b35890

Browse files
authored
Release 2.15.902 (#310)
2.15.902 (2026-02-03) — Fixed multiplexing mixing issue under specific concurrency condition. (#309) - Backported "GHSA-38jv-5279-wg99" security patch for "decompression-bomb safeguards of the streaming API were bypassed when HTTP redirects were followed" from upstream. - Backported "Started treating Retry-After times greater than 6 hours as 6 hours by default" from upstream (urllib3#3743).
2 parents bbdcc4a + 029011a commit 2b35890

File tree

11 files changed

+104
-4
lines changed

11 files changed

+104
-4
lines changed

CHANGES.rst

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,11 @@
1+
2.15.902 (2026-02-03)
2+
=====================
3+
4+
- Fixed multiplexing mixing issue under specific concurrency condition. (#309)
5+
- Backported "GHSA-38jv-5279-wg99" security patch for "decompression-bomb safeguards of the streaming API were bypassed when HTTP redirects were followed" from upstream.
6+
- Backported "Started treating Retry-After times greater than 6 hours as 6 hours by default" from upstream (https://github.com/urllib3/urllib3/pull/3743).
7+
8+
19
2.15.901 (2025-12-22)
210
=====================
311

dummyserver/handlers.py

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -202,11 +202,24 @@ def redirect(self, request: httputil.HTTPServerRequest) -> Response: # type: ignore
202202
params = request_params(request)
203203
target = params.get("target", "/")
204204
status = params.get("status", b"303 See Other").decode("latin-1")
205+
compressed = params.get("compressed", b"false") == b"true"
206+
205207
if len(status) == 3:
206208
status = f"{status} Redirect"
207209

208210
headers = [("Location", target)]
209-
return Response(status=status, headers=headers)
211+
212+
if compressed:
213+
headers.append(("Content-Encoding", "gzip"))
214+
data = gzip.compress(b"foo")
215+
else:
216+
data = b""
217+
218+
return Response(
219+
status=status,
220+
headers=headers,
221+
body=data,
222+
)
210223

211224
def not_found(self, request: httputil.HTTPServerRequest) -> Response:
212225
return Response("Not found", status="404 Not Found")

src/urllib3/_async/connectionpool.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1384,6 +1384,7 @@ async def _make_request(
13841384
response = await conn.getresponse(
13851385
police_officer=self.pool,
13861386
early_response_callback=on_early_response,
1387+
promise=rp,
13871388
)
13881389
except (BaseSSLError, OSError) as e:
13891390
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)

src/urllib3/_async/response.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -205,7 +205,11 @@ async def drain_conn(self) -> None: # type: ignore[override]
205205
Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
206206
"""
207207
try:
208-
await self.read()
208+
await self.read(
209+
# Do not spend resources decoding the content unless
210+
# decoding has already been initiated.
211+
decode_content=self._has_decoded_content,
212+
)
209213
except (HTTPError, OSError, BaseSSLError):
210214
pass
211215

src/urllib3/_version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
# This file is protected via CODEOWNERS
22
from __future__ import annotations
33

4-
__version__ = "2.15.901"
4+
__version__ = "2.15.902"

src/urllib3/connectionpool.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1375,6 +1375,7 @@ def _make_request(
13751375
response = conn.getresponse(
13761376
police_officer=self.pool,
13771377
early_response_callback=on_early_response,
1378+
promise=rp,
13781379
)
13791380
except (BaseSSLError, OSError) as e:
13801381
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)

src/urllib3/response.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -561,7 +561,11 @@ def drain_conn(self) -> None:
561561
Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
562562
"""
563563
try:
564-
self.read()
564+
self.read(
565+
# Do not spend resources decoding the content unless
566+
# decoding has already been initiated.
567+
decode_content=self._has_decoded_content,
568+
)
565569
except (HTTPError, OSError, BaseSSLError):
566570
pass
567571

src/urllib3/util/retry.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -180,6 +180,11 @@ class Retry:
180180
Sequence of headers to remove from the request when a response
181181
indicating a redirect is returned before firing off the redirected
182182
request.
183+
184+
:param int retry_after_max: Number of seconds to allow as the maximum for
185+
Retry-After headers. Defaults to :attr:`Retry.DEFAULT_RETRY_AFTER_MAX`.
186+
Any Retry-After headers larger than this value will be limited to this
187+
value.
183188
"""
184189

185190
#: Default methods to be used for ``allowed_methods``
@@ -198,6 +203,10 @@ class Retry:
198203
#: Default maximum backoff time.
199204
DEFAULT_BACKOFF_MAX = 120
200205

206+
# This is undocumented in the RFC. Setting to 6 hours matches other popular libraries.
207+
#: Default maximum allowed value for Retry-After headers in seconds
208+
DEFAULT_RETRY_AFTER_MAX: typing.Final[int] = 21600
209+
201210
# Backward compatibility; assigned outside of the class.
202211
DEFAULT: typing.ClassVar[Retry]
203212

@@ -221,6 +230,7 @@ def __init__(
221230
str
222231
] = DEFAULT_REMOVE_HEADERS_ON_REDIRECT,
223232
backoff_jitter: float = 0.0,
233+
retry_after_max: int = DEFAULT_RETRY_AFTER_MAX,
224234
) -> None:
225235
self.total = total
226236
self.connect = connect
@@ -245,6 +255,7 @@ def __init__(
245255
h.lower() for h in remove_headers_on_redirect
246256
)
247257
self.backoff_jitter = backoff_jitter
258+
self.retry_after_max = retry_after_max
248259

249260
def new(self, **kw: typing.Any) -> Retry:
250261
params = dict(
@@ -264,6 +275,7 @@ def new(self, **kw: typing.Any) -> Retry:
264275
remove_headers_on_redirect=self.remove_headers_on_redirect,
265276
respect_retry_after_header=self.respect_retry_after_header,
266277
backoff_jitter=self.backoff_jitter,
278+
retry_after_max=self.retry_after_max,
267279
)
268280

269281
params.update(kw)
@@ -322,6 +334,10 @@ def parse_retry_after(self, retry_after: str) -> float:
322334

323335
seconds = max(seconds, 0)
324336

337+
# Check the seconds do not exceed the specified maximum
338+
if seconds > self.retry_after_max:
339+
seconds = self.retry_after_max
340+
325341
return seconds
326342

327343
def get_retry_after(self, response: HTTPResponse) -> float | None:

test/test_retry.py

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -181,6 +181,18 @@ def test_configurable_backoff_max(self) -> None:
181181
retry = retry.increment(method="GET")
182182
assert retry.get_backoff_time() == max_backoff
183183

184+
def test_configurable_retry_after_max(self) -> None:
185+
"""Configurable retry after is computed correctly"""
186+
max_retry_after = Retry.DEFAULT_RETRY_AFTER_MAX
187+
188+
retry = Retry()
189+
assert retry.parse_retry_after(str(max_retry_after)) == max_retry_after
190+
assert retry.parse_retry_after(str(max_retry_after + 1)) == max_retry_after
191+
192+
retry = Retry(retry_after_max=1)
193+
assert retry.parse_retry_after(str(1)) == 1
194+
assert retry.parse_retry_after(str(2)) == 1
195+
184196
def test_backoff_jitter(self) -> None:
185197
"""Backoff with jitter is computed correctly"""
186198
max_backoff = 1

test/with_dummyserver/asynchronous/test_connectionpool.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -576,6 +576,26 @@ async def test_redirect(self) -> None:
576576
assert r.status == 200
577577
assert await r.data == b"Dummy server!"
578578

579+
@mock.patch("urllib3.response.GzipDecoder.decompress")
580+
async def test_no_decoding_with_redirect_when_preload_disabled(
581+
self, gzip_decompress: mock.MagicMock
582+
) -> None:
583+
"""
584+
Test that urllib3 does not attempt to decode a gzipped redirect
585+
response when `preload_content` is set to `False`.
586+
"""
587+
async with AsyncHTTPConnectionPool(self.host, self.port) as pool:
588+
# Three requests are expected: two redirects and one final / 200 OK.
589+
response = await pool.request(
590+
"GET",
591+
"/redirect",
592+
fields={"target": "/redirect?compressed=true", "compressed": "true"},
593+
preload_content=False,
594+
)
595+
596+
assert response.status == 200
597+
gzip_decompress.assert_not_called()
598+
579599
async def test_303_redirect_makes_request_lose_body(self) -> None:
580600
async with AsyncHTTPConnectionPool(self.host, self.port) as pool:
581601
response = await pool.request(

0 commit comments

Comments (0)