Skip to content

Commit 5a9c2fb

Browse files
Authored: [PR #9771/e80d5854 backport][3.10] Small cleanups to enabling compression in web_response (#9776)
1 parent 3426707 commit 5a9c2fb

File tree

2 files changed

+56
-38
lines changed

2 files changed

+56
-38
lines changed

aiohttp/web_response.py

Lines changed: 38 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@
4242
from .typedefs import JSONEncoder, LooseHeaders
4343

4444
REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus}
45+
LARGE_BODY_SIZE = 1024**2
4546

4647
__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
4748

@@ -397,25 +398,26 @@ def _generate_content_type_header(
397398
self._headers[CONTENT_TYPE] = ctype
398399

399400
async def _do_start_compression(self, coding: ContentCoding) -> None:
400-
if coding != ContentCoding.identity:
401-
assert self._payload_writer is not None
402-
self._headers[hdrs.CONTENT_ENCODING] = coding.value
403-
self._payload_writer.enable_compression(coding.value)
404-
# Compressed payload may have different content length,
405-
# remove the header
406-
self._headers.popall(hdrs.CONTENT_LENGTH, None)
401+
if coding is ContentCoding.identity:
402+
return
403+
assert self._payload_writer is not None
404+
self._headers[hdrs.CONTENT_ENCODING] = coding.value
405+
self._payload_writer.enable_compression(coding.value)
406+
# Compressed payload may have different content length,
407+
# remove the header
408+
self._headers.popall(hdrs.CONTENT_LENGTH, None)
407409

408410
async def _start_compression(self, request: "BaseRequest") -> None:
409411
if self._compression_force:
410412
await self._do_start_compression(self._compression_force)
411-
else:
412-
# Encoding comparisons should be case-insensitive
413-
# https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
414-
accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
415-
for value, coding in CONTENT_CODINGS.items():
416-
if value in accept_encoding:
417-
await self._do_start_compression(coding)
418-
return
413+
return
414+
# Encoding comparisons should be case-insensitive
415+
# https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
416+
accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
417+
for value, coding in CONTENT_CODINGS.items():
418+
if value in accept_encoding:
419+
await self._do_start_compression(coding)
420+
return
419421

420422
async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
421423
if self._eof_sent:
@@ -765,30 +767,28 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
765767
async def _do_start_compression(self, coding: ContentCoding) -> None:
766768
if self._chunked or isinstance(self._body, Payload):
767769
return await super()._do_start_compression(coding)
768-
769-
if coding != ContentCoding.identity:
770-
# Instead of using _payload_writer.enable_compression,
771-
# compress the whole body
772-
compressor = ZLibCompressor(
773-
encoding=str(coding.value),
774-
max_sync_chunk_size=self._zlib_executor_size,
775-
executor=self._zlib_executor,
776-
)
777-
assert self._body is not None
778-
if self._zlib_executor_size is None and len(self._body) > 1024 * 1024:
779-
warnings.warn(
780-
"Synchronous compression of large response bodies "
781-
f"({len(self._body)} bytes) might block the async event loop. "
782-
"Consider providing a custom value to zlib_executor_size/"
783-
"zlib_executor response properties or disabling compression on it."
784-
)
785-
self._compressed_body = (
786-
await compressor.compress(self._body) + compressor.flush()
770+
if coding is ContentCoding.identity:
771+
return
772+
# Instead of using _payload_writer.enable_compression,
773+
# compress the whole body
774+
compressor = ZLibCompressor(
775+
encoding=coding.value,
776+
max_sync_chunk_size=self._zlib_executor_size,
777+
executor=self._zlib_executor,
778+
)
779+
assert self._body is not None
780+
if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE:
781+
warnings.warn(
782+
"Synchronous compression of large response bodies "
783+
f"({len(self._body)} bytes) might block the async event loop. "
784+
"Consider providing a custom value to zlib_executor_size/"
785+
"zlib_executor response properties or disabling compression on it."
787786
)
788-
assert self._compressed_body is not None
789-
790-
self._headers[hdrs.CONTENT_ENCODING] = coding.value
791-
self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))
787+
self._compressed_body = (
788+
await compressor.compress(self._body) + compressor.flush()
789+
)
790+
self._headers[hdrs.CONTENT_ENCODING] = coding.value
791+
self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))
792792

793793

794794
def json_response(

tests/test_web_response.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -480,6 +480,24 @@ async def test_force_compression_deflate() -> None:
480480
assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING)
481481

482482

483+
async def test_force_compression_deflate_large_payload() -> None:
484+
"""Make sure a warning is thrown for large payloads compressed in the event loop."""
485+
req = make_request(
486+
"GET", "/", headers=CIMultiDict({hdrs.ACCEPT_ENCODING: "gzip, deflate"})
487+
)
488+
resp = Response(body=b"large")
489+
490+
resp.enable_compression(ContentCoding.deflate)
491+
assert resp.compression
492+
493+
with pytest.warns(
494+
Warning, match="Synchronous compression of large response bodies"
495+
), mock.patch("aiohttp.web_response.LARGE_BODY_SIZE", 2):
496+
msg = await resp.prepare(req)
497+
assert msg is not None
498+
assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING)
499+
500+
483501
async def test_force_compression_no_accept_deflate() -> None:
484502
req = make_request("GET", "/")
485503
resp = StreamResponse()

0 commit comments

Comments (0)