Commit c206faf

[PR #9771/e80d5854 backport][3.11] Small cleanups to enabling compression in web_response (#9775)
Co-authored-by: J. Nick Koston <[email protected]>
1 parent 602552e commit c206faf

2 files changed: +58 -40 lines changed

aiohttp/web_response.py

Lines changed: 40 additions & 40 deletions
@@ -43,6 +43,7 @@
 from .typedefs import JSONEncoder, LooseHeaders

 REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus}
+LARGE_BODY_SIZE = 1024**2

 __all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")

@@ -412,27 +413,28 @@ def _generate_content_type_header(
         self._headers[CONTENT_TYPE] = ctype

     async def _do_start_compression(self, coding: ContentCoding) -> None:
-        if coding != ContentCoding.identity:
-            assert self._payload_writer is not None
-            self._headers[hdrs.CONTENT_ENCODING] = coding.value
-            self._payload_writer.enable_compression(
-                coding.value, self._compression_strategy
-            )
-            # Compressed payload may have different content length,
-            # remove the header
-            self._headers.popall(hdrs.CONTENT_LENGTH, None)
+        if coding is ContentCoding.identity:
+            return
+        assert self._payload_writer is not None
+        self._headers[hdrs.CONTENT_ENCODING] = coding.value
+        self._payload_writer.enable_compression(
+            coding.value, self._compression_strategy
+        )
+        # Compressed payload may have different content length,
+        # remove the header
+        self._headers.popall(hdrs.CONTENT_LENGTH, None)

     async def _start_compression(self, request: "BaseRequest") -> None:
         if self._compression_force:
             await self._do_start_compression(self._compression_force)
-        else:
-            # Encoding comparisons should be case-insensitive
-            # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
-            accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
-            for value, coding in CONTENT_CODINGS.items():
-                if value in accept_encoding:
-                    await self._do_start_compression(coding)
-                    return
+            return
+        # Encoding comparisons should be case-insensitive
+        # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
+        accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
+        for value, coding in CONTENT_CODINGS.items():
+            if value in accept_encoding:
+                await self._do_start_compression(coding)
+                return

     async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
         if self._eof_sent:
@@ -782,30 +784,28 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
     async def _do_start_compression(self, coding: ContentCoding) -> None:
         if self._chunked or isinstance(self._body, Payload):
             return await super()._do_start_compression(coding)
-
-        if coding != ContentCoding.identity:
-            # Instead of using _payload_writer.enable_compression,
-            # compress the whole body
-            compressor = ZLibCompressor(
-                encoding=str(coding.value),
-                max_sync_chunk_size=self._zlib_executor_size,
-                executor=self._zlib_executor,
-            )
-            assert self._body is not None
-            if self._zlib_executor_size is None and len(self._body) > 1024 * 1024:
-                warnings.warn(
-                    "Synchronous compression of large response bodies "
-                    f"({len(self._body)} bytes) might block the async event loop. "
-                    "Consider providing a custom value to zlib_executor_size/"
-                    "zlib_executor response properties or disabling compression on it."
-                )
-            self._compressed_body = (
-                await compressor.compress(self._body) + compressor.flush()
+        if coding is ContentCoding.identity:
+            return
+        # Instead of using _payload_writer.enable_compression,
+        # compress the whole body
+        compressor = ZLibCompressor(
+            encoding=coding.value,
+            max_sync_chunk_size=self._zlib_executor_size,
+            executor=self._zlib_executor,
+        )
+        assert self._body is not None
+        if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE:
+            warnings.warn(
+                "Synchronous compression of large response bodies "
+                f"({len(self._body)} bytes) might block the async event loop. "
+                "Consider providing a custom value to zlib_executor_size/"
+                "zlib_executor response properties or disabling compression on it."
             )
-            assert self._compressed_body is not None
-
-            self._headers[hdrs.CONTENT_ENCODING] = coding.value
-            self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))
+        self._compressed_body = (
+            await compressor.compress(self._body) + compressor.flush()
+        )
+        self._headers[hdrs.CONTENT_ENCODING] = coding.value
+        self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))


 def json_response(
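
Not part of the diff, but for context: the warning above only fires when no zlib_executor_size is set on the response. The following is a minimal sketch of how an application could route compression of a large body through an executor instead, assuming aiohttp's public Response(zlib_executor_size=..., zlib_executor=...) parameters and enable_compression(); the handler and pool names are illustrative.

from concurrent.futures import ThreadPoolExecutor

from aiohttp import web

# Shared thread pool used only for zlib compression of large bodies (illustrative).
_zlib_pool = ThreadPoolExecutor(max_workers=1)


async def big_handler(request: web.Request) -> web.Response:
    payload = b"x" * (2 * 1024**2)  # larger than the 1 MiB LARGE_BODY_SIZE threshold
    resp = web.Response(
        body=payload,
        # Bodies above this size are compressed in the executor; setting it also
        # means the "might block the async event loop" warning is not raised.
        zlib_executor_size=256 * 1024,
        zlib_executor=_zlib_pool,
    )
    resp.enable_compression(web.ContentCoding.deflate)
    return resp
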

tests/test_web_response.py

Lines changed: 18 additions & 0 deletions
@@ -481,6 +481,24 @@ async def test_force_compression_deflate() -> None:
     assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING)


+async def test_force_compression_deflate_large_payload() -> None:
+    """Make sure a warning is thrown for large payloads compressed in the event loop."""
+    req = make_request(
+        "GET", "/", headers=CIMultiDict({hdrs.ACCEPT_ENCODING: "gzip, deflate"})
+    )
+    resp = Response(body=b"large")
+
+    resp.enable_compression(ContentCoding.deflate)
+    assert resp.compression
+
+    with pytest.warns(
+        Warning, match="Synchronous compression of large response bodies"
+    ), mock.patch("aiohttp.web_response.LARGE_BODY_SIZE", 2):
+        msg = await resp.prepare(req)
+    assert msg is not None
+    assert "deflate" == resp.headers.get(hdrs.CONTENT_ENCODING)
+
+
 async def test_force_compression_no_accept_deflate() -> None:
     req = make_request("GET", "/")
     resp = StreamResponse()
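
A side note on the new test: making the 1 MiB threshold a module-level constant (LARGE_BODY_SIZE) is what lets the test patch it down and exercise the warning with a 5-byte body. Below is a minimal standalone sketch of that patching idea, assuming an installed aiohttp whose web_response module defines LARGE_BODY_SIZE; the test name is hypothetical.

from unittest import mock

import aiohttp.web_response as web_response


def test_large_body_threshold_can_be_patched() -> None:
    body = b"large"  # only 5 bytes
    with mock.patch("aiohttp.web_response.LARGE_BODY_SIZE", 2):
        # Inside the patch, the tiny body counts as "large".
        assert len(body) > web_response.LARGE_BODY_SIZE
    # Outside the patch, the real 1 MiB threshold is restored.
    assert web_response.LARGE_BODY_SIZE == 1024**2
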
