|
from .typedefs import JSONEncoder, LooseHeaders

# HTTP status code -> standard reason phrase (e.g. 200 -> "OK"),
# built once from the stdlib HTTPStatus enum.
REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus}
# Body-size threshold (1 MiB) above which compressing a body synchronously
# (without a zlib executor) triggers a warning about blocking the event loop.
LARGE_BODY_SIZE = 1024**2

__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
|
@@ -412,27 +413,28 @@ def _generate_content_type_header(
|
412 | 413 | self._headers[CONTENT_TYPE] = ctype
|
413 | 414 |
|
414 | 415 | async def _do_start_compression(self, coding: ContentCoding) -> None:
|
415 |
| - if coding != ContentCoding.identity: |
416 |
| - assert self._payload_writer is not None |
417 |
| - self._headers[hdrs.CONTENT_ENCODING] = coding.value |
418 |
| - self._payload_writer.enable_compression( |
419 |
| - coding.value, self._compression_strategy |
420 |
| - ) |
421 |
| - # Compressed payload may have different content length, |
422 |
| - # remove the header |
423 |
| - self._headers.popall(hdrs.CONTENT_LENGTH, None) |
| 416 | + if coding is ContentCoding.identity: |
| 417 | + return |
| 418 | + assert self._payload_writer is not None |
| 419 | + self._headers[hdrs.CONTENT_ENCODING] = coding.value |
| 420 | + self._payload_writer.enable_compression( |
| 421 | + coding.value, self._compression_strategy |
| 422 | + ) |
| 423 | + # Compressed payload may have different content length, |
| 424 | + # remove the header |
| 425 | + self._headers.popall(hdrs.CONTENT_LENGTH, None) |
424 | 426 |
|
425 | 427 | async def _start_compression(self, request: "BaseRequest") -> None:
|
426 | 428 | if self._compression_force:
|
427 | 429 | await self._do_start_compression(self._compression_force)
|
428 |
| - else: |
429 |
| - # Encoding comparisons should be case-insensitive |
430 |
| - # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 |
431 |
| - accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() |
432 |
| - for value, coding in CONTENT_CODINGS.items(): |
433 |
| - if value in accept_encoding: |
434 |
| - await self._do_start_compression(coding) |
435 |
| - return |
| 430 | + return |
| 431 | + # Encoding comparisons should be case-insensitive |
| 432 | + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 |
| 433 | + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() |
| 434 | + for value, coding in CONTENT_CODINGS.items(): |
| 435 | + if value in accept_encoding: |
| 436 | + await self._do_start_compression(coding) |
| 437 | + return |
436 | 438 |
|
437 | 439 | async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
|
438 | 440 | if self._eof_sent:
|
@@ -782,30 +784,28 @@ async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
|
    async def _do_start_compression(self, coding: ContentCoding) -> None:
        """Compress the in-memory body eagerly, or defer to streaming.

        Chunked responses and ``Payload`` bodies fall back to the streaming
        compression implemented by the base class; ``identity`` is a no-op.
        Otherwise the whole body is compressed here in one shot and the
        ``Content-Encoding`` / ``Content-Length`` headers are updated to
        describe the compressed result.
        """
        if self._chunked or isinstance(self._body, Payload):
            return await super()._do_start_compression(coding)
        if coding is ContentCoding.identity:
            return
        # Instead of using _payload_writer.enable_compression,
        # compress the whole body
        compressor = ZLibCompressor(
            encoding=coding.value,
            max_sync_chunk_size=self._zlib_executor_size,
            executor=self._zlib_executor,
        )
        assert self._body is not None
        # Warn when a large body would be compressed synchronously on the
        # event loop (no executor chunk-size threshold configured).
        if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE:
            warnings.warn(
                "Synchronous compression of large response bodies "
                f"({len(self._body)} bytes) might block the async event loop. "
                "Consider providing a custom value to zlib_executor_size/"
                "zlib_executor response properties or disabling compression on it."
            )
        self._compressed_body = (
            await compressor.compress(self._body) + compressor.flush()
        )
        self._headers[hdrs.CONTENT_ENCODING] = coding.value
        self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))
809 | 809 |
|
810 | 810 |
|
811 | 811 | def json_response(
|
|
0 commit comments