Skip to content

Commit 5eab6f3

Browse files
committed
Ensure complete test coverage
1 parent 1346675 commit 5eab6f3

File tree

2 files changed

+22
-10
lines changed

2 files changed

+22
-10
lines changed

src/brotlicffi/_api.py

Lines changed: 9 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -377,20 +377,16 @@ def __init__(self, dictionary=b''):
377377

378378
@staticmethod
379379
def _calculate_buffer_size(
380-
input_data_len, output_buffer_limit, total_output_size, chunks_num
380+
input_data_len, output_buffer_limit, chunks_len, chunks_num
381381
):
382382
if output_buffer_limit is not None:
383-
remaining_space = output_buffer_limit - total_output_size
384-
if remaining_space <= 0:
385-
return None
386-
return remaining_space
383+
return output_buffer_limit - chunks_len
387384
# When `decompress(b'')` is called without `output_buffer_limit`.
388385
elif input_data_len == 0:
389386
# libbrotli would use 32 KB as a starting buffer size and double it
390387
# each time, capped at 16 MB.
391388
# https://github.com/google/brotli/blob/028fb5a23661f123017c060daa546b55cf4bde29/python/_brotli.c#L291-L292
392-
log_size = chunks_num + 15
393-
return 1 << min(log_size, 24)
389+
return 1 << min(chunks_num + 15, 24)
394390
else:
395391
# Allocate a buffer that's hopefully overlarge, but if it's not we
396392
# don't mind: we'll spin around again.
@@ -419,6 +415,11 @@ def decompress(self, data, output_buffer_limit=None):
419415
"'can_accept_more_data()' is False"
420416
)
421417

418+
# We should avoid operations on the `self._unconsumed_data` if no data
419+
# is to be processed.
420+
if output_buffer_limit is not None and output_buffer_limit <= 0:
421+
return b''
422+
422423
# Use unconsumed data if available, use new data otherwise.
423424
if self._unconsumed_data:
424425
input_data = self._unconsumed_data
@@ -437,11 +438,9 @@ def decompress(self, data, output_buffer_limit=None):
437438
buffer_size = self._calculate_buffer_size(
438439
input_data_len=len(input_data),
439440
output_buffer_limit=output_buffer_limit,
440-
total_output_size=chunks_len,
441+
chunks_len=chunks_len,
441442
chunks_num=len(chunks),
442443
)
443-
if buffer_size is None:
444-
break
445444

446445
available_out = ffi.new("size_t *", buffer_size)
447446
out_buffer = ffi.new("uint8_t[]", buffer_size)

test/test_simple_decompression.py

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,19 @@ def test_decompressobj_with_output_buffer_limit(
5858
result = o.decompress(compressed_data, output_buffer_limit=small_limit)
5959
assert len(result) <= small_limit
6060

61+
# Ensure `output_buffer_limit` of zero works.
62+
assert o.decompress(b'', output_buffer_limit=0) == b''
63+
64+
if o._unconsumed_data:
65+
with pytest.raises(
66+
brotlicffi.error,
67+
match=(
68+
r"brotli: decoder process called with data when "
69+
r"'can_accept_more_data\(\)' is False"
70+
),
71+
):
72+
o.decompress(b'additional data')
73+
6174
if not o.is_finished():
6275
assert not o.can_accept_more_data()
6376

0 commit comments

Comments (0)