@@ -28,10 +28,12 @@ def test_threaded_read():
 
 
 @pytest.mark.parametrize(["mode", "threads"],
-                         itertools.product(["wb", "wt"], [1, 3]))
+                         itertools.product(["wb", "wt"], [1, 3, -1]))
 def test_threaded_write(mode, threads):
     with tempfile.NamedTemporaryFile("wb", delete=False) as tmp:
-        with igzip_threaded.open(tmp, mode, threads=threads) as out_file:
+        # Use a small block size to simulate many writes.
+        with igzip_threaded.open(tmp, mode, threads=threads,
+                                 block_size=8 * 1024) as out_file:
             gzip_open_mode = "rb" if "b" in mode else "rt"
             with gzip.open(TEST_FILE, gzip_open_mode) as in_file:
                 while True:
@@ -77,13 +79,33 @@ def test_threaded_read_error():
 
 @pytest.mark.timeout(5)
 @pytest.mark.parametrize("threads", [1, 3])
-def test_threaded_write_error(threads):
-    # parallel_deflate_and_crc method is called in a worker thread.
-    with pytest.raises(OverflowError) as error:
+def test_threaded_write_oversized_block_no_error(threads):
+    # Random bytes are incompressible, and therefore are guaranteed to
+    # trigger a buffer overflow when larger than block size unless handled
+    # correctly.
+    data = os.urandom(1024 * 63)  # not a multiple of block_size
+    with tempfile.NamedTemporaryFile(mode="wb", delete=False) as tmp:
         with igzip_threaded.open(
-                io.BytesIO(), "wb", compresslevel=3, threads=threads
+                tmp, "wb", compresslevel=3, threads=threads,
+                block_size=8 * 1024
         ) as writer:
-            writer.write(os.urandom(1024 * 1024 * 50))
+            writer.write(data)
+    with gzip.open(tmp.name, "rb") as gzipped:
+        decompressed = gzipped.read()
+    assert data == decompressed
+
+
+@pytest.mark.timeout(5)
+@pytest.mark.parametrize("threads", [1, 3])
+def test_threaded_write_error(threads):
+    f = igzip_threaded._ThreadedGzipWriter(
+        fp=io.BytesIO(), level=3,
+        threads=threads, block_size=8 * 1024)
+    # Bypass the write method which should not allow blocks larger than
+    # block_size.
+    f.input_queues[0].put((os.urandom(1024 * 64), b""))
+    with pytest.raises(OverflowError) as error:
+        f.close()
     error.match("Compressed output exceeds buffer size")
 
 
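For reference, the behaviour these tests exercise can be sketched with the public API alone. This is a minimal usage sketch, assuming python-isal's module layout (from isal import igzip_threaded) and the block_size keyword shown in the diff above; the output path "example.gz" is hypothetical.

    import gzip
    import os

    from isal import igzip_threaded

    # Data larger than block_size: the writer is expected to split it into
    # multiple blocks instead of overflowing a per-block output buffer.
    data = os.urandom(1024 * 63)
    with igzip_threaded.open("example.gz", "wb", compresslevel=3,
                             threads=2, block_size=8 * 1024) as f:
        f.write(data)

    # Reading back with the standard library confirms the output is valid gzip.
    with gzip.open("example.gz", "rb") as f:
        assert f.read() == data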