@@ -174,11 +174,10 @@ def _decode_bitonal(self):
         are exactly one byte, and so the inter-token whitespace is optional.
         """
         decoded_data = bytearray()
-        total_tokens = self.size
+        total_bytes = self.size
 
         comment_spans = False
-        tokens_read = 0
-        while True:
+        while len(decoded_data) != total_bytes:
             block = self._read_block()  # read next block
             if not block:
                 raise ValueError("Reached EOF while reading data")
@@ -194,28 +193,24 @@ def _decode_bitonal(self):
             block, comment_spans = self._ignore_comments(block)
 
             tokens = b"".join(block.split())
-
             for token in tokens:
                 if token not in (48, 49):
                     raise ValueError(f"Invalid token for this mode: {bytes([token])}")
-                tokens_read += 1
-                decoded_data.append(token)
-                if tokens_read == total_tokens:  # finished!
-                    invert = bytes.maketrans(b"01", b"\xFF\x00")
-                    return decoded_data.translate(invert)
+            decoded_data = (decoded_data + tokens)[:total_bytes]
+        invert = bytes.maketrans(b"01", b"\xFF\x00")
+        return decoded_data.translate(invert)
 
     def _decode_blocks(self, channels=1, depth=8):
         decoded_data = bytearray()
         # HACK: 32-bit grayscale uses signed int
         maxval = 2 ** (31 if depth == 32 else depth) - 1
         max_len = 10
         bytes_per_sample = depth // 8
-        total_tokens = self.size * channels
+        total_bytes = self.size * channels * bytes_per_sample
 
         comment_spans = False
         half_token = False
-        tokens_read = 0
-        while True:
+        while len(decoded_data) != total_bytes:
             block = self._read_block()  # read next block
             if not block:
                 if half_token:
@@ -251,12 +246,12 @@ def _decode_blocks(self, channels=1, depth=8):
                         f"Token too long found in data: {token[:max_len + 1]}"
                     )
                 token = int(token)
-                tokens_read += 1
                 if token > maxval:
                     raise ValueError(f"Channel value too large for this mode: {token}")
                 decoded_data += token.to_bytes(bytes_per_sample, "big")
-                if tokens_read == total_tokens:  # finished!
-                    return decoded_data
+                if len(decoded_data) == total_bytes:  # finished!
+                    break
+        return decoded_data
 
     def decode(self, buffer):
         self.size = self.state.xsize * self.state.ysize