@@ -22,8 +22,8 @@ def __init__(
         self,
         upload_session: LargeFileUploadSession,
         request_adapter: RequestAdapter,
-        stream: BytesIO,  # counter check this
-        max_chunk_size: int = 409600  # 4 * 1024 * 1024 - use smaller chnuks for testing
+        stream: BytesIO,
+        max_chunk_size: int = 409600
     ):
         if not isinstance(upload_session, LargeFileUploadSession):
             raise TypeError("upload_session must be an instance of LargeFileUploadSession")
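
For context, a runnable sketch of the defensive type check this constructor performs; the stand-in class below is an illustration for self-containment, not the SDK's real type:

```python
# Stand-in for the SDK's LargeFileUploadSession, only so the snippet runs
# on its own; the real class comes from the Graph core library.
class LargeFileUploadSession:
    pass

def check_session(upload_session):
    # Mirrors the isinstance guard in __init__ above.
    if not isinstance(upload_session, LargeFileUploadSession):
        raise TypeError("upload_session must be an instance of LargeFileUploadSession")

check_session(LargeFileUploadSession())  # passes silently
try:
    check_session("not a session")
except TypeError as err:
    print(err)  # upload_session must be an instance of LargeFileUploadSession
```
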
@@ -119,8 +119,6 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
         process_next = session
         # determine the range to be uploaded
         # even when resuming existing upload sessions.
-        #range_parts = self.next_range[0].split("-") if self.next_range else ['0']
-
         range_parts = self.next_range[0].split("-") if self.next_range else ['0', '0']
         end = min(int(range_parts[0]) + self.max_chunk_size - 1, self.file_size)
         uploaded_range = [range_parts[0], end]
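
As a standalone illustration of the range arithmetic above (hypothetical numbers; `next_range` mirrors `self.next_range[0]`): the lower bound of the next range plus `max_chunk_size - 1` gives the chunk's end, clamped to the file size:

```python
# Sketch of the chunk-range arithmetic above, with hypothetical values.
def next_chunk_range(next_range, max_chunk_size, file_size):
    # next_range mirrors self.next_range[0], e.g. "819200-" when resuming;
    # a fresh session starts from byte 0.
    range_parts = next_range.split("-") if next_range else ['0', '0']
    start = int(range_parts[0])
    # Clamp so the chunk never runs past the end of the file.
    end = min(start + max_chunk_size - 1, file_size)
    return start, end

print(next_chunk_range(None, 409600, 1_048_576))       # (0, 409599)
print(next_chunk_range("819200-", 409600, 1_048_576))  # (819200, 1048576)
```
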
@@ -186,7 +184,7 @@ async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int =
         info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
         info.headers.try_add('Content-Length', str(len(chunk_data)))
         info.headers.try_add("Content-Type", "application/octet-stream")
-        info.set_stream_content(bytes(chunk_data))  # Convert chunk_data to bytes
+        info.set_stream_content(bytes(chunk_data))
         error_map: Dict[str, int] = {}
         parsable_factory = LargeFileUploadSession
         return await self.request_adapter.send_async(info, parsable_factory, error_map)
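
For reference, the `Content-Range` value assembled above follows the standard `bytes start-end/total` form; a quick hedged illustration with made-up numbers:

```python
# Hypothetical numbers: the second 409600-byte chunk of a 1 MiB upload.
start, end, file_size = 409600, 819199, 1_048_576
content_range = f'bytes {start}-{end}/{file_size}'
print(content_range)  # bytes 409600-819199/1048576
print('Content-Length:', end - start + 1)  # 409600
```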