1010from kiota_abstractions .headers_collection import HeadersCollection
1111from kiota_abstractions .request_information import RequestInformation
1212from kiota_abstractions .serialization .additional_data_holder import AdditionalDataHolder
13+ from kiota_abstractions .serialization .parsable_factory import ParsableFactory
1314
1415from kiota_abstractions .request_adapter import RequestAdapter
1516
@@ -23,7 +24,8 @@ def __init__(
2324 upload_session : Parsable ,
2425 request_adapter : RequestAdapter ,
2526 stream : BytesIO ,
26- max_chunk_size : int = 5 * 1024 * 1024
27+ parsable_factory : Optional [ParsableFactory ] = None ,
28+ max_chunk_size : int = 409600 #5 * 1024 * 1024
2729 ):
2830 self ._upload_session = upload_session
2931 self ._request_adapter = request_adapter
@@ -33,6 +35,7 @@ def __init__(
3335 except AttributeError :
3436 self .file_size = os .stat (stream .name ).st_size
3537 self .max_chunk_size = max_chunk_size
38+ self .factory = parsable_factory
3639 cleaned_value = self .check_value_exists (
3740 upload_session , 'get_next_expected_range' , ['next_expected_range' , 'NextExpectedRange' ]
3841 )
@@ -97,8 +100,17 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
97100 range_parts = self .next_range [0 ].split ("-" ) if self .next_range else ['0' , '0' ]
98101 end = min (int (range_parts [0 ]) + self .max_chunk_size - 1 , self .file_size )
99102 uploaded_range = [range_parts [0 ], end ]
100- while self .chunks > 0 :
103+ response = None
104+
105+ while self .chunks >= 0 :
101106 session = process_next
107+ print (f"Chunks for upload : { self .chunks } " )
108+ if self .chunks == 0 :
109+ # last chunk
110+ print (f"Last chunk: { self .chunks } upload stated" )
111+ response = await self .last_chunk (self .stream )
112+ print ("Last chunk response: received" )
113+
102114 try :
103115 lfu_session : LargeFileUploadSession = session # type: ignore
104116 if lfu_session is None :
@@ -115,17 +127,13 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
115127 uploaded_range = [range_parts [0 ], end ]
116128 self .next_range = next_range [0 ] + "-"
117129 process_next = await self .next_chunk (self .stream )
130+
118131 except Exception as error :
119132 logging .error ("Error uploading chunk %s" , error )
120133 finally :
121134 self .chunks -= 1
122135 upload_result = UploadResult ()
123- upload_result .upload_session = UploadSessionDataHolder (
124- expiration_date_time = self .upload_session .expiration_date_time ,
125- next_expected_ranges = self .upload_session .next_expected_ranges ,
126- upload_url = self .upload_session .upload_url
127- )
128- upload_result .item_response = session
136+ upload_result .item_response = response
129137 upload_result .location = self .upload_session .upload_url
130138 return upload_result
131139
@@ -172,6 +180,47 @@ async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int =
172180 parsable_factory = LargeFileUploadSession
173181 return await self .request_adapter .send_async (info , parsable_factory , error_map )
174182
async def last_chunk(
    self,
    file: BytesIO,
    range_start: int = 0,
    range_end: int = 0,
    parsable_factory: Optional[ParsableFactory] = None
) -> Future:
    """Upload the final chunk of the file to the upload session URL.

    Determines the byte range to send from ``self.next_range`` (falling back
    to the explicit ``range_start``/``range_end`` arguments when no next
    range is known), PUTs that slice with ``Content-Range``/``Content-Length``
    headers, and deserializes the response using ``self.factory`` or, if that
    is unset, the ``parsable_factory`` argument.

    :param file: source stream; this method seeks to the chunk start itself.
    :param range_start: fallback inclusive start offset when no next range is known.
    :param range_end: fallback inclusive end offset when no next range is known.
    :param parsable_factory: factory used to deserialize the response when
        ``self.factory`` is not set.
    :raises ValueError: if the upload session carries no upload URL.
    """
    upload_url = self.get_validated_upload_url(self.upload_session)
    if not upload_url:
        raise ValueError('The upload session URL must not be empty.')
    info = RequestInformation()
    info.url = upload_url
    info.http_method = Method.PUT
    if not self.next_range:
        self.next_range = f'{range_start}-{range_end}'
    range_parts = self.next_range.split('-')
    start = int(range_parts[0])
    # Guard the empty tail: upload() stores open-ended ranges as "NNN-",
    # so range_parts[1] may be '' and int('') would raise ValueError.
    end = int(range_parts[1]) if len(range_parts) > 1 and range_parts[1] else 0
    if start == 0 and end == 0:
        # No range known at all: send the first (possibly only) chunk.
        chunk_data = file.read(self.max_chunk_size)
        end = min(self.max_chunk_size - 1, self.file_size - 1)
    elif start == 0:
        # Explicit end only: read the inclusive prefix [0, end].
        chunk_data = file.read(end + 1)
    elif end == 0:
        # Open-ended range ("start-"): read up to one max-size chunk.
        file.seek(start)
        chunk_data = file.read(self.max_chunk_size)
        end = start + len(chunk_data) - 1
    else:
        # Bounded range: clamp to an inclusive window of at most
        # max_chunk_size bytes. (Was `self.max_chunk_size + start`,
        # which permitted max_chunk_size + 1 bytes — off by one.)
        file.seek(start)
        end = min(end, start + self.max_chunk_size - 1)
        chunk_data = file.read(end - start + 1)
    info.headers = HeadersCollection()
    info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
    info.headers.try_add('Content-Length', str(len(chunk_data)))
    info.headers.try_add("Content-Type", "application/octet-stream")
    info.set_stream_content(bytes(chunk_data))
    error_map: Dict[str, int] = {}
    # Prefer the factory configured on the task; fall back to the argument.
    parsable_factory = self.factory or parsable_factory
    return await self.request_adapter.send_async(info, parsable_factory, error_map)
223+
def get_file(self) -> BytesIO:
    """Return the underlying byte stream that this task is uploading."""
    source = self.stream
    return source
177226
0 commit comments