 from kiota_abstractions.request_information import RequestInformation
 from kiota_abstractions.serialization.additional_data_holder import AdditionalDataHolder
 
-from msgraph_core.models import LargeFileUploadCreateSession, LargeFileUploadSession
+from msgraph_core.models import LargeFileUploadCreateSession, LargeFileUploadSession  # check imports
 
 
 class LargeFileUploadTask:
@@ -39,14 +39,20 @@ def get_upload_session(self) -> Parsable:
     def get_adapter(self) -> RequestAdapter:
         return self.request_adapter
 
-    def create_upload_session(self, model: LargeFileUploadCreateSession, callback: Method):
+    def create_upload_session(
+        self,
+        request_body: LargeFileUploadSession,
+        model: LargeFileUploadCreateSession,
+    ) -> Future:
         request_info = RequestInformation()
-        request_info.set_uri(self.options.get_item_path())
-        request_info.set_http_method('POST')
-        request_info.set_content_type('application/json')
-        request_info.set_payload(model)
+        request_info.url = ""
+        request_info.http_method = Method.POST
+        request_info.set_content_from_parsable(
+            self.request_adapter, 'application/json', request_body
+        )
+        request_info.set_stream_content(model)
 
-        self.request_adapter.send_async(request_info, LargeFileUploadSession, callback)
+        return self.request_adapter.send_async(request_info, LargeFileUploadSession, {})
 
     def get_chunks(self) -> int:
         return self.chunks
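For reference, a sketch of how a caller might drive the reworked method; `task` stands in for an already-constructed LargeFileUploadTask wired to a RequestAdapter and is an assumption, not part of this diff. Note the method now hands back the adapter's future instead of firing a callback:

    # Hypothetical caller for the new create_upload_session signature.
    request_body = LargeFileUploadSession()
    model = LargeFileUploadCreateSession()
    # Await the returned future; it resolves to a LargeFileUploadSession.
    upload_session = await task.create_upload_session(request_body, model)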
@@ -102,19 +108,24 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None) -> Future:
 
     def process_chunk(self, upload_session, uploaded_range):
         if upload_session is None:
-            return upload_session
+            return None
+
         next_range = upload_session.get_next_expected_ranges()
+        if not next_range:
+            return upload_session
+
         old_url = self.get_validated_upload_url(self.upload_session)
         upload_session.set_upload_url(old_url)
+
         if self.on_chunk_upload_complete is not None:
             self.on_chunk_upload_complete(uploaded_range)
-        if not next_range:
-            return upload_session
+
         range_parts = next_range[0].split("-")
         end = min(int(range_parts[0]) + self.max_chunk_size, self.file_size)
-        uploaded_range = [range_parts[0], end]
-        self.set_next_range(next_range[0] + "-")
-        process_next = self.next_chunk(self.stream)
+        self.set_next_range(f"{range_parts[0]}-{end}")
+
+        self.next_chunk(self.stream)
+
         return upload_session
 
     def handle_error(self, error):
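The subtle part of this hunk is the range bookkeeping: the start offset comes from the session's first expected range, and the end is capped at the file size. A standalone sketch of that arithmetic with illustrative sizes (plain Python, no SDK types):

    # Illustrative values; on the task these come from self.max_chunk_size,
    # self.file_size, and upload_session.get_next_expected_ranges().
    max_chunk_size = 3 * 1024 * 1024   # 3 MiB per chunk
    file_size = 10 * 1024 * 1024       # 10 MiB file
    next_range = ["6291456-"]          # server expects bytes from offset 6 MiB

    range_parts = next_range[0].split("-")
    end = min(int(range_parts[0]) + max_chunk_size, file_size)
    assert f"{range_parts[0]}-{end}" == "6291456-9437184"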
@@ -129,8 +140,8 @@ async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int =
         if not upload_url:
             raise ValueError('The upload session URL must not be empty.')
         info = RequestInformation()
-        info.set_uri(upload_url)
-        info.http_method = HttpMethod.PUT
+        info.url = upload_url
+        info.http_method = Method.PUT
         if not self.next_range:
             self.set_next_range(f'{range_start}-{range_end}')
         range_parts = self.next_range.split('-') if self.next_range else ['-']
@@ -150,29 +161,25 @@ async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int =
         end = min(end, self.max_chunk_size + start)
         chunk_data = file.read(end - start + 1)
 
-        info.set_headers(
-            {
-                **info.get_headers(), 'Content-Range': f'bytes {start}-{end}/{self.file_size}'
-            }
-        )
-        info.set_headers({**info.get_headers(), 'Content-Length': str(len(chunk_data))})
+        info.headers = {
+            **info.request_headers(), 'Content-Range': f'bytes {start}-{end}/{self.file_size}'
+        }
+
+        info.headers = {**info.request_headers(), 'Content-Length': str(len(chunk_data))}
 
         info.set_stream_content(BytesIO(chunk_data))
-        return await self.adapter.send_async(
+        return await self.request_adapter.send_async(
             info, LargeFileUploadSession.create_from_discriminator_value
         )
 
     def get_file(self) -> BytesIO:
         return self.stream
 
     async def cancel(self) -> Optional[Future]:
-        request_information = RequestInformation()
-        request_information.http_method = HttpMethod.DELETE
-
         upload_url = self.get_validated_upload_url(self.upload_session)
+        request_information = RequestInformation(http_method=Method.DELETE, url=upload_url)
 
-        request_information.set_uri(upload_url)
-        result = await self.request_adapter.send_no_content_async(request_information)
+        await self.request_adapter.send_no_response_content_async(request_information)
 
         if hasattr(self.upload_session, 'set_is_cancelled'):
             self.upload_session.set_is_cancelled(True)
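The headers assembled above follow the resumable-upload convention `Content-Range: bytes start-end/total` with an inclusive end offset; a quick standalone check with illustrative numbers:

    # Inclusive byte span for one chunk of a 10 MiB file.
    start, end, file_size = 0, 3145727, 10485760
    content_range = f'bytes {start}-{end}/{file_size}'  # 'bytes 0-3145727/10485760'
    content_length = end - start + 1                    # 3145728 bytes, matching len(chunk_data)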
@@ -181,6 +188,7 @@ async def cancel(self) -> Optional[Future]:
             current = self.upload_session.get_additional_data()
             new = {**current, 'is_cancelled': True}
             self.upload_session.set_additional_data(new)
+
         return self.upload_session
 
     def additional_data_contains(self, parsable: Parsable,
@@ -189,7 +197,7 @@ def additional_data_contains(self, parsable: Parsable,
             raise ValueError(
                 f'The object passed does not contain property/properties {",".join(property_candidates)} and does not implement AdditionalDataHolder'
             )
-        additional_data = parsable.get_additional_data()
+        additional_data = parsable.additional_data
         for property_candidate in property_candidates:
             if property_candidate in additional_data:
                 return True, additional_data[property_candidate]
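additional_data_contains returns a (True, value) pair when a candidate key is found; a hedged usage sketch, assuming the not-found branch (outside this diff) returns a pair as well:

    # Hypothetical lookup; the key names are illustrative, not fixed by the SDK.
    found, value = task.additional_data_contains(response, ['uploadUrl', 'upload_url'])
    if found:
        print(f'upload URL from additional data: {value}')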