@@ -1,10 +1,9 @@
 import logging
 import os
 from asyncio import Future
-from collections.abc import Callable
 from datetime import datetime, timedelta, timezone
 from io import BytesIO
-from typing import Any, Optional, Tuple, TypeVar, Union
+from typing import Any, Callable, Optional, Tuple, TypeVar, Union
 
 from kiota_abstractions.headers_collection import HeadersCollection
 from kiota_abstractions.method import Method
@@ -38,11 +37,13 @@ def __init__(
         self.max_chunk_size = max_chunk_size
         self.factory = parsable_factory
         cleaned_value = self.check_value_exists(
-            upload_session, 'get_next_expected_range', ['next_expected_range', 'NextExpectedRange']
+            upload_session, 'get_next_expected_range', [
+                'next_expected_range', 'NextExpectedRange']
         )
         self.next_range = cleaned_value[0]
         self._chunks = int((self.file_size / max_chunk_size) + 0.5)
-        self.on_chunk_upload_complete: Optional[Callable[[list[int]], None]] = None
+        self.on_chunk_upload_complete: Optional[Callable[[
+            list[int]], None]] = None
 
     @property
     def upload_session(self):
@@ -68,7 +69,8 @@ def upload_session_expired(self, upload_session: Optional[Parsable] = None) -> bool:
         now = datetime.now(timezone.utc)
         upload_session = upload_session or self.upload_session
         if not hasattr(upload_session, "expiration_date_time"):
-            raise ValueError("Upload session does not have an expiration date time")
+            raise ValueError(
+                "Upload session does not have an expiration date time")
         expiry = getattr(upload_session, 'expiration_date_time')
         if expiry is None:
             raise ValueError("Expiry is None")
@@ -93,13 +95,16 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
 
         self.on_chunk_upload_complete = after_chunk_upload or self.on_chunk_upload_complete
         session: LargeFileUploadSession = await self.next_chunk(
-            self.stream, 0, max(0, min(self.max_chunk_size - 1, self.file_size - 1))
+            self.stream, 0, max(
+                0, min(self.max_chunk_size - 1, self.file_size - 1))
         )
         process_next = session
         # determine the range to be uploaded
         # even when resuming existing upload sessions.
-        range_parts = self.next_range[0].split("-") if self.next_range else ['0', '0']
-        end = min(int(range_parts[0]) + self.max_chunk_size - 1, self.file_size)
+        range_parts = self.next_range[0].split(
+            "-") if self.next_range else ['0', '0']
+        end = min(int(range_parts[0]) +
+                  self.max_chunk_size - 1, self.file_size)
         uploaded_range = [range_parts[0], end]
         response = None
 
@@ -124,12 +129,13 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
                 if not next_range:
                     continue
                 range_parts = str(next_range[0]).split("-")
-                end = min(int(range_parts[0]) + self.max_chunk_size, self.file_size)
+                end = min(int(range_parts[0]) +
+                          self.max_chunk_size, self.file_size)
                 uploaded_range = [range_parts[0], end]
                 self.next_range = next_range[0] + "-"
                 process_next = await self.next_chunk(self.stream)
 
-            except Exception as error:  #pylint: disable=broad-except
+            except Exception as error:  # pylint: disable=broad-except
                 logging.error("Error uploading chunk %s", error)
             finally:
                 self.chunks -= 1
@@ -176,7 +182,8 @@ async def next_chunk(
         chunk_data = file.read(end - start + 1)
         info.headers = HeadersCollection()
 
-        info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
+        info.headers.try_add(
+            'Content-Range', f'bytes {start}-{end}/{self.file_size}')
         info.headers.try_add('Content-Length', str(len(chunk_data)))
         info.headers.try_add("Content-Type", "application/octet-stream")
         info.set_stream_content(bytes(chunk_data))
@@ -216,7 +223,8 @@ async def last_chunk(
         chunk_data = file.read(end - start + 1)
         info.headers = HeadersCollection()
 
-        info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
+        info.headers.try_add(
+            'Content-Range', f'bytes {start}-{end}/{self.file_size}')
         info.headers.try_add('Content-Length', str(len(chunk_data)))
         info.headers.try_add("Content-Type", "application/octet-stream")
         info.set_stream_content(bytes(chunk_data))
@@ -231,7 +239,8 @@ def get_file(self) -> BytesIO:
 
     async def cancel(self) -> Parsable:
         upload_url = self.get_validated_upload_url(self.upload_session)
-        request_information = RequestInformation(method=Method.DELETE, url_template=upload_url)
+        request_information = RequestInformation(
+            method=Method.DELETE, url_template=upload_url)
 
         await self.request_adapter.send_no_response_content_async(request_information)
 
@@ -254,7 +263,8 @@ def additional_data_contains(self, parsable: Parsable,
             'AdditionalDataHolder'
         )
         if not hasattr(parsable, 'additional_data'):
-            raise ValueError('The object passed does not contain an additional_data property')
+            raise ValueError(
+                'The object passed does not contain an additional_data property')
         additional_data = parsable.additional_data
         for property_candidate in property_candidates:
             if property_candidate in additional_data:
@@ -298,7 +308,8 @@ async def resume(self) -> Future:
 
     def get_validated_upload_url(self, upload_session: Parsable) -> str:
         if not hasattr(upload_session, 'upload_url'):
-            raise RuntimeError('The upload session does not contain a valid upload url')
+            raise RuntimeError(
+                'The upload session does not contain a valid upload url')
         result = upload_session.upload_url
 
         if result is None or result.strip() == '':