Skip to content

Commit 405ff07

Browse files
committed
fix code formatting
1 parent 0d5d0d7 commit 405ff07

File tree

2 files changed

+28
-15
lines changed

2 files changed

+28
-15
lines changed

src/msgraph_core/models/large_file_upload_session.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
from dataclasses import dataclass, field
2-
from typing import List, Dict, Optional
2+
from typing import List, Dict, Optional, Any
33
from datetime import datetime
44

55
from kiota_abstractions.serialization.additional_data_holder import AdditionalDataHolder
@@ -14,7 +14,7 @@ def __init__(
1414
self,
1515
upload_url: Optional[str] = None,
1616
expiration_date_time: Optional[datetime] = None,
17-
additional_data: Optional[List[Dict[str, any]]] = None,
17+
additional_data: Optional[List[Dict[str, Any]]] = None,
1818
is_cancelled: Optional[bool] = False,
1919
next_expected_ranges: Optional[List[str]] = None
2020
):
@@ -79,7 +79,7 @@ def serialize(self, writer: SerializationWriter) -> None:
7979
)
8080
writer.write_additional_data_value(self.additional_data)
8181

82-
def get_field_deserializers(self) -> Dict[str, any]:
82+
def get_field_deserializers(self) -> Dict[str, Any]:
8383
return {
8484
'upload_url':
8585
lambda parse_node: setattr(self, 'upload_url', parse_node.get_str_value()),

src/msgraph_core/tasks/large_file_upload.py

Lines changed: 25 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -25,8 +25,8 @@ def __init__(
2525
):
2626
if not isinstance(upload_session, LargeFileUploadSession):
2727
raise TypeError("upload_session must be an instance of LargeFileUploadSession")
28-
self.upload_session = upload_session
29-
self.request_adapter = request_adapter
28+
self._upload_session = upload_session
29+
self._request_adapter = request_adapter
3030
self.stream = stream
3131
self.file_size = stream.getbuffer().nbytes
3232
self.max_chunk_size = max_chunk_size
@@ -35,11 +35,15 @@ def __init__(
3535
)
3636
self.next_range = cleaned_value[0]
3737
self._chunks = int((self.file_size / max_chunk_size) + 0.5)
38-
self.on_chunk_upload_complete: Optional[Callable[[int, int], None]] = None
38+
self.on_chunk_upload_complete: Optional[Callable[[List[int]], None]] = None
3939

4040
@property
41-
def upload_session(self) -> Parsable:
42-
return self.upload_session
41+
def upload_session(self):
42+
return self._upload_session
43+
44+
@upload_session.setter
45+
def upload_session(self, value):
46+
self._upload_session = value
4347

4448
@staticmethod
4549
async def create_upload_session(request_adapter: RequestAdapter, request_body, url: str):
@@ -59,14 +63,24 @@ async def create_upload_session(request_adapter: RequestAdapter, request_body, u
5963
)
6064

6165
@property
62-
def request_adapter(self) -> RequestAdapter:
63-
return self.request_adapter
66+
def request_adapter(self):
67+
return self._request_adapter
68+
69+
@request_adapter.setter
70+
def request_adapter(self, value):
71+
self._request_adapter = value
6472

6573
@property
66-
def chunks(self) -> int:
74+
def chunks(self):
6775
return self._chunks
6876

69-
def upload_session_expired(self, upload_session: LargeFileUploadSession = None) -> bool:
77+
@chunks.setter
78+
def chunks(self, value):
79+
self._chunks = value
80+
81+
def upload_session_expired(
82+
self, upload_session: Optional[LargeFileUploadSession] = None
83+
) -> bool:
7084
now = datetime.now()
7185
upload_session = upload_session or self.upload_session
7286
if not hasattr(upload_session, "expiration_date_time"):
@@ -120,7 +134,7 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
120134
end = min(int(range_parts[0]) + self.max_chunk_size, self.file_size)
121135
uploaded_range = [range_parts[0], end]
122136
self.next_range = next_range[0] + "-"
123-
process_next = self.next_chunk(self.stream)
137+
process_next = await self.next_chunk(self.stream)
124138
except Exception as error:
125139
logging.error(f"Error uploading chunk {error}")
126140
raise # remove after manual testing
@@ -161,7 +175,6 @@ async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int =
161175
file.seek(start)
162176
end = min(end, self.max_chunk_size + start)
163177
chunk_data = file.read(end - start + 1)
164-
print(f"Chunk data {chunk_data}")
165178
info.headers = HeadersCollection()
166179

167180
info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
@@ -170,7 +183,7 @@ async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int =
170183
info.set_stream_content(BytesIO(chunk_data))
171184
error_map: Dict[str, int] = {}
172185

173-
parsable_factory: LargeFileUploadSession[Any] = self.upload_session
186+
parsable_factory: LargeFileUploadSession = self.upload_session
174187

175188
return await self.request_adapter.send_async(info, parsable_factory, error_map)
176189

0 commit comments

Comments (0)