Commit fcc0b8d

Undo breaking type changes in LFU and fix errors
1 parent b6fad13 commit fcc0b8d

File tree

2 files changed: +32 -26 lines changed


src/msgraph_core/models/large_file_upload_session.py

Lines changed: 4 additions & 4 deletions
```diff
@@ -1,15 +1,15 @@
 from __future__ import annotations
-from typing import Any, Callable, Dict, List, Optional, Protocol, TypeVar, Generic
+from typing import Any, Callable, Dict, List, Optional
 import datetime
 from dataclasses import dataclass, field
 
 from kiota_abstractions.serialization import (
-    ParseNode, SerializationWriter, ParsableFactory
+    AdditionalDataHolder, Parsable, ParseNode, SerializationWriter
 )
 
 
 @dataclass
-class LargeFileUploadSession(Protocol, ParsableFactory):
+class LargeFileUploadSession(AdditionalDataHolder, Parsable):
 
     additional_data: Dict[str, Any] = field(default_factory=dict)
     expiration_date_time: Optional[datetime.datetime] = None
@@ -31,7 +31,7 @@ def create_from_discriminator_value(
         """
         if not parse_node:
             raise TypeError("parse_node cannot be null.")
-        return LargeFileUploadSession() # type: ignore # Breaking change to remove this method since a Protocol cannot instantiate itself
+        return LargeFileUploadSession()
 
     def get_field_deserializers(self, ) -> Dict[str, Callable[[ParseNode], None]]:
         """
```

src/msgraph_core/tasks/large_file_upload.py

Lines changed: 28 additions & 22 deletions
```diff
@@ -1,5 +1,5 @@
 import os
-from typing import Callable, Optional, List, Tuple, Any, Dict
+from typing import Callable, Optional, List, Tuple, Any, Dict, TypeVar, Union, Type
 from io import BytesIO
 from asyncio import Future
 from datetime import datetime, timedelta, timezone
@@ -8,23 +8,23 @@
 from kiota_abstractions.method import Method
 from kiota_abstractions.headers_collection import HeadersCollection
 from kiota_abstractions.request_information import RequestInformation
-from kiota_abstractions.serialization.additional_data_holder import AdditionalDataHolder
-from kiota_abstractions.serialization.parsable_factory import ParsableFactory
+from kiota_abstractions.serialization import Parsable, ParsableFactory, AdditionalDataHolder
 
 from kiota_abstractions.request_adapter import RequestAdapter
 
 from msgraph_core.models import LargeFileUploadSession, UploadResult # check imports
 
+T = TypeVar('T', bound=Parsable)
 
 # pylint: disable=too-many-instance-attributes
 class LargeFileUploadTask:
 
     def __init__(
         self,
-        upload_session: LargeFileUploadSession,
+        upload_session: Parsable,
         request_adapter: RequestAdapter,
         stream: BytesIO,
-        parsable_factory: Optional[ParsableFactory] = None,
+        parsable_factory: Optional[ParsableFactory[T]] = None,
         max_chunk_size: int = 5 * 1024 * 1024
     ):
         self._upload_session = upload_session
```
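Reverting the upload_session parameter to Parsable keeps the constructor compatible with any Kiota-generated session model, not just msgraph_core's own. A hedged construction sketch (the request adapter and the module path are assumptions, not shown in this diff):

```python
# Sketch only; `adapter` must be an authenticated RequestAdapter instance.
from io import BytesIO

from msgraph_core.models import LargeFileUploadSession
from msgraph_core.tasks.large_file_upload import LargeFileUploadTask  # assumed module path

def build_task(adapter, data: bytes) -> LargeFileUploadTask:
    session = LargeFileUploadSession()  # any Parsable with the session attributes works
    return LargeFileUploadTask(
        upload_session=session,
        request_adapter=adapter,
        stream=BytesIO(data),
        max_chunk_size=5 * 1024 * 1024,  # the default shown above
    )
```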
```diff
@@ -63,12 +63,12 @@ def chunks(self):
     def chunks(self, value):
         self._chunks = value
 
-    def upload_session_expired(self, upload_session: Optional[LargeFileUploadSession] = None) -> bool:
+    def upload_session_expired(self, upload_session: Optional[Parsable] = None) -> bool:
         now = datetime.now(timezone.utc)
         upload_session = upload_session or self.upload_session
         if not hasattr(upload_session, "expiration_date_time"):
             raise ValueError("Upload session does not have an expiration date time")
-        expiry = upload_session.expiration_date_time
+        expiry = getattr(upload_session, 'expiration_date_time')
         if expiry is None:
             raise ValueError("Expiry is None")
         if isinstance(expiry, str):
```
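The switch from attribute access to getattr is for the type checker: Parsable declares no expiration_date_time, so the reverted annotation would otherwise fail static analysis. Behaviour is unchanged, roughly as below (a paraphrase; the string-parsing branch is truncated in this diff, so ISO-8601 parsing is an assumption):

```python
# Paraphrase of upload_session_expired after this commit; not the exact source.
from datetime import datetime, timezone

def session_expired(upload_session) -> bool:
    if not hasattr(upload_session, "expiration_date_time"):
        raise ValueError("Upload session does not have an expiration date time")
    expiry = getattr(upload_session, "expiration_date_time")
    if expiry is None:
        raise ValueError("Expiry is None")
    if isinstance(expiry, str):
        expiry = datetime.fromisoformat(expiry)  # assumed parsing; the diff truncates here
    return expiry <= datetime.now(timezone.utc)
```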
```diff
@@ -91,7 +91,7 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
             raise RuntimeError('The upload session is expired.')
 
         self.on_chunk_upload_complete = after_chunk_upload or self.on_chunk_upload_complete
-        session = await self.next_chunk(
+        session: LargeFileUploadSession = await self.next_chunk(
             self.stream, 0, max(0, min(self.max_chunk_size - 1, self.file_size - 1))
         )
         process_next = session
@@ -112,9 +112,11 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
             lfu_session = session
             if lfu_session is None:
                 continue
-            next_range = lfu_session.next_expected_ranges
+            if hasattr(lfu_session, 'next_expected_ranges'):
+                next_range = lfu_session.next_expected_ranges
             old_url = self.get_validated_upload_url(self.upload_session)
-            lfu_session.upload_url = old_url
+            if hasattr(lfu_session, 'upload_url'):
+                lfu_session.upload_url = old_url
             if self.on_chunk_upload_complete is not None:
                 self.on_chunk_upload_complete(uploaded_range)
             if not next_range:
@@ -131,7 +133,8 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
         self.chunks -= 1
         upload_result: UploadResult[Any] = UploadResult()
         upload_result.item_response = response
-        upload_result.location = self.upload_session.upload_url
+        if hasattr(self.upload_session, 'upload_url'):
+            upload_result.location = self.upload_session.upload_url
         return upload_result
 
     @property
```
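The hunks above all apply one defensive pattern: now that the static type is Parsable, session attributes are read or written only behind hasattr checks. An equivalent, more compact form uses getattr with a default (a style note, not what the commit does):

```python
# getattr-with-default collapses the hasattr-then-read pattern into one expression.
from typing import Any, Optional

def safe_get(obj: Any, name: str) -> Optional[Any]:
    return getattr(obj, name, None)

class FakeSession:  # stand-in for a Parsable session, illustrative only
    upload_url = "https://example.test/upload"

assert safe_get(FakeSession(), "upload_url") == "https://example.test/upload"
assert safe_get(FakeSession(), "next_expected_ranges") is None
```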
```diff
@@ -142,7 +145,7 @@ def next_range(self):
     def next_range(self, value: Optional[str]) -> None:
         self._next_range = value
 
-    async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int = 0) -> Optional[LargeFileUploadSession]:
+    async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int = 0) -> LargeFileUploadSession:
         upload_url = self.get_validated_upload_url(self.upload_session)
         if not upload_url:
             raise ValueError('The upload session URL must not be empty.')
@@ -174,16 +177,15 @@ async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int =
         info.headers.try_add("Content-Type", "application/octet-stream")
         info.set_stream_content(bytes(chunk_data))
         error_map: Dict[str, int] = {}
-        parsable_factory = LargeFileUploadSession
-        return await self.request_adapter.send_async(info, parsable_factory, error_map)
+        return await self.request_adapter.send_async(info, LargeFileUploadSession, error_map)
 
     async def last_chunk(
         self,
         file: BytesIO,
         range_start: int = 0,
         range_end: int = 0,
-        parsable_factory: Optional[ParsableFactory] = None
-    ) -> Future:
+        parsable_factory: Optional[ParsableFactory[T]] = None
+    ) -> Optional[Union[T, bytes]]:
         upload_url = self.get_validated_upload_url(self.upload_session)
         if not upload_url:
             raise ValueError('The upload session URL must not be empty.')
@@ -215,13 +217,15 @@ async def last_chunk(
         info.headers.try_add("Content-Type", "application/octet-stream")
         info.set_stream_content(bytes(chunk_data))
         error_map: Dict[str, int] = {}
-        parsable_factory = self.factory or parsable_factory
-        return await self.request_adapter.send_async(info, parsable_factory, error_map)
+        factory = self.factory or parsable_factory
+        if factory:
+            return await self.request_adapter.send_async(info, factory, error_map)
+        return await self.request_adapter.send_primitive_async(info, "bytes", error_map)
 
     def get_file(self) -> BytesIO:
         return self.stream
 
-    async def cancel(self) -> Optional[Future]:
+    async def cancel(self) -> Parsable:
         upload_url = self.get_validated_upload_url(self.upload_session)
         request_information = RequestInformation(method=Method.DELETE, url_template=upload_url)
 
```
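The rewritten tail of last_chunk fixes two real errors: the old code could hand send_async a None factory when neither self.factory nor the argument was set, and -> Future was the wrong annotation for an async def, which already returns an awaitable. With the fallback in place there are two call paths, sketched below (task is a LargeFileUploadTask as built earlier; item_factory stands in for any Parsable factory):

```python
# Sketch of the two paths through last_chunk after this commit.
async def finish_upload(task, item_factory=None):
    if item_factory is not None:
        # A factory present (here or on task.factory): the final response
        # is deserialized via send_async(info, factory, error_map).
        return await task.last_chunk(task.get_file(), parsable_factory=item_factory)
    # No factory anywhere: the new branch falls back to
    # send_primitive_async(info, "bytes", error_map) and returns raw bytes.
    return await task.last_chunk(task.get_file())
```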
```diff
@@ -237,22 +241,24 @@ async def cancel(self) -> Optional[Future]:
 
         return self.upload_session
 
-    def additional_data_contains(self, parsable: LargeFileUploadSession,
+    def additional_data_contains(self, parsable: Parsable,
                                  property_candidates: List[str]) -> Tuple[bool, Any]:
         if not issubclass(type(parsable), AdditionalDataHolder):
             raise ValueError(
                 f'The object passed does not contain property/properties '
                 f'{",".join(property_candidates)} and does not implement '
                 f'AdditionalDataHolder'
             )
+        if not hasattr(parsable, 'additional_data'):
+            raise ValueError(f'The object passed does not contain an additional_data property')
         additional_data = parsable.additional_data
         for property_candidate in property_candidates:
             if property_candidate in additional_data:
                 return True, additional_data[property_candidate]
         return False, None
 
     def check_value_exists(
-        self, parsable: LargeFileUploadSession, attribute_name: str, property_names_in_additional_data: List[str]
+        self, parsable: Parsable, attribute_name: str, property_names_in_additional_data: List[str]
     ) -> Tuple[bool, Any]:
         checked_additional_data = self.additional_data_contains(
             parsable, property_names_in_additional_data
@@ -286,7 +292,7 @@ async def resume(self) -> Future:
         self.next_range = next_range
         return await self.upload()
 
-    def get_validated_upload_url(self, upload_session: LargeFileUploadSession) -> str:
+    def get_validated_upload_url(self, upload_session: Parsable) -> str:
         if not hasattr(upload_session, 'upload_url'):
             raise RuntimeError('The upload session does not contain a valid upload url')
         result = upload_session.upload_url
```
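Taken together, the commit restores a concrete, instantiable session model and makes the task defensive about any Parsable it is given. An end-to-end wiring sketch (adapter construction and the initial createUploadSession request live outside this commit and are assumed):

```python
# Wiring sketch only; a real run needs an authenticated adapter and a
# session whose upload_url was issued by the service.
from io import BytesIO

from msgraph_core.models import LargeFileUploadSession
from msgraph_core.tasks.large_file_upload import LargeFileUploadTask  # assumed path

async def upload_file(adapter, session: LargeFileUploadSession, data: bytes):
    task = LargeFileUploadTask(session, adapter, BytesIO(data))

    def on_chunk(uploaded_range):  # assumed to receive a (start, end) pair
        print(f"uploaded range: {uploaded_range}")

    result = await task.upload(on_chunk)
    return result.location  # set from the session's upload_url, per the diff
```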
