
Commit 6ca9e38

Resolve large file upload type issues
1 parent 9dd5306 commit 6ca9e38

File tree

2 files changed: +12 -13 lines


src/msgraph_core/models/large_file_upload_session.py

Lines changed: 5 additions & 5 deletions
@@ -1,15 +1,15 @@
 from __future__ import annotations
-from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
+from typing import Any, Callable, Dict, List, Optional, Protocol, TypeVar, Generic
 import datetime
 from dataclasses import dataclass, field

 from kiota_abstractions.serialization import (
-    AdditionalDataHolder, Parsable, ParseNode, SerializationWriter
+    ParseNode, SerializationWriter, ParsableFactory
 )


 @dataclass
-class LargeFileUploadSession(AdditionalDataHolder, Parsable):
+class LargeFileUploadSession(Protocol, ParsableFactory):

     additional_data: Dict[str, Any] = field(default_factory=dict)
     expiration_date_time: Optional[datetime.datetime] = None
@@ -25,13 +25,13 @@ def create_from_discriminator_value(
     ) -> LargeFileUploadSession:
         """
         Creates a new instance of the appropriate class based
-        on discriminator value param parse_node: The parse node
+        on discriminator value param parse_node: The parse node
         to use to read the discriminator value and create the object
         Returns: UploadSession
         """
         if not parse_node:
             raise TypeError("parse_node cannot be null.")
-        return LargeFileUploadSession()
+        return LargeFileUploadSession()  # type: ignore # Breaking change to remove this method since a Protocol cannot instantiate itself

     def get_field_deserializers(self, ) -> Dict[str, Callable[[ParseNode], None]]:
         """

src/msgraph_core/tasks/large_file_upload.py

Lines changed: 7 additions & 8 deletions
@@ -5,7 +5,6 @@
 from datetime import datetime, timedelta, timezone
 import logging

-from kiota_abstractions.serialization.parsable import Parsable
 from kiota_abstractions.method import Method
 from kiota_abstractions.headers_collection import HeadersCollection
 from kiota_abstractions.request_information import RequestInformation
@@ -22,7 +21,7 @@ class LargeFileUploadTask:

     def __init__(
         self,
-        upload_session: Parsable,
+        upload_session: LargeFileUploadSession,
         request_adapter: RequestAdapter,
         stream: BytesIO,
         parsable_factory: Optional[ParsableFactory] = None,
@@ -64,7 +63,7 @@ def chunks(self):
     def chunks(self, value):
         self._chunks = value

-    def upload_session_expired(self, upload_session: Optional[Parsable] = None) -> bool:
+    def upload_session_expired(self, upload_session: Optional[LargeFileUploadSession] = None) -> bool:
         now = datetime.now(timezone.utc)
         upload_session = upload_session or self.upload_session
         if not hasattr(upload_session, "expiration_date_time"):
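
Note: upload_session_expired keeps its duck-typed guard, requiring only that the argument carry an expiration_date_time attribute before comparing it against a timezone-aware now. A rough sketch of that pattern, assuming an aware datetime and a hypothetical fallback for the missing-attribute case (the SDK's exact behavior past the guard is not shown in this hunk):

    import datetime
    from typing import Any

    def session_expired(session: Any) -> bool:
        # Duck-typed guard, as in the task: any session-shaped object works.
        if not hasattr(session, "expiration_date_time"):
            return False  # hypothetical; substitute the SDK's real handling
        expiry = session.expiration_date_time
        if expiry is None:
            return False
        # Compare against an aware "now" so naive/aware mismatches can't slip in.
        return expiry <= datetime.datetime.now(datetime.timezone.utc)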
@@ -110,7 +109,7 @@ async def upload(self, after_chunk_upload: Optional[Callable] = None):
                 response = await self.last_chunk(self.stream)

             try:
-                lfu_session: LargeFileUploadSession = session  # type: ignore
+                lfu_session = session
                 if lfu_session is None:
                     continue
                 next_range = lfu_session.next_expected_ranges
@@ -143,7 +142,7 @@ def next_range(self):
     def next_range(self, value: Optional[str]) -> None:
         self._next_range = value

-    async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int = 0) -> Future:
+    async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int = 0) -> Optional[LargeFileUploadSession]:
         upload_url = self.get_validated_upload_url(self.upload_session)
         if not upload_url:
             raise ValueError('The upload session URL must not be empty.')
@@ -238,7 +237,7 @@ async def cancel(self) -> Optional[Future]:

         return self.upload_session

-    def additional_data_contains(self, parsable: Parsable,
+    def additional_data_contains(self, parsable: LargeFileUploadSession,
                                  property_candidates: List[str]) -> Tuple[bool, Any]:
         if not issubclass(type(parsable), AdditionalDataHolder):
             raise ValueError(
@@ -253,7 +252,7 @@ def additional_data_contains(self, parsable: Parsable,
         return False, None

     def check_value_exists(
-        self, parsable: Parsable, attribute_name: str, property_names_in_additional_data: List[str]
+        self, parsable: LargeFileUploadSession, attribute_name: str, property_names_in_additional_data: List[str]
     ) -> Tuple[bool, Any]:
         checked_additional_data = self.additional_data_contains(
             parsable, property_names_in_additional_data
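
Note: additional_data_contains and check_value_exists together implement a two-step lookup: prefer a declared attribute on the model, then fall back to candidate keys in the additional_data bag that AdditionalDataHolder carries. A hedged sketch of that lookup order (the helper name is illustrative, not the SDK's):

    from typing import Any, Dict, List, Tuple

    def find_value(obj: Any, attribute_name: str, candidates: List[str]) -> Tuple[bool, Any]:
        # 1. A typed attribute wins when present and set.
        value = getattr(obj, attribute_name, None)
        if value is not None:
            return True, value
        # 2. Otherwise scan the untyped additional_data dict for candidate keys.
        additional_data: Dict[str, Any] = getattr(obj, "additional_data", {})
        for key in candidates:
            if key in additional_data:
                return True, additional_data[key]
        return False, None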
@@ -287,7 +286,7 @@ async def resume(self) -> Future:
         self.next_range = next_range
         return await self.upload()

-    def get_validated_upload_url(self, upload_session: Parsable) -> str:
+    def get_validated_upload_url(self, upload_session: LargeFileUploadSession) -> str:
         if not hasattr(upload_session, 'upload_url'):
             raise RuntimeError('The upload session does not contain a valid upload url')
         result = upload_session.upload_url
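
Note: with these signatures, the task can now be constructed against anything session-shaped. A hypothetical end-to-end wiring, assuming an upload session and request adapter have already been obtained (session creation is outside this commit):

    from io import BytesIO
    from msgraph_core.tasks.large_file_upload import LargeFileUploadTask

    async def run(upload_session, request_adapter) -> None:
        stream = BytesIO(b"bytes of the large file")
        task = LargeFileUploadTask(upload_session, request_adapter, stream)
        if task.upload_session_expired():
            raise RuntimeError("Upload session expired; request a new one first.")
        await task.upload()  # optionally pass after_chunk_upload=<callable>

    # Drive it with e.g. asyncio.run(run(session, adapter)) once real objects exist.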
