
Commit 4aed868

Fixed upload session and upload task

1 parent 3b2441d · commit 4aed868

2 files changed: +50 −87 lines changed
Lines changed: 49 additions & 84 deletions
```diff
@@ -1,98 +1,63 @@
+from __future__ import annotations
+from typing import Any, Callable, Dict, List, Optional, TYPE_CHECKING, Union
+import datetime
 from dataclasses import dataclass, field
-from typing import List, Dict, Optional, Any
-from datetime import datetime
 
-from kiota_abstractions.serialization.additional_data_holder import AdditionalDataHolder
-from kiota_abstractions.serialization.parsable import Parsable
-from kiota_abstractions.serialization.parse_node import ParseNode
-from kiota_abstractions.serialization.serialization_writer import SerializationWriter
+from kiota_abstractions.serialization import AdditionalDataHolder, Parsable, ParseNode, SerializationWriter
 
 
-class LargeFileUploadSession(Parsable, AdditionalDataHolder):
+@dataclass
+class LargeFileUploadSession(AdditionalDataHolder, Parsable):
 
-    def __init__(
-        self,
-        upload_url: Optional[str] = None,
-        expiration_date_time: Optional[datetime] = None,
-        additional_data: Optional[List[Dict[str, Any]]] = None,
-        is_cancelled: Optional[bool] = False,
-        next_expected_ranges: Optional[List[str]] = None
-    ):
-        self._upload_url = upload_url
-        self._expiration_date_time = expiration_date_time
-        self.additional_data = additional_data if additional_data is not None else {}
-        self.is_cancelled = is_cancelled
-        self.next_expected_ranges = next_expected_ranges if next_expected_ranges is not None else []
-
-    @property
-    def upload_url(self):
-        return self._upload_url
-
-    @upload_url.setter
-    def upload_url(self, value):
-        self._upload_url = value
-
-    @property
-    def expiration_date_time(self):
-        return self._expiration_date_time
-
-    @expiration_date_time.setter
-    def expiration_date_time(self, value):
-        self._expiration_date_time = value
-
-    @property
-    def additional_data(self):
-        return self._additional_data
-
-    @additional_data.setter
-    def additional_data(self, value):
-        self._additional_data = value if value is not None else []
-
-    @property
-    def is_cancelled(self):
-        return self._is_cancelled
-
-    @is_cancelled.setter
-    def is_cancelled(self, value):
-        self._is_cancelled = value
-
-    @property
-    def next_expected_ranges(self):
-        return self._next_expected_ranges
-
-    @next_expected_ranges.setter
-    def next_expected_ranges(self, value):
-        self._next_expected_ranges = value if value is not None else []
+    additional_data: Dict[str, Any] = field(default_factory=dict)
+    expiration_date_time: Optional[datetime.datetime] = None
+    next_expected_ranges: Optional[List[str]] = None
+    is_cancelled: Optional[bool] = False
+    odata_type: Optional[str] = None
+    # The URL endpoint that accepts PUT requests for byte ranges of the file.
+    upload_url: Optional[str] = None
 
     @staticmethod
     def create_from_discriminator_value(
         parse_node: Optional[ParseNode] = None
-    ) -> Optional['LargeFileUploadSession']:
+    ) -> LargeFileUploadSession:
+        """
+        Creates a new instance of the appropriate class based on the discriminator value.
+        param parse_node: The parse node to use to read the discriminator value and create the object
+        Returns: LargeFileUploadSession
+        """
         if not parse_node:
-            return None
+            raise TypeError("parse_node cannot be null.")
         return LargeFileUploadSession()
 
+    def get_field_deserializers(self) -> Dict[str, Callable[[ParseNode], None]]:
+        """
+        The deserialization information for the current model.
+        Returns: Dict[str, Callable[[ParseNode], None]]
+        """
+        fields: Dict[str, Callable[[Any], None]] = {
+            "expirationDateTime":
+            lambda n: setattr(self, 'expiration_date_time', n.get_datetime_value()),
+            "nextExpectedRanges":
+            lambda n:
+            setattr(self, 'next_expected_ranges', n.get_collection_of_primitive_values(str)),
+            "@odata.type":
+            lambda n: setattr(self, 'odata_type', n.get_str_value()),
+            "uploadUrl":
+            lambda n: setattr(self, 'upload_url', n.get_str_value()),
+        }
+        return fields
+
     def serialize(self, writer: SerializationWriter) -> None:
-        writer.write_str_value('upload_url', self.upload_url)
-        writer.write_datetime_value('expiration_date_time', self.expiration_date_time)
-        writer.write_bool_value('is_cancelled', self.is_cancelled)
-        writer.write_collection_of_primitive_values(
-            'next_expected_ranges', self.next_expected_ranges
-        )
+        """
+        Serializes information about the current object.
+        param writer: Serialization writer to use to serialize this model
+        Returns: None
+        """
+        if not writer:
+            raise TypeError("writer cannot be null.")
+        writer.write_datetime_value("expirationDateTime", self.expiration_date_time)
+        writer.write_collection_of_primitive_values("nextExpectedRanges", self.next_expected_ranges)
+        writer.write_str_value("@odata.type", self.odata_type)
+        writer.write_str_value("uploadUrl", self.upload_url)
         writer.write_additional_data_value(self.additional_data)
-
-    def get_field_deserializers(self) -> Dict[str, Any]:
-        return {
-            'upload_url':
-            lambda parse_node: setattr(self, 'upload_url', parse_node.get_str_value()),
-            'expiration_date_time':
-            lambda parse_node:
-            setattr(self, 'expiration_date_time', parse_node.get_datetime_value()),
-            'is_cancelled':
-            lambda parse_node: setattr(self, 'is_cancelled', parse_node.get_bool_value()),
-            'next_expected_ranges':
-            lambda parse_node: setattr(
-                self, 'next_expected_ranges', parse_node.
-                get_collection_of_primitive_values('string')
-            )
-        }
```
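The rewritten model keeps the standard Kiota round trip: `serialize` writes the camelCase wire keys and `get_field_deserializers` maps them back onto the dataclass fields. A minimal round-trip sketch, assuming the `kiota-serialization-json` package is installed; the URL and date are illustrative values only, not anything from this commit:

```python
import datetime

from kiota_serialization_json.json_parse_node_factory import JsonParseNodeFactory
from kiota_serialization_json.json_serialization_writer import JsonSerializationWriter

# Assumes LargeFileUploadSession is imported from the module changed above.
session = LargeFileUploadSession(
    upload_url="https://example.com/upload",  # hypothetical pre-authenticated URL
    expiration_date_time=datetime.datetime(2030, 1, 1, tzinfo=datetime.timezone.utc),
    next_expected_ranges=["0-"],
)

writer = JsonSerializationWriter()
session.serialize(writer)
payload = writer.get_serialized_content()  # bytes; wire keys are camelCase, e.g. "uploadUrl"

# Parsing back exercises create_from_discriminator_value and the deserializer map.
root = JsonParseNodeFactory().get_root_parse_node("application/json", payload)
parsed = root.get_object_value(LargeFileUploadSession)
assert parsed.upload_url == session.upload_url
assert parsed.next_expected_ranges == ["0-"]
```

Note that `is_cancelled` is a client-side flag in this version: it is neither written by `serialize` nor read by `get_field_deserializers`.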

src/msgraph_core/tasks/large_file_upload.py

Lines changed: 1 addition & 3 deletions
```diff
@@ -23,7 +23,7 @@ def __init__(
         upload_session: LargeFileUploadSession,
         request_adapter: RequestAdapter,
         stream: BytesIO,  # counter check this
-        max_chunk_size: int = 1024  # 4 * 1024 * 1024 - use smaller chunks for testing
+        max_chunk_size: int = 409600  # 4 * 1024 * 1024 - use smaller chunks for testing
     ):
         if not isinstance(upload_session, LargeFileUploadSession):
             raise TypeError("upload_session must be an instance of LargeFileUploadSession")
```
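For scale: the new default of 409600 bytes is 400 KiB, while the commented-out production value `4 * 1024 * 1024` is 4 MiB. A hedged construction sketch follows; the task class name is not visible in this hunk, so `LargeFileUploadTask`, `session`, and `request_adapter` are assumed names:

```python
from io import BytesIO

# `session` would come from the createUploadSession response and
# `request_adapter` from an authenticated Graph client; both are assumed here.
stream = BytesIO(b"\x00" * (1024 * 1024))  # 1 MiB of placeholder content

task = LargeFileUploadTask(
    upload_session=session,
    request_adapter=request_adapter,
    stream=stream,
    max_chunk_size=4 * 1024 * 1024,  # production-size chunks; the 409600 default suits tests
)
```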
```diff
@@ -182,12 +182,10 @@ async def next_chunk(self, file: BytesIO, range_start: int = 0, range_end: int =
         end = min(end, self.max_chunk_size + start)
         chunk_data = file.read(end - start + 1)
         info.headers = HeadersCollection()
-        access_token = "<pending auth fix>"
 
         info.headers.try_add('Content-Range', f'bytes {start}-{end}/{self.file_size}')
         info.headers.try_add('Content-Length', str(len(chunk_data)))
         info.headers.try_add("Content-Type", "application/octet-stream")
-        info.headers.try_add("Authorization", f"Bearer {access_token}")
         info.set_stream_content(bytes(chunk_data))  # Convert chunk_data to bytes
         error_map: Dict[str, int] = {}
         parsable_factory = LargeFileUploadSession
```
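Dropping the hardcoded `Authorization` header leaves authentication to the request adapter (Graph upload-session URLs are typically pre-authenticated). The surviving header logic builds inclusive byte ranges, so a chunk covering bytes `start` through `end` carries `end - start + 1` bytes. A standalone sketch of that arithmetic, with a 1000-byte file and 400-byte chunk size assumed for illustration:

```python
# Illustrates the Content-Range values next_chunk produces for each chunk.
file_size = 1000
max_chunk_size = 400

start = 0
while start < file_size:
    end = min(start + max_chunk_size - 1, file_size - 1)
    length = end - start + 1  # inclusive range, matching file.read(end - start + 1)
    print(f"Content-Range: bytes {start}-{end}/{file_size}  (Content-Length: {length})")
    start = end + 1
# Content-Range: bytes 0-399/1000  (Content-Length: 400)
# Content-Range: bytes 400-799/1000  (Content-Length: 400)
# Content-Range: bytes 800-999/1000  (Content-Length: 200)
```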
