Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions NEXT_CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@

### Bug Fixes

- Fix an issue where the name of `FilesExt.upload`'s second parameter was unintentionally changed from `contents` to `content`. The interface is now backward compatible with versions prior to 0.69.0.

### Documentation

### Internal Changes
Expand Down
20 changes: 10 additions & 10 deletions databricks/sdk/mixins/files.py
Original file line number Diff line number Diff line change
Expand Up @@ -1064,7 +1064,7 @@ def _get_optimized_performance_parameters_for_upload(
def upload(
self,
file_path: str,
content: BinaryIO,
contents: BinaryIO,
*,
overwrite: Optional[bool] = None,
part_size: Optional[int] = None,
Expand All @@ -1076,7 +1076,7 @@ def upload(

:param file_path: str
The absolute remote path of the target file, e.g. /Volumes/path/to/your/file
:param content: BinaryIO
:param contents: BinaryIO
The contents of the file to upload. This must be a BinaryIO stream.
:param overwrite: bool (optional)
If true, an existing file will be overwritten. When not specified, assumed True.
Expand All @@ -1096,7 +1096,7 @@ def upload(

if self._config.disable_experimental_files_api_client:
_LOG.info("Disable experimental files API client, will use the original upload method.")
super().upload(file_path=file_path, contents=content, overwrite=overwrite)
super().upload(file_path=file_path, contents=contents, overwrite=overwrite)
return UploadStreamResult()

_LOG.debug(f"Uploading file from BinaryIO stream")
Expand All @@ -1107,12 +1107,12 @@ def upload(

# Determine content length if the stream is seekable
content_length = None
if content.seekable():
if contents.seekable():
_LOG.debug(f"Uploading using seekable mode")
# If the stream is seekable, we can read its size.
content.seek(0, os.SEEK_END)
content_length = content.tell()
content.seek(0)
contents.seek(0, os.SEEK_END)
content_length = contents.tell()
contents.seek(0)

# Get optimized part size and batch size based on content length and provided part size
optimized_part_size, optimized_batch_size = self._get_optimized_performance_parameters_for_upload(
Expand All @@ -1135,17 +1135,17 @@ def upload(
)

if ctx.use_parallel:
self._parallel_upload_from_stream(ctx, content)
self._parallel_upload_from_stream(ctx, contents)
return UploadStreamResult()
elif ctx.content_length is not None:
self._upload_single_thread_with_known_size(ctx, content)
self._upload_single_thread_with_known_size(ctx, contents)
return UploadStreamResult()
else:
_LOG.debug(f"Uploading using non-seekable mode")
# If the stream is not seekable, we cannot determine its size.
# We will use a multipart upload.
_LOG.debug(f"Using multipart upload for non-seekable input stream of unknown size for file {file_path}")
self._single_thread_multipart_upload(ctx, content)
self._single_thread_multipart_upload(ctx, contents)
return UploadStreamResult()

def upload_from(
Expand Down
10 changes: 5 additions & 5 deletions tests/test_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -1085,7 +1085,7 @@ def run(self, config: Config, monkeypatch) -> None:
custom_response_download_from_url=CustomResponse(
code=403,
first_invocation=2,
last_invocation=4,
last_invocation=3,
body=PresignedUrlDownloadTestCase.expired_url_aws_response,
),
),
Expand Down Expand Up @@ -1375,17 +1375,17 @@ def processor() -> list:
def upload() -> None:
if source_type == UploadSourceType.FILE:
w.files.upload_from(
self.path,
content_or_source,
file_path=self.path,
source_path=content_or_source,
overwrite=self.overwrite,
part_size=self.multipart_upload_part_size,
use_parallel=use_parallel,
parallelism=self.parallelism,
)
else:
w.files.upload(
self.path,
content_or_source,
file_path=self.path,
contents=content_or_source,
overwrite=self.overwrite,
part_size=self.multipart_upload_part_size,
use_parallel=use_parallel,
Expand Down
Loading