Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
* Declare official support of Python 3.12
* Cache-Control option when uploading files
* Add `--retry-for` parameter that keeps retrying for the given period (in seconds) when an upload or download fails. If it is not given, 5 retries are performed.

### Infrastructure
* Remove unsupported PyPy 3.7 from tests matrix and add PyPy 3.10 instead
Expand Down
8 changes: 4 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -57,8 +57,8 @@ b2 create-key [-h] [--bucket BUCKET] [--namePrefix NAMEPREFIX] [--duration DURAT
b2 delete-bucket [-h] bucketName
b2 delete-file-version [-h] [fileName] fileId
b2 delete-key [-h] applicationKeyId
b2 download-file-by-id [-h] [--noProgress] [--threads THREADS] [--sourceServerSideEncryption {SSE-C}] [--sourceServerSideEncryptionAlgorithm {AES256}] [--write-buffer-size BYTES] [--skip-hash-verification] [--max-download-streams-per-file MAX_DOWNLOAD_STREAMS_PER_FILE] fileId localFileName
b2 download-file-by-name [-h] [--noProgress] [--threads THREADS] [--sourceServerSideEncryption {SSE-C}] [--sourceServerSideEncryptionAlgorithm {AES256}] [--write-buffer-size BYTES] [--skip-hash-verification] [--max-download-streams-per-file MAX_DOWNLOAD_STREAMS_PER_FILE] bucketName b2FileName localFileName
b2 download-file-by-id [-h] [--noProgress] [--threads THREADS] [--retry-for SECONDS] [--sourceServerSideEncryption {SSE-C}] [--sourceServerSideEncryptionAlgorithm {AES256}] [--write-buffer-size BYTES] [--skip-hash-verification] [--max-download-streams-per-file MAX_DOWNLOAD_STREAMS_PER_FILE] fileId localFileName
b2 download-file-by-name [-h] [--noProgress] [--threads THREADS] [--retry-for SECONDS] [--sourceServerSideEncryption {SSE-C}] [--sourceServerSideEncryptionAlgorithm {AES256}] [--write-buffer-size BYTES] [--skip-hash-verification] [--max-download-streams-per-file MAX_DOWNLOAD_STREAMS_PER_FILE] bucketName b2FileName localFileName
b2 get-account-info [-h]
b2 get-bucket [-h] [--showSize] bucketName
b2 get-file-info [-h] fileId
Expand All @@ -73,9 +73,9 @@ b2 ls [-h] [--long] [--json] [--replication] [--versions] [--recursive] [--withW
b2 rm [-h] [--dryRun] [--threads THREADS] [--queueSize QUEUESIZE] [--noProgress] [--failFast] [--versions] [--recursive] [--withWildcard] bucketName [folderName]
b2 make-url [-h] fileId
b2 make-friendly-url [-h] bucketName fileName
b2 sync [-h] [--noProgress] [--dryRun] [--allowEmptySource] [--excludeAllSymlinks] [--threads THREADS] [--syncThreads SYNCTHREADS] [--downloadThreads DOWNLOADTHREADS] [--uploadThreads UPLOADTHREADS] [--compareVersions {none,modTime,size}] [--compareThreshold MILLIS] [--excludeRegex REGEX] [--includeRegex REGEX] [--excludeDirRegex REGEX] [--excludeIfModifiedAfter TIMESTAMP] [--destinationServerSideEncryption {SSE-B2,SSE-C}] [--destinationServerSideEncryptionAlgorithm {AES256}] [--sourceServerSideEncryption {SSE-C}] [--sourceServerSideEncryptionAlgorithm {AES256}] [--write-buffer-size BYTES] [--skip-hash-verification] [--max-download-streams-per-file MAX_DOWNLOAD_STREAMS_PER_FILE] [--incrementalMode] [--skipNewer | --replaceNewer] [--delete | --keepDays DAYS] source destination
b2 sync [-h] [--noProgress] [--dryRun] [--allowEmptySource] [--excludeAllSymlinks] [--retry-for SECONDS] [--threads THREADS] [--syncThreads SYNCTHREADS] [--downloadThreads DOWNLOADTHREADS] [--uploadThreads UPLOADTHREADS] [--compareVersions {none,modTime,size}] [--compareThreshold MILLIS] [--excludeRegex REGEX] [--includeRegex REGEX] [--excludeDirRegex REGEX] [--excludeIfModifiedAfter TIMESTAMP] [--destinationServerSideEncryption {SSE-B2,SSE-C}] [--destinationServerSideEncryptionAlgorithm {AES256}] [--sourceServerSideEncryption {SSE-C}] [--sourceServerSideEncryptionAlgorithm {AES256}] [--write-buffer-size BYTES] [--skip-hash-verification] [--max-download-streams-per-file MAX_DOWNLOAD_STREAMS_PER_FILE] [--incrementalMode] [--skipNewer | --replaceNewer] [--delete | --keepDays DAYS] source destination
b2 update-bucket [-h] [--bucketInfo BUCKETINFO] [--corsRules CORSRULES] [--lifecycleRules LIFECYCLERULES] [--defaultRetentionMode {compliance,governance,none}] [--defaultRetentionPeriod period] [--replication REPLICATION] [--fileLockEnabled] [--defaultServerSideEncryption {SSE-B2,none}] [--defaultServerSideEncryptionAlgorithm {AES256}] bucketName [{allPublic,allPrivate}]
b2 upload-file [-h] [--noProgress] [--quiet] [--contentType CONTENTTYPE] [--minPartSize MINPARTSIZE] [--sha1 SHA1] [--threads THREADS] [--info INFO] [--custom-upload-timestamp CUSTOM_UPLOAD_TIMESTAMP] [--destinationServerSideEncryption {SSE-B2,SSE-C}] [--destinationServerSideEncryptionAlgorithm {AES256}] [--legalHold {on,off}] [--fileRetentionMode {compliance,governance}] [--retainUntil TIMESTAMP] [--incrementalMode] bucketName localFilePath b2FileName
b2 upload-file [-h] [--noProgress] [--quiet] [--contentType CONTENTTYPE] [--minPartSize MINPARTSIZE] [--sha1 SHA1] [--threads THREADS] [--retry-for SECONDS] [--info INFO] [--custom-upload-timestamp CUSTOM_UPLOAD_TIMESTAMP] [--destinationServerSideEncryption {SSE-B2,SSE-C}] [--destinationServerSideEncryptionAlgorithm {AES256}] [--legalHold {on,off}] [--fileRetentionMode {compliance,governance}] [--retainUntil TIMESTAMP] [--incrementalMode] bucketName localFilePath b2FileName
b2 update-file-legal-hold [-h] [fileName] fileId {on,off}
b2 update-file-retention [-h] [--retainUntil TIMESTAMP] [--bypassGovernance] [fileName] fileId {governance,compliance,none}
b2 replication-setup [-h] [--destination-profile DESTINATION_PROFILE] [--name NAME] [--priority PRIORITY] [--file-name-prefix PREFIX] [--include-existing-files] SOURCE_BUCKET_NAME DESTINATION_BUCKET_NAME
Expand Down
22 changes: 20 additions & 2 deletions b2/console_tool.py
Original file line number Diff line number Diff line change
Expand Up @@ -506,6 +506,7 @@ class UploadModeMixin(Described):
@classmethod
def _setup_parser(cls, parser):
    # --incrementalMode is a boolean flag: absent -> False, present -> True.
    parser.add_argument('--incrementalMode', action='store_true')
    # Let the next mixin/command in the MRO register its own arguments.
    super()._setup_parser(parser)  # noqa

@staticmethod
def _get_upload_mode_from_args(args):
Expand All @@ -514,6 +515,17 @@ def _get_upload_mode_from_args(args):
return UploadMode.FULL


class RetryMixin(Described):
    """
    Use --retry-for to set the maximum time period, in seconds, for which to retry when an upload or download fails.
    """

    @classmethod
    def _setup_parser(cls, parser):
        # Parsed value is exposed as args.retry_for (argparse maps '-' to '_').
        parser.add_argument('--retry-for', type=int, metavar='SECONDS')
        # Continue parser setup along the cooperative mixin chain.
        super()._setup_parser(parser)  # noqa


class Command(Described):
# Set to True for commands that receive sensitive information in arguments
FORBID_LOGGING_ARGUMENTS = False
Expand Down Expand Up @@ -1275,7 +1287,7 @@ def _print_file_attribute(self, label, value):
@B2.register_subcommand
class DownloadFileById(
SourceSseMixin, WriteBufferSizeMixin, SkipHashVerificationMixin, MaxDownloadStreamsMixin,
DownloadCommand
RetryMixin, DownloadCommand
):
"""
Downloads the given file, and stores it in the given local file.
Expand Down Expand Up @@ -1324,6 +1336,7 @@ class DownloadFileByName(
WriteBufferSizeMixin,
SkipHashVerificationMixin,
MaxDownloadStreamsMixin,
RetryMixin,
DownloadCommand,
):
"""
Expand Down Expand Up @@ -2157,6 +2170,7 @@ class Sync(
SkipHashVerificationMixin,
MaxDownloadStreamsMixin,
UploadModeMixin,
RetryMixin,
Command,
):
"""
Expand Down Expand Up @@ -2600,7 +2614,8 @@ def run(self, args):

@B2.register_subcommand
class UploadFile(
DestinationSseMixin, LegalHoldMixin, FileRetentionSettingMixin, UploadModeMixin, Command
DestinationSseMixin, LegalHoldMixin, FileRetentionSettingMixin, UploadModeMixin, RetryMixin,
Command
):
"""
Uploads one file to the given bucket. Uploads the contents
Expand Down Expand Up @@ -3409,6 +3424,9 @@ def run_command(self, argv):
if 'max_download_streams_per_file' in args:
kwargs['max_download_streams_per_file'] = args.max_download_streams_per_file

if 'retry_for' in args and args.retry_for:
kwargs['retry_time'] = datetime.timedelta(seconds=args.retry_for)

self.api = _get_b2api_for_profile(**kwargs)

b2_command = B2(self)
Expand Down
42 changes: 32 additions & 10 deletions test/unit/test_console_tool.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import pathlib
import re
import unittest.mock as mock
from datetime import timedelta
from io import StringIO
from itertools import chain, product
from typing import List, Optional
Expand Down Expand Up @@ -871,8 +872,10 @@ def test_files(self):
# Download file by ID. (Same expected output as downloading by name)
local_download2 = os.path.join(temp_dir, 'download2.txt')
self._run_command(
['download-file-by-id', '--noProgress', '9999', local_download2], expected_stdout,
'', 0
[
'download-file-by-id', '--noProgress', '--retry-for', '10', '9999',
local_download2
], expected_stdout, '', 0
)
self.assertEqual(b'hello world', self._read_file(local_download2))

Expand Down Expand Up @@ -1141,7 +1144,10 @@ def _test_download_threads(self, download_by, num_threads):
with TempDir() as temp_dir:
local_file = self._make_local_file(temp_dir, 'file.txt')
self._run_command(
['upload-file', '--noProgress', 'my-bucket', local_file, 'file.txt'],
[
'upload-file', '--noProgress', '--retry-for', '10', 'my-bucket', local_file,
'file.txt'
],
remove_version=True,
)

Expand Down Expand Up @@ -1200,7 +1206,10 @@ def test_copy_file_by_id(self):
}

self._run_command(
['upload-file', '--noProgress', 'my-bucket', local_file1, 'file1.txt'],
[
'upload-file', '--noProgress', '--retry-for', '10', 'my-bucket', local_file1,
'file1.txt'
],
expected_json_in_stdout=expected_json,
remove_version=True,
expected_part_of_stdout=expected_stdout,
Expand Down Expand Up @@ -1255,8 +1264,8 @@ def test_copy_file_by_id(self):
local_download1 = os.path.join(temp_dir, 'file1_copy.txt')
self._run_command(
[
'download-file-by-name', '--noProgress', 'my-bucket', 'file1_copy.txt',
local_download1
'download-file-by-name', '--noProgress', '--retry-for', '10', 'my-bucket',
'file1_copy.txt', local_download1
]
)
self.assertEqual(b'lo wo', self._read_file(local_download1))
Expand Down Expand Up @@ -1431,8 +1440,8 @@ def test_upload_large_file(self):

self._run_command(
[
'upload-file', '--noProgress', '--threads', '5', 'my-bucket', file_path,
'test.txt'
'upload-file', '--noProgress', '--retry-for', '10', '--threads', '5',
'my-bucket', file_path, 'test.txt'
],
expected_json_in_stdout=expected_json,
remove_version=True,
Expand Down Expand Up @@ -1495,6 +1504,8 @@ def test_upload_incremental(self):
incremental_upload_params = [
'upload-file',
'--noProgress',
'--retry-for',
'10',
'--threads',
'5',
'--incrementalMode',
Expand Down Expand Up @@ -1884,7 +1895,7 @@ def test_sync(self):
upload test.txt
'''

command = ['sync', '--noProgress', temp_dir, 'b2://my-bucket']
command = ['sync', '--noProgress', '--retry-for', '10', temp_dir, 'b2://my-bucket']
self._run_command(command, expected_stdout, '', 0)

def test_sync_empty_folder_when_not_enabled(self):
Expand Down Expand Up @@ -1915,7 +1926,9 @@ def test_sync_dry_run(self):
expected_stdout = '''
upload test-dry-run.txt
'''
command = ['sync', '--noProgress', '--dryRun', temp_dir, 'b2://my-bucket']
command = [
'sync', '--noProgress', '--retry-for', '10', '--dryRun', temp_dir, 'b2://my-bucket'
]
self._run_command(command, expected_stdout, '', 0)

# file should not have been uploaded
Expand Down Expand Up @@ -2380,10 +2393,12 @@ def test_passing_api_parameters(self):
'--write-buffer-size': 123,
'--skip-hash-verification': None,
'--max-download-streams-per-file': 8,
'--retry-for': 30,
},
{
'--write-buffer-size': 321,
'--max-download-streams-per-file': 7,
'--retry-for': 900,
},
]
for command, params in product(commands, parameters):
Expand All @@ -2396,9 +2411,16 @@ def test_passing_api_parameters(self):
args = list(map(str, filter(None, chain.from_iterable(params.items()))))
console_tool.run_command(command + args)

retry_time = timedelta(seconds=params['--retry-for'])

download_manager = console_tool.api.services.download_manager
assert download_manager.write_buffer_size == params['--write-buffer-size']
assert download_manager.check_hash is ('--skip-hash-verification' not in params)
for strategy in download_manager.strategies:
assert strategy._retry_time == retry_time

upload_manager = console_tool.api.services.upload_manager
assert upload_manager.retry_counter.retry_time == retry_time

parallel_strategy = one(
strategy for strategy in download_manager.strategies
Expand Down