
Commit a214e84

Make the maximum retries a provider setting
During local testing, all 429 failures succeeded on the first retry. The issue turned out to be "namespace lock contention" rather than "too many requests". It is reasonable to set the default maximum to 2 retries per upload.
1 parent 89ff7be


2 files changed: 4 additions, 1 deletion

waterbutler/providers/dropbox/provider.py

Lines changed: 2 additions & 1 deletion
@@ -61,6 +61,7 @@ class DropboxProvider(provider.BaseProvider):
     BASE_URL = pd_settings.BASE_URL
     CONTIGUOUS_UPLOAD_SIZE_LIMIT = pd_settings.CONTIGUOUS_UPLOAD_SIZE_LIMIT
     CHUNK_SIZE = pd_settings.CHUNK_SIZE
+    MAX_429_RETRIES = pd_settings.MAX_429_RETRIES

     def __init__(self, auth, credentials, settings):
         super().__init__(auth, credentials, settings)
@@ -311,7 +312,7 @@ async def _contiguous_upload(self,
         chunk = await stream.read()

         rate_limit_retry = 0
-        while rate_limit_retry < 2:
+        while rate_limit_retry < self.MAX_429_RETRIES:
             file_stream = streams.FileStreamReader(file_cache)
             resp = await self.make_request(
                 'POST',
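
For context, here is a minimal, self-contained sketch of the retry pattern this hunk parameterizes. The helper coroutine upload_once, the RateLimitError marker, and the one-second back-off are illustrative stand-ins, not WaterButler's actual code; only the bounded "while rate_limit_retry < MAX_429_RETRIES" shape mirrors the change above.

import asyncio


class RateLimitError(Exception):
    """Hypothetical marker for an HTTP 429 (rate limited) response."""


MAX_429_RETRIES = 2  # stands in for pd_settings.MAX_429_RETRIES


async def upload_with_retry(upload_once, max_retries=MAX_429_RETRIES):
    """Attempt an upload at most `max_retries` times, retrying only on 429s."""
    rate_limit_retry = 0
    while rate_limit_retry < max_retries:
        try:
            # `upload_once` is a hypothetical coroutine standing in for the
            # POST request issued inside `_contiguous_upload`.
            return await upload_once()
        except RateLimitError:
            rate_limit_retry += 1
            # Brief pause before the next attempt; per the commit message,
            # locally observed 429s (namespace lock contention) cleared on
            # the first retry.
            await asyncio.sleep(1)
    raise RuntimeError('upload still rate limited after {} attempts'.format(max_retries))

With the default of 2, the sketch's loop body runs at most twice before giving up, and raising the provider setting widens that bound without touching the upload code.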

waterbutler/providers/dropbox/settings.py

Lines changed: 2 additions & 0 deletions
@@ -10,3 +10,5 @@
 CONTIGUOUS_UPLOAD_SIZE_LIMIT = int(config.get('CONTIGUOUS_UPLOAD_SIZE_LIMIT', 150000000))  # 150 MB

 CHUNK_SIZE = int(config.get('CHUNK_SIZE', 4000000))  # 4 MB
+
+MAX_429_RETRIES = int(config.get('MAX_429_RETRIES', 2))
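
To show how the new setting flows from configuration into the provider, here is a small sketch in which a plain dict stands in for WaterButler's config object; the dict, the class name, and the print call are illustrative, not the project's real loading code.

# Hypothetical mapping standing in for the provider config source;
# e.g. {'MAX_429_RETRIES': '5'} would raise the cap, while an empty
# mapping keeps the default of 2 established by this commit.
config = {}

# Same expression as the settings.py hunk above: fall back to 2 when unset.
MAX_429_RETRIES = int(config.get('MAX_429_RETRIES', 2))


class DropboxProviderSketch:
    # Mirrors the provider.py hunk: the cap becomes a class attribute that
    # the upload loop reads as self.MAX_429_RETRIES.
    MAX_429_RETRIES = MAX_429_RETRIES


print(DropboxProviderSketch.MAX_429_RETRIES)  # -> 2 when no override is set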
