Skip to content
This repository was archived by the owner on Feb 21, 2026. It is now read-only.

Commit 97cb166

Browse files
authored
Merge pull request #146 from mpacer/read_content_in_client
Move async content reading inside s3 client context manager
2 parents b65f57d + 8044faa commit 97cb166

File tree

2 files changed

+10
-25
lines changed

2 files changed

+10
-25
lines changed

bookstore/clone.py

Lines changed: 7 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -197,13 +197,14 @@ async def _clone(self, s3_bucket, s3_object_key):
197197
self.log.info(f"Processing clone of {s3_object_key}")
198198
try:
199199
obj = await client.get_object(Bucket=s3_bucket, Key=s3_object_key)
200+
content = (await obj['Body'].read()).decode('utf-8')
200201
except ClientError as e:
201202
status_code = e.response['ResponseMetadata'].get('HTTPStatusCode')
202203
raise web.HTTPError(status_code, e.args[0])
203204

204205
self.log.info(f"Obtained contents for {s3_object_key}")
205206

206-
return obj
207+
return obj, content
207208

208209
@web.authenticated
209210
async def post(self):
@@ -237,9 +238,9 @@ async def post(self):
237238
)
238239

239240
self.log.info(f"About to clone from {s3_object_key}")
240-
obj = await self._clone(s3_bucket, s3_object_key)
241+
obj, content = await self._clone(s3_bucket, s3_object_key)
241242

242-
content_model = await self.build_content_model(obj, target_path)
243+
content_model = self.build_content_model(content, target_path)
243244

244245
self.log.info(f"Completing clone for {s3_object_key}")
245246
self.contents_manager.save(content_model, content_model['path'])
@@ -250,15 +251,15 @@ async def post(self):
250251
self.set_header('Content-Type', 'application/json')
251252
self.finish(resp_model)
252253

253-
async def build_content_model(self, obj, target_path):
254+
def build_content_model(self, content, target_path):
254255
"""Helper that takes a response from S3 and creates a ContentsAPI compatible model.
255256
256257
If the file at target_path already exists, this increments the file name.
257258
258259
Parameters
259260
----------
260-
obj : dict
261-
Response object from S3
261+
content : str
262+
string encoded file content
262263
target_path : str
263264
The path we wish to clone to, may be incremented if already present.
264265
@@ -268,8 +269,6 @@ async def build_content_model(self, obj, target_path):
268269
Jupyter Contents API compatible model
269270
"""
270271
path = self.contents_manager.increment_filename(target_path)
271-
content = await obj['Body'].read()
272-
content = content.decode('utf-8')
273272
if os.path.splitext(path)[1] in [".ipynb", ".jpynb"]:
274273
model = build_notebook_model(content, path)
275274
else:

bookstore/tests/test_clone.py

Lines changed: 3 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -235,17 +235,9 @@ async def test_build_text_content_model(self):
235235
"path": "test_directory/file_name.txt",
236236
}
237237

238-
class MyFakeClass:
239-
def __init__(self):
240-
pass
241-
242-
async def read(self):
243-
return content.encode('utf-8')
244-
245-
obj = {'Body': MyFakeClass()}
246238
path = "test_directory/file_name.txt"
247239
success_handler = self.post_handler({})
248-
model = await success_handler.build_content_model(obj, path)
240+
model = success_handler.build_content_model(content, path)
249241
assert model == expected
250242

251243
@gen_test
@@ -259,15 +251,9 @@ async def test_build_notebook_content_model(self):
259251
"path": "test_directory/file_name.ipynb",
260252
}
261253

262-
class MyFakeClass:
263-
def __init__(self):
264-
pass
265-
266-
async def read(self):
267-
return nbformat.writes(content).encode('utf-8')
254+
str_content = nbformat.writes(content)
268255

269-
obj = {'Body': MyFakeClass()}
270256
path = "test_directory/file_name.ipynb"
271257
success_handler = self.post_handler({})
272-
model = await success_handler.build_content_model(obj, path)
258+
model = success_handler.build_content_model(str_content, path)
273259
assert model == expected

0 commit comments

Comments
 (0)