Skip to content

Commit bc0ea30

Browse files
authored
Aws s3 storage (#10)
* rename .client() to .http_client to be consistent with async, and add s3 support * mock s3 fully
1 parent 3024953 commit bc0ea30

File tree

4 files changed

+137
-394
lines changed

4 files changed

+137
-394
lines changed

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ exclude: "^$"
44

55
repos:
66
- repo: https://github.com/pre-commit/pre-commit-hooks
7-
rev: v5.0.0
7+
rev: v6.0.0
88
hooks:
99
- id: check-added-large-files
1010
- repo: https://github.com/astral-sh/ruff-pre-commit

pyproject.toml

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -28,8 +28,7 @@ classifiers = [
2828

2929
[dependency-groups]
3030
dev = [
31-
"boto3>=1.40.6",
32-
"moto>=5.1.9",
31+
"hishel[s3]>=0.1.3",
3332
"pre-commit>=4.2.0",
3433
"pylint>=3.3.8",
3534
"pytest>=8.4.1",
@@ -213,4 +212,4 @@ requires = ["hatchling",
213212
build-backend = "hatchling.build"
214213

215214
[tool.pytest.ini_options]
216-
asyncio_mode = "auto"
215+
asyncio_mode = "auto"

tests/test_s3.py

Lines changed: 36 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,30 @@
1-
import boto3
2-
from moto import mock_aws
31
from httpxthrottlecache import HttpxThrottleCache
42
import time
53
import pytest
64
import asyncio
5+
import io
6+
7+
class s3_mock:
    """Minimal in-memory stand-in for a boto3 S3 client.

    Objects live in a plain dict keyed by ``(Bucket, Key)``; only the
    subset of the S3 API used by the cache storage backend is implemented.
    Parameter names deliberately mirror boto3's CapWords keyword style.
    """

    def __init__(self):
        # (Bucket, Key) -> {"Body": bytes, "Metadata": dict}
        self.store = {}

    def create_bucket(self, Bucket):
        """No-op: buckets exist implicitly via the (Bucket, Key) store keys."""

    def put_object(self, Bucket, Key, Body, Metadata=None):
        """Store an object; ``str`` bodies are UTF-8 encoded.

        Fix: ``Metadata`` now defaults to ``None`` (stored as ``{}``) to
        match the real boto3 API, where it is an optional keyword.
        """
        data = Body if isinstance(Body, bytes) else Body.encode()
        self.store[(Bucket, Key)] = {"Body": data, "Metadata": Metadata or {}}

    def get_object(self, Bucket, Key):
        """Return the object body as a file-like stream plus its metadata.

        Raises KeyError if the object does not exist (the real client
        raises a botocore ClientError instead).
        """
        obj = self.store[(Bucket, Key)]
        return {"Body": io.BytesIO(obj["Body"]), "Metadata": obj["Metadata"]}

    def head_object(self, Bucket, Key):
        """Return only the metadata (no body), like an S3 HEAD request."""
        return {"Metadata": self.store[(Bucket, Key)]["Metadata"]}

    def list_objects(self, Bucket):
        """List the keys stored under the given bucket."""
        return {
            "Contents": [
                {"Key": key} for (bucket, key) in self.store if bucket == Bucket
            ]
        }

    def delete_object(self, Bucket, Key):
        """Delete the object if present; deleting a missing key is a no-op."""
        self.store.pop((Bucket, Key), None)
23+
24+
class boto3:
    """Drop-in shim so the tests can keep calling ``boto3.client(...)``
    without depending on the real boto3 package; every call hands back a
    fresh ``s3_mock``."""

    @staticmethod
    def client(*_args, **_kwargs):
        # Service name and region (e.g. "s3", region_name=...) are
        # accepted for signature compatibility and ignored.
        return s3_mock()
727

8-
@mock_aws
928
def test_s3_sync():
1029
url = "https://httpbin.org/cache/60"
1130

@@ -29,23 +48,22 @@ def test_s3_sync():
2948

3049
@pytest.mark.asyncio
async def test_s3_async():
    """Issue the same request twice through the async client backed by the
    mocked S3 store and check the second response was served from cache
    (identical ``Date`` header) rather than re-fetched from the origin."""
    url = "https://httpbin.org/cache/60"

    bucket_client = boto3.client("s3", region_name="us-east-1")
    bucket_client.create_bucket(Bucket="mybucket")

    mgr = HttpxThrottleCache(
        httpx_params={"headers": {}},
        cache_enabled=True,
        s3_bucket="mybucket",
        s3_client=bucket_client,
    )

    async with mgr.async_http_client() as client:
        first = await client.get(url=url)
        assert first.status_code == 200, first.status_code

        # Pause so a fresh origin fetch would carry a later Date header.
        await asyncio.sleep(1.5)

        second = await client.get(url=url)
        assert second.status_code == 200, second.status_code

        # Matching Date headers imply the cached response was reused.
        assert first.headers["date"] == second.headers["date"]

0 commit comments

Comments
 (0)