Commit 1313669

Fix endpoint of s3 in production (#943) (#944)
1 parent 9bf21ac commit 1313669

3 files changed (+7 -2 lines)

packages/s3wrapper/src/s3wrapper/s3_client.py

Lines changed: 2 additions & 0 deletions

@@ -19,6 +19,8 @@ def __init__(self, endpoint, access_key=None, secret_key=None, secure=False):
         self.endpoint = endpoint
         self.access_key = access_key
         self.secret_key = secret_key
+        self.secure = secure
+        self.endpoint_url = ("https://" if secure else "http://") + endpoint
         try:
             self.client = Minio(endpoint,
                                 access_key=access_key,
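
The new endpoint_url attribute simply prefixes the configured endpoint with a scheme chosen by the secure flag. A minimal sketch of that behaviour, using a standalone helper (build_endpoint_url is illustrative only and not part of the wrapper):

    def build_endpoint_url(endpoint, secure=False):
        # same expression as the added lines: https when TLS is requested, http otherwise
        return ("https://" if secure else "http://") + endpoint

    assert build_endpoint_url("minio:9000") == "http://minio:9000"
    assert build_endpoint_url("play.min.io", secure=True) == "https://play.min.io"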

services/docker-compose.yml

Lines changed: 4 additions & 1 deletion

@@ -247,8 +247,10 @@ services:
       - MINIO_ACCESS_KEY=12345678
       - MINIO_SECRET_KEY=12345678
     ports:
-      - "9000"
+      - "9001:9000"
     command: server /data
+    volumes:
+      - minio:/data
   #--------------------------------------------------------------------
   maintenance:
     image: ${DOCKER_REGISTRY:-itisfoundation}/maintenance:${DOCKER_IMAGE_TAG:-latest}

@@ -276,3 +278,4 @@ volumes:
   output:
   log:
   postgres:
+  minio:
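
With the host port pinned to "9001:9000" and the data directory moved onto the named minio volume, the local MinIO service becomes reachable on a fixed host port and keeps its buckets across container restarts. A hedged usage sketch, assuming the stack runs on localhost and using the development keys from the compose file above:

    from minio import Minio

    # plain HTTP for the local development stack, hence secure=False
    client = Minio("localhost:9001",
                   access_key="12345678",
                   secret_key="12345678",
                   secure=False)
    print([bucket.name for bucket in client.list_buckets()])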

services/storage/src/simcore_service_storage/dsm.py

Lines changed: 1 addition & 1 deletion

@@ -240,7 +240,7 @@ async def list_files(self, user_id: str, location: str, uuid_filter: str ="", re
         # MaG: This is inefficient: Do this automatically when file is modified
         _loop = asyncio.get_event_loop()
         session = aiobotocore.get_session(loop=_loop)
-        async with session.create_client('s3', endpoint_url="http://"+self.s3_client.endpoint, aws_access_key_id=self.s3_client.access_key,
+        async with session.create_client('s3', endpoint_url=self.s3_client.endpoint_url, aws_access_key_id=self.s3_client.access_key,
             aws_secret_access_key=self.s3_client.secret_key) as client:
             responses = await asyncio.gather(*[client.list_objects_v2(Bucket=d.bucket_name, Prefix=_d) for _d in [__d.object_name for __d in data]])
             for d, resp in zip(data, responses):
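
The corrected call relies on the scheme-aware endpoint_url added in s3_client.py instead of hard-coding "http://", so an S3 backend served over HTTPS in production works as well. A minimal sketch of the same pattern in isolation (list_prefix, bucket and prefix are placeholders; s3_client stands for any object exposing endpoint_url, access_key and secret_key):

    import aiobotocore

    async def list_prefix(s3_client, bucket, prefix):
        session = aiobotocore.get_session()
        # endpoint_url already carries http:// or https://, so nothing is hard-coded here
        async with session.create_client('s3',
                                         endpoint_url=s3_client.endpoint_url,
                                         aws_access_key_id=s3_client.access_key,
                                         aws_secret_access_key=s3_client.secret_key) as client:
            return await client.list_objects_v2(Bucket=bucket, Prefix=prefix)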
