
Commit a8aef37

remove log_catch again

2 parents: a9575a8 + 96791e7

File tree: 7 files changed (+113, -116 lines)

.github/workflows/ci-testing-deploy.yml

Lines changed: 1 addition & 1 deletion
@@ -514,7 +514,7 @@ jobs:
   unit-test-storage:
     needs: changes
     if: ${{ needs.changes.outputs.storage == 'true' || github.event_name == 'push' }}
-    timeout-minutes: 18 # if this timeout gets too small, then split the tests
+    timeout-minutes: 25 # if this timeout gets too small, then split the tests
     name: "[unit] storage"
     runs-on: ${{ matrix.os }}
     strategy:

packages/pytest-simcore/src/pytest_simcore/docker_compose.py

Lines changed: 7 additions & 3 deletions
@@ -2,10 +2,10 @@
 # pylint: disable=unused-argument
 # pylint: disable=unused-variable
 
-""" Fixtures to create docker-compose.yaml configuration files (as in Makefile)
+"""Fixtures to create docker-compose.yaml configuration files (as in Makefile)
 
-    - Basically runs `docker compose config
-    - Services in stack can be selected using 'core_services_selection', 'ops_services_selection' fixtures
+- Basically runs `docker compose config
+- Services in stack can be selected using 'core_services_selection', 'ops_services_selection' fixtures
 
 
 """

@@ -391,6 +391,10 @@ def _filter_services_and_dump(
         if "environment" in service:
             service["environment"] = _minio_fix(service["environment"])
 
+        if name == "postgres":
+            # NOTE: # -c fsync=off is not recommended for production as this disable writing to disk https://pythonspeed.com/articles/faster-db-tests/
+            service["command"] += ["-c", "fsync=off"]
+
     # updates current docker-compose (also versioned ... do not change by hand)
     with docker_compose_path.open("wt") as fh:
         yaml.dump(content, fh, default_flow_style=False)
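The postgres tweak above only makes sense for throwaway test databases: `fsync=off` skips the flush to disk on commit, which speeds tests up considerably but forfeits crash safety. A minimal standalone sketch of the same idea, assuming a compose file already parsed into a dict (the `compose` layout and function name below are illustrative, not the fixture's actual code):

```python
from typing import Any

import yaml


def speed_up_postgres_for_tests(compose: dict[str, Any]) -> dict[str, Any]:
    """Append '-c fsync=off' to the postgres service command (test-only tweak)."""
    for name, service in compose.get("services", {}).items():
        if name == "postgres":
            # WARNING: disables synchronous writes; never do this in production.
            service.setdefault("command", [])
            service["command"] += ["-c", "fsync=off"]
    return compose


if __name__ == "__main__":
    compose = yaml.safe_load("services:\n  postgres:\n    image: postgres:14\n")
    print(speed_up_postgres_for_tests(compose)["services"]["postgres"]["command"])
    # -> ['-c', 'fsync=off']
```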

packages/service-library/src/servicelib/rabbitmq/_client_rpc.py

Lines changed: 8 additions & 5 deletions
@@ -32,15 +32,18 @@ async def create(
         cls, *, client_name: str, settings: RabbitSettings, **kwargs
     ) -> "RabbitMQRPCClient":
         client = cls(client_name=client_name, settings=settings, **kwargs)
-        await client._rpc_initialize()  # noqa: SLF001
+        await client._rpc_initialize()
         return client
 
     async def _rpc_initialize(self) -> None:
+        # NOTE: to show the connection name in the rabbitMQ UI see there
+        # https://www.bountysource.com/issues/89342433-setting-custom-connection-name-via-client_properties-doesn-t-work-when-connecting-using-an-amqp-url
+        #
+        connection_name = f"{get_rabbitmq_client_unique_name(self.client_name)}.rpc"
+        url = f"{self.settings.dsn}?name={connection_name}"
         self._connection = await aio_pika.connect_robust(
-            self.settings.dsn,
-            client_properties={
-                "connection_name": f"{get_rabbitmq_client_unique_name(self.client_name)}.rpc"
-            },
+            url,
+            client_properties={"connection_name": connection_name},
         )
         self._channel = await self._connection.channel()
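The key change here is that the connection name is now carried in the AMQP URL's `?name=` query parameter rather than only in `client_properties`, since the latter alone is not picked up when connecting via a DSN (see the linked issue). A hedged, self-contained sketch of the same pattern with placeholder credentials (the real client derives the name from `get_rabbitmq_client_unique_name` and the DSN from `RabbitSettings`):

```python
import asyncio

import aio_pika


async def connect_with_visible_name() -> None:
    # Placeholder DSN and connection name, only for illustration.
    connection_name = "my-service.rpc"
    url = f"amqp://guest:guest@localhost:5672/?name={connection_name}"

    # The ?name= query parameter is what shows up in the RabbitMQ management UI;
    # client_properties is kept as well for clients/tools that inspect it.
    connection = await aio_pika.connect_robust(
        url,
        client_properties={"connection_name": connection_name},
    )
    async with connection:
        channel = await connection.channel()
        assert channel is not None  # the connection is usable


if __name__ == "__main__":
    asyncio.run(connect_with_visible_name())
```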

packages/settings-library/src/settings_library/redis.py

Lines changed: 1 addition & 1 deletion
@@ -42,6 +42,6 @@ def build_redis_dsn(self, db_index: RedisDatabase) -> str:
             ),
             host=self.REDIS_HOST,
             port=self.REDIS_PORT,
-            path=f"/{db_index}",
+            path=f"{db_index}",
         )
     )
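The dropped leading slash only matters because the URL builder inserts the path separator itself; keeping the slash in the `path` argument would double it and point the DSN at the wrong (or an invalid) database. A tiny sketch of that assumption using plain string formatting (not the actual builder used in `settings_library.redis`):

```python
def build_redis_dsn(host: str, port: int, db_index: int) -> str:
    # The builder adds the "/" between authority and path, so the path
    # component must be just the database index, e.g. "2", not "/2".
    path = f"{db_index}"
    return f"redis://{host}:{port}/{path}"


print(build_redis_dsn("localhost", 6379, 2))  # redis://localhost:6379/2
# With path=f"/{db_index}" the result would be redis://localhost:6379//2,
# which no longer selects database 2.
```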

services/storage/src/simcore_service_storage/api/rpc/_async_jobs.py

Lines changed: 65 additions & 71 deletions
@@ -47,19 +47,18 @@ async def _assert_job_exists(
 async def cancel(app: FastAPI, job_id: AsyncJobId, job_id_data: AsyncJobNameData):
     assert app  # nosec
     assert job_id_data  # nosec
-    with log_catch(logger=_logger):
-        try:
-            await _assert_job_exists(
-                job_id=job_id,
-                job_id_data=job_id_data,
-                celery_client=get_celery_client(app),
-            )
-            await get_celery_client(app).abort_task(
-                task_context=job_id_data.model_dump(),
-                task_uuid=job_id,
-            )
-        except CeleryError as exc:
-            raise JobSchedulerError(exc=f"{exc}") from exc
+    try:
+        await _assert_job_exists(
+            job_id=job_id,
+            job_id_data=job_id_data,
+            celery_client=get_celery_client(app),
+        )
+        await get_celery_client(app).abort_task(
+            task_context=job_id_data.model_dump(),
+            task_uuid=job_id,
+        )
+    except CeleryError as exc:
+        raise JobSchedulerError(exc=f"{exc}") from exc
 
 
 @router.expose(reraise_if_error_type=(JobSchedulerError, JobMissingError))

@@ -69,25 +68,24 @@ async def status(
     assert app  # nosec
     assert job_id_data  # nosec
 
-    with log_catch(logger=_logger):
-        try:
-            await _assert_job_exists(
-                job_id=job_id,
-                job_id_data=job_id_data,
-                celery_client=get_celery_client(app),
-            )
-            task_status = await get_celery_client(app).get_task_status(
-                task_context=job_id_data.model_dump(),
-                task_uuid=job_id,
-            )
-        except CeleryError as exc:
-            raise JobSchedulerError(exc=f"{exc}") from exc
-
-    return AsyncJobStatus(
-        job_id=job_id,
-        progress=task_status.progress_report,
-        done=task_status.is_done,
-    )
+    try:
+        await _assert_job_exists(
+            job_id=job_id,
+            job_id_data=job_id_data,
+            celery_client=get_celery_client(app),
+        )
+        task_status = await get_celery_client(app).get_task_status(
+            task_context=job_id_data.model_dump(),
+            task_uuid=job_id,
+        )
+    except CeleryError as exc:
+        raise JobSchedulerError(exc=f"{exc}") from exc
+
+    return AsyncJobStatus(
+        job_id=job_id,
+        progress=task_status.progress_report,
+        done=task_status.is_done,
+    )
 
 
 @router.expose(

@@ -106,38 +104,35 @@ async def result(
     assert job_id  # nosec
     assert job_id_data  # nosec
 
-    with log_catch(logger=_logger):
-        try:
-            await _assert_job_exists(
-                job_id=job_id,
-                job_id_data=job_id_data,
-                celery_client=get_celery_client(app),
-            )
-            _status = await get_celery_client(app).get_task_status(
-                task_context=job_id_data.model_dump(),
-                task_uuid=job_id,
-            )
-            if not _status.is_done:
-                raise JobNotDoneError(job_id=job_id)
-            _result = await get_celery_client(app).get_task_result(
-                task_context=job_id_data.model_dump(),
-                task_uuid=job_id,
-            )
-        except CeleryError as exc:
-            raise JobSchedulerError(exc=f"{exc}") from exc
-
-        if _status.task_state == TaskState.ABORTED:
-            raise JobAbortedError(job_id=job_id)
-        if _status.task_state == TaskState.ERROR:
-            exc_type = ""
-            exc_msg = ""
-            with log_catch(logger=_logger, reraise=False):
-                task_error = TaskError.model_validate(_result)
-                exc_type = task_error.exc_type
-                exc_msg = task_error.exc_msg
-            raise JobError(job_id=job_id, exc_type=exc_type, exc_msg=exc_msg)
-
-        return AsyncJobResult(result=_result)
+    try:
+        await _assert_job_exists(
+            job_id=job_id, job_id_data=job_id_data, celery_client=get_celery_client(app)
+        )
+        _status = await get_celery_client(app).get_task_status(
+            task_context=job_id_data.model_dump(),
+            task_uuid=job_id,
+        )
+        if not _status.is_done:
+            raise JobNotDoneError(job_id=job_id)
+        _result = await get_celery_client(app).get_task_result(
+            task_context=job_id_data.model_dump(),
+            task_uuid=job_id,
+        )
+    except CeleryError as exc:
+        raise JobSchedulerError(exc=f"{exc}") from exc
+
+    if _status.task_state == TaskState.ABORTED:
+        raise JobAbortedError(job_id=job_id)
+    if _status.task_state == TaskState.ERROR:
+        exc_type = ""
+        exc_msg = ""
+        with log_catch(logger=_logger, reraise=False):
+            task_error = TaskError.model_validate(_result)
+            exc_type = task_error.exc_type
+            exc_msg = task_error.exc_msg
+        raise JobError(job_id=job_id, exc_type=exc_type, exc_msg=exc_msg)
+
+    return AsyncJobResult(result=_result)
 
 
 @router.expose(reraise_if_error_type=(JobSchedulerError,))

@@ -146,12 +141,11 @@ async def list_jobs(
 ) -> list[AsyncJobGet]:
     assert app  # nosec
     assert filter_  # nosec
-    with log_catch(logger=_logger):
-        try:
-            task_uuids = await get_celery_client(app).get_task_uuids(
-                task_context=job_id_data.model_dump(),
-            )
-        except CeleryError as exc:
-            raise JobSchedulerError(exc=f"{exc}") from exc
-
-        return [AsyncJobGet(job_id=task_uuid) for task_uuid in task_uuids]
+    try:
+        task_uuids = await get_celery_client(app).get_task_uuids(
+            task_context=job_id_data.model_dump(),
+        )
+    except CeleryError as exc:
+        raise JobSchedulerError(exc=f"{exc}") from exc
+
+    return [AsyncJobGet(job_id=task_uuid) for task_uuid in task_uuids]
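Dropping `log_catch` around these handlers works because each of them already translates low-level `CeleryError`s into domain errors that the router is told to re-raise via `reraise_if_error_type`, so the errors reach the RPC caller instead of merely being logged. A stripped-down sketch of that pattern (the `JobSchedulerError` stand-in and the fake Celery call are illustrative; `RPCRouter` and the decorator usage follow the imports shown above):

```python
from celery.exceptions import CeleryError
from servicelib.rabbitmq import RPCRouter


class JobSchedulerError(Exception):
    """Stand-in for the service's JobSchedulerError defined elsewhere."""

    def __init__(self, *, exc: str) -> None:
        super().__init__(exc)


router = RPCRouter()


async def _fake_celery_call() -> None:
    # Stand-in for get_celery_client(app).<method>(...) failing at the broker level.
    raise CeleryError("broker unreachable")


@router.expose(reraise_if_error_type=(JobSchedulerError,))
async def example_handler() -> str:
    try:
        await _fake_celery_call()
    except CeleryError as exc:
        # Translate the infrastructure error into a declared domain error;
        # the RPC router re-raises it on the caller side, so an extra
        # log_catch wrapper around the handler body adds nothing.
        raise JobSchedulerError(exc=f"{exc}") from exc
    return "ok"
```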

services/storage/src/simcore_service_storage/api/rpc/_data_export.py

Lines changed: 23 additions & 25 deletions
@@ -12,7 +12,6 @@
     DataExportTaskStartInput,
     InvalidFileIdentifierError,
 )
-from servicelib.logging_utils import log_catch
 from servicelib.rabbitmq import RPCRouter
 
 from ...datcore_dsm import DatCoreDataManager

@@ -41,31 +40,30 @@ async def start_data_export(
 ) -> AsyncJobGet:
     assert app  # nosec
 
-    with log_catch(_logger):
-        dsm = get_dsm_provider(app).get(data_export_start.location_id)
-
-        try:
-            for _id in data_export_start.file_and_folder_ids:
-                if isinstance(dsm, DatCoreDataManager):
-                    _ = await dsm.get_file(user_id=job_id_data.user_id, file_id=_id)
-                elif isinstance(dsm, SimcoreS3DataManager):
-                    await dsm.can_read_file(user_id=job_id_data.user_id, file_id=_id)
-
-        except (FileAccessRightError, DatcoreAdapterError) as err:
-            raise AccessRightError(
-                user_id=job_id_data.user_id,
-                file_id=_id,
-                location_id=data_export_start.location_id,
-            ) from err
-
-        try:
-            task_uuid = await get_celery_client(app).send_task(
-                "export_data",
-                task_context=job_id_data.model_dump(),
-                files=data_export_start.file_and_folder_ids,  # ANE: adapt here your signature
-            )
-        except CeleryError as exc:
-            raise JobSchedulerError(exc=f"{exc}") from exc
-        return AsyncJobGet(
-            job_id=task_uuid,
-        )
+    dsm = get_dsm_provider(app).get(data_export_start.location_id)
+
+    try:
+        for _id in data_export_start.file_and_folder_ids:
+            if isinstance(dsm, DatCoreDataManager):
+                _ = await dsm.get_file(user_id=job_id_data.user_id, file_id=_id)
+            elif isinstance(dsm, SimcoreS3DataManager):
+                await dsm.can_read_file(user_id=job_id_data.user_id, file_id=_id)
+
+    except (FileAccessRightError, DatcoreAdapterError) as err:
+        raise AccessRightError(
+            user_id=job_id_data.user_id,
+            file_id=_id,
+            location_id=data_export_start.location_id,
+        ) from err
+
+    try:
+        task_uuid = await get_celery_client(app).send_task(
+            "export_data",
+            task_context=job_id_data.model_dump(),
+            files=data_export_start.file_and_folder_ids,  # ANE: adapt here your signature
+        )
+    except CeleryError as exc:
+        raise JobSchedulerError(exc=f"{exc}") from exc
+    return AsyncJobGet(
+        job_id=task_uuid,
+    )
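`start_data_export` keeps a validate-then-dispatch shape: every requested id is checked against the concrete storage backend before the Celery task is enqueued, so permission problems surface immediately rather than inside the worker. A compact, self-contained sketch of that shape with stand-in classes (the real checks live in `DatCoreDataManager.get_file` and `SimcoreS3DataManager.can_read_file`):

```python
import asyncio


class AccessDenied(Exception):
    """Stand-in for AccessRightError."""


class DatCoreBackend:
    """Stand-in for DatCoreDataManager: fetching the file implies read access."""

    async def get_file(self, *, user_id: int, file_id: str) -> str:
        return file_id


class S3Backend:
    """Stand-in for SimcoreS3DataManager: explicit read-permission check."""

    async def can_read_file(self, *, user_id: int, file_id: str) -> None:
        return None


async def validate_then_dispatch(dsm: object, user_id: int, file_ids: list[str]) -> str:
    # 1) pre-flight: backend-specific access check for every requested id
    try:
        for _id in file_ids:
            if isinstance(dsm, DatCoreBackend):
                await dsm.get_file(user_id=user_id, file_id=_id)
            elif isinstance(dsm, S3Backend):
                await dsm.can_read_file(user_id=user_id, file_id=_id)
    except Exception as err:
        raise AccessDenied(f"user {user_id} cannot read {_id}") from err
    # 2) only then enqueue the long-running export (send_task in the real handler)
    return "fake-task-uuid"


if __name__ == "__main__":
    print(asyncio.run(validate_then_dispatch(S3Backend(), 1, ["f1", "f2"])))
```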

services/storage/src/simcore_service_storage/api/rpc/_paths.py

Lines changed: 8 additions & 10 deletions
@@ -7,7 +7,6 @@
     AsyncJobNameData,
 )
 from models_library.projects_nodes_io import LocationID
-from servicelib.logging_utils import log_catch
 from servicelib.rabbitmq import RPCRouter
 
 from ...modules.celery import get_celery_client

@@ -26,13 +25,12 @@ async def compute_path_size(
     path: Path,
 ) -> AsyncJobGet:
     assert app  # nosec
-    with log_catch(logger=_logger):
-        task_uuid = await get_celery_client(app).send_task(
-            remote_compute_path_size.__name__,
-            task_context=job_id_data.model_dump(),
-            user_id=job_id_data.user_id,
-            location_id=location_id,
-            path=path,
-        )
+    task_uuid = await get_celery_client(app).send_task(
+        remote_compute_path_size.__name__,
+        task_context=job_id_data.model_dump(),
+        user_id=job_id_data.user_id,
+        location_id=location_id,
+        path=path,
+    )
 
-        return AsyncJobGet(job_id=task_uuid)
+    return AsyncJobGet(job_id=task_uuid)
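A side note on the dispatch call that survives the change: the task name is derived from the worker function itself (`remote_compute_path_size.__name__`), presumably so the RPC handler and the Celery registration cannot drift apart when the function is renamed. A hedged sketch of the same idea with vanilla Celery (plain Celery registers tasks under their fully qualified name, so the sketch uses the task's `.name` attribute rather than bare `__name__`; the service's own Celery client may resolve names differently):

```python
from celery import Celery

app = Celery("storage-sketch", broker="memory://", backend="cache+memory://")


@app.task
def remote_compute_path_size(user_id: int, location_id: int, path: str) -> int:
    return 0  # placeholder for the real size computation


def enqueue_path_size(user_id: int, location_id: int, path: str) -> str:
    # Same idea as send_task(remote_compute_path_size.__name__, ...): the dispatch
    # name is derived from the function object, not from a hand-written string.
    result = app.send_task(
        remote_compute_path_size.name,
        kwargs={"user_id": user_id, "location_id": location_id, "path": path},
    )
    return result.id


if __name__ == "__main__":
    print(enqueue_path_size(user_id=1, location_id=0, path="/projects"))
```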
