
Commit 13b42f6

Merge branch 'master' into pr/giancarloromeo/8141
2 parents: 037272b + b24fbf5

File tree: 441 files changed (+6955, -4282 lines)

Large commits have some content hidden by default; only a subset of the 441 changed files is shown below.

.env-devel

Lines changed: 1 addition & 0 deletions

```diff
@@ -56,6 +56,7 @@ CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DEFAULT_CLUSTER_AUTH='{"type":"tls","tls_c
 CLUSTERS_KEEPER_COMPUTATIONAL_BACKEND_DOCKER_IMAGE_TAG=master-github-latest
 CLUSTERS_KEEPER_DASK_NPROCS=1
 CLUSTERS_KEEPER_DASK_NTHREADS=0
+CLUSTERS_KEEPER_DASK_NTHREADS_MULTIPLIER=1
 CLUSTERS_KEEPER_DASK_WORKER_SATURATION=inf
 CLUSTERS_KEEPER_EC2_ACCESS=null
 CLUSTERS_KEEPER_SSM_ACCESS=null
```
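The new `CLUSTERS_KEEPER_DASK_NTHREADS_MULTIPLIER` knob ships with a neutral default of 1. As a hedged reading (the semantics are not shown in this diff), with `CLUSTERS_KEEPER_DASK_NTHREADS=0` meaning "auto", the multiplier would scale the auto-derived per-worker thread count, roughly:

```python
import multiprocessing


def compute_dask_nthreads(nthreads_setting: int, multiplier: int) -> int:
    """Hypothetical resolution of the Dask worker thread count.

    Assumption (not confirmed by this diff): NTHREADS=0 means "auto",
    i.e. derive the count from the host CPUs, scaled by the multiplier.
    """
    base = nthreads_setting or multiprocessing.cpu_count()
    return base * multiplier


# Defaults above: NTHREADS=0, MULTIPLIER=1 -> one thread per CPU
print(compute_dask_nthreads(0, 1))
```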

.github/instructions/python.instructions.md

Lines changed: 15 additions & 1 deletion

```diff
@@ -35,6 +35,8 @@ Ensure compatibility with the following library versions:
 
 * Use `f-string` formatting for all string interpolation except for logging message strings.
 * Use **relative imports** within the same package/module.
+  - For imports within the same repository/project, always use relative imports (e.g., `from ..constants import APP_SETTINGS_KEY` instead of `from simcore_service_webserver.constants import APP_SETTINGS_KEY`)
+  - Use absolute imports only for external libraries and packages
 * Place **all imports at the top** of the file.
 * Document functions when the code is not self-explanatory or if asked explicitly.
 
@@ -44,6 +46,18 @@ Ensure compatibility with the following library versions:
 * Prefer `json_dumps` / `json_loads` from `common_library.json_serialization` instead of the built-in `json.dumps` / `json.loads`.
 * When using Pydantic models, prefer methods like `model.model_dump_json()` for serialization.
 
-### 7. **Running tests**
+### 7. **aiohttp Framework**
+
+* **Application Keys**: Always use `web.AppKey` for type-safe application storage instead of string keys
+  - Define keys with specific types: `APP_MY_KEY: Final = web.AppKey("APP_MY_KEY", MySpecificType)`
+  - Use precise types instead of generic `object` when the actual type is known
+  - Example: `APP_SETTINGS_KEY: Final = web.AppKey("APP_SETTINGS_KEY", ApplicationSettings)`
+  - Store and retrieve: `app[APP_MY_KEY] = value` and `data = app[APP_MY_KEY]`
+* **Request Keys**: Use `web.AppKey` for request storage as well for consistency and type safety
+* **Middleware**: Follow the repository's middleware patterns for cross-cutting concerns
+* **Error Handling**: Use the established exception handling decorators and patterns
+* **Route Definitions**: Use `web.RouteTableDef()` and organize routes logically within modules
+
+### 8. **Running tests**
 * Use `--keep-docker-up` flag when testing to keep docker containers up between sessions.
 * Always activate the python virtual environment before running pytest.
```
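The new aiohttp section is terse, so here is a minimal self-contained sketch of the `web.AppKey` pattern it mandates; the `ApplicationSettings` class below is a stand-in, not the repository's actual settings type:

```python
from typing import Final

from aiohttp import web


class ApplicationSettings:  # stand-in for the app's real settings type
    def __init__(self, port: int = 8080) -> None:
        self.port = port


# Typed key: checkers now know app[APP_SETTINGS_KEY] is ApplicationSettings
APP_SETTINGS_KEY: Final = web.AppKey("APP_SETTINGS_KEY", ApplicationSettings)


async def handler(request: web.Request) -> web.Response:
    settings = request.app[APP_SETTINGS_KEY]  # typed access, no cast needed
    return web.Response(text=f"listening on port {settings.port}")


def create_app() -> web.Application:
    app = web.Application()
    app[APP_SETTINGS_KEY] = ApplicationSettings()  # store under the typed key
    app.add_routes([web.get("/", handler)])
    return app
```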
Lines changed: 28 additions & 0 deletions

```diff
@@ -0,0 +1,28 @@
+---
+mode: edit
+description: Converts string-based aiohttp app key constants to type-safe web.AppKey
+model: GPT-4.1
+---
+
+Convert all string-based app key constants to use type-safe web.AppKey.
+
+- Replace patterns like:
+  ```python
+  CONSTNAME_APPKEY: Final[str] = f"{__name__}.my_key"
+  ```
+  with:
+  ```python
+  from aiohttp import web
+  CONSTNAME_APPKEY: Final = web.AppKey("CONSTNAME", ValueType)
+  ```
+  (Replace ValueType with the actual type stored under this key.)
+
+- Update all usages:
+  - `app[CONSTNAME_APPKEY] = value`
+  - `data = app[CONSTNAME_APPKEY]` or `data = request.app[CONSTNAME_APPKEY]`
+
+- Key constant MUST be UPPERCASE
+- Key name MUST be suffixed `_APPKEY`
+- Remove any f"{__name__}..." patterns; use a simple string identifier in web.AppKey.
+- Ensure all keys are type-safe and self-documenting.
+- IF you change the original name, you MUST change all the references
```
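Applied to a concrete constant, the conversion this prompt describes would look roughly as follows; `RabbitMQClient` and the key name are illustrative, not taken from the repository. Because the `_APPKEY` suffix rule renames the constant, every reference must be updated, per the prompt's last rule:

```python
from typing import Final

from aiohttp import web


class RabbitMQClient:  # illustrative type stored under the key
    ...


# Before (string-based, not type-safe):
#   APP_RABBITMQ_CLIENT_KEY: Final[str] = f"{__name__}.rabbitmq_client"

# After: UPPERCASE constant, `_APPKEY` suffix, plain string identifier
APP_RABBITMQ_CLIENT_APPKEY: Final = web.AppKey(
    "APP_RABBITMQ_CLIENT_APPKEY", RabbitMQClient
)


def setup_rabbitmq(app: web.Application) -> None:
    app[APP_RABBITMQ_CLIENT_APPKEY] = RabbitMQClient()  # store


async def get_client(request: web.Request) -> RabbitMQClient:
    return request.app[APP_RABBITMQ_CLIENT_APPKEY]  # retrieve, fully typed
```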
File renamed without changes.

.github/workflows/cleanup-caches-by-branches.yml

Lines changed: 7 additions & 7 deletions

```diff
@@ -8,23 +8,23 @@ on:
 jobs:
   cleanup:
     runs-on: ubuntu-latest
+    permissions:
+      actions: write
     steps:
       - name: Cleanup
         run: |
-          gh extension install actions/gh-actions-cache
-
-          echo "Fetching list of cache key"
-          cacheKeysForPR=$(gh actions-cache list -R $REPO -B $BRANCH -L 100 | cut -f 1 )
+          echo "Fetching list of cache keys"
+          cacheKeysForPR=$(gh cache list --ref $BRANCH --limit 100 --json id --jq '.[].id')
 
           ## Setting this to not fail the workflow while deleting cache keys.
           set +e
           echo "Deleting caches..."
           for cacheKey in $cacheKeysForPR
           do
-            gh actions-cache delete $cacheKey -R $REPO -B $BRANCH --confirm
+            gh cache delete $cacheKey
           done
           echo "Done"
         env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          REPO: ${{ github.repository }}
+          GH_TOKEN: ${{ github.token }}
+          GH_REPO: ${{ github.repository }}
           BRANCH: refs/pull/${{ github.event.pull_request.number }}/merge
```

packages/celery-library/Makefile

Lines changed: 2 additions & 0 deletions

```diff
@@ -27,6 +27,7 @@ tests: ## runs unit tests
 	--durations=10 \
 	--exitfirst \
 	--failed-first \
+	--keep-docker-up \
 	--pdb \
 	-vv \
 	$(CURDIR)/tests
@@ -41,6 +42,7 @@ tests-ci: ## runs unit tests
 	--cov-report=term-missing \
 	--cov-report=xml \
 	--junitxml=junit.xml -o junit_family=legacy \
+	--keep-docker-up \
 	--cov=celery_library \
 	--durations=10 \
 	--log-date-format="%Y-%m-%d %H:%M:%S" \
```

packages/celery-library/src/celery_library/backends/redis.py

Lines changed: 13 additions & 13 deletions

```diff
@@ -6,12 +6,12 @@
 from models_library.progress_bar import ProgressReport
 from pydantic import ValidationError
 from servicelib.celery.models import (
+    WILDCARD,
+    ExecutionMetadata,
+    OwnerMetadata,
     Task,
-    TaskFilter,
     TaskID,
     TaskInfoStore,
-    TaskMetadata,
-    Wildcard,
 )
 from servicelib.redis import RedisClientSDK, handle_redis_returns_union_types
 
@@ -35,23 +35,23 @@ def __init__(self, redis_client_sdk: RedisClientSDK) -> None:
     async def create_task(
         self,
         task_id: TaskID,
-        task_metadata: TaskMetadata,
+        execution_metadata: ExecutionMetadata,
         expiry: timedelta,
     ) -> None:
         task_key = _build_key(task_id)
         await handle_redis_returns_union_types(
             self._redis_client_sdk.redis.hset(
                 name=task_key,
                 key=_CELERY_TASK_METADATA_KEY,
-                value=task_metadata.model_dump_json(),
+                value=execution_metadata.model_dump_json(),
             )
         )
         await self._redis_client_sdk.redis.expire(
             task_key,
             expiry,
         )
 
-    async def get_task_metadata(self, task_id: TaskID) -> TaskMetadata | None:
+    async def get_task_metadata(self, task_id: TaskID) -> ExecutionMetadata | None:
         raw_result = await handle_redis_returns_union_types(
             self._redis_client_sdk.redis.hget(
                 _build_key(task_id), _CELERY_TASK_METADATA_KEY
@@ -61,7 +61,7 @@ async def get_task_metadata(self, task_id: TaskID) -> TaskMetadata | None:
             return None
 
         try:
-            return TaskMetadata.model_validate_json(raw_result)
+            return ExecutionMetadata.model_validate_json(raw_result)
         except ValidationError as exc:
             _logger.debug(
                 "Failed to deserialize task metadata for task %s: %s", task_id, f"{exc}"
@@ -85,9 +85,9 @@ async def get_task_progress(self, task_id: TaskID) -> ProgressReport | None:
             )
             return None
 
-    async def list_tasks(self, task_filter: TaskFilter) -> list[Task]:
-        search_key = _CELERY_TASK_INFO_PREFIX + task_filter.create_task_id(
-            task_uuid=Wildcard()
+    async def list_tasks(self, owner_metadata: OwnerMetadata) -> list[Task]:
+        search_key = _CELERY_TASK_INFO_PREFIX + owner_metadata.model_dump_task_id(
+            task_uuid=WILDCARD
         )
 
         keys: list[str] = []
@@ -112,11 +112,11 @@ async def list_tasks(self, task_filter: TaskFilter) -> list[Task]:
                 continue
 
             with contextlib.suppress(ValidationError):
-                task_metadata = TaskMetadata.model_validate_json(raw_metadata)
+                execution_metadata = ExecutionMetadata.model_validate_json(raw_metadata)
                 tasks.append(
                     Task(
-                        uuid=TaskFilter.get_task_uuid(key),
-                        metadata=task_metadata,
+                        uuid=OwnerMetadata.get_task_uuid(key),
+                        metadata=execution_metadata,
                     )
                 )
 
```
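The redis backend change is a mechanical rename: `TaskMetadata` becomes `ExecutionMetadata`, `TaskFilter` becomes `OwnerMetadata` (with `create_task_id` renamed to `model_dump_task_id`), and the `Wildcard()` sentinel instance becomes a `WILDCARD` constant. A caller-side sketch under those renames; how the two model instances are constructed is not shown in this diff, so they appear here as opaque parameters:

```python
from datetime import timedelta


async def register_and_list(store, owner_metadata, execution_metadata, task_id):
    # create_task now takes `execution_metadata` (was `task_metadata`)
    await store.create_task(
        task_id=task_id,
        execution_metadata=execution_metadata,
        expiry=timedelta(hours=1),
    )
    # list_tasks now scopes its Redis key scan by `owner_metadata`
    # (was `task_filter`), expanding the task uuid to the WILDCARD constant
    return await store.list_tasks(owner_metadata=owner_metadata)
```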

packages/celery-library/src/celery_library/rpc/_async_jobs.py

Lines changed: 13 additions & 19 deletions

```diff
@@ -4,7 +4,6 @@
 
 from celery.exceptions import CeleryError  # type: ignore[import-untyped]
 from models_library.api_schemas_rpc_async_jobs.async_jobs import (
-    AsyncJobFilter,
     AsyncJobGet,
     AsyncJobId,
     AsyncJobResult,
@@ -17,7 +16,7 @@
     JobNotDoneError,
     JobSchedulerError,
 )
-from servicelib.celery.models import TaskFilter, TaskState
+from servicelib.celery.models import OwnerMetadata, TaskState
 from servicelib.celery.task_manager import TaskManager
 from servicelib.logging_utils import log_catch
 from servicelib.rabbitmq import RPCRouter
@@ -34,14 +33,13 @@
 
 @router.expose(reraise_if_error_type=(JobSchedulerError, JobMissingError))
 async def cancel(
-    task_manager: TaskManager, job_id: AsyncJobId, job_filter: AsyncJobFilter
+    task_manager: TaskManager, job_id: AsyncJobId, owner_metadata: OwnerMetadata
 ):
     assert task_manager  # nosec
-    assert job_filter  # nosec
-    task_filter = TaskFilter.model_validate(job_filter.model_dump())
+    assert owner_metadata  # nosec
     try:
         await task_manager.cancel_task(
-            task_filter=task_filter,
+            owner_metadata=owner_metadata,
             task_uuid=job_id,
         )
     except TaskNotFoundError as exc:
@@ -52,15 +50,14 @@ async def cancel(
 
 @router.expose(reraise_if_error_type=(JobSchedulerError, JobMissingError))
 async def status(
-    task_manager: TaskManager, job_id: AsyncJobId, job_filter: AsyncJobFilter
+    task_manager: TaskManager, job_id: AsyncJobId, owner_metadata: OwnerMetadata
 ) -> AsyncJobStatus:
     assert task_manager  # nosec
-    assert job_filter  # nosec
+    assert owner_metadata  # nosec
 
-    task_filter = TaskFilter.model_validate(job_filter.model_dump())
     try:
         task_status = await task_manager.get_task_status(
-            task_filter=task_filter,
+            owner_metadata=owner_metadata,
             task_uuid=job_id,
         )
     except TaskNotFoundError as exc:
@@ -85,23 +82,21 @@ async def status(
         )
     )
 async def result(
-    task_manager: TaskManager, job_id: AsyncJobId, job_filter: AsyncJobFilter
+    task_manager: TaskManager, job_id: AsyncJobId, owner_metadata: OwnerMetadata
 ) -> AsyncJobResult:
     assert task_manager  # nosec
     assert job_id  # nosec
-    assert job_filter  # nosec
-
-    task_filter = TaskFilter.model_validate(job_filter.model_dump())
+    assert owner_metadata  # nosec
 
     try:
         _status = await task_manager.get_task_status(
-            task_filter=task_filter,
+            owner_metadata=owner_metadata,
             task_uuid=job_id,
         )
         if not _status.is_done:
             raise JobNotDoneError(job_id=job_id)
         _result = await task_manager.get_task_result(
-            task_filter=task_filter,
+            owner_metadata=owner_metadata,
             task_uuid=job_id,
         )
     except TaskNotFoundError as exc:
@@ -134,13 +129,12 @@ async def result(
 
 @router.expose(reraise_if_error_type=(JobSchedulerError,))
 async def list_jobs(
-    task_manager: TaskManager, job_filter: AsyncJobFilter
+    task_manager: TaskManager, owner_metadata: OwnerMetadata
 ) -> list[AsyncJobGet]:
     assert task_manager  # nosec
-    task_filter = TaskFilter.model_validate(job_filter.model_dump())
     try:
         tasks = await task_manager.list_tasks(
-            task_filter=task_filter,
+            owner_metadata=owner_metadata,
         )
     except CeleryError as exc:
         raise JobSchedulerError(exc=f"{exc}") from exc
```
