Skip to content

Commit d6f8ff8

Browse files
authored
Merge branch 'master' into feature/share-tags
2 parents 68f482d + a0bf53a commit d6f8ff8

File tree

72 files changed

+597
-365
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

72 files changed

+597
-365
lines changed

.github/workflows/ci-arm-build.yml

Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
---
# Builds arm64 images on pushes to master (or via manual dispatch) and
# pushes them to the Docker registry tagged "<prefix>-latest-arm64".
name: CI ARM64 Build and Push

on:
  push:
    branches:
      - "master"
    tags-ignore:
      - "*"
  workflow_dispatch:

# Cancel an in-flight run for the same ref when a newer one starts.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  build-and-push-arm64:
    timeout-minutes: 60 # intentionally long to allow for slow builds
    runs-on: ubuntu-latest
    strategy:
      matrix:
        os: [ubuntu-22.04]
        python: ["3.11"]
    env:
      # secrets can be set in settings/secrets on github
      DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }}
    steps:
      - uses: actions/checkout@v4
      - name: setup QEMU
        uses: docker/setup-qemu-action@v3
      - name: setup docker buildx
        id: buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: docker-container
      - name: expose github runtime for buildx
        uses: crazy-max/ghaction-github-runtime@v3
      - name: show system environs
        run: ./ci/helpers/show_system_versions.bash
      - name: login to Dockerhub
        # v3 keeps this consistent with the other docker/* actions above
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Set deployment variables
        run: |
          if [ "${GITHUB_REF}" == "refs/heads/master" ]; then
            echo "TAG_PREFIX=master-github" >> $GITHUB_ENV
          elif [[ "${GITHUB_REF}" == refs/heads/hotfix_v* ]]; then
            echo "TAG_PREFIX=hotfix-github" >> $GITHUB_ENV
          elif [[ "${GITHUB_REF}" == refs/heads/hotfix_staging_* ]]; then
            echo "TAG_PREFIX=hotfix-staging-github" >> $GITHUB_ENV
          else
            # fail fast instead of pushing an image tagged "-latest-arm64"
            # (possible via workflow_dispatch from an unsupported ref)
            echo "unsupported ref '${GITHUB_REF}': no TAG_PREFIX defined" >&2
            exit 1
          fi
      - name: build & push images for latest tag
        run: |
          export DOCKER_IMAGE_TAG="$TAG_PREFIX-latest-arm64"
          export DOCKER_TARGET_PLATFORMS=linux/arm64
          make build push=true
Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
---
# Fuses the amd64 and arm64 images in the registry into a single
# multi-architecture manifest once the corresponding CI builds finish.
#
# NOTE(review): `workflow_run` triggers when EITHER listed workflow
# completes, not after both have finished — confirm the fusing step
# tolerates the other architecture's image not being ready yet.
name: CI Multi-Architecture Fusing

on:
  workflow_run:
    workflows: ["CI ARM64 Build and Push", "CI"]
    types:
      - completed
    branches:
      - "master"

# Cancel an in-flight run for the same ref when a newer one starts.
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  multi-architecture-fusing:
    # only fuse when the triggering workflow succeeded
    if: ${{ github.event.workflow_run.conclusion == 'success' }}
    timeout-minutes: 60 # intentionally long to allow for slow builds
    runs-on: ubuntu-latest
    strategy:
      matrix:
        os: [ubuntu-22.04]
        python: ["3.11"]
    env:
      # secrets can be set in settings/secrets on github
      DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }}
    steps:
      - uses: actions/checkout@v4
      - name: setup QEMU
        uses: docker/setup-qemu-action@v3
      - name: setup docker buildx
        id: buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: docker-container
      - name: expose github runtime for buildx
        uses: crazy-max/ghaction-github-runtime@v3
      - name: show system environs
        run: ./ci/helpers/show_system_versions.bash
      - name: login to Dockerhub
        # v3 keeps this consistent with the other docker/* actions above
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Set deployment variables
        run: |
          if [ "${GITHUB_REF}" == "refs/heads/master" ]; then
            echo "TAG_PREFIX=master-github" >> $GITHUB_ENV
          elif [[ "${GITHUB_REF}" == refs/heads/hotfix_v* ]]; then
            echo "TAG_PREFIX=hotfix-github" >> $GITHUB_ENV
          elif [[ "${GITHUB_REF}" == refs/heads/hotfix_staging_* ]]; then
            echo "TAG_PREFIX=hotfix-staging-github" >> $GITHUB_ENV
          else
            # fail fast instead of fusing under a malformed "-latest" tag
            echo "unsupported ref '${GITHUB_REF}': no TAG_PREFIX defined" >&2
            exit 1
          fi
      - name: fuse images in the registry for latest tag
        run: |
          export DOCKER_IMAGE_TAG="$TAG_PREFIX-latest"
          make docker-image-fuse SUFFIX=arm64

.pre-commit-config.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ repos:
3030
name: upgrade code
3131
exclude: ^scripts/maintenance/computational-clusters/autoscaled_monitor/cli\.py$ # Optional get replaced and typer does not like it
3232
- repo: https://github.com/hadialqattan/pycln
33-
rev: v2.1.4
33+
rev: v2.5.0
3434
hooks:
3535
- id: pycln
3636
args: [--all, --expand-stars]

Makefile

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -172,8 +172,16 @@ docker buildx bake --allow=fs.read=.. \
172172
,--load\
173173
)\
174174
)\
175+
$(if $(push),\
176+
$(foreach service, $(SERVICES_NAMES_TO_BUILD),\
177+
--set $(service).tags=$(DOCKER_REGISTRY)/$(service):$(DOCKER_IMAGE_TAG) \
178+
) \
179+
$(foreach service, $(SERVICES_NAMES_TO_BUILD),\
180+
--set $(service).output="type=registry$(comma)push=true" \
181+
)\
182+
,) \
175183
$(if $(push),--push,) \
176-
$(if $(push),--file docker-bake.hcl,) --file docker-compose-build.yml $(if $(target),$(target),$(INCLUDED_SERVICES)) \
184+
--file docker-compose-build.yml $(if $(target),$(target),$(INCLUDED_SERVICES)) \
177185
$(if $(findstring -nc,$@),--no-cache,\
178186
$(foreach service, $(SERVICES_NAMES_TO_BUILD),\
179187
--set $(service).cache-to=type=gha$(comma)mode=max$(comma)scope=$(service) \
@@ -859,3 +867,8 @@ release-staging release-prod: .check-on-master-branch ## Helper to create a sta
859867
.PHONY: release-hotfix release-staging-hotfix
860868
release-hotfix release-staging-hotfix: ## Helper to create a hotfix release in Github (usage: make release-hotfix version=1.2.4 git_sha=optional or make release-staging-hotfix name=Sprint version=2)
861869
$(create_github_release_url)
870+
871+
.PHONY: docker-image-fuse
872+
docker-image-fuse:
873+
$(foreach service, $(SERVICES_NAMES_TO_BUILD),\
874+
docker buildx imagetools create --tag $(DOCKER_REGISTRY)/$(service):$(DOCKER_IMAGE_TAG) $(DOCKER_REGISTRY)/$(service):$(DOCKER_IMAGE_TAG)-$(SUFFIX) $(DOCKER_REGISTRY)/$(service):$(DOCKER_IMAGE_TAG);)

packages/aws-library/requirements/_base.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -294,7 +294,7 @@ pyyaml==6.0.2
294294
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
295295
# -c requirements/../../../requirements/constraints.txt
296296
# -r requirements/../../../packages/service-library/requirements/_base.in
297-
redis==5.0.4
297+
redis==5.2.1
298298
# via
299299
# -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
300300
# -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt

packages/pytest-simcore/src/pytest_simcore/redis_service.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
import tenacity
1111
from pytest_mock import MockerFixture
1212
from redis.asyncio import Redis, from_url
13+
from servicelib.redis import _constants as redis_constants
1314
from settings_library.basic_types import PortInt
1415
from settings_library.redis import RedisDatabase, RedisSettings
1516
from tenacity.before_sleep import before_sleep_log
@@ -118,6 +119,4 @@ async def wait_till_redis_responsive(redis_url: URL | str) -> None:
118119
@pytest.fixture
119120
def mock_redis_socket_timeout(mocker: MockerFixture) -> None:
120121
# lowered to allow CI to properly shutdown RedisClientSDK instances
121-
from servicelib import redis
122-
123-
mocker.patch.object(redis, "_DEFAULT_SOCKET_TIMEOUT", timedelta(seconds=1))
122+
mocker.patch.object(redis_constants, "DEFAULT_SOCKET_TIMEOUT", timedelta(seconds=1))

packages/service-library/requirements/_base.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -210,7 +210,7 @@ pyyaml==6.0.2
210210
# -c requirements/../../../packages/settings-library/requirements/../../../requirements/constraints.txt
211211
# -c requirements/../../../requirements/constraints.txt
212212
# -r requirements/_base.in
213-
redis==5.0.4
213+
redis==5.2.1
214214
# via
215215
# -c requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
216216
# -c requirements/../../../packages/models-library/requirements/../../../packages/common-library/requirements/../../../requirements/constraints.txt
Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
"""Public interface of the ``servicelib.redis`` package.

Re-exports the Redis client SDK, its manager, the distributed-lock
helpers and the package's error types from their private submodules.
"""

from ._client import RedisClientSDK
from ._clients_manager import RedisClientsManager
from ._decorators import exclusive
from ._distributed_locks_utils import start_exclusive_periodic_task
from ._errors import CouldNotAcquireLockError, CouldNotConnectToRedisError, LockLostError
from ._models import RedisManagerDBConfig
from ._utils import handle_redis_returns_union_types

__all__: tuple[str, ...] = (
    "CouldNotAcquireLockError",
    "CouldNotConnectToRedisError",
    "exclusive",
    "handle_redis_returns_union_types",
    "LockLostError",
    "RedisClientSDK",
    "RedisClientsManager",
    "RedisManagerDBConfig",
    "start_exclusive_periodic_task",
)

# nopycln: file

packages/service-library/src/servicelib/redis.py renamed to packages/service-library/src/servicelib/redis/_client.py

Lines changed: 19 additions & 100 deletions
Original file line numberDiff line numberDiff line change
@@ -5,65 +5,38 @@
55
from asyncio import Task
66
from collections.abc import AsyncIterator
77
from dataclasses import dataclass, field
8-
from typing import Final
98
from uuid import uuid4
109

1110
import redis.asyncio as aioredis
1211
import redis.exceptions
13-
from common_library.errors_classes import OsparcErrorMixin
14-
from pydantic import NonNegativeFloat, NonNegativeInt
12+
from pydantic import NonNegativeFloat
1513
from redis.asyncio.lock import Lock
1614
from redis.asyncio.retry import Retry
1715
from redis.backoff import ExponentialBackoff
18-
from settings_library.redis import RedisDatabase, RedisSettings
1916
from tenacity import retry
2017
from yarl import URL
2118

22-
from .background_task import periodic_task
23-
from .logging_utils import log_catch, log_context
24-
from .retry_policies import RedisRetryPolicyUponInitialization
25-
26-
_DEFAULT_LOCK_TTL: Final[datetime.timedelta] = datetime.timedelta(seconds=10)
27-
_DEFAULT_SOCKET_TIMEOUT: Final[datetime.timedelta] = datetime.timedelta(seconds=30)
28-
29-
30-
_DEFAULT_DECODE_RESPONSES: Final[bool] = True
31-
_DEFAULT_HEALTH_CHECK_INTERVAL: Final[datetime.timedelta] = datetime.timedelta(
32-
seconds=5
19+
from ..background_task import periodic_task
20+
from ..logging_utils import log_catch
21+
from ..retry_policies import RedisRetryPolicyUponInitialization
22+
from ._constants import (
23+
DEFAULT_DECODE_RESPONSES,
24+
DEFAULT_HEALTH_CHECK_INTERVAL,
25+
DEFAULT_LOCK_TTL,
26+
DEFAULT_SOCKET_TIMEOUT,
3327
)
34-
_SHUTDOWN_TIMEOUT_S: Final[NonNegativeInt] = 5
35-
28+
from ._errors import CouldNotAcquireLockError, CouldNotConnectToRedisError
29+
from ._utils import auto_extend_lock, cancel_or_warn
3630

3731
_logger = logging.getLogger(__name__)
3832

3933

40-
class BaseRedisError(OsparcErrorMixin, RuntimeError):
41-
...
42-
43-
44-
class CouldNotAcquireLockError(BaseRedisError):
45-
msg_template: str = "Lock {lock.name} could not be acquired!"
46-
47-
48-
class CouldNotConnectToRedisError(BaseRedisError):
49-
msg_template: str = "Connection to '{dsn}' failed"
50-
51-
52-
async def _cancel_or_warn(task: Task) -> None:
53-
if not task.cancelled():
54-
task.cancel()
55-
_, pending = await asyncio.wait((task,), timeout=_SHUTDOWN_TIMEOUT_S)
56-
if pending:
57-
task_name = task.get_name()
58-
_logger.warning("Could not cancel task_name=%s pending=%s", task_name, pending)
59-
60-
6134
@dataclass
6235
class RedisClientSDK:
6336
redis_dsn: str
6437
client_name: str
65-
decode_responses: bool = _DEFAULT_DECODE_RESPONSES
66-
health_check_interval: datetime.timedelta = _DEFAULT_HEALTH_CHECK_INTERVAL
38+
decode_responses: bool = DEFAULT_DECODE_RESPONSES
39+
health_check_interval: datetime.timedelta = DEFAULT_HEALTH_CHECK_INTERVAL
6740

6841
_client: aioredis.Redis = field(init=False)
6942
_health_check_task: Task | None = None
@@ -74,7 +47,7 @@ class RedisClientSDK:
7447
def redis(self) -> aioredis.Redis:
7548
return self._client
7649

77-
def __post_init__(self):
50+
def __post_init__(self) -> None:
7851
self._client = aioredis.from_url(
7952
self.redis_dsn,
8053
# Run 3 retries with exponential backoff strategy source: https://redis.readthedocs.io/en/stable/backoff.html
@@ -84,8 +57,8 @@ def __post_init__(self):
8457
redis.exceptions.ConnectionError,
8558
redis.exceptions.TimeoutError,
8659
],
87-
socket_timeout=_DEFAULT_SOCKET_TIMEOUT.total_seconds(),
88-
socket_connect_timeout=_DEFAULT_SOCKET_TIMEOUT.total_seconds(),
60+
socket_timeout=DEFAULT_SOCKET_TIMEOUT.total_seconds(),
61+
socket_connect_timeout=DEFAULT_SOCKET_TIMEOUT.total_seconds(),
8962
encoding="utf-8",
9063
decode_responses=self.decode_responses,
9164
client_name=self.client_name,
@@ -113,7 +86,7 @@ async def setup(self) -> None:
11386
async def shutdown(self) -> None:
11487
if self._health_check_task:
11588
self._continue_health_checking = False
116-
await _cancel_or_warn(self._health_check_task)
89+
await cancel_or_warn(self._health_check_task)
11790
self._health_check_task = None
11891

11992
await self._client.aclose(close_connection_pool=True)
@@ -165,7 +138,7 @@ async def lock_context(
165138
2. `blocking==True` timeouts out while waiting for lock to be free (another entity holds the lock)
166139
"""
167140

168-
total_lock_duration: datetime.timedelta = _DEFAULT_LOCK_TTL
141+
total_lock_duration: datetime.timedelta = DEFAULT_LOCK_TTL
169142
lock_unique_id = f"lock_extender_{lock_key}_{uuid4()}"
170143

171144
ttl_lock: Lock = self._client.lock(
@@ -178,15 +151,9 @@ async def lock_context(
178151
if not await ttl_lock.acquire(token=lock_value):
179152
raise CouldNotAcquireLockError(lock=ttl_lock)
180153

181-
async def _extend_lock(lock: Lock) -> None:
182-
with log_context(
183-
_logger, logging.DEBUG, f"Extending lock {lock_unique_id}"
184-
), log_catch(_logger, reraise=False):
185-
await lock.reacquire()
186-
187154
try:
188155
async with periodic_task(
189-
_extend_lock,
156+
auto_extend_lock,
190157
interval=total_lock_duration / 2,
191158
task_name=lock_unique_id,
192159
lock=ttl_lock,
@@ -224,51 +191,3 @@ async def _extend_lock(lock: Lock) -> None:
224191
async def lock_value(self, lock_name: str) -> str | None:
225192
output: str | None = await self._client.get(lock_name)
226193
return output
227-
228-
229-
@dataclass(frozen=True)
230-
class RedisManagerDBConfig:
231-
database: RedisDatabase
232-
decode_responses: bool = _DEFAULT_DECODE_RESPONSES
233-
health_check_interval: datetime.timedelta = _DEFAULT_HEALTH_CHECK_INTERVAL
234-
235-
236-
@dataclass
237-
class RedisClientsManager:
238-
"""
239-
Manages the lifetime of redis client sdk connections
240-
"""
241-
242-
databases_configs: set[RedisManagerDBConfig]
243-
settings: RedisSettings
244-
client_name: str
245-
246-
_client_sdks: dict[RedisDatabase, RedisClientSDK] = field(default_factory=dict)
247-
248-
async def setup(self) -> None:
249-
for config in self.databases_configs:
250-
self._client_sdks[config.database] = RedisClientSDK(
251-
redis_dsn=self.settings.build_redis_dsn(config.database),
252-
decode_responses=config.decode_responses,
253-
health_check_interval=config.health_check_interval,
254-
client_name=f"{self.client_name}",
255-
)
256-
257-
for client in self._client_sdks.values():
258-
await client.setup()
259-
260-
async def shutdown(self) -> None:
261-
# NOTE: somehow using logged_gather is not an option
262-
# doing so will make the shutdown procedure hang
263-
for client in self._client_sdks.values():
264-
await client.shutdown()
265-
266-
def client(self, database: RedisDatabase) -> RedisClientSDK:
267-
return self._client_sdks[database]
268-
269-
async def __aenter__(self) -> "RedisClientsManager":
270-
await self.setup()
271-
return self
272-
273-
async def __aexit__(self, *args):
274-
await self.shutdown()

0 commit comments

Comments
 (0)