Skip to content

Commit 0e9a59a

Browse files
GitHK and Andrei Neagu authored
✨CEPH/S3 additions (⚠️ devops) (ITISFoundation#2818)
* added filestash as s3 visualizer/editor * trying to use proper bucket * correctly reading env vars * fixed protocol * pinning versions * setting vfs to minimal * @pcrespov review changes * fixed makefile * random secret for filestash * reverting changes * remove useless file * warning if no volumes found * adding settings for vfs cache mode * trying to fix test flakyness * warns when volumes still need to be removed * fixtures now process docker-compose ops * fixed catalog test * reverting makefile changes * name refactor & reverting changes * trying to make multiple deployment safe * making command safer * debug test case * revert change back Co-authored-by: Andrei Neagu <[email protected]>
1 parent 19b4fbf commit 0e9a59a

File tree

15 files changed

+296
-21
lines changed

15 files changed

+296
-21
lines changed

Makefile

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -195,16 +195,27 @@ CPU_COUNT = $(shell cat /proc/cpuinfo | grep processor | wc -l )
195195
@docker-compose --env-file .env --file services/docker-compose.yml --file services/docker-compose.local.yml --log-level=ERROR config > $@
196196

197197
.stack-ops.yml: .env $(docker-compose-configs)
198+
# Compiling config file for filestash
199+
$(eval TMP_PATH_TO_FILESTASH_CONFIG=$(shell set -o allexport; \
200+
source $(CURDIR)/.env; \
201+
set +o allexport; \
202+
python3 scripts/filestash/create_config.py))
198203
# Creating config for ops stack to $@
199-
@docker-compose --env-file .env --file services/docker-compose-ops.yml --log-level=ERROR config > $@
204+
# -> filestash config at $(TMP_PATH_TO_FILESTASH_CONFIG)
205+
@$(shell \
206+
export TMP_PATH_TO_FILESTASH_CONFIG="${TMP_PATH_TO_FILESTASH_CONFIG}" && \
207+
docker-compose --env-file .env --file services/docker-compose-ops.yml --log-level=DEBUG config > $@ \
208+
)
209+
200210

201211

202212
.PHONY: up-devel up-prod up-version up-latest .deploy-ops
203213

204214
.deploy-ops: .stack-ops.yml
205215
# Deploy stack 'ops'
206216
ifndef ops_disabled
207-
@docker stack deploy --with-registry-auth -c $< ops
217+
# -> filestash config at $(TMP_PATH_TO_FILESTASH_CONFIG)
218+
docker stack deploy --with-registry-auth -c $< ops
208219
else
209220
@echo "Explicitly disabled with ops_disabled flag in CLI"
210221
endif

packages/pytest-simcore/src/pytest_simcore/docker_compose.py

Lines changed: 41 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
import json
1212
import os
1313
import shutil
14+
import subprocess
1415
import sys
1516
from copy import deepcopy
1617
from pathlib import Path
@@ -19,7 +20,8 @@
1920
import pytest
2021
import yaml
2122
from _pytest.config import ExitCode
22-
from dotenv import dotenv_values
23+
from _pytest.monkeypatch import MonkeyPatch
24+
from dotenv import dotenv_values, set_key
2325

2426
from .helpers import (
2527
FIXTURE_CONFIG_CORE_SERVICES_SELECTION,
@@ -154,9 +156,46 @@ def simcore_docker_compose(
154156
return config
155157

156158

159+
@pytest.fixture(scope="module")
def inject_filestash_config_path(
    osparc_simcore_scripts_dir: Path,
    monkeypatch_module: MonkeyPatch,
    env_file_for_testing: Path,
) -> None:
    """Generates the filestash configuration and exposes its path as
    TMP_PATH_TO_FILESTASH_CONFIG, both in the .env file and in the test
    process environment, as docker-compose-ops.yml expects.
    """
    create_filestash_config_py = (
        osparc_simcore_scripts_dir / "filestash" / "create_config.py"
    )

    # ensures .env at git_root_dir, which will be used as current directory
    assert env_file_for_testing.exists()
    env_values = dotenv_values(env_file_for_testing)

    # sys.executable (instead of a bare "python3") guarantees the script runs
    # with the same interpreter/virtualenv as the tests; NOTE: `env` replaces
    # the environment entirely, so the script only sees the .env values
    process = subprocess.run(
        [sys.executable, f"{create_filestash_config_py}"],
        shell=False,
        check=True,
        stdout=subprocess.PIPE,
        env=env_values,
    )
    # the script prints the path of the generated config file on stdout
    filestash_config_json_path = Path(process.stdout.decode("utf-8").strip())
    assert filestash_config_json_path.exists()

    # persist for docker-compose/make consumers and for the current process
    set_key(
        env_file_for_testing,
        "TMP_PATH_TO_FILESTASH_CONFIG",
        f"{filestash_config_json_path}",
    )
    monkeypatch_module.setenv(
        "TMP_PATH_TO_FILESTASH_CONFIG", f"{filestash_config_json_path}"
    )
157193
@pytest.fixture(scope="module")
158194
def ops_docker_compose(
159-
osparc_simcore_root_dir: Path, env_file_for_testing: Path, temp_folder: Path
195+
osparc_simcore_root_dir: Path,
196+
env_file_for_testing: Path,
197+
temp_folder: Path,
198+
inject_filestash_config_path: None,
160199
) -> Dict[str, Any]:
161200
"""Filters only services in docker-compose-ops.yml and returns yaml data
162201

scripts/filestash/create_config.py

Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
"""
2+
Parses the configuration template and injects it where the platform expects it
3+
Notes:
4+
- Admin credentials for filestash are admin:adminadmin
5+
- $ must be escaped with $$ in the template file
6+
"""
7+
8+
import os
9+
import random
10+
import string
11+
import tempfile
12+
13+
from distutils.util import strtobool
14+
from pathlib import Path
15+
from string import Template
16+
17+
SCRIPT_DIR = Path(__file__).resolve().parent
18+
TEMPLATE_PATH = SCRIPT_DIR / "filestash_config.json.template"
19+
CONFIG_JSON = Path(tempfile.mkdtemp()) / "filestash_config.json"
20+
21+
22+
def random_secret_key(length: int = 16) -> str:
23+
return "".join(random.choice(string.ascii_letters) for _ in range(length))
24+
25+
26+
def patch_env_vars() -> None:
27+
endpoint = os.environ["S3_ENDPOINT"]
28+
if not endpoint.startswith("http"):
29+
protocol = "https" if strtobool(os.environ["S3_SECURE"].lower()) else "http"
30+
endpoint = f"{protocol}://{endpoint}"
31+
32+
os.environ["S3_ENDPOINT"] = endpoint
33+
34+
os.environ["REPLACE_SECRET_KEY"] = random_secret_key()
35+
36+
37+
def main() -> None:
38+
patch_env_vars()
39+
40+
assert TEMPLATE_PATH.exists()
41+
42+
template_content = TEMPLATE_PATH.read_text()
43+
44+
config_json = Template(template_content).substitute(os.environ)
45+
46+
assert CONFIG_JSON.parent.exists()
47+
CONFIG_JSON.write_text(config_json)
48+
49+
# path of configuration file is exported as env var
50+
print(f"{CONFIG_JSON}")
51+
52+
53+
if __name__ == "__main__":
54+
main()
Lines changed: 90 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,90 @@
1+
{
2+
"general": {
3+
"name": null,
4+
"port": null,
5+
"host": null,
6+
"secret_key": "$REPLACE_SECRET_KEY",
7+
"force_ssl": null,
8+
"editor": null,
9+
"fork_button": null,
10+
"logout": null,
11+
"display_hidden": null,
12+
"refresh_after_upload": null,
13+
"auto_connect": null,
14+
"upload_button": null,
15+
"upload_pool_size": null,
16+
"filepage_default_view": "list",
17+
"filepage_default_sort": "date",
18+
"cookie_timeout": null,
19+
"custom_css": null
20+
},
21+
"features": {
22+
"share": {
23+
"enable": null,
24+
"default_access": null,
25+
"redirect": null
26+
},
27+
"protection": {
28+
"zip_timeout": null,
29+
"enable": null,
30+
"disable_svg": null
31+
},
32+
"office": {
33+
"enable": null,
34+
"onlyoffice_server": null
35+
},
36+
"server": {
37+
"console_enable": null,
38+
"tor_enable": null,
39+
"tor_url": null
40+
},
41+
"syncthing": {
42+
"enable": null,
43+
"server_url": null
44+
},
45+
"image": {
46+
"enable_image": null,
47+
"thumbnail_size": null,
48+
"thumbnail_quality": null,
49+
"thumbnail_caching": null,
50+
"image_quality": null,
51+
"image_caching": null
52+
},
53+
"search": {
54+
"explore_timeout": null
55+
},
56+
"video": {
57+
"blacklist_format": null,
58+
"enable_transcoder": null
59+
}
60+
},
61+
"log": {
62+
"enable": null,
63+
"level": null,
64+
"telemetry": null
65+
},
66+
"email": {
67+
"server": null,
68+
"port": null,
69+
"username": null,
70+
"password": null,
71+
"from": null
72+
},
73+
"auth": {
74+
"admin": "$$2a$$10$$viR16hXd35bAaEJFEgpd9OqIzNBb/VoIsgQ8P3SjKxolpEQEHltrW"
75+
},
76+
"constant": {
77+
"user": "filestash",
78+
"emacs": true,
79+
"pdftotext": true
80+
},
81+
"connections": [
82+
{
83+
"label": "S3",
84+
"type": "s3",
85+
"advanced": true,
86+
"path": "$S3_BUCKET_NAME",
87+
"endpoint": "$S3_ENDPOINT"
88+
}
89+
]
90+
}

services/catalog/tests/unit/conftest.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
"pytest_simcore.docker_compose",
1515
"pytest_simcore.docker_registry",
1616
"pytest_simcore.docker_swarm",
17+
"pytest_simcore.monkeypatch_extra",
1718
"pytest_simcore.postgres_service",
1819
"pytest_simcore.pydantic_models",
1920
"pytest_simcore.repository_paths",

services/director-v2/src/simcore_service_director_v2/core/settings.py

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -49,6 +49,13 @@ class S3Provider(str, Enum):
4949
MINIO = "MINIO"
5050

5151

52+
class VFSCacheMode(str, Enum):
53+
OFF = "off"
54+
MINIMAL = "minimal"
55+
WRITES = "writes"
56+
FILL = "full"
57+
58+
5259
class RCloneSettings(S3Settings):
5360
R_CLONE_S3_PROVIDER: S3Provider
5461

@@ -60,6 +67,10 @@ class RCloneSettings(S3Settings):
6067
9,
6168
description="time to wait between polling for changes",
6269
)
70+
R_CLONE_VFS_CACHE_MODE: VFSCacheMode = Field(
71+
VFSCacheMode.MINIMAL,
72+
description="used primarly for easy testing without requiring requiring code changes",
73+
)
6374

6475
@validator("R_CLONE_POLL_INTERVAL_SECONDS")
6576
@classmethod
Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
from models_library.projects_nodes import NodeID
2+
3+
from ...models.schemas.constants import DYNAMIC_SIDECAR_SERVICE_PREFIX
4+
5+
6+
def get_compose_namespace(node_uuid: NodeID) -> str:
7+
# To avoid collisions for started docker resources a unique identifier is computed:
8+
# - avoids container level collisions on same node
9+
# - avoids volume level collisions on same node
10+
return f"{DYNAMIC_SIDECAR_SERVICE_PREFIX}_{node_uuid}"

services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api.py

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -370,18 +370,24 @@ async def remove_dynamic_sidecar_network(network_name: str) -> bool:
370370
return False
371371

372372

async def remove_dynamic_sidecar_volumes(node_uuid: NodeID) -> Set[str]:
    """Removes all docker volumes labeled with the node's uuid.

    Returns the names of the volumes that were removed
    (empty set when none were found).
    """
    async with docker_client() as client:
        volumes_response = await client.volumes.list(
            filters={"label": f"uuid={node_uuid}"}
        )
        volumes = volumes_response["Volumes"]
        log.debug("Removing volumes: %s", [v["Name"] for v in volumes])
        # idiomatic emptiness check; a warning (not an error) since the
        # service may legitimately have left nothing behind
        if not volumes:
            log.warning("Expected to find at least 1 volume to remove, 0 were found")

        removed_volumes: Set[str] = set()

        for volume_data in volumes:
            volume = await client.volumes.get(volume_data["Name"])
            await volume.delete()
            removed_volumes.add(volume_data["Name"])

        return removed_volumes
385391

386392

387393
async def list_dynamic_sidecar_services(

services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_service_specs/sidecar.py

Lines changed: 2 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,8 @@
55
from servicelib.json_serialization import json_dumps
66

77
from ....core.settings import AppSettings, DynamicSidecarSettings
8-
from ....models.schemas.constants import DYNAMIC_SIDECAR_SERVICE_PREFIX
98
from ....models.schemas.dynamic_services import SchedulerData, ServiceType
9+
from .._namepsace import get_compose_namespace
1010
from ..volumes_resolver import DynamicSidecarVolumesPathsResolver
1111
from .settings import inject_settings_to_create_service_params
1212

@@ -75,10 +75,7 @@ def get_dynamic_sidecar_spec(
7575
of the dynamic service. The director-v2 directly coordinates with
7676
the dynamic-sidecar for this purpose.
7777
"""
78-
# To avoid collisions for started docker resources a unique identifier is computed:
79-
# - avoids container level collisions on same node
80-
# - avoids volume level collisions on same node
81-
compose_namespace = f"{DYNAMIC_SIDECAR_SERVICE_PREFIX}_{scheduler_data.node_uuid}"
78+
compose_namespace = get_compose_namespace(scheduler_data.node_uuid)
8279

8380
mounts = [
8481
# docker socket needed to use the docker api

0 commit comments

Comments
 (0)