Skip to content

Commit 30a02c5

Browse files
use_multi_tenancy_endpoints: use datastore endpoint for directory (#207)
1 parent 3205aee commit 30a02c5

File tree

4 files changed: +91 additions, −11 deletions

job_executor/adapter/datastore_api/__init__.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@
88
from urllib3 import Retry
99

1010
from job_executor.adapter.datastore_api.models import (
11+
DatastoreResponse,
1112
Job,
1213
JobQueryResult,
1314
JobStatus,
@@ -72,10 +73,7 @@ def get_jobs(
7273
request_url += f"?{'&'.join(query_fields)}"
7374

7475
response = execute_request("GET", request_url, True)
75-
return [
76-
Job.model_validate({"datastoreRdn": environment.datastore_rdn, **job})
77-
for job in response.json()
78-
]
76+
return [Job.model_validate(job) for job in response.json()]
7977

8078

8179
def update_job_status(
@@ -108,7 +106,9 @@ def is_system_paused() -> bool:
108106

109107

110108
def get_datastore_directory(rdn: str) -> Path:
111-
return Path(environment.datastore_dir)
109+
request_url = f"{DATASTORE_API_URL}/datastores/{rdn}"
110+
response = execute_request("GET", request_url, True)
111+
return Path(DatastoreResponse.model_validate(response.json()).directory)
112112

113113

114114
def query_for_jobs() -> JobQueryResult:

job_executor/adapter/datastore_api/models.py

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,15 @@
99
from job_executor.common.models import CamelModel
1010

1111

12+
class DatastoreResponse(CamelModel):
13+
datastore_id: int
14+
name: str
15+
rdn: str
16+
description: str
17+
directory: str
18+
bump_enabled: bool
19+
20+
1221
class MaintenanceStatus(CamelModel):
1322
paused: bool
1423
msg: str

tests/unit/worker/test_build_dataset_worker.py

Lines changed: 54 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -91,6 +91,11 @@
9191
}
9292

9393
EXPECTED_REQUESTS_PARTITIONED = [
94+
{
95+
"json": None,
96+
"method": "GET",
97+
"url": f"{DATASTORE_API_URL}/datastores/{DATASTORE_RDN}",
98+
},
9499
{
95100
"json": {"status": "decrypting"},
96101
"method": "PUT",
@@ -186,6 +191,11 @@
186191
]
187192

188193
EXPECTED_REQUESTS_IMPORT = [
194+
{
195+
"json": None,
196+
"method": "GET",
197+
"url": f"{DATASTORE_API_URL}/datastores/{DATASTORE_RDN}",
198+
},
189199
{
190200
"json": {"status": "decrypting"},
191201
"method": "PUT",
@@ -247,6 +257,14 @@
247257
and "mock.pseudonym.service" not in request["url"]
248258
]
249259
JOB = SimpleNamespace(job_id=JOB_ID, datastore_rdn=DATASTORE_RDN)
260+
DATASTORE_RESPONSE = {
261+
"datastore_id": 1,
262+
"rdn": DATASTORE_RDN,
263+
"name": "",
264+
"description": "",
265+
"bump_enabled": True,
266+
"directory": DATASTORE_DIR,
267+
}
250268

251269

252270
def setup_function():
@@ -273,6 +291,10 @@ def test_build_partitioned_dataset(requests_mock: RequestsMocker):
273291
requests_mock.put(
274292
f"{DATASTORE_API_URL}/jobs/{JOB_ID}", json={"message": "OK"}
275293
)
294+
requests_mock.get(
295+
f"{DATASTORE_API_URL}/datastores/{DATASTORE_RDN}",
296+
json=DATASTORE_RESPONSE,
297+
)
276298
requests_mock.post(
277299
f"{PSEUDONYM_SERVICE_URL}?unit_id_type=FNR&job_id={JOB_ID}",
278300
json=PSEUDONYM_DICT,
@@ -284,7 +306,11 @@ def test_build_partitioned_dataset(requests_mock: RequestsMocker):
284306
f"{WORKING_DIR}/{PARTITIONED_DATASET_NAME}__DRAFT.json"
285307
)
286308
requests_made = [
287-
{"method": req.method, "json": req.json(), "url": req.url}
309+
{
310+
"method": req.method,
311+
"json": req.json() if req.method != "GET" else None,
312+
"url": req.url,
313+
}
288314
for req in requests_mock.request_history
289315
]
290316
assert len(requests_made) == len(EXPECTED_REQUESTS_PARTITIONED)
@@ -298,6 +324,10 @@ def test_import(requests_mock: RequestsMocker):
298324
requests_mock.put(
299325
f"{DATASTORE_API_URL}/jobs/{JOB_ID}", json={"message": "OK"}
300326
)
327+
requests_mock.get(
328+
f"{DATASTORE_API_URL}/datastores/{DATASTORE_RDN}",
329+
json=DATASTORE_RESPONSE,
330+
)
301331
requests_mock.post(
302332
f"{PSEUDONYM_SERVICE_URL}?unit_id_type=FNR&job_id={JOB_ID}",
303333
json=PSEUDONYM_DICT,
@@ -315,7 +345,11 @@ def test_import(requests_mock: RequestsMocker):
315345
Path(INPUT_DIR_ARCHIVE) / f"unpackaged/{DATASET_NAME}.tar"
316346
).exists()
317347
requests_made = [
318-
{"method": req.method, "json": req.json(), "url": req.url}
348+
{
349+
"method": req.method,
350+
"json": req.json() if req.method != "GET" else None,
351+
"url": req.url,
352+
}
319353
for req in requests_mock.request_history
320354
]
321355
assert len(requests_made) == len(EXPECTED_REQUESTS_IMPORT)
@@ -329,6 +363,10 @@ def test_import_no_pseudonymization(requests_mock: RequestsMocker):
329363
requests_mock.put(
330364
f"{DATASTORE_API_URL}/jobs/{JOB_ID}", json={"message": "OK"}
331365
)
366+
requests_mock.get(
367+
f"{DATASTORE_API_URL}/datastores/{DATASTORE_RDN}",
368+
json=DATASTORE_RESPONSE,
369+
)
332370
run_worker(JOB, NO_PSEUDONYM_DATASET_NAME, Queue()) # type: ignore
333371
assert not os.path.exists(f"{INPUT_DIR}/{NO_PSEUDONYM_DATASET_NAME}.tar")
334372
assert not os.path.exists(f"{INPUT_DIR}/{NO_PSEUDONYM_DATASET_NAME}")
@@ -343,7 +381,11 @@ def test_import_no_pseudonymization(requests_mock: RequestsMocker):
343381
Path(INPUT_DIR_ARCHIVE) / f"unpackaged/{NO_PSEUDONYM_DATASET_NAME}.tar"
344382
).exists()
345383
requests_made = [
346-
{"method": req.method, "json": req.json(), "url": req.url}
384+
{
385+
"method": req.method,
386+
"json": req.json() if req.method != "GET" else None,
387+
"url": req.url,
388+
}
347389
for req in requests_mock.request_history
348390
]
349391
assert len(requests_made) == len(EXPECTED_REQUESTS_IMPORT_NO_PSEUDONYM)
@@ -359,6 +401,10 @@ def test_import_no_pseudonymization_no_partitioning(
359401
requests_mock.put(
360402
f"{DATASTORE_API_URL}/jobs/{JOB_ID}", json={"message": "OK"}
361403
)
404+
requests_mock.get(
405+
f"{DATASTORE_API_URL}/datastores/{DATASTORE_RDN}",
406+
json=DATASTORE_RESPONSE,
407+
)
362408
run_worker(JOB, NO_PSEUDONYM_FIXED_DATASET_NAME, Queue()) # type: ignore
363409
assert not os.path.exists(
364410
f"{INPUT_DIR}/{NO_PSEUDONYM_FIXED_DATASET_NAME}.tar"
@@ -384,7 +430,11 @@ def test_import_no_pseudonymization_no_partitioning(
384430
/ f"unpackaged/{NO_PSEUDONYM_FIXED_DATASET_NAME}.tar"
385431
).exists()
386432
requests_made = [
387-
{"method": req.method, "json": req.json(), "url": req.url}
433+
{
434+
"method": req.method,
435+
"json": req.json() if req.method != "GET" else None,
436+
"url": req.url,
437+
}
388438
for req in requests_mock.request_history
389439
]
390440
assert len(requests_made) == len(EXPECTED_REQUESTS_IMPORT_NO_PSEUDONYM)

tests/unit/worker/test_build_metadata_worker.py

Lines changed: 23 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@
1515
RSA_KEYS_DIRECTORY = Path(environment.datastore_dir) / "vault"
1616

1717
DATASET_NAME = "KJOENN"
18+
DATASTORE_RDN = os.environ["DATASTORE_RDN"]
1819
JOB_ID = "1234-1234-1234-1234"
1920
DATASTORE_DIR = os.environ["DATASTORE_DIR"]
2021
WORKING_DIR = DATASTORE_DIR + "_working"
@@ -23,6 +24,11 @@
2324
EXPECTED_DIR = "tests/resources/worker/build_metadata/expected"
2425
DATASTORE_API_URL = os.environ["DATASTORE_API_URL"]
2526
EXPECTED_REQUESTS = [
27+
{
28+
"json": None,
29+
"method": "GET",
30+
"url": f"{DATASTORE_API_URL}/datastores/{DATASTORE_RDN}",
31+
},
2632
{
2733
"json": {"status": "decrypting"},
2834
"method": "PUT",
@@ -49,8 +55,15 @@
4955
"url": f"{DATASTORE_API_URL}/jobs/{JOB_ID}",
5056
},
5157
]
52-
DATASTORE_RDN = os.environ["DATASTORE_RDN"]
5358
JOB = SimpleNamespace(job_id=JOB_ID, datastore_rdn=DATASTORE_RDN)
59+
DATASTORE_RESPONSE = {
60+
"datastore_id": 1,
61+
"rdn": DATASTORE_RDN,
62+
"name": "",
63+
"description": "",
64+
"bump_enabled": True,
65+
"directory": DATASTORE_DIR,
66+
}
5467

5568

5669
def setup_function():
@@ -77,6 +90,10 @@ def test_import(requests_mock: RequestsMocker):
7790
requests_mock.put(
7891
f"{DATASTORE_API_URL}/jobs/{JOB_ID}", json={"message": "OK"}
7992
)
93+
requests_mock.get(
94+
f"{DATASTORE_API_URL}/datastores/{DATASTORE_RDN}",
95+
json=DATASTORE_RESPONSE,
96+
)
8097
run_worker(JOB, DATASET_NAME, Queue()) # type: ignore
8198
with open(
8299
f"{WORKING_DIR}/{DATASET_NAME}__DRAFT.json", "r", encoding="utf-8"
@@ -89,7 +106,11 @@ def test_import(requests_mock: RequestsMocker):
89106

90107
assert actual_metadata == expected_metadata
91108
requests_made = [
92-
{"method": req.method, "json": req.json(), "url": req.url}
109+
{
110+
"method": req.method,
111+
"json": req.json() if req.method != "GET" else None,
112+
"url": req.url,
113+
}
93114
for req in requests_mock.request_history
94115
]
95116
assert requests_made == EXPECTED_REQUESTS

Comments (0)