Skip to content

Commit 70e2590

Browse files
Reintroduce unit tests (#214)
* reintroduce_unit_tests: adapter tests * reintroduce_unit_tests: config tests * reintroduce_unit_tests: manager tests * reintroduce_unit_tests: worker tests * reintroduce_unit_tests: fix resources * reintroduce_unit_tests: fixes manager test * reintroduce_unit_tests: fixes * reintroduce_unit_tests: missing gitkeep * reintroduce_unit_tests: fix imports * reintroduce_unit_tests: test fixes
1 parent 1eb4544 commit 70e2590

37 files changed

+3878
-0
lines changed
Lines changed: 181 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,181 @@
1+
import os
2+
3+
import pytest
4+
from requests_mock import Mocker as RequestsMocker
5+
6+
from job_executor.adapter import datastore_api
7+
from job_executor.adapter.datastore_api.models import (
8+
Job,
9+
JobParameters,
10+
JobStatus,
11+
Operation,
12+
ReleaseStatus,
13+
UserInfo,
14+
)
15+
from job_executor.common.exceptions import HttpResponseError
16+
17+
# --- Shared fixtures for the datastore API adapter tests ---
DATASTORE_API_URL = os.environ["DATASTORE_API_URL"]
DATASTORE_RDN = os.environ["DATASTORE_RDN"]
JOB_ID = "123"
# Two queued jobs: one CHANGE operation and one SET_STATUS operation.
JOB_LIST = [
    Job(
        job_id=JOB_ID,
        datastore_rdn=DATASTORE_RDN,
        status=JobStatus.QUEUED,
        parameters=JobParameters(target="INNTEKT", operation=Operation.CHANGE),
        log=[],
        created_at="2022-05-18T11:40:22.519222",
        created_by=UserInfo(
            user_id="123-123-123", first_name="Data", last_name="Admin"
        ),
    ),
    Job(
        job_id=JOB_ID,
        datastore_rdn=DATASTORE_RDN,
        status=JobStatus.QUEUED,
        parameters=JobParameters(
            operation=Operation.SET_STATUS,
            target="KJOENN",
            release_status=ReleaseStatus.PENDING_RELEASE,
        ),
        log=[],
        created_at="2022-05-18T11:40:22.519222",
        created_by=UserInfo(
            user_id="123-123-123", first_name="Data", last_name="Admin"
        ),
    ),
]
LOG_MESSAGE = "log message"
DESCRIPTION = "new description"
ERROR_RESPONSE = "Internal Server Error"
51+
52+
53+
def test_get_jobs(requests_mock: RequestsMocker):
    """get_jobs deserializes the job list returned by the API."""
    payload = [
        job.model_dump(by_alias=True, exclude_none=True) for job in JOB_LIST
    ]
    requests_mock.get(f"{DATASTORE_API_URL}/jobs", json=payload)

    assert datastore_api.get_jobs() == JOB_LIST
    # Exactly one HTTP call should have been made.
    assert len(requests_mock.request_history) == 1
63+
64+
65+
def test_update_job_status(requests_mock: RequestsMocker):
    """update_job_status PUTs the status, plus the log message when given."""
    requests_mock.put(
        f"{DATASTORE_API_URL}/jobs/{JOB_ID}", json={"message": "OK"}
    )

    datastore_api.update_job_status(JOB_ID, JobStatus.QUEUED)
    datastore_api.update_job_status(JOB_ID, JobStatus.QUEUED, LOG_MESSAGE)

    history = requests_mock.request_history
    assert len(history) == 2
    first, second = history
    assert first.json() == {"status": "queued"}
    assert second.json() == {"status": "queued", "log": LOG_MESSAGE}
78+
79+
80+
def test_update_description(requests_mock: RequestsMocker):
    """update_description PUTs the new description for the job."""
    requests_mock.put(
        f"{DATASTORE_API_URL}/jobs/{JOB_ID}", json={"message": "OK"}
    )

    datastore_api.update_description(JOB_ID, DESCRIPTION)

    history = requests_mock.request_history
    assert len(history) == 1
    assert history[0].json() == {"description": DESCRIPTION}
88+
89+
90+
def test_no_connection(requests_mock: RequestsMocker):
    """Every adapter call raises HttpResponseError on a 500 response."""
    requests_mock.get(
        f"{DATASTORE_API_URL}/jobs", status_code=500, text=ERROR_RESPONSE
    )
    requests_mock.put(
        f"{DATASTORE_API_URL}/jobs/{JOB_ID}",
        status_code=500,
        text=ERROR_RESPONSE,
    )

    # Each adapter entry point should surface the server's error text.
    failing_calls = (
        lambda: datastore_api.get_jobs(),
        lambda: datastore_api.update_job_status(JOB_ID, JobStatus.QUEUED),
        lambda: datastore_api.update_description(JOB_ID, DESCRIPTION),
    )
    for failing_call in failing_calls:
        with pytest.raises(HttpResponseError) as e:
            failing_call()
        assert ERROR_RESPONSE in str(e)
108+
109+
110+
def test_get_maintenance_status(requests_mock: RequestsMocker):
    """get_maintenance_status parses the latest maintenance status."""
    status_payload = {
        "paused": False,
        "msg": "OK",
        "timestamp": "2023-05-08T06:31:00.519222",
    }
    requests_mock.get(
        f"{DATASTORE_API_URL}/maintenance-statuses/latest",
        json=status_payload,
    )

    assert datastore_api.get_maintenance_status().paused is False
121+
122+
123+
def test_get_maintenance_status_error(requests_mock: RequestsMocker):
    """A 500 from the maintenance endpoint raises HttpResponseError."""
    requests_mock.get(
        f"{DATASTORE_API_URL}/maintenance-statuses/latest",
        status_code=500,
        text=ERROR_RESPONSE,
    )

    with pytest.raises(HttpResponseError) as e:
        datastore_api.get_maintenance_status()
    assert ERROR_RESPONSE in str(e)
132+
133+
134+
@pytest.mark.parametrize(
    "is_paused,expected_result",
    [
        (
            True,
            datastore_api.JobQueryResult(
                built_jobs=JOB_LIST,
                queued_manager_jobs=[],
                queued_worker_jobs=[],
            ),
        ),
        (
            False,
            datastore_api.JobQueryResult(
                built_jobs=JOB_LIST,
                queued_manager_jobs=JOB_LIST,
                queued_worker_jobs=JOB_LIST,
            ),
        ),
    ],
)
def test_query_for_jobs(is_paused, expected_result, requests_mock, monkeypatch):
    """query_for_jobs always returns built jobs, but returns queued jobs
    only while the system is not paused.
    """
    monkeypatch.setattr(
        "job_executor.adapter.datastore_api.is_system_paused", lambda: is_paused
    )

    # Built jobs are returned regardless of pause state; queued jobs are
    # emptied while the system is paused.
    def mock_get_jobs(job_status=None, operations=None):
        if job_status == "built":
            return JOB_LIST
        if job_status in ("queued", "queued_manager"):
            return [] if is_paused else JOB_LIST
        # BUG FIX: the original elif chain implicitly returned None for any
        # other status; return an empty list so unexpected statuses cannot
        # leak None into the result under test.
        return []

    monkeypatch.setattr(
        "job_executor.adapter.datastore_api.get_jobs", mock_get_jobs
    )

    result = datastore_api.query_for_jobs()
    # BUG FIX: the parametrized expected_result was previously ignored and
    # the expectations were re-derived from is_paused inside the test body.
    # Assert against the parametrized expectation instead.
    assert result.built_jobs == expected_result.built_jobs
    assert result.queued_manager_jobs == expected_result.queued_manager_jobs
    assert result.queued_worker_jobs == expected_result.queued_worker_jobs
Lines changed: 195 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,195 @@
1+
import json
2+
import os
3+
import shutil
4+
from pathlib import Path
5+
6+
import pytest
7+
8+
from job_executor.adapter.fs import LocalStorageAdapter
9+
from job_executor.adapter.fs.models.datastore_versions import (
10+
DatastoreVersions,
11+
DraftVersion,
12+
)
13+
from job_executor.adapter.fs.models.metadata import (
14+
MetadataAll,
15+
)
16+
from job_executor.common.exceptions import LocalStorageError
17+
18+
# --- Paths and fixtures for the local storage adapter tests ---
DATASTORE_DIR = "tests/unit/resources/adapter/local_storage/TEST_DATASTORE"
WORKING_DIR = DATASTORE_DIR + "_working"
DATASTORE_DATA_DIR = f"{DATASTORE_DIR}/data"

# Adapter under test, rooted at the test datastore.
local_storage = LocalStorageAdapter(Path(DATASTORE_DIR))

DATASTORE_VERSIONS_PATH = f"{DATASTORE_DIR}/datastore/datastore_versions.json"
DRAFT_METADATA_ALL_PATH = f"{DATASTORE_DIR}/datastore/metadata_all__draft.json"
DRAFT_VERSION_PATH = f"{DATASTORE_DIR}/datastore/draft_version.json"
DATA_VERSIONS_PATH = f"{DATASTORE_DIR}/datastore/data_versions__1_0.json"
METADATA_ALL_PATH = f"{DATASTORE_DIR}/datastore/metadata_all__1_0_0.json"

DRAFT_DATASET_NAME = "UTDANNING"
DRAFT_DATA_PATH = f"{DATASTORE_DATA_DIR}/UTDANNING/UTDANNING__DRAFT.parquet"

DRAFT2_DATASET_NAME = "BRUTTO_INNTEKT"
RELEASED_DRAFT2_DATA_PATH = (
    f"{DATASTORE_DATA_DIR}/BRUTTO_INNTEKT/BRUTTO_INNTEKT__1_1"
)

WORKING_DIR_DATASET = "FOEDESTED"
MOVED_WORKING_DIR_DATASET_DATA_PATH = (
    f"{DATASTORE_DATA_DIR}/FOEDESTED/FOEDESTED__DRAFT.parquet"
)
42+
43+
44+
def setup_function():
    """Snapshot the test resources so each test starts from a clean tree."""
    backup = "tests/unit/resources_backup"
    if os.path.isdir(backup):
        shutil.rmtree(backup)
    shutil.copytree("tests/unit/resources", backup)
48+
49+
50+
def teardown_function():
    """Restore the pristine resource tree saved by setup_function."""
    shutil.rmtree("tests/unit/resources")
    shutil.move("tests/unit/resources_backup", "tests/unit/resources")
53+
54+
55+
def read_json(file_path: str) -> dict:
    """Load and return the JSON document stored at ``file_path``."""
    with open(file_path, encoding="utf-8") as json_file:
        return json.load(json_file)
58+
59+
60+
def test_make_dataset_dir():
    """make_dataset_dir creates the dataset directory under the data dir."""
    local_storage.datastore_dir.make_dataset_dir(WORKING_DIR_DATASET)
    expected_dir = f"{DATASTORE_DATA_DIR}/{WORKING_DIR_DATASET}"
    assert os.path.isdir(expected_dir)
63+
64+
65+
def test_get_data_versions():
    """get_data_versions returns the on-disk data_versions document."""
    actual = local_storage.datastore_dir.get_data_versions("1_0_0")
    assert actual == read_json(DATA_VERSIONS_PATH)
69+
70+
71+
def test_write_data_versions():
    """write_data_versions persists the given document to disk."""
    local_storage.datastore_dir.write_data_versions({}, "1_0_0")
    assert read_json(DATA_VERSIONS_PATH) == {}
74+
75+
76+
def test_get_draft_version():
    """get_draft_version parses the draft file into a DraftVersion model."""
    draft_version = local_storage.datastore_dir.get_draft_version()
    assert isinstance(draft_version, DraftVersion)
80+
81+
82+
def test_write_draft_version():
    """A written draft version round-trips through the file system."""
    draft_version = local_storage.datastore_dir.get_draft_version()
    draft_version.description = "updated"
    local_storage.datastore_dir.write_draft_version(draft_version)
    reread = local_storage.datastore_dir.get_draft_version()
    assert reread.description == "updated"
89+
90+
91+
def test_get_datastore_versions():
    """get_datastore_versions parses the file into a DatastoreVersions model."""
    datastore_versions = local_storage.datastore_dir.get_datastore_versions()
    assert isinstance(datastore_versions, DatastoreVersions)
95+
96+
97+
def test_write_datastore_versions():
    """A written datastore_versions document round-trips through the fs."""
    datastore_versions = local_storage.datastore_dir.get_datastore_versions()
    datastore_versions.description = "updated"
    local_storage.datastore_dir.write_datastore_versions(datastore_versions)
    reread = local_storage.datastore_dir.get_datastore_versions()
    assert reread.description == "updated"
105+
106+
107+
def test_get_metadata_all():
    """get_metadata_all parses the versioned file into a MetadataAll model."""
    metadata_all = local_storage.datastore_dir.get_metadata_all("1_0_0")
    assert isinstance(metadata_all, MetadataAll)
111+
112+
113+
def test_write_metadata_all():
    """A written metadata_all document round-trips through the file system."""
    metadata_all = local_storage.datastore_dir.get_metadata_all("1_0_0")
    metadata_all.data_structures = []
    local_storage.datastore_dir.write_metadata_all(metadata_all, "1_0_0")
    reread = local_storage.datastore_dir.get_metadata_all("1_0_0")
    assert reread.data_structures == []
121+
122+
123+
def test_delete_parquet_draft():
    """delete_parquet_draft removes the dataset's draft parquet file.

    BUG FIX: this function was named ``delete_parquet_draft`` (no ``test_``
    prefix), so pytest never collected it and the behavior was untested.
    Renamed so it actually runs.
    """
    local_storage.datastore_dir.delete_parquet_draft(DRAFT_DATASET_NAME)
    assert not os.path.isfile(DRAFT_DATA_PATH)
126+
127+
128+
def test_rename_parquet_draft_to_release():
    """Renaming a draft moves it to its versioned release path."""
    release_path = local_storage.datastore_dir.rename_parquet_draft_to_release(
        DRAFT2_DATASET_NAME, "1_1_0"
    )
    assert os.path.isdir(RELEASED_DRAFT2_DATA_PATH)
    # The returned path is versioned with major_minor only.
    assert release_path == f"{DRAFT2_DATASET_NAME}__1_1"
134+
135+
136+
def test_move_working_dir_parquet_to_datastore():
    """A working-dir parquet draft is moved into the datastore data dir."""
    local_storage.datastore_dir.make_dataset_dir(WORKING_DIR_DATASET)
    local_storage.move_working_dir_parquet_to_datastore(WORKING_DIR_DATASET)
    assert os.path.isfile(MOVED_WORKING_DIR_DATASET_DATA_PATH)
140+
141+
142+
def test_make_temp_directory():
    """save_temporary_backup creates a tmp dir with the three backup files."""
    datastore_path = Path(DATASTORE_DIR) / "datastore"
    before = os.listdir(datastore_path)
    local_storage.datastore_dir.save_temporary_backup()
    after = os.listdir(datastore_path)
    # Exactly one new entry: the tmp directory itself.
    assert len(after) == len(before) + 1
    tmp_dir = datastore_path / "tmp"
    assert os.path.isdir(tmp_dir)
    tmp_contents = os.listdir(tmp_dir)
    assert len(tmp_contents) == 3
    for expected_file in (
        "metadata_all__DRAFT.json",
        "datastore_versions.json",
        "draft_version.json",
    ):
        assert expected_file in tmp_contents
158+
159+
160+
def test_make_temp_directory_already_exists():
    """A second save_temporary_backup fails while tmp still exists."""
    local_storage.datastore_dir.save_temporary_backup()
    assert "tmp" in os.listdir(Path(DATASTORE_DIR) / "datastore")
    with pytest.raises(LocalStorageError) as e:
        local_storage.datastore_dir.save_temporary_backup()
    assert "tmp directory already exists" in str(e)
167+
168+
169+
def test_archive_temp_directory():
    """archive_temporary_backup removes the tmp dir from the datastore."""
    local_storage.datastore_dir.save_temporary_backup()
    datastore_path = Path(DATASTORE_DIR) / "datastore"
    before = os.listdir(datastore_path)
    local_storage.datastore_dir.archive_temporary_backup()
    after = os.listdir(datastore_path)
    # The tmp directory is the one entry that disappears.
    assert len(before) == len(after) + 1
    assert not os.path.isdir(datastore_path / "tmp")
176+
177+
178+
def test_archived_temp_directory_unrecognized_files():
    """Archiving fails when tmp contains files the backup did not create."""
    local_storage.datastore_dir.save_temporary_backup()
    tmp_dir = Path(DATASTORE_DIR) / "datastore" / "tmp"
    assert os.path.isdir(tmp_dir)
    # Plant a file the backup routine does not recognize.
    (tmp_dir / "newfile.txt").touch()

    with pytest.raises(LocalStorageError) as e:
        local_storage.datastore_dir.archive_temporary_backup()
    assert "Found unrecognized files" in str(e)
187+
188+
189+
def test_archive_or_delete_non_existent_tmp_dir():
    """Both archive and delete raise when no tmp directory exists."""
    with pytest.raises(LocalStorageError) as e:
        local_storage.datastore_dir.archive_temporary_backup()
    assert "Could not find a tmp directory to archive." in str(e)

    with pytest.raises(LocalStorageError) as e:
        local_storage.datastore_dir.delete_temporary_backup()
    assert "Could not find a tmp directory to delete." in str(e)

0 commit comments

Comments
 (0)