Skip to content

Commit 13a8586

Browse files
add test for logstreaming (#140)
* update workflow before publishing python package * fix dependency issue and bump version * point to website in project description * fix broken dependency * improve doc * add github token to download artifacts * ensure only read-access @wvangeit * yet another attempt at downloading artifacts * make sure to use repo that ran the trigger wf * another attempt at fixing * change owner * allow publishing to testpypi also when pr * minor change * revert minor (but breaking) change * minor fix * add debug messages * another debug message * hopefully the final version * final fix * minor fix * move master and tag to individual jobs * add debug messages * dev->post * add python script for determining semantic version * minor changes * minor changes * improve error handling and add version file to artifacts * check if release * minor fix * ensure to enter venv * also when tagging * source venv in publishing workflow * ensure only master * add script for testing 'pure' semver * adapt workflows to new python script * minor change * attempt to evaluate expressions correctly * several fixes to fix tests * ensure repo is checked out in publish workflow * several small fixes * cleanup * debug * minor cleanup * minor changes * add debug message * minor change * minor change * yet another try * minor change * minor change * minor change * minor change * minor changes * correct workflow run id * cosmetic change * avoid using gh * change to a single job for publishing * minor cleanup * swap loops in clean up jobs * correction * update server compatibility to new url * minor change to trigger ci * allow for async tests in e2e * add test * fix test * clean up tests
1 parent 2bc5fdd commit 13a8586

File tree

6 files changed

+149
-87
lines changed

6 files changed

+149
-87
lines changed

clients/python/requirements/e2e-test.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ pandas
1010
papermill<2.5.0 # due to bug in 2.5.0 (https://github.com/nteract/papermill/issues/735). Remove this requirement once the bug is fixed
1111
pydantic
1212
pytest
13+
pytest-asyncio
1314
pytest-env
1415
pytest-html
1516
packaging

clients/python/test/e2e/ci/e2e/e2e/_models.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -82,6 +82,7 @@ class PytestConfig(BaseModel):
8282
env: str
8383
required_plugins: str
8484
addopts: str
85+
asyncio_mode: str
8586

8687

8788
class Artifacts(BaseModel):

clients/python/test/e2e/ci/e2e/e2e/preprocess.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,8 +73,9 @@ def generate_ini(
7373
)
7474
pytest_config: PytestConfig = PytestConfig(
7575
env="\n" + "\n".join(envs),
76-
required_plugins="pytest-env pytest-html",
76+
required_plugins="pytest-env pytest-html pytest-asyncio",
7777
addopts=add_opts,
78+
asyncio_mode="auto",
7879
)
7980

8081
config: PytestIniFile = PytestIniFile(

clients/python/test/e2e/conftest.py

Lines changed: 35 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,11 @@
11
import logging
22
import os
33
from pathlib import Path
4+
from typing import Iterable
45

56
import osparc
67
import pytest
8+
from httpx import AsyncClient, BasicAuth
79
from numpy import random
810
from pydantic import ByteSize
911

@@ -13,18 +15,31 @@
1315

1416

1517
@pytest.fixture
def configuration() -> Iterable[osparc.Configuration]:
    """Yield an ``osparc.Configuration`` built from the OSPARC_API_* env vars.

    Fails the requesting test early (AssertionError) when any of the
    variables is missing or empty.
    """
    env = os.environ
    assert (host := env.get("OSPARC_API_HOST"))
    assert (username := env.get("OSPARC_API_KEY"))
    assert (password := env.get("OSPARC_API_SECRET"))
    yield osparc.Configuration(host=host, username=username, password=password)
28+
29+
@pytest.fixture
def api_client(configuration: osparc.Configuration) -> Iterable[osparc.ApiClient]:
    """Yield an ``osparc.ApiClient`` for *configuration*, closed on teardown."""
    client = osparc.ApiClient(configuration=configuration)
    with client:
        yield client
33+
34+
35+
@pytest.fixture
async def async_client(configuration) -> Iterable[AsyncClient]:
    """Yield an ``httpx.AsyncClient`` authenticated against the osparc API.

    BUGFIX: the previous (sync) fixture yielded the client without ever
    closing it, leaking the underlying connection pool. The fixture is now
    async (supported because ``asyncio_mode = auto`` is configured) and
    closes the client on teardown.

    NOTE(review): annotation kept as ``Iterable`` to avoid new imports; an
    async generator is strictly an ``AsyncIterator`` — confirm with mypy.
    """
    client = AsyncClient(
        base_url=configuration.host,
        auth=BasicAuth(
            username=configuration.username, password=configuration.password
        ),
    )  # type: ignore
    try:
        yield client
    finally:
        # release the connection pool even if the test fails
        await client.aclose()
2843

2944

3045
@pytest.fixture
@@ -42,3 +57,12 @@ def tmp_file(tmp_path: Path, caplog) -> Path:
4257
tmp_file.stat().st_size == byte_size
4358
), f"Could not create file of size: {byte_size}"
4459
return tmp_file
60+
61+
62+
@pytest.fixture
def sleeper(api_client: osparc.ApiClient) -> Iterable[osparc.Solver]:
    """Yield the fixed sleeper solver release used by the e2e tests."""
    solvers_api = osparc.SolversApi(api_client=api_client)
    solver = solvers_api.get_solver_release(
        "simcore/services/comp/itis/sleeper", "2.0.2"
    )  # type: ignore
    yield solver

clients/python/test/e2e/test_files_api.py

Lines changed: 36 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -20,58 +20,54 @@ def _hash_file(file: Path) -> str:
2020

2121

2222
@requires_dev_features
def test_upload_file(tmp_file: Path, api_client: osparc.ApiClient) -> None:
    """Test that we can upload a file via the multipart upload"""
    destination: Path = tmp_file.parent
    files_api = osparc.FilesApi(api_client=api_client)

    # Uploading the same file twice must be deduplicated server side.
    first_upload: osparc.File = files_api.upload_file(tmp_file)
    second_upload: osparc.File = files_api.upload_file(tmp_file)
    assert (
        first_upload.id == second_upload.id
    ), "could not detect that file was already on server"

    # Round-trip: download the file and compare content hashes.
    downloaded = Path(
        files_api.download_file(first_upload.id, destination_folder=destination)
    )
    assert downloaded.parent == destination
    assert _hash_file(downloaded) == _hash_file(tmp_file)

    files_api.delete_file(first_upload.id)
3938

4039

4140
@requires_dev_features
@pytest.mark.parametrize("use_checksum", [True, False])
@pytest.mark.parametrize("use_id", [True, False])
def test_search_files(
    tmp_file: Path, api_client: osparc.ApiClient, use_checksum: bool, use_id: bool
) -> None:
    """Upload a file, search for it by id/checksum, delete it, search again.

    BUGFIX: the previous version caught ``Exception`` (which includes
    ``AssertionError``) to clean up leftover files but never re-raised, so
    any failing assertion made the test silently PASS. Cleanup now re-raises.
    """
    checksum: str = _hash_file(tmp_file)
    results: osparc.PaginationGenerator
    files_api: osparc.FilesApi = osparc.FilesApi(api_client=api_client)
    try:
        results = files_api._search_files(sha256_checksum=checksum)
        assert len(results) == 0, "Found file which shouldn't be there"

        uploaded_file: osparc.File = files_api.upload_file(tmp_file)
        assert checksum == uploaded_file.checksum

        results = files_api._search_files(
            file_id=uploaded_file.id if use_id else None,
            sha256_checksum=uploaded_file.checksum if use_checksum else None,
        )
        assert len(results) == 1, "Could not find file after it had been uploaded"

        files_api.delete_file(uploaded_file.id)
        results = files_api._search_files(
            file_id=uploaded_file.id if use_id else None,
            sha256_checksum=uploaded_file.checksum if use_checksum else None,
        )
        assert len(results) == 0, "Could find file on server after it had been deleted"

    except Exception:
        # clean up in case of failure, then re-raise so the test still fails
        results = files_api._search_files(sha256_checksum=checksum)
        for found in results:  # renamed: 'file' shadowed a builtin
            files_api.delete_file(found.id)
        raise
Lines changed: 74 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -1,48 +1,87 @@
1+
import json
2+
13
import osparc
24
from _utils import requires_dev_features
5+
from httpx import AsyncClient
6+
7+
DEFAULT_TIMEOUT_SECONDS = 10 * 60 # 10 min
38

49

510
@requires_dev_features
def test_jobs(api_client: osparc.ApiClient, sleeper: osparc.Solver):
    """Test the jobs method

    Args:
        api_client (osparc.ApiClient): Authenticated API client fixture
        sleeper (osparc.Solver): The sleeper solver release fixture

    Changes vs. previous version: created jobs are now deleted in a
    ``finally`` block (they used to leak when an assertion failed), and a
    dead ``solvers_api.jobs(...)`` call whose result was discarded was
    removed.
    """
    n_jobs: int = 3
    solvers_api = osparc.SolversApi(api_client=api_client)

    # baseline: number of jobs already present on the server
    init_iter = solvers_api.jobs(sleeper.id, sleeper.version)
    n_init_iter: int = len(init_iter)
    assert n_init_iter >= 0

    created_job_ids = []
    try:
        # create n_jobs jobs
        for _ in range(n_jobs):
            job: osparc.Job = solvers_api.create_job(
                sleeper.id, sleeper.version, osparc.JobInputs({"input1": 1.0})
            )
            created_job_ids.append(job.id)

        tmp_iter = solvers_api.jobs(sleeper.id, sleeper.version)

        final_iter = solvers_api.jobs(sleeper.id, sleeper.version)
        assert len(final_iter) > 0, "No jobs were available"
        assert n_init_iter + n_jobs == len(
            final_iter
        ), "An incorrect number of jobs was recorded"

        for ii, elm in enumerate(tmp_iter):
            assert isinstance(elm, osparc.Job)
            if ii > 100:  # don't iterate forever on a busy deployment
                break
    finally:
        # cleanup: always delete the jobs we created, even on failure
        for job_id in created_job_ids:
            solvers_api.delete_job(sleeper.id, sleeper.version, job_id)
49+
50+
51+
@requires_dev_features
async def test_logstreaming(
    api_client: osparc.ApiClient, sleeper: osparc.Solver, async_client: AsyncClient
):
    """Test the log streaming

    Args:
        api_client (osparc.ApiClient): Authenticated API client fixture
        sleeper (osparc.Solver): The sleeper solver release fixture
        async_client (AsyncClient): httpx client authenticated against the API

    Changes vs. previous version: the created job is now deleted in a
    ``finally`` block (it used to leak when an assertion failed), the stale
    docstring args were corrected, and the final assert message no longer
    embeds indentation whitespace via a backslash continuation.
    """
    solvers_api: osparc.SolversApi = osparc.SolversApi(api_client)
    job: osparc.Job = solvers_api.create_job(
        sleeper.id, sleeper.version, osparc.JobInputs({"input1": 1.0})
    )  # type: ignore

    solvers_api.start_job(sleeper.id, sleeper.version, job.id)

    nloglines: int = 0
    print("starting logstreaming...")
    try:
        async with async_client.stream(
            "GET",
            f"/v0/solvers/{sleeper.id}/releases/{sleeper.version}/jobs/{job.id}/logstream",
            timeout=DEFAULT_TIMEOUT_SECONDS,
        ) as response:
            async for line in response.aiter_lines():
                # each streamed line is a JSON document with job_id + messages
                log = json.loads(line)
                job_id = log.get("job_id")
                assert job_id
                assert job_id == job.id
                nloglines += 1
                print("\n".join(log.get("messages")))
                if nloglines > 10:  # dont wait too long
                    await response.aclose()
                    break

        assert nloglines > 0, (
            f"Could not stream log for {sleeper.id=}, "
            f"{sleeper.version=} and {job.id=}"
        )  # type: ignore
    finally:
        # always delete the job, even when the assertions above fail
        solvers_api.delete_job(sleeper.id, sleeper.version, job.id)

0 commit comments

Comments
 (0)