Skip to content

Commit 38649bf

Browse files
committed
introduce test to check RAM usage of download fcn
1 parent 62ca53a commit 38649bf

File tree

2 files changed

+36
-26
lines changed

2 files changed

+36
-26
lines changed

clients/python/test/e2e/conftest.py

Lines changed: 19 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -17,18 +17,14 @@
1717
from numpy import random
1818
from packaging.version import Version
1919
from pydantic import ByteSize
20+
from typing import Callable
2021

2122
try:
2223
from osparc._settings import ConfigurationEnvVars
2324
except ImportError:
2425
pass
2526

2627

27-
_KB: ByteSize = ByteSize(1024) # in bytes
28-
_MB: ByteSize = ByteSize(_KB * 1024) # in bytes
29-
_GB: ByteSize = ByteSize(_MB * 1024) # in bytes
30-
31-
3228
# Dictionary to store start times of tests
3329
_test_start_times = {}
3430

@@ -133,20 +129,24 @@ def async_client() -> Iterable[AsyncClient]:
133129

134130

135131
@pytest.fixture
136-
def tmp_file(tmp_path: Path, caplog: pytest.LogCaptureFixture) -> Path:
137-
caplog.set_level(logging.INFO)
138-
byte_size: ByteSize = 1 * _GB
139-
tmp_file = tmp_path / "large_test_file.txt"
140-
ss: random.SeedSequence = random.SeedSequence()
141-
logging.info("Entropy used to generate random file: %s", f"{ss.entropy}")
142-
rng: random.Generator = random.default_rng(ss)
143-
tmp_file.write_bytes(rng.bytes(1000))
144-
with open(tmp_file, "wb") as f:
145-
f.truncate(byte_size)
146-
assert (
147-
tmp_file.stat().st_size == byte_size
148-
), f"Could not create file of size: {byte_size}"
149-
return tmp_file
132+
def create_tmp_file(
133+
tmp_path: Path, caplog: pytest.LogCaptureFixture
134+
) -> Callable[[ByteSize], Path]:
135+
def _generate_file(file_size: ByteSize):
136+
caplog.set_level(logging.INFO)
137+
tmp_file = tmp_path / "large_test_file.txt"
138+
ss: random.SeedSequence = random.SeedSequence()
139+
logging.info("Entropy used to generate random file: %s", f"{ss.entropy}")
140+
rng: random.Generator = random.default_rng(ss)
141+
tmp_file.write_bytes(rng.bytes(1000))
142+
with open(tmp_file, "wb") as f:
143+
f.truncate(file_size)
144+
assert (
145+
tmp_file.stat().st_size == file_size
146+
), f"Could not create file of size: {file_size}"
147+
return tmp_file
148+
149+
return _generate_file
150150

151151

152152
@pytest.fixture

clients/python/test/e2e/test_files_api.py

Lines changed: 17 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -10,9 +10,13 @@
1010
import osparc
1111
import pytest
1212
from _utils import skip_if_no_dev_features
13-
from conftest import _KB
1413
from memory_profiler import memory_usage
15-
from typing import Final, List
14+
from typing import Final, List, Callable
15+
from pydantic import ByteSize
16+
17+
_KB: ByteSize = ByteSize(1024) # in bytes
18+
_MB: ByteSize = ByteSize(_KB * 1024) # in bytes
19+
_GB: ByteSize = ByteSize(_MB * 1024) # in bytes
1620

1721

1822
def _hash_file(file: Path) -> str:
@@ -27,10 +31,12 @@ def _hash_file(file: Path) -> str:
2731
return sha256.hexdigest()
2832

2933

30-
@skip_if_no_dev_features
31-
def test_upload_file(tmp_file: Path, api_client: osparc.ApiClient) -> None:
32-
"""Test that we can upload a file via the multipart upload"""
34+
def test_upload_file(
35+
create_tmp_file: Callable[[ByteSize], Path], api_client: osparc.ApiClient
36+
) -> None:
37+
"""Test that we can upload a file via the multipart upload and download it again. Also check RAM usage of upload/download fcns"""
3338
_allowed_ram_usage_in_mb: Final[int] = 300 # 300MB
39+
tmp_file = create_tmp_file(ByteSize(1 * _GB))
3440
assert (
3541
tmp_file.stat().st_size > _allowed_ram_usage_in_mb * 1024 * 1024
3642
), "For this test to make sense, file size must be larger than allowed ram usage."
@@ -46,7 +52,7 @@ def max_diff(data: List[int]) -> int:
4652
)
4753
assert (
4854
max_diff(upload_ram_usage_in_mb) < _allowed_ram_usage_in_mb
49-
), f"Used more than {_allowed_ram_usage_in_mb=} to upload file of size {tmp_file.stat().st_size=}"
55+
), f"Used more than {_allowed_ram_usage_in_mb=} to upload file of size {tmp_file.stat().st_size=}"
5056
uploaded_file2: osparc.File = files_api.upload_file(tmp_file)
5157
assert (
5258
uploaded_file1.id == uploaded_file2.id
@@ -72,8 +78,12 @@ def max_diff(data: List[int]) -> int:
7278
@pytest.mark.parametrize("use_checksum", [True, False])
7379
@pytest.mark.parametrize("use_id", [True, False])
7480
def test_search_files(
75-
tmp_file: Path, api_client: osparc.ApiClient, use_checksum: bool, use_id: bool
81+
create_tmp_file: Callable[[ByteSize], Path],
82+
api_client: osparc.ApiClient,
83+
use_checksum: bool,
84+
use_id: bool,
7685
) -> None:
86+
tmp_file = create_tmp_file(ByteSize(1 * _GB))
7787
checksum: str = _hash_file(tmp_file)
7888
results: osparc.PaginationGenerator
7989
files_api: osparc.FilesApi = osparc.FilesApi(api_client=api_client)

0 commit comments

Comments
 (0)