Skip to content

Commit 1b1bdaf

Browse files
committed
drop unused
1 parent 53e78f4 commit 1b1bdaf

File tree

4 files changed

+6
-86
lines changed

4 files changed

+6
-86
lines changed

packages/models-library/src/models_library/utils/change_case.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
""" String convesion
1+
"""String convesion
22
33
44
Example of usage in pydantic:
@@ -7,10 +7,11 @@
77
class Config:
88
extra = Extra.forbid
99
alias_generator = snake_to_camel # <--------
10-
json_loads = orjson.loads
10+
json_loads = json_loads
1111
json_dumps = json_dumps
1212
1313
"""
14+
1415
# Partially taken from https://github.com/autoferrit/python-change-case/blob/master/change_case/change_case.py#L131
1516
import re
1617
from typing import Final

packages/pytest-simcore/src/pytest_simcore/helpers/s3.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,8 @@
55

66
import aiofiles
77
import httpx
8-
import orjson
98
from aws_library.s3 import MultiPartUploadLinks
9+
from common_library.json_serialization import json_loads
1010
from fastapi import status
1111
from models_library.api_schemas_storage.storage_schemas import (
1212
ETag,
@@ -71,7 +71,7 @@ async def upload_file_part(
7171
assert response.status_code == status.HTTP_200_OK
7272
assert response.headers
7373
assert "Etag" in response.headers
74-
received_e_tag = orjson.loads(response.headers["Etag"])
74+
received_e_tag = json_loads(response.headers["Etag"])
7575
print(
7676
f"--> completed upload {this_file_chunk_size=} of {file=}, [{part_index + 1}/{num_parts}], {received_e_tag=}"
7777
)

services/web/server/src/simcore_service_webserver/utils.py

Lines changed: 1 addition & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
"""
2-
General utilities and helper functions
2+
General utilities and helper functions
33
"""
44

55
import asyncio
@@ -11,11 +11,8 @@
1111
import tracemalloc
1212
from datetime import datetime
1313
from pathlib import Path
14-
from typing import Any
1514

16-
import orjson
1715
from common_library.error_codes import ErrorCodeStr
18-
from models_library.basic_types import SHA1Str
1916
from typing_extensions import ( # https://docs.pydantic.dev/latest/api/standard_library_types/#typeddict
2017
TypedDict,
2118
)
@@ -176,21 +173,3 @@ def compose_support_error_msg(
176173

177174
def get_traceback_string(exception: BaseException) -> str:
178175
return "".join(traceback.format_exception(exception))
179-
180-
181-
# -----------------------------------------------
182-
#
183-
# SERIALIZATION, CHECKSUMS,
184-
#
185-
186-
187-
def compute_sha1_on_small_dataset(d: Any) -> SHA1Str:
188-
"""
189-
This should be used for small datasets, otherwise it should be chunked
190-
and aggregated
191-
192-
More details in test_utils.py:test_compute_sha1_on_small_dataset
193-
"""
194-
# SEE options in https://github.com/ijl/orjson#option
195-
data_bytes = orjson.dumps(d, option=orjson.OPT_NON_STR_KEYS | orjson.OPT_SORT_KEYS)
196-
return SHA1Str(hashlib.sha1(data_bytes).hexdigest()) # nosec # NOSONAR

services/web/server/tests/unit/isolated/test_utils.py

Lines changed: 0 additions & 60 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,10 @@
1-
import asyncio
2-
import concurrent.futures
31
import time
4-
import timeit
52
import urllib.parse
6-
from contextlib import contextmanager
73
from datetime import datetime
84

9-
import pytest
105
from simcore_service_webserver.utils import (
116
DATETIME_FORMAT,
127
compose_support_error_msg,
13-
compute_sha1_on_small_dataset,
148
now_str,
159
to_datetime,
1610
)
@@ -65,60 +59,6 @@ def test_time_utils():
6559
assert now_time == datetime.strptime(snapshot, DATETIME_FORMAT)
6660

6761

68-
@pytest.mark.skip(reason="DEV-demo")
69-
async def test_compute_sha1_on_small_dataset(fake_project: dict):
70-
# Based on GitHK review https://github.com/ITISFoundation/osparc-simcore/pull/2556:
71-
# From what I know, these having function tend to be a bit CPU intensive, based on the size of the dataset.
72-
# Could we maybe have an async version of this function here, run it on an executor?
73-
#
74-
# PC: Here we check the overhead of sha when adding a pool executor
75-
76-
@contextmanager
77-
def timeit_ctx(what):
78-
start = timeit.default_timer()
79-
yield
80-
stop = timeit.default_timer()
81-
print(f"Time for {what}:", f"{stop - start} secs")
82-
83-
# dataset is N copies of a project dataset (typical dataset 'unit' in this module)
84-
N = 10_000
85-
data = [
86-
fake_project,
87-
] * N
88-
89-
print("-" * 100)
90-
with timeit_ctx("compute_sha1 sync"):
91-
project_sha2_sync = compute_sha1_on_small_dataset(data)
92-
93-
with timeit_ctx("compute_sha1 async"):
94-
loop = asyncio.get_running_loop()
95-
with concurrent.futures.ProcessPoolExecutor() as pool:
96-
project_sha2_async = await loop.run_in_executor(
97-
pool, compute_sha1_on_small_dataset, data
98-
)
99-
100-
assert project_sha2_sync == project_sha2_async
101-
102-
# N=1
103-
# Time for compute_sha1_sync: 3.153807483613491e-05 secs
104-
# Time for compute_sha1_async: 0.03046882478520274 secs
105-
106-
# N=100
107-
# Time for compute_sha1 sync: 0.0005367340054363012 secs
108-
# Time for compute_sha1 async: 0.029975621961057186 secs
109-
110-
# N=1000
111-
# Time for compute_sha1 sync: 0.005468853982165456 secs
112-
# Time for compute_sha1 async: 0.04451707797124982 secs
113-
114-
# N=10000
115-
# Time for compute_sha1 sync: 0.05151305114850402 secs
116-
# Time for compute_sha1 async: 0.09799357503652573 secs
117-
118-
# For larger datasets, async solution definitively scales better
119-
# but for smaller ones, the overhead is considerable
120-
121-
12262
def test_compose_support_error_msg():
12363

12464
msg = compose_support_error_msg(

0 commit comments

Comments
 (0)