Skip to content

Commit d6f7120

Browse files
authored
Merge pull request #7 from allenhaozi/fix/fix-version-compatibility
fix version compatibility
2 parents f34a0fa + 0a15e5c commit d6f7120

File tree

8 files changed

+78
-71
lines changed

8 files changed

+78
-71
lines changed

.gitignore

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -158,3 +158,5 @@ cython_debug/
158158
# and can be added to the global gitignore or merged into this file. For a more nuclear
159159
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
160160
#.idea/
161+
# local dev code
162+
dev

requirements.txt

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,4 +5,5 @@ requests==2.31.0
55
anyio==4.1.0
66
httpx==0.25.2
77
distro==1.8.0
8-
pydantic==2.4.2
8+
pydantic==2.4.2
9+
tqdm==4.66.4

src/bohrium/__init__.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
from ._client import Bohrium, AsyncBohrium
22
from ._base_client import AsyncAPIClient, SyncAPIClient
3+
from ._utils._logs import setup_logging as _setup_logging
34

5+
_setup_logging()
46

57
__all__ = ["Bohrium", "AsyncBohrium", "AsyncAPIClient", "SyncAPIClient"]

src/bohrium/_base_client.py

Lines changed: 12 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
TypeVar,
1313
Union,
1414
cast,
15+
Optional
1516
)
1617
from urllib.parse import urljoin
1718

@@ -22,7 +23,7 @@
2223
from ._constants import DEFAULT_CONNECTION_LIMITS, DEFAULT_MAX_RETRIES, DEFAULT_TIMEOUT
2324
from ._utils import lru_cache
2425

25-
logger = logging.Logger = logging.getLogger(__name__)
26+
logger = logging.getLogger(__name__)
2627

2728
Arch = Union[Literal["x32", "x64", "arm", "arm64", "unknown"]]
2829

@@ -118,12 +119,12 @@ class BaseClient(Generic[_HttpxClientT]):
118119
def __init__(
119120
self,
120121
*,
121-
_version: str | None = None,
122-
base_url: str | URL,
122+
_version: Optional[str] = None,
123+
base_url: Union[str, URL],
123124
limits: httpx.Limits,
124125
max_retries: int = DEFAULT_MAX_RETRIES,
125-
timeout: float | Timeout | None = DEFAULT_TIMEOUT,
126-
custom_headers: Mapping[str, str] | None = None,
126+
timeout: Optional[Union[float, httpx.Timeout]] = DEFAULT_TIMEOUT,
127+
custom_headers: Optional[Mapping[str, str]] = None,
127128
):
128129
self._base_url = self._enforce_trailing_slash(URL(base_url))
129130
self._version = _version
@@ -148,7 +149,7 @@ def _build_headers(self, custom_headers) -> httpx.Headers:
148149
def _build_params(self, custom_params) -> dict[str, str]:
149150
params = _merge_mappings(self.default_params, custom_params)
150151
return params
151-
152+
152153
@property
153154
def custom_auth(self) -> httpx.Auth | None:
154155
return None
@@ -224,13 +225,13 @@ class SyncAPIClient(BaseClient[httpx.Client]):
224225

225226
def __init__(
226227
self,
227-
base_url: str | URL,
228+
base_url: Union[str, httpx.URL],
228229
max_retries: int = DEFAULT_MAX_RETRIES,
229-
timeout: float | Timeout | None = DEFAULT_TIMEOUT,
230+
timeout: Union[float, Timeout, None] = DEFAULT_TIMEOUT,
230231
limits: Limits = DEFAULT_CONNECTION_LIMITS,
231-
_version: str | None = None,
232-
http_client: httpx.Client | None = None,
233-
custom_headers: Mapping[str, str] | None = None,
232+
_version: Optional[str] = None,
233+
http_client: Optional[httpx.Client] = None,
234+
custom_headers: Optional[Mapping[str, str]] = None,
234235
) -> None:
235236

236237
if http_client is not None and not isinstance(

src/bohrium/_client.py

Lines changed: 8 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,7 @@
1-
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2-
31
from __future__ import annotations
42

53
import os
6-
from typing import Mapping
4+
from typing import Mapping, Optional, Union
75

86
from httpx import URL, Client, Response, Timeout
97
from typing_extensions import override
@@ -23,12 +21,12 @@ class Bohrium(SyncAPIClient):
2321

2422
def __init__(
2523
self,
26-
access_key: str | None = None,
27-
base_url: str | URL | None = None,
28-
project_id: str | None = None,
29-
timeout: float | Timeout | None = 30.0,
30-
max_retries: int | None = DEFAULT_MAX_RETRIES,
31-
http_client: Client | None = None,
24+
access_key: Optional[str] = None,
25+
base_url: Optional[Union[str, URL]] = None,
26+
project_id: Optional[str] = None,
27+
timeout: Optional[Union[float, Timeout]] = 30.0,
28+
max_retries: Optional[int] = DEFAULT_MAX_RETRIES,
29+
http_client: Optional[Client] = None,
3230
) -> None:
3331
"""Construct a new synchronous openai client instance."""
3432
if access_key is None:
@@ -54,7 +52,7 @@ def __init__(
5452

5553
if base_url is None:
5654
base_url = "https://openapi.dp.tech"
57-
55+
5856
super().__init__(
5957
_version=__version__,
6058
base_url=base_url,
@@ -65,7 +63,6 @@ def __init__(
6563
)
6664

6765
self.job = resources.Job(self)
68-
6966

7067
@property
7168
@override
@@ -120,5 +117,3 @@ def _make_status_error(
120117

121118
class AsyncBohrium(AsyncAPIClient):
122119
pass
123-
124-

src/bohrium/_utils/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
from ._utils import lru_cache
22

3-
__all__ = ["lru_cache"]
3+
__all__ = ["lru_cache"]

src/bohrium/_utils/_logs.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@ def _basic_config() -> None:
1414

1515

1616
def setup_logging() -> None:
17-
env = os.environ.get("BOHRIUM_LOG")
17+
env = os.environ.get("BOHRIUM_LOG_LEVEL", "info")
1818
if env == "debug":
1919
_basic_config()
2020
logger.setLevel(logging.DEBUG)

src/bohrium/resources/job/job.py

Lines changed: 50 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -1,36 +1,45 @@
11
import logging
2-
from ..._resource import AsyncAPIResource, SyncAPIResource
3-
from ..tiefblue.tiefblue import Tiefblue
4-
from ...types.job.job import JobAddRequest
2+
import os
3+
import uuid
4+
from pathlib import Path
5+
56
# from ..._resource import BaseClient
67
from pprint import pprint
78
from typing import Optional
8-
import os
9-
from pathlib import Path
10-
import uuid
11-
# log: logging.Logger = logging.getLogger(__name__)
9+
10+
from ..._resource import AsyncAPIResource, SyncAPIResource
11+
from ...types.job.job import JobAddRequest
12+
from ..tiefblue.tiefblue import Tiefblue
13+
14+
log = logging.getLogger(__name__)
1215

1316

1417
class Job(SyncAPIResource):
1518

19+
def detail(self, job_id):
20+
log.info(f"detail job {job_id}")
21+
response = self._client.get(f"/openapi/v1/job/{job_id}")
22+
log.debug(response)
23+
return response.json().get("data")
24+
1625
def submit(
17-
self,
18-
project_id: int,
26+
self,
27+
project_id: int,
1928
job_name: str,
2029
machine_type: str,
2130
cmd: str,
2231
image_address: str,
2332
job_group_id: int = 0,
24-
work_dir: str = '',
25-
result: str = '',
33+
work_dir: str = "",
34+
result: str = "",
2635
dataset_path: list = [],
2736
log_files: list = [],
2837
out_files: list = [],
2938
):
3039
# log.info(f"submit job {name},project_id:{project_id}")
3140
data = self.create_job(project_id, job_name, job_group_id)
3241
print(data)
33-
if work_dir != '':
42+
if work_dir != "":
3443
if not os.path.exists(work_dir):
3544
raise FileNotFoundError
3645
if os.path.isdir(work_dir):
@@ -39,11 +48,11 @@ def submit(
3948
file_name = os.path.basename(work_dir)
4049
object_key = os.path.join(data["storePath"], file_name)
4150
self.upload(work_dir, object_key, data["token"])
42-
51+
4352
ep = os.path.expanduser(result)
4453
p = Path(ep).absolute().resolve()
4554
p = p.joinpath(str(uuid.uuid4()) + "_temp.zip")
46-
55+
4756
job_add_request = JobAddRequest(
4857
download_path=str(p.absolute().resolve()),
4958
dataset_path=dataset_path,
@@ -55,47 +64,43 @@ def submit(
5564
scass_type=machine_type,
5665
cmd=cmd,
5766
log_files=log_files,
58-
out_files=out_files
67+
out_files=out_files,
5968
)
6069
return self.insert(job_add_request.to_dict())
61-
70+
6271
def insert(self, data):
6372
# log.info(f"insert job {data}")
64-
response = self._client.post(f"/openapi/v2/job/add", json=data)
73+
response = self._client.post("/openapi/v2/job/add", json=data)
6574
pprint(response.request)
6675
print(response.json())
67-
76+
6877
def delete(self, job_id):
6978
# log.info(f"delete job {job_id}")
7079
response = self._client.post(f"/openapi/v1/job/del/{job_id}")
7180
pprint(response.request)
7281
print(response.json())
73-
82+
7483
def terminate(self, job_id):
7584
# log.info(f"terminate job {job_id}")
7685
response = self._client.post(f"/openapi/v1/job/terminate/{job_id}")
7786
pprint(response.request)
7887
print(response.json())
79-
88+
8089
def kill(self, job_id):
8190
# log.info(f"kill job {job_id}")
8291
response = self._client.post(f"/openapi/v1/job/kill/{job_id}")
8392
pprint(response.request)
8493
print(response.json())
85-
94+
8695
def log(self, job_id, log_file="STDOUTERR", page=-1, page_size=8192):
8796
# log.info(f"log job {job_id}")
88-
response = self._client.get(f"/openapi/v1/job/{job_id}/log", params={"logFile": log_file, "page": page, "pageSize": page_size})
97+
response = self._client.get(
98+
f"/openapi/v1/job/{job_id}/log",
99+
params={"logFile": log_file, "page": page, "pageSize": page_size},
100+
)
89101
pprint(response.request)
90102
print(response.json().get("data")["log"])
91103
return response.json().get("data")["log"]
92-
93-
def detail(self, job_id):
94-
# log.info(f"detail job {job_id}")
95-
response = self._client.get(f"/openapi/v1/job/{job_id}")
96-
pprint(response.request)
97-
print(response.json())
98-
return response.json().get("data")
99104

100105
def create_job(
101106
self,
@@ -104,25 +109,28 @@ def create_job(
104109
group_id: Optional[int] = 0,
105110
):
106111
# log.info(f"create job {name}")
107-
response = self._client.get(f"/openapi/v1/ak/get")
108-
112+
response = self._client.get("/openapi/v1/ak/get")
113+
109114
data = {
110115
"userId": response.json().get("data").get("user_id"),
111116
"projectId": project_id,
112117
"name": name,
113118
"bohrGroupId": group_id,
114119
}
115-
response = self._client.post(f"/openapi/v1/job/pre_create", json=data)
120+
response = self._client.post("/openapi/v1/job/pre_create", json=data)
116121
pprint(response.request)
117122
print(response.json())
118123
return response.json().get("data")
119-
124+
120125
def create_job_group(self, project_id, job_group_name):
121126
# log.info(f"create job group {job_group_name}")
122-
response = self._client.post(f"/openapi/v1/job_group/add", json={"name": job_group_name, "projectId": project_id})
127+
response = self._client.post(
128+
"/openapi/v1/job_group/add",
129+
json={"name": job_group_name, "projectId": project_id},
130+
)
123131
pprint(response.request)
124132
print(response.json())
125-
133+
126134
def upload(
127135
self,
128136
file_path: str,
@@ -131,25 +139,23 @@ def upload(
131139
):
132140
tiefblue = Tiefblue()
133141
tiefblue.upload_From_file_multi_part(
134-
object_key=object_key,
135-
file_path=file_path,
136-
progress_bar=True)
137-
142+
object_key=object_key, file_path=file_path, progress_bar=True
143+
)
144+
138145
def uploadr(self, work_dir, store_path, token):
139-
if not work_dir.endswith('/'):
140-
work_dir = work_dir + '/'
146+
if not work_dir.endswith("/"):
147+
work_dir = work_dir + "/"
141148
for root, _, files in os.walk(work_dir):
142149
for file in files:
143150
full_path = os.path.join(root, file)
144151
object_key = full_path.replace(work_dir, store_path)
145152
self.upload(full_path, object_key, token)
146153

147-
148154
def download(self, job_id, save_path):
149155
detail = self.detail(job_id)
150156
tiefblue = Tiefblue()
151157
tiefblue.download_from_url(detail["resultUrl"], save_path)
152-
158+
159+
153160
class AsyncJob(AsyncAPIResource):
154161
pass
155-

0 commit comments

Comments (0)