 import logging
 from ..._resource import AsyncAPIResource, SyncAPIResource
+from ..tiefblue.tiefblue import Tiefblue
+from ...types.job.job import JobAddRequest
 # from ..._resource import BaseClient
 from pprint import pprint
-
-log: logging.Logger = logging.getLogger(__name__)
+from typing import Optional
+import os
+from pathlib import Path
+import uuid
+# log: logging.Logger = logging.getLogger(__name__)


 class Job(SyncAPIResource):
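+    """Synchronous client for the job-related /openapi endpoints."""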
-    def submit(self, project_id, name):
-        log.info(f"submit job {name},project_id:{project_id}")
-
+    def submit(
+        self,
+        project_id: int,
+        job_name: str,
+        machine_type: str,
+        cmd: str,
+        image_address: str,
+        job_group_id: int = 0,
+        work_dir: str = '',
+        result: str = '',
+        dataset_path: Optional[list] = None,
+        log_files: Optional[list] = None,
+        out_files: Optional[list] = None,
+    ):
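+        """Create the job, upload `work_dir` (a single file or a directory
+        tree) to the returned store path, then register the run: `cmd`
+        executed on `machine_type` inside the `image_address` image, with
+        results targeted at a unique temp zip under `result`."""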
+        # log.info(f"submit job {job_name},project_id:{project_id}")
+        dataset_path = dataset_path or []
+        log_files = log_files or []
+        out_files = out_files or []
+        data = self.create_job(project_id, job_name, job_group_id)
+        print(data)
+        if work_dir != '':
+            if not os.path.exists(work_dir):
+                raise FileNotFoundError(work_dir)
+            if os.path.isdir(work_dir):
+                self.upload_dir(work_dir, data["storePath"], data["token"])
+            else:
+                file_name = os.path.basename(work_dir)
+                object_key = os.path.join(data["storePath"], file_name)
+                self.upload(work_dir, object_key, data["token"])
+
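+        # Resolve the local download target: a unique temp zip under `result`.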
+        ep = os.path.expanduser(result)
+        p = Path(ep).resolve()
+        p = p.joinpath(str(uuid.uuid4()) + "_temp.zip")
+
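+        # Assemble the submission payload consumed by /openapi/v2/job/add.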
+        job_add_request = JobAddRequest(
+            download_path=str(p),
+            dataset_path=dataset_path,
+            job_name=job_name,
+            project_id=project_id,
+            job_id=data["jobId"],
+            oss_path=data["storePath"],
+            image_name=image_address,
+            scass_type=machine_type,
+            cmd=cmd,
+            log_files=log_files,
+            out_files=out_files,
+        )
+        return self.insert(job_add_request.to_dict())
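+
+    # Example (hypothetical project ID, machine type and image), assuming an
+    # initialized client that exposes this resource as `client.job`:
+    #   client.job.submit(
+    #       project_id=1234,
+    #       job_name="demo",
+    #       machine_type="c2_m4_cpu",
+    #       cmd="python main.py",
+    #       image_address="registry.example.com/my-image:latest",
+    #       work_dir="./inputs",
+    #       result="./results",
+    #   )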
+
+    def insert(self, data):
+        # log.info(f"insert job {data}")
+        response = self._client.post("/openapi/v2/job/add", json=data)
+        pprint(response.request)
+        print(response.json())
+        # Return the response so submit() hands something back to callers.
+        return response.json()
+
     def delete(self, job_id):
-        log.info(f"delete job {job_id}")
+        # log.info(f"delete job {job_id}")
         response = self._client.post(f"/openapi/v1/job/del/{job_id}")
-
         pprint(response.request)
-
         print(response.json())
     def terminate(self, job_id):
-        log.info(f"terminate job {job_id}")
+        # log.info(f"terminate job {job_id}")
         response = self._client.post(f"/openapi/v1/job/terminate/{job_id}")
-
         pprint(response.request)
-
         print(response.json())
     def kill(self, job_id):
-        log.info(f"kill job {job_id}")
+        # log.info(f"kill job {job_id}")
         response = self._client.post(f"/openapi/v1/job/kill/{job_id}")
-
         pprint(response.request)
-
         print(response.json())
     def log(self, job_id, log_file="STDOUTERR", page=-1, page_size=8192):
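+        """Fetch one page of `log_file` for the job; `page` and `page_size`
+        are passed through to the /openapi/v1/job/{job_id}/log endpoint."""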
-        log.info(f"log job {job_id}")
+        # log.info(f"log job {job_id}")
         response = self._client.get(f"/openapi/v1/job/{job_id}/log", params={"logFile": log_file, "page": page, "pageSize": page_size})
-
         pprint(response.request)
-
         print(response.json())
     def detail(self, job_id):
-        log.info(f"detail job {job_id}")
+        # log.info(f"detail job {job_id}")
         response = self._client.get(f"/openapi/v1/job/{job_id}")
-
         pprint(response.request)
-
         print(response.json())
+    def create_job(
+        self,
+        project_id: int,
+        name: Optional[str] = None,
+        group_id: int = 0,
+    ):
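+        """Create the job record and return its `data` payload, which is
+        expected to carry jobId, storePath and an upload token (see submit)."""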
+        # log.info(f"create job {name}")
+        data = {
+            "projectId": project_id,
+            "name": name,
+            "bohrGroupId": group_id,
+        }
+        response = self._client.post("/openapi/v1/job/create", json=data)
+        pprint(response.request)
+        print(response.json())
+        return response.json().get("data")
+
     def create_job_group(self, project_id, job_group_name):
-        log.info(f"create job group {job_group_name}")
+        # log.info(f"create job group {job_group_name}")
         response = self._client.post(f"/openapi/v1/job_group/add", json={"name": job_group_name, "projectId": project_id})
-
         pprint(response.request)
-
         print(response.json())
+
+    def upload(
+        self,
+        file_path: str,
+        object_key: str,
+        token: str,
+    ):
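+        """Upload one local file to `object_key` through the Tiefblue storage
+        client. Note: `token` is accepted but not currently forwarded."""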
+        tiefblue = Tiefblue()
+        tiefblue.upload_From_file_multi_part(
+            object_key=object_key,
+            file_path=file_path,
+            progress_bar=True)
+
+    def upload_dir(self, work_dir, store_path, token):
+        # Recursively upload every file under `work_dir`, mirroring its
+        # layout below `store_path`: <work_dir>/a/b.txt -> <store_path>/a/b.txt
+        for root, _, files in os.walk(work_dir):
+            for file in files:
+                full_path = os.path.join(root, file)
+                rel_path = os.path.relpath(full_path, work_dir)
+                object_key = os.path.join(store_path, rel_path)
+                self.upload(full_path, object_key, token)
+

 class AsyncJob(AsyncAPIResource):
     pass