@@ -1,36 +1,45 @@
 import logging
-from ..._resource import AsyncAPIResource, SyncAPIResource
-from ..tiefblue.tiefblue import Tiefblue
-from ...types.job.job import JobAddRequest
+import os
+import uuid
+from pathlib import Path
+
 # from ..._resource import BaseClient
 from pprint import pprint
 from typing import Optional
-import os
-from pathlib import Path
-import uuid
-# log: logging.Logger = logging.getLogger(__name__)
+
+from ..._resource import AsyncAPIResource, SyncAPIResource
+from ...types.job.job import JobAddRequest
+from ..tiefblue.tiefblue import Tiefblue
+
+log = logging.getLogger(__name__)


 class Job(SyncAPIResource):

+    def detail(self, job_id):
+        log.info(f"detail job {job_id}")
+        response = self._client.get(f"/openapi/v1/job/{job_id}")
+        log.debug(response)
+        return response.json().get("data")
+
     def submit(
-        self,
-        project_id: int,
+        self,
+        project_id: int,
         job_name: str,
         machine_type: str,
         cmd: str,
         image_address: str,
         job_group_id: int = 0,
-        work_dir: str = '',
-        result: str = '',
+        work_dir: str = "",
+        result: str = "",
         dataset_path: list = [],
         log_files: list = [],
         out_files: list = [],
     ):
         # log.info(f"submit job {name},project_id:{project_id}")
         data = self.create_job(project_id, job_name, job_group_id)
         print(data)
-        if work_dir != '':
+        if work_dir != "":
             if not os.path.exists(work_dir):
                 raise FileNotFoundError
             if os.path.isdir(work_dir):
@@ -39,11 +48,11 @@ def submit(
                 file_name = os.path.basename(work_dir)
                 object_key = os.path.join(data["storePath"], file_name)
                 self.upload(work_dir, object_key, data["token"])
-
+
         ep = os.path.expanduser(result)
         p = Path(ep).absolute().resolve()
         p = p.joinpath(str(uuid.uuid4()) + "_temp.zip")
-
+
         job_add_request = JobAddRequest(
             download_path=str(p.absolute().resolve()),
             dataset_path=dataset_path,
@@ -55,46 +64,43 @@ def submit(
             scass_type=machine_type,
             cmd=cmd,
             log_files=log_files,
-            out_files=out_files
+            out_files=out_files,
         )
         return self.insert(job_add_request.to_dict())
-
+
     def insert(self, data):
         # log.info(f"insert job {data}")
-        response = self._client.post(f"/openapi/v2/job/add", json=data)
+        response = self._client.post("/openapi/v2/job/add", json=data)
         pprint(response.request)
         print(response.json())
-
+
     def delete(self, job_id):
         # log.info(f"delete job {job_id}")
         response = self._client.post(f"/openapi/v1/job/del/{job_id}")
         pprint(response.request)
         print(response.json())
-
+
     def terminate(self, job_id):
         # log.info(f"terminate job {job_id}")
         response = self._client.post(f"/openapi/v1/job/terminate/{job_id}")
         pprint(response.request)
         print(response.json())
-
+
     def kill(self, job_id):
         # log.info(f"kill job {job_id}")
         response = self._client.post(f"/openapi/v1/job/kill/{job_id}")
         pprint(response.request)
         print(response.json())
-
+
     def log(self, job_id, log_file="STDOUTERR", page=-1, page_size=8192):
         # log.info(f"log job {job_id}")
-        response = self._client.get(f"/openapi/v1/job/{job_id}/log", params={"logFile": log_file, "page": page, "pageSize": page_size})
-        pprint(response.request)
-        print(response.json())
-
-    def detail(self, job_id):
-        # log.info(f"detail job {job_id}")
-        response = self._client.get(f"/openapi/v1/job/{job_id}")
+        response = self._client.get(
+            f"/openapi/v1/job/{job_id}/log",
+            params={"logFile": log_file, "page": page, "pageSize": page_size},
+        )
         pprint(response.request)
-        print(response.json())
-        return response.json().get("data")
+        print(response.json().get("data")["log"])
+        return response.json().get("data")["log"]

     def create_job(
         self,
@@ -103,22 +109,28 @@ def create_job(
         group_id: Optional[int] = 0,
     ):
         # log.info(f"create job {name}")
+        response = self._client.get("/openapi/v1/ak/get")
+
         data = {
+            "userId": response.json().get("data").get("user_id"),
             "projectId": project_id,
             "name": name,
             "bohrGroupId": group_id,
         }
-        response = self._client.post(f"/openapi/v1/job/create", json=data)
+        response = self._client.post("/openapi/v1/job/pre_create", json=data)
         pprint(response.request)
         print(response.json())
         return response.json().get("data")
-
+
     def create_job_group(self, project_id, job_group_name):
         # log.info(f"create job group {job_group_name}")
-        response = self._client.post(f"/openapi/v1/job_group/add", json={"name": job_group_name, "projectId": project_id})
+        response = self._client.post(
+            "/openapi/v1/job_group/add",
+            json={"name": job_group_name, "projectId": project_id},
+        )
         pprint(response.request)
         print(response.json())
-
+
     def upload(
         self,
         file_path: str,
@@ -127,25 +139,23 @@ def upload(
     ):
         tiefblue = Tiefblue()
         tiefblue.upload_From_file_multi_part(
-            object_key=object_key,
-            file_path=file_path,
-            progress_bar=True)
-
+            object_key=object_key, file_path=file_path, progress_bar=True
+        )
+
     def uploadr(self, work_dir, store_path, token):
-        if not work_dir.endswith('/'):
-            work_dir = work_dir + '/'
+        if not work_dir.endswith("/"):
+            work_dir = work_dir + "/"
         for root, _, files in os.walk(work_dir):
             for file in files:
                 full_path = os.path.join(root, file)
                 object_key = full_path.replace(work_dir, store_path)
                 self.upload(full_path, object_key, token)

-
     def download(self, job_id, save_path):
         detail = self.detail(job_id)
         tiefblue = Tiefblue()
         tiefblue.download_from_url(detail["resultUrl"], save_path)
-
+
+
 class AsyncJob(AsyncAPIResource):
     pass
-
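
A minimal usage sketch of the Job resource after this change. The `client.job` wiring, project id, machine type, and image below are placeholders and assumptions; only the method names, parameters, and return values come from the code in this diff:

    # Assumed entry point: an already-configured Bohrium SDK client that exposes
    # the Job resource above as `client.job` (the attribute name is an assumption).
    job = client.job

    # Pre-create the job record, upload ./workdir to the returned storePath,
    # and submit it with the given command and image (all values are placeholders).
    job.submit(
        project_id=12345,
        job_name="demo-job",
        machine_type="c2_m8_cpu",
        cmd="python main.py",
        image_address="registry.example.com/demo:latest",
        work_dir="./workdir",
        result="./output",
    )

    # Query a job, read its STDOUTERR log text, and download the result archive.
    info = job.detail(job_id=67890)
    log_text = job.log(job_id=67890)
    job.download(job_id=67890, save_path="./result.zip")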