Commit ddd8289 (1 parent: e272118)

Added a function to get job status

Signed-off-by: SurajGudaji <[email protected]>

File tree

1 file changed: +69 additions, -38 deletions

monitor.py

Lines changed: 69 additions & 38 deletions
@@ -173,6 +173,31 @@ def get_n_recent_jobs(prow_link,n):
     return "Failed to extract the spy-links"
 
 
+def check_job_status(spy_link):
+    '''
+    Gets the status of the job, i.e. whether the run was a success or a failure.
+
+    Parameter:
+        spy_link (string): SpyglassLink used to generate the URL to access the logs of a job.
+
+    Returns:
+        string: Job run status
+    '''
+    job_status_url = PROW_VIEW_URL + spy_link[8:] + '/finished.json'
+    try:
+        response = requests.get(job_status_url, verify=False, timeout=15)
+        if response.status_code == 200:
+            cluster_status = json.loads(response.text)
+            return cluster_status["result"]
+        else:
+            return 'ERROR'
+    except requests.Timeout:
+        return "Request timed out"
+    except requests.RequestException:
+        return "Error while sending request to url"
+    except json.JSONDecodeError:
+        return 'Error while parsing finished.json'
+
 def cluster_deploy_status(spy_link):
 
     '''
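For reference, check_job_status reads the job's finished.json artifact; in Prow that file normally carries the overall verdict in its "result" field. A minimal usage sketch follows, assuming a placeholder SpyglassLink and a typical finished.json shape (neither is taken from this repository):

    # Hypothetical SpyglassLink; spy_link[8:] strips the leading "/view/gs"
    # so the remaining bucket path can be appended to PROW_VIEW_URL.
    spy_link = "/view/gs/test-platform-results/logs/periodic-sample-ppc64le-job/1700000000000000000"

    # finished.json typically looks roughly like:
    #   {"timestamp": 1700000000, "passed": true, "result": "SUCCESS"}
    # so the call below should return "SUCCESS", "FAILURE", or one of the
    # error strings returned by the exception handlers above.
    print(check_job_status(spy_link))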
@@ -300,10 +325,7 @@ def get_node_status(spy_link):
     '''
 
     job_type,job_platform = job_classifier(spy_link)
-    if job_platform == "powervs" or job_platform == "mce":
-        job_type += "/gather-extra"
-    else:
-        job_type += "/gather-libvirt"
+    job_type += "/gather-extra"
 
     node_log_url = PROW_VIEW_URL + spy_link[8:] + \
         "/artifacts/" + job_type +"/artifacts/oc_cmds/nodes"
@@ -708,22 +730,26 @@ def get_junit_symptom_detection_testcase_failures(spy_link,job_type):
 
     symptom_detection_failed_testcase = []
 
-    if "power" in spy_link:
-        job_type=job_type+"/gather-extra"
-    elif "libvirt" in spy_link:
-        job_type=job_type+"/gather-libvirt"
+    job_type=job_type+"/gather-extra"
 
-    test_log_junit_dir_url = PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/artifacts/junit/junit_symptoms.xml"
+    test_log_junit_dir_url = PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/artifacts/junit/"
     symptom_detection_failed_testcase = []
     try:
         response = requests.get(test_log_junit_dir_url,verify=False,timeout=15)
         if response.status_code == 200:
-            root = ET.fromstring(response.content)
-            for testcase in root.findall('.//testcase'):
-                testcase_name = testcase.get('name')
-                if testcase.find('failure') is not None:
-                    symptom_detection_failed_testcase.append(testcase_name)
-            return symptom_detection_failed_testcase, None
+            junit_failure_summary_filename_re = re.compile('junit_symptoms.xml', re.MULTILINE|re.DOTALL)
+            junit_failure_summary_filename_match = junit_failure_summary_filename_re.search(response.text)
+            if junit_failure_summary_filename_match is not None:
+                test_log_junit_url = PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/artifacts/junit/junit_symptoms.xml"
+                response_2 = requests.get(test_log_junit_url,verify=False,timeout=15)
+                root = ET.fromstring(response_2.content)
+                for testcase in root.findall('.//testcase'):
+                    testcase_name = testcase.get('name')
+                    if testcase.find('failure') is not None:
+                        symptom_detection_failed_testcase.append(testcase_name)
+                return symptom_detection_failed_testcase, None
+            else:
+                return symptom_detection_failed_testcase, "Junit test summary file not found"
         else:
             return symptom_detection_failed_testcase, 'Error fetching junit symptom detection test results'
     except requests.Timeout:
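The reworked lookup now fetches the junit/ directory listing first and only downloads junit_symptoms.xml when the listing mentions it, so a missing summary file is reported instead of being parsed as XML. A generic sketch of that guard, with artifact_listed as a hypothetical helper name (it is not defined in monitor.py):

    import requests

    def artifact_listed(dir_url, filename, timeout=15):
        # Fetch the artifact directory listing and report whether the given
        # file name appears in it; a substring test mirrors the regex search above.
        response = requests.get(dir_url, verify=False, timeout=timeout)
        return response.status_code == 200 and filename in response.text

    # Example guard before downloading the junit summary (URL variable as in the diff above):
    # if artifact_listed(test_log_junit_dir_url, "junit_symptoms.xml"):
    #     response = requests.get(test_log_junit_dir_url + "junit_symptoms.xml", verify=False, timeout=15)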
@@ -1117,39 +1143,44 @@ def get_detailed_job_info(prow_ci_name,prow_ci_link,start_date=None,end_date=None
     e2e_count = 0
     i=0
 
-    pattern_job_id = r'/(\d+)'
-
     jobs_to_deleted = []
     for job in job_list:
-        match = re.search(pattern_job_id, job)
-        job_id = match.group(1)
         lease, nightly = get_quota_and_nightly(job)
         if zone is not None and lease not in zone:
             jobs_to_deleted.append(job)
             continue
         i=i+1
         print(i,"Job link: https://prow.ci.openshift.org/"+job)
         print("Nightly info-", nightly)
-        cluster_status=cluster_deploy_status(job)
-        if "sno" not in job:
-            print("Lease Quota-", lease)
-            node_status = get_node_status(job)
-            print(node_status)
-            check_node_crash(job)
-
-        if cluster_status == 'SUCCESS':
+        job_status = check_job_status(job)
+        if job_status == 'SUCCESS':
             deploy_count += 1
-            if "sno" not in prow_ci_link:
-                job_type,_ = job_classifier(job)
-                tc_exe_status=print_all_failed_tc(job,job_type)
-                if tc_exe_status=="SUCCESS":
-                    e2e_count=e2e_count+1
-
-        elif cluster_status == 'FAILURE':
-            print("Cluster Creation Failed")
-
-        elif cluster_status == 'ERROR':
-            print('Unable to get cluster status please check prowCI UI ')
+            e2e_count=e2e_count+1
+            check_node_crash(job)
+            print("This is a Green build")
+        elif job_status == 'FAILURE':
+            cluster_status=cluster_deploy_status(job)
+            if "sno" not in job:
+                print("Lease Quota-", lease)
+                node_status = get_node_status(job)
+                print(node_status)
+                check_node_crash(job)
+
+            if cluster_status == 'SUCCESS':
+                deploy_count += 1
+                if "sno" not in prow_ci_link:
+                    job_type,_ = job_classifier(job)
+                    tc_exe_status=print_all_failed_tc(job,job_type)
+                    if tc_exe_status=="SUCCESS":
+                        e2e_count=e2e_count+1
+
+            elif cluster_status == 'FAILURE':
+                print("Cluster Creation Failed")
+
+            elif cluster_status == 'ERROR':
+                print('Unable to get cluster status please check prowCI UI ')
+        else:
+            print(job_status)
 
         print("\n")
 