@@ -173,6 +173,31 @@ def get_n_recent_jobs(prow_link,n):
    return "Failed to extract the spy-links"


+def check_job_status(spy_link):
+    '''
+    Gets the status of a job run, i.e. whether it was a success or a failure.
+
+    Parameter:
+        spy_link (string): SpyglassLink used to generate the URL to access the logs of a job.
+
+    Returns:
+        string: Job run status.
+    '''
+    job_status_url = PROW_VIEW_URL + spy_link[8:] + '/finished.json'
+    try:
+        response = requests.get(job_status_url, verify=False, timeout=15)
+        if response.status_code == 200:
+            cluster_status = json.loads(response.text)
+            return cluster_status["result"]
+        else:
+            return 'ERROR'
+    except requests.Timeout:
+        return "Request timed out"
+    except requests.RequestException:
+        return "Error while sending request to url"
+    except json.JSONDecodeError:
+        return 'Error while parsing finished.json'
+
def cluster_deploy_status(spy_link):

    '''
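For orientation, a minimal usage sketch of the new helper (not part of the PR): the SpyglassLink below is hypothetical, and the call simply fetches finished.json under the job's artifact URL (PROW_VIEW_URL + spy_link[8:]) and returns its "result" field.

# Illustrative only: this spy_link is made up, not taken from an actual job.
spy_link = "/view/gs/origin-ci-test/logs/periodic-ci-example-job/1234567890123456789"
status = check_job_status(spy_link)
if status == 'SUCCESS':
    print("Green build")
elif status == 'FAILURE':
    print("Job failed")
else:
    print(status)  # 'ERROR' or one of the timeout/request/parse error strings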
@@ -300,10 +325,7 @@ def get_node_status(spy_link):
    '''

    job_type, job_platform = job_classifier(spy_link)
-    if job_platform == "powervs" or job_platform == "mce":
-        job_type += "/gather-extra"
-    else:
-        job_type += "/gather-libvirt"
+    job_type += "/gather-extra"

    node_log_url = PROW_VIEW_URL + spy_link[8:] + \
        "/artifacts/" + job_type + "/artifacts/oc_cmds/nodes"
@@ -708,22 +730,26 @@ def get_junit_symptom_detection_testcase_failures(spy_link,job_type):

    symptom_detection_failed_testcase = []

-    if "power" in spy_link:
-        job_type = job_type + "/gather-extra"
-    elif "libvirt" in spy_link:
-        job_type = job_type + "/gather-libvirt"
+    job_type = job_type + "/gather-extra"

-    test_log_junit_dir_url = PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/artifacts/junit/junit_symptoms.xml"
+    test_log_junit_dir_url = PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/artifacts/junit/"

    symptom_detection_failed_testcase = []
    try:
        response = requests.get(test_log_junit_dir_url, verify=False, timeout=15)
        if response.status_code == 200:
-            root = ET.fromstring(response.content)
-            for testcase in root.findall('.//testcase'):
-                testcase_name = testcase.get('name')
-                if testcase.find('failure') is not None:
-                    symptom_detection_failed_testcase.append(testcase_name)
-            return symptom_detection_failed_testcase, None
+            junit_failure_summary_filename_re = re.compile('junit_symptoms.xml')
+            junit_failure_summary_filename_match = junit_failure_summary_filename_re.search(response.text)
+            if junit_failure_summary_filename_match is not None:
+                test_log_junit_url = PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/artifacts/junit/junit_symptoms.xml"
+                response_2 = requests.get(test_log_junit_url, verify=False, timeout=15)
+                root = ET.fromstring(response_2.content)
+                for testcase in root.findall('.//testcase'):
+                    testcase_name = testcase.get('name')
+                    if testcase.find('failure') is not None:
+                        symptom_detection_failed_testcase.append(testcase_name)
+                return symptom_detection_failed_testcase, None
+            else:
+                return symptom_detection_failed_testcase, "Junit test summary file not found"
        else:
            return symptom_detection_failed_testcase, 'Error fetching junit symptom detection test results'
    except requests.Timeout:
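The rewritten block above now fetches the gcsweb directory listing for artifacts/junit/ first and only downloads junit_symptoms.xml when that filename appears in the listing. A standalone sketch of that guard pattern, assuming a hypothetical junit_dir_url (not part of the PR):

import re
import requests

def junit_summary_available(junit_dir_url):
    # junit_dir_url is a hypothetical ".../artifacts/junit/" listing URL.
    # The listing is plain HTML, so a literal search of the response body is
    # enough to tell whether junit_symptoms.xml was uploaded for this job.
    listing = requests.get(junit_dir_url, verify=False, timeout=15)
    if listing.status_code != 200:
        return False
    return re.search('junit_symptoms.xml', listing.text) is not None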
@@ -1117,39 +1143,44 @@ def get_detailed_job_info(prow_ci_name,prow_ci_link,start_date=None,end_date=Non
    e2e_count = 0
    i = 0

-    pattern_job_id = r'/(\d+)'
-
    jobs_to_deleted = []
    for job in job_list:
-        match = re.search(pattern_job_id, job)
-        job_id = match.group(1)
        lease, nightly = get_quota_and_nightly(job)
        if zone is not None and lease not in zone:
            jobs_to_deleted.append(job)
            continue
        i = i + 1
        print(i, "Job link: https://prow.ci.openshift.org/" + job)
        print("Nightly info-", nightly)
-        cluster_status = cluster_deploy_status(job)
-        if "sno" not in job:
-            print("Lease Quota-", lease)
-            node_status = get_node_status(job)
-            print(node_status)
-            check_node_crash(job)
-
-        if cluster_status == 'SUCCESS':
+        job_status = check_job_status(job)
+        if job_status == 'SUCCESS':
            deploy_count += 1
-            if "sno" not in prow_ci_link:
-                job_type, _ = job_classifier(job)
-                tc_exe_status = print_all_failed_tc(job, job_type)
-                if tc_exe_status == "SUCCESS":
-                    e2e_count = e2e_count + 1
-
-        elif cluster_status == 'FAILURE':
-            print("Cluster Creation Failed")
-
-        elif cluster_status == 'ERROR':
-            print('Unable to get cluster status please check prowCI UI')
+            e2e_count = e2e_count + 1
+            check_node_crash(job)
+            print("This is a Green build")
+        elif job_status == 'FAILURE':
+            cluster_status = cluster_deploy_status(job)
+            if "sno" not in job:
+                print("Lease Quota-", lease)
+                node_status = get_node_status(job)
+                print(node_status)
+                check_node_crash(job)
+
+            if cluster_status == 'SUCCESS':
+                deploy_count += 1
+                if "sno" not in prow_ci_link:
+                    job_type, _ = job_classifier(job)
+                    tc_exe_status = print_all_failed_tc(job, job_type)
+                    if tc_exe_status == "SUCCESS":
+                        e2e_count = e2e_count + 1
+
+            elif cluster_status == 'FAILURE':
+                print("Cluster Creation Failed")
+
+            elif cluster_status == 'ERROR':
+                print('Unable to get cluster status please check prowCI UI')
+        else:
+            print(job_status)

        print("\n")
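To summarize the reworked flow in get_detailed_job_info(), here is a condensed, hypothetical mini-driver mirroring the branching the hunk above introduces; the "jobs" list is made up, the helper functions are the ones defined in this file, and the drill-down details (node status, failed-testcase reporting) are elided.

# Sketch only: mirrors the new job-level check before any cluster-level drill-down.
jobs = ["/view/gs/origin-ci-test/logs/periodic-ci-example-job/111",
        "/view/gs/origin-ci-test/logs/periodic-ci-example-job/222"]
deploy_count = 0
e2e_count = 0
for job in jobs:
    job_status = check_job_status(job)
    if job_status == 'SUCCESS':
        deploy_count += 1          # green build: install and e2e both passed
        e2e_count += 1
    elif job_status == 'FAILURE':
        cluster_status = cluster_deploy_status(job)   # find out where it broke
        if cluster_status == 'SUCCESS':
            deploy_count += 1
    else:
        print(job_status)          # 'ERROR' or a request/parse error string
print("Deploys:", deploy_count, "Green e2e:", e2e_count)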