@@ -173,6 +173,31 @@ def get_n_recent_jobs(prow_link,n):
        return "Failed to extract the spy-links"


+def check_job_status(spy_link):
+    '''
+    Gets the status of a job, i.e. whether the run was a success or a failure.
+
+    Parameter:
+        spy_link (string): SpyglassLink used to generate the URL to access the logs of a job.
+
+    Returns:
+        string: Job run status.
+    '''
+    job_status_url = PROW_VIEW_URL + spy_link[8:] + '/finished.json'
+    try:
+        response = requests.get(job_status_url, verify=False, timeout=15)
+        if response.status_code == 200:
+            cluster_status = json.loads(response.text)
+            return cluster_status["result"]
+        else:
+            return 'ERROR'
+    except requests.Timeout:
+        return "Request timed out"
+    except requests.RequestException:
+        return "Error while sending request to url"
+    except json.JSONDecodeError:
+        return 'Error while parsing finished.json'
+
def cluster_deploy_status(spy_link):

    '''
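
Note on the new helper: Prow writes a finished.json at the end of each job run, and its "result" field carries the overall outcome (for example SUCCESS or FAILURE), which is what check_job_status returns. A minimal standalone sketch of the same lookup follows; the PROW_VIEW_URL value and the SpyglassLink are placeholders for illustration, not values taken from this repository.

# Standalone sketch of the finished.json lookup (placeholder URL and SpyglassLink).
import json
import requests

PROW_VIEW_URL = "https://gcsweb-ci.apps.ci.l2s4.p1.openshiftapps.com/gcs"  # assumed base URL
spy_link = "/view/gs/test-platform-results/logs/periodic-ci-example-job/1234567890123456789"  # hypothetical

# spy_link[8:] drops the leading "/view/gs" portion of the SpyglassLink, as in the code above.
url = PROW_VIEW_URL + spy_link[8:] + "/finished.json"
try:
    response = requests.get(url, verify=False, timeout=15)
    status = json.loads(response.text)["result"] if response.status_code == 200 else "ERROR"
except (requests.RequestException, ValueError, KeyError):
    status = "ERROR"
print(status)
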
@@ -300,10 +325,7 @@ def get_node_status(spy_link):
    '''

    job_type, job_platform = job_classifier(spy_link)
-    if job_platform == "powervs" or job_platform == "mce":
-        job_type += "/gather-extra"
-    else:
-        job_type += "/gather-libvirt"
+    job_type += "/gather-extra"

    node_log_url = PROW_VIEW_URL + spy_link[8:] + \
        "/artifacts/" + job_type + "/artifacts/oc_cmds/nodes"
@@ -695,22 +717,26 @@ def get_junit_symptom_detection_testcase_failures(spy_link,job_type):

    symptom_detection_failed_testcase = []

-    if "power" in spy_link:
-        job_type = job_type + "/gather-extra"
-    elif "libvirt" in spy_link:
-        job_type = job_type + "/gather-libvirt"
+    job_type = job_type + "/gather-extra"

-    test_log_junit_dir_url = PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/artifacts/junit/junit_symptoms.xml"
+    test_log_junit_dir_url = PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/artifacts/junit/"
    symptom_detection_failed_testcase = []
    try:
        response = requests.get(test_log_junit_dir_url, verify=False, timeout=15)
        if response.status_code == 200:
-            root = ET.fromstring(response.content)
-            for testcase in root.findall('.//testcase'):
-                testcase_name = testcase.get('name')
-                if testcase.find('failure') is not None:
-                    symptom_detection_failed_testcase.append(testcase_name)
-            return symptom_detection_failed_testcase, None
+            junit_failure_summary_filename_re = re.compile('junit_symptoms.xml')
+            junit_failure_summary_filename_match = junit_failure_summary_filename_re.search(response.text)
+            if junit_failure_summary_filename_match is not None:
+                test_log_junit_url = PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/artifacts/junit/junit_symptoms.xml"
+                response_2 = requests.get(test_log_junit_url, verify=False, timeout=15)
+                root = ET.fromstring(response_2.content)
+                for testcase in root.findall('.//testcase'):
+                    testcase_name = testcase.get('name')
+                    if testcase.find('failure') is not None:
+                        symptom_detection_failed_testcase.append(testcase_name)
+                return symptom_detection_failed_testcase, None
+            else:
+                return symptom_detection_failed_testcase, "Junit test summary file not found"
        else:
            return symptom_detection_failed_testcase, 'Error fetching junit symptom detection test results'
    except requests.Timeout:
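
Two things change here: gather-extra is now used unconditionally (as in get_node_status above), and the junit directory listing is checked for junit_symptoms.xml before the file itself is fetched, so a missing summary file is reported explicitly instead of being lumped in with other fetch errors. The XML handling is unchanged; a self-contained sketch of that parsing step, using an inline sample document instead of a real artifact:

# Self-contained illustration of the junit failure extraction (sample XML, not a real artifact).
import xml.etree.ElementTree as ET

sample = """<testsuite tests="2" failures="1">
  <testcase name="symptom-check-a"/>
  <testcase name="symptom-check-b"><failure>boom</failure></testcase>
</testsuite>"""

failed = []
root = ET.fromstring(sample)
for testcase in root.findall('.//testcase'):
    if testcase.find('failure') is not None:
        failed.append(testcase.get('name'))
print(failed)  # ['symptom-check-b']
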
@@ -1104,39 +1130,44 @@ def get_detailed_job_info(prow_ci_name,prow_ci_link,start_date=None,end_date=Non
    e2e_count = 0
    i = 0

-    pattern_job_id = r'/(\d+)'
-
    jobs_to_deleted = []
    for job in job_list:
-        match = re.search(pattern_job_id, job)
-        job_id = match.group(1)
        lease, nightly = get_quota_and_nightly(job)
        if zone is not None and lease not in zone:
            jobs_to_deleted.append(job)
            continue
        i = i + 1
        print(i, "Job link: https://prow.ci.openshift.org/" + job)
        print("Nightly info-", nightly)
-        cluster_status = cluster_deploy_status(job)
-        if "sno" not in job:
-            print("Lease Quota-", lease)
-            node_status = get_node_status(job)
-            print(node_status)
-            check_node_crash(job)
-
-        if cluster_status == 'SUCCESS':
+        job_status = check_job_status(job)
+        if job_status == 'SUCCESS':
            deploy_count += 1
-            if "sno" not in prow_ci_link:
-                job_type, _ = job_classifier(job)
-                tc_exe_status = print_all_failed_tc(job, job_type)
-                if tc_exe_status == "SUCCESS":
-                    e2e_count = e2e_count + 1
-
-        elif cluster_status == 'FAILURE':
-            print("Cluster Creation Failed")
-
-        elif cluster_status == 'ERROR':
-            print('Unable to get cluster status please check prowCI UI')
+            e2e_count = e2e_count + 1
+            check_node_crash(job)
+            print("This is a Green build")
+        elif job_status == 'FAILURE':
+            cluster_status = cluster_deploy_status(job)
+            if "sno" not in job:
+                print("Lease Quota-", lease)
+                node_status = get_node_status(job)
+                print(node_status)
+                check_node_crash(job)
+
+            if cluster_status == 'SUCCESS':
+                deploy_count += 1
+                if "sno" not in prow_ci_link:
+                    job_type, _ = job_classifier(job)
+                    tc_exe_status = print_all_failed_tc(job, job_type)
+                    if tc_exe_status == "SUCCESS":
+                        e2e_count = e2e_count + 1
+
+            elif cluster_status == 'FAILURE':
+                print("Cluster Creation Failed")
+
+            elif cluster_status == 'ERROR':
+                print('Unable to get cluster status please check prowCI UI')
+        else:
+            print(job_status)

        print("\n")
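
The triage order in get_detailed_job_info is now: look at the overall job result first, count green builds immediately, and only fall back to the cluster-deploy status and node inspection when the job reports FAILURE. A condensed, illustrative view of that control flow (statuses are passed in here instead of being fetched, and the wording of the summaries is mine, not the script's):

# Condensed sketch of the new decision flow; triage() is illustrative only.
def triage(job_status, cluster_status=None):
    if job_status == 'SUCCESS':
        return "green build: deploy and e2e both counted"
    if job_status != 'FAILURE':
        return job_status  # timeout / request / parse errors are surfaced as-is
    if cluster_status == 'SUCCESS':
        return "deploy succeeded, so the failure came later (failed test cases are printed)"
    if cluster_status == 'FAILURE':
        return "cluster creation failed"
    return "unable to get cluster status, check the Prow CI UI"

print(triage('SUCCESS'))
print(triage('FAILURE', 'FAILURE'))
print(triage('Request timed out'))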