31
31
32
32
# Matches a failing-flag line in a build log, e.g. "--incompatible_foo".
FLAG_LINE_PATTERN = re.compile(r"\s*(?P<flag>--\S+)\s*")

# Matches a "<module>@<version>" token, e.g. "rules_cc@0.0.9".
MODULE_VERSION_PATTERN = re.compile(r"(?P<module_version>[a-z](?:[a-z0-9._-]*[a-z0-9])?@[^\s]+)")

# BCR modules whose incompatible-flag migration is owned by the Bazel team.
BAZEL_TEAM_OWNED_MODULES = frozenset([
    "bazel-skylib",
    "rules_android",
    "rules_android_ndk",
    "rules_cc",
    "rules_java",
    "rules_license",
    "rules_pkg",
    "rules_platform",
    "rules_shell",
    "rules_testing",
])

# Noun used in user-facing messages: the BCR compatibility-test pipeline
# operates on "module"s, all other pipelines on "project"s.
PROJECT = "module" if PIPELINE == "bcr-bazel-compatibility-test" else "project"

# Cap the number of concurrent log downloads performed by LogFetcher threads.
MAX_LOG_FETCHER_THREADS = 30
LOG_FETCHER_SEMAPHORE = threading.Semaphore(MAX_LOG_FETCHER_THREADS)
53
+
34
54
class LogFetcher(threading.Thread):
    """Background thread that downloads the build log of a single job."""

    def __init__(self, job, client):
        threading.Thread.__init__(self)
        # NOTE(review): the two assignments below were elided in the diff
        # context; reconstructed from their use in run() — confirm.
        self.job = job
        self.client = client
        # Filled in by run(); remains None until the fetch completes.
        self.log = None

    def run(self):
        # Throttle concurrent downloads via the module-level semaphore so we
        # don't overwhelm the log backend with MAX_LOG_FETCHER_THREADS+ calls.
        with LOG_FETCHER_SEMAPHORE:
            self.log = self.client.get_build_log(self.job)
43
64
44
65
45
66
def process_build_log (failed_jobs_per_flag , already_failing_jobs , log , job ):
@@ -59,6 +80,10 @@ def handle_failing_flags(line):
59
80
if index_success == - 1 or index_failure == - 1 :
60
81
raise bazelci .BuildkiteException ("Cannot recognize log of " + job ["web_url" ])
61
82
for line in log [index_failure :].split ("\n " ):
83
+ # Strip out BuildKite timestamp prefix
84
+ line = re .sub (r'\x1b.*?\x07' , '' , line .strip ())
85
+ if not line :
86
+ break
62
87
handle_failing_flags (line )
63
88
log = log [0 : log .rfind ("+++ Result" )]
64
89
@@ -67,6 +92,12 @@ def handle_failing_flags(line):
67
92
already_failing_jobs .append (job )
68
93
69
94
95
def extract_module_version(line):
    """Return the first "<module>@<version>" token found in *line*, or None."""
    match = MODULE_VERSION_PATTERN.search(line)
    # Explicit None keeps the "not found" contract obvious to callers.
    return match.group("module_version") if match else None
70
101
def extract_flag (line ):
71
102
match = FLAG_LINE_PATTERN .match (line )
72
103
if match :
def get_html_link_text(content, link):
    """Wrap *content* in an HTML anchor opening *link* in a new tab."""
    return f'<a href="{link}" target="_blank">{content}</a>'
78
109
79
110
111
def is_project_owned_by_bazel_team(project):
    """Return True if *project* (pipeline name or "module@version") is owned
    by the Bazel team."""
    if (
        bazelci.is_downstream_pipeline()
        and project in bazelci.DOWNSTREAM_PROJECTS
        and bazelci.DOWNSTREAM_PROJECTS[project].get("owned_by_bazel")
    ):
        # Check the downstream projects definition.
        return True
    # get_project_name() can return None when no "module@version" token is
    # found; guard before split to avoid AttributeError.
    if project and project.split("@")[0] in BAZEL_TEAM_OWNED_MODULES:
        # Parse the module name and check if it's Bazel team owned.
        return True
    return False
80
122
def needs_bazel_team_migrate(jobs):
    """Return True if any of the given jobs belongs to a project/module that
    must be migrated by the Bazel team."""
    return any(is_project_owned_by_bazel_team(get_project_name(job)) for job in jobs)
89
129
90
130
91
131
def print_flags_ready_to_flip (failed_jobs_per_flag , incompatible_flags ):
92
- info_text1 = ["#### The following flags didn't break any passing projects " ]
132
+ info_text1 = [f "#### The following flags didn't break any passing { PROJECT } s " ]
93
133
for flag in sorted (list (incompatible_flags .keys ())):
94
134
if flag not in failed_jobs_per_flag :
95
135
html_link_text = get_html_link_text (":github:" , incompatible_flags [flag ])
@@ -99,7 +139,7 @@ def print_flags_ready_to_flip(failed_jobs_per_flag, incompatible_flags):
99
139
info_text1 = []
100
140
101
141
info_text2 = [
102
- "#### The following flags didn't break any passing Bazel team owned/co-owned projects "
142
+ f "#### The following flags didn't break any passing Bazel team owned/co-owned { PROJECT } s "
103
143
]
104
144
for flag , jobs in failed_jobs_per_flag .items ():
105
145
if flag not in incompatible_flags :
@@ -128,7 +168,7 @@ def print_already_fail_jobs(already_failing_jobs):
128
168
129
169
130
170
def print_projects_need_to_migrate (failed_jobs_per_flag ):
131
- info_text = ["#### The following projects need migration" ]
171
+ info_text = [f "#### The following { PROJECT } s need migration" ]
132
172
jobs_need_migration = {}
133
173
for jobs in failed_jobs_per_flag .values ():
134
174
for job in jobs .values ():
@@ -141,14 +181,14 @@ def print_projects_need_to_migrate(failed_jobs_per_flag):
141
181
142
182
projects = set ()
143
183
for job in job_list :
144
- project , _ = get_pipeline_and_platform (job )
184
+ project = get_project_name (job )
145
185
projects .add (project )
146
186
project_num = len (projects )
147
187
148
188
s1 = "" if project_num == 1 else "s"
149
189
s2 = "s" if project_num == 1 else ""
150
190
info_text .append (
151
- f"<details><summary>{ project_num } project { s1 } need{ s2 } migration, click to see details</summary><ul>"
191
+ f"<details><summary>{ project_num } { PROJECT } { s1 } need{ s2 } migration, click to see details</summary><ul>"
152
192
)
153
193
154
194
entries = merge_and_format_jobs (job_list , " <li><strong>{}</strong>: {}</li>" )
@@ -179,62 +219,68 @@ def print_flags_need_to_migrate(failed_jobs_per_flag, incompatible_flags):
179
219
if jobs :
180
220
github_url = incompatible_flags [flag ]
181
221
info_text = [f"* **{ flag } ** " + get_html_link_text (":github:" , github_url )]
182
- jobs_per_pipeline = merge_jobs (jobs .values ())
183
- for pipeline , platforms in jobs_per_pipeline .items ():
222
+ jobs_per_project = merge_jobs (jobs .values ())
223
+ for project , platforms in jobs_per_project .items ():
184
224
bazel_mark = ""
185
- if pipeline in bazelci .DOWNSTREAM_PROJECTS and bazelci .DOWNSTREAM_PROJECTS [
186
- pipeline
187
- ].get ("owned_by_bazel" ):
225
+ if is_project_owned_by_bazel_team (project ):
188
226
bazel_mark = ":bazel:"
189
227
platforms_text = ", " .join (platforms )
190
- info_text .append (f" - { bazel_mark } **{ pipeline } **: { platforms_text } " )
228
+ info_text .append (f" - { bazel_mark } **{ project } **: { platforms_text } " )
191
229
# Use flag as the context so that each flag gets a different info box.
192
230
print_info (flag , "error" , info_text )
193
231
printed_flag_boxes = True
194
232
if not printed_flag_boxes :
195
233
return
196
234
info_text = [
197
- "#### Downstream projects need to migrate for the following flags:" ,
235
+ "#### Projects need to migrate for the following flags:" ,
198
236
"Projects marked with :bazel: need to be migrated by the Bazel team." ,
199
237
]
200
238
print_info ("flags_need_to_migrate" , "error" , info_text )
201
239
202
240
203
241
def merge_jobs(jobs):
    """Group job links by project.

    Returns a dict {project_name: [HTML link per platform]}, with jobs
    processed in case-insensitive name order.
    """
    jobs_per_project = collections.defaultdict(list)
    for job in sorted(jobs, key=lambda s: s["name"].lower()):
        project = get_project_name(job)
        platform_label = get_platform_emoji_name(job)
        jobs_per_project[project].append(get_html_link_text(platform_label, job["web_url"]))
    return jobs_per_project
209
248
210
249
211
250
def merge_and_format_jobs(jobs, line_pattern):
    """Merge all jobs for a single project into one formatted line.

    Example:
        project (platform1)
        project (platform2)
        project (platform3)
    with line_pattern ">> {}: {}" becomes
        >> project: platform1, platform2, platform3
    """
    jobs_per_project = merge_jobs(jobs)
    return [
        line_pattern.format(project, ", ".join(platforms))
        for project, platforms in jobs_per_project.items()
    ]
224
263
225
264
226
def get_project_name(job):
    """Derive the project (or "module@version") name from a job's name."""
    # Strip out the platform label from the job name.
    name = job["name"].replace(get_platform_emoji_name(job), "")
    if bazelci.is_downstream_pipeline():
        # Downstream pipeline: keep everything before the first "-" or "(".
        return name.partition("-")[0].partition("(")[0].strip()
    # BCR compatibility-test pipeline: parse the module name + version.
    # May return None if no "module@version" token is present.
    return extract_module_version(name)
275
+
276
def get_platform_emoji_name(job):
    """Return the platform emoji label contained in the job's name.

    Searches the job name for each known platform's "emoji-name" label.
    Raises bazelci.BuildkiteException when no label matches.
    """
    name = job["name"]
    for p in bazelci.PLATFORMS.values():
        platform_label = p.get("emoji-name")
        if platform_label in name:
            return platform_label
    raise bazelci.BuildkiteException("Cannot detect platform name for: " + job["web_url"])
238
284
239
285
240
286
def print_info (context , style , info ):
@@ -264,8 +310,8 @@ def analyze_logs(build_number, client):
264
310
265
311
threads = []
266
312
for job in build_info ["jobs" ]:
267
- # Some irrelevant job has no "state" field
268
- if "state" in job :
313
+ # Some irrelevant job has no "state" or "raw_log_url" field
314
+ if "state" in job and "raw_log_url" in job :
269
315
thread = LogFetcher (job , client )
270
316
threads .append (thread )
271
317
thread .start ()
0 commit comments