diff --git a/infra/build/functions/build_and_push_test_images.py b/infra/build/functions/build_and_push_test_images.py
index 724686e5ea9a..d9695e69f8a7 100755
--- a/infra/build/functions/build_and_push_test_images.py
+++ b/infra/build/functions/build_and_push_test_images.py
@@ -104,14 +104,13 @@ def wait_for_build_and_report_summary(build_id, cloud_project='oss-fuzz-base'):
       client_options=build_lib.REGIONAL_CLIENT_OPTIONS)
   cloudbuild_api = cloudbuild.projects().builds()

-  logs_url = build_lib.get_gcb_url(build_id, cloud_project)
   logging.info(
       '================================================================')
   logging.info(' PHASE 1: STARTED BASE IMAGE BUILD')
   logging.info(
       '----------------------------------------------------------------')
-  logging.info('GCB Build ID: %s', build_id)
-  logging.info('GCB Build URL: %s', logs_url)
+  for line in build_lib.get_build_info_lines(build_id, cloud_project):
+    logging.info(line)
   logging.info(
       '================================================================')

@@ -128,14 +127,13 @@ def wait_for_build_and_report_summary(build_id, cloud_project='oss-fuzz-base'):
       logging.error('Error checking build status: %s', e)
       time.sleep(15)

-  logs_url = build_lib.get_gcb_url(build_id, cloud_project)
   logging.info(
       '================================================================')
   logging.info(' PHASE 1: BASE IMAGE BUILD REPORT')
   logging.info(
       '----------------------------------------------------------------')
-  logging.info('GCB Build ID: %s', build_id)
-  logging.info('GCB Build URL: %s', logs_url)
+  for line in build_lib.get_build_info_lines(build_id, cloud_project):
+    logging.info(line)
   logging.info(
       '================================================================')

diff --git a/infra/build/functions/build_lib.py b/infra/build/functions/build_lib.py
index 3a663996dcc2..87362d1a3544 100644
--- a/infra/build/functions/build_lib.py
+++ b/infra/build/functions/build_lib.py
@@ -282,24 +282,19 @@ def get_signed_policy_document_upload_prefix(bucket, path_prefix):
   )


+# pylint: disable=no-member
 def get_signed_url(path, method='PUT', content_type=''):
-  """Returns a signed URL for |path|."""
-  timestamp = datetime.datetime.utcnow() + datetime.timedelta(hours=1)
-  timestamp = timestamp.strftime('%Y-%m-%dT%H:%M:%SZ')
-
-  path = urlparse.urlparse(path)
-  blob_path = path.path.lstrip('/')
-  blob = f"""{method}
-
-{content_type}
-
-{timestamp}
-/{path.netloc}/{blob_path}"""
+  """Returns signed url."""
+  timestamp = int(time.time() + BUILD_TIMEOUT)
+  blob = f'{method}\n\n{content_type}\n{timestamp}\n{path}'
   client_id, signature = _sign_blob(blob)
-  return (f'https://storage.googleapis.com/{path.netloc}/{blob_path}'
-          f'?GoogleAccessId={client_id}&Expires={int(time.time() + 3600)}'
-          f'&Signature={urlparse.quote_plus(signature)}')
+  values = {
+      'GoogleAccessId': client_id,
+      'Expires': timestamp,
+      'Signature': signature,
+  }
+  return f'https://storage.googleapis.com{path}?{urlparse.urlencode(values)}'


 def _normalized_name(name):
@@ -685,6 +680,17 @@ def get_gcb_url(build_id, cloud_project='oss-fuzz'):
           f'{build_id}?project={cloud_project}')


+def get_build_info_lines(build_id, cloud_project='oss-fuzz'):
+  """Returns a list of strings with build information."""
+  gcb_url = get_gcb_url(build_id, cloud_project)
+  log_url = get_logs_url(build_id)
+  return [
+      f'GCB Build ID: {build_id}',
+      f'GCB Build URL: {gcb_url}',
+      f'Log URL: {log_url}',
+  ]
+
+
 def get_runner_image_name(test_image_suffix, base_image_tag=None):
   """Returns the runner image that should be used.
@@ -732,6 +738,7 @@ def get_build_body(  # pylint: disable=too-many-arguments
       'steps': steps,
       'timeout': str(timeout) + 's',
       'options': options,
+      'logsBucket': 'gs://oss-fuzz-gcb-logs',
   }
   if tags:
     build_body['tags'] = tags
diff --git a/infra/build/functions/gcb.py b/infra/build/functions/gcb.py
index 6a040466010f..55134fe039cd 100644
--- a/infra/build/functions/gcb.py
+++ b/infra/build/functions/gcb.py
@@ -78,6 +78,9 @@ def exec_command_from_github(args):

   if full_command is None:
     logging.info('Trial build not requested.')
+    # Create a flag file to indicate that the build was skipped.
+    with open('trial_build_skipped.flag', 'w') as f:
+      pass
     return None
   command_file = full_command[0]
   command = full_command[1:]
diff --git a/infra/build/functions/report_generator.py b/infra/build/functions/report_generator.py
index e1626bedf2aa..b236ee3cfc44 100644
--- a/infra/build/functions/report_generator.py
+++ b/infra/build/functions/report_generator.py
@@ -128,6 +128,10 @@ def generate_comparison_table(all_results):

 def main():
   """Main function to generate report and determine pipeline status."""
+  if os.path.exists('trial_build_skipped.flag'):
+    print('Skipping report generation because trial build was not invoked.')
+    sys.exit(0)
+
   all_results = {}
   any_failures = False
   any_results_found = False
diff --git a/infra/build/functions/trial_build.py b/infra/build/functions/trial_build.py
index 849effed8515..04ebd3268b86 100644
--- a/infra/build/functions/trial_build.py
+++ b/infra/build/functions/trial_build.py
@@ -362,14 +362,15 @@ def _do_test_builds(args, test_image_suffix, end_time, version_tag):
   for project, project_builds in sorted(build_ids.items()):
     logging.info(' - %s:', project)
     for build_id, build_type in project_builds:
-      logging.info(' - Build ID: %s', build_id)
       logging.info(' - Build Type: %s', build_type)
-      logging.info(' GCB URL: %s',
-                   build_lib.get_gcb_url(build_id, build_lib.IMAGE_PROJECT))
+      for line in build_lib.get_build_info_lines(build_id,
+                                                 build_lib.IMAGE_PROJECT):
+        logging.info(' %s', line)
   logging.info('-----------------------')

-  wait_result = wait_on_builds(build_ids, credentials, build_lib.IMAGE_PROJECT,
-                               end_time, skipped_projects, version_tag)
+  wait_result = wait_on_builds(args, build_ids, credentials,
+                               build_lib.IMAGE_PROJECT, end_time,
+                               skipped_projects, version_tag)

   if failed_to_start_builds:
     logging.error(
@@ -483,7 +484,7 @@ def check_finished(build_id, cloudbuild_api, cloud_project, retries_map):
   return build_status


-def wait_on_builds(build_ids, credentials, cloud_project, end_time,
+def wait_on_builds(args, build_ids, credentials, cloud_project, end_time,
                    skipped_projects, version_tag):  # pylint: disable=too-many-locals
   """Waits on |builds|. Returns True if all builds succeed."""
   cloudbuild = cloud_build('cloudbuild',
@@ -544,8 +545,10 @@ def wait_on_builds(build_ids, credentials, cloud_project, end_time,
          if status == 'SUCCESS':
            successful_builds[project].append(build_id)
          else:
-            logs_url = build_lib.get_gcb_url(build_id, cloud_project)
-            failed_builds[project].append((status, logs_url, build_type))
+            gcb_url = build_lib.get_gcb_url(build_id, cloud_project)
+            log_url = build_lib.get_logs_url(build_id)
+            failed_builds[project].append(
+                (status, gcb_url, build_type, log_url))
          wait_builds[project].remove((build_id, build_type))
          if not wait_builds[project]:
@@ -553,13 +556,16 @@ def wait_on_builds(build_ids, credentials, cloud_project, end_time,
        elif retries_map.get(build_id, 0) >= MAX_RETRIES:
          # Max retries reached, mark as failed.
+          logging.error('HttpError for build %s. Max retries reached.',
+                        build_id)
          if build_id in next_retry_time:
            del next_retry_time[build_id]
          finished_builds_count += 1
          status = 'UNKNOWN (too many HttpErrors)'
-          logs_url = build_lib.get_gcb_url(build_id, cloud_project)
-          failed_builds[project].append((status, logs_url, build_type))
+          gcb_url = build_lib.get_gcb_url(build_id, cloud_project)
+          log_url = build_lib.get_logs_url(build_id)
+          failed_builds[project].append((status, gcb_url, build_type, log_url))
          wait_builds[project].remove((build_id, build_type))
          if not wait_builds[project]:
            del wait_builds[project]
@@ -570,8 +576,6 @@ def wait_on_builds(build_ids, credentials, cloud_project, end_time,
                                         random.uniform(0, 1))
            next_retry_time[build_id] = (datetime.datetime.now() +
                                         datetime.timedelta(seconds=backoff_time))
-            logging.warning('HttpError for build %s. Retrying in %.2f seconds.',
-                            build_id, backoff_time)

    if not processed_a_build_in_iteration and wait_builds:
      # All remaining builds are in backoff, sleep to prevent busy-waiting.
@@ -584,9 +588,10 @@ def wait_on_builds(build_ids, credentials, cloud_project, end_time,
   if wait_builds:
     for project, project_builds in list(wait_builds.items()):
       for build_id, build_type in project_builds:
-        logs_url = build_lib.get_gcb_url(build_id, cloud_project)
+        gcb_url = build_lib.get_gcb_url(build_id, cloud_project)
+        log_url = build_lib.get_logs_url(build_id)
         failed_builds[project].append(
-            ('TIMEOUT (Coordinator)', logs_url, build_type))
+            ('TIMEOUT (Coordinator)', gcb_url, build_type, log_url))

   # Final Report
   successful_builds_count = sum(
@@ -634,17 +639,19 @@ def wait_on_builds(build_ids, credentials, cloud_project, end_time,
     logging.error('--- FAILED BUILDS ---')
     for project, failures in sorted(failed_builds.items()):
       logging.error(' - %s:', project)
-      for status, gcb_url, build_type in failures:
+      for status, gcb_url, build_type, log_url in failures:
         build_id = gcb_url.split('/')[-1].split('?')[0]
-        logging.error(' - Build ID: %s', build_id)
         logging.error(' - Build Type: %s', build_type)
         logging.error(' - Status: %s', status)
-        logging.error(' - GCB URL: %s', gcb_url)
+        for line in build_lib.get_build_info_lines(build_id, cloud_project):
+          logging.error(' - %s', line)
     logging.info('-----------------------')
     return False

   if not finished_builds_count and not skipped_builds_count:
     logging.warning('No builds were run.')
+    if args.skip_build_images:
+      return True
     return False

   logging.info('\nAll builds passed successfully!')
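Below is a small standalone sketch (not part of the patch) of two behaviors this change introduces: the `get_build_info_lines` helper added to `build_lib.py`, and the `trial_build_skipped.flag` handshake between `gcb.py` and `report_generator.py`. The two URL builders are hypothetical placeholders; the real `get_gcb_url` and `get_logs_url` live in `build_lib.py` and their exact URL formats are not reproduced here.

```python
"""Standalone sketch of two behaviors added by this patch (not part of it)."""
import logging
import os
import sys

SKIP_FLAG = 'trial_build_skipped.flag'


def _gcb_url(build_id, cloud_project):
  # Hypothetical stand-in for build_lib.get_gcb_url (Cloud Build console link).
  return f'<console URL for build {build_id} in project {cloud_project}>'


def _logs_url(build_id):
  # Hypothetical stand-in for build_lib.get_logs_url (raw build log location).
  return f'<log URL for build {build_id}>'


def get_build_info_lines(build_id, cloud_project='oss-fuzz'):
  """Same shape as the helper added to build_lib.py: one string per log line."""
  return [
      f'GCB Build ID: {build_id}',
      f'GCB Build URL: {_gcb_url(build_id, cloud_project)}',
      f'Log URL: {_logs_url(build_id)}',
  ]


def mark_trial_build_skipped():
  """Mirrors gcb.py: drop a flag file when no trial build is requested."""
  with open(SKIP_FLAG, 'w'):
    pass


def maybe_skip_report():
  """Mirrors report_generator.main(): exit 0 early if the flag file exists."""
  if os.path.exists(SKIP_FLAG):
    print('Skipping report generation because trial build was not invoked.')
    sys.exit(0)


if __name__ == '__main__':
  logging.basicConfig(level=logging.INFO, format='%(message)s')
  for line in get_build_info_lines('1234-abcd', cloud_project='oss-fuzz-base'):
    logging.info(line)
```

Routing all three lines through one helper keeps the Phase 1 banners in `build_and_push_test_images.py`, the per-build summaries in `trial_build.py`, and the failure reports in `wait_on_builds` consistent, and the `logsBucket` entry added to `get_build_body` pins the bucket those logs land in (`gs://oss-fuzz-gcb-logs`).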
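The rewritten `get_signed_url` builds a V2-style GCS signed URL: it signs the string `METHOD\n\nCONTENT_TYPE\nEXPIRES\n/bucket/object` and URL-encodes the query parameters. The sketch below illustrates that construction under stated assumptions: the `BUILD_TIMEOUT` value and the stub `_sign_blob` are placeholders, since the real ones are defined elsewhere in `build_lib.py` (`_sign_blob` presumably returns the service-account email and a base64 signature).

```python
"""Sketch of the signed-URL construction used above (not part of the patch)."""
import base64
import time
from urllib import parse as urlparse

BUILD_TIMEOUT = 60 * 60  # Assumed: lifetime of the signed URL, in seconds.


def _sign_blob(blob):
  # Hypothetical stand-in: the real helper signs the blob with the builder
  # service account and returns (client_id, base64-encoded signature).
  fake_signature = base64.b64encode(blob.encode()).decode()
  return 'service-account@example.iam.gserviceaccount.com', fake_signature


def get_signed_url(path, method='PUT', content_type=''):
  """Signs 'METHOD\\n\\nCONTENT_TYPE\\nEXPIRES\\n/bucket/object' (V2 format)."""
  timestamp = int(time.time() + BUILD_TIMEOUT)
  blob = f'{method}\n\n{content_type}\n{timestamp}\n{path}'
  client_id, signature = _sign_blob(blob)
  values = {
      'GoogleAccessId': client_id,
      'Expires': timestamp,
      'Signature': signature,
  }
  # urlencode() percent-escapes the base64 signature, which is why the
  # explicit quote_plus() call from the removed version is no longer needed.
  return f'https://storage.googleapis.com{path}?{urlparse.urlencode(values)}'


if __name__ == '__main__':
  # Example path is hypothetical; callers pass '/bucket/object' style paths.
  print(get_signed_url('/my-bucket/srcmap.json', content_type='application/json'))
```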