|
| 1 | +#!/usr/bin/env python3 |
| 2 | + |
| 3 | +import firebase_admin |
| 4 | +from firebase_admin import credentials |
| 5 | +from firebase_admin import firestore |
| 6 | +from datetime import datetime |
| 7 | +import json |
| 8 | +import argparse |
| 9 | +import re |
| 10 | +import os |
| 11 | + |
# Run relative to flow/: the reports/ paths walked below assume the parent
# of this script's directory is the current working directory.
os.chdir(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))

# Command-line interface for the Jenkins metrics-upload job.
# Fix: the description was the leftover argparse-tutorial placeholder
# ('Process some integers.'), which was wrong for this tool.
parser = argparse.ArgumentParser(
    description='Upload per-design flow metrics to Firestore.')

# Build/run identification supplied by the CI environment.
parser.add_argument('--buildID', type=str, help='Build ID from jenkins')
parser.add_argument('--branchName', type=str, help='Current Branch Name')
parser.add_argument('--pipelineID', type=str, help='Jenkins pipeline ID')
parser.add_argument('--commitSHA', type=str, help='Current commit sha')
parser.add_argument('--cred', type=str, help='Service account credentials file')
parser.add_argument('--variant', type=str, default='base')

# Parse the arguments
args = parser.parse_args()
| 28 | + |
| 29 | + |
def upload_data(db, datafile, platform, design, variant, args):
    """Upload one design run's metrics JSON into the 'build_metrics' collection.

    Args:
        db: Firestore client.
        datafile: Path to the metadata JSON file for this design/variant.
        platform: PDK/platform name (e.g. 'sky130hd').
        design: Design name.
        variant: Flow variant name.
        args: Parsed CLI arguments (buildID, branchName, pipelineID, commitSHA).
    """
    # One document per commit/platform/design/variant combination.
    key = '-'.join([args.commitSHA, platform, design, variant])
    doc_ref = db.collection('build_metrics').document(key)
    doc_ref.set({
        'build_id': args.buildID,
        'branch_name': args.branchName,
        'pipeline_id': args.pipelineID,
        'commit_sha': args.commitSHA,
    })

    # Load JSON data from file.
    # BUG FIX: read the 'datafile' parameter. The original opened the
    # module-level global 'dataFile' set in the walk loop, which only
    # worked by accident and broke any standalone use of this function.
    with open(datafile) as f:
        data = json.load(f)

    # Firestore field names cannot contain ':', so sanitize the keys, and
    # collect the set of flow stage names present in this run.
    new_data = {}
    stages = []
    excludes = ["run", "commit", "total_time", "constraints"]
    for k, v in data.items():
        new_key = k.replace(':', '__')  # replace ':' with '__'
        new_data[new_key] = v
        # BUG FIX: derive the stage from the *sanitized* key. Splitting the
        # original key on '__' left ':'-separated keys (e.g. 'place:util')
        # unsplit, producing bogus stage names containing ':'.
        stage_name = new_key.split('__')[0]
        if stage_name not in excludes:
            stages.append(stage_name)
        if k == 'run__flow__generate_date':
            # Convert the timestamp string to a datetime for Firestore.
            new_data[k] = datetime.strptime(v, '%Y-%m-%d %H:%M')
    # BUG FIX: Firestore does not accept Python sets as field values;
    # store a deduplicated, sorted list instead (sorted for determinism).
    new_data['stages'] = sorted(set(stages))

    # Merge the metric fields into the document created above.
    doc_ref.update(new_data)
| 64 | + |
| 65 | + |
# Initialize Firebase Admin SDK with service account credentials.
firebase_admin.initialize_app(credentials.Certificate(args.cred))
# Initialize Firestore client.
db = firestore.client()

# Per-variant metadata file produced by the flow for each design run.
runFilename = f'metadata-{args.variant}.json'

# Platforms whose results are never uploaded.
SKIP_PLATFORMS = ('sky130hd_fakestack', 'src')

# Walk reports/<platform>/<design>/<variant> and upload each metadata file.
for reportDir, _dirs, _files in sorted(os.walk('reports', topdown=False)):
    dirList = reportDir.split(os.sep)
    if len(dirList) != 4:
        # Only directories exactly four levels deep are per-variant reports.
        continue

    # Basic info about the test design, taken from the path components.
    platform = dirList[1]
    design = dirList[2]
    variant = dirList[3]
    dataFile = os.path.join(reportDir, runFilename)
    if not os.path.exists(dataFile):
        print(f'[WARN] No data file for {platform} {design} {variant}.')
        continue
    if platform in SKIP_PLATFORMS:
        # Typo fix in user-facing message: 'Skiping' -> 'Skipping'.
        print(f'[WARN] Skipping upload {platform} {design} {variant}.')
        continue
    print(f'[INFO] Upload data for {platform} {design} {variant}.')
    upload_data(db, dataFile, platform, design, variant, args)