
Commit 3548d67

Merge pull request #1146 from habibayassin/modify-upload
Jenkins URL, add branches/commits, add rules, nightly tag
2 parents: afd2012 + a07eebe

3 files changed: +65 -5 lines changed


flow/util/uploadMetadata.py

Lines changed: 61 additions & 3 deletions
@@ -3,7 +3,7 @@
 import firebase_admin
 from firebase_admin import credentials
 from firebase_admin import firestore
-from datetime import datetime
+from datetime import datetime, timezone
 import json
 import argparse
 import re
@@ -20,14 +20,15 @@
 parser.add_argument('--branchName', type=str, help='Current Branch Name')
 parser.add_argument('--pipelineID', type=str, help='Jenkins pipeline ID')
 parser.add_argument('--commitSHA', type=str, help='Current commit sha')
+parser.add_argument('--jenkinsURL', type=str, help='Jenkins Report URL')
 parser.add_argument('--cred', type=str, help='Service account credentials file')
 parser.add_argument('--variant', type=str, default='base')

 # Parse the arguments
 args = parser.parse_args()


-def upload_data(db, datafile, platform, design, variant, args):
+def upload_data(db, datafile, platform, design, variant, args, rules):
     # Set the document data
     key = args.commitSHA + '-' + platform + '-' + design + '-' + variant
     doc_ref = db.collection('build_metrics').document(key)
@@ -36,6 +37,8 @@ def upload_data(db, datafile, platform, design, variant, args):
         'branch_name': args.branchName,
         'pipeline_id': args.pipelineID,
         'commit_sha': args.commitSHA,
+        'jenkins_url': args.jenkinsURL,
+        'rules': rules,
     })

     # Load JSON data from file
@@ -46,6 +49,7 @@ def upload_data(db, datafile, platform, design, variant, args):
     new_data = {}
     stages = []
     excludes = ["run", "commit", "total_time", "constraints"]
+    gen_date = datetime.now()
     for k, v in data.items():
         new_key = re.sub(':', '__', k)  # replace ':' with '__'
         new_data[new_key] = v
@@ -62,6 +66,58 @@ def upload_data(db, datafile, platform, design, variant, args):
     # Set the data to the document in Firestore
     doc_ref.update(new_data)

+    branch_doc_ref = db.collection('branches').document(args.branchName)
+    # check if date is greater than the one in the document if it exists
+    if branch_doc_ref.get().exists:
+        current_date = branch_doc_ref.get().to_dict().get('run__flow__generate_date')
+        current_date = current_date.replace(tzinfo=timezone.utc)
+        gen_date = gen_date.replace(tzinfo=timezone.utc)
+        if current_date is not None and gen_date > current_date:
+            branch_doc_ref.update({
+                'run__flow__generate_date': gen_date,
+                'jenkins_url': args.jenkinsURL,
+            })
+        else:
+            branch_doc_ref.update({
+                'jenkins_url': args.jenkinsURL,
+            })
+    else:
+        branch_doc_ref.set({
+            'name': args.branchName,
+            'run__flow__generate_date': gen_date,
+            'jenkins_url': args.jenkinsURL,
+        })
+
+    commit_doc_ref = db.collection('commits').document(args.commitSHA)
+    if commit_doc_ref.get().exists:
+        current_date = commit_doc_ref.get().to_dict().get('run__flow__generate_date')
+        current_date = current_date.replace(tzinfo=timezone.utc)
+        gen_date = gen_date.replace(tzinfo=timezone.utc)
+        if current_date is not None and gen_date > current_date:
+            commit_doc_ref.update({
+                'run__flow__generate_date': gen_date,
+                'jenkins_url': args.jenkinsURL,
+            })
+        else:
+            commit_doc_ref.update({
+                'jenkins_url': args.jenkinsURL,
+            })
+    else:
+        commit_doc_ref.set({
+            'sha': args.commitSHA,
+            'run__flow__generate_date': gen_date,
+            'jenkins_url': args.jenkinsURL,
+        })
+
+def get_rules(platform, design, variant):
+    runFilename = f'rules-{variant}.json'
+    dataFile = os.path.join('designs', platform, design, runFilename)
+    data = {}
+    if os.path.exists(dataFile):
+        with open(dataFile) as f:
+            data = json.load(f)
+
+    return data

 # Initialize Firebase Admin SDK with service account credentials
 firebase_admin.initialize_app(credentials.Certificate(args.cred))
@@ -86,5 +142,7 @@ def upload_data(db, datafile, platform, design, variant, args):
     if platform == 'sky130hd_fakestack' or platform == 'src':
         print(f'[WARN] Skiping upload {platform} {design} {variant}.')
         continue
+    print(f'[INFO] Get rules for {platform} {design} {variant}.')
+    rules = get_rules(platform, design, variant)
     print(f'[INFO] Upload data for {platform} {design} {variant}.')
-    upload_data(db, dataFile, platform, design, variant, args)
+    upload_data(db, dataFile, platform, design, variant, args, rules)
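
For reference, here is a minimal read-back sketch (not part of this commit) for the branches documents that upload_data() now maintains. It assumes the same firebase_admin setup; the service-account path is hypothetical, and the 'nightly' document ID is taken from the nightly Jenkinsfile change below.

# Sketch only: read back a branch document written by upload_data().
import firebase_admin
from firebase_admin import credentials, firestore

firebase_admin.initialize_app(credentials.Certificate('service-account.json'))  # hypothetical path
db = firestore.client()

snap = db.collection('branches').document('nightly').get()  # 'nightly' is used by the nightly pipeline
if snap.exists:
    doc = snap.to_dict()
    # Fields written by this commit: name, run__flow__generate_date, jenkins_url
    print(doc['name'], doc['run__flow__generate_date'], doc['jenkins_url'])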

jenkins/public_nightly.Jenkinsfile

Lines changed: 3 additions & 2 deletions
@@ -193,8 +193,9 @@ pipeline {
         sh """
             python3 flow/util/uploadMetadata.py \
                 --buildID ${env.BUILD_ID} \
-                --branchName ${env.BRANCH_NAME} \
-                --commitSHA ${env.GIT_COMMIT} \
+                --branchName nightly \
+                --commitSHA ${env.GIT_COMMIT}-dirty \
+                --jenkinsURL ${env.RUN_DISPLAY_URL} \
                 --pipelineID ${env.BUILD_TAG} \
         """ + '--cred ${db_cred}'
     }
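
As a worked example (illustrative values only), the nightly arguments above combine with the key construction in upload_data() to produce build_metrics document IDs like this:

# Sketch of the build_metrics document key, mirroring upload_data();
# the SHA, platform, and design below are hypothetical.
commit_sha = 'abc1234' + '-dirty'   # ${env.GIT_COMMIT}-dirty in the nightly pipeline
platform, design, variant = 'sky130hd', 'gcd', 'base'
key = commit_sha + '-' + platform + '-' + design + '-' + variant
print(key)  # abc1234-dirty-sky130hd-gcd-base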

jenkins/public_tests_all.Jenkinsfile

Lines changed: 1 addition & 0 deletions
@@ -167,6 +167,7 @@ pipeline {
                 --buildID ${env.BUILD_ID} \
                 --branchName ${env.BRANCH_NAME} \
                 --commitSHA ${env.GIT_COMMIT} \
+                --jenkinsURL ${env.RUN_DISPLAY_URL} \
                 --pipelineID ${env.BUILD_TAG} \
         """ + '--cred ${db_cred}'
     }
