1212import zipfile
1313import zlib
1414from datetime import date
15+ from functools import cache
1516from pathlib import Path
1617from types import TracebackType
1718from typing import TYPE_CHECKING , Literal , Optional , Type , TypedDict
@@ -1096,46 +1097,49 @@ def __exit__(
10961097 shutil .rmtree (self .repo_dir )
10971098
10981099
@cache
def get_last_dependency_sizes_artifact(commit: str, platform: str) -> str:
    """Locate the dependency-sizes JSON artifact for *commit* on *platform*.

    Tries the current commit's artifact first and falls back to the
    previously published one when the current lookup yields nothing
    (change for merge base).  Memoized via functools.cache so repeated
    lookups for the same (commit, platform) pair hit the network once.
    """
    current = get_dep_sizes_json(commit, platform)
    return current or get_previous_dep_sizes_json(commit, platform)
11041106
11051107
@cache
def get_run_id(commit, workflow):
    """Return the databaseId of the latest GitHub Actions run of *workflow*
    for *commit*, as a string, or ``None`` when it cannot be determined.

    Shells out to the ``gh`` CLI; memoized via functools.cache because the
    run id for a given (commit, workflow) pair is stable within one run of
    this script.
    """
    print(f"Getting run id for commit: {commit}, workflow: {workflow}")
    try:
        # check=True is essential: without it subprocess.run() never raises
        # CalledProcessError, which made the except branch below dead code.
        # FileNotFoundError covers the `gh` binary being absent entirely.
        result = subprocess.run(
            [
                'gh',
                'run',
                'list',
                '--workflow',
                workflow,
                '-c',
                commit,
                '--json',
                'databaseId',
                '--jq',
                '.[-1].databaseId',
            ],
            capture_output=True,
            text=True,
            check=True,
        )
    except (subprocess.CalledProcessError, FileNotFoundError) as e:
        stderr = (getattr(e, 'stderr', None) or '').strip()
        if stderr:
            print(stderr)
        print("Failed to get run id")
        return None
    run_id = result.stdout.strip() if result.stdout else None
    # `--jq '.[-1].databaseId'` prints the literal string "null" when no run
    # matches the commit; normalize that to the documented failure value.
    if run_id == 'null':
        run_id = None
    print(f"Run id: {run_id}")

    return run_id
11371140
11381141
1142+ @cache
11391143def get_dep_sizes_json (current_commit , platform ):
11401144 print (f"Getting dependency sizes json for commit: { current_commit } , platform: { platform } " )
11411145 run_id = get_run_id (current_commit , '.github/workflows/resolve-build-deps.yaml' )
@@ -1148,6 +1152,7 @@ def get_dep_sizes_json(current_commit, platform):
11481152 return None
11491153
11501154
1155+ @cache
11511156def check_artifact_exists (run_id , artifact_name ):
11521157 print (f"Checking if artifact exists: run_id={ run_id } , artifact_name={ artifact_name } " )
11531158 result = subprocess .run (
@@ -1173,6 +1178,7 @@ def check_artifact_exists(run_id, artifact_name):
11731178 return True
11741179
11751180
1181+ @cache
11761182def get_current_sizes_json (run_id , platform ):
11771183 print (f"Getting current sizes json for run_id={ run_id } , platform={ platform } " )
11781184 with tempfile .TemporaryDirectory () as tmpdir :
@@ -1206,6 +1212,7 @@ def get_current_sizes_json(run_id, platform):
12061212 return None
12071213
12081214
1215+ @cache
12091216def get_artifact (run_id , artifact_name ):
12101217 print (f"Downloading artifact: { artifact_name } from run_id={ run_id } " )
12111218 _ = subprocess .run (
@@ -1226,6 +1233,7 @@ def get_artifact(run_id, artifact_name):
12261233 return artifact_path
12271234
12281235
1236+ @cache
12291237def get_previous_dep_sizes_json (base_commit , platform ):
12301238 print (f"Getting previous dependency sizes json for base_commit={ base_commit } , platform={ platform } " )
12311239 run_id = get_run_id (base_commit , '.github/workflows/measure-disk-usage.yml' )
@@ -1246,6 +1254,7 @@ def get_previous_dep_sizes_json(base_commit, platform):
12461254 return output_path
12471255
12481256
1257+ @cache
12491258def parse_sizes_json (compressed_json_path , uncompressed_json_path ):
12501259 with open (compressed_json_path , 'r' ) as f :
12511260 compressed_list = list (json .load (f ))
0 commit comments