|
24 | 24 | from ddev.utils.toml import load_toml_file |
25 | 25 |
|
26 | 26 | METRIC_VERSION = 2 |
27 | | - |
| 27 | +METRIC_NAME = "datadog.agent_integrations" |
28 | 28 | if TYPE_CHECKING: |
29 | 29 | from matplotlib.axes import Axes |
30 | 30 | from matplotlib.patches import Patch |
@@ -227,6 +227,10 @@ def get_files(repo_path: str | Path, compressed: bool, py_version: str) -> list[ |
227 | 227 | relative_path = os.path.relpath(file_path, repo_path) |
228 | 228 | if not is_valid_integration_file(relative_path, str(repo_path)): |
229 | 229 | continue |
| 230 | + path = Path(relative_path) |
| 231 | + parts = path.parts |
| 232 | + |
| 233 | + integration_name = parts[0] |
230 | 234 |
|
231 | 235 | size = compress(file_path) if compressed else os.path.getsize(file_path) |
232 | 236 | integration_sizes[integration_name] = integration_sizes.get(integration_name, 0) + size |
@@ -837,7 +841,7 @@ def send_metrics_to_dd( |
837 | 841 | key: str, |
838 | 842 | compressed: bool, |
839 | 843 | ) -> None: |
840 | | - metric_name = "datadog.agent_integrations" |
| 844 | + metric_name = METRIC_NAME |
841 | 845 | size_type = "compressed" if compressed else "uncompressed" |
842 | 846 |
|
843 | 847 | config_file_info = get_org(app, org) if org else {"api_key": key, "site": "datadoghq.com"} |
@@ -1090,3 +1094,173 @@ def __exit__( |
1090 | 1094 | ) -> None: |
1091 | 1095 | if self.repo_dir and os.path.exists(self.repo_dir): |
1092 | 1096 | shutil.rmtree(self.repo_dir) |
| 1097 | + |
| 1098 | + |
def get_last_dependency_sizes_artifact(commit: str, platform: str) -> str:
    """Locate the dependency-sizes json for *commit*.

    Prefers the artifact produced for the commit itself; when that lookup
    yields nothing, falls back to the previous measure-disk-usage run.
    """
    # A falsy result (None/empty) from the current commit falls through to the fallback.
    return get_dep_sizes_json(commit, platform) or get_previous_dep_sizes_json(commit, platform)
| 1104 | + |
| 1105 | + |
def get_run_id(commit, workflow):
    """Return the databaseId of the latest GitHub Actions run of *workflow* for *commit*.

    Uses the `gh` CLI; returns None when the invocation fails or produces no output.
    """
    print(f"Getting run id for commit: {commit}, workflow: {workflow}")
    try:
        result = subprocess.run(
            [
                'gh',
                'run',
                'list',
                '--workflow',
                workflow,
                '-c',
                commit,
                '--json',
                'databaseId',
                '--jq',
                '.[-1].databaseId',
            ],
            # Bug fix: without check=True, CalledProcessError is never raised,
            # making the handler below dead code and hiding gh failures.
            check=True,
            capture_output=True,
            text=True,
        )
    except subprocess.CalledProcessError as e:
        stderr = (e.stderr or '').strip()
        if stderr:
            print(stderr)
        print("Failed to get run id")
        return None
    run_id = result.stdout.strip() if result.stdout else None
    print(f"Run id: {run_id}")
    return run_id
| 1135 | + |
| 1136 | + |
def get_dep_sizes_json(current_commit, platform):
    """Fetch the sizes.json produced for *current_commit* on *platform*.

    Returns the local path to the file, or None when no suitable run or
    artifact exists for the commit.
    """
    print(f"Getting dependency sizes json for commit: {current_commit}, platform: {platform}")
    run_id = get_run_id(current_commit, '.github/workflows/resolve-build-deps.yaml')
    # Guard clause: bail out when there is no run or the run lacks the artifact.
    if not run_id or not check_artifact_exists(run_id, f'target-{platform}'):
        print("Dependency sizes json not found for current commit.")
        return None
    sizes_path = get_current_sizes_json(run_id, platform)
    print(f"Dependency sizes json path: {sizes_path}")
    return sizes_path
| 1147 | + |
| 1148 | + |
def check_artifact_exists(run_id, artifact_name):
    """Return True if GitHub Actions run *run_id* produced an artifact named *artifact_name*.

    Queries the run's artifact list via `gh api`. Returns False when the
    artifact is absent or the API call itself fails.
    """
    print(f"Checking if artifact exists: run_id={run_id}, artifact_name={artifact_name}")
    try:
        result = subprocess.run(
            [
                'gh',
                'api',
                f'repos/Datadog/integrations-core/actions/runs/{run_id}/artifacts',
                '--jq',
                '.artifacts[].name',
            ],
            check=True,
            capture_output=True,
            text=True,
        )
    except subprocess.CalledProcessError as e:
        # Consistency/robustness fix: get_run_id degrades gracefully on gh
        # failures, but here check=True previously let the exception crash
        # callers that only gate on the boolean. Report and answer "not found".
        stderr = (e.stderr or '').strip()
        if stderr:
            print(stderr)
        print(f"Failed to list artifacts for run {run_id}")
        return False

    artifact_names = {n.strip() for n in (result.stdout or '').splitlines() if n.strip()}
    print(f"Available artifacts: {artifact_names}")
    if artifact_name not in artifact_names:
        print(f"Artifact '{artifact_name}' not found in run {run_id}")
        return False

    print(f"Found artifact: {artifact_name}")
    return True
| 1172 | + |
| 1173 | + |
def get_current_sizes_json(run_id, platform):
    """Download the `target-<platform>` artifact of *run_id* and extract sizes.json.

    Returns the path to a `<platform>.json` copy placed in the current working
    directory, or None when the artifact does not contain a sizes.json.
    """
    print(f"Getting current sizes json for run_id={run_id}, platform={platform}")
    with tempfile.TemporaryDirectory() as download_dir:
        print(f"Downloading artifacts to {download_dir}")
        command = [
            'gh',
            'run',
            'download',
            run_id,
            '--name',
            f'target-{platform}',
            '--dir',
            download_dir,
        ]
        subprocess.run(command, check=True, capture_output=True, text=True)
        print(f"Downloaded artifacts to {download_dir}")
        # The artifact is laid out as <platform>/py3/sizes.json.
        candidate = os.path.join(download_dir, platform, 'py3', 'sizes.json')
        if not os.path.exists(candidate):
            print(f"sizes.json not found at {candidate}")
            return None
        print(f"Found sizes.json at {candidate}")
        # Move the file out before the temp dir is cleaned up, renaming it per platform.
        destination = os.path.join(os.getcwd(), f'{platform}.json')
        shutil.move(candidate, destination)
        return destination
| 1205 | + |
| 1206 | + |
def get_artifact(run_id, artifact_name):
    """Download *artifact_name* from run *run_id* into the CWD and return its local path."""
    print(f"Downloading artifact: {artifact_name} from run_id={run_id}")
    command = ['gh', 'run', 'download', run_id, '--name', artifact_name]
    subprocess.run(command, check=True, capture_output=True, text=True)
    # gh downloads into the current directory when --dir is omitted.
    artifact_path = os.path.join(os.getcwd(), artifact_name)
    print(f"Artifact downloaded to: {artifact_path}")
    return artifact_path
| 1225 | + |
| 1226 | + |
def get_previous_dep_sizes_json(base_commit, platform):
    """Reconstruct a merged sizes json from the previous measure-disk-usage run.

    Downloads the compressed and uncompressed status artifacts produced for
    *base_commit*, merges them via parse_sizes_json, and writes the result to
    `<platform>.json` in the CWD. Returns that path, or None when either
    artifact is unavailable.
    """
    print(f"Getting previous dependency sizes json for base_commit={base_commit}, platform={platform}")
    run_id = get_run_id(base_commit, '.github/workflows/measure-disk-usage.yml')
    print(f"Previous run_id: {run_id}")
    compressed_json = None
    uncompressed_json = None
    if run_id and check_artifact_exists(run_id, f'status_compressed_{platform}.json'):
        compressed_json = get_artifact(run_id, f'status_compressed_{platform}.json')
    if run_id and check_artifact_exists(run_id, f'status_uncompressed_{platform}.json'):
        uncompressed_json = get_artifact(run_id, f'status_uncompressed_{platform}.json')
    print(f"Compressed json: {compressed_json}")
    print(f"Uncompressed json: {uncompressed_json}")
    # Bug fix: parse_sizes_json opens both paths, so a missing artifact would
    # surface as a confusing TypeError from open(None) — bail out early instead.
    if compressed_json is None or uncompressed_json is None:
        print("Previous dependency sizes artifacts incomplete; skipping merge.")
        return None
    sizes_json = parse_sizes_json(compressed_json, uncompressed_json)
    output_path = f'{platform}.json'
    with open(output_path, 'w') as f:
        json.dump(sizes_json, f, indent=2)
    print(f"Wrote merged sizes json to {output_path}")
    return output_path
| 1245 | + |
| 1246 | + |
def parse_sizes_json(compressed_json_path, uncompressed_json_path):
    """Merge compressed and uncompressed size reports into a single mapping.

    Each input file holds a JSON list of records with "Name", "Size_Bytes",
    and "Version" fields. The result maps dependency name to a dict holding
    "compressed" and/or "uncompressed" byte counts plus a "version".
    """

    def _load_records(path):
        # Each report file is a JSON array of per-dependency records.
        with open(path, 'r') as fh:
            return list(json.load(fh))

    merged = {}
    for record in _load_records(compressed_json_path):
        merged[record["Name"]] = {
            "compressed": int(record["Size_Bytes"]),
            "version": record.get("Version"),
        }

    for record in _load_records(uncompressed_json_path):
        entry = merged.get(record["Name"])
        if entry is None:
            # Dependency only present in the uncompressed report.
            entry = {"version": record.get("Version")}
            merged[record["Name"]] = entry
        entry["uncompressed"] = int(record["Size_Bytes"])

    return merged
0 commit comments