Commit a8e5286

fix
1 parent 658f89d

1 file changed: 101 additions & 54 deletions

@@ -1,19 +1,22 @@
 import argparse
+import json
 import os
 import shutil
 import subprocess
 import sys
 import tempfile
-import json


 def get_run_id(commit, workflow):
+    print(f"Getting run id for commit: {commit}, workflow: {workflow}")
     try:
         result = subprocess.run(
             [
                 'gh',
                 'run',
                 'list',
+                '--repo',
+                'Datadog/integrations-core',
                 '--workflow',
                 workflow,
                 '-c',
@@ -25,50 +28,74 @@ def get_run_id(commit, workflow):
                 '--jq',
                 '.[-1].databaseId',
             ],
+            capture_output=True,
+            text=True,
         )
     except subprocess.CalledProcessError as e:
         stderr = (e.stderr or '').strip()
         if stderr:
             print(stderr)
+        print("Failed to get run id (exception).")
         return None
-    return result.stdout.strip()
+    run_id = result.stdout.strip() if result.stdout else None
+    print(f"Run id: {run_id}")
+    return run_id
+

 def get_dep_sizes_json(current_commit, platform):
+    print(f"Getting dependency sizes json for commit: {current_commit}, platform: {platform}")
     run_id = get_run_id(current_commit, '.github/workflows/resolve-build-deps.yaml')
     if run_id and check_artifact_exists(run_id, f'target-{platform}'):
         dep_sizes_json = get_current_sizes_json(run_id, platform)
+        print(f"Dependency sizes json path: {dep_sizes_json}")
         return dep_sizes_json
     else:
+        print("Dependency sizes json not found for current commit.")
         return None


 def check_artifact_exists(run_id, artifact_name):
+    print(f"Checking if artifact exists: run_id={run_id}, artifact_name={artifact_name}")
     result = subprocess.run(
-            [
-                'gh',
-                'api',
-                f'repos/Datadog/integrations-core/actions/runs/{run_id}/artifacts',
-                '--jq',
-                '.artifacts[].name',
-            ],
-            check=True,
-            capture_output=True,
-            text=True,
-        )
+        [
+            'gh',
+            'api',
+            f'repos/Datadog/integrations-core/actions/runs/{run_id}/artifacts',
+            '--jq',
+            '.artifacts[].name',
+        ],
+        check=True,
+        capture_output=True,
+        text=True,
+    )

     artifact_names = {n.strip() for n in (result.stdout or '').splitlines() if n.strip()}
+    print(f"Available artifacts: {artifact_names}")
     if artifact_name not in artifact_names:
         print(f"Artifact '{artifact_name}' not found in run {run_id}")
         return False

     print(f"Found artifact: {artifact_name}")
     return True

+
 def get_current_sizes_json(run_id, platform):
+    print(f"Getting current sizes json for run_id={run_id}, platform={platform}")
     with tempfile.TemporaryDirectory() as tmpdir:
         print(f"Downloading artifacts to {tmpdir}")
         _ = subprocess.run(
-            ['gh', 'run', 'download', run_id, '--name', f'target-{platform}', '--dir', tmpdir],
+            [
+                'gh',
+                'run',
+                'download',
+                run_id,
+                '--repo',
+                'Datadog/integrations-core',
+                '--name',
+                f'target-{platform}',
+                '--dir',
+                tmpdir,
+            ],
             check=True,
             capture_output=True,
             text=True,
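
A note on the recurring '--repo Datadog/integrations-core' additions: the gh CLI normally infers the target repository from the git remotes of the current working directory, so gh run list, gh api, and gh run download fail when the script runs outside an integrations-core checkout. Passing the repository explicitly makes the calls location-independent. A minimal sketch of the pattern (assumes gh is installed and authenticated; '--limit 1' is only there to keep the sample call cheap):

    import subprocess

    # Explicit --repo: works from any working directory, not just a clone
    # of Datadog/integrations-core.
    subprocess.run(
        ['gh', 'run', 'list', '--repo', 'Datadog/integrations-core', '--limit', '1'],
        check=True,
    )
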
@@ -79,59 +106,75 @@ def get_current_sizes_json(run_id, platform):

         if os.path.exists(sizes_file):
             print(f"Found sizes.json at {sizes_file}")
-            shutil.move(sizes_file, os.path.join(os.getcwd(), f'{platform}.json'))
-            return os.getcwd() / f'{platform}.json'
+            dest_path = os.path.join(os.getcwd(), f'{platform}.json')
+            shutil.move(sizes_file, dest_path)
+            return dest_path
         else:
             print(f"sizes.json not found at {sizes_file}")
             return None

+
 def get_artifact(run_id, artifact_name):
+    print(f"Downloading artifact: {artifact_name} from run_id={run_id}")
     _ = subprocess.run(
-        ['gh', 'run', 'download', run_id, '--name', artifact_name],
+        [
+            'gh',
+            'run',
+            'download',
+            run_id,
+            '--repo',
+            'Datadog/integrations-core',
+            '--name',
+            artifact_name,
+        ],
         check=True,
         capture_output=True,
         text=True,
     )
-    return os.path.join(os.getcwd(), artifact_name)
+    artifact_path = os.path.join(os.getcwd(), artifact_name)
+    print(f"Artifact downloaded to: {artifact_path}")
+    return artifact_path
+

 def get_previous_dep_sizes_json(base_commit, platform):
-    # Get the previous commit in master branch
-    result = subprocess.run(
-        ['git', 'rev-parse', f'{base_commit}~1'],
-        check=True,
-        capture_output=True,
-        text=True,
-    )
-    prev_commit = result.stdout.strip()
-    run_id = get_run_id(prev_commit, '.github/workflows/measure-disk-usage.yml')
-    if run_id and check_artifact_exists(run_id, f'status_uncompressed_{platform}.json'):
-        uncompressed_json = get_artifact(run_id, f'status_uncompressed_{platform}.json')
+    print(f"Getting previous dependency sizes json for base_commit={base_commit}, platform={platform}")
+    run_id = get_run_id(base_commit, '.github/workflows/measure-disk-usage.yml')
+    print(f"Previous run_id: {run_id}")
+    compressed_json = None
+    uncompressed_json = None
     if run_id and check_artifact_exists(run_id, f'status_compressed_{platform}.json'):
         compressed_json = get_artifact(run_id, f'status_compressed_{platform}.json')
-
+    if run_id and check_artifact_exists(run_id, f'status_uncompressed_{platform}.json'):
+        uncompressed_json = get_artifact(run_id, f'status_uncompressed_{platform}.json')
+    print(f"Compressed json: {compressed_json}")
+    print(f"Uncompressed json: {uncompressed_json}")
     sizes_json = parse_sizes_json(compressed_json, uncompressed_json)
-    with open(f'{platform}.json', 'w') as f:
+    output_path = f'{platform}.json'
+    with open(output_path, 'w') as f:
         json.dump(sizes_json, f, indent=2)
-    return f'{platform}.json'
+    print(f"Wrote merged sizes json to {output_path}")
+    return output_path


 def parse_sizes_json(compressed_json_path, uncompressed_json_path):
     with open(compressed_json_path, 'r') as f:
-        compressed_list = json.load(f)
+        compressed_list = list(json.load(f))
     with open(uncompressed_json_path, 'r') as f:
-        uncompressed_list = json.load(f)
+        uncompressed_list = list(json.load(f))

     sizes_json = {
         dep["Name"]: {
-            "compressed": dep["Size_Bytes"],
-            "version": dep["Version"]
+            "compressed": int(dep["Size_Bytes"]),
+            "version": dep.get("Version"),
         }
         for dep in compressed_list
     }

     for dep in uncompressed_list:
-        sizes_json[dep["Name"]]["uncompressed"] = dep["Size_Bytes"]
-
+        name = dep["Name"]
+        entry = sizes_json.setdefault(name, {"version": dep.get("Version")})
+        entry["uncompressed"] = int(dep["Size_Bytes"])
+
     return sizes_json


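parse_sizes_json merges the two per-platform artifacts into a single dict keyed by dependency name; the setdefault call is what lets a dependency that appears only in the uncompressed artifact still get an entry. A self-contained sketch of that merge on hypothetical sample data (the field names mirror the diff above; real artifact files may carry additional fields):

    import json

    # Hypothetical sample artifact contents.
    compressed_list = [
        {"Name": "requests", "Version": "2.31.0", "Size_Bytes": 61234},
        {"Name": "pyyaml", "Version": "6.0.1", "Size_Bytes": 183040},
    ]
    uncompressed_list = [
        {"Name": "requests", "Version": "2.31.0", "Size_Bytes": 198765},
    ]

    # Same merge shape as parse_sizes_json: seed entries from the compressed
    # list, then attach uncompressed sizes to existing or new entries.
    sizes_json = {
        dep["Name"]: {"compressed": int(dep["Size_Bytes"]), "version": dep.get("Version")}
        for dep in compressed_list
    }
    for dep in uncompressed_list:
        entry = sizes_json.setdefault(dep["Name"], {"version": dep.get("Version")})
        entry["uncompressed"] = int(dep["Size_Bytes"])

    print(json.dumps(sizes_json, indent=2))
    # "requests" carries both sizes; "pyyaml" has only "compressed" because
    # it is absent from the uncompressed sample.
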
@@ -147,25 +190,29 @@ def main():
     if not dep_sizes_json:
         dep_sizes_json = get_previous_dep_sizes_json(args.base_commit, args.platform)

-    command = (
-        f"ddev size status "
-        f"--platform {args.platform} "
-        f"--dependency-sizes {dep_sizes_json} "
-        f"--format json"
-    )
-    if args.send_to_dd:
-        command += f" --to-dd-key {args.send_to_dd}"
-
-    subprocess.run(command, check=True)
-
-    command += "--compressed"
-
-    subprocess.run(command, check=True)
+    command_args = [
+        "ddev",
+        "size",
+        "status",
+        "--platform",
+        args.platform,
+        "--dependency-sizes",
+        dep_sizes_json,
+        "--format",
+        "json",
+    ]
+    if args.to_dd_key:
+        command_args += ["--to-dd-key", args.to_dd_key]
+
+    print(f"Running command: {' '.join(command_args)}")
+    subprocess.run(command_args, check=True)
+
+    command_args_compressed = command_args + ["--compressed"]
+    print(f"Running command: {' '.join(command_args_compressed)}")
+    subprocess.run(command_args_compressed, check=True)

     return 0


-
-
 if __name__ == '__main__':
     sys.exit(main())
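
The main() rewrite fixes two real bugs, not just logging. The old code passed a single f-string to subprocess.run without shell=True, which raises FileNotFoundError because the whole string is treated as one executable name, and command += "--compressed" appended the flag with no separator, yielding "--format json--compressed". The argument-list form avoids both. A minimal sketch of the corrected pattern (platform and file names are illustrative; assumes ddev is on PATH):

    import subprocess

    # Each argument is its own list element: no shell parsing is involved,
    # and an appended flag can never fuse with its neighbour.
    command_args = [
        "ddev", "size", "status",
        "--platform", "linux-x86_64",
        "--dependency-sizes", "linux-x86_64.json",
        "--format", "json",
    ]
    subprocess.run(command_args, check=True)
    subprocess.run(command_args + ["--compressed"], check=True)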
