Skip to content

Commit 470e383

Browse files
authored
Merge pull request ClickHouse#79709 from ClickHouse/ci_build_profile_data
CI: Enable build profiling
2 parents 072a30b + 997f58e commit 470e383

File tree

8 files changed

+171
-159
lines changed

8 files changed

+171
-159
lines changed

ci/defs/job_configs.py

Lines changed: 27 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,22 @@
22

33
from ci.defs.defs import ArtifactNames, BuildTypes, JobNames, RunnerLabels
44

5+
# Inputs that determine whether a cached build result can be reused: a
# change to any of these paths (or to a git submodule) alters the digest
# and forces a rebuild.
_build_digest_include_paths = [
    "./src",
    "./contrib/",
    "./CMakeLists.txt",
    "./PreLoad.cmake",
    "./cmake",
    "./base",
    "./programs",
    "./rust",
    "./ci/jobs/build_clickhouse.py",
    "./ci/jobs/scripts/job_hooks/build_profile_hook.py",
]

# Shared cache-digest configuration used by every build job config below,
# so all build jobs stay in sync on what invalidates the cache.
build_digest_config = Job.CacheDigestConfig(
    include_paths=_build_digest_include_paths,
    with_git_submodules=True,
)
20+
521

622
class JobConfigs:
723
docker_build_arm = Job.Config(
@@ -97,20 +113,7 @@ class JobConfigs:
97113
run_in_docker="clickhouse/binary-builder+--network=host",
98114
timeout=3600 * 4,
99115
allow_merge_on_failure=True,
100-
digest_config=Job.CacheDigestConfig(
101-
include_paths=[
102-
"./src",
103-
"./contrib/",
104-
"./CMakeLists.txt",
105-
"./PreLoad.cmake",
106-
"./cmake",
107-
"./base",
108-
"./programs",
109-
"./rust",
110-
"./ci/jobs/build_clickhouse.py",
111-
],
112-
with_git_submodules=True,
113-
),
116+
digest_config=build_digest_config,
114117
).parametrize(
115118
parameter=[
116119
BuildTypes.ARM_TIDY,
@@ -127,21 +130,11 @@ class JobConfigs:
127130
# --network=host required for ec2 metadata http endpoint to work
128131
run_in_docker="clickhouse/binary-builder+--network=host",
129132
timeout=3600 * 2,
130-
digest_config=Job.CacheDigestConfig(
131-
include_paths=[
132-
"./src",
133-
"./contrib/",
134-
"./CMakeLists.txt",
135-
"./PreLoad.cmake",
136-
"./cmake",
137-
"./base",
138-
"./programs",
139-
"./rust",
140-
"./ci/jobs/build_clickhouse.py",
141-
],
142-
with_git_submodules=True,
143-
),
144-
post_hooks=["python3 ./ci/jobs/scripts/job_hooks/build_post_hook.py"],
133+
digest_config=build_digest_config,
134+
post_hooks=[
135+
"python3 ./ci/jobs/scripts/job_hooks/build_master_head_hook.py",
136+
"python3 ./ci/jobs/scripts/job_hooks/build_profile_hook.py",
137+
],
145138
).parametrize(
146139
parameter=[
147140
BuildTypes.AMD_DEBUG,
@@ -225,21 +218,11 @@ class JobConfigs:
225218
# --network=host required for ec2 metadata http endpoint to work
226219
run_in_docker="clickhouse/binary-builder+--network=host",
227220
timeout=3600 * 2,
228-
digest_config=Job.CacheDigestConfig(
229-
include_paths=[
230-
"./src",
231-
"./contrib/",
232-
"./CMakeLists.txt",
233-
"./PreLoad.cmake",
234-
"./cmake",
235-
"./base",
236-
"./programs",
237-
"./rust",
238-
"./ci/jobs/build_clickhouse.py",
239-
],
240-
with_git_submodules=True,
241-
),
242-
post_hooks=["python3 ./ci/jobs/scripts/job_hooks/build_post_hook.py"],
221+
digest_config=build_digest_config,
222+
post_hooks=[
223+
"python3 ./ci/jobs/scripts/job_hooks/build_master_head_hook.py",
224+
"python3 ./ci/jobs/scripts/job_hooks/build_profile_hook.py",
225+
],
243226
).parametrize(
244227
parameter=[
245228
BuildTypes.AMD_DARWIN,

ci/jobs/build_clickhouse.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -124,11 +124,12 @@ def main():
124124
results = []
125125

126126
if res and JobStages.CHECKOUT_SUBMODULES in stages:
127-
Shell.check(f"rm -rf {build_dir} && mkdir -p {build_dir}")
127+
Shell.check(f"mkdir -p {build_dir}")
128128
results.append(
129129
Result.from_commands_run(
130130
name="Checkout Submodules",
131131
command=f"git submodule sync --recursive && git submodule init && git submodule update --depth 1 --recursive --jobs {min([Utils.cpu_count(), 20])}",
132+
retries=3,
132133
)
133134
)
134135
res = results[-1].is_ok()
Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
import traceback
2+
3+
from ci.defs.defs import S3_BUCKET_NAME, BuildTypes
4+
from ci.praktika.info import Info
5+
from ci.praktika.s3 import S3
6+
from ci.praktika.utils import Shell
7+
8+
# Maps a build type to the directory prefix of its static download location
# in S3 (e.g. .../<branch>/amd64/clickhouse).
BUILD_TYPE_TO_STATIC_LOCATION = {
    BuildTypes.AMD_RELEASE: "amd64",
    BuildTypes.ARM_RELEASE: "aarch64",
    BuildTypes.AMD_DARWIN: "macos",
    BuildTypes.ARM_DARWIN: "macos-aarch64",
    BuildTypes.ARM_V80COMPAT: "aarch64v80compat",
    BuildTypes.AMD_FREEBSD: "freebsd",
    BuildTypes.PPC64LE: "powerpc64le",
    BuildTypes.AMD_COMPAT: "amd64compat",
    BuildTypes.AMD_MUSL: "amd64musl",
    BuildTypes.RISCV64: "riscv64",
    BuildTypes.S390X: "s390x",
    BuildTypes.LOONGARCH64: "loongarch64",
}


def check():
    """Upload self-extracting binaries to their static S3 location.

    Runs only for non-PR commits in the ClickHouse/ClickHouse repository.
    The target prefix is chosen by matching a known build type against the
    current job name. Upload failures are logged but deliberately do not
    fail the job (best-effort publishing).
    """
    info = Info()
    # Log what the build produced — helps debug missing-artifact issues.
    Shell.check("find ./ci/tmp/build/programs -type f", verbose=True)
    if not info.pr_number and info.repo_name == "ClickHouse/ClickHouse":
        for build_type, prefix in BUILD_TYPE_TO_STATIC_LOCATION.items():
            if build_type in info.job_name:
                print("Upload builds to static location")
                try:
                    # The full binary is published as "clickhouse-full",
                    # the stripped one as "clickhouse".
                    S3.copy_file_to_s3(
                        local_path="./ci/tmp/build/programs/self-extracting/clickhouse",
                        s3_path=f"{S3_BUCKET_NAME}/{info.git_branch}/{prefix}/clickhouse-full",
                        with_rename=True,
                    )
                    S3.copy_file_to_s3(
                        local_path="./ci/tmp/build/programs/self-extracting/clickhouse-stripped",
                        s3_path=f"{S3_BUCKET_NAME}/{info.git_branch}/{prefix}/clickhouse",
                        with_rename=True,
                    )
                except Exception:
                    # Best-effort: log the failure and keep the job green.
                    traceback.print_exc()
                return
        print(f"Not applicable for [{info.job_name}]")
    else:
        print("Not applicable")
    return True


if __name__ == "__main__":
    check()

ci/jobs/scripts/job_hooks/build_post_hook.py

Lines changed: 0 additions & 103 deletions
This file was deleted.
Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
import sys
2+
import traceback
3+
from pathlib import Path
4+
5+
from ci.jobs.scripts.log_cluster import LogClusterBuildProfileQueries
6+
from ci.praktika.info import Info
7+
from ci.praktika.result import Result
8+
from ci.praktika.utils import Shell, Utils
9+
10+
# Working directories used by the build job (relative to the repo root).
temp_dir = "./ci/tmp"
build_dir = "./ci/tmp/build"


def check():
    """Collect build profile data and insert it into the log cluster.

    Runs prepare-time-trace.sh over the build directory, concatenates the
    per-file profile chunks into a single profile.json, then inserts
    profile, binary-size and binary-symbol data. Exits with status 1 on
    any failure.
    """
    print("Prepare build profile data")
    # Use the temp_dir constant instead of repeating the literal path.
    profiles_dir = Path(temp_dir) / "profiles_source"
    profiles_dir.mkdir(parents=True, exist_ok=True)
    try:
        # Produces per-file profile chunks plus binary_sizes.txt and
        # binary_symbols.txt in profiles_dir.
        Shell.check(
            "./utils/prepare-time-trace/prepare-time-trace.sh "
            f"{build_dir} {profiles_dir.absolute()}",
            strict=True,
            verbose=True,
        )
        # Concatenate every chunk except the two size/symbol tables into a
        # single file for the profile-data insert.
        profile_data_file = Path(temp_dir) / "profile.json"
        with open(profile_data_file, "wb") as profile_fd:
            for profile_source in profiles_dir.iterdir():
                if profile_source.name not in (
                    "binary_sizes.txt",
                    "binary_symbols.txt",
                ):
                    with open(profile_source, "rb") as ps_fd:
                        profile_fd.write(ps_fd.read())
        info = Info()
        check_start_time = Utils.timestamp_to_str(
            Result.from_fs(info.job_name).start_time
        )
        # Assumes the job name ends with the build type in parentheses,
        # e.g. "Build (amd_release)" — TODO confirm against job configs.
        build_type = info.job_name.split("(")[1].rstrip(")")
        assert build_type
        LogClusterBuildProfileQueries().insert_profile_data(
            build_name=build_type,
            start_time=check_start_time,
            file=profile_data_file,
        )
        LogClusterBuildProfileQueries().insert_build_size_data(
            build_name=build_type,
            start_time=check_start_time,
            file=profiles_dir / "binary_sizes.txt",
        )
        LogClusterBuildProfileQueries().insert_binary_symbol_data(
            build_name=build_type,
            start_time=check_start_time,
            file=profiles_dir / "binary_symbols.txt",
        )
    except Exception:
        print("ERROR: Failed to upload build profile data:")
        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    # Profile data is collected only for non-PR (e.g. master) runs.
    if Info().pr_number == 0:
        check()
    else:
        print("Not applicable for PRs")

0 commit comments

Comments
 (0)