
Commit 5345879

Merge pull request ClickHouse#78662 from ClickHouse/ci_compress_artifacts
CI: Support artifacts compressing
2 parents: 4d2e00b + bf87573


16 files changed: +225, -108 lines


ci/defs/defs.py

Lines changed: 1 addition & 0 deletions
@@ -473,6 +473,7 @@ class ArtifactConfigs:
         name="...",
         type=Artifact.Type.S3,
         path=f"{TEMP_DIR}/build/src/unit_tests_dbms",
+        compress_zst=True,
     ).parametrize(
         names=[
             ArtifactNames.UNITTEST_AMD_ASAN,

ci/jobs/performance_tests.py

Lines changed: 1 addition & 2 deletions
@@ -6,7 +6,6 @@
 import traceback
 from pathlib import Path
 
-from ci.jobs.scripts.clickhouse_version import CHVersion
 from ci.praktika.info import Info
 from ci.praktika.result import Result
 from ci.praktika.utils import MetaClasses, Shell, Utils
@@ -247,7 +246,7 @@ def main():
        compare_against_master or compare_against_release
    ), "test option: head_master or prev_release must be selected"
 
-    release_version = CHVersion.get_release_version_as_dict()
+    # release_version = CHVersion.get_release_version_as_dict()
    info = Info()
 
    if Utils.is_arm():

ci/jobs/scripts/workflow_hooks/filter_job.py

Lines changed: 1 addition & 2 deletions
@@ -1,7 +1,6 @@
-from ci.praktika.info import Info
-
 from ci.defs.defs import JobNames
 from ci.jobs.scripts.workflow_hooks.pr_description import Labels
+from ci.praktika.info import Info
 
 
 def only_docs(changed_files):

ci/praktika/artifact.py

Lines changed: 1 addition & 0 deletions
@@ -20,6 +20,7 @@ class Config:
         name: str
         type: str
         path: Union[str, List[str]]
+        compress_zst: bool = False
         _provided_by: str = ""
         _s3_path: str = ""
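
The new compress_zst field defaults to False, so existing Artifact.Config declarations keep their current behavior; only configs that opt in (such as the unit-test binaries in ci/defs/defs.py above) are affected. The compression code itself is not part of this excerpt, so the following is only a sketch of what the flag implies on the producing side; the compress_artifact helper and the third-party zstandard dependency are assumptions, not code from this commit.

    # Illustration only: compress an artifact file with zstd before upload.
    # The helper name and the zstandard dependency are assumed, not praktika's.
    from pathlib import Path

    import zstandard  # pip install zstandard


    def compress_artifact(path: Path, compress_zst: bool) -> Path:
        """Return the file that should be uploaded: a .zst copy if enabled."""
        if not compress_zst:
            return path
        compressed = Path(str(path) + ".zst")
        cctx = zstandard.ZstdCompressor()
        with open(path, "rb") as src, open(compressed, "wb") as dst:
            cctx.copy_stream(src, dst)
        return compressed

    # e.g. upload_path = compress_artifact(Path("build/src/unit_tests_dbms"), True)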

ci/praktika/cache.py

Lines changed: 5 additions & 1 deletion
@@ -54,7 +54,10 @@ def push_success_record(cls, job_name, job_digest, sha, if_not_exist):
         record_file = Path(Settings.TEMP_DIR) / type_
         record.dump(record_file)
         S3.put(
-            s3_path=record_path, local_path=record_file, if_none_matched=if_not_exist
+            s3_path=record_path,
+            local_path=record_file,
+            if_none_matched=if_not_exist,
+            no_strict=True,
         )
         record_file.unlink()
 
@@ -74,6 +77,7 @@ def fetch_success(self, job_name, job_digest):
             s3_path=record_path,
             local_path=record_file_local_dir,
             _skip_download_counter=True,
+            no_strict=True,
         )
 
         if res:
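
The excerpt does not show what no_strict does inside S3.put and S3.copy_file_from_s3; judging from the call sites (cache records here and the commit-history file in hook_html.py, both of which already tolerate a missing object), it plausibly turns a failed transfer into a warning instead of a hard error. A minimal sketch of that pattern under that assumption, with copy_object standing in for the real S3 helpers:

    # Hypothetical warn-instead-of-raise switch; copy_object is a stand-in,
    # not praktika's S3 API.
    import shutil


    def copy_object(src: str, dst: str, no_strict: bool = False) -> bool:
        try:
            shutil.copy(src, dst)  # stands in for the actual S3 transfer
            return True
        except Exception as e:
            if no_strict:
                print(f"WARNING: failed to copy [{src}] -> [{dst}]: {e}")
                return False  # caller treats this as "no record found"
            raise

    # With no_strict=True a missing record simply means a cache miss:
    found = copy_object("/tmp/missing_record.json", "/tmp/local.json", no_strict=True)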

ci/praktika/digest.py

Lines changed: 13 additions & 5 deletions
@@ -6,12 +6,10 @@
 from pathlib import Path
 from typing import List
 
-from praktika.utils import Shell
-
-from . import Job
 from .docker import Docker
+from .job import Job
 from .settings import Settings
-from .utils import Utils
+from .utils import Shell, Utils
 
 
 class Digest:
@@ -30,7 +28,7 @@ def _hash_digest_config(digest_config: Job.CacheDigestConfig) -> str:
     def get_null_digest(cls):
         return "f" * Settings.CACHE_DIGEST_LEN
 
-    def calc_job_digest(self, job_config: Job.Config, docker_digests):
+    def calc_job_digest(self, job_config: Job.Config, docker_digests, artifact_configs):
         config = job_config.digest_config
         if not config:
             return self.get_null_digest()
@@ -72,6 +70,7 @@ def calc_job_digest(self, job_config: Job.Config, docker_digests):
             digest = "-".join([docker_digest, digest])
 
         job_config_dict = dataclasses.asdict(job_config)
+
         drop_fields = [
             "requires",
             "enable_commit_status",
@@ -80,6 +79,15 @@ def calc_job_digest(self, job_config: Job.Config, docker_digests):
         filtered_job_dict = {
             k: v for k, v in job_config_dict.items() if k not in drop_fields
         }
+        # add Articat.Configs list to the job config dict so that changed Articat.Config object affects job digest
+        job_provides_artifact_configs = []
+        for a in job_config.provides:
+            if a in artifact_configs:
+                job_provides_artifact_configs.append(
+                    dataclasses.asdict(artifact_configs[a])
+                )
+        filtered_job_dict["provides"] = job_provides_artifact_configs
+
         config_digest = hashlib.md5(
             json.dumps(filtered_job_dict, sort_keys=True).encode()
         ).hexdigest()[: min(Settings.CACHE_DIGEST_LEN // 4, 4)]
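
With artifact_configs passed in, a job's digest now also covers the Artifact.Config objects the job provides, so changing an artifact setting such as compress_zst invalidates the cached result of the producing job. The self-contained sketch below mirrors the asdict + json.dumps(sort_keys=True) + md5 scheme shown above, with simplified names (ArtifactConfig, job_digest) that are not praktika's real classes:

    # Simplified model of folding provided artifact configs into a job digest.
    import dataclasses
    import hashlib
    import json


    @dataclasses.dataclass
    class ArtifactConfig:
        name: str
        path: str
        compress_zst: bool = False


    def job_digest(job_dict: dict, provides: list, configs: dict) -> str:
        job_dict = dict(job_dict)
        job_dict["provides"] = [
            dataclasses.asdict(configs[a]) for a in provides if a in configs
        ]
        # praktika truncates this digest further; the full hash is kept here
        return hashlib.md5(json.dumps(job_dict, sort_keys=True).encode()).hexdigest()


    configs = {"unit_tests": ArtifactConfig("unit_tests", "build/src/unit_tests_dbms")}
    before = job_digest({"name": "Unit tests"}, ["unit_tests"], configs)
    configs["unit_tests"].compress_zst = True   # change only the artifact config
    after = job_digest({"name": "Unit tests"}, ["unit_tests"], configs)
    assert before != after  # the job digest changes, so the cached job result misses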

ci/praktika/gh.py

Lines changed: 1 addition & 1 deletion
@@ -121,7 +121,7 @@ def get_pr_title_body_labels(cls, pr=None, repo=None):
         except Exception:
             print("ERROR: Failed to get PR data")
             traceback.print_exc()
-            Info().store_exception_traceback()
+            Info().store_traceback()
             return "", "", []
         return title, body, labels

ci/praktika/hook_cache.py

Lines changed: 6 additions & 1 deletion
@@ -20,10 +20,15 @@ def configure(cls, workflow):
         ), f"Outdated yaml pipelines or BUG. Configuration must be run only for workflow with enabled cache, workflow [{workflow.name}]"
         artifact_digest_map = {}
         job_digest_map = {}
+        artifact_name_config_map = {}
+        for a in workflow.artifacts:
+            artifact_name_config_map[a.name] = a
 
         for job in workflow.jobs:
             digest = cache.digest.calc_job_digest(
-                job_config=job, docker_digests=docker_digests
+                job_config=job,
+                docker_digests=docker_digests,
+                artifact_configs=artifact_name_config_map,
             )
             job_digest_map[job.name] = digest
             if job.provides:

ci/praktika/hook_html.py

Lines changed: 6 additions & 2 deletions
@@ -99,7 +99,9 @@ def pull_from_s3(cls):
         local_path = Path(cls.file_name())
         file_name = local_path.name
         s3_path = f"{cls.get_s3_path()}/{file_name}"
-        if not S3.copy_file_from_s3(s3_path=s3_path, local_path=local_path):
+        if not S3.copy_file_from_s3(
+            s3_path=s3_path, local_path=local_path, no_strict=True
+        ):
             print(f"WARNING: failed to cp file [{s3_path}] from s3")
             return []
         return cls.from_json(local_path)
@@ -111,7 +113,9 @@ def push_to_s3(cls, commits):
         local_path = Path(cls.file_name())
         file_name = local_path.name
         s3_path = f"{cls.get_s3_path()}/{file_name}"
-        if not S3.copy_file_to_s3(s3_path=s3_path, local_path=local_path, text=True):
+        if not S3.copy_file_to_s3(
+            s3_path=s3_path, local_path=local_path, text=True, no_strict=True
+        ):
             print(f"WARNING: failed to cp file [{local_path}] to s3")
 
     @classmethod

ci/praktika/info.py

Lines changed: 1 addition & 1 deletion
@@ -192,7 +192,7 @@ def get_custom_data(self, key=None):
             return custom_data.get(key, None)
         return custom_data
 
-    def store_exception_traceback(self):
+    def store_traceback(self):
         self.env.TRACEBACKS.append(traceback.format_exc())
         self.env.dump()
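
The rename from store_exception_traceback to store_traceback (with the call site in gh.py updated accordingly) keeps the same behavior: the current exception's formatted traceback is appended to the environment and persisted. A small sketch of the call-site pattern, using a plain dict in place of praktika's env object:

    # Sketch of the store_traceback pattern; the dict-based env is a stand-in.
    import traceback


    class Info:
        def __init__(self, env):
            self.env = env

        def store_traceback(self):
            self.env.setdefault("TRACEBACKS", []).append(traceback.format_exc())


    env = {}
    try:
        raise RuntimeError("Failed to get PR data")
    except Exception:
        traceback.print_exc()
        Info(env).store_traceback()

    assert len(env["TRACEBACKS"]) == 1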
