Skip to content

Commit b8231b0

Browse files
committed
Merge remote-tracking branch 'upstream/master' into azure-remove-redundant-list
2 parents 2012c46 + 60579e2 commit b8231b0

File tree

328 files changed

+4455
-1692
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

328 files changed

+4455
-1692
lines changed

CMakeLists.txt

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -204,11 +204,6 @@ option(OMIT_HEAVY_DEBUG_SYMBOLS
204204
${OMIT_HEAVY_DEBUG_SYMBOLS_DEFAULT})
205205

206206
option(BUILD_STANDALONE_KEEPER "Build keeper as small standalone binary" OFF)
207-
if (NOT BUILD_STANDALONE_KEEPER)
208-
option(CREATE_KEEPER_SYMLINK "Create symlink for clickhouse-keeper to main server binary" ON)
209-
else ()
210-
option(CREATE_KEEPER_SYMLINK "Create symlink for clickhouse-keeper to main server binary" OFF)
211-
endif ()
212207

213208
# Create BuildID when using lld. For other linkers it is created by default.
214209
# (NOTE: LINKER_NAME can be either path or name, and in different variants)

ci/defs/job_configs.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -281,6 +281,7 @@ class JobConfigs:
281281
RunnerLabels.BUILDER_ARM, # fuzzers
282282
],
283283
)
284+
builds_for_tests = [b.name for b in build_jobs] + [tidy_build_jobs[0]]
284285
install_check_jobs = Job.Config(
285286
name=JobNames.INSTALL_TEST,
286287
runs_on=["..."],

ci/jobs/docs_job.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import os
22

33
from ci.praktika.result import Result
4-
from ci.praktika.utils import Shell, Utils
4+
from ci.praktika.utils import Utils
55

66
if __name__ == "__main__":
77

@@ -39,7 +39,7 @@
3939
results.append(
4040
Result.from_commands_run(
4141
name=testname,
42-
command=[f"yarn run-markdown-linter"],
42+
command=[f"yarn check-markdown"],
4343
workdir="/opt/clickhouse-docs",
4444
)
4545
)

ci/jobs/scripts/check_style/aspell-ignore/en/aspell-dict.txt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@ Approximative
3333
ArrayJoin
3434
ArrowCompression
3535
ArrowStream
36+
aspell
3637
AsyncInsertCacheSize
3738
AsynchronousHeavyMetricsCalculationTimeSpent
3839
AsynchronousHeavyMetricsUpdateInterval
@@ -129,6 +130,7 @@ CMPLNT
129130
CMake
130131
CMakeLists
131132
CODECS
133+
codespell
132134
CORS
133135
COVID
134136
CPUFrequencyMHz
@@ -659,6 +661,7 @@ Multiqueries
659661
Multithreading
660662
Multiword
661663
MurmurHash
664+
mypy
662665
MySQLConnection
663666
MySQLDataTypesSupport
664667
MySQLDump

ci/jobs/scripts/functional_tests/setup_ch_cluster.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -110,7 +110,7 @@ else
110110
fi
111111
clickhouse-client --query "CREATE TABLE test.hits_s3 (WatchID UInt64, JavaEnable UInt8, Title String, GoodEvent Int16, EventTime DateTime, EventDate Date, CounterID UInt32, ClientIP UInt32, ClientIP6 FixedString(16), RegionID UInt32, UserID UInt64, CounterClass Int8, OS UInt8, UserAgent UInt8, URL String, Referer String, URLDomain String, RefererDomain String, Refresh UInt8, IsRobot UInt8, RefererCategories Array(UInt16), URLCategories Array(UInt16), URLRegions Array(UInt32), RefererRegions Array(UInt32), ResolutionWidth UInt16, ResolutionHeight UInt16, ResolutionDepth UInt8, FlashMajor UInt8, FlashMinor UInt8, FlashMinor2 String, NetMajor UInt8, NetMinor UInt8, UserAgentMajor UInt16, UserAgentMinor FixedString(2), CookieEnable UInt8, JavascriptEnable UInt8, IsMobile UInt8, MobilePhone UInt8, MobilePhoneModel String, Params String, IPNetworkID UInt32, TraficSourceID Int8, SearchEngineID UInt16, SearchPhrase String, AdvEngineID UInt8, IsArtifical UInt8, WindowClientWidth UInt16, WindowClientHeight UInt16, ClientTimeZone Int16, ClientEventTime DateTime, SilverlightVersion1 UInt8, SilverlightVersion2 UInt8, SilverlightVersion3 UInt32, SilverlightVersion4 UInt16, PageCharset String, CodeVersion UInt32, IsLink UInt8, IsDownload UInt8, IsNotBounce UInt8, FUniqID UInt64, HID UInt32, IsOldCounter UInt8, IsEvent UInt8, IsParameter UInt8, DontCountHits UInt8, WithHash UInt8, HitColor FixedString(1), UTCEventTime DateTime, Age UInt8, Sex UInt8, Income UInt8, Interests UInt16, Robotness UInt8, GeneralInterests Array(UInt16), RemoteIP UInt32, RemoteIP6 FixedString(16), WindowName Int32, OpenerName Int32, HistoryLength Int16, BrowserLanguage FixedString(2), BrowserCountry FixedString(2), SocialNetwork String, SocialAction String, HTTPError UInt16, SendTiming Int32, DNSTiming Int32, ConnectTiming Int32, ResponseStartTiming Int32, ResponseEndTiming Int32, FetchTiming Int32, RedirectTiming Int32, DOMInteractiveTiming Int32, DOMContentLoadedTiming Int32, DOMCompleteTiming Int32, 
LoadEventStartTiming Int32, LoadEventEndTiming Int32, NSToDOMContentLoadedTiming Int32, FirstPaintTiming Int32, RedirectCount Int8, SocialSourceNetworkID UInt8, SocialSourcePage String, ParamPrice Int64, ParamOrderID String, ParamCurrency FixedString(3), ParamCurrencyID UInt16, GoalsReached Array(UInt32), OpenstatServiceName String, OpenstatCampaignID String, OpenstatAdID String, OpenstatSourceID String, UTMSource String, UTMMedium String, UTMCampaign String, UTMContent String, UTMTerm String, FromTag String, HasGCLID UInt8, RefererHash UInt64, URLHash UInt64, CLID UInt32, YCLID UInt64, ShareService String, ShareURL String, ShareTitle String, ParsedParams Nested(Key1 String, Key2 String, Key3 String, Key4 String, Key5 String, ValueDouble Float64), IslandID FixedString(16), RequestNum UInt32, RequestTry UInt8) ENGINE = MergeTree() PARTITION BY toYYYYMM(EventDate) ORDER BY (CounterID, EventDate, intHash32(UserID)) SAMPLE BY intHash32(UserID) SETTINGS index_granularity = 8192, storage_policy='s3_cache'"
112112
# AWS S3 is very inefficient, so increase memory even further:
113-
clickhouse-client --max_memory_usage 30G --max_memory_usage_for_user 30G --query "INSERT INTO test.hits_s3 SELECT * FROM test.hits SETTINGS enable_filesystem_cache_on_write_operations=0, max_insert_threads=16"
113+
clickhouse-client --max_execution_time 600 --max_memory_usage 30G --max_memory_usage_for_user 30G --query "INSERT INTO test.hits_s3 SELECT * FROM test.hits SETTINGS enable_filesystem_cache_on_write_operations=0, max_insert_threads=16"
114114
fi
115115

116116
clickhouse-client --query "SHOW TABLES FROM test"

ci/jobs/scripts/workflow_hooks/can_be_merged.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,8 @@ def check():
99
forbidden_labels = [
1010
Labels.CI_PERFORMANCE,
1111
Labels.NO_FAST_TESTS,
12+
Labels.CI_INTEGRATION_FLAKY,
13+
Labels.CI_FUNCTIONAL_FLAKY,
1214
Labels.CI_INTEGRATION,
1315
Labels.CI_FUNCTIONAL,
1416
]

ci/jobs/scripts/workflow_hooks/feature_docs.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
from ci.jobs.scripts.workflow_hooks.pr_description import Labels
66

77
files_for_which_docs_autogenerated = [
8-
"src/storages/MergeTree/MergeTreeSettings.cpp",
8+
"src/Storages/MergeTree/MergeTreeSettings.cpp",
99
"src/Core/Settings.cpp",
1010
]
1111

ci/jobs/scripts/workflow_hooks/filter_job.py

Lines changed: 25 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
from ci.defs.defs import JobNames
22
from ci.jobs.scripts.workflow_hooks.pr_description import Labels
33
from ci.praktika.info import Info
4+
from ci.defs.job_configs import JobConfigs
45

56

67
def only_docs(changed_files):
@@ -32,12 +33,12 @@ def only_docs(changed_files):
3233
"Build (arm_tidy)",
3334
]
3435

35-
INTEGRATION_TEST_CHECK_JOBS = [
36+
INTEGRATION_TEST_FLAKY_CHECK_JOBS = [
3637
"Build (amd_asan)",
3738
"Integration tests (asan, flaky check)",
3839
]
3940

40-
FUNCTIONAL_TEST_CHECK_JOBS = [
41+
FUNCTIONAL_TEST_FLAKY_CHECK_JOBS = [
4142
"Build (amd_asan)",
4243
"Stateless tests (asan, flaky check)",
4344
]
@@ -65,17 +66,34 @@ def should_skip_job(job_name):
6566
return True, f"Skipped, labeled with '{Labels.NO_FAST_TESTS}'"
6667

6768
if (
68-
Labels.CI_INTEGRATION in _info_cache.pr_labels
69-
and job_name not in INTEGRATION_TEST_CHECK_JOBS
69+
Labels.CI_INTEGRATION_FLAKY in _info_cache.pr_labels
70+
and job_name not in INTEGRATION_TEST_FLAKY_CHECK_JOBS
7071
):
7172
return (
7273
True,
73-
f"Skipped, labeled with '{Labels.CI_INTEGRATION}' - run integration test jobs only",
74+
f"Skipped, labeled with '{Labels.CI_INTEGRATION_FLAKY}' - run integration test jobs only",
7475
)
7576

7677
if (
77-
Labels.CI_FUNCTIONAL in _info_cache.pr_labels
78-
and job_name not in FUNCTIONAL_TEST_CHECK_JOBS
78+
Labels.CI_FUNCTIONAL_FLAKY in _info_cache.pr_labels
79+
and job_name not in FUNCTIONAL_TEST_FLAKY_CHECK_JOBS
80+
):
81+
return (
82+
True,
83+
f"Skipped, labeled with '{Labels.CI_FUNCTIONAL_FLAKY}' - run stateless test jobs only",
84+
)
85+
86+
if Labels.CI_INTEGRATION in _info_cache.pr_labels and (
87+
job_name.startswith(JobNames.INTEGRATION) or job_name in JobConfigs.builds_for_tests
88+
):
89+
return (
90+
True,
91+
f"Skipped, labeled with '{Labels.CI_INTEGRATION}' - run integration test jobs only",
92+
)
93+
94+
if Labels.CI_FUNCTIONAL in _info_cache.pr_labels and (
95+
job_name.startswith(JobNames.STATELESS)
96+
or job_name.startswith(JobNames.STATEFUL) or job_name in JobConfigs.builds_for_tests
7997
):
8098
return (
8199
True,

ci/jobs/scripts/workflow_hooks/pr_description.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,10 @@ class Labels:
6969

7070
CI_PERFORMANCE = "ci-performance"
7171

72+
CI_INTEGRATION_FLAKY = "ci-integration-test-flaky"
7273
CI_INTEGRATION = "ci-integration-test"
74+
75+
CI_FUNCTIONAL_FLAKY = "ci-functional-test-flaky"
7376
CI_FUNCTIONAL = "ci-functional-test"
7477

7578
# automatic backport for critical bug fixes

docker/test/integration/runner/Dockerfile

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -63,18 +63,17 @@ RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - \
6363
COPY requirements.txt /
6464
RUN python3 -m pip install --no-cache-dir -r requirements.txt
6565

66-
# Hudi supports only spark 3.3.*, not 3.4
67-
RUN curl -fsSL -O https://archive.apache.org/dist/spark/spark-3.3.2/spark-3.3.2-bin-hadoop3.tgz \
68-
&& tar xzvf spark-3.3.2-bin-hadoop3.tgz -C / \
69-
&& rm spark-3.3.2-bin-hadoop3.tgz
66+
RUN curl -fsSL -O https://archive.apache.org/dist/spark/spark-3.5.5/spark-3.5.5-bin-hadoop3.tgz \
67+
&& tar xzvf spark-3.5.5-bin-hadoop3.tgz -C / \
68+
&& rm spark-3.5.5-bin-hadoop3.tgz
7069

7170
# download spark and packages
7271
# if you change packages, don't forget to update them in tests/integration/helpers/cluster.py
73-
RUN packages="org.apache.hudi:hudi-spark3.3-bundle_2.12:0.13.0,\
74-
io.delta:delta-core_2.12:2.3.0,\
75-
org.apache.iceberg:iceberg-spark-runtime-3.3_2.12:1.1.0" \
76-
&& /spark-3.3.2-bin-hadoop3/bin/spark-shell --packages "$packages" > /dev/null \
77-
&& find /root/.ivy2/ -name '*.jar' -exec ln -sf {} /spark-3.3.2-bin-hadoop3/jars/ \;
72+
RUN packages="io.delta:delta-spark_2.12:3.1.0,\
73+
org.apache.hudi:hudi-spark3.5-bundle_2.12:1.0.1,\
74+
org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.4.3" \
75+
&& /spark-3.5.5-bin-hadoop3/bin/spark-shell --packages "$packages" > /dev/null \
76+
&& find /root/.ivy2/ -name '*.jar' -exec ln -sf {} /spark-3.5.5-bin-hadoop3/jars/ \;
7877

7978
RUN set -x \
8079
&& addgroup --system dockremap \

0 commit comments

Comments
 (0)