Skip to content

Commit 957e164

Browse files
authored
Merge branch 'antalya-25.8' into feature/antalya-25.8/iceberg_local_cluster
2 parents 3f0edef + 60c312c commit 957e164

File tree

10 files changed

+35
-15
lines changed

10 files changed

+35
-15
lines changed

.github/workflows/master.yml

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -4178,7 +4178,7 @@ jobs:
41784178
secrets: inherit
41794179
with:
41804180
runner_type: altinity-on-demand, altinity-regression-tester
4181-
commit: 3fbe58a0ebe8fa5f97b7f36c45a2a69b1d3b6568
4181+
commit: fc19ce3a7322a10ab791de755c950a56744a12e7
41824182
arch: release
41834183
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
41844184
timeout_minutes: 300
@@ -4190,7 +4190,7 @@ jobs:
41904190
secrets: inherit
41914191
with:
41924192
runner_type: altinity-on-demand, altinity-regression-tester-aarch64
4193-
commit: 3fbe58a0ebe8fa5f97b7f36c45a2a69b1d3b6568
4193+
commit: fc19ce3a7322a10ab791de755c950a56744a12e7
41944194
arch: aarch64
41954195
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
41964196
timeout_minutes: 300

.github/workflows/pull_request.yml

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -4134,7 +4134,7 @@ jobs:
41344134
secrets: inherit
41354135
with:
41364136
runner_type: altinity-on-demand, altinity-regression-tester
4137-
commit: 3fbe58a0ebe8fa5f97b7f36c45a2a69b1d3b6568
4137+
commit: fc19ce3a7322a10ab791de755c950a56744a12e7
41384138
arch: release
41394139
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
41404140
timeout_minutes: 300
@@ -4146,7 +4146,7 @@ jobs:
41464146
secrets: inherit
41474147
with:
41484148
runner_type: altinity-on-demand, altinity-regression-tester-aarch64
4149-
commit: 3fbe58a0ebe8fa5f97b7f36c45a2a69b1d3b6568
4149+
commit: fc19ce3a7322a10ab791de755c950a56744a12e7
41504150
arch: aarch64
41514151
build_sha: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
41524152
timeout_minutes: 300

.gitmodules

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -6,7 +6,7 @@
66
url = https://github.com/Thalhammer/jwt-cpp
77
[submodule "contrib/zstd"]
88
path = contrib/zstd
9-
url = https://github.com/facebook/zstd
9+
url = https://github.com/ClickHouse/zstd.git
1010
[submodule "contrib/lz4"]
1111
path = contrib/lz4
1212
url = https://github.com/lz4/lz4
@@ -45,7 +45,7 @@
4545
url = https://github.com/ClickHouse/arrow
4646
[submodule "contrib/thrift"]
4747
path = contrib/thrift
48-
url = https://github.com/apache/thrift
48+
url = https://github.com/ClickHouse/thrift.git
4949
[submodule "contrib/libhdfs3"]
5050
path = contrib/libhdfs3
5151
url = https://github.com/ClickHouse/libhdfs3

ci/praktika/native_jobs.py

Lines changed: 9 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -18,6 +18,7 @@
1818
from .runtime import RunConfig
1919
from .settings import Settings
2020
from .utils import Shell, Utils
21+
from ci.defs.defs import ArtifactNames
2122

2223
assert Settings.CI_CONFIG_RUNS_ON
2324

@@ -403,17 +404,22 @@ def check_affected_jobs():
403404
# NOTE (strtgbb): We always want these build artifacts for our report and regression tests.
404405
# If we make FinishCIReport and regression tests into praktika jobs, we can remove this.
405406
if "CIReport" in workflow.additional_jobs:
406-
all_required_artifacts.update(["CH_AMD_RELEASE", "CH_ARM_RELEASE"])
407+
all_required_artifacts.update(
408+
[
409+
ArtifactNames.CH_AMD_RELEASE,
410+
ArtifactNames.CH_ARM_RELEASE,
411+
]
412+
)
407413
if (
408414
"Regression" in workflow.additional_jobs
409415
and "regression"
410416
not in workflow_config.custom_data.get("ci_exclude_tags", [])
411417
):
412-
all_required_artifacts.update(["CH_AMD_BINARY"])
418+
all_required_artifacts.update([ArtifactNames.CH_AMD_BINARY])
413419
if "aarch64" not in workflow_config.custom_data.get(
414420
"ci_exclude_tags", []
415421
):
416-
all_required_artifacts.update(["CH_ARM_BINARY"])
422+
all_required_artifacts.update([ArtifactNames.CH_ARM_BINARY])
417423
print(f"Including artifacts for custom jobs [{all_required_artifacts}]")
418424

419425
for job in workflow.jobs:

ci/praktika/yaml_additional_templates.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -35,7 +35,7 @@ class AltinityWorkflowTemplates:
3535
echo "Workflow Run Report: [View Report]($REPORT_LINK)" >> $GITHUB_STEP_SUMMARY
3636
"""
3737
# Additional jobs
38-
REGRESSION_HASH = "3fbe58a0ebe8fa5f97b7f36c45a2a69b1d3b6568"
38+
REGRESSION_HASH = "fc19ce3a7322a10ab791de755c950a56744a12e7"
3939
ALTINITY_JOBS = {
4040
"GrypeScan": r"""
4141
GrypeScanServer:

src/Storages/MergeTree/MergeTreeData.cpp

Lines changed: 6 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -213,6 +213,7 @@ namespace Setting
213213
extern const SettingsUInt64 min_bytes_to_use_direct_io;
214214
extern const SettingsBool export_merge_tree_part_overwrite_file_if_exists;
215215
extern const SettingsBool output_format_parallel_formatting;
216+
extern const SettingsBool output_format_parquet_parallel_encoding;
216217
}
217218

218219
namespace MergeTreeSetting
@@ -6244,7 +6245,9 @@ void MergeTreeData::exportPartToTable(const PartitionCommand & command, ContextP
62446245
dest_storage->getStorageID(),
62456246
part,
62466247
query_context->getSettingsRef()[Setting::export_merge_tree_part_overwrite_file_if_exists],
6247-
query_context->getSettingsRef()[Setting::output_format_parallel_formatting]);
6248+
query_context->getSettingsRef()[Setting::output_format_parallel_formatting],
6249+
query_context->getSettingsRef()[Setting::output_format_parquet_parallel_encoding],
6250+
query_context->getSettingsRef()[Setting::max_threads]);
62486251

62496252
std::lock_guard lock(export_manifests_mutex);
62506253

@@ -6292,6 +6295,8 @@ void MergeTreeData::exportPartToTableImpl(
62926295
{
62936296
auto context_copy = Context::createCopy(local_context);
62946297
context_copy->setSetting("output_format_parallel_formatting", manifest.parallel_formatting);
6298+
context_copy->setSetting("output_format_parquet_parallel_encoding", manifest.parquet_parallel_encoding);
6299+
context_copy->setSetting("max_threads", manifest.max_threads);
62956300

62966301
sink = destination_storage->import(
62976302
manifest.data_part->name + "_" + manifest.data_part->checksums.getTotalChecksumHex(),

src/Storages/MergeTree/MergeTreeExportManifest.h

Lines changed: 8 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -13,17 +13,24 @@ struct MergeTreeExportManifest
1313
const StorageID & destination_storage_id_,
1414
const DataPartPtr & data_part_,
1515
bool overwrite_file_if_exists_,
16-
bool parallel_formatting_)
16+
bool parallel_formatting_,
17+
bool parallel_formatting_parquet_,
18+
std::size_t max_threads_)
1719
: destination_storage_id(destination_storage_id_),
1820
data_part(data_part_),
1921
overwrite_file_if_exists(overwrite_file_if_exists_),
2022
parallel_formatting(parallel_formatting_),
23+
parquet_parallel_encoding(parallel_formatting_parquet_),
24+
max_threads(max_threads_),
2125
create_time(time(nullptr)) {}
2226

2327
StorageID destination_storage_id;
2428
DataPartPtr data_part;
2529
bool overwrite_file_if_exists;
2630
bool parallel_formatting;
31+
/// parquet has a different setting for parallel formatting
32+
bool parquet_parallel_encoding;
33+
std::size_t max_threads;
2734

2835
time_t create_time;
2936
mutable bool in_progress = false;

src/Storages/MergeTree/MergeTreeSequentialSource.cpp

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -169,7 +169,8 @@ MergeTreeSequentialSource::MergeTreeSequentialSource(
169169
addThrottler(read_settings.local_throttler, context->getMergesThrottler());
170170
break;
171171
case Export:
172-
read_settings.local_throttler = context->getExportsThrottler();
172+
addThrottler(read_settings.local_throttler, context->getExportsThrottler());
173+
addThrottler(read_settings.remote_throttler, context->getExportsThrottler());
173174
break;
174175
}
175176

src/Storages/ObjectStorage/ObjectStorageFilePathGenerator.h

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -55,7 +55,7 @@ namespace DB
5555

5656
result += raw_path;
5757

58-
if (raw_path.back() != '/')
58+
if (!raw_path.empty() && raw_path.back() != '/')
5959
{
6060
result += "/";
6161
}

src/Storages/ObjectStorage/StorageObjectStorage.cpp

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,3 +1,4 @@
1+
#include <optional>
12
#include <thread>
23
#include <Core/ColumnWithTypeAndName.h>
34
#include <Storages/ObjectStorage/StorageObjectStorage.h>
@@ -507,7 +508,7 @@ SinkToStoragePtr StorageObjectStorage::import(
507508
destination_file_path,
508509
object_storage,
509510
configuration,
510-
format_settings,
511+
std::nullopt, /// passing nullopt to force rebuild for format_settings based on query context
511512
std::make_shared<const Block>(getInMemoryMetadataPtr()->getSampleBlock()),
512513
local_context);
513514
}

0 commit comments

Comments (0)