Skip to content

Commit d6bcd47

Browse files
authored
Merge pull request #1149 from Altinity/antalya_25.8_backport_87528
Antalya 25.8 backport 87528
2 parents 482ba61 + 8ecf4aa commit d6bcd47

File tree

3 files changed

+21
-1
lines changed

3 files changed

+21
-1
lines changed
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+	1
2+	raw_blob String
Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+	-- Tags: no-parallel, no-fasttest, no-random-settings
2+	
3+	INSERT INTO FUNCTION s3(
4+	    s3_conn,
5+	    filename='03631',
6+	    format=Parquet,
7+	    partition_strategy='hive',
8+	    partition_columns_in_data_file=1) PARTITION BY (year, country) SELECT 'Brazil' as country, 2025 as year, 1 as id;
9+	
10+	-- distinct because minio isn't cleaned up
11+	SELECT count(distinct year) FROM s3(s3_conn, filename='03631/**.parquet', format=RawBLOB) SETTINGS use_hive_partitioning=1;
12+	
13+	DESCRIBE s3(s3_conn, filename='03631/**.parquet', format=RawBLOB) SETTINGS use_hive_partitioning=1;

src/Storages/prepareReadingFromFormat.cpp

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -234,7 +234,12 @@ ReadFromFormatInfo prepareReadingFromFormat(
     }
 
     /// Create header for InputFormat with columns that will be read from the data.
-    info.format_header = storage_snapshot->getSampleBlockForColumns(info.columns_description.getNamesOfPhysical());
+    for (const auto & column : info.columns_description)
+    {
+        /// Never read hive partition columns from the data file. This fixes https://github.com/ClickHouse/ClickHouse/issues/87515
+        if (!hive_parameters.hive_partition_columns_to_read_from_file_path_map.contains(column.name))
+            info.format_header.insert(ColumnWithTypeAndName{column.type, column.name});
+    }
 
     info.serialization_hints = getSerializationHintsForFileLikeStorage(storage_snapshot->metadata, context);
0 commit comments

Comments
 (0)