
Commit f637c23

youngsofun and sundy-li authored

chore: rm `parquet_rs` mod level and suffix. (#17923)

* chore: rm `parquet_rs` mod level and suffix.
* refactor: it test to unit test.

Co-authored-by: sundyli <[email protected]>

1 parent d3d4b86 commit f637c23
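For downstream code this commit is a pure rename: the `parquet_rs` module becomes `parquet`, and the `RS` infix is dropped from type names (`ParquetRSTable` -> `ParquetTable`, `ParquetRSPruner` -> `ParquetPruner`, `ParquetRSReaderBuilder` -> `ParquetReaderBuilder`, `ParquetRSFullReader` -> `ParquetWholeFileReader`, and the `ParquetPart::ParquetFile` variant becomes `ParquetPart::File`). A minimal before/after import sketch; every path is taken verbatim from the hunks below, while the grouping itself is illustrative:

// Before this commit:
// use databend_common_storage::parquet_rs::read_metadata_async;
// use databend_common_storages_parquet::ParquetRSPruner;
// use databend_common_storages_parquet::ParquetRSReaderBuilder;
// use databend_common_storages_parquet::ParquetRSTable;

// After this commit:
use databend_common_storage::parquet::read_metadata_async;
use databend_common_storages_parquet::ParquetPruner;
use databend_common_storages_parquet::ParquetReaderBuilder;
use databend_common_storages_parquet::ParquetTable;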

Note: large commits have some content hidden by default; only a subset of the 53 changed files is shown below.

53 files changed: +335 -383 lines

src/common/storage/src/lib.rs
Lines changed: 3 additions & 3 deletions

@@ -55,9 +55,9 @@ mod column_node;
 pub use column_node::ColumnNode;
 pub use column_node::ColumnNodes;
 
-pub mod parquet_rs;
-pub use parquet_rs::read_metadata_async;
-pub use parquet_rs::read_parquet_schema_async_rs;
+pub mod parquet;
+pub use parquet::read_metadata_async;
+pub use parquet::read_parquet_schema_async_rs;
 
 mod stage;
 pub use stage::init_stage_operator;
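The old `crate::parquet_rs` path is removed outright here rather than aliased. Had a transitional alias been wanted (it is not part of this commit), the module could also have been re-exported under its former name; a hypothetical sketch:

pub mod parquet;
// Hypothetical compatibility shim, NOT in this commit: keeps old
// `parquet_rs::...` paths compiling while downstream callers migrate.
pub use self::parquet as parquet_rs;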

src/common/storage/src/parquet_rs.rs renamed to src/common/storage/src/parquet.rs
Lines changed: 2 additions & 2 deletions

@@ -274,8 +274,8 @@ mod tests {
     use databend_common_expression::TableSchema;
     use parquet::arrow::ArrowSchemaConverter;
 
-    use crate::parquet_rs::build_parquet_schema_tree;
-    use crate::parquet_rs::ParquetSchemaTreeNode;
+    use crate::parquet::build_parquet_schema_tree;
+    use crate::parquet::ParquetSchemaTreeNode;
 
     #[test]
     fn test_build_parquet_schema_tree() {

src/query/catalog/src/plan/projection.rs
Lines changed: 2 additions & 2 deletions

@@ -18,8 +18,8 @@ use std::fmt::Formatter;
 use databend_common_exception::Result;
 use databend_common_expression::FieldIndex;
 use databend_common_expression::TableSchema;
-use databend_common_storage::parquet_rs::build_parquet_schema_tree;
-use databend_common_storage::parquet_rs::traverse_parquet_schema_tree;
+use databend_common_storage::parquet::build_parquet_schema_tree;
+use databend_common_storage::parquet::traverse_parquet_schema_tree;
 use databend_common_storage::ColumnNode;
 use databend_common_storage::ColumnNodes;
 use parquet::arrow::ProjectionMask;

src/query/service/src/sessions/query_ctx.rs
Lines changed: 3 additions & 3 deletions

@@ -110,7 +110,7 @@ use databend_common_storages_fuse::FuseTable;
 use databend_common_storages_fuse::TableContext;
 use databend_common_storages_iceberg::IcebergTable;
 use databend_common_storages_orc::OrcTable;
-use databend_common_storages_parquet::ParquetRSTable;
+use databend_common_storages_parquet::ParquetTable;
 use databend_common_storages_result_cache::ResultScan;
 use databend_common_storages_stage::StageTable;
 use databend_common_storages_stream::stream_table::StreamTable;
@@ -603,7 +603,7 @@ impl TableContext for QueryContext {
             DataSourceInfo::StageSource(stage_info) => {
                 self.build_external_by_table_info(stage_info, plan.tbl_args.clone())
             }
-            DataSourceInfo::ParquetSource(table_info) => ParquetRSTable::from_info(table_info),
+            DataSourceInfo::ParquetSource(table_info) => ParquetTable::from_info(table_info),
             DataSourceInfo::ResultScanSource(table_info) => ResultScan::from_info(table_info),
             DataSourceInfo::ORCSource(table_info) => OrcTable::from_info(table_info),
         }
@@ -1643,7 +1643,7 @@ impl TableContext for QueryContext {
             read_options = read_options.with_do_prewhere(false);
         }
 
-        ParquetRSTable::create(
+        ParquetTable::create(
             stage_info.clone(),
             files_info,
             read_options,

src/query/service/tests/it/parquet_rs/prune_pages.rs
Lines changed: 2 additions & 2 deletions

@@ -18,7 +18,7 @@ use databend_common_base::base::tokio;
 use databend_common_catalog::plan::ParquetReadOptions;
 use databend_common_expression::FunctionContext;
 use databend_common_expression::TableSchema;
-use databend_common_storages_parquet::ParquetRSPruner;
+use databend_common_storages_parquet::ParquetPruner;
 use parquet::arrow::arrow_reader::ArrowReaderMetadata;
 use parquet::arrow::arrow_reader::ArrowReaderOptions;
 use parquet::arrow::arrow_reader::RowSelection;
@@ -50,7 +50,7 @@ async fn test_batch(batches: &[(Scenario, &str, RowSelection)]) {
     let schema = TableSchema::try_from(arrow_schema.as_ref()).unwrap();
     let leaf_fields = Arc::new(schema.leaf_fields());
 
-    let pruner = ParquetRSPruner::try_create(
+    let pruner = ParquetPruner::try_create(
         FunctionContext::default(),
         Arc::new(schema),
         leaf_fields,

src/query/service/tests/it/parquet_rs/prune_row_groups.rs
Lines changed: 2 additions & 2 deletions

@@ -18,7 +18,7 @@ use databend_common_base::base::tokio;
 use databend_common_catalog::plan::ParquetReadOptions;
 use databend_common_expression::FunctionContext;
 use databend_common_expression::TableSchema;
-use databend_common_storages_parquet::ParquetRSPruner;
+use databend_common_storages_parquet::ParquetPruner;
 
 use super::data::make_test_file_rg;
 use super::data::Scenario;
@@ -51,7 +51,7 @@ async fn test_impl_batch(args: &[(Scenario, &str, Vec<usize>)], prune: bool) {
     let schema = TableSchema::try_from(arrow_schema.as_ref()).unwrap();
     let leaf_fields = Arc::new(schema.leaf_fields());
 
-    let pruner = ParquetRSPruner::try_create(
+    let pruner = ParquetPruner::try_create(
         FunctionContext::default(),
         Arc::new(schema),
         leaf_fields,

src/query/storages/delta/src/table.rs
Lines changed: 5 additions & 5 deletions

@@ -44,8 +44,8 @@ use databend_common_pipeline_core::Pipeline;
 use databend_common_storage::init_operator;
 use databend_common_storages_parquet::ParquetFilePart;
 use databend_common_storages_parquet::ParquetPart;
-use databend_common_storages_parquet::ParquetRSPruner;
-use databend_common_storages_parquet::ParquetRSReaderBuilder;
+use databend_common_storages_parquet::ParquetPruner;
+use databend_common_storages_parquet::ParquetReaderBuilder;
 use databend_storages_common_pruner::partition_prunner::FetchPartitionScalars;
 use databend_storages_common_pruner::partition_prunner::PartitionPruner;
 use databend_storages_common_table_meta::table::OPT_KEY_ENGINE_META;
@@ -221,7 +221,7 @@ impl DeltaTable {
             read_options = read_options.with_do_prewhere(false);
         }
 
-        let pruner = ParquetRSPruner::try_create(
+        let pruner = ParquetPruner::try_create(
             ctx.get_function_context()?,
             table_schema.clone(),
             leaf_fields,
@@ -248,7 +248,7 @@ impl DeltaTable {
             None
         };
         let mut builder =
-            ParquetRSReaderBuilder::create(ctx.clone(), Arc::new(op), table_schema, arrow_schema)?
+            ParquetReaderBuilder::create(ctx.clone(), Arc::new(op), table_schema, arrow_schema)?
                 .with_options(read_options)
                 .with_push_downs(push_downs.as_ref())
                 .with_pruner(Some(pruner))
@@ -337,7 +337,7 @@ impl DeltaTable {
                 let partition_values = get_partition_values(add, &partition_fields)?;
                 Ok(Arc::new(Box::new(DeltaPartInfo {
                     partition_values,
-                    data: ParquetPart::ParquetFile(
+                    data: ParquetPart::File(
                         ParquetFilePart {
                             file: add.path.clone(),
                             compressed_size: add.size as u64,
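The delta hunks above (and the matching hive_table.rs hunks further down) exercise the renamed APIs in the same pruner-plus-builder sequence. A condensed sketch for reference: argument names are exactly those in the diff, while the trailing arguments of `try_create` and the error propagation around it are not visible in the hunks and are assumed.

let pruner = ParquetPruner::try_create(
    ctx.get_function_context()?,
    table_schema.clone(),
    leaf_fields,
    // ... remaining arguments not shown in the diff
)?;
let mut builder =
    ParquetReaderBuilder::create(ctx.clone(), Arc::new(op), table_schema, arrow_schema)?
        .with_options(read_options)
        .with_push_downs(push_downs.as_ref())
        .with_pruner(Some(pruner));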

src/query/storages/delta/src/table_source.rs
Lines changed: 4 additions & 4 deletions

@@ -35,7 +35,7 @@ use databend_common_pipeline_core::processors::Processor;
 use databend_common_pipeline_core::processors::ProcessorPtr;
 use databend_common_storages_parquet::ParquetFileReader;
 use databend_common_storages_parquet::ParquetPart;
-use databend_common_storages_parquet::ParquetRSFullReader;
+use databend_common_storages_parquet::ParquetWholeFileReader;
 use parquet::arrow::async_reader::ParquetRecordBatchStream;
 
 use crate::partition::DeltaPartInfo;
@@ -52,7 +52,7 @@ pub struct DeltaTableSource {
     ctx: Arc<dyn TableContext>,
 
     // Used to read parquet file.
-    parquet_reader: Arc<ParquetRSFullReader>,
+    parquet_reader: Arc<ParquetWholeFileReader>,
 
     // Used to insert partition_block_entries to data block
     // FieldIndex is the index in the output_schema
@@ -73,7 +73,7 @@ impl DeltaTableSource {
         ctx: Arc<dyn TableContext>,
         output: Arc<OutputPort>,
         output_schema: DataSchemaRef,
-        parquet_reader: Arc<ParquetRSFullReader>,
+        parquet_reader: Arc<ParquetWholeFileReader>,
         partition_fields: Vec<TableField>,
     ) -> Result<ProcessorPtr> {
         let output_partition_columns = output_schema
@@ -172,7 +172,7 @@ impl Processor for DeltaTableSource {
         } else if let Some(part) = self.ctx.get_partition() {
             let part = DeltaPartInfo::from_part(&part)?;
             match &part.data {
-                ParquetPart::ParquetFile(f) => {
+                ParquetPart::File(f) => {
                     let partition_fields = self
                         .partition_fields
                         .iter()

src/query/storages/fuse/src/io/read/agg_index/agg_index_reader_parquet.rs
Lines changed: 1 addition & 1 deletion

@@ -15,7 +15,7 @@
 use databend_common_catalog::plan::PartInfoPtr;
 use databend_common_exception::Result;
 use databend_common_expression::DataBlock;
-use databend_common_storage::parquet_rs::read_metadata_sync;
+use databend_common_storage::parquet::read_metadata_sync;
 use databend_common_storage::read_metadata_async;
 use databend_storages_common_io::ReadSettings;
 use log::debug;

src/query/storages/hive/hive/src/hive_table.rs
Lines changed: 4 additions & 4 deletions

@@ -51,8 +51,8 @@ use databend_common_pipeline_sources::SyncSource;
 use databend_common_pipeline_sources::SyncSourcer;
 use databend_common_storage::init_operator;
 use databend_common_storage::DataOperator;
-use databend_common_storages_parquet::ParquetRSPruner;
-use databend_common_storages_parquet::ParquetRSReaderBuilder;
+use databend_common_storages_parquet::ParquetPruner;
+use databend_common_storages_parquet::ParquetReaderBuilder;
 use databend_storages_common_pruner::partition_prunner::PartitionPruner;
 use databend_storages_common_table_meta::meta::SnapshotId;
 use databend_storages_common_table_meta::meta::TableMetaTimestamps;
@@ -156,7 +156,7 @@ impl HiveTable {
             read_options = read_options.with_do_prewhere(false);
         }
 
-        let pruner = ParquetRSPruner::try_create(
+        let pruner = ParquetPruner::try_create(
             ctx.get_function_context()?,
             table_schema.clone(),
             leaf_fields,
@@ -190,7 +190,7 @@ impl HiveTable {
             None
         };
         let mut builder =
-            ParquetRSReaderBuilder::create(ctx.clone(), Arc::new(op), table_schema, arrow_schema)?
+            ParquetReaderBuilder::create(ctx.clone(), Arc::new(op), table_schema, arrow_schema)?
                 .with_options(read_options)
                 .with_push_downs(push_downs.as_ref())
                 .with_pruner(Some(pruner))
