
Commit 1bbe13f

Enable clone_on_ref_ptr clippy lint on core crate (#13338)
* Enable clone_on_ref_ptr clippy lint on physical-expr-common crate
* cargo fmt
* remove explicit type
* information_schema
* listing_schema
* memory
* avro_to_arrow.reader
* cte_worktable
* clone_on_ref_ptr
* type infer
* fmt
* except test
* after rebase main branch
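For context: clone_on_ref_ptr flags .clone() calls made directly on an Rc or Arc, where it is ambiguous at the call site whether the clone is a cheap reference-count bump or a deep copy of the pointee. The fix applied throughout this commit is the mechanical rewrite to Arc::clone(&value). A minimal, self-contained sketch of the pattern (the Schema type here is illustrative, not the DataFusion one):

    use std::sync::Arc;

    struct Schema {
        fields: Vec<String>,
    }

    fn main() {
        let schema = Arc::new(Schema { fields: vec!["a".into(), "b".into()] });

        // Before: flagged by clippy::clone_on_ref_ptr. Reads like a deep copy of
        // Schema, but only increments the Arc's reference count.
        let before = schema.clone();

        // After: the preferred spelling makes the pointer-level clone explicit.
        let after = Arc::clone(&schema);

        // Both handles point at the same allocation.
        assert!(Arc::ptr_eq(&before, &after));
        assert_eq!(Arc::strong_count(&schema), 3);
    }

Behavior is identical either way; the lint is purely about making the cost and the intent readable.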
1 parent 5cc1223 commit 1bbe13f

Some content is hidden: large commits collapse part of the diff by default, so only a subset of the 46 changed files is shown below.

46 files changed, with 270 additions and 238 deletions
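The attribute or manifest change that actually turns the lint on is not among the hunks shown below (most of the 46 files are hidden). Two common ways to enable it, sketched here as an assumption rather than a statement of how this commit does it:

    // Option 1: crate-level attribute, e.g. at the top of datafusion/core/src/lib.rs
    #![deny(clippy::clone_on_ref_ptr)]

    // Option 2: a Cargo.toml [lints] table (Rust 1.74+), at crate or workspace level:
    //
    //   [lints.clippy]
    //   clone_on_ref_ptr = "deny"

With either in place, cargo clippy reports every remaining .clone() on an Rc/Arc in the crate as an error.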

datafusion/core/src/catalog_common/information_schema.rs

Lines changed: 22 additions & 22 deletions
@@ -493,7 +493,7 @@ impl SchemaProvider for InformationSchemaProvider {
         };
 
         Ok(Some(Arc::new(
-            StreamingTable::try_new(table.schema().clone(), vec![table]).unwrap(),
+            StreamingTable::try_new(Arc::clone(table.schema()), vec![table]).unwrap(),
         )))
     }
 
@@ -526,7 +526,7 @@ impl InformationSchemaTables {
             schema_names: StringBuilder::new(),
             table_names: StringBuilder::new(),
             table_types: StringBuilder::new(),
-            schema: self.schema.clone(),
+            schema: Arc::clone(&self.schema),
         }
     }
 }
@@ -540,7 +540,7 @@ impl PartitionStream for InformationSchemaTables {
         let mut builder = self.builder();
         let config = self.config.clone();
         Box::pin(RecordBatchStreamAdapter::new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             // TODO: Stream this
             futures::stream::once(async move {
                 config.make_tables(&mut builder).await?;
@@ -582,7 +582,7 @@ impl InformationSchemaTablesBuilder {
 
     fn finish(&mut self) -> RecordBatch {
         RecordBatch::try_new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             vec![
                 Arc::new(self.catalog_names.finish()),
                 Arc::new(self.schema_names.finish()),
@@ -618,7 +618,7 @@ impl InformationSchemaViews {
             schema_names: StringBuilder::new(),
             table_names: StringBuilder::new(),
             definitions: StringBuilder::new(),
-            schema: self.schema.clone(),
+            schema: Arc::clone(&self.schema),
         }
     }
 }
@@ -632,7 +632,7 @@ impl PartitionStream for InformationSchemaViews {
         let mut builder = self.builder();
         let config = self.config.clone();
         Box::pin(RecordBatchStreamAdapter::new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             // TODO: Stream this
             futures::stream::once(async move {
                 config.make_views(&mut builder).await?;
@@ -670,7 +670,7 @@ impl InformationSchemaViewBuilder {
 
     fn finish(&mut self) -> RecordBatch {
         RecordBatch::try_new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             vec![
                 Arc::new(self.catalog_names.finish()),
                 Arc::new(self.schema_names.finish()),
@@ -733,7 +733,7 @@ impl InformationSchemaColumns {
             numeric_scales: UInt64Builder::with_capacity(default_capacity),
             datetime_precisions: UInt64Builder::with_capacity(default_capacity),
             interval_types: StringBuilder::new(),
-            schema: self.schema.clone(),
+            schema: Arc::clone(&self.schema),
         }
     }
 }
@@ -747,7 +747,7 @@ impl PartitionStream for InformationSchemaColumns {
         let mut builder = self.builder();
         let config = self.config.clone();
         Box::pin(RecordBatchStreamAdapter::new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             // TODO: Stream this
             futures::stream::once(async move {
                 config.make_columns(&mut builder).await?;
@@ -876,7 +876,7 @@ impl InformationSchemaColumnsBuilder {
 
     fn finish(&mut self) -> RecordBatch {
         RecordBatch::try_new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             vec![
                 Arc::new(self.catalog_names.finish()),
                 Arc::new(self.schema_names.finish()),
@@ -921,7 +921,7 @@ impl InformationSchemata {
 
     fn builder(&self) -> InformationSchemataBuilder {
         InformationSchemataBuilder {
-            schema: self.schema.clone(),
+            schema: Arc::clone(&self.schema),
             catalog_name: StringBuilder::new(),
             schema_name: StringBuilder::new(),
             schema_owner: StringBuilder::new(),
@@ -967,7 +967,7 @@ impl InformationSchemataBuilder {
 
     fn finish(&mut self) -> RecordBatch {
         RecordBatch::try_new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             vec![
                 Arc::new(self.catalog_name.finish()),
                 Arc::new(self.schema_name.finish()),
@@ -991,7 +991,7 @@ impl PartitionStream for InformationSchemata {
         let mut builder = self.builder();
         let config = self.config.clone();
         Box::pin(RecordBatchStreamAdapter::new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             // TODO: Stream this
             futures::stream::once(async move {
                 config.make_schemata(&mut builder).await;
@@ -1023,7 +1023,7 @@ impl InformationSchemaDfSettings {
             names: StringBuilder::new(),
             values: StringBuilder::new(),
             descriptions: StringBuilder::new(),
-            schema: self.schema.clone(),
+            schema: Arc::clone(&self.schema),
         }
     }
 }
@@ -1037,7 +1037,7 @@ impl PartitionStream for InformationSchemaDfSettings {
         let config = self.config.clone();
         let mut builder = self.builder();
         Box::pin(RecordBatchStreamAdapter::new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             // TODO: Stream this
             futures::stream::once(async move {
                 // create a mem table with the names of tables
@@ -1064,7 +1064,7 @@ impl InformationSchemaDfSettingsBuilder {
 
     fn finish(&mut self) -> RecordBatch {
         RecordBatch::try_new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             vec![
                 Arc::new(self.names.finish()),
                 Arc::new(self.values.finish()),
@@ -1102,7 +1102,7 @@ impl InformationSchemaRoutines {
 
     fn builder(&self) -> InformationSchemaRoutinesBuilder {
         InformationSchemaRoutinesBuilder {
-            schema: self.schema.clone(),
+            schema: Arc::clone(&self.schema),
             specific_catalog: StringBuilder::new(),
             specific_schema: StringBuilder::new(),
             specific_name: StringBuilder::new(),
@@ -1161,7 +1161,7 @@ impl InformationSchemaRoutinesBuilder {
 
     fn finish(&mut self) -> RecordBatch {
         RecordBatch::try_new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             vec![
                 Arc::new(self.specific_catalog.finish()),
                 Arc::new(self.specific_schema.finish()),
@@ -1189,7 +1189,7 @@ impl PartitionStream for InformationSchemaRoutines {
         let config = self.config.clone();
        let mut builder = self.builder();
         Box::pin(RecordBatchStreamAdapter::new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             futures::stream::once(async move {
                 config.make_routines(
                     ctx.scalar_functions(),
@@ -1229,7 +1229,7 @@ impl InformationSchemaParameters {
 
     fn builder(&self) -> InformationSchemaParametersBuilder {
         InformationSchemaParametersBuilder {
-            schema: self.schema.clone(),
+            schema: Arc::clone(&self.schema),
             specific_catalog: StringBuilder::new(),
             specific_schema: StringBuilder::new(),
             specific_name: StringBuilder::new(),
@@ -1295,7 +1295,7 @@ impl InformationSchemaParametersBuilder {
 
     fn finish(&mut self) -> RecordBatch {
         RecordBatch::try_new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             vec![
                 Arc::new(self.specific_catalog.finish()),
                 Arc::new(self.specific_schema.finish()),
@@ -1321,7 +1321,7 @@ impl PartitionStream for InformationSchemaParameters {
         let config = self.config.clone();
         let mut builder = self.builder();
         Box::pin(RecordBatchStreamAdapter::new(
-            self.schema.clone(),
+            Arc::clone(&self.schema),
             futures::stream::once(async move {
                 config.make_parameters(
                     ctx.scalar_functions(),

datafusion/core/src/catalog_common/listing_schema.rs

Lines changed: 3 additions & 2 deletions
@@ -148,7 +148,8 @@ impl ListingSchemaProvider {
                         },
                     )
                     .await?;
-                let _ = self.register_table(table_name.to_string(), provider.clone())?;
+                let _ =
+                    self.register_table(table_name.to_string(), Arc::clone(&provider))?;
             }
         }
         Ok(())
@@ -190,7 +191,7 @@ impl SchemaProvider for ListingSchemaProvider {
         self.tables
             .lock()
             .expect("Can't lock tables")
-            .insert(name, table.clone());
+            .insert(name, Arc::clone(&table));
         Ok(Some(table))
     }

datafusion/core/src/catalog_common/memory.rs

Lines changed: 3 additions & 3 deletions
@@ -67,7 +67,7 @@ impl CatalogProviderList for MemoryCatalogProviderList {
     }
 
     fn catalog(&self, name: &str) -> Option<Arc<dyn CatalogProvider>> {
-        self.catalogs.get(name).map(|c| c.value().clone())
+        self.catalogs.get(name).map(|c| Arc::clone(c.value()))
     }
 }
 
@@ -102,7 +102,7 @@ impl CatalogProvider for MemoryCatalogProvider {
     }
 
     fn schema(&self, name: &str) -> Option<Arc<dyn SchemaProvider>> {
-        self.schemas.get(name).map(|s| s.value().clone())
+        self.schemas.get(name).map(|s| Arc::clone(s.value()))
     }
 
     fn register_schema(
@@ -175,7 +175,7 @@ impl SchemaProvider for MemorySchemaProvider {
         &self,
         name: &str,
     ) -> datafusion_common::Result<Option<Arc<dyn TableProvider>>, DataFusionError> {
-        Ok(self.tables.get(name).map(|table| table.value().clone()))
+        Ok(self.tables.get(name).map(|table| Arc::clone(table.value())))
     }
 
     fn register_table(
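Two spellings of the same fix appear in these hunks: Arc::clone(&self.schema) when the field is an owned Arc, and Arc::clone(c.value()) or Arc::clone(table.schema()) when the accessor already hands back a borrowed &Arc. A small sketch of the distinction (the Entry type and its value() accessor are made up for this example, standing in for the map-entry guards used above):

    use std::sync::Arc;

    struct Entry {
        inner: Arc<String>,
    }

    impl Entry {
        // Accessor that returns a borrowed Arc, like the value() calls in memory.rs.
        fn value(&self) -> &Arc<String> {
            &self.inner
        }
    }

    fn main() {
        let entry = Entry {
            inner: Arc::new("catalog".to_string()),
        };

        // Owned field: take a reference explicitly.
        let a = Arc::clone(&entry.inner);

        // Accessor already yields &Arc<String>: pass the borrow straight through.
        let b = Arc::clone(entry.value());

        assert!(Arc::ptr_eq(&a, &b));
    }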

datafusion/core/src/datasource/avro_to_arrow/reader.rs

Lines changed: 2 additions & 2 deletions
@@ -142,7 +142,7 @@ impl<'a, R: Read> Reader<'a, R> {
         Ok(Self {
             array_reader: AvroArrowArrayReader::try_new(
                 reader,
-                schema.clone(),
+                Arc::clone(&schema),
                 projection,
             )?,
             schema,
@@ -153,7 +153,7 @@ impl<'a, R: Read> Reader<'a, R> {
     /// Returns the schema of the reader, useful for getting the schema without reading
     /// record batches
     pub fn schema(&self) -> SchemaRef {
-        self.schema.clone()
+        Arc::clone(&self.schema)
     }
 }

datafusion/core/src/datasource/cte_worktable.rs

Lines changed: 2 additions & 2 deletions
@@ -77,7 +77,7 @@ impl TableProvider for CteWorkTable {
     }
 
     fn schema(&self) -> SchemaRef {
-        self.table_schema.clone()
+        Arc::clone(&self.table_schema)
     }
 
     fn table_type(&self) -> TableType {
@@ -94,7 +94,7 @@ impl TableProvider for CteWorkTable {
         // TODO: pushdown filters and limits
         Ok(Arc::new(WorkTableExec::new(
             self.name.clone(),
-            self.table_schema.clone(),
+            Arc::clone(&self.table_schema),
         )))
     }

datafusion/core/src/datasource/default_table_source.rs

Lines changed: 1 addition & 1 deletion
@@ -96,7 +96,7 @@ pub fn source_as_provider(
         .as_any()
         .downcast_ref::<DefaultTableSource>()
     {
-        Some(source) => Ok(source.table_provider.clone()),
+        Some(source) => Ok(Arc::clone(&source.table_provider)),
         _ => internal_err!("TableSource was not DefaultTableSource"),
     }
 }

datafusion/core/src/datasource/empty.rs

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ impl TableProvider for EmptyTable {
     }
 
     fn schema(&self) -> SchemaRef {
-        self.schema.clone()
+        Arc::clone(&self.schema)
     }
 
     fn table_type(&self) -> TableType {

datafusion/core/src/datasource/file_format/arrow.rs

Lines changed: 3 additions & 3 deletions
@@ -186,7 +186,7 @@ impl FileFormat for ArrowFormat {
             return not_impl_err!("Overwrites are not implemented yet for Arrow format");
         }
 
-        let sink_schema = conf.output_schema().clone();
+        let sink_schema = Arc::clone(conf.output_schema());
         let sink = Arc::new(ArrowFileSink::new(conf));
 
         Ok(Arc::new(DataSinkExec::new(
@@ -229,7 +229,7 @@ impl ArrowFileSink {
                     .collect::<Vec<_>>(),
             ))
         } else {
-            self.config.output_schema().clone()
+            Arc::clone(self.config.output_schema())
         }
     }
 }
@@ -302,7 +302,7 @@ impl DataSink for ArrowFileSink {
             let mut object_store_writer = create_writer(
                 FileCompressionType::UNCOMPRESSED,
                 &path,
-                object_store.clone(),
+                Arc::clone(&object_store),
             )
             .await?;
             file_write_tasks.spawn(async move {

datafusion/core/src/datasource/file_format/csv.rs

Lines changed: 1 addition & 1 deletion
@@ -417,7 +417,7 @@ impl FileFormat for CsvFormat {
 
         let writer_options = CsvWriterOptions::try_from(&options)?;
 
-        let sink_schema = conf.output_schema().clone();
+        let sink_schema = Arc::clone(conf.output_schema());
         let sink = Arc::new(CsvSink::new(conf, writer_options));
 
         Ok(Arc::new(DataSinkExec::new(

datafusion/core/src/datasource/file_format/json.rs

Lines changed: 1 addition & 1 deletion
@@ -264,7 +264,7 @@ impl FileFormat for JsonFormat {
 
         let writer_options = JsonWriterOptions::try_from(&self.options)?;
 
-        let sink_schema = conf.output_schema().clone();
+        let sink_schema = Arc::clone(conf.output_schema());
         let sink = Arc::new(JsonSink::new(conf, writer_options));
 
         Ok(Arc::new(DataSinkExec::new(
