Skip to content

Commit 183a862

Browse files
committed
fix some
Signed-off-by: Ruihang Xia <[email protected]>
1 parent a87250f commit 183a862

File tree

17 files changed

+35
-34
lines changed

17 files changed

+35
-34
lines changed

datafusion-examples/examples/expr_api.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -85,7 +85,7 @@ async fn main() -> Result<()> {
8585
boundary_analysis_and_selectivity_demo()?;
8686

8787
// See how boundary analysis works for `AND` & `OR` conjunctions.
88-
boundary_analysis_in_conjuctions_demo()?;
88+
boundary_analysis_in_conjunctions_demo()?;
8989

9090
// See how to determine the data types of expressions
9191
expression_type_demo()?;
@@ -351,7 +351,7 @@ fn boundary_analysis_and_selectivity_demo() -> Result<()> {
351351

352352
/// This function shows how to think about and leverage the analysis API
353353
/// to infer boundaries in `AND` & `OR` conjunctions.
354-
fn boundary_analysis_in_conjuctions_demo() -> Result<()> {
354+
fn boundary_analysis_in_conjunctions_demo() -> Result<()> {
355355
// Let us consider the more common case of AND & OR conjunctions.
356356
//
357357
// age > 18 AND age <= 25

datafusion/catalog/src/information_schema.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -810,7 +810,7 @@ impl InformationSchemaColumnsBuilder {
810810
) {
811811
use DataType::*;
812812

813-
// Note: append_value is actually infallable.
813+
// Note: append_value is actually infallible.
814814
self.catalog_names.append_value(catalog_name);
815815
self.schema_names.append_value(schema_name);
816816
self.table_names.append_value(table_name);

datafusion/common/src/test_util.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@ macro_rules! assert_batches_sorted_eq {
158158
/// Is a macro so test error
159159
/// messages are on the same line as the failure;
160160
///
161-
/// Both arguments must be convertable into Strings ([`Into`]<[`String`]>)
161+
/// Both arguments must be convertible into Strings ([`Into`]<[`String`]>)
162162
#[macro_export]
163163
macro_rules! assert_contains {
164164
($ACTUAL: expr, $EXPECTED: expr) => {
@@ -181,7 +181,7 @@ macro_rules! assert_contains {
181181
/// Is a macro so test error
182182
/// messages are on the same line as the failure;
183183
///
184-
/// Both arguments must be convertable into Strings ([`Into`]<[`String`]>)
184+
/// Both arguments must be convertible into Strings ([`Into`]<[`String`]>)
185185
#[macro_export]
186186
macro_rules! assert_not_contains {
187187
($ACTUAL: expr, $UNEXPECTED: expr) => {

datafusion/core/src/datasource/listing/table.rs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -221,8 +221,8 @@ impl ListingTableConfig {
221221
.unwrap_or(FileCompressionType::UNCOMPRESSED);
222222

223223
if file_compression_type.is_compressed() {
224-
let splitted2 = exts.next().unwrap_or("");
225-
Ok((splitted2.to_string(), Some(split.to_string())))
224+
let split2 = exts.next().unwrap_or("");
225+
Ok((split2.to_string(), Some(split.to_string())))
226226
} else {
227227
Ok((split.to_string(), None))
228228
}
@@ -1107,7 +1107,7 @@ impl ListingTable {
11071107

11081108
// Expressions can be used for partition pruning if they can be evaluated using
11091109
// only the partition columns and there are partition columns.
1110-
fn can_be_evaluted_for_partition_pruning(
1110+
fn can_be_evaluated_for_partition_pruning(
11111111
partition_column_names: &[&str],
11121112
expr: &Expr,
11131113
) -> bool {
@@ -1156,7 +1156,7 @@ impl TableProvider for ListingTable {
11561156
// pushdown it to TableScan, otherwise, `unhandled` pruning predicates will be generated
11571157
let (partition_filters, filters): (Vec<_>, Vec<_>) =
11581158
filters.iter().cloned().partition(|filter| {
1159-
can_be_evaluted_for_partition_pruning(&table_partition_col_names, filter)
1159+
can_be_evaluated_for_partition_pruning(&table_partition_col_names, filter)
11601160
});
11611161

11621162
// We should not limit the number of partitioned files to scan if there are filters and limit
@@ -1245,7 +1245,7 @@ impl TableProvider for ListingTable {
12451245
filters
12461246
.iter()
12471247
.map(|filter| {
1248-
if can_be_evaluted_for_partition_pruning(&partition_column_names, filter)
1248+
if can_be_evaluated_for_partition_pruning(&partition_column_names, filter)
12491249
{
12501250
// if filter can be handled by partition pruning, it is exact
12511251
return Ok(TableProviderFilterPushDown::Exact);

datafusion/core/src/datasource/physical_plan/parquet.rs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -332,7 +332,7 @@ mod tests {
332332
let metric = get_value(&metrics, "pushdown_rows_pruned");
333333
assert_eq!(metric, 3, "Expected all rows to be pruned");
334334

335-
// If we excplicitly allow nulls the rest of the predicate should work
335+
// If we explicitly allow nulls the rest of the predicate should work
336336
let filter = col("c2").is_null().and(col("c1").eq(lit(1_i32)));
337337
let rt = RoundTrip::new()
338338
.with_table_schema(table_schema.clone())
@@ -390,7 +390,7 @@ mod tests {
390390
let metric = get_value(&metrics, "pushdown_rows_pruned");
391391
assert_eq!(metric, 3, "Expected all rows to be pruned");
392392

393-
// If we excplicitly allow nulls the rest of the predicate should work
393+
// If we explicitly allow nulls the rest of the predicate should work
394394
let filter = col("c2").is_null().and(col("c1").eq(lit(1_i32)));
395395
let rt = RoundTrip::new()
396396
.with_table_schema(table_schema.clone())
@@ -452,7 +452,7 @@ mod tests {
452452
let metric = get_value(&metrics, "pushdown_rows_pruned");
453453
assert_eq!(metric, 3, "Expected all rows to be pruned");
454454

455-
// If we excplicitly allow nulls the rest of the predicate should work
455+
// If we explicitly allow nulls the rest of the predicate should work
456456
let filter = col("c2").is_null().and(col("c1").eq(lit(1_i32)));
457457
let rt = RoundTrip::new()
458458
.with_table_schema(table_schema.clone())
@@ -514,7 +514,7 @@ mod tests {
514514
let metric = get_value(&metrics, "pushdown_rows_pruned");
515515
assert_eq!(metric, 3, "Expected all rows to be pruned");
516516

517-
// If we excplicitly allow nulls the rest of the predicate should work
517+
// If we explicitly allow nulls the rest of the predicate should work
518518
let filter = col("c2").is_null().and(col("c3").eq(lit(7_i32)));
519519
let rt = RoundTrip::new()
520520
.with_table_schema(table_schema.clone())

datafusion/optimizer/src/analyzer/type_coercion.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2414,9 +2414,9 @@ mod test {
24142414
let map_type_entries = DataType::Map(Arc::new(fields), false);
24152415

24162416
let fields = Field::new("key_value", DataType::Struct(struct_fields), false);
2417-
let may_type_cutsom = DataType::Map(Arc::new(fields), false);
2417+
let may_type_custom = DataType::Map(Arc::new(fields), false);
24182418

2419-
let expr = col("a").eq(cast(col("a"), may_type_cutsom));
2419+
let expr = col("a").eq(cast(col("a"), may_type_custom));
24202420
let empty = empty_with_type(map_type_entries);
24212421
let plan = LogicalPlan::Projection(Projection::try_new(vec![expr], empty)?);
24222422

datafusion/physical-expr/src/async_scalar_function.rs

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -192,13 +192,14 @@ impl AsyncFuncExpr {
192192
);
193193
}
194194

195-
let datas = ColumnarValue::values_to_arrays(&result_batches)?
195+
let data_vec = ColumnarValue::values_to_arrays(&result_batches)?
196196
.iter()
197197
.map(|b| b.to_data())
198198
.collect::<Vec<_>>();
199-
let total_len = datas.iter().map(|d| d.len()).sum();
200-
let mut mutable = MutableArrayData::new(datas.iter().collect(), false, total_len);
201-
datas.iter().enumerate().for_each(|(i, data)| {
199+
let total_len = data_vec.iter().map(|d| d.len()).sum();
200+
let mut mutable =
201+
MutableArrayData::new(data_vec.iter().collect(), false, total_len);
202+
data_vec.iter().enumerate().for_each(|(i, data)| {
202203
mutable.extend(i, 0, data.len());
203204
});
204205
let array_ref = make_array(mutable.freeze());

datafusion/physical-expr/src/equivalence/properties/mod.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -315,7 +315,7 @@ impl EquivalenceProperties {
315315
self.oeq_class.extend(orderings);
316316
// Normalize given orderings to update the cache:
317317
self.oeq_cache.normal_cls.extend(normal_orderings);
318-
// TODO: If no ordering is found to be redunant during extension, we
318+
// TODO: If no ordering is found to be redundant during extension, we
319319
// can use a shortcut algorithm to update the leading map.
320320
self.oeq_cache.update_map();
321321
}

datafusion/physical-expr/src/expressions/case.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -413,7 +413,7 @@ impl CaseExpr {
413413
fn expr_or_expr(&self, batch: &RecordBatch) -> Result<ColumnarValue> {
414414
let return_type = self.data_type(&batch.schema())?;
415415

416-
// evalute when condition on batch
416+
// evaluate when condition on batch
417417
let when_value = self.when_then_expr[0].0.evaluate(batch)?;
418418
let when_value = when_value.into_array(batch.num_rows())?;
419419
let when_value = as_boolean_array(&when_value).map_err(|e| {

datafusion/physical-expr/src/schema_rewriter.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ use crate::expressions::{self, CastExpr, Column};
3636
/// against the physical schema of the file being scanned. It allows for handling
3737
/// differences between logical and physical schemas, such as type mismatches or missing columns.
3838
///
39-
/// You can create a custom implemention of this trait to handle specific rewriting logic.
39+
/// You can create a custom implementation of this trait to handle specific rewriting logic.
4040
/// For example, to fill in missing columns with default values instead of nulls:
4141
///
4242
/// ```rust
@@ -253,7 +253,7 @@ impl<'a> DefaultPhysicalExprAdapterRewriter<'a> {
253253
physical_field
254254
} else {
255255
// A completely unknown column that doesn't exist in either schema!
256-
// This should probably never be hit unless something upstream broke, but nontheless it's better
256+
// This should probably never be hit unless something upstream broke, but nonetheless it's better
257257
// for us to return a handleable error than to panic / do something unexpected.
258258
return Err(e.into());
259259
}

0 commit comments

Comments
 (0)