Skip to content

Commit 18f64c4

Browse files
alamb and MazterQyou
authored and committed
Fix clippy by avoiding deprecated functions in chrono (apache#4189)
* Fix clippy by avoiding deprecated functions in chrono * Fixup
1 parent 0c3f567 commit 18f64c4

File tree

3 files changed

+22
-6
lines changed

3 files changed

+22
-6
lines changed

datafusion/core/src/datasource/listing/helpers.rs

Lines changed: 19 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -348,7 +348,9 @@ fn batches_to_paths(batches: &[RecordBatch]) -> Vec<PartitionedFile> {
348348
(0..batch.num_rows()).map(move |row| PartitionedFile {
349349
file_meta: FileMeta {
350350
last_modified: match modified_array.is_null(row) {
351-
false => Some(Utc.timestamp_millis(modified_array.value(row))),
351+
false => {
352+
Some(to_timestamp_millis(modified_array.value(row)).unwrap())
353+
}
352354
true => None,
353355
},
354356
sized_file: SizedFile {
@@ -366,6 +368,20 @@ fn batches_to_paths(batches: &[RecordBatch]) -> Vec<PartitionedFile> {
366368
.collect()
367369
}
368370

371+
fn to_timestamp_millis(v: i64) -> Result<chrono::DateTime<Utc>> {
372+
match Utc.timestamp_millis_opt(v) {
373+
chrono::LocalResult::None => Err(DataFusionError::Execution(format!(
374+
"Can not convert {} to UTC millisecond timestamp",
375+
v
376+
))),
377+
chrono::LocalResult::Single(v) => Ok(v),
378+
chrono::LocalResult::Ambiguous(_, _) => Err(DataFusionError::Execution(format!(
379+
"Ambiguous timestamp when converting {} to UTC millisecond timestamp",
380+
v
381+
))),
382+
}
383+
}
384+
369385
/// Extract the partition values for the given `file_path` (in the given `table_path`)
370386
/// associated to the partitions defined by `table_partition_cols`
371387
fn parse_partitions_for_path<'a>(
@@ -649,7 +665,7 @@ mod tests {
649665
path: String::from("mybucket/tablepath/part1=val1/file.parquet"),
650666
size: 100,
651667
},
652-
last_modified: Some(Utc.timestamp_millis(1634722979123)),
668+
last_modified: Some(to_timestamp_millis(1634722979123).unwrap()),
653669
},
654670
FileMeta {
655671
sized_file: SizedFile {
@@ -683,7 +699,7 @@ mod tests {
683699
path: String::from("mybucket/tablepath/part1=val1/file.parquet"),
684700
size: 100,
685701
},
686-
last_modified: Some(Utc.timestamp_millis(1634722979123)),
702+
last_modified: Some(to_timestamp_millis(1634722979123).unwrap()),
687703
},
688704
FileMeta {
689705
sized_file: SizedFile {

datafusion/core/tests/parquet_pruning.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ async fn prune_date64() {
120120
let date = "2020-01-02"
121121
.parse::<chrono::NaiveDate>()
122122
.unwrap()
123-
.and_time(chrono::NaiveTime::from_hms(0, 0, 0));
123+
.and_time(chrono::NaiveTime::from_hms_opt(0, 0, 0).unwrap());
124124
let date = ScalarValue::Date64(Some(date.timestamp_millis()));
125125

126126
let output = ContextWithParquet::new(Scenario::Dates)
@@ -801,7 +801,7 @@ fn make_date_batch(offset: Duration) -> RecordBatch {
801801
let t = t
802802
.parse::<chrono::NaiveDate>()
803803
.unwrap()
804-
.and_time(chrono::NaiveTime::from_hms(0, 0, 0));
804+
.and_time(chrono::NaiveTime::from_hms_opt(0, 0, 0).unwrap());
805805
let t = t + offset;
806806
t.timestamp_millis()
807807
})

datafusion/core/tests/sql/timestamp.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -780,7 +780,7 @@ async fn group_by_timestamp_millis() -> Result<()> {
780780
),
781781
Field::new("count", DataType::Int32, false),
782782
]));
783-
let base_dt = Utc.ymd(2018, 7, 1).and_hms(6, 0, 0); // 2018-Jul-01 06:00
783+
let base_dt = Utc.with_ymd_and_hms(2018, 7, 1, 6, 0, 0).unwrap(); // 2018-Jul-01 06:00
784784
let hour1 = Duration::hours(1);
785785
let timestamps = vec![
786786
base_dt.timestamp_millis(),

0 commit comments

Comments (0)