@@ -4,7 +4,7 @@ use std::sync::Arc;
 use super::schema::postgres_to_delta_schema;
 use deltalake::arrow::record_batch::RecordBatch;
 use deltalake::{DeltaOps, DeltaResult, DeltaTable, DeltaTableBuilder, open_table};
-use etl::types::{TableSchema, TableRow, Cell};
+use etl::types::{Cell, TableRow, TableSchema};

 /// Client for connecting to Delta Lake tables.
 #[derive(Clone)]
@@ -246,7 +246,7 @@ impl DeltaLakeClient {
                     // Malformed composite key, skip
                     return "false".to_string();
                 }
-
+
                 let conditions: Vec<String> = pk_column_names
                     .iter()
                     .zip(key_parts.iter())
@@ -258,7 +258,7 @@ impl DeltaLakeClient {
                         )
                     })
                     .collect();
-
+
                 format!("({})", conditions.join(" AND "))
             })
             .filter(|cond| cond != "false") // Remove malformed conditions
@@ -386,7 +386,7 @@ impl DeltaLakeClient {
             Cell::Bytes(b) => {
                 let hex_string: String = b.iter().map(|byte| format!("{:02x}", byte)).collect();
                 format!("\\x{}", hex_string)
-            },
+            }
             Cell::Array(_) => "[ARRAY]".to_string(), // Arrays shouldn't be PKs
         }
     }
@@ -414,7 +414,7 @@ impl DeltaLakeClient {
         let mut parts = Vec::new();
         let mut current_part = String::new();
         let mut chars = composite_key.chars().peekable();
-
+
         while let Some(ch) = chars.next() {
             if ch == ':' {
                 if chars.peek() == Some(&':') {
@@ -437,12 +437,12 @@ impl DeltaLakeClient {
                 current_part.push(ch);
             }
         }
-
+
         // Add the final part
         if !current_part.is_empty() || !parts.is_empty() {
            parts.push(current_part);
         }
-
+
         parts
     }

@@ -472,7 +472,7 @@ impl DeltaLakeClient {
 #[cfg(test)]
 mod tests {
     use super::*;
-    use etl::types::{ColumnSchema, TableName, Type, Cell, TableId, TableRow, TableSchema};
+    use etl::types::{Cell, ColumnSchema, TableId, TableName, TableRow, TableSchema, Type};

     fn create_test_schema() -> TableSchema {
         TableSchema::new(
@@ -486,10 +486,7 @@ mod tests {
     }

     fn create_test_row(id: i32, name: &str) -> TableRow {
-        TableRow::new(vec![
-            Cell::I32(id),
-            Cell::String(name.to_string()),
-        ])
+        TableRow::new(vec![Cell::I32(id), Cell::String(name.to_string())])
     }

     #[test]
@@ -509,7 +506,7 @@ mod tests {
         let mut schema = create_test_schema();
         // Make both columns primary keys
         schema.column_schemas[1].primary = true;
-
+
         let row = create_test_row(42, "test");

         let result = client.extract_primary_key(&row, &schema);
@@ -523,10 +520,10 @@ mod tests {
         let mut keys = HashSet::new();
         keys.insert("42".to_string());
         keys.insert("43".to_string());
-
+
         let pk_columns = vec!["id".to_string()];
         let predicate = client.build_pk_predicate(&keys, &pk_columns);
-
+
         // Should be `id` IN ('42', '43') - order may vary
         assert!(predicate.contains("`id` IN"));
         assert!(predicate.contains("'42'"));
@@ -539,10 +536,10 @@ mod tests {
         let mut keys = HashSet::new();
         keys.insert("42::test".to_string());
         keys.insert("43::hello".to_string());
-
+
         let pk_columns = vec!["id".to_string(), "name".to_string()];
         let predicate = client.build_pk_predicate(&keys, &pk_columns);
-
+
         // Should be (`id` = '42' AND `name` = 'test') OR (`id` = '43' AND `name` = 'hello')
         assert!(predicate.contains("`id` = '42' AND `name` = 'test'"));
         assert!(predicate.contains("`id` = '43' AND `name` = 'hello'"));
@@ -554,7 +551,7 @@ mod tests {
         let client = DeltaLakeClient::new(None);
         let keys = HashSet::new();
         let pk_columns = vec!["id".to_string()];
-
+
         let predicate = client.build_pk_predicate(&keys, &pk_columns);
         assert_eq!(predicate, "false");
     }
@@ -564,20 +561,26 @@ mod tests {
         let parts = vec!["value::with::delimiter".to_string(), "normal".to_string()];
         let composite = DeltaLakeClient::join_composite_key(&parts);
         assert_eq!(composite, "value::::with::::delimiter::normal");
-
+
         let split_parts = DeltaLakeClient::split_composite_key(&composite);
         assert_eq!(split_parts, parts);
     }

     #[test]
     fn test_escape_identifier() {
         assert_eq!(DeltaLakeClient::escape_identifier("normal"), "`normal`");
-        assert_eq!(DeltaLakeClient::escape_identifier("with`backtick"), "`with``backtick`");
+        assert_eq!(
+            DeltaLakeClient::escape_identifier("with`backtick"),
+            "`with``backtick`"
+        );
     }

     #[test]
     fn test_escape_string_literal() {
         assert_eq!(DeltaLakeClient::escape_string_literal("normal"), "'normal'");
-        assert_eq!(DeltaLakeClient::escape_string_literal("with'quote"), "'with''quote'");
+        assert_eq!(
+            DeltaLakeClient::escape_string_literal("with'quote"),
+            "'with''quote'"
+        );
     }
 }
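
For context, a minimal illustrative sketch (not the code changed in this PR) of the delimiter-escaping rule that `test_composite_key_roundtrip` exercises: a literal `::` inside a key part is doubled to `::::` before the parts are joined with `::`, which is the encoding the character-by-character scan in `split_composite_key` reverses.

```rust
// Illustrative sketch only -- assumes the escaping rule implied by the test,
// not the actual implementation touched by this formatting-only PR.
fn join_composite_key(parts: &[String]) -> String {
    parts
        .iter()
        .map(|part| part.replace("::", "::::")) // escape literal delimiters
        .collect::<Vec<_>>()
        .join("::")
}

fn main() {
    let parts = vec!["value::with::delimiter".to_string(), "normal".to_string()];
    // Matches the expectation asserted in test_composite_key_roundtrip above.
    assert_eq!(
        join_composite_key(&parts),
        "value::::with::::delimiter::normal"
    );
}
```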