Commit b30195c

remove test accidentally brought in from apache#1778.
1 parent c953837 commit b30195c

File tree

2 files changed: +4 -120 lines changed

crates/iceberg/src/arrow/caching_delete_file_loader.rs

Lines changed: 0 additions & 116 deletions
@@ -788,120 +788,4 @@ mod tests {
         assert_eq!(data_col.value(1), "d");
         assert_eq!(data_col.value(2), "g");
     }
-
-    /// Test loading a FileScanTask with BOTH positional and equality deletes.
-    /// Verifies the fix for the inverted condition that caused "Missing predicate for equality delete file" errors.
-    #[tokio::test]
-    async fn test_load_deletes_with_mixed_types() {
-        use crate::scan::FileScanTask;
-        use crate::spec::{DataFileFormat, Schema};
-
-        let tmp_dir = TempDir::new().unwrap();
-        let table_location = tmp_dir.path();
-        let file_io = FileIO::from_path(table_location.as_os_str().to_str().unwrap())
-            .unwrap()
-            .build()
-            .unwrap();
-
-        // Create the data file schema
-        let data_file_schema = Arc::new(
-            Schema::builder()
-                .with_fields(vec![
-                    crate::spec::NestedField::optional(
-                        2,
-                        "y",
-                        crate::spec::Type::Primitive(crate::spec::PrimitiveType::Long),
-                    )
-                    .into(),
-                    crate::spec::NestedField::optional(
-                        3,
-                        "z",
-                        crate::spec::Type::Primitive(crate::spec::PrimitiveType::Long),
-                    )
-                    .into(),
-                ])
-                .build()
-                .unwrap(),
-        );
-
-        // Write positional delete file
-        let positional_delete_schema = crate::arrow::delete_filter::tests::create_pos_del_schema();
-        let file_path_values =
-            vec![format!("{}/data-1.parquet", table_location.to_str().unwrap()); 4];
-        let file_path_col = Arc::new(StringArray::from_iter_values(&file_path_values));
-        let pos_col = Arc::new(Int64Array::from_iter_values(vec![0i64, 1, 2, 3]));
-
-        let positional_deletes_to_write =
-            RecordBatch::try_new(positional_delete_schema.clone(), vec![
-                file_path_col,
-                pos_col,
-            ])
-            .unwrap();
-
-        let props = WriterProperties::builder()
-            .set_compression(Compression::SNAPPY)
-            .build();
-
-        let pos_del_path = format!("{}/pos-del-mixed.parquet", table_location.to_str().unwrap());
-        let file = File::create(&pos_del_path).unwrap();
-        let mut writer = ArrowWriter::try_new(
-            file,
-            positional_deletes_to_write.schema(),
-            Some(props.clone()),
-        )
-        .unwrap();
-        writer.write(&positional_deletes_to_write).unwrap();
-        writer.close().unwrap();
-
-        // Write equality delete file
-        let eq_delete_path = setup_write_equality_delete_file_1(table_location.to_str().unwrap());
-
-        // Create FileScanTask with BOTH positional and equality deletes
-        let pos_del = FileScanTaskDeleteFile {
-            file_path: pos_del_path,
-            file_type: DataContentType::PositionDeletes,
-            partition_spec_id: 0,
-            equality_ids: None,
-        };
-
-        let eq_del = FileScanTaskDeleteFile {
-            file_path: eq_delete_path.clone(),
-            file_type: DataContentType::EqualityDeletes,
-            partition_spec_id: 0,
-            equality_ids: Some(vec![2, 3]), // Only use field IDs that exist in both schemas
-        };
-
-        let file_scan_task = FileScanTask {
-            start: 0,
-            length: 0,
-            record_count: None,
-            data_file_path: format!("{}/data-1.parquet", table_location.to_str().unwrap()),
-            data_file_format: DataFileFormat::Parquet,
-            schema: data_file_schema.clone(),
-            project_field_ids: vec![2, 3],
-            predicate: None,
-            deletes: vec![pos_del, eq_del],
-            partition: None,
-            partition_spec: None,
-            name_mapping: None,
-        };
-
-        // Load the deletes - should handle both types without error
-        let delete_file_loader = CachingDeleteFileLoader::new(file_io.clone(), 10);
-        let delete_filter = delete_file_loader
-            .load_deletes(&file_scan_task.deletes, file_scan_task.schema_ref())
-            .await
-            .unwrap()
-            .unwrap();
-
-        // Verify both delete types can be processed together
-        let result = delete_filter
-            .build_equality_delete_predicate(&file_scan_task)
-            .await;
-        assert!(
-            result.is_ok(),
-            "Failed to build equality delete predicate: {:?}",
-            result.err()
-        );
-    }
 }
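
For context, the removed test exercised a scan task carrying both delete types at once. In Iceberg terms, positional deletes identify rows by (data file path, row position), while equality deletes identify rows whose column values match rows in the delete file. The sketch below illustrates only that distinction; it uses hypothetical stand-in types rather than the crate's FileScanTask/CachingDeleteFileLoader machinery, so treat it as a conceptual aid, not iceberg-rust code.

```rust
use std::collections::{HashMap, HashSet};

/// Stand-in row: (data file path, position within the file, value of a field "y").
type Row = (String, i64, i64);

/// Apply both delete types: positional deletes keyed by (file path, position),
/// and equality deletes expressed as a set of "y" values to drop.
fn apply_deletes(
    rows: &[Row],
    positional: &HashMap<String, HashSet<i64>>,
    equality_y: &HashSet<i64>,
) -> Vec<Row> {
    rows.iter()
        .filter(|(path, pos, y)| {
            let pos_deleted = positional.get(path).map_or(false, |set| set.contains(pos));
            let eq_deleted = equality_y.contains(y);
            !pos_deleted && !eq_deleted
        })
        .cloned()
        .collect()
}

fn main() {
    let rows = vec![
        ("data-1.parquet".to_string(), 0, 10),
        ("data-1.parquet".to_string(), 1, 20),
        ("data-1.parquet".to_string(), 2, 30),
    ];
    // Positional delete removes row 0; equality delete removes any row with y == 30.
    let positional = HashMap::from([("data-1.parquet".to_string(), HashSet::from([0i64]))]);
    let equality_y = HashSet::from([30i64]);

    let remaining = apply_deletes(&rows, &positional, &equality_y);
    assert_eq!(remaining, vec![("data-1.parquet".to_string(), 1, 20)]);
}
```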

crates/iceberg/src/arrow/record_batch_transformer.rs

Lines changed: 4 additions & 4 deletions
@@ -537,9 +537,8 @@ impl RecordBatchTransformer {
                 // - When name mapping is present, field ID matches alone aren't sufficient
                 // - We verify the field NAME also matches to ensure it's the correct field
                 // - If names don't match, we treat the field as "not present" and use name mapping
-                let field_by_id = field_id_to_source_schema_map
-                    .get(field_id)
-                    .and_then(|(source_field, source_index)| {
+                let field_by_id = field_id_to_source_schema_map.get(field_id).and_then(
+                    |(source_field, source_index)| {
                         let name_matches = source_field.name() == &iceberg_field.name;
 
                         if name_mapping.is_some() && !name_matches {
@@ -560,7 +559,8 @@ impl RecordBatchTransformer {
                                 source_index: *source_index,
                             })
                         }
-                    });
+                    },
+                );
 
                 if let Some(source) = field_by_id {
                     source

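The hunks above only restructure the layout of the same .get(field_id).and_then(...) expression; behavior is unchanged. As a rough illustration of the rule the surrounding comments describe (trust a field-ID match only when the field name also matches while a name mapping is present), here is a hedged, self-contained sketch. The names SourceField and lookup_by_id are invented for the example and are not part of the crate.

```rust
use std::collections::HashMap;

/// Hypothetical stand-in for a field in the source (file) schema.
#[derive(Debug)]
struct SourceField {
    name: String,
}

/// Resolve a field by ID, but only trust the ID match when the name also matches,
/// mirroring the "field ID alone isn't sufficient under name mapping" rule above.
fn lookup_by_id<'a>(
    by_id: &'a HashMap<i32, (SourceField, usize)>,
    field_id: i32,
    expected_name: &str,
    name_mapping_present: bool,
) -> Option<(&'a SourceField, usize)> {
    by_id.get(&field_id).and_then(|(source_field, source_index)| {
        let name_matches = source_field.name == expected_name;
        if name_mapping_present && !name_matches {
            // Treat the field as "not present"; the caller falls back to name mapping.
            None
        } else {
            Some((source_field, *source_index))
        }
    })
}

fn main() {
    let by_id = HashMap::from([(2, (SourceField { name: "y".to_string() }, 0))]);

    // ID and name both match: the lookup succeeds.
    assert!(lookup_by_id(&by_id, 2, "y", true).is_some());
    // ID matches but the name does not, and a name mapping is present: rejected.
    assert!(lookup_by_id(&by_id, 2, "renamed_y", true).is_none());
}
```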
0 commit comments
