Skip to content

Commit b888cab

Browse files
committed
could have it all for tests
1 parent c783ebf commit b888cab

File tree

1 file changed

+31
-38
lines changed

1 file changed

+31
-38
lines changed

crates/iceberg/src/writer/base_writer/rolling_writer.rs

Lines changed: 31 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -171,23 +171,40 @@ mod tests {
171171
use crate::writer::tests::check_parquet_data_file;
172172
use crate::writer::{IcebergWriter, IcebergWriterBuilder, RecordBatch};
173173

174+
fn make_test_schema() -> Result<Schema> {
175+
Schema::builder()
176+
.with_schema_id(1)
177+
.with_fields(vec![
178+
NestedField::required(1, "id", Type::Primitive(PrimitiveType::Int)).into(),
179+
NestedField::required(2, "name", Type::Primitive(PrimitiveType::String)).into(),
180+
])
181+
.build()
182+
}
183+
184+
fn make_test_arrow_schema() -> ArrowSchema {
185+
ArrowSchema::new(vec![
186+
Field::new("id", DataType::Int32, false).with_metadata(HashMap::from([(
187+
PARQUET_FIELD_ID_META_KEY.to_string(),
188+
1.to_string(),
189+
)])),
190+
Field::new("name", DataType::Utf8, false).with_metadata(HashMap::from([(
191+
PARQUET_FIELD_ID_META_KEY.to_string(),
192+
2.to_string(),
193+
)])),
194+
])
195+
}
196+
174197
#[tokio::test]
175198
async fn test_rolling_writer_basic() -> Result<()> {
176-
let temp_dir = TempDir::new().unwrap();
177-
let file_io = FileIOBuilder::new_fs_io().build().unwrap();
199+
let temp_dir = TempDir::new()?;
200+
let file_io = FileIOBuilder::new_fs_io().build()?;
178201
let location_gen =
179202
MockLocationGenerator::new(temp_dir.path().to_str().unwrap().to_string());
180203
let file_name_gen =
181204
DefaultFileNameGenerator::new("test".to_string(), None, DataFileFormat::Parquet);
182205

183206
// Create schema
184-
let schema = Schema::builder()
185-
.with_schema_id(1)
186-
.with_fields(vec![
187-
NestedField::required(1, "id", Type::Primitive(PrimitiveType::Int)).into(),
188-
NestedField::required(2, "name", Type::Primitive(PrimitiveType::String)).into(),
189-
])
190-
.build()?;
207+
let schema = make_test_schema()?;
191208

192209
// Create writer builders
193210
let parquet_writer_builder = ParquetWriterBuilder::new(
@@ -209,16 +226,7 @@ mod tests {
209226
let mut writer = rolling_writer_builder.build().await?;
210227

211228
// Create test data
212-
let arrow_schema = ArrowSchema::new(vec![
213-
Field::new("id", DataType::Int32, false).with_metadata(HashMap::from([(
214-
PARQUET_FIELD_ID_META_KEY.to_string(),
215-
1.to_string(),
216-
)])),
217-
Field::new("name", DataType::Utf8, false).with_metadata(HashMap::from([(
218-
PARQUET_FIELD_ID_META_KEY.to_string(),
219-
2.to_string(),
220-
)])),
221-
]);
229+
let arrow_schema = make_test_arrow_schema();
222230

223231
let batch = RecordBatch::try_new(Arc::new(arrow_schema), vec![
224232
Arc::new(Int32Array::from(vec![1, 2, 3])),
@@ -246,21 +254,15 @@ mod tests {
246254

247255
#[tokio::test]
248256
async fn test_rolling_writer_with_rolling() -> Result<()> {
249-
let temp_dir = TempDir::new().unwrap();
250-
let file_io = FileIOBuilder::new_fs_io().build().unwrap();
257+
let temp_dir = TempDir::new()?;
258+
let file_io = FileIOBuilder::new_fs_io().build()?;
251259
let location_gen =
252260
MockLocationGenerator::new(temp_dir.path().to_str().unwrap().to_string());
253261
let file_name_gen =
254262
DefaultFileNameGenerator::new("test".to_string(), None, DataFileFormat::Parquet);
255263

256264
// Create schema
257-
let schema = Schema::builder()
258-
.with_schema_id(1)
259-
.with_fields(vec![
260-
NestedField::required(1, "id", Type::Primitive(PrimitiveType::Int)).into(),
261-
NestedField::required(2, "name", Type::Primitive(PrimitiveType::String)).into(),
262-
])
263-
.build()?;
265+
let schema = make_test_schema()?;
264266

265267
// Create writer builders
266268
let parquet_writer_builder = ParquetWriterBuilder::new(
@@ -282,16 +284,7 @@ mod tests {
282284
let mut writer = rolling_writer_builder.build().await?;
283285

284286
// Create test data
285-
let arrow_schema = ArrowSchema::new(vec![
286-
Field::new("id", DataType::Int32, false).with_metadata(HashMap::from([(
287-
PARQUET_FIELD_ID_META_KEY.to_string(),
288-
1.to_string(),
289-
)])),
290-
Field::new("name", DataType::Utf8, false).with_metadata(HashMap::from([(
291-
PARQUET_FIELD_ID_META_KEY.to_string(),
292-
2.to_string(),
293-
)])),
294-
]);
287+
let arrow_schema = make_test_arrow_schema();
295288

296289
// Create multiple batches to trigger rolling
297290
let batch1 = RecordBatch::try_new(Arc::new(arrow_schema.clone()), vec![

0 commit comments

Comments (0)