|
19 | 19 | package org.apache.parquet.avro; |
20 | 20 |
|
21 | 21 | import static org.apache.parquet.avro.AvroTestUtil.optional; |
| 22 | +import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT32; |
| 23 | +import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.INT64; |
| 24 | +import static org.apache.parquet.schema.Type.Repetition.REQUIRED; |
22 | 25 | import static org.junit.Assert.assertEquals; |
23 | 26 | import static org.junit.Assert.assertNotNull; |
24 | 27 |
|
|
61 | 64 | import org.apache.parquet.conf.ParquetConfiguration; |
62 | 65 | import org.apache.parquet.conf.PlainParquetConfiguration; |
63 | 66 | import org.apache.parquet.example.data.Group; |
| 67 | +import org.apache.parquet.example.data.GroupFactory; |
| 68 | +import org.apache.parquet.example.data.simple.SimpleGroupFactory; |
64 | 69 | import org.apache.parquet.hadoop.ParquetReader; |
65 | 70 | import org.apache.parquet.hadoop.ParquetWriter; |
66 | 71 | import org.apache.parquet.hadoop.api.WriteSupport; |
| 72 | +import org.apache.parquet.hadoop.example.ExampleParquetWriter; |
67 | 73 | import org.apache.parquet.hadoop.example.GroupReadSupport; |
68 | 74 | import org.apache.parquet.hadoop.util.HadoopCodecs; |
69 | 75 | import org.apache.parquet.io.InputFile; |
70 | 76 | import org.apache.parquet.io.LocalInputFile; |
71 | 77 | import org.apache.parquet.io.LocalOutputFile; |
72 | 78 | import org.apache.parquet.io.api.Binary; |
73 | 79 | import org.apache.parquet.io.api.RecordConsumer; |
| 80 | +import org.apache.parquet.schema.LogicalTypeAnnotation; |
| 81 | +import org.apache.parquet.schema.MessageType; |
74 | 82 | import org.apache.parquet.schema.MessageTypeParser; |
| 83 | +import org.apache.parquet.schema.PrimitiveType; |
75 | 84 | import org.junit.Assert; |
76 | 85 | import org.junit.Rule; |
77 | 86 | import org.junit.Test; |
@@ -400,6 +409,68 @@ public void testFixedDecimalValues() throws Exception { |
400 | 409 | Assert.assertEquals("Content should match", expected, records); |
401 | 410 | } |
402 | 411 |
|
| 412 | + @Test |
| 413 | + public void testDecimalIntegerValues() throws Exception { |
| 414 | + |
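| | + // Write raw unscaled decimal values into INT32/INT64 columns and verify they are |
| | + // read back through the Avro data model as BigDecimal with the declared scale. |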
| 415 | + File file = temp.newFile("test_decimal_integer_values.parquet"); |
| 416 | + file.delete(); // newFile() creates an empty file; delete it so ParquetWriter can create it fresh |
| 417 | + Path path = new Path(file.toString()); |
| 418 | + |
| 419 | + MessageType parquetSchema = new MessageType( |
| 420 | + "test_decimal_integer_values", |
| 421 | + new PrimitiveType(REQUIRED, INT32, "decimal_age") |
| 422 | + .withLogicalTypeAnnotation(LogicalTypeAnnotation.decimalType(2, 5)), // scale = 2, precision = 5 |
| 423 | + new PrimitiveType(REQUIRED, INT64, "decimal_salary") |
| 424 | + .withLogicalTypeAnnotation(LogicalTypeAnnotation.decimalType(1, 10))); // scale = 1, precision = 10 |
| 425 | + |
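| | + // ExampleParquetWriter writes example Group records directly against the Parquet schema above. |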
| 426 | + try (ParquetWriter<Group> writer = |
| 427 | + ExampleParquetWriter.builder(path).withType(parquetSchema).build()) { |
| 428 | + |
| 429 | + GroupFactory factory = new SimpleGroupFactory(parquetSchema); |
| 430 | + |
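| | + // Only the unscaled integers are stored; the decimal annotation supplies the scale on read. |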
| 431 | + Group group1 = factory.newGroup(); |
| 432 | + group1.add("decimal_age", 2534); |
| 433 | + group1.add("decimal_salary", 234L); |
| 434 | + writer.write(group1); |
| 435 | + |
| 436 | + Group group2 = factory.newGroup(); |
| 437 | + group2.add("decimal_age", 4267); |
| 438 | + group2.add("decimal_salary", 1203L); |
| 439 | + writer.write(group2); |
| 440 | + } |
| 441 | + |
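| | + // Avro data model with DecimalConversion registered, so decimal columns are read as BigDecimal. |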
| 442 | + GenericData decimalSupport = new GenericData(); |
| 443 | + decimalSupport.addLogicalTypeConversion(new Conversions.DecimalConversion()); |
| 444 | + |
| 445 | + List<GenericRecord> records = Lists.newArrayList(); |
| 446 | + try (ParquetReader<GenericRecord> reader = AvroParquetReader.<GenericRecord>builder(path) |
| 447 | + .withDataModel(decimalSupport) |
| 448 | + .build()) { |
| 449 | + GenericRecord rec; |
| 450 | + while ((rec = reader.read()) != null) { |
| 451 | + records.add(rec); |
| 452 | + } |
| 453 | + } |
| 454 | + |
| 455 | + Assert.assertEquals("Should read 2 records", 2, records.size()); |
| 456 | + |
| 457 | + // Decimals read from the INT32 column |
| 458 | + Object firstAge = records.get(0).get("decimal_age"); |
| 459 | + Object secondAge = records.get(1).get("decimal_age"); |
| 460 | + |
| 461 | + Assert.assertTrue("Should be BigDecimal, but is " + firstAge.getClass(), firstAge instanceof BigDecimal); |
| 462 | + Assert.assertEquals("Should be 25.34, but is " + firstAge, new BigDecimal("25.34"), firstAge); |
| 463 | + Assert.assertEquals("Should be 42.67, but is " + secondAge, new BigDecimal("42.67"), secondAge); |
| 464 | + |
| 465 | + // Decimals read from the INT64 column |
| 466 | + Object firstSalary = records.get(0).get("decimal_salary"); |
| 467 | + Object secondSalary = records.get(1).get("decimal_salary"); |
| 468 | + |
| 469 | + Assert.assertTrue("Should be BigDecimal, but is " + firstSalary.getClass(), firstSalary instanceof BigDecimal); |
| 470 | + Assert.assertEquals("Should be 23.4, but is " + firstSalary, new BigDecimal("23.4"), firstSalary); |
| 471 | + Assert.assertEquals("Should be 120.3, but is " + secondSalary, new BigDecimal("120.3"), secondSalary); |
| 472 | + } |
| 473 | + |
403 | 474 | @Test |
404 | 475 | public void testAll() throws Exception { |
405 | 476 | Schema schema = |
|