Skip to content

Commit 86cd806

Browse files
committed
Parameterize the tests on numRecord (default=10000) and rowsPerPage
(default=2000)
1 parent bf01d89 commit 86cd806

File tree

1 file changed

+16
-9
lines changed

1 file changed

+16
-9
lines changed

parquet-hadoop/src/test/java/org/apache/parquet/hadoop/rewrite/ParquetRewriterTest.java

Lines changed: 16 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -107,8 +107,8 @@
107107
@RunWith(Parameterized.class)
108108
public class ParquetRewriterTest {
109109

110-
private final int numRecord = 10000;
111-
private final Configuration conf = new Configuration();
110+
private final int numRecord;
111+
private final Configuration conf;
112112
private final ParquetConfiguration parquetConf = new PlainParquetConfiguration();
113113
private final ParquetProperties.WriterVersion writerVersion;
114114
private final IndexCache.CacheStrategy indexCacheStrategy;
@@ -122,21 +122,28 @@ public class ParquetRewriterTest {
122122
private final EncryptionTestFile gzipEncryptionTestFileWithoutBloomFilterColumn;
123123
private final EncryptionTestFile uncompressedEncryptionTestFileWithoutBloomFilterColumn;
124124

125-
@Parameterized.Parameters(name = "WriterVersion = {0}, IndexCacheStrategy = {1}, UsingHadoop = {2}")
125+
@Parameterized.Parameters(name = "WriterVersion = {0}, IndexCacheStrategy = {1}, UsingHadoop = {2}, numRecord = {3}, rowsPerPage = {4}")
126126
public static Object[][] parameters() {
127+
final int DefaultNumRecord = 10000;
128+
final int DefaultRowsPerPage = DefaultNumRecord / 5;
127129
return new Object[][] {
128-
{"v1", "NONE", true},
129-
{"v1", "PREFETCH_BLOCK", true},
130-
{"v2", "PREFETCH_BLOCK", true},
131-
{"v2", "PREFETCH_BLOCK", false}
130+
{"v1", "NONE", true, DefaultNumRecord, DefaultRowsPerPage},
131+
{"v1", "PREFETCH_BLOCK", true, DefaultNumRecord, DefaultRowsPerPage},
132+
{"v2", "PREFETCH_BLOCK", true, DefaultNumRecord, DefaultRowsPerPage},
133+
{"v2", "PREFETCH_BLOCK", false, DefaultNumRecord, DefaultRowsPerPage}
132134
};
133135
}
134136

135-
public ParquetRewriterTest(String writerVersion, String indexCacheStrategy, boolean usingHadoop)
137+
public ParquetRewriterTest(String writerVersion, String indexCacheStrategy, boolean _usingHadoop, int _numRecord, int rowsPerPage)
136138
throws IOException {
137139
this.writerVersion = ParquetProperties.WriterVersion.fromString(writerVersion);
138140
this.indexCacheStrategy = IndexCache.CacheStrategy.valueOf(indexCacheStrategy);
139-
this.usingHadoop = usingHadoop;
141+
this.usingHadoop = _usingHadoop;
142+
this.numRecord = _numRecord;
143+
144+
Configuration _conf = new Configuration();
145+
_conf.set("parquet.page.row.count.limit", Integer.toString(rowsPerPage));
146+
this.conf = _conf;
140147

141148
MessageType testSchema = createSchema();
142149
this.gzipEncryptionTestFileWithoutBloomFilterColumn = new TestFileBuilder(conf, testSchema)

0 commit comments

Comments (0)