Commit 25cc3a4

lidavidm authored and raulcd committed
GH-35932: [Java] Make JDBC test less brittle (#35940)
### Rationale for this change

A JDBC test is brittle because it compares string representations.

### What changes are included in this PR?

Compare values directly.

### Are these changes tested?

N/A

### Are there any user-facing changes?

No.

* Closes: #35932

Authored-by: David Li <[email protected]>
Signed-off-by: Raúl Cumplido <[email protected]>
1 parent 72f3a5c commit 25cc3a4
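
For context, a minimal, hedged sketch of the idea behind the fix (the class and variable names below are illustrative, not part of the patch): rather than pretty-printing a schema to JSON and comparing it against a checked-in string fixture, a test can build the expected org.apache.arrow.vector.types.pojo.Schema in code and compare values with equals, which is insensitive to serialization formatting.

import java.util.Collections;
import java.util.Map;

import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.types.pojo.Schema;

// Illustrative only: shows why value comparison is less brittle than string comparison.
public class SchemaComparisonSketch {
  public static void main(String[] args) {
    Map<String, String> comment = Collections.singletonMap("comment", "Name of record");
    Field name = new Field("NAME",
        new FieldType(/*nullable*/ true, new ArrowType.Utf8(), /*dictionary*/ null, comment),
        Collections.emptyList());

    Schema actual = new Schema(Collections.singletonList(name));
    Schema expected = new Schema(Collections.singletonList(name));

    // Structural equality: compares field names, types, nullability, and metadata,
    // independent of any particular JSON pretty-printing.
    System.out.println(actual.equals(expected)); // true
  }
}

A string fixture, by contrast, breaks whenever the serializer's formatting, key ordering, or defaults change, even though the schema itself is unchanged.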

6 files changed (+79, -174 lines changed)


dev/release/verify-release-candidate.sh

Lines changed: 4 additions & 1 deletion
@@ -563,7 +563,10 @@ test_package_java() {
   show_header "Build and test Java libraries"
 
   # Build and test Java (Requires newer Maven -- I used 3.3.9)
-  maybe_setup_conda maven || exit 1
+  # Pin OpenJDK 17 since OpenJDK 20 is incompatible with our versions
+  # of things like Mockito, and we also can't update Mockito due to
+  # not supporting Java 8 anymore
+  maybe_setup_conda maven openjdk=17.0.3 || exit 1
 
   pushd java
   mvn test

java/adapter/jdbc/src/test/java/org/apache/arrow/adapter/jdbc/JdbcToArrowCommentMetadataTest.java

Lines changed: 70 additions & 10 deletions
@@ -29,6 +29,7 @@
 import java.sql.ResultSetMetaData;
 import java.sql.SQLException;
 import java.sql.Statement;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -37,18 +38,18 @@
 import java.util.Set;
 
 import org.apache.arrow.memory.RootAllocator;
+import org.apache.arrow.vector.types.Types;
+import org.apache.arrow.vector.types.pojo.ArrowType;
+import org.apache.arrow.vector.types.pojo.Field;
+import org.apache.arrow.vector.types.pojo.FieldType;
 import org.apache.arrow.vector.types.pojo.Schema;
-import org.apache.arrow.vector.util.ObjectMapperFactory;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.fasterxml.jackson.databind.ObjectWriter;
-
 public class JdbcToArrowCommentMetadataTest {
 
   private static final String COMMENT = "comment"; //use this metadata key for interoperability with Spark StructType
-  private final ObjectWriter schemaSerializer = ObjectMapperFactory.newObjectMapper().writerWithDefaultPrettyPrinter();
   private Connection conn = null;
 
   /**
@@ -73,26 +74,85 @@ public void tearDown() throws SQLException {
     }
   }
 
+  private static Field field(String name, boolean nullable, ArrowType type, Map<String, String> metadata) {
+    return new Field(name, new FieldType(nullable, type, null, metadata), Collections.emptyList());
+  }
+
+  private static Map<String, String> metadata(String... entries) {
+    if (entries.length % 2 != 0) {
+      throw new IllegalArgumentException("Map must have equal number of keys and values");
+    }
+
+    final Map<String, String> result = new HashMap<>();
+    for (int i = 0; i < entries.length; i += 2) {
+      result.put(entries[i], entries[i + 1]);
+    }
+    return result;
+  }
+
   @Test
   public void schemaComment() throws Exception {
     boolean includeMetadata = false;
-    String schemaJson = schemaSerializer.writeValueAsString(getSchemaWithCommentFromQuery(includeMetadata));
-    String expectedSchema = getExpectedSchema("/h2/expectedSchemaWithComments.json");
-    assertThat(schemaJson).isEqualTo(expectedSchema);
+    Schema schema = getSchemaWithCommentFromQuery(includeMetadata);
+    Schema expectedSchema = new Schema(Arrays.asList(
+        field("ID", false, Types.MinorType.BIGINT.getType(),
+            metadata("comment", "Record identifier")),
+        field("NAME", true, Types.MinorType.VARCHAR.getType(),
+            metadata("comment", "Name of record")),
+        field("COLUMN1", true, Types.MinorType.BIT.getType(),
+            metadata()),
+        field("COLUMNN", true, Types.MinorType.INT.getType(),
+            metadata("comment", "Informative description of columnN"))
+    ), metadata("comment", "This is super special table with valuable data"));
+    assertThat(schema).isEqualTo(expectedSchema);
   }
 
   @Test
   public void schemaCommentWithDatabaseMetadata() throws Exception {
     boolean includeMetadata = true;
-    String schemaJson = schemaSerializer.writeValueAsString(getSchemaWithCommentFromQuery(includeMetadata));
-    String expectedSchema = getExpectedSchema("/h2/expectedSchemaWithCommentsAndJdbcMeta.json");
+    Schema schema = getSchemaWithCommentFromQuery(includeMetadata);
+    Schema expectedSchema = new Schema(Arrays.asList(
+        field("ID", false, Types.MinorType.BIGINT.getType(),
+            metadata(
+                "SQL_CATALOG_NAME", "JDBCTOARROWTEST?CHARACTERENCODING=UTF-8",
+                "SQL_SCHEMA_NAME", "PUBLIC",
+                "SQL_TABLE_NAME", "TABLE1",
+                "SQL_COLUMN_NAME", "ID",
+                "SQL_TYPE", "BIGINT",
+                "comment", "Record identifier"
+            )),
+        field("NAME", true, Types.MinorType.VARCHAR.getType(),
+            metadata(
+                "SQL_CATALOG_NAME", "JDBCTOARROWTEST?CHARACTERENCODING=UTF-8",
+                "SQL_SCHEMA_NAME", "PUBLIC",
+                "SQL_TABLE_NAME", "TABLE1",
+                "SQL_COLUMN_NAME", "NAME",
+                "SQL_TYPE", "VARCHAR",
+                "comment", "Name of record")),
+        field("COLUMN1", true, Types.MinorType.BIT.getType(),
+            metadata(
+                "SQL_CATALOG_NAME", "JDBCTOARROWTEST?CHARACTERENCODING=UTF-8",
+                "SQL_SCHEMA_NAME", "PUBLIC",
+                "SQL_TABLE_NAME", "TABLE1",
+                "SQL_COLUMN_NAME", "COLUMN1",
+                "SQL_TYPE", "BOOLEAN")),
+        field("COLUMNN", true, Types.MinorType.INT.getType(),
+            metadata(
+                "SQL_CATALOG_NAME", "JDBCTOARROWTEST?CHARACTERENCODING=UTF-8",
+                "SQL_SCHEMA_NAME", "PUBLIC",
+                "SQL_TABLE_NAME", "TABLE1",
+                "SQL_COLUMN_NAME", "COLUMNN",
+                "SQL_TYPE", "INTEGER",
+                "comment", "Informative description of columnN"))
+    ), metadata("comment", "This is super special table with valuable data"));
+    assertThat(schema).isEqualTo(expectedSchema);
     /* corresponding Apache Spark DDL after conversion:
        ID BIGINT NOT NULL COMMENT 'Record identifier',
        NAME STRING COMMENT 'Name of record',
        COLUMN1 BOOLEAN,
        COLUMNN INT COMMENT 'Informative description of columnN'
     */
-    assertThat(schemaJson).isEqualTo(expectedSchema);
+    assertThat(schema).isEqualTo(expectedSchema);
   }
 
   private Schema getSchemaWithCommentFromQuery(boolean includeMetadata) throws SQLException {
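
A standalone, hedged restatement of the two helpers above (the class name ExpectedSchemaSketch is hypothetical), showing how they compose into a compact expected-schema definition with no JSON fixture file to keep in sync. Since the project still supports Java 8 (see the OpenJDK pin above), Map.of is unavailable, which is presumably why a varargs metadata(...) builder is used.

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import org.apache.arrow.vector.types.Types;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.types.pojo.Schema;

public class ExpectedSchemaSketch {

  // Field with the given name, nullability, type, and key/value metadata; no children.
  static Field field(String name, boolean nullable, ArrowType type, Map<String, String> metadata) {
    return new Field(name, new FieldType(nullable, type, null, metadata), Collections.emptyList());
  }

  // Varargs key/value pairs -> Map, a Java 8-friendly stand-in for Map.of(...).
  static Map<String, String> metadata(String... entries) {
    if (entries.length % 2 != 0) {
      throw new IllegalArgumentException("Map must have equal number of keys and values");
    }
    Map<String, String> result = new HashMap<>();
    for (int i = 0; i < entries.length; i += 2) {
      result.put(entries[i], entries[i + 1]);
    }
    return result;
  }

  public static void main(String[] args) {
    // The whole expected schema lives in code, so an equality failure points at a real difference.
    Schema expected = new Schema(
        Collections.singletonList(
            field("ID", false, Types.MinorType.BIGINT.getType(), metadata("comment", "Record identifier"))),
        metadata("comment", "This is super special table with valuable data"));
    System.out.println(expected);
  }
}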

java/adapter/jdbc/src/test/resources/h2/expectedSchemaWithComments.json

Lines changed: 0 additions & 51 deletions
This file was deleted.

java/adapter/jdbc/src/test/resources/h2/expectedSchemaWithCommentsAndJdbcMeta.json

Lines changed: 0 additions & 112 deletions
This file was deleted.

java/flight/flight-core/src/test/java/org/apache/arrow/flight/TestBasicOperation.java

Lines changed: 3 additions & 0 deletions
@@ -56,6 +56,8 @@
 import org.apache.arrow.vector.types.pojo.Schema;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.condition.DisabledOnOs;
+import org.junit.jupiter.api.condition.OS;
 
 import com.google.common.base.Charsets;
 import com.google.protobuf.ByteString;
@@ -270,6 +272,7 @@ public void getStream() throws Exception {
 
   /** Ensure the client is configured to accept large messages. */
   @Test
+  @DisabledOnOs(value = {OS.WINDOWS}, disabledReason = "https://github.com/apache/arrow/issues/33237: flaky test")
   public void getStreamLargeBatch() throws Exception {
     test(c -> {
       try (final FlightStream stream = c.getStream(new Ticket(Producer.TICKET_LARGE_BATCH))) {
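
A minimal sketch of the JUnit 5 mechanism used here (hypothetical test class; the real annotation and reason are in the diff above): @DisabledOnOs reports the test as skipped, not failed, on the named operating systems, and disabledReason surfaces the tracking issue in test reports.

import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledOnOs;
import org.junit.jupiter.api.condition.OS;

class DisabledOnOsSketch {

  @Test
  @DisabledOnOs(value = {OS.WINDOWS}, disabledReason = "hypothetical: link the tracking issue here")
  void skippedOnWindowsOnly() {
    // Runs on Linux and macOS runners; reported as skipped on Windows.
  }
}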

java/flight/flight-sql-jdbc-core/src/test/java/org/apache/arrow/driver/jdbc/ArrowFlightPreparedStatementTest.java

Lines changed: 2 additions & 0 deletions
@@ -32,6 +32,7 @@
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
+import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ErrorCollector;
@@ -73,6 +74,7 @@ public void testSimpleQueryNoParameterBinding() throws SQLException {
   }
 
   @Test
+  @Ignore("https://github.com/apache/arrow/issues/34741: flaky test")
   public void testPreparedStatementExecutionOnce() throws SQLException {
     final PreparedStatement statement = connection.prepareStatement(CoreMockedSqlProducers.LEGACY_REGULAR_SQL_CMD);
     // Expect that there is one entry in the map -- {prepared statement action type, invocation count}.
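
For comparison, this JDBC driver test class is still on JUnit 4 (see the org.junit imports above), so the flaky test is skipped unconditionally with @Ignore rather than per-OS; a hedged sketch with a hypothetical class name:

import org.junit.Ignore;
import org.junit.Test;

public class IgnoreSketch {

  @Test
  @Ignore("hypothetical: link the tracking issue here")
  public void flakyUntilFixed() {
    // Never executed; surfaces in reports as skipped with the reason above.
  }
}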
