
Commit 5f3b0b7

Update test and pom to support jdk17.

1 parent 289bf28

8 files changed: +30 −12 lines

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-kafka/src/main/java/org/apache/flink/cdc/connectors/kafka/json/canal/CanalJsonSerializationSchema.java

Lines changed: 4 additions & 1 deletion

@@ -146,7 +146,10 @@ public byte[] serialize(Event event) {
                 reuseGenericRowData.setField(
                         5,
                         new GenericArrayData(
-                                jsonSerializers.get(dataChangeEvent.tableId()).getSchema().primaryKeys()
+                                jsonSerializers
+                                        .get(dataChangeEvent.tableId())
+                                        .getSchema()
+                                        .primaryKeys()
                                         .stream()
                                         .map(StringData::fromString)
                                         .toArray()));

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/pom.xml

Lines changed: 1 addition & 1 deletion

@@ -32,7 +32,7 @@ limitations under the License.
         <paimon.version>1.2.0</paimon.version>
         <hadoop.version>2.8.5</hadoop.version>
         <hive.version>2.3.9</hive.version>
-        <mockito.version>3.4.6</mockito.version>
+        <mockito.version>3.12.4</mockito.version>
     </properties>

     <dependencies>
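
A plausible reading of this bump (the commit message does not spell it out): Mockito 3.4.6 bundles a Byte Buddy release that predates Java 17 class files, so mock creation fails at instrumentation time on a 17 JVM, while 3.12.4 ships a Byte Buddy that understands them. A minimal, hypothetical smoke test of the kind this module's suite depends on, not code from this commit:

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import java.util.List;

    // Hypothetical smoke test: creating any mock triggers Byte Buddy
    // bytecode generation, which is where the older bundled version
    // broke on a Java 17 JVM.
    public class MockitoJdk17Smoke {
        public static void main(String[] args) {
            @SuppressWarnings("unchecked")
            List<String> keys = mock(List.class);
            when(keys.get(0)).thenReturn("pk");
            System.out.println(keys.get(0)); // prints "pk" once mocking works
        }
    }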

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/v2/PaimonWriter.java

Lines changed: 5 additions & 4 deletions

@@ -109,16 +109,17 @@ public PaimonWriter(
     public Collection<MultiTableCommittable> prepareCommit() {
         long startTime = System.currentTimeMillis();
         List<MultiTableCommittable> committables =
-                writes.entrySet()
-                        .parallelStream()
+                writes.entrySet().parallelStream()
                         .flatMap(
                                 entry -> {
                                     try {
                                         // here we set it to lastCheckpointId+1 to
                                         // avoid prepareCommit the same checkpointId with the first
                                         // round.
-                                        return entry.getValue()
-                                                .prepareCommit(false, lastCheckpointId + 1).stream()
+                                        return entry
+                                                .getValue()
+                                                .prepareCommit(false, lastCheckpointId + 1)
+                                                .stream()
                                                 .map(
                                                         committable ->
                                                                 MultiTableCommittable

flink-cdc-connect/flink-cdc-source-connectors/flink-connector-db2-cdc/src/main/java/io/debezium/connector/db2/Db2Connection.java

Lines changed: 2 additions & 1 deletion

@@ -399,7 +399,8 @@ public Table getTableSchemaFromChangeTable(Db2ChangeTable changeTable) throws SQ
         // final List<Column> columns = columnEditors.subList(CHANGE_TABLE_DATA_COLUMN_OFFSET,
         // columnEditors.size() - 1).stream()
         final List<Column> columns =
-                columnEditors.subList(CHANGE_TABLE_DATA_COLUMN_OFFSET, columnEditors.size())
+                columnEditors
+                        .subList(CHANGE_TABLE_DATA_COLUMN_OFFSET, columnEditors.size())
                         .stream()
                         .map(
                                 c ->

flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/utils/PipelineTestEnvironment.java

Lines changed: 2 additions & 1 deletion

@@ -137,9 +137,10 @@ private int getParallelism() {
             "parallelism.default: 4",
             "execution.checkpointing.interval: 300",
             "state.backend.type: hashmap",
-            "env.java.opts.all: -Doracle.jdbc.timezoneAsRegion=false",
+            "env.java.default-opts.all: --add-exports=java.base/sun.net.util=ALL-UNNAMED --add-exports=java.rmi/sun.rmi.registry=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED --add-exports=java.security.jgss/sun.security.krb5=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.text=ALL-UNNAMED --add-opens=java.base/java.time=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.locks=ALL-UNNAMED --add-opens=java.base/jdk.internal.loader=ALL-UNNAMED --add-opens=java.base/java.security=ALL-UNNAMED --add-exports=java.base/sun.net.www=ALL-UNNAMED -Doracle.jdbc.timezoneAsRegion=false",
             "execution.checkpointing.savepoint-dir: file:///opt/flink",
             "restart-strategy.type: off",
+            "pekko.ask.timeout: 60s",
             // Set off-heap memory explicitly to avoid "java.lang.OutOfMemoryError: Direct
             // buffer memory" error.
             "taskmanager.memory.task.off-heap.size: 128mb",

flink-cdc-e2e-tests/flink-cdc-source-e2e-tests/pom.xml

Lines changed: 1 addition & 1 deletion

@@ -212,7 +212,7 @@ limitations under the License.
                 <executions>
                     <execution>
                         <id>copy-jars</id>
-                        <phase>test</phase>
+                        <phase>package</phase>
                         <goals>
                             <goal>copy</goal>
                         </goals>

flink-cdc-e2e-tests/flink-cdc-source-e2e-tests/src/test/java/org/apache/flink/cdc/connectors/tests/utils/FlinkContainerTestEnvironment.java

Lines changed: 4 additions & 1 deletion

@@ -308,7 +308,10 @@ private String copyAndGetContainerPath(GenericContainer<?> container, String fil
     }

     private String getFlinkDockerImageTag() {
-        return String.format("flink:%s-scala_2.12", flinkVersion);
+        if (System.getProperty("java.specification.version").equals("17")) {
+            return String.format("flink:%s-scala_2.12-java17", flinkVersion);
+        }
+        return String.format("flink:%s-scala_2.12-java11", flinkVersion);
     }

     protected String getJdbcConnectorResourceName() {
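
For context on the branch above: java.specification.version reports the bare feature release on JDK 9 and later ("11", "17"), versus "1.8"-style values on older JDKs, so an equals check against "17" reliably selects the matching Flink image tag. A standalone sketch, with a made-up Flink version:

    public class FlinkImageTagProbe {
        public static void main(String[] args) {
            // "17" on a JDK 17 JVM, "11" on JDK 11, "1.8" on JDK 8.
            String spec = System.getProperty("java.specification.version");
            String flinkVersion = "1.18.0"; // hypothetical; the test injects its own
            String suffix = "17".equals(spec) ? "-java17" : "-java11";
            System.out.println(String.format("flink:%s-scala_2.12%s", flinkVersion, suffix));
        }
    }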

pom.xml

Lines changed: 11 additions & 2 deletions

@@ -78,7 +78,7 @@ limitations under the License.
         <version.awaitility>4.2.0</version.awaitility>
         <slf4j.version>1.7.36</slf4j.version>
         <log4j.version>2.17.1</log4j.version>
-        <spotless.version>2.27.1</spotless.version>
+        <spotless.version>2.43.0</spotless.version>
         <commons-lang3.version>3.12.0</commons-lang3.version>
         <json-path.version>2.7.0</json-path.version>
         <jackson.version>2.13.2</jackson.version>

@@ -473,7 +473,7 @@ limitations under the License.
                     <configuration>
                         <java>
                             <googleJavaFormat>
-                                <version>1.7</version>
+                                <version>1.10.0</version>
                                 <style>AOSP</style>
                             </googleJavaFormat>

@@ -569,6 +569,15 @@ limitations under the License.
                         does not understand some time zone which used in Ubuntu OS -->
                     <argLine>-Xms256m -Xmx2048m -Dmvn.forkNumber=${surefire.forkNumber}
                         -XX:+UseG1GC -Doracle.jdbc.timezoneAsRegion=false
+                        --add-opens=java.base/java.lang=ALL-UNNAMED
+                        --add-opens=java.base/java.lang.reflect=ALL-UNNAMED
+                        --add-opens=java.base/java.util.concurrent=ALL-UNNAMED
+                        --add-opens=java.base/java.util=ALL-UNNAMED
+                        --add-opens=java.sql/java.sql=ALL-UNNAMED
+                        --add-opens=java.base/java.nio=ALL-UNNAMED
+                        --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED
+                        --add-opens java.base/java.net=ALL-UNNAMED
+                        --add-exports=java.base/sun.nio.ch=ALL-UNNAMED
                     </argLine>
                 </configuration>
             </plugin>
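
The new surefire argLine entries exist because JDK 17 enforces strong encapsulation: deep reflection into a java.base package throws InaccessibleObjectException unless that package is opened to the caller's module. A minimal probe, assuming a JDK 17 JVM (not code from this repository):

    import java.lang.reflect.Field;

    // Minimal probe, not from this repository: on JDK 17 this throws
    // java.lang.reflect.InaccessibleObjectException unless the JVM is
    // started with --add-opens=java.base/java.lang=ALL-UNNAMED, which is
    // exactly what the surefire <argLine> above passes to forked tests.
    public class AddOpensProbe {
        public static void main(String[] args) throws Exception {
            Field value = String.class.getDeclaredField("value");
            value.setAccessible(true); // fails without the --add-opens flag
            System.out.println("java.base/java.lang opened: " + value.getType());
        }
    }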
