Commit 1da0567 (1 parent: 097aa9a)

[FLINK-38730][build] Bump default JDK version from 8 to 11 and support optional JDK17

This closes #4187.

13 files changed (+135, -36 lines)

.github/workflows/flink_cdc_base.yml

Lines changed: 1 addition & 1 deletion

@@ -22,7 +22,7 @@ on:
        description: "Jdk versions to test against."
        required: false
        type: string
-        default: "['8']"
+        default: "['11']"
      flink-versions:
        description: "Flink versions to test against."
        required: false
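
This changes the default for the reusable workflow: any caller that omits the `java-versions` input now tests against JDK 11 instead of JDK 8.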

.github/workflows/flink_cdc_ci.yml

Lines changed: 6 additions & 6 deletions

@@ -50,7 +50,7 @@ jobs:
      - name: Set JDK
        uses: actions/setup-java@v4
        with:
-          java-version: '8'
+          java-version: '11'
          distribution: 'temurin'
          cache: 'maven'
      - name: Compiling jar packages
@@ -61,19 +61,19 @@ jobs:
    name: Common Unit Tests
    uses: ./.github/workflows/flink_cdc_base.yml
    with:
-      java-versions: "[8]"
+      java-versions: "[11]"
      modules: "['core']"
  pipeline-ut:
    name: Pipeline Unit Tests
    uses: ./.github/workflows/flink_cdc_base.yml
    with:
-      java-versions: "[8]"
+      java-versions: "[11]"
      modules: "['mysql-pipeline', 'postgres-pipeline', 'oceanbase-pipeline', 'doris', 'elasticsearch', 'iceberg', 'kafka', 'maxcompute', 'paimon', 'starrocks', 'fluss', 'hudi']"
  source-ut:
    name: Source Unit Tests
    uses: ./.github/workflows/flink_cdc_base.yml
    with:
-      java-versions: "[8]"
+      java-versions: "[11]"
      modules: "['mysql-source', 'postgres-source', 'oracle', 'mongodb7', 'mongodb8', 'sqlserver', 'tidb', 'oceanbase-source', 'db2', 'vitess']"
  pipeline_e2e:
    strategy:
@@ -83,14 +83,14 @@ jobs:
    name: Pipeline E2E Tests (${{ matrix.parallelism }}-Parallelism)
    uses: ./.github/workflows/flink_cdc_base.yml
    with:
-      java-versions: "[8]"
+      java-versions: "[11]"
      flink-versions: "['1.19.3', '1.20.3']"
      modules: "['pipeline_e2e']"
      parallelism: ${{ matrix.parallelism }}
  source_e2e:
    name: Source E2E Tests
    uses: ./.github/workflows/flink_cdc_base.yml
    with:
-      java-versions: "[8]"
+      java-versions: "[11]"
      flink-versions: "['1.19.3', '1.20.3']"
      modules: "['source_e2e']"

.github/workflows/flink_cdc_ci_nightly.yml

Lines changed: 6 additions & 6 deletions

@@ -40,7 +40,7 @@ jobs:
      - name: Set JDK
        uses: actions/setup-java@v4
        with:
-          java-version: '11'
+          java-version: '17'
          distribution: 'temurin'
          cache: 'maven'
      - name: Compiling jar packages
@@ -52,21 +52,21 @@ jobs:
    name: Common Unit Tests
    uses: ./.github/workflows/flink_cdc_base.yml
    with:
-      java-versions: "[11]"
+      java-versions: "[17]"
      modules: "['core']"
  pipeline-ut:
    if: github.repository == 'apache/flink-cdc'
    name: Pipeline Unit Tests
    uses: ./.github/workflows/flink_cdc_base.yml
    with:
-      java-versions: "[11]"
+      java-versions: "[17]"
      modules: "['mysql-pipeline', 'postgres-pipeline', 'oceanbase-pipeline', 'doris', 'elasticsearch', 'iceberg', 'kafka', 'maxcompute', 'paimon', 'starrocks', 'fluss', 'hudi']"
  source-ut:
    if: github.repository == 'apache/flink-cdc'
    name: Source Unit Tests
    uses: ./.github/workflows/flink_cdc_base.yml
    with:
-      java-versions: "[11]"
+      java-versions: "[17]"
      modules: "['mysql-source', 'postgres-source', 'oracle', 'mongodb7', 'mongodb8', 'sqlserver', 'tidb', 'oceanbase-source', 'db2', 'vitess']"
  pipeline_e2e:
    if: github.repository == 'apache/flink-cdc'
@@ -77,7 +77,7 @@ jobs:
    name: Pipeline E2E Tests (${{ matrix.parallelism }}-Parallelism)
    uses: ./.github/workflows/flink_cdc_base.yml
    with:
-      java-versions: "[11]"
+      java-versions: "[17]"
      flink-versions: "['1.19.3', '1.20.3']"
      modules: "['pipeline_e2e']"
      parallelism: ${{ matrix.parallelism }}
@@ -86,6 +86,6 @@ jobs:
    name: Source E2E Tests
    uses: ./.github/workflows/flink_cdc_base.yml
    with:
-      java-versions: "[11]"
+      java-versions: "[17]"
      flink-versions: "['1.19.3', '1.20.3']"
      modules: "['source_e2e']"

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-hudi/pom.xml

Lines changed: 0 additions & 1 deletion

@@ -265,7 +265,6 @@ limitations under the License.
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
-                <version>3.2.4</version>
                <executions>
                    <execution>
                        <phase>package</phase>
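
Dropping the explicit <version> means the maven-shade-plugin version is now inherited from the parent POM's pluginManagement rather than pinned per module, presumably so that a single centrally managed version compatible with the newer JDKs applies everywhere.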

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-hudi/src/test/java/org/apache/flink/cdc/connectors/hudi/sink/HudiSinkITCase.java

Lines changed: 4 additions & 1 deletion

@@ -402,7 +402,10 @@ private List<String> fetchTableContent(Configuration conf, String basePath) thro
                        .orElse(null);

        List<FileSlice> fileSlices =
-                table.getSliceView().getAllLatestFileSlicesBeforeOrOn(latestInstant).values()
+                table
+                        .getSliceView()
+                        .getAllLatestFileSlicesBeforeOrOn(latestInstant)
+                        .values()
                        .stream()
                        .flatMap(s -> s)
                        .collect(Collectors.toList());
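
This is a formatting-only change: the fluent call chain is broken into one call per line, most likely because the code formatter wraps long chains differently when run on the newer JDK. The same reflow appears in the Kafka, Paimon, and Db2 changes below.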

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-kafka/src/main/java/org/apache/flink/cdc/connectors/kafka/json/canal/CanalJsonSerializationSchema.java

Lines changed: 4 additions & 1 deletion

@@ -146,7 +146,10 @@ public byte[] serialize(Event event) {
            reuseGenericRowData.setField(
                    5,
                    new GenericArrayData(
-                            jsonSerializers.get(dataChangeEvent.tableId()).getSchema().primaryKeys()
+                            jsonSerializers
+                                    .get(dataChangeEvent.tableId())
+                                    .getSchema()
+                                    .primaryKeys()
                                    .stream()
                                    .map(StringData::fromString)
                                    .toArray()));

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/pom.xml

Lines changed: 1 addition & 1 deletion

@@ -32,7 +32,7 @@ limitations under the License.
        <paimon.version>1.3.1</paimon.version>
        <hadoop.version>2.8.5</hadoop.version>
        <hive.version>2.3.9</hive.version>
-        <mockito.version>3.4.6</mockito.version>
+        <mockito.version>3.12.4</mockito.version>
    </properties>

    <dependencies>
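
Mockito 3.12.4 is the final release of the 3.x line; the bump is presumably needed because the Byte Buddy version bundled with 3.4.6 cannot instrument class files produced by the newer JDKs now used in CI.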

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/v2/PaimonWriter.java

Lines changed: 5 additions & 4 deletions

@@ -109,16 +109,17 @@ public PaimonWriter(
    public Collection<MultiTableCommittable> prepareCommit() {
        long startTime = System.currentTimeMillis();
        List<MultiTableCommittable> committables =
-                writes.entrySet()
-                        .parallelStream()
+                writes.entrySet().parallelStream()
                        .flatMap(
                                entry -> {
                                    try {
                                        // here we set it to lastCheckpointId+1 to
                                        // avoid prepareCommit the same checkpointId with the first
                                        // round.
-                                        return entry.getValue()
-                                                .prepareCommit(false, lastCheckpointId + 1).stream()
+                                        return entry
+                                                .getValue()
+                                                .prepareCommit(false, lastCheckpointId + 1)
+                                                .stream()
                                                .map(
                                                        committable ->
                                                                MultiTableCommittable

flink-cdc-connect/flink-cdc-source-connectors/flink-connector-db2-cdc/src/main/java/io/debezium/connector/db2/Db2Connection.java

Lines changed: 2 additions & 1 deletion

@@ -399,7 +399,8 @@ public Table getTableSchemaFromChangeTable(Db2ChangeTable changeTable) throws SQ
        // final List<Column> columns = columnEditors.subList(CHANGE_TABLE_DATA_COLUMN_OFFSET,
        // columnEditors.size() - 1).stream()
        final List<Column> columns =
-                columnEditors.subList(CHANGE_TABLE_DATA_COLUMN_OFFSET, columnEditors.size())
+                columnEditors
+                        .subList(CHANGE_TABLE_DATA_COLUMN_OFFSET, columnEditors.size())
                        .stream()
                        .map(
                                c ->

flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests/src/test/java/org/apache/flink/cdc/pipeline/tests/utils/PipelineTestEnvironment.java

Lines changed: 6 additions & 2 deletions

@@ -137,9 +137,10 @@ private int getParallelism() {
                "parallelism.default: 4",
                "execution.checkpointing.interval: 300",
                "state.backend.type: hashmap",
-                "env.java.opts.all: -Doracle.jdbc.timezoneAsRegion=false",
+                "env.java.default-opts.all: --add-exports=java.base/sun.net.util=ALL-UNNAMED --add-exports=java.rmi/sun.rmi.registry=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED --add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED --add-exports=java.security.jgss/sun.security.krb5=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED --add-opens=java.base/java.io=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.lang.reflect=ALL-UNNAMED --add-opens=java.base/java.text=ALL-UNNAMED --add-opens=java.base/java.time=ALL-UNNAMED --add-opens=java.base/java.util=ALL-UNNAMED --add-opens=java.base/java.util.concurrent=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED --add-opens=java.base/java.util.concurrent.locks=ALL-UNNAMED --add-opens=java.base/jdk.internal.loader=ALL-UNNAMED --add-opens=java.base/java.security=ALL-UNNAMED --add-exports=java.base/sun.net.www=ALL-UNNAMED -Doracle.jdbc.timezoneAsRegion=false",
                "execution.checkpointing.savepoint-dir: file:///opt/flink",
                "restart-strategy.type: off",
+                "pekko.ask.timeout: 60s",
                // Set off-heap memory explicitly to avoid "java.lang.OutOfMemoryError: Direct
                // buffer memory" error.
                "taskmanager.memory.task.off-heap.size: 128mb",
@@ -403,7 +404,10 @@ public void waitUntilJobState(Duration timeout, JobStatus expectedStatus) {
    }

    protected String getFlinkDockerImageTag() {
-        return String.format("flink:%s-scala_2.12", flinkVersion);
+        if (System.getProperty("java.specification.version").equals("17")) {
+            return String.format("flink:%s-scala_2.12-java17", flinkVersion);
+        }
+        return String.format("flink:%s-scala_2.12-java11", flinkVersion);
    }

    private ExecResult executeAndCheck(GenericContainer<?> container, String... command) {
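
The E2E harness now picks the Flink Docker image matching the JDK of the test JVM itself. A throwaway check (illustrative, not in the commit) of what drives that branch:

public class SpecVersion {
    public static void main(String[] args) {
        // Prints "11" on JDK 11 and "17" on JDK 17; on JDK 8 it prints "1.8",
        // which is why the equals("17") test above can only ever match a
        // JDK 17 JVM and everything else falls back to the -java11 image.
        System.out.println(System.getProperty("java.specification.version"));
    }
}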
