Skip to content

Commit 17f076e

Browse files
committed
Resolve conflicts
1 parent e2c42fe commit 17f076e

File tree

3 files changed

+15
-11
lines changed

3 files changed

+15
-11
lines changed

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/test/java/org/apache/flink/cdc/connectors/paimon/sink/PaimonHashFunctionTest.java

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,6 @@
3131
import org.apache.paimon.flink.FlinkCatalogFactory;
3232
import org.apache.paimon.options.Options;
3333
import org.junit.jupiter.api.AfterEach;
34-
import org.junit.jupiter.api.Assertions;
3534
import org.junit.jupiter.api.BeforeEach;
3635
import org.junit.jupiter.api.Test;
3736
import org.junit.jupiter.api.io.TempDir;
@@ -122,8 +121,9 @@ public void testHashCodeForAppendOnlyTable() {
122121
BinaryStringData.fromString("2024")
123122
}));
124123
int key3 = hashFunction.hashcode(dataChangeEvent3);
125-
Assertions.assertTrue(
126-
key1 >= 0 && key1 < 4 && key2 >= 0 && key2 < 4 && key3 >= 0 && key3 < 4);
124+
assertThat(key1).isBetween(0, 3);
125+
assertThat(key2).isBetween(0, 3);
126+
assertThat(key3).isBetween(0, 3);
127127
}
128128

129129
@Test

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/test/java/org/apache/flink/cdc/connectors/paimon/sink/PaimonMetadataApplierTest.java

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -278,10 +278,10 @@ public void testCreateTableWithoutPrimaryKey(String metastore)
278278
new DataField(1, "col2", DataTypes.STRING()),
279279
new DataField(2, "col3", DataTypes.STRING()),
280280
new DataField(3, "col4", DataTypes.STRING())));
281-
Assertions.assertEquals(tableSchema, table.rowType());
282-
Assertions.assertEquals(new ArrayList<>(), table.primaryKeys());
283-
Assertions.assertEquals(new ArrayList<>(), table.partitionKeys());
284-
Assertions.assertEquals("-1", table.options().get("bucket"));
281+
Assertions.assertThat(table.rowType()).isEqualTo(tableSchema);
282+
Assertions.assertThat(table.primaryKeys()).isEmpty();
283+
Assertions.assertThat(table.partitionKeys()).isEmpty();
284+
Assertions.assertThat(table.options()).containsEntry("bucket", "-1");
285285
}
286286

287287
@ParameterizedTest

flink-cdc-connect/flink-cdc-source-connectors/flink-connector-postgres-cdc/src/test/java/org/apache/flink/cdc/connectors/postgres/source/PostgresSourceITCase.java

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -717,8 +717,9 @@ void testHeartBeat(String scanStartupMode) throws Exception {
717717
}
718718
}
719719

720-
@Test
721-
public void testCommitLsnWhenTaskManagerFailover() throws Exception {
720+
@ParameterizedTest
721+
@ValueSource(strings = {"initial", "latest-offset"})
722+
void testCommitLsnWhenTaskManagerFailover(String scanStartupMode) throws Exception {
722723
int parallelism = 1;
723724
PostgresTestUtils.FailoverType failoverType = PostgresTestUtils.FailoverType.TM;
724725
PostgresTestUtils.FailoverPhase failoverPhase = PostgresTestUtils.FailoverPhase.STREAM;
@@ -1239,7 +1240,10 @@ private void checkStreamDataWithTestLsn(
12391240
}
12401241
if (failoverPhase == PostgresTestUtils.FailoverPhase.STREAM) {
12411242
triggerFailover(
1242-
failoverType, jobId, miniClusterResource.getMiniCluster(), () -> sleepMs(200));
1243+
failoverType,
1244+
jobId,
1245+
miniClusterResource.get().getMiniCluster(),
1246+
() -> sleepMs(200));
12431247
waitUntilJobRunning(tableResult);
12441248
}
12451249
// wait for the stream reading and isCommitOffset is true
@@ -1251,7 +1255,7 @@ private void checkStreamDataWithTestLsn(
12511255
}
12521256
Thread.sleep(5000L);
12531257
try (PostgresConnection connection = getConnection()) {
1254-
assertTrue(!confirmedFlushLsn.equals(getConfirmedFlushLsn(connection)));
1258+
Assertions.assertThat(getConfirmedFlushLsn(connection)).isNotEqualTo(confirmedFlushLsn);
12551259
}
12561260
}
12571261

0 commit comments

Comments (0)