Commit d3c049d

[FLINK-36524][pipeline-connector][paimon] Bump Paimon version to 0.9.0
This closes #3644
1 parent fc71888 commit d3c049d

File tree: 7 files changed (+38, -8 lines)

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/pom.xml

Lines changed: 1 addition & 1 deletion
@@ -29,7 +29,7 @@ limitations under the License.
     <artifactId>flink-cdc-pipeline-connector-paimon</artifactId>

     <properties>
-        <paimon.version>0.8.2</paimon.version>
+        <paimon.version>0.9.0</paimon.version>
         <hadoop.version>2.8.5</hadoop.version>
         <hive.version>2.3.9</hive.version>
         <mockito.version>3.4.6</mockito.version>

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/PaimonDataSinkFactory.java

Lines changed: 2 additions & 0 deletions
@@ -69,6 +69,8 @@ public DataSink createDataSink(Context context) {
                     }
                 });
         Options options = Options.fromMap(catalogOptions);
+        // Avoid using previous table schema.
+        options.setString("cache-enabled", "false");
         try (Catalog catalog = FlinkCatalogFactory.createPaimonCatalog(options)) {
             Preconditions.checkNotNull(
                     catalog.listDatabases(), "catalog option of Paimon is invalid.");
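
For context, a commented rendering of the new factory logic above, as a sketch rather than an addition to the commit; the import paths are assumptions based on the classes named in the diff:

import org.apache.flink.cdc.common.utils.Preconditions;
import org.apache.paimon.catalog.Catalog;
import org.apache.paimon.flink.FlinkCatalogFactory;
import org.apache.paimon.options.Options;

Options options = Options.fromMap(catalogOptions);
// Disable Paimon's catalog cache so that schema lookups made after a
// schema-change event see the current table schema rather than a stale one.
options.setString("cache-enabled", "false");
try (Catalog catalog = FlinkCatalogFactory.createPaimonCatalog(options)) {
    // listDatabases() doubles as a cheap validation of the catalog options.
    Preconditions.checkNotNull(
            catalog.listDatabases(), "catalog option of Paimon is invalid.");
}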

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/v2/PaimonCommitter.java

Lines changed: 2 additions & 2 deletions
@@ -45,8 +45,8 @@ public PaimonCommitter(Options catalogOptions, String commitUser) {
         storeMultiCommitter =
                 new StoreMultiCommitter(
                         () -> FlinkCatalogFactory.createPaimonCatalog(catalogOptions),
-                        commitUser,
-                        null);
+                        org.apache.paimon.flink.sink.Committer.createContext(
+                                commitUser, null, true, false, null));
     }

     @Override
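
Paimon 0.9 changed StoreMultiCommitter to take a Committer.Context instead of the bare commitUser and metric group. A hedged annotation of the call site above; the meaning of each argument is an assumption inferred from this call site, not confirmed against the Paimon 0.9 sources:

// Sketch of the new construction, with assumed argument meanings:
storeMultiCommitter =
        new StoreMultiCommitter(
                () -> FlinkCatalogFactory.createPaimonCatalog(catalogOptions),
                org.apache.paimon.flink.sink.Committer.createContext(
                        commitUser, // user tag stamped on every commit
                        null,       // no Flink metric group wired in here
                        true,       // assumption: streaming checkpoints enabled
                        false,      // assumption: not restored from prior state
                        null));     // assumption: no operator state store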

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/v2/StoreSinkWriteImpl.java

Lines changed: 3 additions & 0 deletions
@@ -142,6 +142,9 @@ public void withCompactExecutor(ExecutorService compactExecutor) {
         write.withCompactExecutor(compactExecutor);
     }

+    @Override
+    public void withInsertOnly(boolean b) {}
+
     @Override
     public SinkRecord write(InternalRow internalRow) throws Exception {
         return write.writeAndReturn(internalRow);
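
withInsertOnly was added to Paimon's StoreSinkWrite interface in 0.9, so this class must now implement it. A commented version of the stub; the rationale is an assumption, since the commit itself does not explain the empty body:

@Override
public void withInsertOnly(boolean b) {
    // Deliberate no-op: a CDC stream carries updates and deletes as well as
    // inserts, so the insert-only write optimization is never turned on.
}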

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/main/java/org/apache/flink/cdc/connectors/paimon/sink/v2/bucket/BucketAssignOperator.java

Lines changed: 4 additions & 4 deletions
@@ -151,26 +151,26 @@ public void processElement(StreamRecord<Event> streamRecord) throws Exception {
                         dataChangeEvent,
                         schemaMaps.get(dataChangeEvent.tableId()).getFieldGetters());
         switch (tuple4.f0) {
-            case DYNAMIC:
+            case HASH_DYNAMIC:
                 {
                     bucket =
                             tuple4.f2.assign(
                                     tuple4.f3.partition(genericRow),
                                     tuple4.f3.trimmedPrimaryKey(genericRow).hashCode());
                     break;
                 }
-            case FIXED:
+            case HASH_FIXED:
                 {
                     tuple4.f1.setRecord(genericRow);
                     bucket = tuple4.f1.bucket();
                     break;
                 }
-            case UNAWARE:
+            case BUCKET_UNAWARE:
                 {
                     bucket = 0;
                     break;
                 }
-            case GLOBAL_DYNAMIC:
+            case CROSS_PARTITION:
             default:
                 {
                     throw new RuntimeException("Unsupported bucket mode: " + tuple4.f0);
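
The BucketMode constants were renamed in Paimon 0.9; the mapping below is read directly off this hunk:

// Paimon 0.8.x                -> Paimon 0.9.0
// BucketMode.DYNAMIC          -> BucketMode.HASH_DYNAMIC
// BucketMode.FIXED            -> BucketMode.HASH_FIXED
// BucketMode.UNAWARE          -> BucketMode.BUCKET_UNAWARE
// BucketMode.GLOBAL_DYNAMIC   -> BucketMode.CROSS_PARTITION (still unsupported here)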

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/test/java/org/apache/flink/cdc/connectors/paimon/sink/PaimonMetadataApplierTest.java

Lines changed: 25 additions & 0 deletions
@@ -87,6 +87,7 @@ private void initialize(String metastore)
         }
         catalogOptions.setString("metastore", metastore);
         catalogOptions.setString("warehouse", warehouse);
+        catalogOptions.setString("cache-enabled", "false");
         this.catalog = FlinkCatalogFactory.createPaimonCatalog(catalogOptions);
         this.catalog.dropDatabase(TEST_DATABASE, true, true);
     }
@@ -206,6 +207,30 @@ public void testApplySchemaChange(String metastore)
                 catalog.getTable(Identifier.fromString("test.table_with_partition"));
         Assertions.assertEquals(tableSchema, tableWithPartition.rowType());
         Assertions.assertEquals(Arrays.asList("col1", "dt"), tableWithPartition.primaryKeys());
+        // Create table with upper case.
+        catalogOptions.setString("allow-upper-case", "true");
+        metadataApplier = new PaimonMetadataApplier(catalogOptions);
+        createTableEvent =
+                new CreateTableEvent(
+                        TableId.parse("test.table_with_upper_case"),
+                        org.apache.flink.cdc.common.schema.Schema.newBuilder()
+                                .physicalColumn(
+                                        "COL1",
+                                        org.apache.flink.cdc.common.types.DataTypes.STRING()
+                                                .notNull())
+                                .physicalColumn(
+                                        "col2", org.apache.flink.cdc.common.types.DataTypes.INT())
+                                .primaryKey("COL1")
+                                .build());
+        metadataApplier.applySchemaChange(createTableEvent);
+        tableSchema =
+                new RowType(
+                        Arrays.asList(
+                                new DataField(0, "COL1", DataTypes.STRING().notNull()),
+                                new DataField(1, "col2", DataTypes.INT())));
+        Assertions.assertEquals(
+                tableSchema,
+                catalog.getTable(Identifier.fromString("test.table_with_upper_case")).rowType());
     }

     @ParameterizedTest
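
The new test block exercises the allow-upper-case catalog option alongside the disabled cache. A minimal stand-alone sketch; the warehouse path is hypothetical, and whether upper-case identifiers such as COL1 are rejected by default depends on the metastore in use:

Options catalogOptions = new Options();
catalogOptions.setString("warehouse", "/tmp/warehouse"); // hypothetical path
catalogOptions.setString("cache-enabled", "false");      // always fetch fresh schemas
catalogOptions.setString("allow-upper-case", "true");    // accept identifiers like COL1
Catalog catalog = FlinkCatalogFactory.createPaimonCatalog(catalogOptions);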

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon/src/test/java/org/apache/flink/cdc/connectors/paimon/sink/v2/PaimonSinkITCase.java

Lines changed: 1 addition & 1 deletion
@@ -127,7 +127,7 @@ private void initialize(String metastore)
                                 + "'metastore'='hive', "
                                 + "'hadoop-conf-dir'='%s', "
                                 + "'hive-conf-dir'='%s', "
-                                + "'cache-enabled'='false' "
+                                + "'cache-enabled'='false'"
                                 + ")",
                         warehouse, HADOOP_CONF_DIR, HIVE_CONF_DIR));
         } else {
