Skip to content

Commit 24bb243

Browse files
committed
upgrade Debezium to 3.3.2
1 parent 18949a0 commit 24bb243

File tree

5 files changed

+76
-46
lines changed

5 files changed

+76
-46
lines changed

v2/cdc-parent/cdc-embedded-connector/src/main/java/com/google/cloud/dataflow/cdc/connector/DebeziumToPubSubDataSender.java

Lines changed: 38 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -19,12 +19,14 @@
1919
import com.google.common.base.Preconditions;
2020
import com.google.common.collect.ImmutableMap;
2121
import io.debezium.config.Configuration;
22-
import io.debezium.embedded.EmbeddedEngine;
22+
import io.debezium.embedded.Connect;
23+
import io.debezium.engine.ChangeEvent;
24+
import io.debezium.engine.DebeziumEngine;
2325
import io.debezium.relational.HistorizedRelationalDatabaseConnectorConfig;
2426
import io.debezium.relational.RelationalDatabaseConnectorConfig;
25-
import io.debezium.relational.history.FileDatabaseHistory;
26-
import io.debezium.relational.history.MemoryDatabaseHistory;
27-
import io.debezium.util.Clock;
27+
import io.debezium.relational.history.MemorySchemaHistory;
28+
import io.debezium.storage.file.history.FileSchemaHistory;
29+
import java.io.IOException;
2830
import java.util.Iterator;
2931
import java.util.Set;
3032
import java.util.concurrent.CancellationException;
@@ -36,6 +38,7 @@
3638
import java.util.concurrent.TimeoutException;
3739
import java.util.function.Function;
3840
import java.util.stream.Collectors;
41+
import org.apache.kafka.connect.source.SourceRecord;
3942
import org.slf4j.Logger;
4043
import org.slf4j.LoggerFactory;
4144

@@ -74,7 +77,7 @@ public class DebeziumToPubSubDataSender implements Runnable {
7477

7578
private final Set<String> whitelistedTables;
7679

77-
private EmbeddedEngine engine;
80+
private DebeziumEngine<ChangeEvent<SourceRecord, SourceRecord>> engine;
7881
private ExecutorService executorService;
7982

8083
public DebeziumToPubSubDataSender(
@@ -122,8 +125,8 @@ public DebeziumToPubSubDataSender(
122125
Configuration.empty()
123126
.withSystemProperties(Function.identity())
124127
.edit()
125-
.with(EmbeddedEngine.CONNECTOR_CLASS, RDBMS_TO_CONNECTOR_MAP.get(rdbms))
126-
.with(EmbeddedEngine.ENGINE_NAME, APP_NAME)
128+
.with("connector.class", RDBMS_TO_CONNECTOR_MAP.get(rdbms))
129+
.with("name", APP_NAME)
127130
// Database connection information.
128131
.with("database.hostname", this.databaseAddress)
129132
.with("database.port", this.databasePort)
@@ -132,35 +135,33 @@ public DebeziumToPubSubDataSender(
132135
.with("database.server.name", databaseName)
133136
.with("decimal.handling.mode", "string")
134137
.with(
135-
HistorizedRelationalDatabaseConnectorConfig.DATABASE_HISTORY,
136-
MemoryDatabaseHistory.class.getName());
138+
HistorizedRelationalDatabaseConnectorConfig.SCHEMA_HISTORY.name(),
139+
MemorySchemaHistory.class.getName());
137140

138141
if (!whitelistedTables.isEmpty()) {
139142
LOG.info("Whitelisting tables: {}", dbzWhitelistedTables);
140143
configBuilder =
141144
configBuilder.with(
142-
RelationalDatabaseConnectorConfig.TABLE_WHITELIST, dbzWhitelistedTables);
145+
RelationalDatabaseConnectorConfig.TABLE_INCLUDE_LIST.name(), dbzWhitelistedTables);
143146
}
144147

145148
if (this.inMemoryOffsetStorage) {
146149
LOG.info("Setting up in memory offset storage.");
147150
configBuilder =
148151
configBuilder.with(
149-
EmbeddedEngine.OFFSET_STORAGE,
152+
"offset.storage",
150153
"org.apache.kafka.connect.storage.MemoryOffsetBackingStore");
151154
} else {
152155
LOG.info("Setting up in File-based offset storage in {}.", this.offsetStorageFile);
153156
configBuilder =
154157
configBuilder
158+
.with("offset.storage", "org.apache.kafka.connect.storage.FileOffsetBackingStore")
159+
.with("offset.storage.file.filename", this.offsetStorageFile)
160+
.with(DebeziumEngine.OFFSET_FLUSH_INTERVAL_MS_PROP, DEFAULT_FLUSH_INTERVAL_MS)
155161
.with(
156-
EmbeddedEngine.OFFSET_STORAGE,
157-
"org.apache.kafka.connect.storage.FileOffsetBackingStore")
158-
.with(EmbeddedEngine.OFFSET_STORAGE_FILE_FILENAME, this.offsetStorageFile)
159-
.with(EmbeddedEngine.OFFSET_FLUSH_INTERVAL_MS, DEFAULT_FLUSH_INTERVAL_MS)
160-
.with(
161-
HistorizedRelationalDatabaseConnectorConfig.DATABASE_HISTORY,
162-
FileDatabaseHistory.class.getName())
163-
.with("database.history.file.filename", this.databaseHistoryFile);
162+
HistorizedRelationalDatabaseConnectorConfig.SCHEMA_HISTORY.name(),
163+
FileSchemaHistory.class.getName())
164+
.with(FileSchemaHistory.FILE_PATH.name(), this.databaseHistoryFile);
164165
}
165166

166167
Iterator<String> keys = debeziumConfig.getKeys();
@@ -182,10 +183,10 @@ public void run() {
182183
PubSubChangeConsumer.DEFAULT_PUBLISHER_FACTORY);
183184

184185
engine =
185-
EmbeddedEngine.create()
186-
.using(config)
186+
DebeziumEngine.create(Connect.class)
187+
.using(config.asProperties())
187188
.using(this.getClass().getClassLoader())
188-
.using(Clock.SYSTEM)
189+
.using(java.time.Clock.systemUTC())
189190
.notifying(changeConsumer)
190191
.build();
191192

@@ -198,14 +199,22 @@ public void run() {
198199
new Thread(
199200
() -> {
200201
LOG.info("Requesting embedded engine to shut down");
201-
engine.stop();
202+
try {
203+
engine.close();
204+
} catch (IOException e) {
205+
LOG.error("Error closing the engine", e);
206+
}
202207
}));
203208

204209
awaitTermination(future);
205210
}
206211

207212
public void stop() {
208-
engine.stop();
213+
try {
214+
engine.close();
215+
} catch (IOException e) {
216+
LOG.error("Error closing the engine", e);
217+
}
209218
executorService.shutdown();
210219
}
211220

@@ -217,7 +226,11 @@ private void awaitTermination(Future<?> future) {
217226
try {
218227
future.get(30, TimeUnit.SECONDS);
219228
if (future.isDone() || future.isCancelled()) {
220-
engine.stop();
229+
try {
230+
engine.close();
231+
} catch (IOException e) {
232+
LOG.error("Error closing the engine", e);
233+
}
221234
executorService.shutdown();
222235
break;
223236
}

v2/cdc-parent/cdc-embedded-connector/src/main/java/com/google/cloud/dataflow/cdc/connector/PubSubChangeConsumer.java

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -23,8 +23,9 @@
2323
import com.google.protobuf.ByteString;
2424
import com.google.pubsub.v1.ProjectTopicName;
2525
import com.google.pubsub.v1.PubsubMessage;
26-
import io.debezium.embedded.EmbeddedEngine;
27-
import io.debezium.embedded.EmbeddedEngine.RecordCommitter;
26+
import io.debezium.engine.ChangeEvent;
27+
import io.debezium.engine.DebeziumEngine;
28+
import io.debezium.engine.DebeziumEngine.RecordCommitter;
2829
import java.io.ByteArrayOutputStream;
2930
import java.io.IOException;
3031
import java.util.HashMap;
@@ -40,7 +41,7 @@
4041
import org.slf4j.LoggerFactory;
4142

4243
/** Implements Debezium's Embedded Engine change consumer to push data to PubSub. */
43-
public class PubSubChangeConsumer implements EmbeddedEngine.ChangeConsumer {
44+
public class PubSubChangeConsumer implements DebeziumEngine.ChangeConsumer<ChangeEvent<SourceRecord, SourceRecord>> {
4445

4546
private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(PubSubChangeConsumer.class);
4647

@@ -100,15 +101,16 @@ private RowCoder getCoderForRow(String tableName, Row record) {
100101
}
101102

102103
@Override
103-
public void handleBatch(List<SourceRecord> records, RecordCommitter committer)
104+
public void handleBatch(List<ChangeEvent<SourceRecord, SourceRecord>> records, RecordCommitter<ChangeEvent<SourceRecord, SourceRecord>> committer)
104105
throws InterruptedException {
105106

106107
ImmutableList.Builder<ApiFuture<String>> futureListBuilder = ImmutableList.builder();
107108

108109
Set<Publisher> usedPublishers = new HashSet<>();
109110

110111
// TODO(pabloem): Improve the commit logic.
111-
for (SourceRecord r : records) {
112+
for (ChangeEvent<SourceRecord, SourceRecord> event : records) {
113+
SourceRecord r = event.value();
112114

113115
// Debezium publishes updates for each table in a separate Kafka topic, which is the fully
114116
// qualified name of the MySQL table (e.g. dbInstanceName.databaseName.table_name).
@@ -159,7 +161,7 @@ public void handleBatch(List<SourceRecord> records, RecordCommitter committer)
159161
} else {
160162
LOG.debug("Discarding record: {}", r);
161163
}
162-
committer.markProcessed(r);
164+
committer.markProcessed(event);
163165
}
164166

165167
usedPublishers.forEach(p -> p.publishAllOutstanding());

v2/cdc-parent/cdc-embedded-connector/src/test/java/com/google/cloud/dataflow/cdc/connector/DebeziumMysqlToPubSubDataSenderTest.java

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -17,12 +17,15 @@
1717

1818
import com.google.cloud.dataflow.cdc.common.DataCatalogSchemaUtils;
1919
import com.google.common.collect.Sets;
20-
import io.debezium.embedded.EmbeddedEngine;
21-
import io.debezium.util.Clock;
20+
import io.debezium.embedded.Connect;
21+
import io.debezium.engine.ChangeEvent;
22+
import io.debezium.engine.DebeziumEngine;
23+
import java.io.IOException;
2224
import java.util.concurrent.ExecutorService;
2325
import java.util.concurrent.Executors;
2426
import java.util.concurrent.Future;
2527
import java.util.concurrent.TimeUnit;
28+
import org.apache.kafka.connect.source.SourceRecord;
2629
import org.junit.jupiter.api.Test;
2730
import org.slf4j.Logger;
2831
import org.slf4j.LoggerFactory;
@@ -49,11 +52,11 @@ private void runEmbeddedEngine() {
4952
DataCatalogSchemaUtils.getSchemaManager(GCP_PROJECT, PUBSUB_PREFIX, false),
5053
PubSubChangeConsumer.DEFAULT_PUBLISHER_FACTORY);
5154

52-
final EmbeddedEngine engine =
53-
EmbeddedEngine.create()
55+
final DebeziumEngine<ChangeEvent<SourceRecord, SourceRecord>> engine =
56+
DebeziumEngine.create(Connect.class)
5457
// .using(config)
5558
.using(this.getClass().getClassLoader())
56-
.using(Clock.SYSTEM)
59+
.using(java.time.Clock.systemUTC())
5760
.notifying(changeConsumer)
5861
.build();
5962

@@ -66,7 +69,11 @@ private void runEmbeddedEngine() {
6669
new Thread(
6770
() -> {
6871
LOG.info("Requesting embedded engine to shut down");
69-
engine.stop();
72+
try {
73+
engine.close();
74+
} catch (IOException e) {
75+
LOG.error("Failed to close embedded engine", e);
76+
}
7077
}));
7178

7279
try {

v2/cdc-parent/cdc-embedded-connector/src/test/java/com/google/cloud/dataflow/cdc/connector/PubSubChangeConsumerTest.java

Lines changed: 16 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,8 @@
2323
import com.google.common.collect.ImmutableMap;
2424
import com.google.common.collect.Sets;
2525
import com.google.pubsub.v1.PubsubMessage;
26-
import io.debezium.embedded.EmbeddedEngine.RecordCommitter;
26+
import io.debezium.engine.ChangeEvent;
27+
import io.debezium.engine.DebeziumEngine.RecordCommitter;
2728
import java.util.List;
2829
import org.apache.kafka.connect.data.Schema;
2930
import org.apache.kafka.connect.data.SchemaBuilder;
@@ -87,9 +88,10 @@ public void testBasicRecordAndFilteredRecordInput() throws InterruptedException
8788

8889
// We are going to pass two records to be checked, but only one of them belongs to a table that
8990
// is whitelisted, therefore, only one will be published to pubsub.
90-
List<SourceRecord> recordBatch =
91-
ImmutableList.of(
92-
new SourceRecord(
91+
ChangeEvent<SourceRecord, SourceRecord> mockEvent1 = Mockito.mock(ChangeEvent.class);
92+
ChangeEvent<SourceRecord, SourceRecord> mockEvent2 = Mockito.mock(ChangeEvent.class);
93+
94+
SourceRecord record1 = new SourceRecord(
9395
ImmutableMap.of("server", "mainstance"),
9496
ImmutableMap.of(
9597
"file",
@@ -106,8 +108,9 @@ public void testBasicRecordAndFilteredRecordInput() throws InterruptedException
106108
keySchema,
107109
key,
108110
valueSchema,
109-
value),
110-
new SourceRecord(
111+
value);
112+
113+
SourceRecord record2 = new SourceRecord(
111114
ImmutableMap.of("server", "mainstance"),
112115
ImmutableMap.of(
113116
"file",
@@ -124,9 +127,14 @@ public void testBasicRecordAndFilteredRecordInput() throws InterruptedException
124127
keySchema,
125128
key,
126129
valueSchema,
127-
value));
130+
value);
128131

129-
RecordCommitter mockCommitter = Mockito.mock(RecordCommitter.class);
132+
Mockito.when(mockEvent1.value()).thenReturn(record1);
133+
Mockito.when(mockEvent2.value()).thenReturn(record2);
134+
135+
List<ChangeEvent<SourceRecord, SourceRecord>> recordBatch = ImmutableList.of(mockEvent1, mockEvent2);
136+
137+
RecordCommitter<ChangeEvent<SourceRecord, SourceRecord>> mockCommitter = Mockito.mock(RecordCommitter.class);
130138

131139
changeConsumer.handleBatch(recordBatch, mockCommitter);
132140

v2/cdc-parent/pom.xml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@
3131
<packaging>pom</packaging>
3232

3333
<properties>
34-
<debezium.version>0.9.5.Final</debezium.version>
34+
<debezium.version>3.3.2.Final</debezium.version>
3535
<licenseHeaderFile>../../JAVA_LICENSE_HEADER</licenseHeaderFile>
3636
</properties>
3737

0 commit comments

Comments
 (0)