@@ -18,6 +18,8 @@
 import static com.mongodb.kafka.connect.source.MongoSourceConfig.COLLECTION_CONFIG;
 import static com.mongodb.kafka.connect.source.MongoSourceConfig.COPY_EXISTING_CONFIG;
 import static com.mongodb.kafka.connect.source.MongoSourceConfig.DATABASE_CONFIG;
+import static com.mongodb.kafka.connect.source.MongoSourceConfig.ERRORS_LOG_ENABLE_CONFIG;
+import static com.mongodb.kafka.connect.source.MongoSourceConfig.ERRORS_TOLERANCE_CONFIG;
 import static com.mongodb.kafka.connect.source.MongoSourceConfig.OUTPUT_FORMAT_VALUE_CONFIG;
 import static com.mongodb.kafka.connect.source.MongoSourceConfig.OUTPUT_JSON_FORMATTER_CONFIG;
 import static com.mongodb.kafka.connect.source.MongoSourceConfig.OUTPUT_SCHEMA_INFER_VALUE_CONFIG;
@@ -26,27 +28,31 @@
 import static com.mongodb.kafka.connect.source.MongoSourceConfig.TOPIC_PREFIX_CONFIG;
 import static java.lang.String.format;
 import static java.util.stream.Collectors.toList;
+import static org.junit.jupiter.api.Assertions.assertTrue;
 import static org.junit.jupiter.api.Assumptions.assumeTrue;
 
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Properties;
 import java.util.stream.IntStream;
+import java.util.stream.Stream;
 
 import org.apache.kafka.connect.converters.ByteArrayConverter;
 import org.apache.kafka.connect.storage.StringConverter;
+import org.apache.log4j.Logger;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Test;
 
 import org.bson.BsonDocument;
-import org.bson.BsonString;
 
 import com.mongodb.client.MongoCollection;
 import com.mongodb.client.MongoDatabase;
 
+import com.mongodb.kafka.connect.log.LogCapture;
 import com.mongodb.kafka.connect.mongodb.MongoKafkaTestCase;
+import com.mongodb.kafka.connect.source.MongoSourceConfig.ErrorTolerance;
 import com.mongodb.kafka.connect.source.MongoSourceConfig.OutputFormat;
 
 public class FullDocumentRoundTripIntegrationTest extends MongoKafkaTestCase {
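
Note: com.mongodb.kafka.connect.log.LogCapture is introduced by this change, but its implementation is not part of this diff. From its usage in the hunk below (constructed around a log4j 1.x Logger, used in try-with-resources, queried via getEvents()), it appears to be a test appender that records logging events. The following is a minimal sketch of such a helper, assuming log4j 1.x AppenderSkeleton; the package and class name follow the import above, everything else is a guess rather than the actual implementation:

    package com.mongodb.kafka.connect.log;

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    import org.apache.log4j.AppenderSkeleton;
    import org.apache.log4j.Logger;
    import org.apache.log4j.spi.LoggingEvent;

    // Attaches itself to the given logger and records every event it receives;
    // detaches again on close() so tests do not leak appenders.
    public final class LogCapture extends AppenderSkeleton implements AutoCloseable {
      private final Logger logger;
      private final List<LoggingEvent> events = Collections.synchronizedList(new ArrayList<>());

      public LogCapture(final Logger logger) {
        this.logger = logger;
        this.logger.addAppender(this);
      }

      public List<LoggingEvent> getEvents() {
        return new ArrayList<>(events);
      }

      @Override
      protected void append(final LoggingEvent event) {
        events.add(event);
      }

      @Override
      public void close() {
        logger.removeAppender(this);
      }

      @Override
      public boolean requiresLayout() {
        return false;
      }
    }
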
@@ -196,36 +202,54 @@ void testRoundTripSchema() {
   @Test
   @DisplayName("Ensure collection round trip inferring schema value")
   void testRoundTripInferSchemaValue() {
-    Properties sourceProperties = new Properties();
-    sourceProperties.put(
-        OUTPUT_JSON_FORMATTER_CONFIG,
-        "com.mongodb.kafka.connect.source.json.formatter.SimplifiedJson");
-    sourceProperties.put(OUTPUT_FORMAT_VALUE_CONFIG, OutputFormat.SCHEMA.name());
-    sourceProperties.put(OUTPUT_SCHEMA_INFER_VALUE_CONFIG, "true");
-    sourceProperties.put(PUBLISH_FULL_DOCUMENT_ONLY_CONFIG, "true");
-    sourceProperties.put("value.converter", "io.confluent.connect.avro.AvroConverter");
-    sourceProperties.put("value.converter.schema.registry.url", KAFKA.schemaRegistryUrl());
-
-    Properties sinkProperties = new Properties();
-    sinkProperties.put("value.converter", "io.confluent.connect.avro.AvroConverter");
-    sinkProperties.put("value.converter.schema.registry.url", KAFKA.schemaRegistryUrl());
-
-    assertRoundTrip(
-        IntStream.range(1, 100)
-            .mapToObj(i -> BsonDocument.parse(format(FULL_DOCUMENT_JSON, i)))
-            .collect(toList()),
-        IntStream.range(1, 100)
-            .mapToObj(
-                i -> {
-                  BsonDocument doc = BsonDocument.parse(format(FULL_DOCUMENT_JSON, i));
-                  doc.put(
-                      "myObjectId",
-                      new BsonString(doc.getObjectId("myObjectId").getValue().toHexString()));
-                  return doc;
-                })
-            .collect(toList()),
-        sourceProperties,
-        sinkProperties);
+    try (LogCapture logCapture =
+        new LogCapture(
+            Logger.getLogger("io.confluent.rest.exceptions.DebuggableExceptionMapper"))) {
+      Properties sourceProperties = new Properties();
+      sourceProperties.put(
+          OUTPUT_JSON_FORMATTER_CONFIG,
+          "com.mongodb.kafka.connect.source.json.formatter.SimplifiedJson");
+      sourceProperties.put(OUTPUT_FORMAT_VALUE_CONFIG, OutputFormat.SCHEMA.name());
+      sourceProperties.put(OUTPUT_SCHEMA_INFER_VALUE_CONFIG, "true");
+      sourceProperties.put(PUBLISH_FULL_DOCUMENT_ONLY_CONFIG, "true");
+      sourceProperties.put("value.converter", "io.confluent.connect.avro.AvroConverter");
+      sourceProperties.put("value.converter.schema.registry.url", KAFKA.schemaRegistryUrl());
+      sourceProperties.put(ERRORS_TOLERANCE_CONFIG, ErrorTolerance.ALL.value());
+      sourceProperties.put(ERRORS_LOG_ENABLE_CONFIG, "true");
+
+      Properties sinkProperties = new Properties();
+      sinkProperties.put("value.converter", "io.confluent.connect.avro.AvroConverter");
+      sinkProperties.put("value.converter.schema.registry.url", KAFKA.schemaRegistryUrl());
+
+      List<BsonDocument> originals =
+          Stream.of(
+                  "{_id: 1, a: 1, b: 1}",
+                  "{b: 1, _id: 2, a: 1}", // Different field order
+                  "{_id: 3, b: 1, c: 1, d: 1}", // Field a missing; two new fields added
+                  "{_id: 4, E: 1, f: 1, g: 1, h: {h1: 2, h2: '2'}}", // All new fields
+                  "{_id: 5, h: {h2: '3', h1: 2, h4: [1]}}", // Nested field order
+                  "{_id: 10, h: ['1']}", // Invalid schema, skipped due to errors.tolerance
+                  "{_id: 6, g: 3, a: 2, h: {h1: 2, h2: '2'}}" // Different field order
+                  )
+              .map(BsonDocument::parse)
+              .collect(toList());
+
+      List<BsonDocument> expected =
+          originals.stream().filter(d -> d.getInt32("_id").getValue() != 10).collect(toList());
+
+      assertRoundTrip(originals, expected, sourceProperties, sinkProperties);
+
+      assertTrue(
+          logCapture.getEvents().stream()
+              .filter(e -> e.getThrowableInformation() != null)
+              .anyMatch(
+                  e ->
+                      e.getThrowableInformation()
+                          .getThrowable()
+                          .getMessage()
+                          .equals(
+                              "Schema being registered is incompatible with an earlier schema")));
+    }
   }
 
   void assertRoundTrip(final List<BsonDocument> originals) {
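
Note on the test's logic: documents 1 through 6 let the connector infer compatible, evolving Avro schemas, but {_id: 10, h: ['1']} flips field h from a record (h1, h2, ...) to an array of strings. Registering that inferred schema fails, Schema Registry's DebuggableExceptionMapper logs "Schema being registered is incompatible with an earlier schema", and with errors.tolerance=all plus errors.log.enable=true the record is skipped instead of failing the task. That is why the expected list filters out _id 10 and the final assertion inspects the captured log events. The sketch below illustrates the type clash with plain Avro; the field schemas are illustrative stand-ins for whatever output.schema.infer.value actually derives, and this check differs in detail from Schema Registry's own compatibility checker:

    import org.apache.avro.Schema;
    import org.apache.avro.SchemaBuilder;
    import org.apache.avro.SchemaCompatibility;

    public class SchemaClashSketch {
      public static void main(final String[] args) {
        // Roughly what docs 4/5 imply for field h: a record with named fields.
        Schema earlierH =
            SchemaBuilder.record("h").fields().requiredInt("h1").requiredString("h2").endRecord();
        // What {_id: 10, h: ['1']} implies for field h: an array of strings.
        Schema laterH = SchemaBuilder.array().items().stringType();

        // Prints INCOMPATIBLE: a record cannot be read as an array or vice
        // versa, so registration under the same subject is rejected.
        System.out.println(
            SchemaCompatibility.checkReaderWriterCompatibility(laterH, earlierH).getType());
      }
    }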