
Commit a52042c

Merge branch '0.6.x' into pr_merge_from_0_5_x_to_0_6_x

2 parents: 3d9e41d + 81c02d6

31 files changed (+16,450 −428 lines)

.github/CODEOWNERS

Lines changed: 2 additions & 0 deletions

@@ -1 +1,3 @@
 * @confluentinc/connect
+* @confluentinc/devx
+* @confluentinc/integration-architecture

Jenkinsfile

Lines changed: 8 additions & 0 deletions

@@ -0,0 +1,8 @@
+#!/usr/bin/env groovy
+/*
+ * Copyright [2018 - 2018] Confluent Inc.
+ */
+common {
+  slackChannel = '#connect-warn'
+  nodeLabel = 'docker-debian-jdk8'
+}

pom.xml

Lines changed: 19 additions & 9 deletions

@@ -20,12 +20,12 @@
   <parent>
     <groupId>io.confluent</groupId>
     <artifactId>common-parent</artifactId>
-    <version>7.5.0</version>
+    <version>7.5.2</version>
   </parent>
 
   <groupId>io.confluent.kafka.connect</groupId>
   <artifactId>kafka-connect-datagen</artifactId>
-  <version>0.5.5-SNAPSHOT</version>
+  <version>0.6.4-SNAPSHOT</version>
   <packaging>jar</packaging>
 
 
@@ -35,12 +35,13 @@
     <confluent.avro.generator.version>0.4.1</confluent.avro.generator.version>
     <junit.version>4.12</junit.version>
     <guava.version>32.0.1-jre</guava.version>
-    <avro.version>1.11.3</avro.version>
     <jackson.version>2.15.2</jackson.version>
     <maven.release.plugin.version>2.5.3</maven.release.plugin.version>
     <!-- temporary fix by pinning the version until we upgrade to a version of common that contains this or newer version.
         See https://github.com/confluentinc/common/pull/332 for details -->
     <dependency.check.version>6.1.6</dependency.check.version>
+    <hamcrest.version>2.2</hamcrest.version>
+    <snappy.java.version>1.1.10.5</snappy.java.version>
   </properties>
 
   <name>kafka-connect-datagen</name>
@@ -65,7 +66,7 @@
     <connection>scm:git:git://github.com/confluentinc/kafka-connect-datagen.git</connection>
     <developerConnection>scm:git:git@github.com:confluentinc/kafka-connect-datagen.git</developerConnection>
     <url>https://github.com/confluentinc/kafka-connect-datagen</url>
-    <tag>0.5.x</tag>
+    <tag>0.6.x</tag>
   </scm>
 
   <developers>
@@ -115,7 +116,7 @@
       <dependency>
         <groupId>org.xerial.snappy</groupId>
         <artifactId>snappy-java</artifactId>
-        <version>1.1.10.3</version>
+        <version>${snappy.java.version}</version>
       </dependency>
     </dependencies>
   </dependencyManagement>
@@ -142,12 +143,21 @@
       </exclusions>
     </dependency>
     <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <version>${junit.version}</version>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+      <version>${guava.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.hamcrest</groupId>
+      <artifactId>hamcrest</artifactId>
+      <version>${hamcrest.version}</version>
       <scope>test</scope>
     </dependency>
-
     <dependency>
      <groupId>io.confluent.avro</groupId>
      <artifactId>avro-random-generator</artifactId>

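Note on the test-dependency changes above: JUnit 4 is replaced by JUnit Jupiter and Hamcrest is added at test scope. The following is a hypothetical sketch, not part of this commit, of a test written against that stack; the class name and assertion are illustrative and only assume the Quickstart.configValues() accessor introduced later in this diff.

// Hypothetical test sketch, not included in this commit. It assumes the
// JUnit Jupiter and Hamcrest dependencies declared above and the
// Quickstart.configValues() accessor added elsewhere in this diff.
package io.confluent.kafka.connect.datagen;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasItem;

import org.junit.jupiter.api.Test;

class QuickstartConfigValuesTest {

  @Test
  void configValuesContainLowerCaseEnumNames() {
    // configValues() exposes the lower-cased enum names used for config validation.
    assertThat(Quickstart.configValues(), hasItem("orders"));
  }
}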
src/main/java/io/confluent/kafka/connect/datagen/DatagenConnectorConfig.java

Lines changed: 2 additions & 3 deletions

@@ -17,7 +17,6 @@
 package io.confluent.kafka.connect.datagen;
 
 import com.google.common.collect.ImmutableList;
-import io.confluent.kafka.connect.datagen.DatagenTask.Quickstart;
 import java.util.List;
 import java.util.Map;
 import org.apache.avro.Schema;
@@ -163,11 +162,11 @@ public void ensureValid(String name, Object value) {
       if (((String) value).isEmpty()) {
         return;
       }
-      if (!Quickstart.configValues.contains(((String) value).toLowerCase())) {
+      if (!Quickstart.configValues().contains(((String) value).toLowerCase())) {
         throw new ConfigException(String.format(
             "%s must be one out of %s",
             name,
-            String.join(",", DatagenTask.Quickstart.configValues)
+            String.join(",", Quickstart.configValues())
         ));
       }
     }

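For reference, a minimal standalone sketch (not in this commit) of how the updated validator behaves: the supplied quickstart value is lower-cased and checked against Quickstart.configValues(), and a ConfigException is raised otherwise. The class name and the sample value are hypothetical.

// Illustrative sketch only (not in this commit): reproduces the case-insensitive
// quickstart validation shown in the diff above.
package io.confluent.kafka.connect.datagen;

import org.apache.kafka.common.config.ConfigException;

public class QuickstartValidationSketch {
  public static void main(String[] args) {
    String value = "Pageviews"; // assumed user-supplied "quickstart" config value
    if (!Quickstart.configValues().contains(value.toLowerCase())) {
      // Mirrors the error raised by ensureValid when the value is not a known quickstart.
      throw new ConfigException(String.format(
          "%s must be one out of %s",
          "quickstart",
          String.join(",", Quickstart.configValues())));
    }
    System.out.println("valid quickstart: " + value.toLowerCase());
  }
}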
src/main/java/io/confluent/kafka/connect/datagen/DatagenTask.java

Lines changed: 0 additions & 143 deletions

@@ -21,19 +21,12 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Random;
-import java.util.Set;
-import java.util.stream.Collectors;
-import org.apache.avro.generic.GenericData.Record;
 import org.apache.avro.generic.GenericRecord;
-import org.apache.kafka.connect.data.Field;
 import org.apache.kafka.connect.data.Schema;
 import org.apache.kafka.connect.data.SchemaAndValue;
-import org.apache.kafka.connect.data.SchemaBuilder;
-import org.apache.kafka.connect.data.Struct;
 import org.apache.kafka.connect.errors.ConnectException;
 import org.apache.kafka.connect.header.ConnectHeaders;
 import org.apache.kafka.connect.source.SourceRecord;
@@ -67,49 +60,6 @@ public class DatagenTask extends SourceTask {
   private long taskGeneration;
   private final Random random = new Random();
 
-  protected enum Quickstart {
-    CLICKSTREAM_CODES("clickstream_codes_schema.avro", "code"),
-    CLICKSTREAM("clickstream_schema.avro", "ip"),
-    CLICKSTREAM_USERS("clickstream_users_schema.avro", "user_id"),
-    ORDERS("orders_schema.avro", "orderid"),
-    RATINGS("ratings_schema.avro", "rating_id"),
-    USERS("users_schema.avro", "userid"),
-    USERS_("users_array_map_schema.avro", "userid"),
-    PAGEVIEWS("pageviews_schema.avro", "viewtime"),
-    STOCK_TRADES("stock_trades_schema.avro", "symbol"),
-    INVENTORY("inventory.avro", "id"),
-    PRODUCT("product.avro", "id"),
-    PURCHASES("purchase.avro", "id"),
-    TRANSACTIONS("transactions.avro", "transaction_id"),
-    STORES("stores.avro", "store_id"),
-    CREDIT_CARDS("credit_cards.avro", "card_id"),
-    CAMPAIGN_FINANCE("campaign_finance.avro", "candidate_id");
-
-    static final Set<String> configValues = new HashSet<>();
-
-    static {
-      for (Quickstart q : Quickstart.values()) {
-        configValues.add(q.name().toLowerCase());
-      }
-    }
-
-    private final String schemaFilename;
-    private final String keyName;
-
-    Quickstart(String schemaFilename, String keyName) {
-      this.schemaFilename = schemaFilename;
-      this.keyName = keyName;
-    }
-
-    public String getSchemaFilename() {
-      return schemaFilename;
-    }
-
-    public String getSchemaKeyField() {
-      return keyName;
-    }
-  }
-
   @Override
   public String version() {
     return VersionUtil.getVersion();
@@ -176,19 +126,6 @@ public List<SourceRecord> poll() throws InterruptedException {
     }
     final GenericRecord randomAvroMessage = (GenericRecord) generatedObject;
 
-    final List<Object> genericRowValues = new ArrayList<>();
-    for (org.apache.avro.Schema.Field field : avroSchema.getFields()) {
-      final Object value = randomAvroMessage.get(field.name());
-      if (value instanceof Record) {
-        final Record record = (Record) value;
-        final Object ksqlValue = avroData.toConnectData(record.getSchema(), record).value();
-        Object optionValue = getOptionalValue(ksqlSchema.field(field.name()).schema(), ksqlValue);
-        genericRowValues.add(optionValue);
-      } else {
-        genericRowValues.add(value);
-      }
-    }
-
     // Key
     SchemaAndValue key = new SchemaAndValue(DEFAULT_KEY_SCHEMA, null);
     if (!schemaKeyField.isEmpty()) {
@@ -248,84 +185,4 @@ public List<SourceRecord> poll() throws InterruptedException {
   @Override
   public void stop() {
   }
-
-  private org.apache.kafka.connect.data.Schema getOptionalSchema(
-      final org.apache.kafka.connect.data.Schema schema
-  ) {
-    switch (schema.type()) {
-      case BOOLEAN:
-        return org.apache.kafka.connect.data.Schema.OPTIONAL_BOOLEAN_SCHEMA;
-      case INT32:
-        return org.apache.kafka.connect.data.Schema.OPTIONAL_INT32_SCHEMA;
-      case INT64:
-        return org.apache.kafka.connect.data.Schema.OPTIONAL_INT64_SCHEMA;
-      case FLOAT64:
-        return org.apache.kafka.connect.data.Schema.OPTIONAL_FLOAT64_SCHEMA;
-      case STRING:
-        return org.apache.kafka.connect.data.Schema.OPTIONAL_STRING_SCHEMA;
-      case ARRAY:
-        return SchemaBuilder.array(getOptionalSchema(schema.valueSchema())).optional().build();
-      case MAP:
-        return SchemaBuilder.map(
-            getOptionalSchema(schema.keySchema()),
-            getOptionalSchema(schema.valueSchema())
-        ).optional().build();
-      case STRUCT:
-        final SchemaBuilder schemaBuilder = SchemaBuilder.struct();
-        for (Field field : schema.fields()) {
-          schemaBuilder.field(
-              field.name(),
-              getOptionalSchema(field.schema())
-          );
-        }
-        return schemaBuilder.optional().build();
-      default:
-        throw new ConnectException("Unsupported type: " + schema);
-    }
-  }
-
-  private Object getOptionalValue(
-      final org.apache.kafka.connect.data.Schema schema,
-      final Object value
-  ) {
-    if (value == null) {
-      return null;
-    }
-
-    switch (schema.type()) {
-      case BOOLEAN:
-      case INT32:
-      case INT64:
-      case FLOAT64:
-      case STRING:
-        return value;
-      case ARRAY:
-        final List<?> list = (List<?>) value;
-        return list.stream()
-            .map(listItem -> getOptionalValue(schema.valueSchema(), listItem))
-            .collect(Collectors.toList());
-      case MAP:
-        final Map<?, ?> map = (Map<?, ?>) value;
-        return map.entrySet().stream()
-            .collect(Collectors.toMap(
-                k -> getOptionalValue(schema.keySchema(), k),
-                v -> getOptionalValue(schema.valueSchema(), v)
-            ));
-      case STRUCT:
-        final Struct struct = (Struct) value;
-        final Struct optionalStruct = new Struct(getOptionalSchema(schema));
-        for (Field field : schema.fields()) {
-          optionalStruct.put(
-              field.name(),
-              getOptionalValue(
                  field.schema(),
                  struct.get(field.name())
-              )
-          );
-        }
-        return optionalStruct;
-      default:
-        throw new ConnectException("Invalid value schema: " + schema + ", value = " + value);
-    }
-  }
 }
src/main/java/io/confluent/kafka/connect/datagen/Quickstart.java (new file)

Lines changed: 94 additions & 0 deletions

@@ -0,0 +1,94 @@
+/*
+ * Copyright 2018 Confluent Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.confluent.kafka.connect.datagen;
+
+import com.google.common.collect.ImmutableSet;
+
+import java.util.Arrays;
+import java.util.Set;
+
+enum Quickstart {
+  CLICKSTREAM_CODES("clickstream_codes_schema.avro", "code"),
+  CLICKSTREAM("clickstream_schema.avro", "ip"),
+  CLICKSTREAM_USERS("clickstream_users_schema.avro", "user_id"),
+  ORDERS("orders_schema.avro", "orderid"),
+  RATINGS("ratings_schema.avro", "rating_id"),
+  USERS("users_schema.avro", "userid"),
+  USERS_("users_array_map_schema.avro", "userid"),
+  PAGEVIEWS("pageviews_schema.avro", "viewtime"),
+  STOCK_TRADES("stock_trades_schema.avro", "symbol"),
+  INVENTORY("inventory.avro", "id"),
+  PRODUCT("product.avro", "id"),
+  PURCHASES("purchase.avro", "id"),
+  TRANSACTIONS("transactions.avro", "transaction_id"),
+  STORES("stores.avro", "store_id"),
+  CREDIT_CARDS("credit_cards.avro", "card_id"),
+  CAMPAIGN_FINANCE("campaign_finance.avro", "candidate_id"),
+  FLEET_MGMT_DESCRIPTION("fleet_mgmt_description.avro", "vehicle_id"),
+  FLEET_MGMT_LOCATION("fleet_mgmt_location.avro", "vehicle_id"),
+  FLEET_MGMT_SENSORS("fleet_mgmt_sensors.avro", "vehicle_id"),
+  PIZZA_ORDERS("pizza_orders.avro", "store_id"),
+  PIZZA_ORDERS_COMPLETED("pizza_orders_completed.avro", "store_id"),
+  PIZZA_ORDERS_CANCELLED("pizza_orders_cancelled.avro", "store_id"),
+  INSURANCE_OFFERS("insurance_offers.avro", "offer_id"),
+  INSURANCE_CUSTOMERS("insurance_customers.avro", "customer_id"),
+  INSURANCE_CUSTOMER_ACTIVITY("insurance_customer_activity.avro", "activity_id"),
+  GAMING_GAMES("gaming_games.avro", "id"),
+  GAMING_PLAYERS("gaming_players.avro", "player_id"),
+  GAMING_PLAYER_ACTIVITY("gaming_player_activity.avro", "player_id"),
+  PAYROLL_EMPLOYEE("payroll_employee.avro", "employee_id"),
+  PAYROLL_EMPLOYEE_LOCATION("payroll_employee_location.avro", "employee_id"),
+  PAYROLL_BONUS("payroll_bonus.avro", "employee_id"),
+  SYSLOG_LOGS("syslog_logs.avro", "host"),
+  DEVICE_INFORMATION("device_information.avro", "device_ip"),
+  SIEM_LOGS("siem_logs.avro", "hostname"),
+  SHOES("shoes.avro", "id"),
+  SHOE_CUSTOMERS("shoe_customers.avro", "id"),
+  SHOE_ORDERS("shoe_orders.avro", "order_id"),
+  SHOE_CLICKSTREAM("shoe_clickstream.avro", "product_id");
+
+  private static final Set<String> configValues;
+
+  static {
+    ImmutableSet.Builder<String> immutableSetBuilder = ImmutableSet.builder();
+    Arrays.stream(Quickstart.values())
+        .map(Quickstart::name)
+        .map(String::toLowerCase)
+        .forEach(immutableSetBuilder::add);
+    configValues = immutableSetBuilder.build();
+  }
+
+  private final String schemaFilename;
+  private final String keyName;
+
+  Quickstart(String schemaFilename, String keyName) {
+    this.schemaFilename = schemaFilename;
+    this.keyName = keyName;
+  }
+
+  public static Set<String> configValues() {
+    return configValues;
+  }
+
+  public String getSchemaFilename() {
+    return schemaFilename;
+  }
+
+  public String getSchemaKeyField() {
+    return keyName;
+  }
+}

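A short hypothetical usage sketch (not part of the diff) of the extracted enum: a quickstart name from configuration is resolved to its bundled schema file and default key field. The class name and the sample value are illustrative only.

// Hypothetical sketch, not part of this commit: resolving a configured
// quickstart name to its schema file and key field via the new enum.
package io.confluent.kafka.connect.datagen;

public class QuickstartLookupSketch {
  public static void main(String[] args) {
    String configured = "pizza_orders"; // assumed user-supplied quickstart value
    Quickstart quickstart = Quickstart.valueOf(configured.toUpperCase());
    System.out.println("schema file: " + quickstart.getSchemaFilename()); // pizza_orders.avro
    System.out.println("key field: " + quickstart.getSchemaKeyField());   // store_id
  }
}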