
Commit 32a2e75

Haarolean and workshur authored
Allow producing messages with an empty key/value
* [ISSUE 1046] UI allows to submit message with empty key & value (#1264)
* [ISSUE 1046] UI allows to submit message with empty key & value
* Update Contract (cherry picked from commit 4b730eb)
* Backend fix
* Refactoring
* Fix nullable & checkstyle
* Fix jsonnullable get
* Remove unnecessary check and add a test

Co-authored-by: Oleg Shur <[email protected]>
1 parent 5a487e4 commit 32a2e75

File tree

21 files changed: +342 -489 lines

kafka-ui-api/src/main/java/com/provectus/kafka/ui/config/Config.java

Lines changed: 7 additions & 0 deletions
@@ -1,5 +1,6 @@
 package com.provectus.kafka.ui.config;
 
+import com.fasterxml.jackson.databind.Module;
 import com.provectus.kafka.ui.model.JmxConnectionInfo;
 import com.provectus.kafka.ui.util.JmxPoolFactory;
 import java.util.Collections;
@@ -9,6 +10,7 @@
 import org.apache.commons.pool2.KeyedObjectPool;
 import org.apache.commons.pool2.impl.GenericKeyedObjectPool;
 import org.apache.commons.pool2.impl.GenericKeyedObjectPoolConfig;
+import org.openapitools.jackson.nullable.JsonNullableModule;
 import org.springframework.beans.factory.ObjectProvider;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.boot.autoconfigure.web.ServerProperties;
@@ -78,4 +80,9 @@ public WebClient webClient(
         .codecs(c -> c.defaultCodecs().maxInMemorySize((int) maxBuffSize.toBytes()))
         .build();
   }
+
+  @Bean
+  public JsonNullableModule jsonNullableModule() {
+    return new JsonNullableModule();
+  }
 }
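For context on why this bean matters (a sketch, not part of the commit): registering JsonNullableModule lets Jackson bind the JsonNullable<T> wrappers that the OpenAPI generator emits for nullable fields, so a field that was omitted can be distinguished from one sent as an explicit null. The Payload class and JSON snippets below are illustrative:

import com.fasterxml.jackson.databind.ObjectMapper;
import org.openapitools.jackson.nullable.JsonNullable;
import org.openapitools.jackson.nullable.JsonNullableModule;

public class JsonNullableDemo {
  // Minimal sketch: JsonNullableModule lets Jackson bind JsonNullable<T>
  // fields, distinguishing an omitted field from an explicit null.
  static class Payload {
    public JsonNullable<String> key = JsonNullable.undefined();
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper().registerModule(new JsonNullableModule());

    Payload omitted = mapper.readValue("{}", Payload.class);
    Payload explicitNull = mapper.readValue("{\"key\":null}", Payload.class);

    System.out.println(omitted.key.isPresent());      // false - field not sent
    System.out.println(explicitNull.key.isPresent()); // true  - sent as null
  }
}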

kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/DeserializationService.java

Lines changed: 1 addition & 1 deletion
@@ -39,7 +39,7 @@ private RecordSerDe createRecordDeserializerForCluster(KafkaCluster cluster) {
           objectMapper);
     } else {
       log.info("Using SchemaRegistryAwareRecordSerDe for cluster '{}'", cluster.getName());
-      return new SchemaRegistryAwareRecordSerDe(cluster);
+      return new SchemaRegistryAwareRecordSerDe(cluster, objectMapper);
     }
   } catch (Throwable e) {
     throw new RuntimeException("Can't init deserializer", e);

kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/SimpleRecordSerDe.java

Lines changed: 3 additions & 1 deletion
@@ -13,6 +13,8 @@
 
 public class SimpleRecordSerDe implements RecordSerDe {
 
+  private static final ObjectMapper objectMapper = new ObjectMapper();
+
   @Override
   public DeserializedKeyValue deserialize(ConsumerRecord<Bytes, Bytes> msg) {
     var builder = DeserializedKeyValue.builder();
@@ -45,7 +47,7 @@ public TopicMessageSchemaDTO getTopicSchema(String topic) {
     final MessageSchemaDTO schema = new MessageSchemaDTO()
         .name("unknown")
         .source(MessageSchemaDTO.SourceEnum.UNKNOWN)
-        .schema(JsonSchema.stringSchema().toJson(new ObjectMapper()));
+        .schema(JsonSchema.stringSchema().toJson(objectMapper));
     return new TopicMessageSchemaDTO()
         .key(schema)
         .value(schema);
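A side note on the hoisted mapper (general Jackson practice rather than anything specific to this commit): ObjectMapper is thread-safe once configured, so one static instance can serve every getTopicSchema call instead of being rebuilt per invocation. A generic sketch of the pattern:

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public class MapperReuse {
  // Configure once, reuse everywhere: ObjectMapper is safe for concurrent
  // use after configuration, and instances are expensive to create.
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  static String toJson(Object value) throws JsonProcessingException {
    return OBJECT_MAPPER.writeValueAsString(value); // no per-call allocation
  }
}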

kafka-ui-api/src/main/java/com/provectus/kafka/ui/serde/schemaregistry/SchemaRegistryAwareRecordSerDe.java

Lines changed: 41 additions & 38 deletions
@@ -10,7 +10,6 @@
 import com.provectus.kafka.ui.model.MessageSchemaDTO;
 import com.provectus.kafka.ui.model.TopicMessageSchemaDTO;
 import com.provectus.kafka.ui.serde.RecordSerDe;
-import com.provectus.kafka.ui.util.ConsumerRecordUtil;
 import com.provectus.kafka.ui.util.jsonschema.AvroJsonSchemaConverter;
 import com.provectus.kafka.ui.util.jsonschema.JsonSchema;
 import com.provectus.kafka.ui.util.jsonschema.ProtobufSchemaConverter;
@@ -27,7 +26,6 @@
 import io.confluent.kafka.schemaregistry.protobuf.ProtobufSchemaProvider;
 import java.net.URI;
 import java.nio.ByteBuffer;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -47,31 +45,32 @@ public class SchemaRegistryAwareRecordSerDe implements RecordSerDe {
 
   private static final int CLIENT_IDENTITY_MAP_CAPACITY = 100;
 
+  private static final StringMessageFormatter stringFormatter = new StringMessageFormatter();
+  private static final ProtobufSchemaConverter protoSchemaConverter = new ProtobufSchemaConverter();
+  private static final AvroJsonSchemaConverter avroSchemaConverter = new AvroJsonSchemaConverter();
+
   private final KafkaCluster cluster;
   private final Map<String, MessageFormatter> valueFormatMap = new ConcurrentHashMap<>();
   private final Map<String, MessageFormatter> keyFormatMap = new ConcurrentHashMap<>();
 
   @Nullable
   private final SchemaRegistryClient schemaRegistryClient;
-
   @Nullable
   private final AvroMessageFormatter avroFormatter;
-
   @Nullable
   private final ProtobufMessageFormatter protobufFormatter;
-
   @Nullable
   private final JsonSchemaMessageFormatter jsonSchemaMessageFormatter;
 
-  private final StringMessageFormatter stringFormatter = new StringMessageFormatter();
-  private final ProtobufSchemaConverter protoSchemaConverter = new ProtobufSchemaConverter();
-  private final AvroJsonSchemaConverter avroSchemaConverter = new AvroJsonSchemaConverter();
-  private final ObjectMapper objectMapper = new ObjectMapper();
+  private ObjectMapper objectMapper;
 
-  private static SchemaRegistryClient createSchemaRegistryClient(KafkaCluster cluster) {
+  private SchemaRegistryClient createSchemaRegistryClient(KafkaCluster cluster,
+                                                          ObjectMapper objectMapper) {
     if (cluster.getSchemaRegistry() == null) {
       throw new ValidationException("schemaRegistry is not specified");
     }
+    this.objectMapper = objectMapper;
+
     List<SchemaProvider> schemaProviders =
         List.of(new AvroSchemaProvider(), new ProtobufSchemaProvider(), new JsonSchemaProvider());
 
@@ -97,10 +96,10 @@ private static SchemaRegistryClient createSchemaRegistryClient(KafkaCluster clus
     );
   }
 
-  public SchemaRegistryAwareRecordSerDe(KafkaCluster cluster) {
+  public SchemaRegistryAwareRecordSerDe(KafkaCluster cluster, ObjectMapper objectMapper) {
     this.cluster = cluster;
    this.schemaRegistryClient = cluster.getSchemaRegistry() != null
-        ? createSchemaRegistryClient(cluster)
+        ? createSchemaRegistryClient(cluster, objectMapper)
         : null;
     if (schemaRegistryClient != null) {
       this.avroFormatter = new AvroMessageFormatter(schemaRegistryClient);
@@ -147,41 +146,45 @@ public ProducerRecord<byte[], byte[]> serialize(String topic,
                                                   @Nullable String key,
                                                   @Nullable String data,
                                                   @Nullable Integer partition) {
-    final Optional<SchemaMetadata> maybeValueSchema = getSchemaBySubject(topic, false);
     final Optional<SchemaMetadata> maybeKeySchema = getSchemaBySubject(topic, true);
+    final Optional<SchemaMetadata> maybeValueSchema = getSchemaBySubject(topic, false);
 
-    final byte[] serializedValue = data != null
-        ? serialize(maybeValueSchema, topic, data, false)
-        : null;
-    final byte[] serializedKey = key != null
-        ? serialize(maybeKeySchema, topic, key, true)
-        : null;
+    final byte[] serializedKey = maybeKeySchema.isPresent()
+        ? serialize(maybeKeySchema.get(), topic, key, true)
+        : serialize(key);
+
+    final byte[] serializedValue = maybeValueSchema.isPresent()
+        ? serialize(maybeValueSchema.get(), topic, data, false)
+        : serialize(data);
 
     return new ProducerRecord<>(topic, partition, serializedKey, serializedValue);
   }
 
   @SneakyThrows
-  private byte[] serialize(
-      Optional<SchemaMetadata> maybeSchema, String topic, String value, boolean isKey) {
-    if (maybeSchema.isPresent()) {
-      final SchemaMetadata schema = maybeSchema.get();
-
-      MessageReader<?> reader;
-      if (schema.getSchemaType().equals(MessageFormat.PROTOBUF.name())) {
-        reader = new ProtobufMessageReader(topic, isKey, schemaRegistryClient, schema);
-      } else if (schema.getSchemaType().equals(MessageFormat.AVRO.name())) {
-        reader = new AvroMessageReader(topic, isKey, schemaRegistryClient, schema);
-      } else if (schema.getSchemaType().equals(MessageFormat.JSON.name())) {
-        reader = new JsonSchemaMessageReader(topic, isKey, schemaRegistryClient, schema);
-      } else {
-        throw new IllegalStateException("Unsupported schema type: " + schema.getSchemaType());
-      }
-
-      return reader.read(value);
+  private byte[] serialize(SchemaMetadata schema, String topic, String value, boolean isKey) {
+    if (value == null) {
+      return null;
+    }
+    MessageReader<?> reader;
+    if (schema.getSchemaType().equals(MessageFormat.PROTOBUF.name())) {
+      reader = new ProtobufMessageReader(topic, isKey, schemaRegistryClient, schema);
+    } else if (schema.getSchemaType().equals(MessageFormat.AVRO.name())) {
+      reader = new AvroMessageReader(topic, isKey, schemaRegistryClient, schema);
+    } else if (schema.getSchemaType().equals(MessageFormat.JSON.name())) {
+      reader = new JsonSchemaMessageReader(topic, isKey, schemaRegistryClient, schema);
     } else {
-      // if no schema provided serialize input as raw string
-      return value.getBytes();
+      throw new IllegalStateException("Unsupported schema type: " + schema.getSchemaType());
+    }
+
+    return reader.read(value);
+  }
+
+  private byte[] serialize(String value) {
+    if (value == null) {
+      return null;
    }
+    // if no schema provided serialize input as raw string
+    return value.getBytes();
  }
 
   @Override
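The net effect of the split: the schema lookup, not the nullability of the input, now chooses the serialization path, and a null key or value passes through as a null byte array. A hedged usage sketch (the class and helper name are illustrative, not from the commit):

import com.provectus.kafka.ui.serde.RecordSerDe;
import org.apache.kafka.clients.producer.ProducerRecord;

public class TombstoneSketch {
  // Assumed usage: a null value now flows through serialize(...) and comes
  // back as a null payload, which Kafka treats as a tombstone on compacted
  // topics instead of being rejected up front.
  static ProducerRecord<byte[], byte[]> tombstone(RecordSerDe serde, String topic, String key) {
    return serde.serialize(topic, key, /* data */ null, /* partition */ null);
  }
}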

kafka-ui-api/src/main/java/com/provectus/kafka/ui/service/MessagesService.java

Lines changed: 2 additions & 5 deletions
@@ -81,9 +81,6 @@ private Mono<Map<TopicPartition, Long>> offsetsForDeletion(KafkaCluster cluster,
 
   public Mono<RecordMetadata> sendMessage(KafkaCluster cluster, String topic,
                                           CreateTopicMessageDTO msg) {
-    if (msg.getKey() == null && msg.getContent() == null) {
-      throw new ValidationException("Invalid message: both key and value can't be null");
-    }
     if (msg.getPartition() != null
         && msg.getPartition() > metricsCache.get(cluster).getTopicDescriptions()
             .get(topic).partitions().size() - 1) {
@@ -100,8 +97,8 @@ public Mono<RecordMetadata> sendMessage(KafkaCluster cluster, String topic,
     try (KafkaProducer<byte[], byte[]> producer = new KafkaProducer<>(properties)) {
       ProducerRecord<byte[], byte[]> producerRecord = serde.serialize(
           topic,
-          msg.getKey(),
-          msg.getContent(),
+          msg.getKey().orElse(null),
+          msg.getContent().orElse(null),
          msg.getPartition()
       );
       producerRecord = new ProducerRecord<>(
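The deleted guard had rejected messages whose key and content were both null; the JsonNullable-wrapped accessors make that case legal and collapse "field omitted" and "field explicitly null" into the same plain null on the way into the serde. A small sketch of the accessor semantics (standard jackson-databind-nullable behavior, names illustrative):

import org.openapitools.jackson.nullable.JsonNullable;

public class OrElseDemo {
  public static void main(String[] args) {
    JsonNullable<String> omitted = JsonNullable.undefined(); // key never sent
    JsonNullable<String> explicit = JsonNullable.of(null);   // "key": null

    // Both collapse to null on the way into serde.serialize(...):
    System.out.println(omitted.orElse(null));  // null
    System.out.println(explicit.orElse(null)); // null
  }
}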

kafka-ui-api/src/test/java/com/provectus/kafka/ui/serde/SchemaRegistryRecordDeserializerTest.java

Lines changed: 1 addition & 1 deletion
@@ -17,7 +17,7 @@ class SchemaRegistryRecordDeserializerTest {
       new SchemaRegistryAwareRecordSerDe(
           KafkaCluster.builder()
               .schemaNameTemplate("%s-value")
-              .build()
+              .build(), new ObjectMapper()
       );
 
   @Test

kafka-ui-api/src/test/java/com/provectus/kafka/ui/service/SendAndReadTests.java

Lines changed: 14 additions & 0 deletions
@@ -465,6 +465,20 @@ void topicMessageMetadataJson() {
         });
   }
 
+  @Test
+  void noKeyAndNoContentPresentTest() {
+    new SendAndReadSpec()
+        .withMsgToSend(
+            new CreateTopicMessageDTO()
+                .key(null)
+                .content(null)
+        )
+        .doAssert(polled -> {
+          assertThat(polled.getKey()).isNull();
+          assertThat(polled.getContent()).isNull();
+        });
+  }
+
   @SneakyThrows
   private void assertJsonEqual(String actual, String expected) {
     var mapper = new ObjectMapper();

kafka-ui-contract/src/main/resources/swagger/kafka-ui-api.yaml

Lines changed: 5 additions & 1 deletion
@@ -1870,12 +1870,16 @@ components:
           type: integer
         key:
           type: string
+          nullable: true
         headers:
           type: object
           additionalProperties:
             type: string
         content:
           type: string
+          nullable: true
+      required:
+        - partition
 
     TopicMessageSchema:
       type: object
@@ -2635,4 +2639,4 @@ components:
         - DELETE
         - COMPACT
         - COMPACT_DELETE
-        - UNKNOWN
+        - UNKNOWN
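With nullable: true on key and content and only partition required, the contract now admits bodies that the old validation rejected. A hypothetical construction through the generated DTO (fluent setters assumed from the generator; this mirrors the new noKeyAndNoContentPresentTest above):

import com.provectus.kafka.ui.model.CreateTopicMessageDTO;

public class EmptyMessageExample {
  // Hypothetical construction: the DTO form of a body like
  //   { "partition": 0, "key": null, "content": null }
  // which the updated contract now accepts.
  static CreateTopicMessageDTO emptyKeyAndValue() {
    return new CreateTopicMessageDTO()
        .partition(0)
        .key(null)      // stored as JsonNullable.of(null)
        .content(null); // stored as JsonNullable.of(null)
  }
}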

kafka-ui-react-app/src/components/Alerts/Alert.styled.ts

Lines changed: 8 additions & 2 deletions
@@ -3,7 +3,7 @@ import styled from 'styled-components';
 
 export const Alert = styled.div<{ $type: AlertType }>`
   background-color: ${({ $type, theme }) => theme.alert.color[$type]};
-  width: 400px;
+  min-width: 400px;
   min-height: 64px;
   border-radius: 8px;
   padding: 12px;
@@ -20,8 +20,14 @@ export const Title = styled.div`
   font-size: 14px;
 `;
 
-export const Message = styled.p`
+export const Message = styled.div`
   font-weight: normal;
   font-size: 14px;
   margin: 3px 0;
+
+  ol,
+  ul {
+    padding-left: 25px;
+    list-style: auto;
+  }
 `;

kafka-ui-react-app/src/components/App.styled.ts

Lines changed: 1 addition & 1 deletion
@@ -168,7 +168,7 @@ export const AlertsContainer = styled.div`
   width: 500px;
   position: fixed;
   bottom: 15px;
-  left: 15px;
+  right: 15px;
   z-index: 1000;
 
   @media screen and (max-width: 1023px) {
