diff --git a/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java b/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java
index 23be86369..310c7283d 100644
--- a/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java
+++ b/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java
@@ -70,6 +70,7 @@ public static class Cluster {
     List<@Valid Masking> masking;
     AuditProperties audit;
+
   }
 
   @Data
@@ -113,6 +114,7 @@ public static class ConnectCluster {
   public static class SchemaRegistryAuth {
     String username;
     String password;
+    String bearerAuthCustomProviderClass;
   }
 
   @Data
diff --git a/api/src/main/java/io/kafbat/ui/controller/SchemasController.java b/api/src/main/java/io/kafbat/ui/controller/SchemasController.java
index 6f73d3525..bf1b09a9b 100644
--- a/api/src/main/java/io/kafbat/ui/controller/SchemasController.java
+++ b/api/src/main/java/io/kafbat/ui/controller/SchemasController.java
@@ -2,6 +2,8 @@
 import io.kafbat.ui.api.SchemasApi;
 import io.kafbat.ui.exception.ValidationException;
+import io.kafbat.ui.mapper.GcpKafkaSrMapper;
+import io.kafbat.ui.mapper.GcpKafkaSrMapperImpl;
 import io.kafbat.ui.mapper.KafkaSrMapper;
 import io.kafbat.ui.mapper.KafkaSrMapperImpl;
 import io.kafbat.ui.model.CompatibilityCheckResponseDTO;
@@ -12,6 +14,7 @@
 import io.kafbat.ui.model.SchemaSubjectsResponseDTO;
 import io.kafbat.ui.model.rbac.AccessContext;
 import io.kafbat.ui.model.rbac.permission.SchemaAction;
+import io.kafbat.ui.service.GcpSchemaRegistryService;
 import io.kafbat.ui.service.SchemaRegistryService;
 import io.kafbat.ui.service.mcp.McpTool;
 import java.util.List;
@@ -34,18 +37,22 @@ public class SchemasController extends AbstractController implements SchemasApi,
   private static final Integer DEFAULT_PAGE_SIZE = 25;
 
   private final KafkaSrMapper kafkaSrMapper = new KafkaSrMapperImpl();
+  private final GcpKafkaSrMapper gcpKafkaSrMapper = new GcpKafkaSrMapperImpl();
 
   private final SchemaRegistryService schemaRegistryService;
+  private final GcpSchemaRegistryService gcpSchemaRegistryService;
 
   @Override
   protected KafkaCluster getCluster(String clusterName) {
     var c = super.getCluster(clusterName);
-    if (c.getSchemaRegistryClient() == null) {
+    if (c.getSchemaRegistryClient() == null && c.getGcpSchemaRegistryClient() == null) {
       throw new ValidationException("Schema Registry is not set for cluster " + clusterName);
     }
     return c;
   }
 
+
+
   @Override
   public Mono<ResponseEntity<CompatibilityCheckResponseDTO>> checkSchemaCompatibility(
       String clusterName, String subject, @Valid Mono<NewSchemaSubjectDTO> newSchemaSubjectMono,
       ServerWebExchange exchange) {
@@ -57,13 +64,16 @@ public Mono<ResponseEntity<CompatibilityCheckResponseDTO>> checkSchemaCompatibil
         .build();
 
     return validateAccess(context).then(
-        newSchemaSubjectMono.flatMap(subjectDTO ->
-            schemaRegistryService.checksSchemaCompatibility(
-                getCluster(clusterName),
-                subject,
-                kafkaSrMapper.fromDto(subjectDTO)
-            ))
-            .map(kafkaSrMapper::toDto)
+        newSchemaSubjectMono.flatMap(subjectDTO -> {
+          var cluster = getCluster(clusterName);
+          return cluster.isGcpSchemaRegistryEnabled()
+              ? gcpSchemaRegistryService.checksSchemaCompatibility(
+                  cluster, subject, gcpKafkaSrMapper.fromDto(subjectDTO))
+                  .map(gcpKafkaSrMapper::toDto) :
+              schemaRegistryService.checksSchemaCompatibility(
+                  cluster, subject, kafkaSrMapper.fromDto(subjectDTO))
+                  .map(kafkaSrMapper::toDto);
+        })
             .map(ResponseEntity::ok)
     ).doOnEach(sig -> audit(context, sig));
   }
@@ -73,22 +83,23 @@ public Mono<ResponseEntity<SchemaSubjectDTO>> createNewSchema(
       String clusterName, @Valid Mono<NewSchemaSubjectDTO> newSchemaSubjectMono,
       ServerWebExchange exchange) {
     return newSchemaSubjectMono.flatMap(newSubject -> {
-          var context = AccessContext.builder()
-              .cluster(clusterName)
-              .schemaActions(newSubject.getSubject(), SchemaAction.CREATE)
-              .operationName("createNewSchema")
-              .build();
-          return validateAccess(context).then(
+      var context = AccessContext.builder()
+          .cluster(clusterName)
+          .schemaActions(newSubject.getSubject(), SchemaAction.CREATE)
+          .operationName("createNewSchema")
+          .build();
+      var cluster = getCluster(clusterName);
+      return validateAccess(context).then(
+          cluster.isGcpSchemaRegistryEnabled()
+              ? gcpSchemaRegistryService.registerNewSchema(
+                  cluster, newSubject.getSubject(), gcpKafkaSrMapper.fromDto(newSubject))
+                  .map(gcpKafkaSrMapper::toDto) :
               schemaRegistryService.registerNewSchema(
-                  getCluster(clusterName),
-                  newSubject.getSubject(),
-                  kafkaSrMapper.fromDto(newSubject)
-              ))
-              .map(kafkaSrMapper::toDto)
-              .map(ResponseEntity::ok)
-              .doOnEach(sig -> audit(context, sig));
-        }
-    );
+                  cluster, newSubject.getSubject(), kafkaSrMapper.fromDto(newSubject))
+                  .map(kafkaSrMapper::toDto))
+          .map(ResponseEntity::ok)
+          .doOnEach(sig -> audit(context, sig));
+    });
   }
 
   @Override
@@ -100,8 +111,11 @@ public Mono<ResponseEntity<Void>> deleteLatestSchema(
         .operationName("deleteLatestSchema")
         .build();
 
+    var cluster = getCluster(clusterName);
     return validateAccess(context).then(
-        schemaRegistryService.deleteLatestSchemaSubject(getCluster(clusterName), subject)
+        (cluster.isGcpSchemaRegistryEnabled()
+            ? gcpSchemaRegistryService.deleteLatestSchemaSubject(cluster, subject) :
+            schemaRegistryService.deleteLatestSchemaSubject(cluster, subject))
             .doOnEach(sig -> audit(context, sig))
             .thenReturn(ResponseEntity.ok().build())
     );
@@ -116,8 +130,11 @@ public Mono<ResponseEntity<Void>> deleteSchema(
         .operationName("deleteSchema")
         .build();
 
+    var cluster = getCluster(clusterName);
     return validateAccess(context).then(
-        schemaRegistryService.deleteSchemaSubjectEntirely(getCluster(clusterName), subject)
+        (cluster.isGcpSchemaRegistryEnabled()
+            ? gcpSchemaRegistryService.deleteSchemaSubjectEntirely(cluster, subject) :
+            schemaRegistryService.deleteSchemaSubjectEntirely(cluster, subject))
            .doOnEach(sig -> audit(context, sig))
            .thenReturn(ResponseEntity.ok().build())
     );
@@ -132,8 +149,11 @@ public Mono<ResponseEntity<Void>> deleteSchemaByVersion(
         .operationName("deleteSchemaByVersion")
         .build();
 
+    var cluster = getCluster(clusterName);
     return validateAccess(context).then(
-        schemaRegistryService.deleteSchemaSubjectByVersion(getCluster(clusterName), subjectName, version)
+        (cluster.isGcpSchemaRegistryEnabled()
+            ? gcpSchemaRegistryService.deleteSchemaSubjectByVersion(cluster, subjectName, version) :
+            schemaRegistryService.deleteSchemaSubjectByVersion(cluster, subjectName, version))
            .doOnEach(sig -> audit(context, sig))
            .thenReturn(ResponseEntity.ok().build())
     );
@@ -148,9 +168,10 @@ public Mono<ResponseEntity<Flux<SchemaSubjectDTO>>> getAllVersionsBySubject(
         .operationName("getAllVersionsBySubject")
         .build();
 
-    Flux<SchemaSubjectDTO> schemas =
-        schemaRegistryService.getAllVersionsBySubject(getCluster(clusterName), subjectName)
-            .map(kafkaSrMapper::toDto);
+    var cluster = getCluster(clusterName);
+    Flux<SchemaSubjectDTO> schemas = cluster.isGcpSchemaRegistryEnabled()
+        ? gcpSchemaRegistryService.getAllVersionsBySubject(cluster, subjectName).map(gcpKafkaSrMapper::toDto) :
+        schemaRegistryService.getAllVersionsBySubject(cluster, subjectName).map(kafkaSrMapper::toDto);
 
     return validateAccess(context)
         .thenReturn(ResponseEntity.ok(schemas))
@@ -160,8 +181,12 @@ public Mono<ResponseEntity<Flux<SchemaSubjectDTO>>> getAllVersionsBySubject(
   @Override
   public Mono<ResponseEntity<CompatibilityLevelDTO>> getGlobalSchemaCompatibilityLevel(
       String clusterName, ServerWebExchange exchange) {
-    return schemaRegistryService.getGlobalSchemaCompatibilityLevel(getCluster(clusterName))
-        .map(c -> new CompatibilityLevelDTO().compatibility(kafkaSrMapper.toDto(c)))
+    var cluster = getCluster(clusterName);
+    return (cluster.isGcpSchemaRegistryEnabled()
+        ? gcpSchemaRegistryService.getGlobalSchemaCompatibilityLevel(cluster)
+            .map(c -> new CompatibilityLevelDTO().compatibility(gcpKafkaSrMapper.toDto(c))) :
+        schemaRegistryService.getGlobalSchemaCompatibilityLevel(cluster)
+            .map(c -> new CompatibilityLevelDTO().compatibility(kafkaSrMapper.toDto(c))))
         .map(ResponseEntity::ok)
         .defaultIfEmpty(ResponseEntity.notFound().build());
   }
@@ -176,9 +201,13 @@ public Mono<ResponseEntity<SchemaSubjectDTO>> getLatestSchema(String clusterName,
         .operationName("getLatestSchema")
         .build();
 
+    var cluster = getCluster(clusterName);
     return validateAccess(context).then(
-        schemaRegistryService.getLatestSchemaVersionBySubject(getCluster(clusterName), subject)
-            .map(kafkaSrMapper::toDto)
+        (cluster.isGcpSchemaRegistryEnabled()
+            ? gcpSchemaRegistryService.getLatestSchemaVersionBySubject(cluster, subject)
+                .map(gcpKafkaSrMapper::toDto) :
+            schemaRegistryService.getLatestSchemaVersionBySubject(cluster, subject)
+                .map(kafkaSrMapper::toDto))
            .map(ResponseEntity::ok)
     ).doOnEach(sig -> audit(context, sig));
   }
@@ -193,10 +222,13 @@ public Mono<ResponseEntity<SchemaSubjectDTO>> getSchemaByVersion(
         .operationParams(Map.of("subject", subject, "version", version))
         .build();
 
+    var cluster = getCluster(clusterName);
     return validateAccess(context).then(
-        schemaRegistryService.getSchemaSubjectByVersion(
-            getCluster(clusterName), subject, version)
-            .map(kafkaSrMapper::toDto)
+        (cluster.isGcpSchemaRegistryEnabled()
+            ? gcpSchemaRegistryService.getSchemaSubjectByVersion(cluster, subject, version)
+                .map(gcpKafkaSrMapper::toDto) :
+            schemaRegistryService.getSchemaSubjectByVersion(cluster, subject, version)
+                .map(kafkaSrMapper::toDto))
            .map(ResponseEntity::ok)
     ).doOnEach(sig -> audit(context, sig));
   }
@@ -212,8 +244,10 @@ public Mono<ResponseEntity<SchemaSubjectsResponseDTO>> getSchemas(String clusterName,
         .operationName("getSchemas")
         .build();
 
-    return schemaRegistryService
-        .getAllSubjectNames(getCluster(clusterName))
+    var cluster = getCluster(clusterName);
+    return (cluster.isGcpSchemaRegistryEnabled()
+        ? gcpSchemaRegistryService.getAllSubjectNames(cluster) :
+        schemaRegistryService.getAllSubjectNames(cluster))
         .flatMapIterable(l -> l)
         .filterWhen(schema -> accessControlService.isSchemaAccessible(schema, clusterName))
         .collectList()
@@ -230,9 +264,14 @@ public Mono<ResponseEntity<SchemaSubjectsResponseDTO>> getSchemas(String clusterName,
               .skip(subjectToSkip)
              .limit(pageSize)
              .toList();
-          return schemaRegistryService.getAllLatestVersionSchemas(getCluster(clusterName), subjectsToRender)
-              .map(subjs -> subjs.stream().map(kafkaSrMapper::toDto).toList())
-              .map(subjs -> new SchemaSubjectsResponseDTO().pageCount(totalPages).schemas(subjs));
+          return (cluster.isGcpSchemaRegistryEnabled()
+              ? gcpSchemaRegistryService.getAllLatestVersionSchemas(cluster, subjectsToRender)
+                  .map(subjs -> subjs.stream()
+                      .map(gcpKafkaSrMapper::toDto).toList()) :
+              schemaRegistryService.getAllLatestVersionSchemas(cluster, subjectsToRender)
+                  .map(subjs -> subjs.stream().map(kafkaSrMapper::toDto).toList()))
+              .map(subjs -> new SchemaSubjectsResponseDTO()
+                  .pageCount(totalPages).schemas(subjs));
         }).map(ResponseEntity::ok)
         .doOnEach(sig -> audit(context, sig));
   }
@@ -247,13 +286,15 @@ public Mono<ResponseEntity<Void>> updateGlobalSchemaCompatibilityLevel(
         .operationName("updateGlobalSchemaCompatibilityLevel")
         .build();
 
+    var cluster = getCluster(clusterName);
     return validateAccess(context).then(
         compatibilityLevelMono
            .flatMap(compatibilityLevelDTO ->
-                schemaRegistryService.updateGlobalSchemaCompatibility(
-                    getCluster(clusterName),
-                    kafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())
-                ))
+                cluster.isGcpSchemaRegistryEnabled()
+                    ? gcpSchemaRegistryService.updateGlobalSchemaCompatibility(
+                        cluster, gcpKafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())) :
+                    schemaRegistryService.updateGlobalSchemaCompatibility(
+                        cluster, kafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())))
            .doOnEach(sig -> audit(context, sig))
            .thenReturn(ResponseEntity.ok().build())
     );
@@ -271,15 +312,16 @@ public Mono<ResponseEntity<Void>> updateSchemaCompatibilityLevel(
         .operationParams(Map.of("subject", subject))
         .build();
 
-    return compatibilityLevelMono.flatMap(compatibilityLevelDTO ->
-        validateAccess(context).then(
-            schemaRegistryService.updateSchemaCompatibility(
-                getCluster(clusterName),
-                subject,
-                kafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())
-            ))
-            .doOnEach(sig -> audit(context, sig))
-            .thenReturn(ResponseEntity.ok().build())
-    );
+    return compatibilityLevelMono.flatMap(compatibilityLevelDTO -> {
+      var cluster = getCluster(clusterName);
+      return validateAccess(context).then(
+          cluster.isGcpSchemaRegistryEnabled()
+              ? gcpSchemaRegistryService.updateSchemaCompatibility(
+                  cluster, subject, gcpKafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())) :
+              schemaRegistryService.updateSchemaCompatibility(
+                  cluster, subject, kafkaSrMapper.fromDto(compatibilityLevelDTO.getCompatibility())))
+          .doOnEach(sig -> audit(context, sig))
+          .thenReturn(ResponseEntity.ok().build());
+    });
   }
 }
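Every endpoint above now repeats the same enabled-flag dispatch between the Confluent-backed and GCP-backed services. Condensed into one place, the pattern looks like the following sketch (it reuses the names from this diff; the `latestSchema` helper itself is hypothetical, not part of the change):

```java
// Hypothetical helper showing the dispatch each endpoint inlines:
// route by KafkaCluster.isGcpSchemaRegistryEnabled(), then map with the matching mapper.
private Mono<SchemaSubjectDTO> latestSchema(KafkaCluster cluster, String subject) {
  return cluster.isGcpSchemaRegistryEnabled()
      ? gcpSchemaRegistryService.getLatestSchemaVersionBySubject(cluster, subject)
          .map(gcpKafkaSrMapper::toDto)
      : schemaRegistryService.getLatestSchemaVersionBySubject(cluster, subject)
          .map(kafkaSrMapper::toDto);
}
```
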
diff --git a/api/src/main/java/io/kafbat/ui/mapper/GcpKafkaSrMapper.java b/api/src/main/java/io/kafbat/ui/mapper/GcpKafkaSrMapper.java
new file mode 100644
index 000000000..2a604c358
--- /dev/null
+++ b/api/src/main/java/io/kafbat/ui/mapper/GcpKafkaSrMapper.java
@@ -0,0 +1,48 @@
+package io.kafbat.ui.mapper;
+
+import io.kafbat.ui.model.CompatibilityCheckResponseDTO;
+import io.kafbat.ui.model.CompatibilityLevelDTO;
+import io.kafbat.ui.model.NewSchemaSubjectDTO;
+import io.kafbat.ui.model.SchemaReferenceDTO;
+import io.kafbat.ui.model.SchemaSubjectDTO;
+import io.kafbat.ui.model.SchemaTypeDTO;
+import io.kafbat.ui.service.GcpSchemaRegistryService;
+import java.util.List;
+import java.util.Optional;
+import org.mapstruct.Mapper;
+
+@Mapper
+public interface GcpKafkaSrMapper {
+
+  // Convert GCP SubjectWithCompatibilityLevel to DTO
+  default SchemaSubjectDTO toDto(GcpSchemaRegistryService.SubjectWithCompatibilityLevel s) {
+    return new SchemaSubjectDTO()
+        .id(s.getId())
+        .version(s.getVersion())
+        .subject(s.getSubject())
+        .schema(s.getSchema())
+        .schemaType(SchemaTypeDTO.fromValue(
+            Optional.ofNullable(s.getSchemaType())
+                .orElse(io.kafbat.ui.gcp.sr.model.SchemaType.AVRO)
+                .getValue()))
+        .references(toDto(s.getReferences()))
+        .compatibilityLevel(Optional.ofNullable(s.getCompatibility())
+            .map(Object::toString).orElse(null));
+  }
+
+  // Convert GCP SchemaReference list to DTO list
+  List<SchemaReferenceDTO> toDto(List<io.kafbat.ui.gcp.sr.model.SchemaReference> references);
+
+  // Convert GCP CompatibilityCheckResponse to DTO
+  CompatibilityCheckResponseDTO toDto(io.kafbat.ui.gcp.sr.model.CompatibilityCheckResponse ccr);
+
+  // Convert GCP Compatibility to DTO enum
+  CompatibilityLevelDTO.CompatibilityEnum toDto(io.kafbat.ui.gcp.sr.model.Compatibility compatibility);
+
+  // Convert DTO to GCP NewSubject
+  io.kafbat.ui.gcp.sr.model.NewSubject fromDto(NewSchemaSubjectDTO subjectDto);
+
+  // Convert DTO enum to GCP Compatibility
+  io.kafbat.ui.gcp.sr.model.Compatibility fromDto(CompatibilityLevelDTO.CompatibilityEnum dtoEnum);
+
+}
diff --git a/api/src/main/java/io/kafbat/ui/model/KafkaCluster.java b/api/src/main/java/io/kafbat/ui/model/KafkaCluster.java
index 6e2a00988..7806b274b 100644
--- a/api/src/main/java/io/kafbat/ui/model/KafkaCluster.java
+++ b/api/src/main/java/io/kafbat/ui/model/KafkaCluster.java
@@ -3,6 +3,7 @@
 import io.kafbat.ui.config.ClustersProperties;
 import io.kafbat.ui.connect.api.KafkaConnectClientApi;
 import io.kafbat.ui.emitter.PollingSettings;
+import io.kafbat.ui.gcp.sr.api.KafkaGcpSrClientApi;
 import io.kafbat.ui.service.ksql.KsqlApiClient;
 import io.kafbat.ui.service.masking.DataMasking;
 import io.kafbat.ui.sr.api.KafkaSrClientApi;
@@ -31,6 +32,8 @@ public class KafkaCluster {
   private final DataMasking masking;
   private final PollingSettings pollingSettings;
   private final ReactiveFailover<KafkaSrClientApi> schemaRegistryClient;
+  private final ReactiveFailover<KafkaGcpSrClientApi> gcpSchemaRegistryClient;
+  private final boolean isGcpSchemaRegistryEnabled;
   private final Map<String, ReactiveFailover<KafkaConnectClientApi>> connectsClients;
   private final ReactiveFailover<KsqlApiClient> ksqlClient;
 }
diff --git a/api/src/main/java/io/kafbat/ui/serdes/builtin/sr/MessageFormatter.java b/api/src/main/java/io/kafbat/ui/serdes/builtin/sr/MessageFormatter.java
index 5ab77ffeb..c6e5a1d72 100644
--- a/api/src/main/java/io/kafbat/ui/serdes/builtin/sr/MessageFormatter.java
+++ b/api/src/main/java/io/kafbat/ui/serdes/builtin/sr/MessageFormatter.java
@@ -11,6 +11,7 @@
 import io.confluent.kafka.serializers.json.KafkaJsonSchemaDeserializer;
 import io.confluent.kafka.serializers.protobuf.KafkaProtobufDeserializer;
 import io.kafbat.ui.util.jsonschema.JsonAvroConversion;
+import java.util.HashMap;
 import java.util.Map;
 import lombok.SneakyThrows;
 
@@ -18,9 +19,10 @@ interface MessageFormatter {
 
   String format(String topic, byte[] value);
 
-  static Map<SchemaType, MessageFormatter> createMap(SchemaRegistryClient schemaRegistryClient) {
+  static Map<SchemaType, MessageFormatter> createMap(SchemaRegistryClient schemaRegistryClient,
+                                                     String bearerAuthCustomProviderClass) {
     return Map.of(
-        SchemaType.AVRO, new AvroMessageFormatter(schemaRegistryClient),
+        SchemaType.AVRO, new AvroMessageFormatter(schemaRegistryClient, bearerAuthCustomProviderClass),
         SchemaType.JSON, new JsonSchemaMessageFormatter(schemaRegistryClient),
         SchemaType.PROTOBUF, new ProtobufMessageFormatter(schemaRegistryClient)
     );
@@ -29,17 +31,23 @@ SchemaType.PROTOBUF, new ProtobufMessageFormatter(schemaRegistryClient)
 class AvroMessageFormatter implements MessageFormatter {
   private final KafkaAvroDeserializer avroDeserializer;
 
-  AvroMessageFormatter(SchemaRegistryClient client) {
+  AvroMessageFormatter(SchemaRegistryClient client, String bearerAuthCustomProviderClass) {
     this.avroDeserializer = new KafkaAvroDeserializer(client);
-    this.avroDeserializer.configure(
-        Map.of(
-            AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "wontbeused",
-            KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, false,
-            KafkaAvroDeserializerConfig.SCHEMA_REFLECTION_CONFIG, false,
-            KafkaAvroDeserializerConfig.AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG, true
-        ),
-        false
-    );
+
+    final Map<String, Object> avroProps = new HashMap<>();
+    avroProps.put(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "wontbeused");
+    avroProps.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, false);
+    avroProps.put(KafkaAvroDeserializerConfig.SCHEMA_REFLECTION_CONFIG, false);
+    avroProps.put(KafkaAvroDeserializerConfig.AVRO_USE_LOGICAL_TYPE_CONVERTERS_CONFIG, true);
+
+    if (bearerAuthCustomProviderClass != null && !bearerAuthCustomProviderClass.isBlank()) {
+      avroProps.put(KafkaAvroDeserializerConfig.BEARER_AUTH_CREDENTIALS_SOURCE, "CUSTOM");
+      avroProps.put(KafkaAvroDeserializerConfig.BEARER_AUTH_CUSTOM_PROVIDER_CLASS,
+          String.format("class %s", bearerAuthCustomProviderClass));
+    }
+
+    this.avroDeserializer.configure(avroProps, false);
+
   }
 
   @Override
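A quick usage sketch of the widened factory (the registry client and the payload bytes are assumed; the provider class name is the one `GcpBearerAuthFilter` exposes later in this diff):

```java
// Sketch: build the formatter map with bearer-auth pass-through, then render an Avro record.
Map<SchemaType, MessageFormatter> formatters = MessageFormatter.createMap(
    schemaRegistryClient, "com.google.cloud.hosted.kafka.auth.GcpBearerAuthCredentialProvider");
String json = formatters.get(SchemaType.AVRO).format("some-topic", avroPayloadBytes);
```
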
diff --git a/api/src/main/java/io/kafbat/ui/serdes/builtin/sr/SchemaRegistrySerde.java b/api/src/main/java/io/kafbat/ui/serdes/builtin/sr/SchemaRegistrySerde.java
index d6f7a3699..f4697d33b 100644
--- a/api/src/main/java/io/kafbat/ui/serdes/builtin/sr/SchemaRegistrySerde.java
+++ b/api/src/main/java/io/kafbat/ui/serdes/builtin/sr/SchemaRegistrySerde.java
@@ -42,6 +42,7 @@ public class SchemaRegistrySerde implements BuiltInSerde {
 
   private static final byte SR_PAYLOAD_MAGIC_BYTE = 0x0;
   private static final int SR_PAYLOAD_PREFIX_LENGTH = 5;
+  private static final String CUSTOM_BEARER_AUTH_CREDENTIALS_SOURCE = "CUSTOM";
 
   public static String name() {
     return "SchemaRegistry";
@@ -77,11 +78,15 @@ public void autoConfigure(PropertyResolver kafkaClusterProperties,
             urls,
             kafkaClusterProperties.getProperty("schemaRegistryAuth.username", String.class).orElse(null),
             kafkaClusterProperties.getProperty("schemaRegistryAuth.password", String.class).orElse(null),
+            kafkaClusterProperties.getProperty("schemaRegistryAuth.bearerAuthCustomProviderClass", String.class)
+                .orElse(null),
             kafkaClusterProperties.getProperty("schemaRegistrySsl.keystoreLocation", String.class).orElse(null),
             kafkaClusterProperties.getProperty("schemaRegistrySsl.keystorePassword", String.class).orElse(null),
             kafkaClusterProperties.getProperty("ssl.truststoreLocation", String.class).orElse(null),
             kafkaClusterProperties.getProperty("ssl.truststorePassword", String.class).orElse(null)
         ),
+        kafkaClusterProperties.getProperty("schemaRegistryAuth.bearerAuthCustomProviderClass", String.class)
+            .orElse(null),
         kafkaClusterProperties.getProperty("schemaRegistryKeySchemaNameTemplate", String.class).orElse("%s-key"),
         kafkaClusterProperties.getProperty("schemaRegistrySchemaNameTemplate", String.class).orElse("%s-value"),
         kafkaClusterProperties.getProperty("schemaRegistryCheckSchemaExistenceForDeserialize", Boolean.class)
@@ -103,11 +108,15 @@ public void configure(PropertyResolver serdeProperties,
             urls,
             serdeProperties.getProperty("username", String.class).orElse(null),
             serdeProperties.getProperty("password", String.class).orElse(null),
+            kafkaClusterProperties.getProperty("schemaRegistryAuth.bearerAuthCustomProviderClass", String.class)
+                .orElse(null),
             serdeProperties.getProperty("keystoreLocation", String.class).orElse(null),
             serdeProperties.getProperty("keystorePassword", String.class).orElse(null),
             kafkaClusterProperties.getProperty("ssl.truststoreLocation", String.class).orElse(null),
             kafkaClusterProperties.getProperty("ssl.truststorePassword", String.class).orElse(null)
         ),
+        kafkaClusterProperties.getProperty("schemaRegistryAuth.bearerAuthCustomProviderClass", String.class)
+            .orElse(null),
         serdeProperties.getProperty("keySchemaNameTemplate", String.class).orElse("%s-key"),
         serdeProperties.getProperty("schemaNameTemplate", String.class).orElse("%s-value"),
         serdeProperties.getProperty("checkSchemaExistenceForDeserialize", Boolean.class)
@@ -119,6 +128,7 @@ public void configure(PropertyResolver serdeProperties,
   void configure(
       List<String> schemaRegistryUrls,
       SchemaRegistryClient schemaRegistryClient,
+      String bearerAuthCustomProviderClass,
       String keySchemaNameTemplate,
       String valueSchemaNameTemplate,
       boolean checkTopicSchemaExistenceForDeserialize) {
@@ -126,17 +136,19 @@ void configure(
     this.schemaRegistryClient = schemaRegistryClient;
     this.keySchemaNameTemplate = keySchemaNameTemplate;
     this.valueSchemaNameTemplate = valueSchemaNameTemplate;
-    this.schemaRegistryFormatters = MessageFormatter.createMap(schemaRegistryClient);
+    this.schemaRegistryFormatters = MessageFormatter.createMap(schemaRegistryClient, bearerAuthCustomProviderClass);
     this.checkSchemaExistenceForDeserialize = checkTopicSchemaExistenceForDeserialize;
   }
 
   private static SchemaRegistryClient createSchemaRegistryClient(List<String> urls,
                                                                  @Nullable String username,
                                                                  @Nullable String password,
+                                                                 @Nullable String bearerAuthCustomProviderClass,
                                                                  @Nullable String keyStoreLocation,
                                                                  @Nullable String keyStorePassword,
                                                                  @Nullable String trustStoreLocation,
-                                                                 @Nullable String trustStorePassword) {
+                                                                 @Nullable String trustStorePassword
+  ) {
     Map<String, Object> configs = new HashMap<>();
     if (username != null && password != null) {
       configs.put(BASIC_AUTH_CREDENTIALS_SOURCE, "USER_INFO");
@@ -166,6 +178,11 @@ private static SchemaRegistryClient createSchemaRegistryClient(List<String> urls,
           keyStorePassword);
     }
 
+    if (bearerAuthCustomProviderClass != null) {
+      configs.put(SchemaRegistryClientConfig.BEARER_AUTH_CREDENTIALS_SOURCE, CUSTOM_BEARER_AUTH_CREDENTIALS_SOURCE);
+      configs.put(SchemaRegistryClientConfig.BEARER_AUTH_CUSTOM_PROVIDER_CLASS, bearerAuthCustomProviderClass);
+    }
+
     return new CachedSchemaRegistryClient(
         urls,
         1_000,
diff --git a/api/src/main/java/io/kafbat/ui/service/FeatureService.java b/api/src/main/java/io/kafbat/ui/service/FeatureService.java
index 59a23236b..5d44fe112 100644
--- a/api/src/main/java/io/kafbat/ui/service/FeatureService.java
+++ b/api/src/main/java/io/kafbat/ui/service/FeatureService.java
@@ -36,7 +36,7 @@ public Mono<List<ClusterFeature>> getAvailableFeatures(ReactiveAdminClient admin
       features.add(Mono.just(ClusterFeature.KSQL_DB));
     }
 
-    if (cluster.getSchemaRegistryClient() != null) {
+    if (cluster.getSchemaRegistryClient() != null || cluster.getGcpSchemaRegistryClient() != null) {
       features.add(Mono.just(ClusterFeature.SCHEMA_REGISTRY));
     }
 
diff --git a/api/src/main/java/io/kafbat/ui/service/GcpSchemaRegistryService.java b/api/src/main/java/io/kafbat/ui/service/GcpSchemaRegistryService.java
new file mode 100644
index 000000000..e1266c558
--- /dev/null
+++ b/api/src/main/java/io/kafbat/ui/service/GcpSchemaRegistryService.java
@@ -0,0 +1,180 @@
+package io.kafbat.ui.service;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.json.JsonMapper;
+import io.kafbat.ui.exception.SchemaCompatibilityException;
+import io.kafbat.ui.exception.SchemaNotFoundException;
+import io.kafbat.ui.exception.ValidationException;
+import io.kafbat.ui.gcp.sr.api.KafkaGcpSrClientApi;
+import io.kafbat.ui.gcp.sr.model.Compatibility;
+import io.kafbat.ui.gcp.sr.model.CompatibilityCheckResponse;
+import io.kafbat.ui.gcp.sr.model.CompatibilityConfig;
+import io.kafbat.ui.gcp.sr.model.CompatibilityLevelChange;
+import io.kafbat.ui.gcp.sr.model.NewSubject;
+import io.kafbat.ui.gcp.sr.model.SchemaSubject;
+import io.kafbat.ui.model.KafkaCluster;
+import io.kafbat.ui.util.ReactiveFailover;
+import java.util.List;
+import java.util.stream.Collectors;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.RequiredArgsConstructor;
+import lombok.SneakyThrows;
+import lombok.experimental.Delegate;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.stereotype.Service;
+import org.springframework.web.reactive.function.client.WebClientResponseException;
+import reactor.core.publisher.Flux;
+import reactor.core.publisher.Mono;
+
+@Service
+@Slf4j
+@RequiredArgsConstructor
+public class GcpSchemaRegistryService {
+
+  private static final String LATEST = "latest";
+
+  @AllArgsConstructor
+  public static class SubjectWithCompatibilityLevel {
+    @Delegate
+    SchemaSubject subject;
+    @Getter
+    Compatibility compatibility;
+  }
+
+  private ReactiveFailover<KafkaGcpSrClientApi> api(KafkaCluster cluster) {
+    return cluster.getGcpSchemaRegistryClient();
+  }
+
+  public Mono<List<SubjectWithCompatibilityLevel>> getAllLatestVersionSchemas(KafkaCluster cluster,
+                                                                              List<String> subjects) {
+    return Flux.fromIterable(subjects)
+        .concatMap(subject -> getLatestSchemaVersionBySubject(cluster, subject))
+        .collect(Collectors.toList());
+  }
+
+  public Mono<List<String>> getAllSubjectNames(KafkaCluster cluster) {
+    return api(cluster)
+        .mono(c -> c.getAllSubjectNames(null, false))
+        .flatMapIterable(this::parseSubjectListString)
+        .collectList();
+  }
+
+  @SneakyThrows
+  private List<String> parseSubjectListString(String subjectNamesStr) {
+    //workaround for https://github.com/spring-projects/spring-framework/issues/24734
+    return new JsonMapper().readValue(subjectNamesStr, new TypeReference<>() {
+    });
+  }
+
+  public Flux<SubjectWithCompatibilityLevel> getAllVersionsBySubject(KafkaCluster cluster, String subject) {
+    Flux<Integer> versions = getSubjectVersions(cluster, subject);
+    return versions.flatMap(version -> getSchemaSubjectByVersion(cluster, subject, version));
+  }
+
+  private Flux<Integer> getSubjectVersions(KafkaCluster cluster, String schemaName) {
+    return api(cluster).flux(c -> c.getSubjectVersions(schemaName));
+  }
+
+  public Mono<SubjectWithCompatibilityLevel> getSchemaSubjectByVersion(KafkaCluster cluster,
+                                                                      String schemaName,
+                                                                      Integer version) {
+    return getSchemaSubject(cluster, schemaName, String.valueOf(version));
+  }
+
+  public Mono<SubjectWithCompatibilityLevel> getLatestSchemaVersionBySubject(KafkaCluster cluster,
+                                                                             String schemaName) {
+    return getSchemaSubject(cluster, schemaName, LATEST);
+  }
+
+  private Mono<SubjectWithCompatibilityLevel> getSchemaSubject(KafkaCluster cluster, String schemaName,
+                                                               String version) {
+    return api(cluster)
+        .mono(c -> c.getSubjectVersion(schemaName, version, false))
+        .zipWith(getSchemaCompatibilityInfoOrGlobal(cluster, schemaName))
+        .map(t -> new SubjectWithCompatibilityLevel(t.getT1(), t.getT2()))
+        .onErrorResume(WebClientResponseException.NotFound.class, th -> Mono.error(new SchemaNotFoundException()));
+  }
+
+  public Mono<Void> deleteSchemaSubjectByVersion(KafkaCluster cluster, String schemaName, Integer version) {
+    return deleteSchemaSubject(cluster, schemaName, String.valueOf(version));
+  }
+
+  public Mono<Void> deleteLatestSchemaSubject(KafkaCluster cluster, String schemaName) {
+    return deleteSchemaSubject(cluster, schemaName, LATEST);
+  }
+
+  private Mono<Void> deleteSchemaSubject(KafkaCluster cluster, String schemaName, String version) {
+    return api(cluster).mono(c -> c.deleteSubjectVersion(schemaName, version, false));
+  }
+
+  public Mono<Void> deleteSchemaSubjectEntirely(KafkaCluster cluster, String schemaName) {
+    return api(cluster).mono(c -> c.deleteAllSubjectVersions(schemaName, false));
+  }
+
+  /**
+   * Checks whether the provided schema duplicates the previous or not, creates a new schema
+   * and then returns the whole content by requesting its latest version.
+   */
+  public Mono<SubjectWithCompatibilityLevel> registerNewSchema(KafkaCluster cluster,
+                                                               String subject,
+                                                               NewSubject newSchemaSubject) {
+    return api(cluster)
+        .mono(c -> c.registerNewSchema(subject, newSchemaSubject))
+        .onErrorMap(WebClientResponseException.Conflict.class,
+            th -> new SchemaCompatibilityException())
+        .onErrorMap(WebClientResponseException.UnprocessableEntity.class,
+            th -> new ValidationException("Invalid schema. Error from registry: "
+                + th.getResponseBodyAsString()))
+        .then(getLatestSchemaVersionBySubject(cluster, subject));
+  }
+
+  public Mono<Void> updateSchemaCompatibility(KafkaCluster cluster,
+                                              String schemaName,
+                                              Compatibility compatibility) {
+    return api(cluster)
+        .mono(c -> c.updateSubjectCompatibilityLevel(
+            schemaName, new CompatibilityLevelChange().compatibility(compatibility)))
+        .then();
+  }
+
+  public Mono<Void> updateGlobalSchemaCompatibility(KafkaCluster cluster,
+                                                    Compatibility compatibility) {
+    return api(cluster)
+        .mono(c -> c.updateGlobalCompatibilityLevel(new CompatibilityLevelChange()
+            .compatibility(compatibility)))
+        .then();
+  }
+
+  public Mono<Compatibility> getSchemaCompatibilityLevel(KafkaCluster cluster,
+                                                         String schemaName) {
+    return api(cluster)
+        .mono(c -> c.getSubjectCompatibilityLevel(schemaName, true))
+        .map(CompatibilityConfig::getCompatibility)
+        .onErrorResume(error -> Mono.empty());
+  }
+
+  public Mono<Compatibility> getGlobalSchemaCompatibilityLevel(KafkaCluster cluster) {
+    return api(cluster)
+        .mono(c -> c.getGlobalCompatibilityLevel())
+        .map(CompatibilityConfig::getCompatibility);
+  }
+
+  private Mono<Compatibility> getSchemaCompatibilityInfoOrGlobal(KafkaCluster cluster,
+                                                                 String schemaName) {
+    return getSchemaCompatibilityLevel(cluster, schemaName)
+        .switchIfEmpty(this.getGlobalSchemaCompatibilityLevel(cluster));
+  }
+
+  public Mono<CompatibilityCheckResponse> checksSchemaCompatibility(KafkaCluster cluster,
+                                                                    String schemaName,
+                                                                    NewSubject newSchemaSubject) {
+    return api(cluster).mono(c -> c.checkSchemaCompatibility(
+        schemaName, LATEST, true, newSchemaSubject));
+  }
+
+}
+
+
+
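`registerNewSchema` is the service's main write path: it maps 409/422 responses onto domain exceptions and then re-reads the stored subject. A usage sketch (the cluster would come from `ClustersStorage`; the subject name and schema literal are made up):

```java
// Sketch: register an Avro schema on a GCP-backed registry, then log what was stored.
NewSubject newSubject = new NewSubject()
    .schema("{\"type\":\"record\",\"name\":\"User\",\"fields\":[{\"name\":\"id\",\"type\":\"long\"}]}")
    .schemaType(SchemaType.AVRO); // io.kafbat.ui.gcp.sr.model.SchemaType
Mono<GcpSchemaRegistryService.SubjectWithCompatibilityLevel> registered =
    gcpSchemaRegistryService.registerNewSchema(cluster, "user-value", newSubject);
registered.subscribe(s ->
    log.info("registered {} v{}, compatibility {}", s.getSubject(), s.getVersion(), s.getCompatibility()));
```
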
diff --git a/api/src/main/java/io/kafbat/ui/service/KafkaClusterFactory.java b/api/src/main/java/io/kafbat/ui/service/KafkaClusterFactory.java
index f8c528f90..941c6c336 100644
--- a/api/src/main/java/io/kafbat/ui/service/KafkaClusterFactory.java
+++ b/api/src/main/java/io/kafbat/ui/service/KafkaClusterFactory.java
@@ -5,6 +5,7 @@
 import io.kafbat.ui.config.WebclientProperties;
 import io.kafbat.ui.connect.api.KafkaConnectClientApi;
 import io.kafbat.ui.emitter.PollingSettings;
+import io.kafbat.ui.gcp.sr.api.KafkaGcpSrClientApi;
 import io.kafbat.ui.model.ApplicationPropertyValidationDTO;
 import io.kafbat.ui.model.ClusterConfigValidationDTO;
 import io.kafbat.ui.model.KafkaCluster;
@@ -16,10 +17,12 @@
 import io.kafbat.ui.util.KafkaServicesValidation;
 import io.kafbat.ui.util.ReactiveFailover;
 import io.kafbat.ui.util.WebClientConfigurator;
+import io.kafbat.ui.util.gcp.GcpBearerAuthFilter;
 import java.time.Duration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Optional;
 import java.util.Properties;
 import java.util.stream.Stream;
@@ -66,7 +69,15 @@ public KafkaCluster create(ClustersProperties properties,
     builder.pollingSettings(PollingSettings.create(clusterProperties, properties));
 
     if (schemaRegistryConfigured(clusterProperties)) {
-      builder.schemaRegistryClient(schemaRegistryClient(clusterProperties));
+      var auth = clusterProperties.getSchemaRegistryAuth();
+      if (auth != null && Objects.equals(auth.getBearerAuthCustomProviderClass(),
+          GcpBearerAuthFilter.getGcpBearerAuthCustomProviderClass())) {
+        builder.isGcpSchemaRegistryEnabled(true);
+        builder.gcpSchemaRegistryClient(gcpSchemaRegistryClient(clusterProperties));
+      } else {
+        builder.isGcpSchemaRegistryEnabled(false);
+        builder.schemaRegistryClient(schemaRegistryClient(clusterProperties));
+      }
     }
     if (connectClientsConfigured(clusterProperties)) {
       builder.connectsClients(connectClients(clusterProperties));
@@ -99,8 +110,15 @@ public Mono<ClusterConfigValidationDTO> validate(ClustersProperties.Cluster clusterProperties) {
             clusterProperties.getSsl()
         ),
         schemaRegistryConfigured(clusterProperties)
-            ? KafkaServicesValidation.validateSchemaRegistry(
-                () -> schemaRegistryClient(clusterProperties)).map(Optional::of)
+            ? (Objects.equals(
+                Optional.ofNullable(clusterProperties.getSchemaRegistryAuth())
+                    .map(ClustersProperties.SchemaRegistryAuth::getBearerAuthCustomProviderClass).orElse(null),
+                GcpBearerAuthFilter.getGcpBearerAuthCustomProviderClass())
+                ? KafkaServicesValidation.validateGcpSchemaRegistry(
+                    () -> gcpSchemaRegistryClient(clusterProperties))
+                : KafkaServicesValidation.validateSchemaRegistry(
+                    () -> schemaRegistryClient(clusterProperties))
+            ).map(Optional::of)
             : Mono.<Optional<ApplicationPropertyValidationDTO>>just(Optional.empty()),
 
         ksqlConfigured(clusterProperties)
@@ -169,11 +187,12 @@ private boolean schemaRegistryConfigured(ClustersProperties.Cluster clusterProperties) {
   private ReactiveFailover<KafkaSrClientApi> schemaRegistryClient(ClustersProperties.Cluster clusterProperties) {
     var auth = Optional.ofNullable(clusterProperties.getSchemaRegistryAuth())
         .orElse(new ClustersProperties.SchemaRegistryAuth());
+
     WebClient webClient = new WebClientConfigurator()
-        .configureSsl(clusterProperties.getSsl(), clusterProperties.getSchemaRegistrySsl())
         .configureBasicAuth(auth.getUsername(), auth.getPassword())
         .configureBufferSize(webClientMaxBuffSize)
         .build();
+
     return ReactiveFailover.create(
         parseUrlList(clusterProperties.getSchemaRegistry()),
         url -> new KafkaSrClientApi(new ApiClient(webClient, null, null).setBasePath(url)),
@@ -183,6 +202,23 @@ private ReactiveFailover<KafkaSrClientApi> schemaRegistryClient(ClustersProperties.Cluster clusterProperties) {
     );
   }
 
+  private ReactiveFailover<KafkaGcpSrClientApi> gcpSchemaRegistryClient(ClustersProperties.Cluster clusterProperties) {
+    WebClientConfigurator webClientConfigurator = new WebClientConfigurator()
+        .configureSsl(clusterProperties.getSsl(), clusterProperties.getSchemaRegistrySsl())
+        .configureBufferSize(webClientMaxBuffSize)
+        .filter(new GcpBearerAuthFilter());
+
+    WebClient webClient = webClientConfigurator.build();
+
+    return ReactiveFailover.create(
+        parseUrlList(clusterProperties.getSchemaRegistry()),
+        url -> new KafkaGcpSrClientApi(new io.kafbat.ui.gcp.sr.ApiClient(webClient, null, null).setBasePath(url)),
+        ReactiveFailover.CONNECTION_REFUSED_EXCEPTION_FILTER,
+        "No live schemaRegistry instances available",
+        ReactiveFailover.DEFAULT_RETRY_GRACE_PERIOD_MS
+    );
+  }
+
   private boolean ksqlConfigured(ClustersProperties.Cluster clusterProperties) {
     return clusterProperties.getKsqldbServer() != null;
   }
diff --git a/api/src/main/java/io/kafbat/ui/service/SchemaRegistryService.java b/api/src/main/java/io/kafbat/ui/service/SchemaRegistryService.java
index c725a787e..faad76e05 100644
--- a/api/src/main/java/io/kafbat/ui/service/SchemaRegistryService.java
+++ b/api/src/main/java/io/kafbat/ui/service/SchemaRegistryService.java
@@ -169,4 +169,8 @@ public Mono<CompatibilityCheckResponse> checksSchemaCompatibility(KafkaCluster cluster,
                                                                    NewSubject newSchemaSubject) {
     return api(cluster).mono(c -> c.checkSchemaCompatibility(schemaName, LATEST, true, newSchemaSubject));
   }
+
 }
+
+
+
diff --git a/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java b/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java
index 019a33543..db661086f 100644
--- a/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java
+++ b/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java
@@ -3,6 +3,7 @@
 import static io.kafbat.ui.config.ClustersProperties.TruststoreConfig;
 
 import io.kafbat.ui.connect.api.KafkaConnectClientApi;
+import io.kafbat.ui.gcp.sr.api.KafkaGcpSrClientApi;
 import io.kafbat.ui.model.ApplicationPropertyValidationDTO;
 import io.kafbat.ui.service.ReactiveAdminClient;
 import io.kafbat.ui.service.ksql.KsqlApiClient;
@@ -104,6 +105,21 @@ public static Mono<ApplicationPropertyValidationDTO> validateSchemaRegistry(
         .onErrorResume(KafkaServicesValidation::invalid);
   }
 
+  public static Mono<ApplicationPropertyValidationDTO> validateGcpSchemaRegistry(
+      Supplier<ReactiveFailover<KafkaGcpSrClientApi>> clientSupplier) {
+    ReactiveFailover<KafkaGcpSrClientApi> client;
+    try {
+      client = clientSupplier.get();
+    } catch (Exception e) {
+      log.error("Error creating Schema Registry client", e);
+      return invalid("Error creating Schema Registry client: " + e.getMessage());
+    }
+    return client
+        .mono(KafkaGcpSrClientApi::getGlobalCompatibilityLevel)
+        .then(valid())
+        .onErrorResume(KafkaServicesValidation::invalid);
+  }
+
   public static Mono<ApplicationPropertyValidationDTO> validateConnect(
       Supplier<ReactiveFailover<KafkaConnectClientApi>> clientSupplier) {
     ReactiveFailover<KafkaConnectClientApi> client;
diff --git a/api/src/main/java/io/kafbat/ui/util/WebClientConfigurator.java b/api/src/main/java/io/kafbat/ui/util/WebClientConfigurator.java
index 170530be1..c4405e89f 100644
--- a/api/src/main/java/io/kafbat/ui/util/WebClientConfigurator.java
+++ b/api/src/main/java/io/kafbat/ui/util/WebClientConfigurator.java
@@ -11,6 +11,8 @@
 import java.io.FileInputStream;
 import java.security.KeyStore;
 import java.time.Duration;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.function.Consumer;
 import javax.annotation.Nullable;
 import javax.net.ssl.KeyManagerFactory;
@@ -24,9 +26,11 @@
 import org.springframework.http.codec.json.Jackson2JsonEncoder;
 import org.springframework.util.ResourceUtils;
 import org.springframework.util.unit.DataSize;
+import org.springframework.web.reactive.function.client.ExchangeFilterFunction;
 import org.springframework.web.reactive.function.client.WebClient;
 import reactor.netty.http.client.HttpClient;
 
+
 public class WebClientConfigurator {
 
   private final WebClient.Builder builder = WebClient.builder();
@@ -34,6 +38,8 @@ public class WebClientConfigurator {
       .create()
       .proxyWithSystemProperties();
 
+  private final List<ExchangeFilterFunction> filters = new ArrayList<>();
+
   public WebClientConfigurator() {
     configureObjectMapper(defaultOM());
   }
@@ -45,6 +51,13 @@ private static ObjectMapper defaultOM() {
         .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
   }
 
+  public WebClientConfigurator filter(ExchangeFilterFunction filter) {
+    if (filter != null) {
+      this.filters.add(filter);
+    }
+    return this;
+  }
+
   public WebClientConfigurator configureSsl(@Nullable ClustersProperties.TruststoreConfig truststoreConfig,
                                             @Nullable ClustersProperties.KeystoreConfig keystoreConfig) {
     if (truststoreConfig != null && !truststoreConfig.isVerifySsl()) {
@@ -151,6 +164,7 @@ public WebClientConfigurator configureResponseTimeout(Duration responseTimeout) {
   }
 
   public WebClient build() {
+    builder.filters(filterList -> filterList.addAll(this.filters));
     return builder.clientConnector(new ReactorClientHttpConnector(httpClient)).build();
   }
 }
diff --git a/api/src/main/java/io/kafbat/ui/util/gcp/GcpBearerAuthFilter.java b/api/src/main/java/io/kafbat/ui/util/gcp/GcpBearerAuthFilter.java
new file mode 100644
index 000000000..3faae1d76
--- /dev/null
+++ b/api/src/main/java/io/kafbat/ui/util/gcp/GcpBearerAuthFilter.java
@@ -0,0 +1,40 @@
+package io.kafbat.ui.util.gcp;
+
+import com.google.cloud.hosted.kafka.auth.GcpBearerAuthCredentialProvider;
+import org.jetbrains.annotations.NotNull;
+import org.springframework.web.reactive.function.client.ClientRequest;
+import org.springframework.web.reactive.function.client.ClientResponse;
+import org.springframework.web.reactive.function.client.ExchangeFilterFunction;
+import org.springframework.web.reactive.function.client.ExchangeFunction;
+import reactor.core.publisher.Mono;
+
+public class GcpBearerAuthFilter implements ExchangeFilterFunction {
+
+  private static final String GCP_BEARER_AUTH_CUSTOM_PROVIDER_CLASS =
+      GcpBearerAuthCredentialProvider.class.getName();
+
+  private final GcpBearerAuthCredentialProvider credentialProvider;
+
+  public GcpBearerAuthFilter() {
+    this.credentialProvider = new GcpBearerAuthCredentialProvider();
+  }
+
+  @NotNull
+  @Override
+  public Mono<ClientResponse> filter(ClientRequest request, ExchangeFunction next) {
+    // This Mono ensures token fetching happens for EACH request
+    return Mono.fromCallable(() -> this.credentialProvider.getBearerToken(null))
+        .flatMap(token -> {
+          // Create a new request with the Authorization header
+          ClientRequest newRequest = ClientRequest.from(request)
+              .headers(headers -> headers.setBearerAuth(token))
+              .build();
+          // Pass the new request to the next filter in the chain
+          return next.exchange(newRequest);
+        });
+  }
+
+  public static String getGcpBearerAuthCustomProviderClass() {
+    return GCP_BEARER_AUTH_CUSTOM_PROVIDER_CLASS;
+  }
+}
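The filter and the new `WebClientConfigurator.filter(...)` hook compose exactly as in `KafkaClusterFactory#gcpSchemaRegistryClient` above; a standalone sketch (the base path and buffer size are placeholders):

```java
// Sketch: every request through this WebClient carries a freshly fetched GCP bearer token.
WebClient webClient = new WebClientConfigurator()
    .filter(new GcpBearerAuthFilter())              // token fetched per request by the filter above
    .configureBufferSize(DataSize.ofMegabytes(20))  // placeholder buffer size
    .build();
KafkaGcpSrClientApi api = new KafkaGcpSrClientApi(
    new io.kafbat.ui.gcp.sr.ApiClient(webClient, null, null).setBasePath("https://gcp-sr.example"));
```
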
diff --git a/api/src/test/java/io/kafbat/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java b/api/src/test/java/io/kafbat/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java
index d66a8d004..9a4be577f 100644
--- a/api/src/test/java/io/kafbat/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java
+++ b/api/src/test/java/io/kafbat/ui/serdes/builtin/sr/SchemaRegistrySerdeTest.java
@@ -35,7 +35,7 @@ class SchemaRegistrySerdeTest {
   @BeforeEach
   void init() {
     serde = new SchemaRegistrySerde();
-    serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value", true);
+    serde.configure(List.of("wontbeused"), registryClient, null, "%s-key", "%s-value", true);
   }
 
   @ParameterizedTest
@@ -135,7 +135,7 @@ class SerdeWithDisabledSubjectExistenceCheck {
 
     @BeforeEach
     void init() {
-      serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value", false);
+      serde.configure(List.of("wontbeused"), registryClient, null, "%s-key", "%s-value", false);
     }
 
     @Test
@@ -151,7 +151,7 @@ class SerdeWithEnabledSubjectExistenceCheck {
 
     @BeforeEach
     void init() {
-      serde.configure(List.of("wontbeused"), registryClient, "%s-key", "%s-value", true);
+      serde.configure(List.of("wontbeused"), registryClient, null, "%s-key", "%s-value", true);
     }
 
     @Test
diff --git a/api/src/test/java/io/kafbat/ui/service/SchemaRegistryPaginationTest.java b/api/src/test/java/io/kafbat/ui/service/SchemaRegistryPaginationTest.java
index 43cb29382..7e6693610 100644
--- a/api/src/test/java/io/kafbat/ui/service/SchemaRegistryPaginationTest.java
+++ b/api/src/test/java/io/kafbat/ui/service/SchemaRegistryPaginationTest.java
@@ -43,7 +43,7 @@ private void init(List<String> subjects) {
             new SchemaRegistryService.SubjectWithCompatibilityLevel(
                 new SchemaSubject().subject(a.getArgument(1)), Compatibility.FULL)));
 
-    this.controller = new SchemasController(schemaRegistryService);
+    this.controller = new SchemasController(schemaRegistryService, null);
     this.controller.setAccessControlService(new AccessControlServiceMock().getMock());
     this.controller.setAuditService(mock(AuditService.class));
     this.controller.setClustersStorage(clustersStorage);
diff --git a/contract/build.gradle b/contract/build.gradle
index d6d662970..68fd0d712 100644
--- a/contract/build.gradle
+++ b/contract/build.gradle
@@ -95,6 +95,23 @@ tasks.register('generateSchemaRegistryClient', GenerateTask) {
                    dateLibrary : "java8",]
 }
 
+tasks.register('generateGcpSchemaRegistryClient', GenerateTask) {
+  generatorName = "java"
+  inputSpec = specDir.file("kafka-gcp-sr-api.yaml").asFile.absolutePath
+  outputDir = targetDir.dir("kafka-gcp-sr-client").asFile.absolutePath
+  generateApiTests = false
+  generateModelTests = false
+  apiPackage = "io.kafbat.ui.gcp.sr.api"
+  invokerPackage = "io.kafbat.ui.gcp.sr"
+  modelPackage = "io.kafbat.ui.gcp.sr.model"
+
+  configOptions = [asyncNative      : "true",
+                   library          : "webclient",
+                   useJakartaEe     : "true",
+                   useBeanValidation: "true",
+                   dateLibrary      : "java8",]
+}
+
 sourceSets {
   main {
     java {
@@ -102,6 +119,7 @@ sourceSets {
       srcDir targetDir.dir("kafka-connect-client/src/main/java")
       srcDir targetDir.dir("kafbat-ui-client/src/main/java")
      srcDir targetDir.dir("kafka-sr-client/src/main/java")
+      srcDir targetDir.dir("kafka-gcp-sr-client/src/main/java")
     }
 
     resources {
@@ -110,5 +128,5 @@ sourceSets {
   }
 }
 
-compileJava.dependsOn generateUiClient, generateBackendApi, generateConnectClient, generateSchemaRegistryClient
-processResources.dependsOn generateUiClient, generateBackendApi, generateConnectClient, generateSchemaRegistryClient
+compileJava.dependsOn generateUiClient, generateBackendApi, generateConnectClient, generateSchemaRegistryClient, generateGcpSchemaRegistryClient
+processResources.dependsOn generateUiClient, generateBackendApi, generateConnectClient, generateSchemaRegistryClient, generateGcpSchemaRegistryClient
diff --git a/contract/src/main/resources/swagger/kafbat-ui-api.yaml b/contract/src/main/resources/swagger/kafbat-ui-api.yaml
index 8769e6aa1..1d92b4eff 100644
--- a/contract/src/main/resources/swagger/kafbat-ui-api.yaml
+++ b/contract/src/main/resources/swagger/kafbat-ui-api.yaml
@@ -4334,6 +4334,8 @@ components:
                 type: string
               password:
                 type: string
+              bearerAuthCustomProviderClass:
+                type: string
             schemaRegistrySsl:
               type: object
               properties:
diff --git a/contract/src/main/resources/swagger/kafka-gcp-sr-api.yaml b/contract/src/main/resources/swagger/kafka-gcp-sr-api.yaml
new file mode 100644
index 000000000..6eb30da6e
--- /dev/null
+++ b/contract/src/main/resources/swagger/kafka-gcp-sr-api.yaml
@@ -0,0 +1,422 @@
+openapi: 3.0.0
+info:
+  description: Api Documentation
+  version: 0.1.0
+  title: Api Documentation
+  termsOfService: urn:tos
+  contact: {}
+  license:
+    name: Apache 2.0
+    url: http://www.apache.org/licenses/LICENSE-2.0
+tags:
+  - name: /gcpschemaregistry
+servers:
+  - url: /localhost
+
+paths:
+  /subjects:
+    get:
+      tags:
+        - KafkaGcpSrClient
+      summary: get all subject names from the schema registry
+      operationId: getAllSubjectNames
+      parameters:
+        - name: subjectPrefix
+          in: query
+          required: false
+          schema:
+            type: string
+        - name: deleted
+          in: query
+          schema:
+            type: boolean
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                #workaround for https://github.com/spring-projects/spring-framework/issues/24734
+                type: string
+
+  /subjects/{subject}:
+    delete:
+      tags:
+        - KafkaGcpSrClient
+      operationId: deleteAllSubjectVersions
+      parameters:
+        - name: subject
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: permanent
+          in: query
+          schema:
+            type: boolean
+          required: false
+      responses:
+        200:
+          description: OK
+        404:
+          description: Not found
+
+  /subjects/{subject}/versions/{version}:
+    get:
+      tags:
+        - KafkaGcpSrClient
+      operationId: getSubjectVersion
+      parameters:
+        - name: subject
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: version
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: deleted
+          in: query
+          schema:
+            type: boolean
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/SchemaSubject'
+        404:
+          description: Not found
+        422:
+          description: Invalid version
+    delete:
+      tags:
+        - KafkaGcpSrClient
+      operationId: deleteSubjectVersion
+      parameters:
+        - name: subject
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: permanent
+          in: query
+          required: false
+          schema:
+            type: boolean
+            default: false
+        - name: version
+          in: path
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: OK
+        404:
+          description: Not found
+
+  /subjects/{subject}/versions:
+    get:
+      tags:
+        - KafkaGcpSrClient
+      operationId: getSubjectVersions
+      parameters:
+        - name: subject
+          in: path
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: integer
+                  format: int32
+        404:
+          description: Not found
+    post:
+      tags:
+        - KafkaGcpSrClient
+      operationId: registerNewSchema
+      parameters:
+        - name: subject
+          in: path
+          required: true
+          schema:
+            type: string
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/NewSubject'
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/SubjectId'
+
+  /config:
+    get:
+      tags:
+        - KafkaGcpSrClient
+      operationId: getGlobalCompatibilityLevel
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/CompatibilityConfig'
+        404:
+          description: Not found
+    put:
+      tags:
+        - KafkaGcpSrClient
+      operationId: updateGlobalCompatibilityLevel
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/CompatibilityLevelChange'
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/CompatibilityLevelChange'
+        404:
+          description: Not found
+
+  /config/{subject}:
+    get:
+      tags:
+        - KafkaGcpSrClient
+      operationId: getSubjectCompatibilityLevel
+      parameters:
+        - name: subject
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: defaultToGlobal
+          in: query
+          required: true
+          schema:
+            type: boolean
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/CompatibilityConfig'
+        404:
+          description: Not found
+    put:
+      tags:
+        - KafkaGcpSrClient
+      operationId: updateSubjectCompatibilityLevel
+      parameters:
+        - name: subject
+          in: path
+          required: true
+          schema:
+            type: string
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/CompatibilityLevelChange'
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/CompatibilityLevelChange'
+        404:
+          description: Not found
+    delete:
+      tags:
+        - KafkaGcpSrClient
+      operationId: deleteSubjectCompatibilityLevel
+      parameters:
+        - name: subject
+          in: path
+          required: true
+          schema:
+            type: string
+      responses:
+        200:
+          description: OK
+        404:
+          description: Not found
+
+  /compatibility/subjects/{subject}/versions/{version}:
+    post:
+      tags:
+        - KafkaGcpSrClient
+      operationId: checkSchemaCompatibility
+      parameters:
+        - name: subject
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: version
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: verbose
+          in: query
+          description: Show reason a schema fails the compatibility test
+          schema:
+            type: boolean
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/NewSubject'
+      responses:
+        200:
+          description: OK
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/CompatibilityCheckResponse'
+        404:
+          description: Not found
+
+security:
+  - basicAuth: []
+
+components:
+  securitySchemes:
+    basicAuth:
+      type: http
+      scheme: basic
+  schemas:
+    SchemaSubject:
+      type: object
+      properties:
+        subject:
+          type: string
+        version:
+          type: string
+        id:
+          type: integer
+        schema:
+          type: string
+        schemaType:
+          $ref: '#/components/schemas/SchemaType'
+        references:
+          type: array
+          items:
+            $ref: '#/components/schemas/SchemaReference'
+      required:
+        - id
+        - subject
+        - version
+        - schema
+        - schemaType
+
+    SchemaType:
+      type: string
+      description: upon updating a schema, the type of an existing schema can't be changed
+      enum:
+        - AVRO
+        - JSON
+        - PROTOBUF
+
+    SchemaReference:
+      type: object
+      properties:
+        name:
+          type: string
+        subject:
+          type: string
+        version:
+          type: integer
+      required:
+        - name
+        - subject
+        - version
+
+    SubjectId:
+      type: object
+      properties:
+        id:
+          type: integer
+
+    NewSubject:
+      type: object
+      description: should be set for creating/updating schema subject
+      properties:
+        schema:
+          type: string
+        schemaType:
+          $ref: '#/components/schemas/SchemaType'
+        references:
+          type: array
+          items:
+            $ref: '#/components/schemas/SchemaReference'
+      required:
+        - schema
+        - schemaType
+
+#    CompatibilityConfig:
+#      type: object
+#      properties:
+#        compatibilityLevel:
+#          $ref: '#/components/schemas/Compatibility'
+#      required:
+#        - compatibilityLevel
+
+    CompatibilityConfig:
+      type: object
+      properties:
+        alias:
+          type: string
+        compatibility:
+          $ref: '#/components/schemas/Compatibility'
+        normalize:
+          type: boolean
+      required:
+        - compatibility
+
+    CompatibilityLevelChange:
+      type: object
+      properties:
+        compatibility:
+          $ref: '#/components/schemas/Compatibility'
+      required:
+        - compatibility
+
+    Compatibility:
+      type: string
+      enum:
+        - BACKWARD
+        - BACKWARD_TRANSITIVE
+        - FORWARD
+        - FORWARD_TRANSITIVE
+        - FULL
+        - FULL_TRANSITIVE
+        - NONE
+
+    CompatibilityCheckResponse:
+      type: object
+      properties:
+        is_compatible:
+          type: boolean