From 9d9010b1582153692f2ec7c9b6493c7518478674 Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Sun, 5 Jan 2025 12:54:18 +0100 Subject: [PATCH 01/32] use autoconfig instead of manual config: 999 Next, disable wildcard imports: navigate to Editor -> Code Style -> Java -> Imports and set Class count to use import with '*' to 999. Do the same with Names count to use static import with '*'. --- .editorconfig | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 .editorconfig diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000000000..c23676479e5b1 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,10 @@ +[*] +charset = utf-8 +end_of_line = lf +indent_style = space +insert_final_newline = false +max_line_length = 160 + +[*.java] +ij_java_names_count_to_use_import_on_demand = 999 +ij_java_class_count_to_use_import_on_demand = 999 From 6dc43b3ef0eaa1049de74404c40e03f318fc416e Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Sun, 5 Jan 2025 12:54:51 +0100 Subject: [PATCH 02/32] max_line_length = 140 --- .editorconfig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.editorconfig b/.editorconfig index c23676479e5b1..97a5881cfa7fd 100644 --- a/.editorconfig +++ b/.editorconfig @@ -3,7 +3,7 @@ charset = utf-8 end_of_line = lf indent_style = space insert_final_newline = false -max_line_length = 160 +max_line_length = 140 [*.java] ij_java_names_count_to_use_import_on_demand = 999 From 0057f06bfb1bf25a0c6bab9094a79312b933defc Mon Sep 17 00:00:00 2001 From: Ozan Gunalp Date: Tue, 14 Jan 2025 17:39:02 +0100 Subject: [PATCH 03/32] Bump strimzi-test-container version from 0.107.0 to 0.109.1 server properties no longer works with auto-config --- bom/application/pom.xml | 6 +- bom/test/pom.xml | 6 +- .../it/kafka/KafkaKeycloakTestResource.java | 27 ++- .../src/test/resources/kafkaServer.properties | 164 ----------------- .../it/kafka/KafkaSaslTestResource.java | 16 +- .../src/test/resources/kafkaServer.properties | 172 ------------------ .../it/kafka/KafkaSASLTestResource.java | 26 ++- .../src/test/resources/server.properties | 157 ---------------- .../it/kafka/KafkaSSLTestResource.java | 15 +- .../src/test/resources/server.properties | 154 ---------------- .../kafka/streams/KafkaSSLTestResource.java | 12 +- .../src/test/resources/server.properties | 154 ---------------- 12 files changed, 88 insertions(+), 821 deletions(-) delete mode 100644 integration-tests/kafka-oauth-keycloak/src/test/resources/kafkaServer.properties delete mode 100644 integration-tests/kafka-sasl-elytron/src/test/resources/kafkaServer.properties delete mode 100644 integration-tests/kafka-sasl/src/test/resources/server.properties delete mode 100644 integration-tests/kafka-ssl/src/test/resources/server.properties delete mode 100644 integration-tests/kafka-streams/src/test/resources/server.properties diff --git a/bom/application/pom.xml b/bom/application/pom.xml index 853be36300924..adbaa412256d8 100644 --- a/bom/application/pom.xml +++ b/bom/application/pom.xml @@ -141,7 +141,7 @@ 3.7.2 1.8.0 1.1.10.5 - 0.107.0 + 0.109.1 2.13.16 1.2.3 @@ -4496,6 +4496,10 @@ junit junit + + org.apache.logging.log4j + log4j-slf4j-impl + diff --git a/bom/test/pom.xml b/bom/test/pom.xml index 91b39861d8ace..aefe674f92418 100644 --- a/bom/test/pom.xml +++ b/bom/test/pom.xml @@ -20,7 +20,7 @@ 2.3.1 1.3.8 - 0.107.0 + 0.109.1 1.0.0-alpha @@ -67,6 +67,10 @@ com.github.docker-java docker-java-api + + org.apache.logging.log4j + log4j-slf4j-impl + diff --git 
a/integration-tests/kafka-oauth-keycloak/src/test/java/io/quarkus/it/kafka/KafkaKeycloakTestResource.java b/integration-tests/kafka-oauth-keycloak/src/test/java/io/quarkus/it/kafka/KafkaKeycloakTestResource.java index a1f6c02497b70..ba3b08c697c1d 100644 --- a/integration-tests/kafka-oauth-keycloak/src/test/java/io/quarkus/it/kafka/KafkaKeycloakTestResource.java +++ b/integration-tests/kafka-oauth-keycloak/src/test/java/io/quarkus/it/kafka/KafkaKeycloakTestResource.java @@ -1,12 +1,11 @@ package io.quarkus.it.kafka; import static io.strimzi.test.container.StrimziKafkaContainer.KAFKA_PORT; +import static java.util.Map.entry; import java.util.HashMap; import java.util.Map; -import org.testcontainers.utility.MountableFile; - import io.quarkus.test.common.QuarkusTestResourceLifecycleManager; import io.quarkus.test.keycloak.client.KeycloakTestClient; import io.quarkus.test.keycloak.server.KeycloakContainer; @@ -31,14 +30,24 @@ public Map start() { //Start kafka container this.kafka = new StrimziKafkaContainer("quay.io/strimzi/kafka:latest-kafka-3.7.0") .withBrokerId(1) - .withKafkaConfigurationMap(Map.of("listener.security.protocol.map", - "JWT:SASL_PLAINTEXT,BROKER1:PLAINTEXT", - "listener.name.jwt.oauthbearer.sasl.jaas.config", - getOauthSaslJaasConfig(keycloak.getInternalUrl(), keycloak.getServerUrl()), - "listener.name.jwt.plain.sasl.jaas.config", - getPlainSaslJaasConfig(keycloak.getInternalUrl(), keycloak.getServerUrl()))) + .withKafkaConfigurationMap(Map.ofEntries( + entry("listener.security.protocol.map", "JWT:SASL_PLAINTEXT,BROKER1:PLAINTEXT"), + entry("listener.name.jwt.oauthbearer.sasl.jaas.config", + getOauthSaslJaasConfig(keycloak.getInternalUrl(), keycloak.getServerUrl())), + entry("listener.name.jwt.plain.sasl.jaas.config", + getPlainSaslJaasConfig(keycloak.getInternalUrl(), keycloak.getServerUrl())), + entry("sasl.enabled.mechanisms", "OAUTHBEARER"), + entry("sasl.mechanism.inter.broker.protocol", "OAUTHBEARER"), + entry("oauth.username.claim", "preferred_username"), + entry("principal.builder.class", "io.strimzi.kafka.oauth.server.OAuthKafkaPrincipalBuilder"), + entry("listener.name.jwt.sasl.enabled.mechanisms", "OAUTHBEARER,PLAIN"), + entry("listener.name.jwt.oauthbearer.sasl.server.callback.handler.class", + "io.strimzi.kafka.oauth.server.JaasServerOauthValidatorCallbackHandler"), + entry("listener.name.jwt.oauthbearer.sasl.login.callback.handler.class", + "io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler"), + entry("listener.name.jwt.plain.sasl.server.callback.handler.class", + "io.strimzi.kafka.oauth.server.plain.JaasServerOauthOverPlainValidatorCallbackHandler"))) .withNetworkAliases("kafka") - .withServerProperties(MountableFile.forClasspathResource("kafkaServer.properties")) .withBootstrapServers( c -> String.format("JWT://%s:%s", c.getHost(), c.getMappedPort(KAFKA_PORT))); this.kafka.start(); diff --git a/integration-tests/kafka-oauth-keycloak/src/test/resources/kafkaServer.properties b/integration-tests/kafka-oauth-keycloak/src/test/resources/kafkaServer.properties deleted file mode 100644 index d148fcc18242f..0000000000000 --- a/integration-tests/kafka-oauth-keycloak/src/test/resources/kafkaServer.properties +++ /dev/null @@ -1,164 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# see kafka.server.KafkaConfig for additional details and defaults - -############################# Server Basics ############################# - -# The id of the broker. This must be set to a unique integer for each broker. -broker.id=1 - -############################# Socket Server Settings ############################# - -# The address the socket server listens on. It will get the value returned from -# java.net.InetAddress.getCanonicalHostName() if not configured. -# FORMAT: -# listeners = listener_name://host_name:port -# EXAMPLE: -# listeners = PLAINTEXT://your.host.name:9092 -#listeners=PLAINTEXT://:9092 -listeners=JWT://:9092 -#advertised.listeners=SASL_PLAINTEXT://localhost:9092 - - - -# Hostname and port the broker will advertise to producers and consumers. If not set, -# it uses the value for "listeners" if configured. Otherwise, it will use the value -# returned from java.net.InetAddress.getCanonicalHostName(). -#advertised.listeners=PLAINTEXT://your.host.name:9092 -#advertised.listeners=SASL_PLAINTEXT://localhost:9092 - -# Maps listener names to security protocols, the default is for them to be the same. See the config documentation for more details -#listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL -listener.security.protocol.map=JWT:SASL_PLAINTEXT - - -# The number of threads that the server uses for receiving requests from the network and sending responses to the network -num.network.threads=3 - -# The number of threads that the server uses for processing requests, which may include disk I/O -num.io.threads=8 - -# The send buffer (SO_SNDBUF) used by the socket server -socket.send.buffer.bytes=102400 - -# The receive buffer (SO_RCVBUF) used by the socket server -socket.receive.buffer.bytes=102400 - -# The maximum size of a request that the socket server will accept (protection against OOM) -socket.request.max.bytes=104857600 - - -inter.broker.listener.name=JWT - - -#### SASL #### - -sasl.enabled.mechanisms=OAUTHBEARER - -sasl.mechanism.inter.broker.protocol=OAUTHBEARER - -oauth.username.claim=preferred_username -principal.builder.class=io.strimzi.kafka.oauth.server.OAuthKafkaPrincipalBuilder - -listener.name.jwt.sasl.enabled.mechanisms=OAUTHBEARER,PLAIN -listener.name.jwt.oauthbearer.sasl.jaas.config=set_by_test - -listener.name.jwt.oauthbearer.sasl.server.callback.handler.class=io.strimzi.kafka.oauth.server.JaasServerOauthValidatorCallbackHandler -listener.name.jwt.oauthbearer.sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler -#listener.name.jwt.plain.sasl.jaas.config=set_by_test - -listener.name.jwt.plain.sasl.server.callback.handler.class=io.strimzi.kafka.oauth.server.plain.JaasServerOauthOverPlainValidatorCallbackHandler - -############################# Log Basics ############################# - -# A comma separated list of directories under which to store log files -log.dirs=/tmp/kafka-logs - -# 
The default number of log partitions per topic. More partitions allow greater -# parallelism for consumption, but this will also result in more files across -# the brokers. -num.partitions=1 - -# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown. -# This value is recommended to be increased for installations with data dirs located in RAID array. -num.recovery.threads.per.data.dir=1 - -############################# Internal Topic Settings ############################# -# The replication factor for the group metadata internal topics "__consumer_offsets" and "__transaction_state" -# For anything other than development testing, a value greater than 1 is recommended to ensure availability such as 3. -offsets.topic.replication.factor=1 -transaction.state.log.replication.factor=1 -transaction.state.log.min.isr=1 - -############################# Log Flush Policy ############################# - -# Messages are immediately written to the filesystem but by default we only fsync() to sync -# the OS cache lazily. The following configurations control the flush of data to disk. -# There are a few important trade-offs here: -# 1. Durability: Unflushed data may be lost if you are not using replication. -# 2. Latency: Very large flush intervals may lead to latency spikes when the flush does occur as there will be a lot of data to flush. -# 3. Throughput: The flush is generally the most expensive operation, and a small flush interval may lead to excessive seeks. -# The settings below allow one to configure the flush policy to flush data after a period of time or -# every N messages (or both). This can be done globally and overridden on a per-topic basis. - -# The number of messages to accept before forcing a flush of data to disk -#log.flush.interval.messages=10000 - -# The maximum amount of time a message can sit in a log before we force a flush -#log.flush.interval.ms=1000 - -############################# Log Retention Policy ############################# - -# The following configurations control the disposal of log segments. The policy can -# be set to delete segments after a period of time, or after a given size has accumulated. -# A segment will be deleted whenever *either* of these criteria are met. Deletion always happens -# from the end of the log. - -# The minimum age of a log file to be eligible for deletion due to age -log.retention.hours=168 - -# A size-based retention policy for logs. Segments are pruned from the log unless the remaining -# segments drop below log.retention.bytes. Functions independently of log.retention.hours. -#log.retention.bytes=1073741824 - -# The maximum size of a log segment file. When this size is reached a new log segment will be created. -log.segment.bytes=1073741824 - -# The interval at which log segments are checked to see if they can be deleted according -# to the retention policies -log.retention.check.interval.ms=300000 - -############################# Zookeeper ############################# - -# Zookeeper connection string (see zookeeper docs for details). -# This is a comma separated host:port pairs, each corresponding to a zk -# server. e.g. "127.0.0.1:3000,127.0.0.1:3001,127.0.0.1:3002". -# You can also append an optional chroot string to the urls to specify the -# root directory for all kafka znodes. 
-zookeeper.connect=localhost:2181 - -# Timeout in ms for connecting to zookeeper -zookeeper.connection.timeout.ms=45000 - - -############################# Group Coordinator Settings ############################# - -# The following configuration specifies the time, in milliseconds, that the GroupCoordinator will delay the initial consumer rebalance. -# The rebalance will be further delayed by the value of group.initial.rebalance.delay.ms as new members join the group, up to a maximum of max.poll.interval.ms. -# The default value for this is 3 seconds. -# We override this to 0 here as it makes for a better out-of-the-box experience for development and testing. -# However, in production environments the default value of 3 seconds is more suitable as this will help to avoid unnecessary, and potentially expensive, rebalances during application startup. -group.initial.rebalance.delay.ms=0 \ No newline at end of file diff --git a/integration-tests/kafka-sasl-elytron/src/test/java/io/quarkus/it/kafka/KafkaSaslTestResource.java b/integration-tests/kafka-sasl-elytron/src/test/java/io/quarkus/it/kafka/KafkaSaslTestResource.java index f496581ecaaa5..4535e9eae893a 100644 --- a/integration-tests/kafka-sasl-elytron/src/test/java/io/quarkus/it/kafka/KafkaSaslTestResource.java +++ b/integration-tests/kafka-sasl-elytron/src/test/java/io/quarkus/it/kafka/KafkaSaslTestResource.java @@ -1,6 +1,7 @@ package io.quarkus.it.kafka; import static io.strimzi.test.container.StrimziKafkaContainer.KAFKA_PORT; +import static java.util.Map.entry; import java.util.HashMap; import java.util.Map; @@ -34,10 +35,23 @@ public Map start() { //Start kafka container kafka = new StrimziKafkaContainer() + .withBrokerId(0) .withBootstrapServers( c -> String.format("SASL_PLAINTEXT://%s:%s", c.getHost(), c.getMappedPort(KAFKA_PORT))) + .withKafkaConfigurationMap(Map.ofEntries( + entry("listener.security.protocol.map", "SASL_PLAINTEXT:SASL_PLAINTEXT,BROKER1:PLAINTEXT"), + entry("inter.broker.listener.name", "SASL_PLAINTEXT"), + entry("sasl.enabled.mechanisms", "GSSAPI"), + entry("sasl.mechanism.inter.broker.protocol", "GSSAPI"), + entry("listener.name.sasl_plaintext.gssapi.sasl.jaas.config", + "com.sun.security.auth.module.Krb5LoginModule required " + + "useKeyTab=true storeKey=true debug=true serviceName=\"kafka\" " + + "keyTab=\"/opt/kafka/config/kafkabroker.keytab\" " + + "principal=\"kafka/localhost@EXAMPLE.COM\";"), + entry("sasl.kerberos.service.name", "kafka"), + entry("ssl.endpoint.identification.algorithm", "https"), + entry("ssl.client.auth", "none"))) .withPort(KAFKA_PORT) - .withServerProperties(MountableFile.forClasspathResource("kafkaServer.properties")) .withCopyFileToContainer(MountableFile.forClasspathResource("krb5KafkaBroker.conf"), "/etc/krb5.conf") .withCopyFileToContainer(MountableFile.forHostPath("target/kafkabroker.keytab"), diff --git a/integration-tests/kafka-sasl-elytron/src/test/resources/kafkaServer.properties b/integration-tests/kafka-sasl-elytron/src/test/resources/kafkaServer.properties deleted file mode 100644 index c10c56cc57ba2..0000000000000 --- a/integration-tests/kafka-sasl-elytron/src/test/resources/kafkaServer.properties +++ /dev/null @@ -1,172 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# see kafka.server.KafkaConfig for additional details and defaults - -############################# Server Basics ############################# - -# The id of the broker. This must be set to a unique integer for each broker. -broker.id=0 - -############################# Socket Server Settings ############################# - -# The address the socket server listens on. It will get the value returned from -# java.net.InetAddress.getCanonicalHostName() if not configured. -# FORMAT: -# listeners = listener_name://host_name:port -# EXAMPLE: -# listeners = PLAINTEXT://your.host.name:9092 -#listeners=PLAINTEXT://:9092 -listeners=SASL_PLAINTEXT://:9092 -#advertised.listeners=SASL_PLAINTEXT://localhost:9092 - - - -# Hostname and port the broker will advertise to producers and consumers. If not set, -# it uses the value for "listeners" if configured. Otherwise, it will use the value -# returned from java.net.InetAddress.getCanonicalHostName(). -#advertised.listeners=PLAINTEXT://your.host.name:9092 -#advertised.listeners=SASL_PLAINTEXT://localhost:9092 - -# Maps listener names to security protocols, the default is for them to be the same. See the config documentation for more details -#listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL -listener.security.protocol.map=SASL_PLAINTEXT:SASL_PLAINTEXT - - -# The number of threads that the server uses for receiving requests from the network and sending responses to the network -num.network.threads=3 - -# The number of threads that the server uses for processing requests, which may include disk I/O -num.io.threads=8 - -# The send buffer (SO_SNDBUF) used by the socket server -socket.send.buffer.bytes=102400 - -# The receive buffer (SO_RCVBUF) used by the socket server -socket.receive.buffer.bytes=102400 - -# The maximum size of a request that the socket server will accept (protection against OOM) -socket.request.max.bytes=104857600 - - -inter.broker.listener.name=SASL_PLAINTEXT - - -#### SASL #### - -sasl.enabled.mechanisms=GSSAPI - -sasl.mechanism.inter.broker.protocol=GSSAPI - -#listener.name.sasl_plaintext.plain.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required \ -# username="broker" \ -# password="broker-secret" \ -# user_broker="broker-secret" \ -# user_client="client-secret"; - -listener.name.sasl_plaintext.gssapi.sasl.jaas.config=com.sun.security.auth.module.Krb5LoginModule required \ - useKeyTab=true \ - storeKey=true \ - debug=true \ - serviceName="kafka" \ - keyTab="/opt/kafka/config/kafkabroker.keytab" \ - principal="kafka/localhost@EXAMPLE.COM"; - -sasl.kerberos.service.name=kafka - -#ssl.endpoint.identification.algortigm=https://localhost -ssl.endpoint.identification.algorithm=https -ssl.client.auth=none - -############################# Log Basics ############################# - -# A comma separated list of directories under which to store log files -log.dirs=/tmp/kafka-logs - -# The default 
number of log partitions per topic. More partitions allow greater -# parallelism for consumption, but this will also result in more files across -# the brokers. -num.partitions=1 - -# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown. -# This value is recommended to be increased for installations with data dirs located in RAID array. -num.recovery.threads.per.data.dir=1 - -############################# Internal Topic Settings ############################# -# The replication factor for the group metadata internal topics "__consumer_offsets" and "__transaction_state" -# For anything other than development testing, a value greater than 1 is recommended to ensure availability such as 3. -offsets.topic.replication.factor=1 -transaction.state.log.replication.factor=1 -transaction.state.log.min.isr=1 - -############################# Log Flush Policy ############################# - -# Messages are immediately written to the filesystem but by default we only fsync() to sync -# the OS cache lazily. The following configurations control the flush of data to disk. -# There are a few important trade-offs here: -# 1. Durability: Unflushed data may be lost if you are not using replication. -# 2. Latency: Very large flush intervals may lead to latency spikes when the flush does occur as there will be a lot of data to flush. -# 3. Throughput: The flush is generally the most expensive operation, and a small flush interval may lead to excessive seeks. -# The settings below allow one to configure the flush policy to flush data after a period of time or -# every N messages (or both). This can be done globally and overridden on a per-topic basis. - -# The number of messages to accept before forcing a flush of data to disk -#log.flush.interval.messages=10000 - -# The maximum amount of time a message can sit in a log before we force a flush -#log.flush.interval.ms=1000 - -############################# Log Retention Policy ############################# - -# The following configurations control the disposal of log segments. The policy can -# be set to delete segments after a period of time, or after a given size has accumulated. -# A segment will be deleted whenever *either* of these criteria are met. Deletion always happens -# from the end of the log. - -# The minimum age of a log file to be eligible for deletion due to age -log.retention.hours=168 - -# A size-based retention policy for logs. Segments are pruned from the log unless the remaining -# segments drop below log.retention.bytes. Functions independently of log.retention.hours. -#log.retention.bytes=1073741824 - -# The maximum size of a log segment file. When this size is reached a new log segment will be created. -log.segment.bytes=1073741824 - -# The interval at which log segments are checked to see if they can be deleted according -# to the retention policies -log.retention.check.interval.ms=300000 - -############################# Zookeeper ############################# - -# Zookeeper connection string (see zookeeper docs for details). -# This is a comma separated host:port pairs, each corresponding to a zk -# server. e.g. "127.0.0.1:3000,127.0.0.1:3001,127.0.0.1:3002". -# You can also append an optional chroot string to the urls to specify the -# root directory for all kafka znodes. 
-zookeeper.connect=localhost:2181 - -# Timeout in ms for connecting to zookeeper -zookeeper.connection.timeout.ms=45000 - - -############################# Group Coordinator Settings ############################# - -# The following configuration specifies the time, in milliseconds, that the GroupCoordinator will delay the initial consumer rebalance. -# The rebalance will be further delayed by the value of group.initial.rebalance.delay.ms as new members join the group, up to a maximum of max.poll.interval.ms. -# The default value for this is 3 seconds. -# We override this to 0 here as it makes for a better out-of-the-box experience for development and testing. -# However, in production environments the default value of 3 seconds is more suitable as this will help to avoid unnecessary, and potentially expensive, rebalances during application startup. -group.initial.rebalance.delay.ms=0 \ No newline at end of file diff --git a/integration-tests/kafka-sasl/src/test/java/io/quarkus/it/kafka/KafkaSASLTestResource.java b/integration-tests/kafka-sasl/src/test/java/io/quarkus/it/kafka/KafkaSASLTestResource.java index 8b3ffa5464990..cfbd669b7c0a4 100644 --- a/integration-tests/kafka-sasl/src/test/java/io/quarkus/it/kafka/KafkaSASLTestResource.java +++ b/integration-tests/kafka-sasl/src/test/java/io/quarkus/it/kafka/KafkaSASLTestResource.java @@ -1,26 +1,40 @@ package io.quarkus.it.kafka; import static io.strimzi.test.container.StrimziKafkaContainer.KAFKA_PORT; +import static java.util.Map.entry; import java.util.HashMap; import java.util.Map; -import org.testcontainers.utility.MountableFile; - import io.quarkus.test.common.QuarkusTestResourceLifecycleManager; import io.strimzi.test.container.StrimziKafkaContainer; public class KafkaSASLTestResource implements QuarkusTestResourceLifecycleManager { private final StrimziKafkaContainer kafka = new StrimziKafkaContainer() - .withServerProperties(MountableFile.forClasspathResource("server.properties")) - .withBootstrapServers( - container -> String.format("SASL_PLAINTEXT://%s:%s", container.getHost(), - container.getMappedPort(KAFKA_PORT))); + .withBrokerId(0) + .withBootstrapServers(c -> String.format("SASL_PLAINTEXT://%s:%s", c.getHost(), + c.getMappedPort(KAFKA_PORT))) + .withKafkaConfigurationMap(Map.ofEntries( + entry("listener.security.protocol.map", + "SASL_PLAINTEXT:SASL_PLAINTEXT,BROKER1:PLAINTEXT,PLAINTEXT:PLAINTEXT"), + entry("sasl.enabled.mechanisms", "PLAIN"), + entry("sasl.mechanism.inter.broker.protocol", "PLAIN"), + entry("listener.name.sasl_plaintext.plain.sasl.jaas.config", + "org.apache.kafka.common.security.plain.PlainLoginModule required " + + "username=\"broker\" " + + "password=\"broker-secret\" " + + "user_broker=\"broker-secret\" " + + "user_client=\"client-secret\";"))); @Override public Map start() { kafka.start(); + try { + Thread.sleep(3000); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } // Used by the test System.setProperty("bootstrap.servers", kafka.getBootstrapServers()); // Used by the application diff --git a/integration-tests/kafka-sasl/src/test/resources/server.properties b/integration-tests/kafka-sasl/src/test/resources/server.properties deleted file mode 100644 index c7ee8c2298c00..0000000000000 --- a/integration-tests/kafka-sasl/src/test/resources/server.properties +++ /dev/null @@ -1,157 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# see kafka.server.KafkaConfig for additional details and defaults - -############################# Server Basics ############################# - -# The id of the broker. This must be set to a unique integer for each broker. -broker.id=0 - -############################# Socket Server Settings ############################# - -# The address the socket server listens on. It will get the value returned from -# java.net.InetAddress.getCanonicalHostName() if not configured. -# FORMAT: -# listeners = listener_name://host_name:port -# EXAMPLE: -# listeners = PLAINTEXT://your.host.name:9092 -#listeners=PLAINTEXT://:9092 -listeners=SASL_PLAINTEXT://:9092 - - - -# Hostname and port the broker will advertise to producers and consumers. If not set, -# it uses the value for "listeners" if configured. Otherwise, it will use the value -# returned from java.net.InetAddress.getCanonicalHostName(). -#advertised.listeners=PLAINTEXT://your.host.name:9092 -#advertised.listeners=SASL_PLAINTEXT://localhost:9092 - -# Maps listener names to security protocols, the default is for them to be the same. See the config documentation for more details -#listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL -listener.security.protocol.map=SASL_PLAINTEXT:SASL_PLAINTEXT - -# The number of threads that the server uses for receiving requests from the network and sending responses to the network -num.network.threads=3 - -# The number of threads that the server uses for processing requests, which may include disk I/O -num.io.threads=8 - -# The send buffer (SO_SNDBUF) used by the socket server -socket.send.buffer.bytes=102400 - -# The receive buffer (SO_RCVBUF) used by the socket server -socket.receive.buffer.bytes=102400 - -# The maximum size of a request that the socket server will accept (protection against OOM) -socket.request.max.bytes=104857600 - - -inter.broker.listener.name=SASL_PLAINTEXT - - -#### SASL #### - -sasl.enabled.mechanisms=PLAIN - -sasl.mechanism.inter.broker.protocol=PLAIN - -listener.name.sasl_plaintext.plain.sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required \ - username="broker" \ - password="broker-secret" \ - user_broker="broker-secret" \ - user_client="client-secret"; - - -############################# Log Basics ############################# - -# A comma separated list of directories under which to store log files -log.dirs=/tmp/kafka-logs - -# The default number of log partitions per topic. More partitions allow greater -# parallelism for consumption, but this will also result in more files across -# the brokers. -num.partitions=1 - -# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown. -# This value is recommended to be increased for installations with data dirs located in RAID array. 
-num.recovery.threads.per.data.dir=1 - -############################# Internal Topic Settings ############################# -# The replication factor for the group metadata internal topics "__consumer_offsets" and "__transaction_state" -# For anything other than development testing, a value greater than 1 is recommended to ensure availability such as 3. -offsets.topic.replication.factor=1 -transaction.state.log.replication.factor=1 -transaction.state.log.min.isr=1 - -############################# Log Flush Policy ############################# - -# Messages are immediately written to the filesystem but by default we only fsync() to sync -# the OS cache lazily. The following configurations control the flush of data to disk. -# There are a few important trade-offs here: -# 1. Durability: Unflushed data may be lost if you are not using replication. -# 2. Latency: Very large flush intervals may lead to latency spikes when the flush does occur as there will be a lot of data to flush. -# 3. Throughput: The flush is generally the most expensive operation, and a small flush interval may lead to excessive seeks. -# The settings below allow one to configure the flush policy to flush data after a period of time or -# every N messages (or both). This can be done globally and overridden on a per-topic basis. - -# The number of messages to accept before forcing a flush of data to disk -#log.flush.interval.messages=10000 - -# The maximum amount of time a message can sit in a log before we force a flush -#log.flush.interval.ms=1000 - -############################# Log Retention Policy ############################# - -# The following configurations control the disposal of log segments. The policy can -# be set to delete segments after a period of time, or after a given size has accumulated. -# A segment will be deleted whenever *either* of these criteria are met. Deletion always happens -# from the end of the log. - -# The minimum age of a log file to be eligible for deletion due to age -log.retention.hours=168 - -# A size-based retention policy for logs. Segments are pruned from the log unless the remaining -# segments drop below log.retention.bytes. Functions independently of log.retention.hours. -#log.retention.bytes=1073741824 - -# The maximum size of a log segment file. When this size is reached a new log segment will be created. -log.segment.bytes=1073741824 - -# The interval at which log segments are checked to see if they can be deleted according -# to the retention policies -log.retention.check.interval.ms=300000 - -############################# Zookeeper ############################# - -# Zookeeper connection string (see zookeeper docs for details). -# This is a comma separated host:port pairs, each corresponding to a zk -# server. e.g. "127.0.0.1:3000,127.0.0.1:3001,127.0.0.1:3002". -# You can also append an optional chroot string to the urls to specify the -# root directory for all kafka znodes. -zookeeper.connect=localhost:2181 - -# Timeout in ms for connecting to zookeeper -zookeeper.connection.timeout.ms=45000 - - -############################# Group Coordinator Settings ############################# - -# The following configuration specifies the time, in milliseconds, that the GroupCoordinator will delay the initial consumer rebalance. -# The rebalance will be further delayed by the value of group.initial.rebalance.delay.ms as new members join the group, up to a maximum of max.poll.interval.ms. -# The default value for this is 3 seconds. 
-# We override this to 0 here as it makes for a better out-of-the-box experience for development and testing. -# However, in production environments the default value of 3 seconds is more suitable as this will help to avoid unnecessary, and potentially expensive, rebalances during application startup. -group.initial.rebalance.delay.ms=0 \ No newline at end of file diff --git a/integration-tests/kafka-ssl/src/test/java/io/quarkus/it/kafka/KafkaSSLTestResource.java b/integration-tests/kafka-ssl/src/test/java/io/quarkus/it/kafka/KafkaSSLTestResource.java index 24b6a1ca7dff6..3aa2aff9d5f19 100644 --- a/integration-tests/kafka-ssl/src/test/java/io/quarkus/it/kafka/KafkaSSLTestResource.java +++ b/integration-tests/kafka-ssl/src/test/java/io/quarkus/it/kafka/KafkaSSLTestResource.java @@ -1,6 +1,7 @@ package io.quarkus.it.kafka; import static io.strimzi.test.container.StrimziKafkaContainer.KAFKA_PORT; +import static java.util.Map.entry; import java.io.File; import java.util.HashMap; @@ -13,9 +14,21 @@ public class KafkaSSLTestResource implements QuarkusTestResourceLifecycleManager { + Map conf = new HashMap<>(); + private final StrimziKafkaContainer kafka = new StrimziKafkaContainer() .withBootstrapServers(c -> String.format("SSL://%s:%s", c.getHost(), c.getMappedPort(KAFKA_PORT))) - .withServerProperties(MountableFile.forClasspathResource("server.properties")) + .withBrokerId(0) + .withKafkaConfigurationMap(Map.ofEntries( + entry("ssl.keystore.location", "/opt/kafka/config/kafka-keystore.p12"), + entry("ssl.keystore.password", "Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L"), + entry("ssl.keystore.type", "PKCS12"), + entry("ssl.key.password", "Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L"), + entry("ssl.truststore.location", "/opt/kafka/config/kafka-truststore.p12"), + entry("ssl.truststore.password", "Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L"), + entry("ssl.truststore.type", "PKCS12"), + entry("ssl.endpoint.identification.algorithm", ""), + entry("listener.security.protocol.map", "BROKER1:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL"))) .withCopyFileToContainer(MountableFile.forHostPath("target/certs/kafka-keystore.p12"), "/opt/kafka/config/kafka-keystore.p12") .withCopyFileToContainer(MountableFile.forHostPath("target/certs/kafka-truststore.p12"), diff --git a/integration-tests/kafka-ssl/src/test/resources/server.properties b/integration-tests/kafka-ssl/src/test/resources/server.properties deleted file mode 100644 index 349085d8dbf99..0000000000000 --- a/integration-tests/kafka-ssl/src/test/resources/server.properties +++ /dev/null @@ -1,154 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# see kafka.server.KafkaConfig for additional details and defaults - -############################# Server Basics ############################# - -# The id of the broker. 
This must be set to a unique integer for each broker. -broker.id=0 - -############################# Socket Server Settings ############################# - -# The address the socket server listens on. It will get the value returned from -# java.net.InetAddress.getCanonicalHostName() if not configured. -# FORMAT: -# listeners = listener_name://host_name:port -# EXAMPLE: -# listeners = PLAINTEXT://your.host.name:9092 -#listeners=PLAINTEXT://:9092 -listeners=SSL://:9092 - - - -# Hostname and port the broker will advertise to producers and consumers. If not set, -# it uses the value for "listeners" if configured. Otherwise, it will use the value -# returned from java.net.InetAddress.getCanonicalHostName(). -#advertised.listeners=PLAINTEXT://your.host.name:9092 -advertised.listeners=SSL://:9092 - -# Maps listener names to security protocols, the default is for them to be the same. See the config documentation for more details -#listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL -listener.security.protocol.map=SSL:SSL - -# The number of threads that the server uses for receiving requests from the network and sending responses to the network -num.network.threads=3 - -# The number of threads that the server uses for processing requests, which may include disk I/O -num.io.threads=8 - -# The send buffer (SO_SNDBUF) used by the socket server -socket.send.buffer.bytes=102400 - -# The receive buffer (SO_RCVBUF) used by the socket server -socket.receive.buffer.bytes=102400 - -# The maximum size of a request that the socket server will accept (protection against OOM) -socket.request.max.bytes=104857600 - -inter.broker.listener.name=SSL - -#### SSL #### - -ssl.keystore.location=/opt/kafka/config/kafka-keystore.p12 -ssl.keystore.password=Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L -ssl.keystore.type=PKCS12 -ssl.key.password=Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L -ssl.truststore.location=/opt/kafka/config/kafka-truststore.p12 -ssl.truststore.password=Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L -ssl.truststore.type=PKCS12 -ssl.endpoint.identification.algorithm= - - -############################# Log Basics ############################# - -# A comma separated list of directories under which to store log files -log.dirs=/tmp/kafka-logs - -# The default number of log partitions per topic. More partitions allow greater -# parallelism for consumption, but this will also result in more files across -# the brokers. -num.partitions=1 - -# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown. -# This value is recommended to be increased for installations with data dirs located in RAID array. -num.recovery.threads.per.data.dir=1 - -############################# Internal Topic Settings ############################# -# The replication factor for the group metadata internal topics "__consumer_offsets" and "__transaction_state" -# For anything other than development testing, a value greater than 1 is recommended to ensure availability such as 3. -offsets.topic.replication.factor=1 -transaction.state.log.replication.factor=1 -transaction.state.log.min.isr=1 - -############################# Log Flush Policy ############################# - -# Messages are immediately written to the filesystem but by default we only fsync() to sync -# the OS cache lazily. The following configurations control the flush of data to disk. -# There are a few important trade-offs here: -# 1. Durability: Unflushed data may be lost if you are not using replication. -# 2. 
Latency: Very large flush intervals may lead to latency spikes when the flush does occur as there will be a lot of data to flush. -# 3. Throughput: The flush is generally the most expensive operation, and a small flush interval may lead to excessive seeks. -# The settings below allow one to configure the flush policy to flush data after a period of time or -# every N messages (or both). This can be done globally and overridden on a per-topic basis. - -# The number of messages to accept before forcing a flush of data to disk -#log.flush.interval.messages=10000 - -# The maximum amount of time a message can sit in a log before we force a flush -#log.flush.interval.ms=1000 - -############################# Log Retention Policy ############################# - -# The following configurations control the disposal of log segments. The policy can -# be set to delete segments after a period of time, or after a given size has accumulated. -# A segment will be deleted whenever *either* of these criteria are met. Deletion always happens -# from the end of the log. - -# The minimum age of a log file to be eligible for deletion due to age -log.retention.hours=168 - -# A size-based retention policy for logs. Segments are pruned from the log unless the remaining -# segments drop below log.retention.bytes. Functions independently of log.retention.hours. -#log.retention.bytes=1073741824 - -# The maximum size of a log segment file. When this size is reached a new log segment will be created. -log.segment.bytes=1073741824 - -# The interval at which log segments are checked to see if they can be deleted according -# to the retention policies -log.retention.check.interval.ms=300000 - -############################# Zookeeper ############################# - -# Zookeeper connection string (see zookeeper docs for details). -# This is a comma separated host:port pairs, each corresponding to a zk -# server. e.g. "127.0.0.1:3000,127.0.0.1:3001,127.0.0.1:3002". -# You can also append an optional chroot string to the urls to specify the -# root directory for all kafka znodes. -zookeeper.connect=localhost:2181 - -# Timeout in ms for connecting to zookeeper -zookeeper.connection.timeout.ms=45000 - - -############################# Group Coordinator Settings ############################# - -# The following configuration specifies the time, in milliseconds, that the GroupCoordinator will delay the initial consumer rebalance. -# The rebalance will be further delayed by the value of group.initial.rebalance.delay.ms as new members join the group, up to a maximum of max.poll.interval.ms. -# The default value for this is 3 seconds. -# We override this to 0 here as it makes for a better out-of-the-box experience for development and testing. -# However, in production environments the default value of 3 seconds is more suitable as this will help to avoid unnecessary, and potentially expensive, rebalances during application startup. 
-group.initial.rebalance.delay.ms=0 \ No newline at end of file diff --git a/integration-tests/kafka-streams/src/test/java/io/quarkus/it/kafka/streams/KafkaSSLTestResource.java b/integration-tests/kafka-streams/src/test/java/io/quarkus/it/kafka/streams/KafkaSSLTestResource.java index 7f62dcce37865..b51e74d3a8a31 100644 --- a/integration-tests/kafka-streams/src/test/java/io/quarkus/it/kafka/streams/KafkaSSLTestResource.java +++ b/integration-tests/kafka-streams/src/test/java/io/quarkus/it/kafka/streams/KafkaSSLTestResource.java @@ -1,6 +1,7 @@ package io.quarkus.it.kafka.streams; import static io.strimzi.test.container.StrimziKafkaContainer.KAFKA_PORT; +import static java.util.Map.entry; import java.io.File; import java.util.HashMap; @@ -14,7 +15,16 @@ public class KafkaSSLTestResource implements QuarkusTestResourceLifecycleManager { private static final StrimziKafkaContainer kafka = new StrimziKafkaContainer() - .withServerProperties(MountableFile.forClasspathResource("server.properties")) + .withKafkaConfigurationMap(Map.ofEntries( + entry("ssl.keystore.location", "/opt/kafka/config/kafka-keystore.p12"), + entry("ssl.keystore.password", "Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L"), + entry("ssl.keystore.type", "PKCS12"), + entry("ssl.key.password", "Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L"), + entry("ssl.truststore.location", "/opt/kafka/config/kafka-truststore.p12"), + entry("ssl.truststore.password", "Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L"), + entry("ssl.truststore.type", "PKCS12"), + entry("ssl.endpoint.identification.algorithm", ""), + entry("listener.security.protocol.map", "BROKER1:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL"))) .withBootstrapServers(c -> String.format("SSL://%s:%s", c.getHost(), c.getMappedPort(KAFKA_PORT))) .withCopyFileToContainer(MountableFile.forClasspathResource("ks-keystore.p12"), "/opt/kafka/config/kafka-keystore.p12") diff --git a/integration-tests/kafka-streams/src/test/resources/server.properties b/integration-tests/kafka-streams/src/test/resources/server.properties deleted file mode 100644 index 349085d8dbf99..0000000000000 --- a/integration-tests/kafka-streams/src/test/resources/server.properties +++ /dev/null @@ -1,154 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# see kafka.server.KafkaConfig for additional details and defaults - -############################# Server Basics ############################# - -# The id of the broker. This must be set to a unique integer for each broker. -broker.id=0 - -############################# Socket Server Settings ############################# - -# The address the socket server listens on. It will get the value returned from -# java.net.InetAddress.getCanonicalHostName() if not configured.
-# FORMAT: -# listeners = listener_name://host_name:port -# EXAMPLE: -# listeners = PLAINTEXT://your.host.name:9092 -#listeners=PLAINTEXT://:9092 -listeners=SSL://:9092 - - - -# Hostname and port the broker will advertise to producers and consumers. If not set, -# it uses the value for "listeners" if configured. Otherwise, it will use the value -# returned from java.net.InetAddress.getCanonicalHostName(). -#advertised.listeners=PLAINTEXT://your.host.name:9092 -advertised.listeners=SSL://:9092 - -# Maps listener names to security protocols, the default is for them to be the same. See the config documentation for more details -#listener.security.protocol.map=PLAINTEXT:PLAINTEXT,SSL:SSL,SASL_PLAINTEXT:SASL_PLAINTEXT,SASL_SSL:SASL_SSL -listener.security.protocol.map=SSL:SSL - -# The number of threads that the server uses for receiving requests from the network and sending responses to the network -num.network.threads=3 - -# The number of threads that the server uses for processing requests, which may include disk I/O -num.io.threads=8 - -# The send buffer (SO_SNDBUF) used by the socket server -socket.send.buffer.bytes=102400 - -# The receive buffer (SO_RCVBUF) used by the socket server -socket.receive.buffer.bytes=102400 - -# The maximum size of a request that the socket server will accept (protection against OOM) -socket.request.max.bytes=104857600 - -inter.broker.listener.name=SSL - -#### SSL #### - -ssl.keystore.location=/opt/kafka/config/kafka-keystore.p12 -ssl.keystore.password=Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L -ssl.keystore.type=PKCS12 -ssl.key.password=Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L -ssl.truststore.location=/opt/kafka/config/kafka-truststore.p12 -ssl.truststore.password=Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L -ssl.truststore.type=PKCS12 -ssl.endpoint.identification.algorithm= - - -############################# Log Basics ############################# - -# A comma separated list of directories under which to store log files -log.dirs=/tmp/kafka-logs - -# The default number of log partitions per topic. More partitions allow greater -# parallelism for consumption, but this will also result in more files across -# the brokers. -num.partitions=1 - -# The number of threads per data directory to be used for log recovery at startup and flushing at shutdown. -# This value is recommended to be increased for installations with data dirs located in RAID array. -num.recovery.threads.per.data.dir=1 - -############################# Internal Topic Settings ############################# -# The replication factor for the group metadata internal topics "__consumer_offsets" and "__transaction_state" -# For anything other than development testing, a value greater than 1 is recommended to ensure availability such as 3. -offsets.topic.replication.factor=1 -transaction.state.log.replication.factor=1 -transaction.state.log.min.isr=1 - -############################# Log Flush Policy ############################# - -# Messages are immediately written to the filesystem but by default we only fsync() to sync -# the OS cache lazily. The following configurations control the flush of data to disk. -# There are a few important trade-offs here: -# 1. Durability: Unflushed data may be lost if you are not using replication. -# 2. Latency: Very large flush intervals may lead to latency spikes when the flush does occur as there will be a lot of data to flush. -# 3. Throughput: The flush is generally the most expensive operation, and a small flush interval may lead to excessive seeks. 
-# The settings below allow one to configure the flush policy to flush data after a period of time or -# every N messages (or both). This can be done globally and overridden on a per-topic basis. - -# The number of messages to accept before forcing a flush of data to disk -#log.flush.interval.messages=10000 - -# The maximum amount of time a message can sit in a log before we force a flush -#log.flush.interval.ms=1000 - -############################# Log Retention Policy ############################# - -# The following configurations control the disposal of log segments. The policy can -# be set to delete segments after a period of time, or after a given size has accumulated. -# A segment will be deleted whenever *either* of these criteria are met. Deletion always happens -# from the end of the log. - -# The minimum age of a log file to be eligible for deletion due to age -log.retention.hours=168 - -# A size-based retention policy for logs. Segments are pruned from the log unless the remaining -# segments drop below log.retention.bytes. Functions independently of log.retention.hours. -#log.retention.bytes=1073741824 - -# The maximum size of a log segment file. When this size is reached a new log segment will be created. -log.segment.bytes=1073741824 - -# The interval at which log segments are checked to see if they can be deleted according -# to the retention policies -log.retention.check.interval.ms=300000 - -############################# Zookeeper ############################# - -# Zookeeper connection string (see zookeeper docs for details). -# This is a comma separated host:port pairs, each corresponding to a zk -# server. e.g. "127.0.0.1:3000,127.0.0.1:3001,127.0.0.1:3002". -# You can also append an optional chroot string to the urls to specify the -# root directory for all kafka znodes. -zookeeper.connect=localhost:2181 - -# Timeout in ms for connecting to zookeeper -zookeeper.connection.timeout.ms=45000 - - -############################# Group Coordinator Settings ############################# - -# The following configuration specifies the time, in milliseconds, that the GroupCoordinator will delay the initial consumer rebalance. -# The rebalance will be further delayed by the value of group.initial.rebalance.delay.ms as new members join the group, up to a maximum of max.poll.interval.ms. -# The default value for this is 3 seconds. -# We override this to 0 here as it makes for a better out-of-the-box experience for development and testing. -# However, in production environments the default value of 3 seconds is more suitable as this will help to avoid unnecessary, and potentially expensive, rebalances during application startup. 
-group.initial.rebalance.delay.ms=0 \ No newline at end of file From cf6c2cd81ee259729cd53b482b8d95488bb70f97 Mon Sep 17 00:00:00 2001 From: Georgios Andrianakis Date: Wed, 15 Jan 2025 14:37:03 +0200 Subject: [PATCH 04/32] Use VertxLogDelegateFactory for internal Vert.x logging This is done because I assume that was the original intent, but also because it avoids the non-zero cost incurred by Vert.x to look up the various supported options --- .../vertx/core/deployment/VertxCoreProcessor.java | 6 ++++++ .../io/quarkus/vertx/core/runtime/VertxCoreRecorder.java | 9 +++++++++ 2 files changed, 15 insertions(+) diff --git a/extensions/vertx/deployment/src/main/java/io/quarkus/vertx/core/deployment/VertxCoreProcessor.java b/extensions/vertx/deployment/src/main/java/io/quarkus/vertx/core/deployment/VertxCoreProcessor.java index 41acd5fe59f51..0feaf4c912316 100644 --- a/extensions/vertx/deployment/src/main/java/io/quarkus/vertx/core/deployment/VertxCoreProcessor.java +++ b/extensions/vertx/deployment/src/main/java/io/quarkus/vertx/core/deployment/VertxCoreProcessor.java @@ -220,6 +220,12 @@ IOThreadDetectorBuildItem ioThreadDetector(VertxCoreRecorder recorder) { return new IOThreadDetectorBuildItem(recorder.detector()); } + @BuildStep + @Record(ExecutionTime.RUNTIME_INIT) + void configureLogging(VertxCoreRecorder recorder) { + recorder.configureQuarkusLoggerFactory(); + } + @BuildStep @Produce(ServiceStartBuildItem.class) @Record(value = ExecutionTime.RUNTIME_INIT) diff --git a/extensions/vertx/runtime/src/main/java/io/quarkus/vertx/core/runtime/VertxCoreRecorder.java b/extensions/vertx/runtime/src/main/java/io/quarkus/vertx/core/runtime/VertxCoreRecorder.java index 15ab4cd34fc98..80fcfc52ce597 100644 --- a/extensions/vertx/runtime/src/main/java/io/quarkus/vertx/core/runtime/VertxCoreRecorder.java +++ b/extensions/vertx/runtime/src/main/java/io/quarkus/vertx/core/runtime/VertxCoreRecorder.java @@ -69,6 +69,8 @@ @Recorder public class VertxCoreRecorder { + private static final String LOGGER_FACTORY_NAME_SYS_PROP = "vertx.logger-delegate-factory-class-name"; + static { System.setProperty("vertx.disableTCCL", "true"); } @@ -667,6 +669,13 @@ public static Supplier recoverFailedStart(VertxConfiguration config, Thre } + public void configureQuarkusLoggerFactory() { + String loggerClassName = System.getProperty(LOGGER_FACTORY_NAME_SYS_PROP); + if (loggerClassName == null) { + System.setProperty(LOGGER_FACTORY_NAME_SYS_PROP, VertxLogDelegateFactory.class.getName()); + } + } + static class VertxSupplier implements Supplier { final LaunchMode launchMode; final VertxConfiguration config; From 4217cdb768d0cc44f183536bdfb4cfceeea9713a Mon Sep 17 00:00:00 2001 From: Foivos Zakkak Date: Thu, 15 Feb 2024 15:07:23 +0200 Subject: [PATCH 05/32] Switch to runtime initialization instead of rerun They both have the same result starting with 23.1. Runtime initialization is public API while rerun is not, and will soon be deprecated and at some point removed in future releases. 
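For reference, the public API mentioned here can also be called directly from a hand-written GraalVM Feature. The sketch below is illustrative only and not part of this patch; it assumes GraalVM/Mandrel 23.1 or later and uses sun.nio.ch.NioSocketImpl purely as an example class name.

import org.graalvm.nativeimage.hosted.Feature;
import org.graalvm.nativeimage.hosted.RuntimeClassInitialization;

// Illustrative feature (not part of this patch): marks a class for initialization at
// image run time through the public API, instead of the internal
// RuntimeClassInitializationSupport#rerunInitialization call that this patch replaces.
public class RuntimeInitExampleFeature implements Feature {

    @Override
    public void beforeAnalysis(BeforeAnalysisAccess access) {
        // Resolve the class without triggering its initialization in the image builder.
        Class<?> clazz = access.findClassByName("sun.nio.ch.NioSocketImpl");
        if (clazz != null) {
            RuntimeClassInitialization.initializeAtRunTime(clazz);
        }
    }
}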
See https://github.com/oracle/graal/issues/5013#issuecomment-1944747803 and https://github.com/oracle/graal/pull/8323/ --- .../steps/NativeImageFeatureStep.java | 42 ++++++++++++++----- 1 file changed, 32 insertions(+), 10 deletions(-) diff --git a/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageFeatureStep.java b/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageFeatureStep.java index f91930e898ed8..7f9217f093ba2 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageFeatureStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/steps/NativeImageFeatureStep.java @@ -20,6 +20,7 @@ import io.quarkus.deployment.builditem.nativeimage.RuntimeReinitializedClassBuildItem; import io.quarkus.deployment.builditem.nativeimage.UnsafeAccessedFieldBuildItem; import io.quarkus.deployment.pkg.NativeConfig; +import io.quarkus.gizmo.AssignableResultHandle; import io.quarkus.gizmo.BranchResult; import io.quarkus.gizmo.BytecodeCreator; import io.quarkus.gizmo.CatchBlockCreator; @@ -176,29 +177,50 @@ public void write(String s, byte[] bytes) { // hack in reinitialization of process info classes if (!runtimeReinitializedClassBuildItems.isEmpty()) { MethodCreator runtimeReinitializedClasses = file - .getMethodCreator("runtimeReinitializedClasses", void.class) + .getMethodCreator("runtimeReinitializedClasses", Class[].class) .setModifiers(Modifier.PRIVATE | Modifier.STATIC); ResultHandle thisClass = runtimeReinitializedClasses.loadClassFromTCCL(GRAAL_FEATURE); ResultHandle cl = runtimeReinitializedClasses.invokeVirtualMethod( ofMethod(Class.class, "getClassLoader", ClassLoader.class), thisClass); - ResultHandle quarkus = runtimeReinitializedClasses.load("Quarkus"); - ResultHandle imageSingleton = runtimeReinitializedClasses.invokeStaticMethod(IMAGE_SINGLETONS_LOOKUP, - runtimeReinitializedClasses.loadClassFromTCCL(RUNTIME_CLASS_INITIALIZATION_SUPPORT)); - for (RuntimeReinitializedClassBuildItem runtimeReinitializedClass : runtimeReinitializedClassBuildItems) { + ResultHandle classesArray = runtimeReinitializedClasses.newArray(Class.class, + runtimeReinitializedClasses.load(runtimeReinitializedClassBuildItems.size())); + for (int i = 0; i < runtimeReinitializedClassBuildItems.size(); i++) { TryBlock tc = runtimeReinitializedClasses.tryBlock(); ResultHandle clazz = tc.invokeStaticMethod( ofMethod(Class.class, "forName", Class.class, String.class, boolean.class, ClassLoader.class), - tc.load(runtimeReinitializedClass.getClassName()), tc.load(false), cl); - tc.invokeInterfaceMethod(RERUN_INITIALIZATION, imageSingleton, clazz, quarkus); - + tc.load(runtimeReinitializedClassBuildItems.get(i).getClassName()), tc.load(false), cl); + tc.writeArrayValue(classesArray, i, clazz); CatchBlockCreator cc = tc.addCatch(Throwable.class); cc.invokeVirtualMethod(ofMethod(Throwable.class, "printStackTrace", void.class), cc.getCaughtException()); } - runtimeReinitializedClasses.returnVoid(); + runtimeReinitializedClasses.returnValue(classesArray); - overallCatch.invokeStaticMethod(runtimeReinitializedClasses.getMethodDescriptor()); + ResultHandle classes = overallCatch.invokeStaticMethod(runtimeReinitializedClasses.getMethodDescriptor()); + + ResultHandle graalVMVersion = overallCatch.invokeStaticMethod(GRAALVM_VERSION_GET_CURRENT); + BranchResult graalVm23_1Test = overallCatch + .ifGreaterEqualZero(overallCatch.invokeVirtualMethod(GRAALVM_VERSION_COMPARE_TO, graalVMVersion, + overallCatch.marshalAsArray(int.class, overallCatch.load(23), 
overallCatch.load(1)))); + /* GraalVM >= 23.1 */ + try (BytecodeCreator greaterEqual23_1 = graalVm23_1Test.trueBranch()) { + greaterEqual23_1.invokeStaticMethod(INITIALIZE_CLASSES_AT_RUN_TIME, classes); + } + /* GraalVM < 23.1 */ + try (BytecodeCreator less23_1 = graalVm23_1Test.falseBranch()) { + ResultHandle quarkus = less23_1.load("Quarkus"); + ResultHandle imageSingleton = less23_1.invokeStaticMethod(IMAGE_SINGLETONS_LOOKUP, + less23_1.loadClassFromTCCL(RUNTIME_CLASS_INITIALIZATION_SUPPORT)); + ResultHandle arraySize = less23_1.arrayLength(classes); + AssignableResultHandle index = less23_1.createVariable(int.class); + less23_1.assign(index, less23_1.load(0)); + try (BytecodeCreator loop = less23_1.whileLoop(c -> c.ifIntegerLessThan(index, arraySize)).block()) { + loop.invokeInterfaceMethod(RERUN_INITIALIZATION, imageSingleton, loop.readArrayValue(classes, index), + quarkus); + loop.assign(index, loop.increment(index)); + } + } } // Ensure registration of fields being accessed through unsafe is done last to ensure that the class From ec2b9badb15bec45d54716e22b596098898a734e Mon Sep 17 00:00:00 2001 From: Foivos Zakkak Date: Wed, 15 Jan 2025 15:27:50 +0200 Subject: [PATCH 06/32] Runtime reinitialize NioSocketImpl for backward compatibility This change does not have any impact in Mandrel/GraalVM for JDK 21 and later versions, but allows Quarkus to build with Mandrel/GraalVM for JDK 17. --- .../java/io/quarkus/deployment/NioSocketImplProcessor.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/deployment/src/main/java/io/quarkus/deployment/NioSocketImplProcessor.java b/core/deployment/src/main/java/io/quarkus/deployment/NioSocketImplProcessor.java index 9f8d7098bcf7b..a5aaa4beda55e 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/NioSocketImplProcessor.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/NioSocketImplProcessor.java @@ -1,14 +1,14 @@ package io.quarkus.deployment; import io.quarkus.deployment.annotations.BuildStep; -import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem; +import io.quarkus.deployment.builditem.nativeimage.RuntimeReinitializedClassBuildItem; public class NioSocketImplProcessor { // Workaround till https://github.com/oracle/graal/pull/10431 gets merged and backported to all supported versions @BuildStep - RuntimeInitializedClassBuildItem reinitializeClass() { - return new RuntimeInitializedClassBuildItem("sun.nio.ch.NioSocketImpl"); + RuntimeReinitializedClassBuildItem reinitializeClass() { + return new RuntimeReinitializedClassBuildItem("sun.nio.ch.NioSocketImpl"); } } From 68c5071f02952ea793c0ea1220d8adc461a66652 Mon Sep 17 00:00:00 2001 From: Foivos Zakkak Date: Wed, 15 Jan 2025 15:30:55 +0200 Subject: [PATCH 07/32] Deprecate `RuntimeReinitializedClassBuildItem` Starting with Mandrel/GraalVM for JDK 21 `RuntimeReinitializedClassBuildItem` is functionally the same with `RuntimeInitializedClassBuildItem`. 
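For extension authors, a minimal sketch of the migration this deprecation implies; the processor and class names below are hypothetical, only the build items come from the patch:

    import io.quarkus.deployment.annotations.BuildStep;
    import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem;

    public class MyExtensionProcessor {

        // Before: return new RuntimeReinitializedClassBuildItem("org.acme.NativeUnfriendly");
        @BuildStep
        RuntimeInitializedClassBuildItem initializeAtRunTime() {
            return new RuntimeInitializedClassBuildItem("org.acme.NativeUnfriendly");
        }
    }
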
--- .../nativeimage/RuntimeReinitializedClassBuildItem.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/RuntimeReinitializedClassBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/RuntimeReinitializedClassBuildItem.java index 8ca45a2c62be2..d9ec805ef2463 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/RuntimeReinitializedClassBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/RuntimeReinitializedClassBuildItem.java @@ -5,7 +5,11 @@ /** * A class that will be reinitialized at runtime in native mode. This will result in the static * initializer running twice. + * + * @deprecated Starting with Mandrel/GraalVM 23.1 for JDK 21 this is functionally the same with + * {@link RuntimeInitializedClassBuildItem}. */ +@Deprecated(since = "3.18") public final class RuntimeReinitializedClassBuildItem extends MultiBuildItem { private final String className; From a18d095aa0412ec25a23456acecb3068ebb9fd0a Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Tue, 14 Jan 2025 23:59:59 +0100 Subject: [PATCH 08/32] avoid star imports --- .../main/java/io/quarkus/builder/BuildContext.java | 4 ++-- .../quarkus/deployment/CollectionClassProcessor.java | 11 +++++++++-- .../deployment/pkg/builditem/JarBuildItem.java | 3 ++- .../quarkus/deployment/pkg/steps/AppCDSBuildStep.java | 2 +- 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildContext.java b/core/builder/src/main/java/io/quarkus/builder/BuildContext.java index 4d2c23ee429f2..c24310df419ae 100644 --- a/core/builder/src/main/java/io/quarkus/builder/BuildContext.java +++ b/core/builder/src/main/java/io/quarkus/builder/BuildContext.java @@ -1,7 +1,5 @@ package io.quarkus.builder; -import static io.quarkus.builder.Execution.*; - import java.time.LocalTime; import java.util.ArrayList; import java.util.Collections; @@ -20,6 +18,8 @@ import io.quarkus.builder.item.SimpleBuildItem; import io.quarkus.builder.location.Location; +import static io.quarkus.builder.Execution.log; + /** * The context passed to a deployer's operation. 
* diff --git a/core/deployment/src/main/java/io/quarkus/deployment/CollectionClassProcessor.java b/core/deployment/src/main/java/io/quarkus/deployment/CollectionClassProcessor.java index 530a695ea7407..53de887aaaf48 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/CollectionClassProcessor.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/CollectionClassProcessor.java @@ -1,10 +1,17 @@ package io.quarkus.deployment; -import java.util.*; - import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.TreeMap; +import java.util.TreeSet; + public class CollectionClassProcessor { @BuildStep ReflectiveClassBuildItem setupCollectionClasses() { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java index ba5a0e5601bb3..6909c9be7287e 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java @@ -1,6 +1,5 @@ package io.quarkus.deployment.pkg.builditem; -import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.*; import java.nio.file.Path; import java.util.Collection; @@ -11,6 +10,8 @@ import io.quarkus.deployment.pkg.PackageConfig; import io.quarkus.sbom.ApplicationManifestConfig; +import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.MUTABLE_JAR; + public final class JarBuildItem extends SimpleBuildItem { private final Path path; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/AppCDSBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/AppCDSBuildStep.java index 12bbefbc61222..04dc1b39e29b9 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/AppCDSBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/AppCDSBuildStep.java @@ -1,6 +1,6 @@ package io.quarkus.deployment.pkg.steps; -import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.*; +import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.FAST_JAR; import static io.quarkus.deployment.pkg.steps.LinuxIDUtil.getLinuxID; import static io.quarkus.deployment.util.ContainerRuntimeUtil.detectContainerRuntime; From 788d3810d821930ed6d12a4b0b4449cee549a26e Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Wed, 15 Jan 2025 18:21:37 +0100 Subject: [PATCH 09/32] add more --- .../deployment/pkg/builditem/JarBuildItem.java | 3 +-- .../deployment/pkg/steps/JarResultBuildStep.java | 1 - .../main/java/org/acme/MyMessagingApplication.java | 6 +++--- .../src/main/java/GreetingVertx.java | 2 -- .../azure/functions/resteasy/runtime/Function.java | 10 +++++++++- .../cyclonedx/generator/CycloneDxSbomGenerator.java | 9 ++++++++- .../funqy/deployment/FunctionScannerBuildStep.java | 4 +++- .../client/runtime/devui/util/ConsumerFactory.java | 6 +++++- .../kubernetes/deployment/EnvVarValidator.java | 8 +++++++- .../runtime/exporter/otlp/OTelExporterRecorder.java | 7 ++++++- .../runtime/logs/OpenTelemetryLogHandler.java | 7 +++++-- .../redis/runtime/RedisCacheBuildRecorder.java | 5 ++++- .../cli/letsencrypt/LetsEncryptRenewCommand.java | 3 ++- 
.../graphql/deployment/VertxGraphqlProcessor.java | 5 ++++- .../io/quarkus/arc/impl/AbstractSharedContext.java | 5 ++++- .../io/quarkus/maven/ExtensionDescriptorMojo.java | 10 +++++++++- .../catalog/processor/CatalogProcessor.java | 7 ++++++- .../java/org/acme/ApplicationConfigResource.java | 5 ++--- .../src/test/java/org/acme/ExampleResourceTest.java | 10 +++++----- .../src/main/java/org/acme/HelloResource.java | 5 ++--- .../keycloak/KeycloakRealmResourceManager.java | 13 +++++++++++-- .../tck/jwt/DeploymentPropertyConfigProvider.java | 1 - .../test/junit/QuarkusMainTestExtension.java | 7 ++++++- .../test/mongodb/MongoReplicaSetTestResource.java | 10 ++++++++-- 24 files changed, 110 insertions(+), 39 deletions(-) diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java index 6909c9be7287e..842b0c8467b3a 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java @@ -1,5 +1,6 @@ package io.quarkus.deployment.pkg.builditem; +import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.MUTABLE_JAR; import java.nio.file.Path; import java.util.Collection; @@ -10,8 +11,6 @@ import io.quarkus.deployment.pkg.PackageConfig; import io.quarkus.sbom.ApplicationManifestConfig; -import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.MUTABLE_JAR; - public final class JarBuildItem extends SimpleBuildItem { private final Path path; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java index bd36f8ba78ae9..aff0f11a2bc4b 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java @@ -1,7 +1,6 @@ package io.quarkus.deployment.pkg.steps; import static io.quarkus.commons.classloading.ClassLoaderHelper.fromClassNameToResourceName; -import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.LEGACY_JAR; import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.MUTABLE_JAR; import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.UBER_JAR; diff --git a/devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/extension-codestarts/messaging-codestart/java/src/main/java/org/acme/MyMessagingApplication.java b/devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/extension-codestarts/messaging-codestart/java/src/main/java/org/acme/MyMessagingApplication.java index c2254aff79618..a2f1fd3df713a 100644 --- a/devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/extension-codestarts/messaging-codestart/java/src/main/java/org/acme/MyMessagingApplication.java +++ b/devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/extension-codestarts/messaging-codestart/java/src/main/java/org/acme/MyMessagingApplication.java @@ -1,12 +1,12 @@ package org.acme; -import io.quarkus.runtime.StartupEvent; -import org.eclipse.microprofile.reactive.messaging.*; +import java.util.stream.Stream; import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.event.Observes; import jakarta.inject.Inject; -import java.util.stream.Stream; + +import 
io.quarkus.runtime.StartupEvent; @ApplicationScoped public class MyMessagingApplication { diff --git a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java index f9aad01092494..12213a373f74f 100644 --- a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java +++ b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java @@ -1,7 +1,5 @@ package ${package}; -import static io.quarkus.vertx.web.Route.HttpMethod.*; - import io.quarkus.vertx.web.Route; import io.vertx.ext.web.RoutingContext; diff --git a/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java b/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java index e34cd8f0099d1..f3f3821443e8e 100644 --- a/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java +++ b/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java @@ -1,6 +1,14 @@ package io.quarkus.azure.functions.resteasy.runtime; -import static com.microsoft.azure.functions.HttpMethod.*; +import static com.microsoft.azure.functions.HttpMethod.CONNECT; +import static com.microsoft.azure.functions.HttpMethod.DELETE; +import static com.microsoft.azure.functions.HttpMethod.GET; +import static com.microsoft.azure.functions.HttpMethod.HEAD; +import static com.microsoft.azure.functions.HttpMethod.OPTIONS; +import static com.microsoft.azure.functions.HttpMethod.PATCH; +import static com.microsoft.azure.functions.HttpMethod.POST; +import static com.microsoft.azure.functions.HttpMethod.PUT; +import static com.microsoft.azure.functions.HttpMethod.TRACE; import java.util.Optional; diff --git a/extensions/cyclonedx/generator/src/main/java/io/quarkus/cyclonedx/generator/CycloneDxSbomGenerator.java b/extensions/cyclonedx/generator/src/main/java/io/quarkus/cyclonedx/generator/CycloneDxSbomGenerator.java index eb3be5e9bd786..533b87d86b54b 100644 --- a/extensions/cyclonedx/generator/src/main/java/io/quarkus/cyclonedx/generator/CycloneDxSbomGenerator.java +++ b/extensions/cyclonedx/generator/src/main/java/io/quarkus/cyclonedx/generator/CycloneDxSbomGenerator.java @@ -8,7 +8,14 @@ import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Objects; +import java.util.Properties; +import java.util.TreeMap; import org.apache.commons.lang3.StringUtils; import org.apache.maven.model.MailingList; diff --git a/extensions/funqy/funqy-server-common/deployment/src/main/java/io/quarkus/funqy/deployment/FunctionScannerBuildStep.java b/extensions/funqy/funqy-server-common/deployment/src/main/java/io/quarkus/funqy/deployment/FunctionScannerBuildStep.java index 0064bf54ed611..15429b2e95a0d 100644 --- a/extensions/funqy/funqy-server-common/deployment/src/main/java/io/quarkus/funqy/deployment/FunctionScannerBuildStep.java +++ b/extensions/funqy/funqy-server-common/deployment/src/main/java/io/quarkus/funqy/deployment/FunctionScannerBuildStep.java @@ -1,7 +1,9 @@ package 
io.quarkus.funqy.deployment; import static io.quarkus.deployment.annotations.ExecutionTime.STATIC_INIT; -import static io.quarkus.funqy.deployment.ReflectionRegistrationUtil.*; +import static io.quarkus.funqy.deployment.ReflectionRegistrationUtil.IGNORE_FIELD_FOR_REFLECTION_PREDICATE; +import static io.quarkus.funqy.deployment.ReflectionRegistrationUtil.IGNORE_METHOD_FOR_REFLECTION_PREDICATE; +import static io.quarkus.funqy.deployment.ReflectionRegistrationUtil.IGNORE_TYPE_FOR_REFLECTION_PREDICATE; import java.lang.reflect.Modifier; import java.util.Collection; diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/util/ConsumerFactory.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/util/ConsumerFactory.java index 34a577ead19a9..21132e6a16880 100644 --- a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/util/ConsumerFactory.java +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/util/ConsumerFactory.java @@ -1,6 +1,10 @@ package io.quarkus.kafka.client.runtime.devui.util; -import java.util.*; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/EnvVarValidator.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/EnvVarValidator.java index 85c62e5aa0f7c..f5e6e674edce0 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/EnvVarValidator.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/EnvVarValidator.java @@ -1,6 +1,12 @@ package io.quarkus.kubernetes.deployment; -import java.util.*; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import org.jboss.logging.Logger; diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/OTelExporterRecorder.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/OTelExporterRecorder.java index 57cd1f9d2253e..a4ae1b08bd535 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/OTelExporterRecorder.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/OTelExporterRecorder.java @@ -43,7 +43,12 @@ import io.quarkus.opentelemetry.runtime.config.build.OTelBuildConfig; import io.quarkus.opentelemetry.runtime.config.runtime.BatchSpanProcessorConfig; import io.quarkus.opentelemetry.runtime.config.runtime.OTelRuntimeConfig; -import io.quarkus.opentelemetry.runtime.config.runtime.exporter.*; +import io.quarkus.opentelemetry.runtime.config.runtime.exporter.CompressionType; +import io.quarkus.opentelemetry.runtime.config.runtime.exporter.OtlpExporterConfig; +import io.quarkus.opentelemetry.runtime.config.runtime.exporter.OtlpExporterLogsConfig; +import io.quarkus.opentelemetry.runtime.config.runtime.exporter.OtlpExporterMetricsConfig; +import io.quarkus.opentelemetry.runtime.config.runtime.exporter.OtlpExporterRuntimeConfig; 
+import io.quarkus.opentelemetry.runtime.config.runtime.exporter.OtlpExporterTracesConfig; import io.quarkus.opentelemetry.runtime.exporter.otlp.logs.NoopLogRecordExporter; import io.quarkus.opentelemetry.runtime.exporter.otlp.logs.VertxGrpcLogRecordExporter; import io.quarkus.opentelemetry.runtime.exporter.otlp.logs.VertxHttpLogRecordExporter; diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/logs/OpenTelemetryLogHandler.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/logs/OpenTelemetryLogHandler.java index 65e2de1270e70..24b7b9f93a78f 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/logs/OpenTelemetryLogHandler.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/logs/OpenTelemetryLogHandler.java @@ -1,12 +1,15 @@ package io.quarkus.opentelemetry.runtime.logs; -import static io.opentelemetry.semconv.ExceptionAttributes.*; +import static io.opentelemetry.semconv.ExceptionAttributes.EXCEPTION_MESSAGE; +import static io.opentelemetry.semconv.ExceptionAttributes.EXCEPTION_STACKTRACE; +import static io.opentelemetry.semconv.ExceptionAttributes.EXCEPTION_TYPE; import static io.opentelemetry.semconv.incubating.CodeIncubatingAttributes.CODE_FUNCTION; import static io.opentelemetry.semconv.incubating.CodeIncubatingAttributes.CODE_LINENO; import static io.opentelemetry.semconv.incubating.CodeIncubatingAttributes.CODE_NAMESPACE; import static io.opentelemetry.semconv.incubating.LogIncubatingAttributes.LOG_FILE_PATH; -import static io.opentelemetry.semconv.incubating.ThreadIncubatingAttributes.*; +import static io.opentelemetry.semconv.incubating.ThreadIncubatingAttributes.THREAD_ID; import static io.quarkus.opentelemetry.runtime.config.build.OTelBuildConfig.INSTRUMENTATION_NAME; +import static io.quarkus.vertx.http.runtime.attribute.ThreadNameAttribute.THREAD_NAME; import java.io.PrintWriter; import java.io.StringWriter; diff --git a/extensions/redis-cache/runtime/src/main/java/io/quarkus/cache/redis/runtime/RedisCacheBuildRecorder.java b/extensions/redis-cache/runtime/src/main/java/io/quarkus/cache/redis/runtime/RedisCacheBuildRecorder.java index 1ec0696eb0e9f..b0baa97b3aa5e 100644 --- a/extensions/redis-cache/runtime/src/main/java/io/quarkus/cache/redis/runtime/RedisCacheBuildRecorder.java +++ b/extensions/redis-cache/runtime/src/main/java/io/quarkus/cache/redis/runtime/RedisCacheBuildRecorder.java @@ -1,7 +1,10 @@ package io.quarkus.cache.redis.runtime; import java.lang.reflect.Type; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; import java.util.function.Supplier; import org.jboss.logging.Logger; diff --git a/extensions/tls-registry/cli/src/main/java/io/quarkus/tls/cli/letsencrypt/LetsEncryptRenewCommand.java b/extensions/tls-registry/cli/src/main/java/io/quarkus/tls/cli/letsencrypt/LetsEncryptRenewCommand.java index e34717fc6c025..87c3f4f015f45 100644 --- a/extensions/tls-registry/cli/src/main/java/io/quarkus/tls/cli/letsencrypt/LetsEncryptRenewCommand.java +++ b/extensions/tls-registry/cli/src/main/java/io/quarkus/tls/cli/letsencrypt/LetsEncryptRenewCommand.java @@ -4,7 +4,8 @@ import static io.quarkus.tls.cli.letsencrypt.LetsEncryptConstants.DOT_ENV_FILE; import static io.quarkus.tls.cli.letsencrypt.LetsEncryptConstants.KEY_FILE; import static io.quarkus.tls.cli.letsencrypt.LetsEncryptConstants.LETS_ENCRYPT_DIR; -import static 
io.quarkus.tls.cli.letsencrypt.LetsEncryptHelpers.*; +import static io.quarkus.tls.cli.letsencrypt.LetsEncryptHelpers.adjustPermissions; +import static io.quarkus.tls.cli.letsencrypt.LetsEncryptHelpers.renewCertificate; import static java.lang.System.Logger.Level.INFO; import java.util.concurrent.Callable; diff --git a/extensions/vertx-graphql/deployment/src/main/java/io/quarkus/vertx/graphql/deployment/VertxGraphqlProcessor.java b/extensions/vertx-graphql/deployment/src/main/java/io/quarkus/vertx/graphql/deployment/VertxGraphqlProcessor.java index 8638bca0e38d7..04cadebdd85bf 100644 --- a/extensions/vertx-graphql/deployment/src/main/java/io/quarkus/vertx/graphql/deployment/VertxGraphqlProcessor.java +++ b/extensions/vertx-graphql/deployment/src/main/java/io/quarkus/vertx/graphql/deployment/VertxGraphqlProcessor.java @@ -12,7 +12,10 @@ import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.FeatureBuildItem; import io.quarkus.deployment.builditem.LaunchModeBuildItem; -import io.quarkus.deployment.builditem.nativeimage.*; +import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBundleBuildItem; +import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceDirectoryBuildItem; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; +import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem; import io.quarkus.runtime.configuration.ConfigurationException; import io.quarkus.vertx.core.deployment.CoreVertxBuildItem; import io.quarkus.vertx.graphql.runtime.VertxGraphqlRecorder; diff --git a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/AbstractSharedContext.java b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/AbstractSharedContext.java index 3648dfce08890..752a4c385ab9e 100644 --- a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/AbstractSharedContext.java +++ b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/AbstractSharedContext.java @@ -1,6 +1,9 @@ package io.quarkus.arc.impl; -import java.util.*; +import java.util.Iterator; +import java.util.Map; +import java.util.Objects; +import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; diff --git a/independent-projects/extension-maven-plugin/src/main/java/io/quarkus/maven/ExtensionDescriptorMojo.java b/independent-projects/extension-maven-plugin/src/main/java/io/quarkus/maven/ExtensionDescriptorMojo.java index 27acf1122cb66..ecdc6e4cbd800 100644 --- a/independent-projects/extension-maven-plugin/src/main/java/io/quarkus/maven/ExtensionDescriptorMojo.java +++ b/independent-projects/extension-maven-plugin/src/main/java/io/quarkus/maven/ExtensionDescriptorMojo.java @@ -8,7 +8,15 @@ import java.nio.file.FileSystem; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import org.apache.maven.artifact.Artifact; diff --git a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/platform/catalog/processor/CatalogProcessor.java b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/platform/catalog/processor/CatalogProcessor.java index ae0037e90f61b..a947cd87bd2d1 100644 --- 
a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/platform/catalog/processor/CatalogProcessor.java +++ b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/platform/catalog/processor/CatalogProcessor.java @@ -2,7 +2,12 @@ import static io.quarkus.platform.catalog.processor.ExtensionProcessor.isUnlisted; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; import java.util.stream.Collectors; import io.quarkus.registry.catalog.Category; diff --git a/integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/main/java/org/acme/ApplicationConfigResource.java b/integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/main/java/org/acme/ApplicationConfigResource.java index 82145f3a5c3d7..95910560675f7 100644 --- a/integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/main/java/org/acme/ApplicationConfigResource.java +++ b/integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/main/java/org/acme/ApplicationConfigResource.java @@ -1,13 +1,12 @@ package org.acme; - -import org.eclipse.microprofile.config.inject.ConfigProperty; - import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import jakarta.ws.rs.Produces; import jakarta.ws.rs.core.MediaType; +import org.eclipse.microprofile.config.inject.ConfigProperty; + @Path("/app-config") public class ApplicationConfigResource { diff --git a/integration-tests/gradle/src/main/resources/custom-filesystem-provider/application/src/test/java/org/acme/ExampleResourceTest.java b/integration-tests/gradle/src/main/resources/custom-filesystem-provider/application/src/test/java/org/acme/ExampleResourceTest.java index 6b2e243fe8f1d..d8b87fa230f9b 100644 --- a/integration-tests/gradle/src/main/resources/custom-filesystem-provider/application/src/test/java/org/acme/ExampleResourceTest.java +++ b/integration-tests/gradle/src/main/resources/custom-filesystem-provider/application/src/test/java/org/acme/ExampleResourceTest.java @@ -1,12 +1,12 @@ package org.acme; +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.is; -import io.quarkus.test.common.QuarkusTestResource; -import io.quarkus.test.junit.QuarkusTest; import org.junit.jupiter.api.Test; -import static io.restassured.RestAssured.given; -import static org.hamcrest.CoreMatchers.is; +import io.quarkus.test.common.QuarkusTestResource; +import io.quarkus.test.junit.QuarkusTest; @QuarkusTest @QuarkusTestResource(TestResource.class) @@ -20,4 +20,4 @@ public void testHelloEndpoint() { .statusCode(200) .body(is("hello")); } -} \ No newline at end of file +} diff --git a/integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java b/integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java index e4bb827f2625a..8aa6dfb55aedd 100644 --- a/integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java +++ b/integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java @@ -1,14 +1,13 @@ package org.acme; -import org.eclipse.microprofile.config.inject.ConfigProperty; - - import jakarta.inject.Inject; import jakarta.ws.rs.GET; import 
jakarta.ws.rs.Path; import jakarta.ws.rs.Produces; import jakarta.ws.rs.core.MediaType; +import org.eclipse.microprofile.config.inject.ConfigProperty; + @Path("/hello") public class HelloResource { diff --git a/integration-tests/smallrye-graphql-client-keycloak/src/test/java/io/quarkus/it/smallrye/graphql/keycloak/KeycloakRealmResourceManager.java b/integration-tests/smallrye-graphql-client-keycloak/src/test/java/io/quarkus/it/smallrye/graphql/keycloak/KeycloakRealmResourceManager.java index a527c265f76af..b14148a84e5bc 100644 --- a/integration-tests/smallrye-graphql-client-keycloak/src/test/java/io/quarkus/it/smallrye/graphql/keycloak/KeycloakRealmResourceManager.java +++ b/integration-tests/smallrye-graphql-client-keycloak/src/test/java/io/quarkus/it/smallrye/graphql/keycloak/KeycloakRealmResourceManager.java @@ -1,10 +1,19 @@ package io.quarkus.it.smallrye.graphql.keycloak; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.keycloak.representations.AccessTokenResponse; -import org.keycloak.representations.idm.*; +import org.keycloak.representations.idm.ClientRepresentation; +import org.keycloak.representations.idm.CredentialRepresentation; +import org.keycloak.representations.idm.RealmRepresentation; +import org.keycloak.representations.idm.RoleRepresentation; +import org.keycloak.representations.idm.RolesRepresentation; +import org.keycloak.representations.idm.UserRepresentation; import org.keycloak.util.JsonSerialization; import org.testcontainers.containers.GenericContainer; diff --git a/tcks/microprofile-jwt/src/main/java/io/quarkus/tck/jwt/DeploymentPropertyConfigProvider.java b/tcks/microprofile-jwt/src/main/java/io/quarkus/tck/jwt/DeploymentPropertyConfigProvider.java index 790b39d2ea735..2d7447a37f30a 100644 --- a/tcks/microprofile-jwt/src/main/java/io/quarkus/tck/jwt/DeploymentPropertyConfigProvider.java +++ b/tcks/microprofile-jwt/src/main/java/io/quarkus/tck/jwt/DeploymentPropertyConfigProvider.java @@ -3,7 +3,6 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.net.URL; -import java.util.*; import org.eclipse.microprofile.config.spi.ConfigSource; import org.eclipse.microprofile.config.spi.ConfigSourceProvider; diff --git a/test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusMainTestExtension.java b/test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusMainTestExtension.java index 7975bd7b1b18d..dafc53877c83e 100644 --- a/test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusMainTestExtension.java +++ b/test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusMainTestExtension.java @@ -5,7 +5,12 @@ import java.io.Closeable; import java.lang.reflect.Method; -import java.util.*; +import java.util.Arrays; +import java.util.Enumeration; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; import java.util.concurrent.LinkedBlockingDeque; import java.util.logging.Handler; diff --git a/test-framework/mongodb/src/main/java/io/quarkus/test/mongodb/MongoReplicaSetTestResource.java b/test-framework/mongodb/src/main/java/io/quarkus/test/mongodb/MongoReplicaSetTestResource.java index 169e84cae8b9b..04102dc3cf4dd 100644 --- a/test-framework/mongodb/src/main/java/io/quarkus/test/mongodb/MongoReplicaSetTestResource.java +++ b/test-framework/mongodb/src/main/java/io/quarkus/test/mongodb/MongoReplicaSetTestResource.java @@ -1,9 +1,15 @@ 
package io.quarkus.test.mongodb; -import static org.awaitility.Durations.*; +import static org.awaitility.Durations.ONE_MINUTE; +import static org.awaitility.Durations.ONE_SECOND; import java.net.UnknownHostException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; import org.awaitility.Awaitility; import org.bson.Document; From 1e54448b47fb64429e0a6e29a7da92605d22d4a9 Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Wed, 15 Jan 2025 18:23:10 +0100 Subject: [PATCH 10/32] add more --- .../src/main/java/io/quarkus/builder/BuildContext.java | 4 ++-- .../archetype-resources/src/main/java/GreetingVertx.java | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildContext.java b/core/builder/src/main/java/io/quarkus/builder/BuildContext.java index c24310df419ae..0a17cc526548f 100644 --- a/core/builder/src/main/java/io/quarkus/builder/BuildContext.java +++ b/core/builder/src/main/java/io/quarkus/builder/BuildContext.java @@ -1,5 +1,7 @@ package io.quarkus.builder; +import static io.quarkus.builder.Execution.log; + import java.time.LocalTime; import java.util.ArrayList; import java.util.Collections; @@ -18,8 +20,6 @@ import io.quarkus.builder.item.SimpleBuildItem; import io.quarkus.builder.location.Location; -import static io.quarkus.builder.Execution.log; - /** * The context passed to a deployer's operation. * diff --git a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java index 12213a373f74f..aad0fac75be16 100644 --- a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java +++ b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java @@ -1,5 +1,7 @@ package ${package}; +import static io.quarkus.vertx.web.Route.HttpMethod.GET; + import io.quarkus.vertx.web.Route; import io.vertx.ext.web.RoutingContext; From 985c5a34790396ed432be1eda462f11db7d8dbb7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michal=20Vav=C5=99=C3=ADk?= Date: Wed, 15 Jan 2025 19:05:49 +0100 Subject: [PATCH 11/32] feat(oidc): Reuse Vert.x in DEV and Test mode when possible --- .../devservices/AbstractDevUIProcessor.java | 5 +- .../devservices/OidcDevUIProcessor.java | 88 ++-------- .../keycloak/KeycloakDevUIProcessor.java | 2 +- .../runtime/devui/OidcDevJsonRpcService.java | 17 +- .../oidc/runtime/devui/OidcDevUiRecorder.java | 46 ++++- .../java/io/quarkus/it/oidc/OidcMtlsTest.java | 160 +++++++++--------- .../test/oidc/client/OidcTestClient.java | 31 +++- 7 files changed, 172 insertions(+), 177 deletions(-) diff --git a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/AbstractDevUIProcessor.java b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/AbstractDevUIProcessor.java index e7342b6f7727c..383dd938ddfc3 100644 --- a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/AbstractDevUIProcessor.java +++ b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/AbstractDevUIProcessor.java @@ -46,7 +46,7 @@ protected static CardPageBuildItem createProviderWebComponent(OidcDevUiRecorder Map keycloakUsers, List 
keycloakRealms, boolean alwaysLogoutUserInDevUiOnReload, - HttpConfiguration httpConfiguration) { + HttpConfiguration httpConfiguration, boolean discoverMetadata, String authServerUrl) { final CardPageBuildItem cardPage = new CardPageBuildItem(); // prepare provider component @@ -82,7 +82,8 @@ protected static CardPageBuildItem createProviderWebComponent(OidcDevUiRecorder authorizationUrl, tokenUrl, logoutUrl, webClientTimeout, grantOptions, keycloakUsers, oidcProviderName, oidcApplicationType, oidcGrantType, introspectionIsAvailable, keycloakAdminUrl, keycloakRealms, swaggerIsAvailable, - graphqlIsAvailable, swaggerUiPath, graphqlUiPath, alwaysLogoutUserInDevUiOnReload); + graphqlIsAvailable, swaggerUiPath, graphqlUiPath, alwaysLogoutUserInDevUiOnReload, discoverMetadata, + authServerUrl); recorder.createJsonRPCService(beanContainer.getValue(), runtimeProperties, httpConfiguration); diff --git a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/OidcDevUIProcessor.java b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/OidcDevUIProcessor.java index 7223bdc5e018f..687ef5e9b9859 100644 --- a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/OidcDevUIProcessor.java +++ b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/OidcDevUIProcessor.java @@ -4,7 +4,6 @@ import java.util.Set; import org.eclipse.microprofile.config.ConfigProvider; -import org.jboss.logging.Logger; import io.quarkus.arc.deployment.BeanContainerBuildItem; import io.quarkus.deployment.Capabilities; @@ -15,32 +14,22 @@ import io.quarkus.deployment.annotations.ExecutionTime; import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.ConfigurationBuildItem; -import io.quarkus.deployment.builditem.CuratedApplicationShutdownBuildItem; import io.quarkus.deployment.builditem.RuntimeConfigSetupCompleteBuildItem; import io.quarkus.devservices.oidc.OidcDevServicesConfigBuildItem; import io.quarkus.devui.spi.JsonRPCProvidersBuildItem; import io.quarkus.devui.spi.page.CardPageBuildItem; import io.quarkus.oidc.OidcTenantConfig; import io.quarkus.oidc.OidcTenantConfig.Provider; -import io.quarkus.oidc.common.runtime.OidcConstants; import io.quarkus.oidc.deployment.OidcBuildTimeConfig; import io.quarkus.oidc.runtime.devui.OidcDevJsonRpcService; -import io.quarkus.oidc.runtime.devui.OidcDevServicesUtils; import io.quarkus.oidc.runtime.devui.OidcDevUiRecorder; import io.quarkus.oidc.runtime.providers.KnownOidcProviders; import io.quarkus.runtime.configuration.ConfigUtils; +import io.quarkus.vertx.core.deployment.CoreVertxBuildItem; import io.quarkus.vertx.http.deployment.NonApplicationRootPathBuildItem; import io.quarkus.vertx.http.runtime.HttpConfiguration; -import io.vertx.core.Vertx; -import io.vertx.core.http.HttpHeaders; -import io.vertx.core.json.JsonObject; -import io.vertx.mutiny.core.buffer.Buffer; -import io.vertx.mutiny.ext.web.client.HttpResponse; -import io.vertx.mutiny.ext.web.client.WebClient; public class OidcDevUIProcessor extends AbstractDevUIProcessor { - static volatile Vertx vertxInstance; - private static final Logger LOG = Logger.getLogger(OidcDevUIProcessor.class); private static final String TENANT_ENABLED_CONFIG_KEY = CONFIG_PREFIX + "tenant-enabled"; private static final String DISCOVERY_ENABLED_CONFIG_KEY = CONFIG_PREFIX + "discovery-enabled"; @@ -57,9 +46,9 @@ public class OidcDevUIProcessor extends AbstractDevUIProcessor { @Record(ExecutionTime.RUNTIME_INIT) 
@BuildStep(onlyIf = IsDevelopment.class) + @Consume(CoreVertxBuildItem.class) // metadata discovery requires Vertx instance @Consume(RuntimeConfigSetupCompleteBuildItem.class) - void prepareOidcDevConsole(CuratedApplicationShutdownBuildItem closeBuildItem, - Capabilities capabilities, + void prepareOidcDevConsole(Capabilities capabilities, HttpConfiguration httpConfiguration, BeanContainerBuildItem beanContainer, NonApplicationRootPathBuildItem nonApplicationRootPathBuildItem, @@ -76,33 +65,8 @@ void prepareOidcDevConsole(CuratedApplicationShutdownBuildItem closeBuildItem, ? oidcDevServicesConfigBuildItem.get().getConfig().get(AUTH_SERVER_URL_CONFIG_KEY) : getAuthServerUrl(providerConfig); if (authServerUrl != null) { - if (vertxInstance == null) { - vertxInstance = Vertx.vertx(); - - Runnable closeTask = new Runnable() { - @Override - public void run() { - if (vertxInstance != null) { - try { - vertxInstance.close(); - } catch (Throwable t) { - LOG.error("Failed to close Vertx instance", t); - } - } - vertxInstance = null; - } - }; - closeBuildItem.addCloseTask(closeTask, true); - } - JsonObject metadata = null; - if (isDiscoveryEnabled(providerConfig)) { - metadata = discoverMetadata(authServerUrl); - if (metadata == null) { - return; - } - } + boolean discoverMetadata = isDiscoveryEnabled(providerConfig); String providerName = tryToGetProviderName(authServerUrl); - boolean metadataNotNull = metadata != null; final String keycloakAdminUrl; if (KEYCLOAK.equals(providerName)) { @@ -116,12 +80,10 @@ public void run() { getApplicationType(providerConfig), oidcConfig.devui().grant().type().isPresent() ? oidcConfig.devui().grant().type().get().getGrantType() : "code", - metadataNotNull ? metadata.getString("authorization_endpoint") : null, - metadataNotNull ? metadata.getString("token_endpoint") : null, - metadataNotNull ? metadata.getString("end_session_endpoint") : null, - metadataNotNull - ? 
(metadata.containsKey("introspection_endpoint") || metadata.containsKey("userinfo_endpoint")) - : checkProviderUserInfoRequired(providerConfig), + null, + null, + null, + checkProviderUserInfoRequired(providerConfig), beanContainer, oidcConfig.devui().webClientTimeout(), oidcConfig.devui().grantOptions(), @@ -131,7 +93,7 @@ public void run() { null, null, true, - httpConfiguration); + httpConfiguration, discoverMetadata, authServerUrl); cardPageProducer.produce(cardPage); } } @@ -141,14 +103,14 @@ JsonRPCProvidersBuildItem produceOidcDevJsonRpcService() { return new JsonRPCProvidersBuildItem(OidcDevJsonRpcService.class); } - private boolean checkProviderUserInfoRequired(OidcTenantConfig providerConfig) { + private static boolean checkProviderUserInfoRequired(OidcTenantConfig providerConfig) { if (providerConfig != null) { - return providerConfig.authentication.userInfoRequired.orElse(false); + return providerConfig.authentication().userInfoRequired().orElse(false); } return false; } - private String tryToGetProviderName(String authServerUrl) { + private static String tryToGetProviderName(String authServerUrl) { if (authServerUrl.contains("/realms/")) { return KEYCLOAK; } @@ -163,28 +125,6 @@ private String tryToGetProviderName(String authServerUrl) { return null; } - private JsonObject discoverMetadata(String authServerUrl) { - WebClient client = OidcDevServicesUtils.createWebClient(vertxInstance); - try { - String metadataUrl = authServerUrl + OidcConstants.WELL_KNOWN_CONFIGURATION; - LOG.infof("OIDC Dev Console: discovering the provider metadata at %s", metadataUrl); - - HttpResponse resp = client.getAbs(metadataUrl) - .putHeader(HttpHeaders.ACCEPT.toString(), "application/json").send().await().indefinitely(); - if (resp.statusCode() == 200) { - return resp.bodyAsJsonObject(); - } else { - LOG.errorf("OIDC metadata discovery failed: %s", resp.bodyAsString()); - return null; - } - } catch (Throwable t) { - LOG.infof("OIDC metadata can not be discovered: %s", t.toString()); - return null; - } finally { - client.close(); - } - } - private static String getConfigProperty(String name) { return ConfigProvider.getConfig().getValue(name, String.class); } @@ -195,7 +135,7 @@ private static boolean isOidcTenantEnabled() { private static boolean isDiscoveryEnabled(OidcTenantConfig providerConfig) { return ConfigProvider.getConfig().getOptionalValue(DISCOVERY_ENABLED_CONFIG_KEY, Boolean.class) - .orElse((providerConfig != null ? providerConfig.discoveryEnabled.orElse(true) : true)); + .orElse((providerConfig != null ? providerConfig.discoveryEnabled().orElse(true) : true)); } private static boolean getBooleanProperty(String name) { @@ -210,7 +150,7 @@ private static String getAuthServerUrl(OidcTenantConfig providerConfig) { try { return getConfigProperty(AUTH_SERVER_URL_CONFIG_KEY); } catch (Exception ex) { - return providerConfig != null ? providerConfig.authServerUrl.get() : null; + return providerConfig != null ? 
providerConfig.authServerUrl().get() : null; } } diff --git a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/KeycloakDevUIProcessor.java b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/KeycloakDevUIProcessor.java index c8468c7f002e0..71072fadc006d 100644 --- a/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/KeycloakDevUIProcessor.java +++ b/extensions/oidc/deployment/src/main/java/io/quarkus/oidc/deployment/devservices/keycloak/KeycloakDevUIProcessor.java @@ -72,7 +72,7 @@ void produceProviderComponent(Optional confi users, keycloakRealms, configProps.get().isContainerRestarted(), - httpConfiguration); + httpConfiguration, false, null); // use same card page so that both pages appear on the same card var keycloakAdminPageItem = new KeycloakAdminPageBuildItem(cardPageBuildItem); keycloakAdminPageProducer.produce(keycloakAdminPageItem); diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevJsonRpcService.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevJsonRpcService.java index 2944e03ffa7ae..742677989272b 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevJsonRpcService.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevJsonRpcService.java @@ -2,8 +2,6 @@ import static io.quarkus.oidc.runtime.devui.OidcDevServicesUtils.getTokens; -import jakarta.annotation.PostConstruct; -import jakarta.annotation.PreDestroy; import jakarta.inject.Inject; import org.eclipse.microprofile.config.ConfigProvider; @@ -21,17 +19,8 @@ public class OidcDevJsonRpcService { @Inject OidcDevLoginObserver oidcDevTokensObserver; - private Vertx vertx; - - @PostConstruct - public void startup() { - vertx = Vertx.vertx(); - } - - @PreDestroy - public void shutdown() { - vertx.close(); - } + @Inject + Vertx vertx; @NonBlocking public OidcDevUiRuntimePropertiesDTO getProperties() { @@ -72,7 +61,7 @@ public Multi streamOidcLoginEvent() { return oidcDevTokensObserver.streamOidcLoginEvent(); } - public void hydrate(OidcDevUiRpcSvcPropertiesBean properties, HttpConfiguration httpConfiguration) { + void hydrate(OidcDevUiRpcSvcPropertiesBean properties, HttpConfiguration httpConfiguration) { this.props = properties; this.httpConfiguration = httpConfiguration; } diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevUiRecorder.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevUiRecorder.java index f1b220e4d1610..7cefaff7013f0 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevUiRecorder.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/devui/OidcDevUiRecorder.java @@ -4,17 +4,28 @@ import java.util.List; import java.util.Map; +import org.jboss.logging.Logger; + import io.quarkus.arc.runtime.BeanContainer; +import io.quarkus.oidc.common.runtime.OidcConstants; import io.quarkus.oidc.runtime.OidcConfig; import io.quarkus.runtime.RuntimeValue; import io.quarkus.runtime.annotations.Recorder; +import io.quarkus.vertx.core.runtime.VertxCoreRecorder; import io.quarkus.vertx.http.runtime.HttpConfiguration; import io.vertx.core.Handler; +import io.vertx.core.http.HttpHeaders; +import io.vertx.core.json.JsonObject; import io.vertx.ext.web.RoutingContext; +import io.vertx.mutiny.core.buffer.Buffer; +import io.vertx.mutiny.ext.web.client.HttpResponse; +import 
io.vertx.mutiny.ext.web.client.WebClient; @Recorder public class OidcDevUiRecorder { + private static final Logger LOG = Logger.getLogger(OidcDevUiRecorder.class); + private final RuntimeValue oidcConfigRuntimeValue; public OidcDevUiRecorder(RuntimeValue oidcConfigRuntimeValue) { @@ -31,8 +42,18 @@ public RuntimeValue getRpcServiceProperties(Strin String logoutUrl, Duration webClientTimeout, Map> grantOptions, Map oidcUsers, String oidcProviderName, String oidcApplicationType, String oidcGrantType, boolean introspectionIsAvailable, String keycloakAdminUrl, List keycloakRealms, boolean swaggerIsAvailable, - boolean graphqlIsAvailable, String swaggerUiPath, String graphqlUiPath, boolean alwaysLogoutUserInDevUiOnReload) { - + boolean graphqlIsAvailable, String swaggerUiPath, String graphqlUiPath, boolean alwaysLogoutUserInDevUiOnReload, + boolean discoverMetadata, String authServerUrl) { + if (discoverMetadata) { + JsonObject metadata = discoverMetadata(authServerUrl); + if (metadata != null) { + authorizationUrl = metadata.getString("authorization_endpoint"); + tokenUrl = metadata.getString("token_endpoint"); + logoutUrl = metadata.getString("end_session_endpoint"); + introspectionIsAvailable = metadata.containsKey("introspection_endpoint") + || metadata.containsKey("userinfo_endpoint"); + } + } return new RuntimeValue( new OidcDevUiRpcSvcPropertiesBean(authorizationUrl, tokenUrl, logoutUrl, webClientTimeout, grantOptions, oidcUsers, oidcProviderName, oidcApplicationType, oidcGrantType, @@ -48,4 +69,25 @@ public Handler logoutHandler() { return new OidcDevSessionLogoutHandler(); } + private static JsonObject discoverMetadata(String authServerUrl) { + WebClient client = OidcDevServicesUtils.createWebClient(VertxCoreRecorder.getVertx().get()); + try { + String metadataUrl = authServerUrl + OidcConstants.WELL_KNOWN_CONFIGURATION; + LOG.infof("OIDC Dev Console: discovering the provider metadata at %s", metadataUrl); + + HttpResponse resp = client.getAbs(metadataUrl) + .putHeader(HttpHeaders.ACCEPT.toString(), "application/json").send().await().indefinitely(); + if (resp.statusCode() == 200) { + return resp.bodyAsJsonObject(); + } else { + LOG.errorf("OIDC metadata discovery failed: %s", resp.bodyAsString()); + return null; + } + } catch (Throwable t) { + LOG.infof("OIDC metadata can not be discovered: %s", t.toString()); + return null; + } finally { + client.close(); + } + } } diff --git a/integration-tests/oidc-mtls/src/test/java/io/quarkus/it/oidc/OidcMtlsTest.java b/integration-tests/oidc-mtls/src/test/java/io/quarkus/it/oidc/OidcMtlsTest.java index 0529704d89240..0f3371019ff86 100644 --- a/integration-tests/oidc-mtls/src/test/java/io/quarkus/it/oidc/OidcMtlsTest.java +++ b/integration-tests/oidc-mtls/src/test/java/io/quarkus/it/oidc/OidcMtlsTest.java @@ -10,6 +10,8 @@ import java.nio.file.Path; import java.nio.file.Paths; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import io.quarkus.oidc.common.runtime.OidcConstants; @@ -37,96 +39,94 @@ public class OidcMtlsTest { @TestHTTPResource(tls = true) URL url; - @Test - public void testMtlsJwt() throws Exception { - Vertx vertx = Vertx.vertx(); - try { - WebClientOptions options = createWebClientOptions(); - WebClient webClient = WebClient.create(new io.vertx.mutiny.core.Vertx(vertx), options); - - // HTTP 200 - HttpResponse resp = webClient.get("/service/mtls-jwt") - .putHeader("Authorization", - OidcConstants.BEARER_SCHEME + " " + getAccessToken("backend-service", null, 
"alice")) - .send().await() - .indefinitely(); - assertEquals(200, resp.statusCode()); - String name = resp.bodyAsString(); - assertEquals("Identities: CN=backend-service, alice;" - + " Client: backend-service;" - + " JWT cert thumbprint: true, introspection cert thumbprint: false", name); - - // HTTP 401, invalid token - resp = webClient.get("/service/mtls-jwt") - .putHeader("Authorization", OidcConstants.BEARER_SCHEME + " " + "123") - .send().await() - .indefinitely(); - assertEquals(401, resp.statusCode()); - } finally { + private static Vertx vertx; + + @BeforeAll + public static void createVertx() { + vertx = Vertx.vertx(); + } + + @AfterAll + public static void closeVertx() { + if (vertx != null) { vertx.close(); } } + @Test + public void testMtlsJwt() throws Exception { + WebClientOptions options = createWebClientOptions(); + WebClient webClient = WebClient.create(new io.vertx.mutiny.core.Vertx(vertx), options); + + // HTTP 200 + HttpResponse resp = webClient.get("/service/mtls-jwt") + .putHeader("Authorization", + OidcConstants.BEARER_SCHEME + " " + getAccessToken("backend-service", null, "alice")) + .send().await() + .indefinitely(); + assertEquals(200, resp.statusCode()); + String name = resp.bodyAsString(); + assertEquals("Identities: CN=backend-service, alice;" + + " Client: backend-service;" + + " JWT cert thumbprint: true, introspection cert thumbprint: false", name); + + // HTTP 401, invalid token + resp = webClient.get("/service/mtls-jwt") + .putHeader("Authorization", OidcConstants.BEARER_SCHEME + " " + "123") + .send().await() + .indefinitely(); + assertEquals(401, resp.statusCode()); + } + @Test public void testMtlsIntrospection() throws Exception { - Vertx vertx = Vertx.vertx(); - try { - WebClientOptions options = createWebClientOptions(); - WebClient webClient = WebClient.create(new io.vertx.mutiny.core.Vertx(vertx), options); - - // HTTP 200 - HttpResponse resp = webClient.get("/service/mtls-introspection") - .putHeader("Authorization", - OidcConstants.BEARER_SCHEME + " " + getAccessToken("backend-service", null, "alice")) - .send().await() - .indefinitely(); - assertEquals(200, resp.statusCode()); - String name = resp.bodyAsString(); - assertEquals("Identities: CN=backend-service, alice;" - + " Client: backend-service;" - + " JWT cert thumbprint: false, introspection cert thumbprint: true", name); - - // HTTP 401, invalid token - resp = webClient.get("/service/mtls-introspection") - .putHeader("Authorization", OidcConstants.BEARER_SCHEME + " " + "123") - .send().await() - .indefinitely(); - assertEquals(401, resp.statusCode()); - } finally { - vertx.close(); - } + WebClientOptions options = createWebClientOptions(); + WebClient webClient = WebClient.create(new io.vertx.mutiny.core.Vertx(vertx), options); + + // HTTP 200 + HttpResponse resp = webClient.get("/service/mtls-introspection") + .putHeader("Authorization", + OidcConstants.BEARER_SCHEME + " " + getAccessToken("backend-service", null, "alice")) + .send().await() + .indefinitely(); + assertEquals(200, resp.statusCode()); + String name = resp.bodyAsString(); + assertEquals("Identities: CN=backend-service, alice;" + + " Client: backend-service;" + + " JWT cert thumbprint: false, introspection cert thumbprint: true", name); + + // HTTP 401, invalid token + resp = webClient.get("/service/mtls-introspection") + .putHeader("Authorization", OidcConstants.BEARER_SCHEME + " " + "123") + .send().await() + .indefinitely(); + assertEquals(401, resp.statusCode()); } @Test public void testMtlsClientWithSecret() throws 
Exception { - Vertx vertx = Vertx.vertx(); - try { - WebClientOptions options = createWebClientOptions(); - WebClient webClient = WebClient.create(new io.vertx.mutiny.core.Vertx(vertx), options); - - String accessToken = getAccessToken("backend-client-with-secret", "secret", "alice"); - // HTTP 200 - HttpResponse resp = webClient.get("/service/mtls-client-with-secret") - .putHeader("Authorization", - OidcConstants.BEARER_SCHEME + " " + accessToken) - .send().await() - .indefinitely(); - assertEquals(200, resp.statusCode()); - String name = resp.bodyAsString(); - assertEquals("Identities: CN=backend-service, alice;" - + " Client: backend-client-with-secret;" - + " JWT cert thumbprint: false, introspection cert thumbprint: false", name); - - // HTTP 401, token is valid but it is not certificate bound - resp = webClient.get("/service/mtls-jwt") - .putHeader("Authorization", OidcConstants.BEARER_SCHEME + " " + accessToken) - .send().await() - .indefinitely(); - assertEquals(401, resp.statusCode()); - - } finally { - vertx.close(); - } + WebClientOptions options = createWebClientOptions(); + WebClient webClient = WebClient.create(new io.vertx.mutiny.core.Vertx(vertx), options); + + String accessToken = getAccessToken("backend-client-with-secret", "secret", "alice"); + // HTTP 200 + HttpResponse resp = webClient.get("/service/mtls-client-with-secret") + .putHeader("Authorization", + OidcConstants.BEARER_SCHEME + " " + accessToken) + .send().await() + .indefinitely(); + assertEquals(200, resp.statusCode()); + String name = resp.bodyAsString(); + assertEquals("Identities: CN=backend-service, alice;" + + " Client: backend-client-with-secret;" + + " JWT cert thumbprint: false, introspection cert thumbprint: false", name); + + // HTTP 401, token is valid but it is not certificate bound + resp = webClient.get("/service/mtls-jwt") + .putHeader("Authorization", OidcConstants.BEARER_SCHEME + " " + accessToken) + .send().await() + .indefinitely(); + assertEquals(401, resp.statusCode()); } private String getAccessToken(String clientName, String clientSecret, String userName) { diff --git a/test-framework/oidc-server/src/main/java/io/quarkus/test/oidc/client/OidcTestClient.java b/test-framework/oidc-server/src/main/java/io/quarkus/test/oidc/client/OidcTestClient.java index d0964004442a3..a857600ab2b50 100644 --- a/test-framework/oidc-server/src/main/java/io/quarkus/test/oidc/client/OidcTestClient.java +++ b/test-framework/oidc-server/src/main/java/io/quarkus/test/oidc/client/OidcTestClient.java @@ -9,6 +9,8 @@ import org.eclipse.microprofile.config.ConfigProvider; +import io.quarkus.arc.Arc; +import io.quarkus.arc.ArcContainer; import io.vertx.core.MultiMap; import io.vertx.core.Vertx; import io.vertx.core.buffer.Buffer; @@ -22,8 +24,8 @@ public class OidcTestClient { private final static String CLIENT_ID_PROP = "quarkus.oidc.client-id"; private final static String CLIENT_SECRET_PROP = "quarkus.oidc.credentials.secret"; - Vertx vertx = Vertx.vertx(); - WebClient client = WebClient.create(vertx); + private volatile Vertx vertx = null; + private volatile WebClient client = null; private String authServerUrl; private String tokenUrl; @@ -121,7 +123,7 @@ private String getAccessTokenInternal(MultiMap requestMap, Map e requestMap = requestMap.addAll(extraProps); } - var result = client.postAbs(getTokenUrl()) + var result = getClient().postAbs(getTokenUrl()) .putHeader("Content-Type", "application/x-www-form-urlencoded") .sendBuffer(encodeForm(requestMap)); 
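        // getClient() creates the WebClient lazily and reuses the Arc-managed Vertx instance
        // when a container is running (see getClient() and getVertx() below)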
await().atMost(REQUEST_TIMEOUT).until(result::isComplete); @@ -153,7 +155,7 @@ public String getAuthServerUrl() { public String getTokenUrl() { if (tokenUrl == null) { getAuthServerUrl(); - var result = client.getAbs(authServerUrl + "/.well-known/openid-configuration") + var result = getClient().getAbs(authServerUrl + "/.well-known/openid-configuration") .send(); await().atMost(REQUEST_TIMEOUT).until(result::isComplete); tokenUrl = result.result().bodyAsJsonObject().getString("token_endpoint"); @@ -191,6 +193,27 @@ private static String urlEncode(String value) { } } + private WebClient getClient() { + if (client == null) { + client = WebClient.create(getVertx()); + } + return client; + } + + private Vertx getVertx() { + final ArcContainer container = Arc.container(); + if (container != null && container.isRunning()) { + var managedVertx = container.instance(Vertx.class).orElse(null); + if (managedVertx != null) { + return managedVertx; + } + } + if (vertx == null) { + vertx = Vertx.vertx(); + } + return vertx; + } + public void close() { if (client != null) { client.close(); From 1d9029d45e1549a9c879ecb98a144d97b7929073 Mon Sep 17 00:00:00 2001 From: Hannah Arndt Date: Wed, 8 Jan 2025 17:18:54 +0100 Subject: [PATCH 12/32] Added helper methods for manual spans in mutiny pipelines. In a traditional blocking and synchronous framework the opentelemetry context is attached to ThreadLocal. In reactive programming, where multiple processings share the same event-loop thread, one has to use Vert.x duplicated contexts instead (see https://quarkus.io/guides/duplicated-context). wrapWithSpan ensures that the pipeline is executed on a duplicated context (If the current context already is duplicated, it will stay the same. Therefore, nested calls to wrapWithSpan will all run on the same vert.x context). inspired by Jan Peremsky (https://github.com/jan-peremsky/quarkus-reactive-otel/blob/c74043d388ec4df155f466f1d6938931c3389b70/src/main/java/com/fng/ewallet/pex/Tracer.java) and edeandrea (https://github.com/quarkusio/quarkus-super-heroes/blob/main/event-statistics/src/main/java/io/quarkus/sample/superheroes/statistics/listener/SuperStats.java) suggestions from review I will squash with the previous commit once everything is approved suggestion from reviewer todo: squash when everything is done -> adapt commit message. In this solution span and scope are local variables. no need for a stack. unnecessary line removed fix test to run on different contexts again cleanup --- .../main/asciidoc/opentelemetry-tracing.adoc | 54 +++ extensions/opentelemetry/deployment/pom.xml | 5 + .../traces/MutinyTracingHelperTest.java | 313 ++++++++++++++++++ .../tracing/mutiny/MutinyTracingHelper.java | 121 +++++++ 4 files changed, 493 insertions(+) create mode 100644 extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/traces/MutinyTracingHelperTest.java create mode 100644 extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/mutiny/MutinyTracingHelper.java diff --git a/docs/src/main/asciidoc/opentelemetry-tracing.adoc b/docs/src/main/asciidoc/opentelemetry-tracing.adoc index 4c5944576ee1d..a7e8505f8e405 100644 --- a/docs/src/main/asciidoc/opentelemetry-tracing.adoc +++ b/docs/src/main/asciidoc/opentelemetry-tracing.adoc @@ -526,6 +526,60 @@ public void tracedWork() { } ---- +=== Mutiny +Methods returning reactive types can also be annotated with `@WithSpan` and `@AddingSpanAttributes` to create a new span or add attributes to the current span. 
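For example, a minimal sketch of an annotated reactive method (the class, method and span names are illustrative, and the annotations are assumed to come from the standard `io.opentelemetry.instrumentation.annotations` package) could look like this:
[source,java]
----
import jakarta.enterprise.context.ApplicationScoped;

import io.opentelemetry.instrumentation.annotations.SpanAttribute;
import io.opentelemetry.instrumentation.annotations.WithSpan;
import io.smallrye.mutiny.Uni;

@ApplicationScoped
public class TracedGreetingService {

    // a span named "prepare-greeting" is started when the method is invoked
    // and ended when the returned Uni completes
    @WithSpan("prepare-greeting")
    public Uni<String> greeting(@SpanAttribute("name") String name) {
        return Uni.createFrom().item(name)
                .onItem().transform(n -> "Hello " + n);
    }
}
----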
+ +If you need to create spans manually within a mutiny pipeline, use `wrapWithSpan` method from `io.quarkus.opentelemetry.runtime.tracing.mutiny.MutinyTracingHelper`. + +Example. Assuming you have the following pipeline: +[source,java] +---- +Uni uni = Uni.createFrom().item("hello") + //start trace here + .onItem().transform(item -> item + " world") + .onItem().transform(item -> item + "!") + //end trace here + .subscribe().with( + item -> System.out.println("Received: " + item), + failure -> System.out.println("Failed with " + failure) + ); +---- +wrap it like this: +[source,java] +---- +import static io.quarkus.opentelemetry.runtime.tracing.mutiny.MutinyTracingHelper.wrapWithSpan; +... +@Inject +Tracer tracer; +... +Context context = Context.current(); +Uni uni = Uni.createFrom().item("hello") + .transformToUni(m -> wrapWithSpan(tracer, Optional.of(context), "my-span-name", + Uni.createFrom().item(m) + .onItem().transform(item -> item + " world") + .onItem().transform(item -> item + "!") + )) + .subscribe().with( + item -> System.out.println("Received: " + item), + failure -> System.out.println("Failed with " + failure) + ); + +---- +for multi-pipelines it works similarly: +[source,java] +---- +Multi.createFrom().items("Alice", "Bob", "Charlie") + .transformToMultiAndConcatenate(m -> TracingHelper.withTrace("my-span-name", + Multi.createFrom().item(m) + .onItem().transform(name -> "Hello " + name) + )) + .subscribe().with( + item -> System.out.println("Received: " + item), + failure -> System.out.println("Failed with " + failure) + ); +---- +Instead of `transformToMultiAndConcatenate` you can use `transformToMultiAndMerge` if you don't care about the order of the items. + === Quarkus Messaging - Kafka When using the Quarkus Messaging extension for Kafka, diff --git a/extensions/opentelemetry/deployment/pom.xml b/extensions/opentelemetry/deployment/pom.xml index cac0090542ced..f0b9e0ef6d2a4 100644 --- a/extensions/opentelemetry/deployment/pom.xml +++ b/extensions/opentelemetry/deployment/pom.xml @@ -90,6 +90,11 @@ vertx-web-client test + + io.smallrye.reactive + smallrye-mutiny-vertx-junit5 + test + io.quarkus quarkus-reactive-routes-deployment diff --git a/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/traces/MutinyTracingHelperTest.java b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/traces/MutinyTracingHelperTest.java new file mode 100644 index 0000000000000..58313b1442200 --- /dev/null +++ b/extensions/opentelemetry/deployment/src/test/java/io/quarkus/opentelemetry/deployment/traces/MutinyTracingHelperTest.java @@ -0,0 +1,313 @@ +package io.quarkus.opentelemetry.deployment.traces; + +import static io.quarkus.opentelemetry.runtime.tracing.mutiny.MutinyTracingHelper.wrapWithSpan; +import static org.assertj.core.api.Assertions.assertThat; + +import java.util.List; +import java.util.Optional; +import java.util.stream.Stream; + +import jakarta.inject.Inject; + +import org.jboss.shrinkwrap.api.ShrinkWrap; +import org.jboss.shrinkwrap.api.asset.StringAsset; +import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; +import 
io.opentelemetry.sdk.trace.data.SpanData; +import io.quarkus.opentelemetry.deployment.common.exporter.TestSpanExporter; +import io.quarkus.opentelemetry.deployment.common.exporter.TestSpanExporterProvider; +import io.quarkus.opentelemetry.runtime.QuarkusContextStorage; +import io.quarkus.test.QuarkusUnitTest; +import io.smallrye.common.vertx.VertxContext; +import io.smallrye.mutiny.Multi; +import io.smallrye.mutiny.Uni; +import io.smallrye.mutiny.helpers.test.AssertSubscriber; +import io.smallrye.mutiny.helpers.test.UniAssertSubscriber; +import io.vertx.core.Context; +import io.vertx.core.Vertx; + +class MutinyTracingHelperTest { + + @RegisterExtension + static final QuarkusUnitTest TEST = new QuarkusUnitTest() + .setArchiveProducer( + () -> ShrinkWrap.create(JavaArchive.class) + .addClasses(TestSpanExporter.class, TestSpanExporterProvider.class) + .addAsResource(new StringAsset(TestSpanExporterProvider.class.getCanonicalName()), + "META-INF/services/io.opentelemetry.sdk.autoconfigure.spi.traces.ConfigurableSpanExporterProvider")); + + @Inject + private TestSpanExporter spanExporter; + + @Inject + private Tracer tracer; + + @Inject + private Vertx vertx; + + @AfterEach + void tearDown() { + spanExporter.reset(); + } + + @ParameterizedTest(name = "{index}: Simple uni pipeline {1}") + @MethodSource("generateContextRunners") + void testSimpleUniPipeline(final String contextType, final String contextName) { + + final UniAssertSubscriber subscriber = Uni.createFrom() + .item("Hello") + .emitOn(r -> runOnContext(r, vertx, contextType)) + .onItem() + .transformToUni(m -> wrapWithSpan(tracer, "testSpan", + Uni.createFrom().item(m).onItem().transform(s -> { + final Span span = tracer.spanBuilder("subspan").startSpan(); + try (final Scope scope = span.makeCurrent()) { + return s + " world"; + } finally { + span.end(); + } + }))) + .subscribe() + .withSubscriber(new UniAssertSubscriber<>()); + + subscriber.awaitItem().assertItem("Hello world"); + + //ensure there are two spans with subspan as child of testSpan + final List spans = spanExporter.getFinishedSpanItems(2); + assertThat(spans.stream().map(SpanData::getName)).containsExactlyInAnyOrder("testSpan", "subspan"); + assertChildSpan(spans, "testSpan", "subspan"); + } + + @ParameterizedTest(name = "{index}: Explicit parent {1}") + @MethodSource("generateContextRunners") + void testSpanWithExplicitParent(final String contextType, final String contextName) { + + final String parentSpanName = "parentSpan"; + final String pipelineSpanName = "pipelineSpan"; + final String subspanName = "subspan"; + + final Span parentSpan = tracer.spanBuilder(parentSpanName).startSpan(); + final io.opentelemetry.context.Context parentContext = io.opentelemetry.context.Context.current().with(parentSpan); + + final UniAssertSubscriber subscriber = Uni.createFrom() + .item("Hello") + .emitOn(r -> runOnContext(r, vertx, contextType)) + .onItem() + .transformToUni(m -> wrapWithSpan(tracer, Optional.of(parentContext), + pipelineSpanName, + Uni.createFrom().item(m).onItem().transform(s -> { + final Span span = tracer.spanBuilder(subspanName).startSpan(); + try (final Scope scope = span.makeCurrent()) { + return s + " world"; + } finally { + span.end(); + } + }))) + .subscribe() + .withSubscriber(new UniAssertSubscriber<>()); + + subscriber.awaitItem().assertItem("Hello world"); + parentSpan.end(); + + //ensure there are 3 spans with proper parent-child relationships + final List spans = spanExporter.getFinishedSpanItems(3); + 
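        // span export order is not deterministic, so the assertions below match spans by name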
assertThat(spans.stream().map(SpanData::getName)).containsExactlyInAnyOrder(parentSpanName, pipelineSpanName, + subspanName); + assertChildSpan(spans, parentSpanName, pipelineSpanName); + assertChildSpan(spans, pipelineSpanName, subspanName); + } + + @ParameterizedTest(name = "{index}: Nested uni pipeline with implicit parent {1}") + @MethodSource("generateContextRunners") + void testNestedPipeline_implicitParent(final String contextType, + final String contextName) { + + final String parentSpanName = "parentSpan"; + final String childSpanName = "childSpan"; + + final UniAssertSubscriber subscriber = Uni.createFrom() + .item("test") + .emitOn(r -> runOnContext(r, vertx, contextType)) + .onItem() + .transformToUni(m -> wrapWithSpan(tracer, parentSpanName, + Uni.createFrom().item(m) + .onItem().transform(s -> s + " in outer span") + .onItem().transformToUni(m1 -> wrapWithSpan(tracer, childSpanName, + Uni.createFrom().item(m1) + .onItem().transform(s -> "now in inner span"))) + + )) + .subscribe() + .withSubscriber(new UniAssertSubscriber<>()); + + subscriber.awaitItem(); + + //ensure there are 2 spans with doSomething and doSomethingAsync as children of testSpan + final List spans = spanExporter.getFinishedSpanItems(2); + assertThat(spans.stream().map(SpanData::getName)).containsExactlyInAnyOrder(parentSpanName, childSpanName); + assertChildSpan(spans, parentSpanName, childSpanName); + } + + @ParameterizedTest(name = "{index}: Nested uni pipeline with explicit no parent {1}") + @MethodSource("generateContextRunners") + void testNestedPipeline_explicitNoParent(final String contextType, final String contextName) { + + final String parentSpanName = "parentSpan"; + final String childSpanName = "childSpan"; + + final UniAssertSubscriber subscriber = Uni.createFrom() + .item("test") + .emitOn(r -> runOnContext(r, vertx, contextType)) + .onItem() + .transformToUni(m -> wrapWithSpan(tracer, parentSpanName, + Uni.createFrom().item(m) + .onItem().transform(s -> s + " in outer span") + .onItem().transformToUni(m1 -> wrapWithSpan(tracer, Optional.empty(), childSpanName, + Uni.createFrom().item(m1) + .onItem().transform(s -> "now in inner span"))) + + )) + .subscribe() + .withSubscriber(new UniAssertSubscriber<>()); + + subscriber.awaitItem(); + + //ensure there are 2 spans but without parent-child relationship + final List spans = spanExporter.getFinishedSpanItems(2); + assertThat(spans.stream().map(SpanData::getName)).containsExactlyInAnyOrder(parentSpanName, childSpanName); + assertThat(spans.stream() + .filter(span -> span.getName().equals(childSpanName)) + .findAny() + .orElseThrow() + .getParentSpanId()).isEqualTo("0000000000000000");//signifies no parent + } + + @ParameterizedTest(name = "{index}: Concatenating multi pipeline {1}") + @MethodSource("generateContextRunners") + void testSimpleMultiPipeline_Concatenate(final String contextType, final String contextName) { + + final AssertSubscriber subscriber = Multi.createFrom() + .items("test1", "test2", "test3") + .emitOn(r -> runOnContext(r, vertx, contextType)) + .onItem() + .transformToUniAndConcatenate(m -> wrapWithSpan(tracer, Optional.empty(), "testSpan " + m, + //the traced pipeline + Uni.createFrom().item(m).onItem().transform(s -> { + final Span span = tracer.spanBuilder("subspan " + s).startSpan(); + try (final Scope scope = span.makeCurrent()) { + return s + " transformed"; + } finally { + span.end(); + } + }))) + .subscribe() + .withSubscriber(AssertSubscriber.create(3)); + + subscriber.awaitCompletion().assertItems("test1 transformed", 
"test2 transformed", "test3 transformed"); + + //ensure there are six spans with three pairs of subspan as child of testSpan + final List spans = spanExporter.getFinishedSpanItems(6); + for (int i = 1; i <= 3; i++) { + final int currentI = i; + assertThat(spans.stream().anyMatch(span -> span.getName().equals("testSpan test" + currentI))).isTrue(); + assertThat(spans.stream().anyMatch(span -> span.getName().equals("subspan test" + currentI))).isTrue(); + assertChildSpan(spans, "testSpan test" + currentI, "subspan test" + currentI); + } + } + + @ParameterizedTest(name = "{index}: Merging multi pipeline {1}") + @MethodSource("generateContextRunners") + void testSimpleMultiPipeline_Merge(final String contextType, final String contextName) { + + final AssertSubscriber subscriber = Multi.createFrom() + .items("test1", "test2", "test3") + .emitOn(r -> runOnContext(r, vertx, contextType)) + .onItem() + .transformToUniAndMerge(m -> wrapWithSpan(tracer, Optional.empty(), "testSpan " + m, + Uni.createFrom().item(m).onItem().transform(s -> { + final Span span = tracer.spanBuilder("subspan " + s).startSpan(); + try (final Scope scope = span.makeCurrent()) { + return s + " transformed"; + } finally { + span.end(); + } + }))) + .subscribe() + .withSubscriber(AssertSubscriber.create(3)); + + subscriber.awaitCompletion(); + + //ensure there are six spans with three pairs of subspan as child of testSpan + final List spans = spanExporter.getFinishedSpanItems(6); + for (int i = 1; i <= 3; i++) { + final int currentI = i; + assertThat(spans.stream().anyMatch(span -> span.getName().equals("testSpan test" + currentI))).isTrue(); + assertThat(spans.stream().anyMatch(span -> span.getName().equals("subspan test" + currentI))).isTrue(); + assertChildSpan(spans, "testSpan test" + currentI, "subspan test" + currentI); + } + } + + private static void assertChildSpan(final List spans, final String parentSpanName, + final String childSpanName1) { + assertThat(spans.stream() + .filter(span -> span.getName().equals(childSpanName1)) + .findAny() + .orElseThrow() + .getParentSpanId()).isEqualTo( + spans.stream().filter(span -> span.getName().equals(parentSpanName)).findAny().get().getSpanId()); + } + + private static Stream generateContextRunners() { + return Stream.of( + Arguments.of("WITHOUT_CONTEXT", "Without Context"), + Arguments.of("ROOT_CONTEXT", "On Root Context"), + Arguments.of("DUPLICATED_CONTEXT", "On Duplicated Context")); + } + + private void runOnContext(final Runnable runnable, final Vertx vertx, final String contextType) { + switch (contextType) { + case "WITHOUT_CONTEXT": + runWithoutContext(runnable); + break; + case "ROOT_CONTEXT": + runOnRootContext(runnable, vertx); + break; + case "DUPLICATED_CONTEXT": + runOnDuplicatedContext(runnable, vertx); + break; + default: + throw new IllegalArgumentException("Unknown context type: " + contextType); + } + } + + private static void runWithoutContext(final Runnable runnable) { + assertThat(QuarkusContextStorage.getVertxContext()).isNull(); + runnable.run(); + } + + private static void runOnRootContext(final Runnable runnable, final Vertx vertx) { + final Context rootContext = VertxContext.getRootContext(vertx.getOrCreateContext()); + assertThat(rootContext).isNotNull(); + assertThat(VertxContext.isDuplicatedContext(rootContext)).isFalse(); + assertThat(rootContext).isNotEqualTo(QuarkusContextStorage.getVertxContext()); + + rootContext.runOnContext(v -> runnable.run()); + } + + private static void runOnDuplicatedContext(final Runnable runnable, final Vertx vertx) 
{ + final Context duplicatedContext = VertxContext.createNewDuplicatedContext(vertx.getOrCreateContext()); + assertThat(duplicatedContext).isNotNull(); + assertThat(VertxContext.isDuplicatedContext(duplicatedContext)).isTrue(); + + duplicatedContext.runOnContext(v -> runnable.run()); + } + +} diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/mutiny/MutinyTracingHelper.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/mutiny/MutinyTracingHelper.java new file mode 100644 index 0000000000000..a1197a0174312 --- /dev/null +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/tracing/mutiny/MutinyTracingHelper.java @@ -0,0 +1,121 @@ +package io.quarkus.opentelemetry.runtime.tracing.mutiny; + +import java.util.Optional; +import java.util.concurrent.CancellationException; + +import io.opentelemetry.api.trace.Span; +import io.opentelemetry.api.trace.SpanBuilder; +import io.opentelemetry.api.trace.Tracer; +import io.opentelemetry.context.Scope; +import io.quarkus.opentelemetry.runtime.QuarkusContextStorage; +import io.smallrye.mutiny.Uni; +import io.vertx.core.Context; + +public class MutinyTracingHelper { + + /** + * Wraps the given pipeline with a span with the given name. Ensures that subspans find the current span as context, + * by running on a duplicated context. The span will be closed when the pipeline completes. + * If there is already a span in the current context, it will be used as parent for the new span. + *
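+ * Failures and cancellations of the wrapped pipeline are recorded on the span (via {@code onTermination}) before it is ended.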

+ * Use as follows: + * Given this existing pipeline: + * ```java + * Uni.createFrom().item("Hello") + * .onItem().transform(s -> s + " World") + * .subscribe().with(System.out::println); + * ``` + * wrap like this: + * ```java + * Uni.createFrom().item("Hello") + * .onItem().transformToUni(s -> wrapWithSpan(tracer, "mySpan", Uni.createFrom().item(s + " World"))) + * .subscribe().with(System.out::println); + * ``` + *
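+ * Nested calls to {@code wrapWithSpan} run on the same duplicated context, so an inner span becomes a child of the enclosing one unless an explicit parent (or no parent) is supplied via the {@code Optional} overload.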

+ * it also works with multi: + * ```java + * Multi.createFrom().items("Alice", "Bob", "Charlie") + * .onItem().transform(name -> "Hello " + name) + * .subscribe().with(System.out::println); + * ``` + * wrap like this: + * ```java + * Multi.createFrom().items("Alice", "Bob", "Charlie") + * .onItem().transformToUni(s -> wrapWithSpan(tracer, "mySpan", Uni.createFrom().item("Hello " + s) + * .onItem().transform(name -> "Hello " + name) + * )) + * .subscribe().with(System.out::println); + * ``` + * + * @param the type of the result of the pipeline + * @param spanName + * the name of the span that should be created + * @param pipeline + * the pipeline to run within the span + * + * @return the result of the pipeline + */ + public static Uni wrapWithSpan(final Tracer tracer, final String spanName, final Uni pipeline) { + + return wrapWithSpan(tracer, Optional.of(io.opentelemetry.context.Context.current()), spanName, pipeline); + } + + /** + * see {@link #wrapWithSpan(Tracer, String, Uni)} + * use this method if you manually want to specify the parent context of the new span + * or if you want to ensure the new span is a root span. + * + * @param + * @param parentContext + * the parent context to use for the new span. If empty, a new root span will be created. + * @param spanName + * the name of the span that should be created + * @param pipeline + * the pipeline to run within the span + * + * @return the result of the pipeline + */ + public static Uni wrapWithSpan(final Tracer tracer, + final Optional parentContext, + final String spanName, final Uni pipeline) { + + return runOnDuplicatedContext(Uni.createFrom().deferred(() -> { + final SpanBuilder spanBuilder = tracer.spanBuilder(spanName); + if (parentContext.isPresent()) { + spanBuilder.setParent(parentContext.get()); + } else { + spanBuilder.setNoParent(); + } + final Span span = spanBuilder.startSpan(); + final Scope scope = span.makeCurrent(); + return pipeline.onTermination() + .invoke((o, throwable, isCancelled) -> { + try { + if (Boolean.TRUE.equals(isCancelled)) { + span.recordException(new CancellationException()); + } else if (throwable != null) { + span.recordException(throwable); + } + span.end(); + } finally { + scope.close(); + } + }); + })); + } + + private static Uni runOnDuplicatedContext(final Uni deferred) { + //creates duplicate context, if the current context is not a duplicated one and not null + //Otherwise returns the current context or null + final Context context = QuarkusContextStorage.getVertxContext(); + + return deferred.runSubscriptionOn(runnable -> { + if (context != null) { + context.runOnContext(v -> runnable.run()); + } else { + runnable.run(); + } + }); + } + +} From d88dc63936b99334afb5dd3dffe4457357a76e57 Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Thu, 16 Jan 2025 10:07:39 +0100 Subject: [PATCH 13/32] Fix import format gaps --- .../main/java/org/acme/ApplicationConfigResource.java | 5 ++--- .../src/test/java/org/acme/ExampleResourceTest.java | 10 +++++----- .../runner/src/main/java/org/acme/HelloResource.java | 5 ++--- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/main/java/org/acme/ApplicationConfigResource.java b/integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/main/java/org/acme/ApplicationConfigResource.java index 82145f3a5c3d7..95910560675f7 100644 --- 
a/integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/main/java/org/acme/ApplicationConfigResource.java +++ b/integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/main/java/org/acme/ApplicationConfigResource.java @@ -1,13 +1,12 @@ package org.acme; - -import org.eclipse.microprofile.config.inject.ConfigProperty; - import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import jakarta.ws.rs.Produces; import jakarta.ws.rs.core.MediaType; +import org.eclipse.microprofile.config.inject.ConfigProperty; + @Path("/app-config") public class ApplicationConfigResource { diff --git a/integration-tests/gradle/src/main/resources/custom-filesystem-provider/application/src/test/java/org/acme/ExampleResourceTest.java b/integration-tests/gradle/src/main/resources/custom-filesystem-provider/application/src/test/java/org/acme/ExampleResourceTest.java index 6b2e243fe8f1d..d8b87fa230f9b 100644 --- a/integration-tests/gradle/src/main/resources/custom-filesystem-provider/application/src/test/java/org/acme/ExampleResourceTest.java +++ b/integration-tests/gradle/src/main/resources/custom-filesystem-provider/application/src/test/java/org/acme/ExampleResourceTest.java @@ -1,12 +1,12 @@ package org.acme; +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.is; -import io.quarkus.test.common.QuarkusTestResource; -import io.quarkus.test.junit.QuarkusTest; import org.junit.jupiter.api.Test; -import static io.restassured.RestAssured.given; -import static org.hamcrest.CoreMatchers.is; +import io.quarkus.test.common.QuarkusTestResource; +import io.quarkus.test.junit.QuarkusTest; @QuarkusTest @QuarkusTestResource(TestResource.class) @@ -20,4 +20,4 @@ public void testHelloEndpoint() { .statusCode(200) .body(is("hello")); } -} \ No newline at end of file +} diff --git a/integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java b/integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java index e4bb827f2625a..8aa6dfb55aedd 100644 --- a/integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java +++ b/integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java @@ -1,14 +1,13 @@ package org.acme; -import org.eclipse.microprofile.config.inject.ConfigProperty; - - import jakarta.inject.Inject; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import jakarta.ws.rs.Produces; import jakarta.ws.rs.core.MediaType; +import org.eclipse.microprofile.config.inject.ConfigProperty; + @Path("/hello") public class HelloResource { From 7d664bdf8a17d588fa2363d7e1f056aa31f17570 Mon Sep 17 00:00:00 2001 From: Guillaume Smet Date: Thu, 4 Jul 2024 13:41:38 +0200 Subject: [PATCH 14/32] Enable io.netty.allocator.disableCacheFinalizersForFastThreadLocalThreads This is going to be useful once https://github.com/netty/netty/pull/14155 lands in Quarkus. 
--- .../java/io/quarkus/netty/deployment/NettyProcessor.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyProcessor.java b/extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyProcessor.java index c5469651ce15f..98a0d370ffcaf 100644 --- a/extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyProcessor.java +++ b/extensions/netty/deployment/src/main/java/io/quarkus/netty/deployment/NettyProcessor.java @@ -79,6 +79,11 @@ public SystemPropertyBuildItem setNettyMachineId() { return new SystemPropertyBuildItem("io.netty.machineId", nettyMachineId); } + @BuildStep + public SystemPropertyBuildItem disableFinalizers() { + return new SystemPropertyBuildItem("io.netty.allocator.disableCacheFinalizersForFastThreadLocalThreads", "true"); + } + @BuildStep NativeImageConfigBuildItem build( NettyBuildTimeConfig config, From e227b9c793c79843eff0bbdb19d172c28f55238c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michal=20Vav=C5=99=C3=ADk?= Date: Thu, 16 Jan 2025 13:52:11 +0100 Subject: [PATCH 15/32] docs,tests(oidc): Use OidcTestClient with Dev Svc for OIDC --- ...rity-oidc-bearer-token-authentication.adoc | 42 +++++++++++++++++++ .../security-openid-connect-dev-services.adoc | 1 + .../oidc/OidcDevServicesConfig.java | 4 +- .../oidc/OidcDevServicesProcessor.java | 19 --------- integration-tests/oidc-dev-services/pom.xml | 5 +++ .../src/main/resources/application.properties | 3 ++ ...arerAuthenticationOidcDevServicesTest.java | 25 ++++++----- 7 files changed, 64 insertions(+), 35 deletions(-) diff --git a/docs/src/main/asciidoc/security-oidc-bearer-token-authentication.adoc b/docs/src/main/asciidoc/security-oidc-bearer-token-authentication.adoc index 1e5bc86d05267..0e3c6a1bfe588 100644 --- a/docs/src/main/asciidoc/security-oidc-bearer-token-authentication.adoc +++ b/docs/src/main/asciidoc/security-oidc-bearer-token-authentication.adoc @@ -857,6 +857,48 @@ For a test like this to work, the test `Auth0` application must have the `passwo This example code also shows how to pass additional parameters. For `Auth0`, these are the `audience` and `scope` parameters. +===== Test OIDC DevService + +You can also use `OidcTestClient` to test Quarkus endpoints supported by xref:security-openid-connect-dev-services.adoc#dev-services-for-oidc[Dev Services for OIDC]. 
+No configuration in the `application.properties` file is needed, Quarkus will configure `OidcTestClient` for you: + +[source, java] +---- +package org.acme; + +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.is; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; + +import io.quarkus.test.junit.QuarkusTest; +import io.quarkus.test.oidc.client.OidcTestClient; + +@QuarkusTest +public class GreetingResourceTest { + + static final OidcTestClient oidcTestClient = new OidcTestClient(); + + @AfterAll + public static void close() { + oidcTestClient.close(); + } + + @Test + public void testHelloEndpoint() { + String accessToken = oidcTestClient.getAccessToken("alice", "alice"); + given() + .auth().oauth2(accessToken) + .when().get("/hello") + .then() + .statusCode(200) + .body(is("Hello, Alice")); + } + +} +---- + ifndef::no-deprecated-test-resource[] [[bearer-token-integration-testing-keycloak]] ==== `KeycloakTestResourceLifecycleManager` diff --git a/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc b/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc index 538f7c1b7e040..5bfb9256f947b 100644 --- a/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc +++ b/docs/src/main/asciidoc/security-openid-connect-dev-services.adoc @@ -380,6 +380,7 @@ This document refers to the `http://localhost:8080/q/dev-ui` Dev UI URL in sever If you customize `quarkus.http.root-path` or `quarkus.http.non-application-root-path` properties, then replace `q` accordingly. For more information, see the https://quarkus.io/blog/path-resolution-in-quarkus/[Path resolution in Quarkus] blog post. +[[dev-services-for-oidc]] == Dev Services for OIDC When you work with Keycloak in production, <> provides the best dev mode experience. diff --git a/extensions/devservices/oidc/src/main/java/io/quarkus/devservices/oidc/OidcDevServicesConfig.java b/extensions/devservices/oidc/src/main/java/io/quarkus/devservices/oidc/OidcDevServicesConfig.java index e97eef86dad8d..e1adb8178adb7 100644 --- a/extensions/devservices/oidc/src/main/java/io/quarkus/devservices/oidc/OidcDevServicesConfig.java +++ b/extensions/devservices/oidc/src/main/java/io/quarkus/devservices/oidc/OidcDevServicesConfig.java @@ -25,9 +25,7 @@ public interface OidcDevServicesConfig { /** * A map of roles for OIDC identity provider users. *

- * If empty, default roles are assigned: `alice` receives `admin` and `user` roles, while other users receive - * `user` role. - * This map is used for role creation when no realm file is found at the `realm-path`. + * If empty, default roles are assigned: user `alice` receives `admin` and `user` roles and user `bob` receives role `user`. */ @ConfigDocMapKey("role-name") Map> roles(); diff --git a/extensions/devservices/oidc/src/main/java/io/quarkus/devservices/oidc/OidcDevServicesProcessor.java b/extensions/devservices/oidc/src/main/java/io/quarkus/devservices/oidc/OidcDevServicesProcessor.java index 1729e3ad64e70..1e03ae0158540 100644 --- a/extensions/devservices/oidc/src/main/java/io/quarkus/devservices/oidc/OidcDevServicesProcessor.java +++ b/extensions/devservices/oidc/src/main/java/io/quarkus/devservices/oidc/OidcDevServicesProcessor.java @@ -192,9 +192,6 @@ private static void registerRoutes(Router router) { router.get("/logout").handler(OidcDevServicesProcessor::logout); router.get("/userinfo").handler(OidcDevServicesProcessor::userInfo); - // can be used for testing of bearer token authentication - router.get("/testing/generate/access-token").handler(OidcDevServicesProcessor::generateAccessToken); - KeyPairGenerator kpg; try { kpg = KeyPairGenerator.getInstance("RSA"); @@ -206,22 +203,6 @@ private static void registerRoutes(Router router) { kid = createKeyId(); } - private static void generateAccessToken(RoutingContext rc) { - String user = rc.request().getParam("user"); - if (user == null || user.isEmpty()) { - rc.response().setStatusCode(400).endAndForget("Missing required parameter: user"); - return; - } - String rolesParam = rc.request().getParam("roles"); - Set roles = new HashSet<>(); - if (rolesParam == null || rolesParam.isEmpty()) { - roles.addAll(getUserRoles(user)); - } else { - roles.addAll(Arrays.asList(rolesParam.split(","))); - } - rc.response().endAndForget(createAccessToken(user, roles, Set.of("openid", "email"))); - } - private static List getUsers() { if (userToDefaultRoles.isEmpty()) { return Arrays.asList("alice", "bob"); diff --git a/integration-tests/oidc-dev-services/pom.xml b/integration-tests/oidc-dev-services/pom.xml index eace1af7f7741..50458d925f246 100644 --- a/integration-tests/oidc-dev-services/pom.xml +++ b/integration-tests/oidc-dev-services/pom.xml @@ -44,6 +44,11 @@ + + io.quarkus + quarkus-test-oidc-server + test + io.quarkus diff --git a/integration-tests/oidc-dev-services/src/main/resources/application.properties b/integration-tests/oidc-dev-services/src/main/resources/application.properties index 636d87caec1ef..02f7a3cbb7aa3 100644 --- a/integration-tests/oidc-dev-services/src/main/resources/application.properties +++ b/integration-tests/oidc-dev-services/src/main/resources/application.properties @@ -1,3 +1,6 @@ quarkus.oidc.devservices.enabled=true +quarkus.oidc.devservices.roles.Ronald=admin +%code-flow.quarkus.oidc.devservices.roles.alice=admin,user +%code-flow.quarkus.oidc.devservices.roles.bob=user %code-flow.quarkus.oidc.application-type=web-app diff --git a/integration-tests/oidc-dev-services/src/test/java/io/quarkus/it/oidc/dev/services/BearerAuthenticationOidcDevServicesTest.java b/integration-tests/oidc-dev-services/src/test/java/io/quarkus/it/oidc/dev/services/BearerAuthenticationOidcDevServicesTest.java index eac0592af5e07..623c51403e732 100644 --- a/integration-tests/oidc-dev-services/src/test/java/io/quarkus/it/oidc/dev/services/BearerAuthenticationOidcDevServicesTest.java +++ 
b/integration-tests/oidc-dev-services/src/test/java/io/quarkus/it/oidc/dev/services/BearerAuthenticationOidcDevServicesTest.java @@ -1,25 +1,34 @@ package io.quarkus.it.oidc.dev.services; import org.hamcrest.Matchers; +import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.Test; import io.quarkus.test.junit.QuarkusTest; +import io.quarkus.test.oidc.client.OidcTestClient; import io.restassured.RestAssured; @QuarkusTest public class BearerAuthenticationOidcDevServicesTest { + static final OidcTestClient oidcTestClient = new OidcTestClient(); + + @AfterAll + public static void close() { + oidcTestClient.close(); + } + @Test public void testLoginAsCustomUser() { RestAssured.given() - .auth().oauth2(getAccessToken("Ronald", "admin")) + .auth().oauth2(getAccessToken("Ronald")) .get("/secured/admin-only") .then() .statusCode(200) .body(Matchers.containsString("Ronald")) .body(Matchers.containsString("admin")); RestAssured.given() - .auth().oauth2(getAccessToken("Ronald", "admin")) + .auth().oauth2(getAccessToken("Ronald")) .get("/secured/user-only") .then() .statusCode(403); @@ -62,16 +71,6 @@ public void testLoginAsBob() { } private String getAccessToken(String user) { - return RestAssured.given().get(getAuthServerUrl() + "/testing/generate/access-token?user=" + user).asString(); - } - - private String getAccessToken(String user, String... roles) { - return RestAssured.given() - .get(getAuthServerUrl() + "/testing/generate/access-token?user=" + user + "&roles=" + String.join(",", roles)) - .asString(); - } - - private static String getAuthServerUrl() { - return RestAssured.get("/secured/auth-server-url").then().statusCode(200).extract().body().asString(); + return oidcTestClient.getAccessToken(user, user); } } From 930d0f2d57fc8525b9c7b2aade50082a89217267 Mon Sep 17 00:00:00 2001 From: Sergey Beryozkin Date: Thu, 16 Jan 2025 13:47:19 +0000 Subject: [PATCH 16/32] Enable public access to OidcProviderClientImpl#getWebClient --- .../java/io/quarkus/oidc/runtime/OidcProviderClientImpl.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProviderClientImpl.java b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProviderClientImpl.java index 56d53f00498ea..123853e154ef6 100644 --- a/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProviderClientImpl.java +++ b/extensions/oidc/runtime/src/main/java/io/quarkus/oidc/runtime/OidcProviderClientImpl.java @@ -466,7 +466,7 @@ Vertx getVertx() { return vertx; } - WebClient getWebClient() { + public WebClient getWebClient() { return client; } From c40e9745d4004a7d87b4f6305e6102d5254b43b5 Mon Sep 17 00:00:00 2001 From: punkratz312 Date: Thu, 16 Jan 2025 15:19:32 +0100 Subject: [PATCH 17/32] Update .editorconfig --- .editorconfig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.editorconfig b/.editorconfig index 97a5881cfa7fd..256af214a7b08 100644 --- a/.editorconfig +++ b/.editorconfig @@ -3,7 +3,7 @@ charset = utf-8 end_of_line = lf indent_style = space insert_final_newline = false -max_line_length = 140 +# TBD max_line_length = 140 [*.java] ij_java_names_count_to_use_import_on_demand = 999 From 3b7c7fbba73b0b51c027b5a4734ef1f17451abb9 Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Thu, 16 Jan 2025 15:23:19 +0100 Subject: [PATCH 18/32] max_line_length = 128 --- .editorconfig | 2 +- .../src/main/resources/eclipse-format.xml | 692 ++++++++++-------- 2 files changed, 375 insertions(+), 319 deletions(-) diff --git 
a/.editorconfig b/.editorconfig index 256af214a7b08..b62fb6cd71416 100644 --- a/.editorconfig +++ b/.editorconfig @@ -3,7 +3,7 @@ charset = utf-8 end_of_line = lf indent_style = space insert_final_newline = false -# TBD max_line_length = 140 +max_line_length = 128 [*.java] ij_java_names_count_to_use_import_on_demand = 999 diff --git a/independent-projects/ide-config/src/main/resources/eclipse-format.xml b/independent-projects/ide-config/src/main/resources/eclipse-format.xml index f6e4206e5f3f4..746da3f54f6af 100644 --- a/independent-projects/ide-config/src/main/resources/eclipse-format.xml +++ b/independent-projects/ide-config/src/main/resources/eclipse-format.xml @@ -1,322 +1,378 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From 378e0d59fc0a0b92e601920935d03d4e5923389a Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Thu, 16 Jan 2025 15:26:20 +0100 Subject: [PATCH 19/32] editorconfig --- core/pom.xml | 8 ++++++++ pom.xml | 29 +++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/core/pom.xml b/core/pom.xml index bd2477b254f75..18b98121f49f1 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -23,4 +23,12 @@ class-change-agent junit4-mock + + + + org.ec4j.maven + editorconfig-maven-plugin + + + diff --git a/pom.xml b/pom.xml index 05a9946f3aeea..5464e3009a7af 100644 --- a/pom.xml +++ b/pom.xml @@ -177,6 +177,35 @@ false + + + org.ec4j.maven + editorconfig-maven-plugin + 0.1.3 + + + + check + + + + + + logs/** + node/** + node_modules/** + .docs/** + .mvn/** + .m2/** + .jdk/** + mvnw + mvnw.cmd + src/main/resources/static/lib/** + src/main/resources/static/fonts/** + .examples/docker-compose/with-keycloak/keycloak/import/** + + + From 57f9b7f19b9c6fe994396e855422af290eef0dc7 Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Thu, 16 Jan 2025 16:00:05 +0100 Subject: [PATCH 20/32] wip --- pom.xml | 38 ++++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/pom.xml b/pom.xml index 5464e3009a7af..1b0cf8cfcc76a 100644 --- a/pom.xml +++ b/pom.xml @@ -189,25 +189,31 @@ - - - logs/** - node/** - node_modules/** - .docs/** - .mvn/** - .m2/** - .jdk/** - mvnw - mvnw.cmd - src/main/resources/static/lib/** - src/main/resources/static/fonts/** - .examples/docker-compose/with-keycloak/keycloak/import/** - - + + + + + + + + + + + 
+ + + + + + + + org.ec4j.maven + editorconfig-maven-plugin + + io.quarkus.bot From bd0a225124fe8e17c26a04b731f89cea779b964b Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Sun, 5 Jan 2025 12:54:18 +0100 Subject: [PATCH 21/32] use autoconfig instead of manual config: 999 Next, disable wildcard imports: navigate to Editor -> Code Style -> Java -> Imports and set Class count to use import with '*' to 999. Do the same with Names count to use static import with '*'. --- .editorconfig | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 .editorconfig diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000000000..c23676479e5b1 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,10 @@ +[*] +charset = utf-8 +end_of_line = lf +indent_style = space +insert_final_newline = false +max_line_length = 160 + +[*.java] +ij_java_names_count_to_use_import_on_demand = 999 +ij_java_class_count_to_use_import_on_demand = 999 From 888140715a4da0fddaab818f79976a3256754547 Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Sun, 5 Jan 2025 12:54:51 +0100 Subject: [PATCH 22/32] max_line_length = 140 --- .editorconfig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.editorconfig b/.editorconfig index c23676479e5b1..97a5881cfa7fd 100644 --- a/.editorconfig +++ b/.editorconfig @@ -3,7 +3,7 @@ charset = utf-8 end_of_line = lf indent_style = space insert_final_newline = false -max_line_length = 160 +max_line_length = 140 [*.java] ij_java_names_count_to_use_import_on_demand = 999 From f69438aa60a1ebc2286145abc42c20f23a19c573 Mon Sep 17 00:00:00 2001 From: punkratz312 Date: Thu, 16 Jan 2025 15:19:32 +0100 Subject: [PATCH 23/32] Update .editorconfig --- .editorconfig | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.editorconfig b/.editorconfig index 97a5881cfa7fd..256af214a7b08 100644 --- a/.editorconfig +++ b/.editorconfig @@ -3,7 +3,7 @@ charset = utf-8 end_of_line = lf indent_style = space insert_final_newline = false -max_line_length = 140 +# TBD max_line_length = 140 [*.java] ij_java_names_count_to_use_import_on_demand = 999 From 56b3b5c145d4487baa1d7773e9e756c01e5ac175 Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Thu, 16 Jan 2025 15:23:19 +0100 Subject: [PATCH 24/32] max_line_length = 128 --- .editorconfig | 2 +- .../src/main/resources/eclipse-format.xml | 692 ++++++++++-------- 2 files changed, 375 insertions(+), 319 deletions(-) diff --git a/.editorconfig b/.editorconfig index 256af214a7b08..b62fb6cd71416 100644 --- a/.editorconfig +++ b/.editorconfig @@ -3,7 +3,7 @@ charset = utf-8 end_of_line = lf indent_style = space insert_final_newline = false -# TBD max_line_length = 140 +max_line_length = 128 [*.java] ij_java_names_count_to_use_import_on_demand = 999 diff --git a/independent-projects/ide-config/src/main/resources/eclipse-format.xml b/independent-projects/ide-config/src/main/resources/eclipse-format.xml index f6e4206e5f3f4..746da3f54f6af 100644 --- a/independent-projects/ide-config/src/main/resources/eclipse-format.xml +++ b/independent-projects/ide-config/src/main/resources/eclipse-format.xml @@ -1,322 +1,378 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + From 7cf83e06136ffcaf57728e32efedd362b38b7a6d Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Thu, 16 Jan 2025 15:26:20 +0100 Subject: [PATCH 25/32] editorconfig --- core/pom.xml | 8 ++++++++ pom.xml | 29 +++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+) diff --git a/core/pom.xml b/core/pom.xml index bd2477b254f75..18b98121f49f1 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -23,4 +23,12 @@ class-change-agent junit4-mock + + + + org.ec4j.maven + editorconfig-maven-plugin + + + diff --git a/pom.xml b/pom.xml index 05a9946f3aeea..5464e3009a7af 100644 --- a/pom.xml +++ b/pom.xml @@ -177,6 +177,35 @@ false + + + org.ec4j.maven + editorconfig-maven-plugin + 0.1.3 + + + + check + + + + + + logs/** + node/** + node_modules/** + .docs/** + .mvn/** + .m2/** + .jdk/** + mvnw + mvnw.cmd + src/main/resources/static/lib/** + src/main/resources/static/fonts/** + .examples/docker-compose/with-keycloak/keycloak/import/** + + + From 8faa0428f3ff44db30321b521bad1c9ff3101750 Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Thu, 16 Jan 2025 10:07:39 +0100 Subject: [PATCH 26/32] Fix import format gaps --- .../main/java/org/acme/ApplicationConfigResource.java | 5 ++--- .../src/test/java/org/acme/ExampleResourceTest.java | 10 +++++----- .../runner/src/main/java/org/acme/HelloResource.java | 5 ++--- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/main/java/org/acme/ApplicationConfigResource.java b/integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/main/java/org/acme/ApplicationConfigResource.java index 82145f3a5c3d7..95910560675f7 100644 --- a/integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/main/java/org/acme/ApplicationConfigResource.java +++ b/integration-tests/gradle/src/main/resources/basic-java-library-module/application/src/main/java/org/acme/ApplicationConfigResource.java @@ -1,13 +1,12 @@ package org.acme; - -import org.eclipse.microprofile.config.inject.ConfigProperty; - import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import jakarta.ws.rs.Produces; import jakarta.ws.rs.core.MediaType; +import org.eclipse.microprofile.config.inject.ConfigProperty; + @Path("/app-config") public class ApplicationConfigResource { diff --git a/integration-tests/gradle/src/main/resources/custom-filesystem-provider/application/src/test/java/org/acme/ExampleResourceTest.java b/integration-tests/gradle/src/main/resources/custom-filesystem-provider/application/src/test/java/org/acme/ExampleResourceTest.java index 6b2e243fe8f1d..d8b87fa230f9b 100644 --- 
a/integration-tests/gradle/src/main/resources/custom-filesystem-provider/application/src/test/java/org/acme/ExampleResourceTest.java +++ b/integration-tests/gradle/src/main/resources/custom-filesystem-provider/application/src/test/java/org/acme/ExampleResourceTest.java @@ -1,12 +1,12 @@ package org.acme; +import static io.restassured.RestAssured.given; +import static org.hamcrest.CoreMatchers.is; -import io.quarkus.test.common.QuarkusTestResource; -import io.quarkus.test.junit.QuarkusTest; import org.junit.jupiter.api.Test; -import static io.restassured.RestAssured.given; -import static org.hamcrest.CoreMatchers.is; +import io.quarkus.test.common.QuarkusTestResource; +import io.quarkus.test.junit.QuarkusTest; @QuarkusTest @QuarkusTestResource(TestResource.class) @@ -20,4 +20,4 @@ public void testHelloEndpoint() { .statusCode(200) .body(is("hello")); } -} \ No newline at end of file +} diff --git a/integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java b/integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java index e4bb827f2625a..8aa6dfb55aedd 100644 --- a/integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java +++ b/integration-tests/maven/src/test/resources-filtered/projects/project-with-extension/runner/src/main/java/org/acme/HelloResource.java @@ -1,14 +1,13 @@ package org.acme; -import org.eclipse.microprofile.config.inject.ConfigProperty; - - import jakarta.inject.Inject; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import jakarta.ws.rs.Produces; import jakarta.ws.rs.core.MediaType; +import org.eclipse.microprofile.config.inject.ConfigProperty; + @Path("/hello") public class HelloResource { From 5d6e52d18347aaacf58db6bf26df5c9378cb8dc0 Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Tue, 14 Jan 2025 23:59:59 +0100 Subject: [PATCH 27/32] avoid star imports --- .../main/java/io/quarkus/builder/BuildContext.java | 4 ++-- .../quarkus/deployment/CollectionClassProcessor.java | 11 +++++++++-- .../deployment/pkg/builditem/JarBuildItem.java | 3 ++- .../quarkus/deployment/pkg/steps/AppCDSBuildStep.java | 2 +- 4 files changed, 14 insertions(+), 6 deletions(-) diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildContext.java b/core/builder/src/main/java/io/quarkus/builder/BuildContext.java index 4d2c23ee429f2..c24310df419ae 100644 --- a/core/builder/src/main/java/io/quarkus/builder/BuildContext.java +++ b/core/builder/src/main/java/io/quarkus/builder/BuildContext.java @@ -1,7 +1,5 @@ package io.quarkus.builder; -import static io.quarkus.builder.Execution.*; - import java.time.LocalTime; import java.util.ArrayList; import java.util.Collections; @@ -20,6 +18,8 @@ import io.quarkus.builder.item.SimpleBuildItem; import io.quarkus.builder.location.Location; +import static io.quarkus.builder.Execution.log; + /** * The context passed to a deployer's operation. 
* diff --git a/core/deployment/src/main/java/io/quarkus/deployment/CollectionClassProcessor.java b/core/deployment/src/main/java/io/quarkus/deployment/CollectionClassProcessor.java index 530a695ea7407..53de887aaaf48 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/CollectionClassProcessor.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/CollectionClassProcessor.java @@ -1,10 +1,17 @@ package io.quarkus.deployment; -import java.util.*; - import io.quarkus.deployment.annotations.BuildStep; import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.LinkedHashSet; +import java.util.LinkedList; +import java.util.TreeMap; +import java.util.TreeSet; + public class CollectionClassProcessor { @BuildStep ReflectiveClassBuildItem setupCollectionClasses() { diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java index ba5a0e5601bb3..6909c9be7287e 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java @@ -1,6 +1,5 @@ package io.quarkus.deployment.pkg.builditem; -import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.*; import java.nio.file.Path; import java.util.Collection; @@ -11,6 +10,8 @@ import io.quarkus.deployment.pkg.PackageConfig; import io.quarkus.sbom.ApplicationManifestConfig; +import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.MUTABLE_JAR; + public final class JarBuildItem extends SimpleBuildItem { private final Path path; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/AppCDSBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/AppCDSBuildStep.java index 12bbefbc61222..04dc1b39e29b9 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/AppCDSBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/AppCDSBuildStep.java @@ -1,6 +1,6 @@ package io.quarkus.deployment.pkg.steps; -import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.*; +import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.FAST_JAR; import static io.quarkus.deployment.pkg.steps.LinuxIDUtil.getLinuxID; import static io.quarkus.deployment.util.ContainerRuntimeUtil.detectContainerRuntime; From 6ff88b6d06269eea20cc3d681ec3ed6397126666 Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Wed, 15 Jan 2025 18:21:37 +0100 Subject: [PATCH 28/32] add more --- .../deployment/pkg/builditem/JarBuildItem.java | 3 +-- .../deployment/pkg/steps/JarResultBuildStep.java | 1 - .../main/java/org/acme/MyMessagingApplication.java | 6 +++--- .../src/main/java/GreetingVertx.java | 2 -- .../azure/functions/resteasy/runtime/Function.java | 10 +++++++++- .../cyclonedx/generator/CycloneDxSbomGenerator.java | 9 ++++++++- .../funqy/deployment/FunctionScannerBuildStep.java | 4 +++- .../client/runtime/devui/util/ConsumerFactory.java | 6 +++++- .../kubernetes/deployment/EnvVarValidator.java | 8 +++++++- .../runtime/exporter/otlp/OTelExporterRecorder.java | 7 ++++++- .../runtime/logs/OpenTelemetryLogHandler.java | 7 +++++-- .../redis/runtime/RedisCacheBuildRecorder.java | 5 ++++- .../cli/letsencrypt/LetsEncryptRenewCommand.java | 3 ++- 
.../graphql/deployment/VertxGraphqlProcessor.java | 5 ++++- .../io/quarkus/arc/impl/AbstractSharedContext.java | 5 ++++- .../io/quarkus/maven/ExtensionDescriptorMojo.java | 10 +++++++++- .../catalog/processor/CatalogProcessor.java | 7 ++++++- .../keycloak/KeycloakRealmResourceManager.java | 13 +++++++++++-- .../tck/jwt/DeploymentPropertyConfigProvider.java | 1 - .../test/junit/QuarkusMainTestExtension.java | 7 ++++++- .../test/mongodb/MongoReplicaSetTestResource.java | 10 ++++++++-- 21 files changed, 101 insertions(+), 28 deletions(-) diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java index 6909c9be7287e..842b0c8467b3a 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/builditem/JarBuildItem.java @@ -1,5 +1,6 @@ package io.quarkus.deployment.pkg.builditem; +import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.MUTABLE_JAR; import java.nio.file.Path; import java.util.Collection; @@ -10,8 +11,6 @@ import io.quarkus.deployment.pkg.PackageConfig; import io.quarkus.sbom.ApplicationManifestConfig; -import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.MUTABLE_JAR; - public final class JarBuildItem extends SimpleBuildItem { private final Path path; diff --git a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java index bd36f8ba78ae9..aff0f11a2bc4b 100644 --- a/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java +++ b/core/deployment/src/main/java/io/quarkus/deployment/pkg/steps/JarResultBuildStep.java @@ -1,7 +1,6 @@ package io.quarkus.deployment.pkg.steps; import static io.quarkus.commons.classloading.ClassLoaderHelper.fromClassNameToResourceName; -import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.LEGACY_JAR; import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.MUTABLE_JAR; import static io.quarkus.deployment.pkg.PackageConfig.JarConfig.JarType.UBER_JAR; diff --git a/devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/extension-codestarts/messaging-codestart/java/src/main/java/org/acme/MyMessagingApplication.java b/devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/extension-codestarts/messaging-codestart/java/src/main/java/org/acme/MyMessagingApplication.java index c2254aff79618..a2f1fd3df713a 100644 --- a/devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/extension-codestarts/messaging-codestart/java/src/main/java/org/acme/MyMessagingApplication.java +++ b/devtools/project-core-extension-codestarts/src/main/resources/codestarts/quarkus/extension-codestarts/messaging-codestart/java/src/main/java/org/acme/MyMessagingApplication.java @@ -1,12 +1,12 @@ package org.acme; -import io.quarkus.runtime.StartupEvent; -import org.eclipse.microprofile.reactive.messaging.*; +import java.util.stream.Stream; import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.event.Observes; import jakarta.inject.Inject; -import java.util.stream.Stream; + +import io.quarkus.runtime.StartupEvent; @ApplicationScoped public class MyMessagingApplication { diff --git 
a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java index f9aad01092494..12213a373f74f 100644 --- a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java +++ b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java @@ -1,7 +1,5 @@ package ${package}; -import static io.quarkus.vertx.web.Route.HttpMethod.*; - import io.quarkus.vertx.web.Route; import io.vertx.ext.web.RoutingContext; diff --git a/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java b/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java index e34cd8f0099d1..f3f3821443e8e 100644 --- a/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java +++ b/extensions/azure-functions-http/runtime/src/main/java/io/quarkus/azure/functions/resteasy/runtime/Function.java @@ -1,6 +1,14 @@ package io.quarkus.azure.functions.resteasy.runtime; -import static com.microsoft.azure.functions.HttpMethod.*; +import static com.microsoft.azure.functions.HttpMethod.CONNECT; +import static com.microsoft.azure.functions.HttpMethod.DELETE; +import static com.microsoft.azure.functions.HttpMethod.GET; +import static com.microsoft.azure.functions.HttpMethod.HEAD; +import static com.microsoft.azure.functions.HttpMethod.OPTIONS; +import static com.microsoft.azure.functions.HttpMethod.PATCH; +import static com.microsoft.azure.functions.HttpMethod.POST; +import static com.microsoft.azure.functions.HttpMethod.PUT; +import static com.microsoft.azure.functions.HttpMethod.TRACE; import java.util.Optional; diff --git a/extensions/cyclonedx/generator/src/main/java/io/quarkus/cyclonedx/generator/CycloneDxSbomGenerator.java b/extensions/cyclonedx/generator/src/main/java/io/quarkus/cyclonedx/generator/CycloneDxSbomGenerator.java index eb3be5e9bd786..533b87d86b54b 100644 --- a/extensions/cyclonedx/generator/src/main/java/io/quarkus/cyclonedx/generator/CycloneDxSbomGenerator.java +++ b/extensions/cyclonedx/generator/src/main/java/io/quarkus/cyclonedx/generator/CycloneDxSbomGenerator.java @@ -8,7 +8,14 @@ import java.net.URISyntaxException; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.Objects; +import java.util.Properties; +import java.util.TreeMap; import org.apache.commons.lang3.StringUtils; import org.apache.maven.model.MailingList; diff --git a/extensions/funqy/funqy-server-common/deployment/src/main/java/io/quarkus/funqy/deployment/FunctionScannerBuildStep.java b/extensions/funqy/funqy-server-common/deployment/src/main/java/io/quarkus/funqy/deployment/FunctionScannerBuildStep.java index 0064bf54ed611..15429b2e95a0d 100644 --- a/extensions/funqy/funqy-server-common/deployment/src/main/java/io/quarkus/funqy/deployment/FunctionScannerBuildStep.java +++ b/extensions/funqy/funqy-server-common/deployment/src/main/java/io/quarkus/funqy/deployment/FunctionScannerBuildStep.java @@ -1,7 +1,9 @@ package io.quarkus.funqy.deployment; import static io.quarkus.deployment.annotations.ExecutionTime.STATIC_INIT; -import 
static io.quarkus.funqy.deployment.ReflectionRegistrationUtil.*; +import static io.quarkus.funqy.deployment.ReflectionRegistrationUtil.IGNORE_FIELD_FOR_REFLECTION_PREDICATE; +import static io.quarkus.funqy.deployment.ReflectionRegistrationUtil.IGNORE_METHOD_FOR_REFLECTION_PREDICATE; +import static io.quarkus.funqy.deployment.ReflectionRegistrationUtil.IGNORE_TYPE_FOR_REFLECTION_PREDICATE; import java.lang.reflect.Modifier; import java.util.Collection; diff --git a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/util/ConsumerFactory.java b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/util/ConsumerFactory.java index 34a577ead19a9..21132e6a16880 100644 --- a/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/util/ConsumerFactory.java +++ b/extensions/kafka-client/runtime/src/main/java/io/quarkus/kafka/client/runtime/devui/util/ConsumerFactory.java @@ -1,6 +1,10 @@ package io.quarkus.kafka.client.runtime.devui.util; -import java.util.*; +import java.util.Collection; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.UUID; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; diff --git a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/EnvVarValidator.java b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/EnvVarValidator.java index 85c62e5aa0f7c..f5e6e674edce0 100644 --- a/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/EnvVarValidator.java +++ b/extensions/kubernetes/vanilla/deployment/src/main/java/io/quarkus/kubernetes/deployment/EnvVarValidator.java @@ -1,6 +1,12 @@ package io.quarkus.kubernetes.deployment; -import java.util.*; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import org.jboss.logging.Logger; diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/OTelExporterRecorder.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/OTelExporterRecorder.java index 57cd1f9d2253e..a4ae1b08bd535 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/OTelExporterRecorder.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/exporter/otlp/OTelExporterRecorder.java @@ -43,7 +43,12 @@ import io.quarkus.opentelemetry.runtime.config.build.OTelBuildConfig; import io.quarkus.opentelemetry.runtime.config.runtime.BatchSpanProcessorConfig; import io.quarkus.opentelemetry.runtime.config.runtime.OTelRuntimeConfig; -import io.quarkus.opentelemetry.runtime.config.runtime.exporter.*; +import io.quarkus.opentelemetry.runtime.config.runtime.exporter.CompressionType; +import io.quarkus.opentelemetry.runtime.config.runtime.exporter.OtlpExporterConfig; +import io.quarkus.opentelemetry.runtime.config.runtime.exporter.OtlpExporterLogsConfig; +import io.quarkus.opentelemetry.runtime.config.runtime.exporter.OtlpExporterMetricsConfig; +import io.quarkus.opentelemetry.runtime.config.runtime.exporter.OtlpExporterRuntimeConfig; +import io.quarkus.opentelemetry.runtime.config.runtime.exporter.OtlpExporterTracesConfig; import 
io.quarkus.opentelemetry.runtime.exporter.otlp.logs.NoopLogRecordExporter; import io.quarkus.opentelemetry.runtime.exporter.otlp.logs.VertxGrpcLogRecordExporter; import io.quarkus.opentelemetry.runtime.exporter.otlp.logs.VertxHttpLogRecordExporter; diff --git a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/logs/OpenTelemetryLogHandler.java b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/logs/OpenTelemetryLogHandler.java index 65e2de1270e70..24b7b9f93a78f 100644 --- a/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/logs/OpenTelemetryLogHandler.java +++ b/extensions/opentelemetry/runtime/src/main/java/io/quarkus/opentelemetry/runtime/logs/OpenTelemetryLogHandler.java @@ -1,12 +1,15 @@ package io.quarkus.opentelemetry.runtime.logs; -import static io.opentelemetry.semconv.ExceptionAttributes.*; +import static io.opentelemetry.semconv.ExceptionAttributes.EXCEPTION_MESSAGE; +import static io.opentelemetry.semconv.ExceptionAttributes.EXCEPTION_STACKTRACE; +import static io.opentelemetry.semconv.ExceptionAttributes.EXCEPTION_TYPE; import static io.opentelemetry.semconv.incubating.CodeIncubatingAttributes.CODE_FUNCTION; import static io.opentelemetry.semconv.incubating.CodeIncubatingAttributes.CODE_LINENO; import static io.opentelemetry.semconv.incubating.CodeIncubatingAttributes.CODE_NAMESPACE; import static io.opentelemetry.semconv.incubating.LogIncubatingAttributes.LOG_FILE_PATH; -import static io.opentelemetry.semconv.incubating.ThreadIncubatingAttributes.*; +import static io.opentelemetry.semconv.incubating.ThreadIncubatingAttributes.THREAD_ID; import static io.quarkus.opentelemetry.runtime.config.build.OTelBuildConfig.INSTRUMENTATION_NAME; +import static io.quarkus.vertx.http.runtime.attribute.ThreadNameAttribute.THREAD_NAME; import java.io.PrintWriter; import java.io.StringWriter; diff --git a/extensions/redis-cache/runtime/src/main/java/io/quarkus/cache/redis/runtime/RedisCacheBuildRecorder.java b/extensions/redis-cache/runtime/src/main/java/io/quarkus/cache/redis/runtime/RedisCacheBuildRecorder.java index 1ec0696eb0e9f..b0baa97b3aa5e 100644 --- a/extensions/redis-cache/runtime/src/main/java/io/quarkus/cache/redis/runtime/RedisCacheBuildRecorder.java +++ b/extensions/redis-cache/runtime/src/main/java/io/quarkus/cache/redis/runtime/RedisCacheBuildRecorder.java @@ -1,7 +1,10 @@ package io.quarkus.cache.redis.runtime; import java.lang.reflect.Type; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; import java.util.function.Supplier; import org.jboss.logging.Logger; diff --git a/extensions/tls-registry/cli/src/main/java/io/quarkus/tls/cli/letsencrypt/LetsEncryptRenewCommand.java b/extensions/tls-registry/cli/src/main/java/io/quarkus/tls/cli/letsencrypt/LetsEncryptRenewCommand.java index e34717fc6c025..87c3f4f015f45 100644 --- a/extensions/tls-registry/cli/src/main/java/io/quarkus/tls/cli/letsencrypt/LetsEncryptRenewCommand.java +++ b/extensions/tls-registry/cli/src/main/java/io/quarkus/tls/cli/letsencrypt/LetsEncryptRenewCommand.java @@ -4,7 +4,8 @@ import static io.quarkus.tls.cli.letsencrypt.LetsEncryptConstants.DOT_ENV_FILE; import static io.quarkus.tls.cli.letsencrypt.LetsEncryptConstants.KEY_FILE; import static io.quarkus.tls.cli.letsencrypt.LetsEncryptConstants.LETS_ENCRYPT_DIR; -import static io.quarkus.tls.cli.letsencrypt.LetsEncryptHelpers.*; +import static 
io.quarkus.tls.cli.letsencrypt.LetsEncryptHelpers.adjustPermissions; +import static io.quarkus.tls.cli.letsencrypt.LetsEncryptHelpers.renewCertificate; import static java.lang.System.Logger.Level.INFO; import java.util.concurrent.Callable; diff --git a/extensions/vertx-graphql/deployment/src/main/java/io/quarkus/vertx/graphql/deployment/VertxGraphqlProcessor.java b/extensions/vertx-graphql/deployment/src/main/java/io/quarkus/vertx/graphql/deployment/VertxGraphqlProcessor.java index 8638bca0e38d7..04cadebdd85bf 100644 --- a/extensions/vertx-graphql/deployment/src/main/java/io/quarkus/vertx/graphql/deployment/VertxGraphqlProcessor.java +++ b/extensions/vertx-graphql/deployment/src/main/java/io/quarkus/vertx/graphql/deployment/VertxGraphqlProcessor.java @@ -12,7 +12,10 @@ import io.quarkus.deployment.annotations.Record; import io.quarkus.deployment.builditem.FeatureBuildItem; import io.quarkus.deployment.builditem.LaunchModeBuildItem; -import io.quarkus.deployment.builditem.nativeimage.*; +import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceBundleBuildItem; +import io.quarkus.deployment.builditem.nativeimage.NativeImageResourceDirectoryBuildItem; +import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem; +import io.quarkus.deployment.builditem.nativeimage.RuntimeInitializedClassBuildItem; import io.quarkus.runtime.configuration.ConfigurationException; import io.quarkus.vertx.core.deployment.CoreVertxBuildItem; import io.quarkus.vertx.graphql.runtime.VertxGraphqlRecorder; diff --git a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/AbstractSharedContext.java b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/AbstractSharedContext.java index 3648dfce08890..752a4c385ab9e 100644 --- a/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/AbstractSharedContext.java +++ b/independent-projects/arc/runtime/src/main/java/io/quarkus/arc/impl/AbstractSharedContext.java @@ -1,6 +1,9 @@ package io.quarkus.arc.impl; -import java.util.*; +import java.util.Iterator; +import java.util.Map; +import java.util.Objects; +import java.util.Set; import java.util.function.Supplier; import java.util.stream.Collectors; diff --git a/independent-projects/extension-maven-plugin/src/main/java/io/quarkus/maven/ExtensionDescriptorMojo.java b/independent-projects/extension-maven-plugin/src/main/java/io/quarkus/maven/ExtensionDescriptorMojo.java index 27acf1122cb66..ecdc6e4cbd800 100644 --- a/independent-projects/extension-maven-plugin/src/main/java/io/quarkus/maven/ExtensionDescriptorMojo.java +++ b/independent-projects/extension-maven-plugin/src/main/java/io/quarkus/maven/ExtensionDescriptorMojo.java @@ -8,7 +8,15 @@ import java.nio.file.FileSystem; import java.nio.file.Files; import java.nio.file.Path; -import java.util.*; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import org.apache.maven.artifact.Artifact; diff --git a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/platform/catalog/processor/CatalogProcessor.java b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/platform/catalog/processor/CatalogProcessor.java index ae0037e90f61b..a947cd87bd2d1 100644 --- 
a/independent-projects/tools/devtools-common/src/main/java/io/quarkus/platform/catalog/processor/CatalogProcessor.java +++ b/independent-projects/tools/devtools-common/src/main/java/io/quarkus/platform/catalog/processor/CatalogProcessor.java @@ -2,7 +2,12 @@ import static io.quarkus.platform.catalog.processor.ExtensionProcessor.isUnlisted; -import java.util.*; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; import java.util.stream.Collectors; import io.quarkus.registry.catalog.Category; diff --git a/integration-tests/smallrye-graphql-client-keycloak/src/test/java/io/quarkus/it/smallrye/graphql/keycloak/KeycloakRealmResourceManager.java b/integration-tests/smallrye-graphql-client-keycloak/src/test/java/io/quarkus/it/smallrye/graphql/keycloak/KeycloakRealmResourceManager.java index a527c265f76af..b14148a84e5bc 100644 --- a/integration-tests/smallrye-graphql-client-keycloak/src/test/java/io/quarkus/it/smallrye/graphql/keycloak/KeycloakRealmResourceManager.java +++ b/integration-tests/smallrye-graphql-client-keycloak/src/test/java/io/quarkus/it/smallrye/graphql/keycloak/KeycloakRealmResourceManager.java @@ -1,10 +1,19 @@ package io.quarkus.it.smallrye.graphql.keycloak; import java.io.IOException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import org.keycloak.representations.AccessTokenResponse; -import org.keycloak.representations.idm.*; +import org.keycloak.representations.idm.ClientRepresentation; +import org.keycloak.representations.idm.CredentialRepresentation; +import org.keycloak.representations.idm.RealmRepresentation; +import org.keycloak.representations.idm.RoleRepresentation; +import org.keycloak.representations.idm.RolesRepresentation; +import org.keycloak.representations.idm.UserRepresentation; import org.keycloak.util.JsonSerialization; import org.testcontainers.containers.GenericContainer; diff --git a/tcks/microprofile-jwt/src/main/java/io/quarkus/tck/jwt/DeploymentPropertyConfigProvider.java b/tcks/microprofile-jwt/src/main/java/io/quarkus/tck/jwt/DeploymentPropertyConfigProvider.java index 790b39d2ea735..2d7447a37f30a 100644 --- a/tcks/microprofile-jwt/src/main/java/io/quarkus/tck/jwt/DeploymentPropertyConfigProvider.java +++ b/tcks/microprofile-jwt/src/main/java/io/quarkus/tck/jwt/DeploymentPropertyConfigProvider.java @@ -3,7 +3,6 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.net.URL; -import java.util.*; import org.eclipse.microprofile.config.spi.ConfigSource; import org.eclipse.microprofile.config.spi.ConfigSourceProvider; diff --git a/test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusMainTestExtension.java b/test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusMainTestExtension.java index 7975bd7b1b18d..dafc53877c83e 100644 --- a/test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusMainTestExtension.java +++ b/test-framework/junit5/src/main/java/io/quarkus/test/junit/QuarkusMainTestExtension.java @@ -5,7 +5,12 @@ import java.io.Closeable; import java.lang.reflect.Method; -import java.util.*; +import java.util.Arrays; +import java.util.Enumeration; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; import java.util.concurrent.LinkedBlockingDeque; import java.util.logging.Handler; diff --git 
a/test-framework/mongodb/src/main/java/io/quarkus/test/mongodb/MongoReplicaSetTestResource.java b/test-framework/mongodb/src/main/java/io/quarkus/test/mongodb/MongoReplicaSetTestResource.java index 169e84cae8b9b..04102dc3cf4dd 100644 --- a/test-framework/mongodb/src/main/java/io/quarkus/test/mongodb/MongoReplicaSetTestResource.java +++ b/test-framework/mongodb/src/main/java/io/quarkus/test/mongodb/MongoReplicaSetTestResource.java @@ -1,9 +1,15 @@ package io.quarkus.test.mongodb; -import static org.awaitility.Durations.*; +import static org.awaitility.Durations.ONE_MINUTE; +import static org.awaitility.Durations.ONE_SECOND; import java.net.UnknownHostException; -import java.util.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Optional; import org.awaitility.Awaitility; import org.bson.Document; From 86bf7a823ba3cc75926d8326812b8961f87a66d7 Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Wed, 15 Jan 2025 18:23:10 +0100 Subject: [PATCH 29/32] add more --- .../src/main/java/io/quarkus/builder/BuildContext.java | 4 ++-- .../archetype-resources/src/main/java/GreetingVertx.java | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/core/builder/src/main/java/io/quarkus/builder/BuildContext.java b/core/builder/src/main/java/io/quarkus/builder/BuildContext.java index c24310df419ae..0a17cc526548f 100644 --- a/core/builder/src/main/java/io/quarkus/builder/BuildContext.java +++ b/core/builder/src/main/java/io/quarkus/builder/BuildContext.java @@ -1,5 +1,7 @@ package io.quarkus.builder; +import static io.quarkus.builder.Execution.log; + import java.time.LocalTime; import java.util.ArrayList; import java.util.Collections; @@ -18,8 +20,6 @@ import io.quarkus.builder.item.SimpleBuildItem; import io.quarkus.builder.location.Location; -import static io.quarkus.builder.Execution.log; - /** * The context passed to a deployer's operation. 
* diff --git a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java index 12213a373f74f..aad0fac75be16 100644 --- a/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java +++ b/extensions/amazon-lambda-rest/maven-archetype/src/main/resources/archetype-resources/src/main/java/GreetingVertx.java @@ -1,5 +1,7 @@ package ${package}; +import static io.quarkus.vertx.web.Route.HttpMethod.GET; + import io.quarkus.vertx.web.Route; import io.vertx.ext.web.RoutingContext; From d40575967504529ef7a6986b4b94aed2f58c327d Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Thu, 16 Jan 2025 16:00:05 +0100 Subject: [PATCH 30/32] wip --- pom.xml | 38 ++++++++++++++++++++++---------------- 1 file changed, 22 insertions(+), 16 deletions(-) diff --git a/pom.xml b/pom.xml index 5464e3009a7af..1b0cf8cfcc76a 100644 --- a/pom.xml +++ b/pom.xml @@ -189,25 +189,31 @@ - - - logs/** - node/** - node_modules/** - .docs/** - .mvn/** - .m2/** - .jdk/** - mvnw - mvnw.cmd - src/main/resources/static/lib/** - src/main/resources/static/fonts/** - .examples/docker-compose/with-keycloak/keycloak/import/** - - + + + + + + + + + + + + + + + + + + + org.ec4j.maven + editorconfig-maven-plugin + + io.quarkus.bot From 6ca95bfd79beca38747822890f7f455f6f3378a8 Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Thu, 16 Jan 2025 16:10:05 +0100 Subject: [PATCH 31/32] add --- pom.xml | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/pom.xml b/pom.xml index 1b0cf8cfcc76a..886cfeb30e77e 100644 --- a/pom.xml +++ b/pom.xml @@ -212,6 +212,30 @@ org.ec4j.maven editorconfig-maven-plugin + 0.1.3 + + + + check + + + + + + + + + + + + + + + + + + + From 70ac391e4cfbe4b06fa19c0d7904f7f57b36e18e Mon Sep 17 00:00:00 2001 From: Vincent Potucek Date: Thu, 16 Jan 2025 16:14:13 +0100 Subject: [PATCH 32/32] wip --- .editorconfig | 12 ++++++------ pom.xml | 40 ---------------------------------------- 2 files changed, 6 insertions(+), 46 deletions(-) diff --git a/.editorconfig b/.editorconfig index b62fb6cd71416..4d947c52c0ec1 100644 --- a/.editorconfig +++ b/.editorconfig @@ -1,9 +1,9 @@ -[*] -charset = utf-8 -end_of_line = lf -indent_style = space -insert_final_newline = false -max_line_length = 128 +# [*] +# charset = utf-8 +# end_of_line = lf +# indent_style = space +# insert_final_newline = false +# max_line_length = 128 [*.java] ij_java_names_count_to_use_import_on_demand = 999 diff --git a/pom.xml b/pom.xml index 886cfeb30e77e..d2e3142be02e7 100644 --- a/pom.xml +++ b/pom.xml @@ -189,22 +189,6 @@ - - - - - - - - - - - - - - - - @@ -212,30 +196,6 @@ org.ec4j.maven editorconfig-maven-plugin - 0.1.3 - - - - check - - - - - - - - - - - - - - - - - - -
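The last three patches wire the ec4j EditorConfig check into the root pom.xml. As a minimal sketch (assuming only the org.ec4j.maven:editorconfig-maven-plugin coordinates, the 0.1.3 version, and the check goal that the hunks above name, not this PR's exact configuration or exclude list), the added plugin block would look roughly like:

    <!-- hedged sketch: groupId, artifactId, version and goal are taken from the hunks above; the execution id and layout are illustrative -->
    <plugin>
      <groupId>org.ec4j.maven</groupId>
      <artifactId>editorconfig-maven-plugin</artifactId>
      <version>0.1.3</version>
      <executions>
        <execution>
          <id>check-editorconfig</id>
          <goals>
            <!-- fails the build when files violate the repository's .editorconfig rules -->
            <goal>check</goal>
          </goals>
        </execution>
      </executions>
    </plugin>

With such a binding in place, mvn editorconfig:check (or the bound phase during a normal build) should report files that do not match the .editorconfig settings shown in the final patch.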