diff --git a/gradle/docs.gradle b/gradle/docs.gradle index cd8b83a708..cf389a53c6 100644 --- a/gradle/docs.gradle +++ b/gradle/docs.gradle @@ -8,21 +8,16 @@ antora { stacktrace: true ] dependencies = [ - '@antora/atlas-extension' : '1.0.0-alpha.1', - '@antora/collector-extension' : '1.0.0-alpha.3', - '@asciidoctor/tabs' : '1.0.0-beta.3', - '@springio/antora-extensions' : '1.4.2', - '@springio/asciidoctor-extensions': '1.0.0-alpha.8', + '@antora/atlas-extension' : '1.0.0-alpha.2', + '@antora/collector-extension' : '1.0.1', + '@asciidoctor/tabs' : '1.0.0-beta.6', + '@springio/antora-extensions' : '1.14.4', + '@springio/asciidoctor-extensions': '1.0.0-alpha.16', ] } tasks.named('generateAntoraYml') { - asciidocAttributes = project.provider({ - return ['project-version': project.version, - 'revnumber' : project.version, - 'spring-version' : project.version, - ] - }) + asciidocAttributes = project.provider( { generateAttributes() } ) baseAntoraYmlFile = file('src/main/antora/antora.yml') } @@ -35,3 +30,32 @@ tasks.register('generateAntoraResources') { dependsOn 'createAntoraPartials' dependsOn 'generateAntoraYml' } + +def generateAttributes() { + def springDoc = "https://docs.spring.io" + def springFrameworkReferenceUrl = "$springDoc/spring-framework/reference/${generateVersionWithoutPatch(springVersion)}" + def springBootUrl = "$springDoc/spring-boot/${generateVersionWithoutPatch(springBootVersion)}" + def springIntegrationUrl = "$springDoc/spring-integration/reference" + def kafkaPage = "https://kafka.apache.org" + def kafkaUrl = "$kafkaPage/${kafkaVersion.split('\\.')[0,1].join('')}" + def micrometerTracingReferenceUrl = "https://docs.micrometer.io/tracing/reference/${generateVersionWithoutPatch(micrometerTracingVersion)}" + + return [ + 'project-version': project.version, + 'revnumber' : project.version, + 'spring-version' : project.version, + 'spring-framework-reference-url' : springFrameworkReferenceUrl.toString(), + 'spring-boot-url' : springBootUrl.toString(), + 
'spring-integration-url' : springIntegrationUrl.toString(), + 'kafka-page' : kafkaPage, + 'kafka-url' : kafkaUrl.toString(), + 'micrometer-tracing-reference-url': micrometerTracingReferenceUrl.toString(), + 'javadoc-location-org-springframework-kafka': "$springDoc/spring-kafka/docs/$project.version/api".toString(), + 'javadoc-location-org-apache-kafka': "$kafkaUrl/javadoc".toString() + ] +} + +static String generateVersionWithoutPatch(String version) { + + return version.split('\\.')[0,1].join('.') + (version.endsWith('-SNAPSHOT') ? '-SNAPSHOT' : '') +} diff --git a/spring-kafka-docs/src/main/antora/antora-playbook.yml b/spring-kafka-docs/src/main/antora/antora-playbook.yml index 00d8e13614..fc2a2872c9 100644 --- a/spring-kafka-docs/src/main/antora/antora-playbook.yml +++ b/spring-kafka-docs/src/main/antora/antora-playbook.yml @@ -33,6 +33,7 @@ asciidoc: extensions: - '@asciidoctor/tabs' - '@springio/asciidoctor-extensions' + - '@springio/asciidoctor-extensions/javadoc-extension' sourcemap: true urls: latest_version_segment: '' diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/appendix/change-history.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/appendix/change-history.adoc index 7842d96056..27e25c64c3 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/appendix/change-history.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/appendix/change-history.adoc @@ -891,7 +891,7 @@ See xref:streams.adoc#streams-header-enricher[Header Enricher] for more informat The `MessagingTransformer` has been provided. This allows a Kafka streams topology to interact with a spring-messaging component, such as a Spring Integration flow. -See xref:streams.adoc#streams-messaging[`MessagingProcessor`] and See https://docs.spring.io/spring-integration/docs/current/reference/html/kafka.html#streams-integration[[Calling a Spring Integration Flow from a `KStream`]] for more information. 
+See xref:streams.adoc#streams-messaging[`MessagingProcessor`] and see {spring-integration-url}/kafka.html#streams-integration[Calling a Spring Integration Flow from a `KStream`] for more information. [[cb-2-2-and-2-3-json-component-changes]] === JSON Component Changes diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/appendix/native-images.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/appendix/native-images.adoc index ae9057767e..7d2f5b065c 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/appendix/native-images.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/appendix/native-images.adoc @@ -1,7 +1,7 @@ [[native-images]] = Native Images -https://docs.spring.io/spring-framework/docs/current/reference/html/core.html#aot[Spring AOT] native hints are provided to assist in developing native images for Spring applications that use Spring for Apache Kafka, including hints for AVRO generated classes used in `@KafkaListener`+++s+++. +{spring-framework-reference-url}/core/aot.html[Spring AOT] native hints are provided to assist in developing native images for Spring applications that use Spring for Apache Kafka, including hints for AVRO generated classes used in `@KafkaListener`+++s+++. IMPORTANT: `spring-kafka-test` (and, specifically, its `EmbeddedKafkaBroker`) is not supported in native images. 
diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/micrometer.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/micrometer.adoc index 07f0d200bd..4d6144684f 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/micrometer.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/micrometer.adoc @@ -102,7 +102,7 @@ Set `observationEnabled` to `true` on the `KafkaTemplate` and `ContainerProperti IMPORTANT: Micrometer Observation does not support batch listener; this will enable Micrometer Timers -Refer to https://micrometer.io/docs/tracing[Micrometer Tracing] for more information. +Refer to {micrometer-tracing-reference-url}[Micrometer Tracing] for more information. To add tags to timers/traces, configure a custom `KafkaTemplateObservationConvention` or `KafkaListenerObservationConvention` to the template or listener container, respectively. diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/receiving-messages/filtering.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/receiving-messages/filtering.adoc index 86d6ba069a..17ed26da9b 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/receiving-messages/filtering.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/receiving-messages/filtering.adoc @@ -4,7 +4,7 @@ In certain scenarios, such as rebalancing, a message that has already been processed may be redelivered. The framework cannot know whether such a message has been processed or not. That is an application-level function. -This is known as the https://www.enterpriseintegrationpatterns.com/patterns/messaging/IdempotentReceiver.html[Idempotent Receiver] pattern and Spring Integration provides an https://docs.spring.io/spring-integration/reference/handler-advice/idempotent-receiver.html[implementation] of it. 
+This is known as the https://www.enterpriseintegrationpatterns.com/patterns/messaging/IdempotentReceiver.html[Idempotent Receiver] pattern and Spring Integration provides an {spring-integration-url}/handler-advice/idempotent-receiver.html[implementation] of it. The Spring for Apache Kafka project also provides some assistance by means of the `FilteringMessageListenerAdapter` class, which can wrap your `MessageListener`. This class takes an implementation of `RecordFilterStrategy` in which you implement the `filter` method to signal that a message is a duplicate and should be discarded. diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/receiving-messages/listener-annotation.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/receiving-messages/listener-annotation.adoc index 2de479ae11..ed4e22838f 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/receiving-messages/listener-annotation.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/receiving-messages/listener-annotation.adoc @@ -5,7 +5,7 @@ The `@KafkaListener` annotation is used to designate a bean method as a listener The bean is wrapped in a `MessagingMessageListenerAdapter` configured with various features, such as converters to convert the data, if necessary, to match the method parameters. You can configure most attributes on the annotation with SpEL by using `#{...}` or property placeholders (`${...}`). -See the https://docs.spring.io/spring-kafka/api/org/springframework/kafka/annotation/KafkaListener.html[Javadoc] for more information. +See the javadoc:org.springframework.kafka.annotation.KafkaListener[Javadoc] for more information. 
[[record-listener]] == Record Listeners diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/receiving-messages/message-listener-container.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/receiving-messages/message-listener-container.adoc index febe9b23a1..e4fee60d6b 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/receiving-messages/message-listener-container.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/receiving-messages/message-listener-container.adoc @@ -27,7 +27,7 @@ You can set the listener container's `interceptBeforeTx` property to `false` to Starting with version 2.9, this will apply to any transaction manager, not just `KafkaAwareTransactionManager`+++s+++. This allows, for example, the interceptor to participate in a JDBC transaction started by the container. -Starting with versions 2.3.8, 2.4.6, the `ConcurrentMessageListenerContainer` now supports https://kafka.apache.org/documentation/#static_membership[Static Membership] when the concurrency is greater than one. +Starting with versions 2.3.8, 2.4.6, the `ConcurrentMessageListenerContainer` now supports {kafka-url}/documentation/#static_membership[Static Membership] when the concurrency is greater than one. The `group.instance.id` is suffixed with `-n` with `n` starting at `1`. This, together with an increased `session.timeout.ms`, can be used to reduce rebalance events, for example, when application instances are restarted. @@ -94,7 +94,7 @@ KafkaMessageListenerContainer container = return container; ---- -Refer to the https://docs.spring.io/spring-kafka/api/org/springframework/kafka/listener/ContainerProperties.html[Javadoc] for `ContainerProperties` for more information about the various properties that you can set. +Refer to the javadoc:org.springframework.kafka.listener.ContainerProperties[Javadoc] for `ContainerProperties` for more information about the various properties that you can set. 
Since version 2.1.1, a new property called `logContainerConfig` is available. When `true` and `INFO` logging is enabled each listener container writes a log message summarizing its configuration properties. diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/sending-messages.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/sending-messages.adoc index 8d59b41fe9..9755397d85 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/sending-messages.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/sending-messages.adoc @@ -60,7 +60,7 @@ interface OperationsCallback { } ---- -See the https://docs.spring.io/spring-kafka/api/org/springframework/kafka/core/KafkaTemplate.html[Javadoc] for more detail. +See the javadoc:org.springframework.kafka.core.KafkaTemplate[Javadoc] for more detail. The `sendDefault` API requires that a default topic has been provided to the template. @@ -69,13 +69,13 @@ How the user-provided timestamp is stored depends on the timestamp type configur If the topic is configured to use `CREATE_TIME`, the user-specified timestamp is recorded (or generated if not specified). If the topic is configured to use `LOG_APPEND_TIME`, the user-specified timestamp ignored and the broker adds in the local broker time. -The `metrics` and `partitionsFor` methods delegate to the same methods on the underlying https://kafka.apache.org/20/javadoc/org/apache/kafka/clients/producer/Producer.html[`Producer`]. -The `execute` method provides direct access to the underlying https://kafka.apache.org/20/javadoc/org/apache/kafka/clients/producer/Producer.html[`Producer`]. +The `metrics` and `partitionsFor` methods delegate to the same methods on the underlying javadoc:org.apache.kafka.clients.producer.Producer[]. +The `execute` method provides direct access to the underlying javadoc:org.apache.kafka.clients.producer.Producer[]. 
To use the template, you can configure a producer factory and provide it in the template's constructor. The following example shows how to do so: -[source, java] +[source, java, subs="attributes"] ---- @Bean public ProducerFactory producerFactory() { @@ -88,7 +88,7 @@ public Map producerConfigs() { props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); - // See https://kafka.apache.org/documentation/#producerconfigs for more properties + // See {kafka-url}/documentation/#producerconfigs for more properties return props; } diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/serdes.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/serdes.adoc index 3128120512..17c5848719 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/serdes.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/serdes.adoc @@ -93,7 +93,7 @@ Starting with version 2.3, all the JSON-aware components are configured by defau Also such an instance is supplied with well-known modules for custom data types, such a Java time and Kotlin support. See `JacksonUtils.enhancedObjectMapper()` JavaDocs for more information. This method also registers a `org.springframework.kafka.support.JacksonMimeTypeModule` for `org.springframework.util.MimeType` objects serialization into the plain string for inter-platform compatibility over the network. -A `JacksonMimeTypeModule` can be registered as a bean in the application context and it will be auto-configured into the https://docs.spring.io/spring-boot/docs/current/reference/html/howto.html#howto.spring-mvc.customize-jackson-objectmapper[Spring Boot `ObjectMapper` instance]. 
+A `JacksonMimeTypeModule` can be registered as a bean in the application context and it will be auto-configured into the {spring-boot-url}/how-to/spring-mvc.html#howto.spring-mvc.customize-jackson-objectmapper[Spring Boot `ObjectMapper` instance]. Also starting with version 2.3, the `JsonDeserializer` provides `TypeReference`-based constructors for better handling of target generic container types. @@ -146,7 +146,7 @@ consumerProps.put(JsonDeserializer.TYPE_MAPPINGS, "cat:com.yourcat.Cat, hat:com. IMPORTANT: The corresponding objects must be compatible. -If you use https://docs.spring.io/spring-boot/docs/current/reference/html/messaging.html#messaging.kafka[Spring Boot], you can provide these properties in the `application.properties` (or yaml) file. +If you use {spring-boot-url}/reference/messaging/kafka.html[Spring Boot], you can provide these properties in the `application.properties` (or yaml) file. The following example shows how to do so: [source] @@ -408,7 +408,7 @@ Refer to the https://github.com/spring-projects/spring-retry[spring-retry] proje [[messaging-message-conversion]] == Spring Messaging Message Conversion -Although the `Serializer` and `Deserializer` API is quite simple and flexible from the low-level Kafka `Consumer` and `Producer` perspective, you might need more flexibility at the Spring Messaging level, when using either `@KafkaListener` or https://docs.spring.io/spring-integration/docs/current/reference/html/kafka.html#kafka[Spring Integration's Apache Kafka Support]. +Although the `Serializer` and `Deserializer` API is quite simple and flexible from the low-level Kafka `Consumer` and `Producer` perspective, you might need more flexibility at the Spring Messaging level, when using either `@KafkaListener` or {spring-integration-url}/kafka.html[Spring Integration's Apache Kafka Support]. 
To let you easily convert to and from `org.springframework.messaging.Message`, Spring for Apache Kafka provides a `MessageConverter` abstraction with the `MessagingMessageConverter` implementation and its `JsonMessageConverter` (and subclasses) customization. You can inject the `MessageConverter` into a `KafkaTemplate` instance directly and by using `AbstractKafkaListenerContainerFactory` bean definition for the `@KafkaListener.containerFactory()` property. The following example shows how to do so: @@ -532,7 +532,7 @@ Alternatively, you can configure the `ErrorHandlingDeserializer` to create a cus This function is invoked to create an instance of `T`, which is passed to the listener in the usual fashion. An object of type `FailedDeserializationInfo`, which contains all the contextual information is provided to the function. You can find the `DeserializationException` (as a serialized Java object) in headers. -See the https://docs.spring.io/spring-kafka/api/org/springframework/kafka/support/serializer/ErrorHandlingDeserializer.html[Javadoc] for the `ErrorHandlingDeserializer` for more information. +See the javadoc:org.springframework.kafka.support.serializer.ErrorHandlingDeserializer[Javadoc] for the `ErrorHandlingDeserializer` for more information. You can use the `DefaultKafkaConsumerFactory` constructor that takes key and value `Deserializer` objects and wire in appropriate `ErrorHandlingDeserializer` instances that you have configured with the proper delegates. Alternatively, you can use consumer configuration properties (which are used by the `ErrorHandlingDeserializer`) to instantiate the delegates. 
diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/tombstones.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/tombstones.adoc index e358ef8666..930c17eeea 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/tombstones.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/kafka/tombstones.adoc @@ -1,7 +1,7 @@ [[tombstones]] = Null Payloads and Log Compaction of 'Tombstone' Records -When you use https://kafka.apache.org/documentation/#compaction[Log Compaction], you can send and receive messages with `null` payloads to identify the deletion of a key. +When you use {kafka-url}/documentation/#compaction[Log Compaction], you can send and receive messages with `null` payloads to identify the deletion of a key. You can also receive `null` values for other reasons, such as a `Deserializer` that might return `null` when it cannot deserialize a value. diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/other-resources.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/other-resources.adoc index b0a3ee225e..2bc80a799f 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/other-resources.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/other-resources.adoc @@ -3,7 +3,7 @@ In addition to this reference documentation, we recommend a number of other resources that may help you learn about Spring and Apache Kafka. 
-- https://kafka.apache.org/[Apache Kafka Project Home Page] +- {kafka-page}[Apache Kafka Project Home Page] - https://projects.spring.io/spring-kafka/[Spring for Apache Kafka Home Page] - https://github.com/spring-projects/spring-kafka[Spring for Apache Kafka GitHub Repository] - https://github.com/spring-projects/spring-integration[Spring Integration GitHub Repository (Apache Kafka Module)] diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/quick-tour.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/quick-tour.adoc index a322840a02..5c5c4ddb23 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/quick-tour.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/quick-tour.adoc @@ -66,7 +66,7 @@ This quick tour works with the following versions: == Getting Started The simplest way to get started is to use https://start.spring.io[start.spring.io] (or the wizards in Spring Tool Suits and Intellij IDEA) and create a project, selecting 'Spring for Apache Kafka' as a dependency. -Refer to the https://docs.spring.io/spring-boot/docs/current/reference/html/messaging.html#messaging.kafka[Spring Boot documentation] for more information about its opinionated auto configuration of the infrastructure beans. +Refer to the {spring-boot-url}/reference/messaging/kafka.html[Spring Boot documentation] for more information about its opinionated auto configuration of the infrastructure beans. Here is a minimal consumer application. 
diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/streams.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/streams.adoc index 7675c3ab24..ed475b2be9 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/streams.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/streams.adoc @@ -1,7 +1,7 @@ [[streams-kafka-streams]] = Apache Kafka Streams Support -Starting with version 1.1.4, Spring for Apache Kafka provides first-class support for https://kafka.apache.org/documentation/streams[Kafka Streams]. +Starting with version 1.1.4, Spring for Apache Kafka provides first-class support for {kafka-url}/documentation/streams[Kafka Streams]. To use it from a Spring application, the `kafka-streams` jar must be present on classpath. It is an optional dependency of the Spring for Apache Kafka project and is not downloaded transitively. @@ -76,7 +76,7 @@ public KStream kStream(StreamsBuilder kStreamBuilder) { } ---- -If you would like to control the lifecycle manually (for example, stopping and starting by some condition), you can reference the `StreamsBuilderFactoryBean` bean directly by using the factory bean (`&`) https://docs.spring.io/spring-framework/reference/core/beans/factory-extension.html#beans-factory-extension-factorybean[prefix]. +If you would like to control the lifecycle manually (for example, stopping and starting by some condition), you can reference the `StreamsBuilderFactoryBean` bean directly by using the factory bean (`&`) {spring-framework-reference-url}/core/beans/factory-extension.html#beans-factory-extension-factorybean[prefix]. Since `StreamsBuilderFactoryBean` uses its internal `KafkaStreams` instance, it is safe to stop and restart it again. A new `KafkaStreams` is created on each `start()`. You might also consider using different `StreamsBuilderFactoryBean` instances, if you would like to control the lifecycles for `KStream` instances separately. 
@@ -209,7 +209,7 @@ new KafkaStreamBrancher() == Configuration To configure the Kafka Streams environment, the `StreamsBuilderFactoryBean` requires a `KafkaStreamsConfiguration` instance. -See the Apache Kafka https://kafka.apache.org/0102/documentation/#streamsconfigs[documentation] for all possible options. +See the Apache Kafka {kafka-url}/documentation/#streamsconfigs[documentation] for all possible options. IMPORTANT: Starting with version 2.2, the stream configuration is now provided as a `KafkaStreamsConfiguration` object, rather than as a `StreamsConfig`. @@ -295,7 +295,7 @@ public interface MessagingFunction { Spring Integration automatically provides an implementation using its `GatewayProxyFactoryBean`. It also requires a `MessagingMessageConverter` to convert the key, value and metadata (including headers) to/from a Spring Messaging `Message`. -See https://docs.spring.io/spring-integration/docs/current/reference/html/kafka.html#streams-integration[[Calling a Spring Integration Flow from a `KStream`]] for more information. +See {spring-integration-url}/kafka.html#streams-integration[Calling a Spring Integration Flow from a `KStream`] for more information. [[streams-deser-recovery]] == Recovery from Deserialization Exceptions @@ -336,7 +336,7 @@ Of course, the `recoverer()` bean can be your own implementation of `ConsumerRec Starting with version 3.2, Spring for Apache Kafka provides basic facilities required for interactive queries in Kafka Streams. Interactive queries are useful in stateful Kafka Streams applications since they provide a way to constantly query the stateful stores in the application. Thus, if an application wants to materialize the current view of the system under consideration, interactive queries provide a way to do that. -To learn more about interactive queries, see this https://kafka.apache.org/36/documentation/streams/developer-guide/interactive-queries.html[article]. 
+To learn more about interactive queries, see this {kafka-url}/documentation/streams/developer-guide/interactive-queries.html[article]. The support in Spring for Apache Kafka is centered around an API called `KafkaStreamsInteractiveQueryService` which is a facade around interactive queries APIs in Kafka Streams library. An application can create an instance of this service as a bean and then later on use it to retrieve the state store by its name. @@ -434,7 +434,7 @@ public HostInfo getKafkaStreamsApplicationHostInfo(String store, K key, Seri ---- When using multiple instances of the Kafka Streams processors of the same `application.id` in a distributed way like this, the application is supposed to provide an RPC layer where the state stores can be queried over an RPC endpoint such as a REST one. -See this https://kafka.apache.org/36/documentation/streams/developer-guide/interactive-queries.html#querying-remote-state-stores-for-the-entire-app[article] for more details on this. +See this {kafka-url}/documentation/streams/developer-guide/interactive-queries.html#querying-remote-state-stores-for-the-entire-app[article] for more details on this. When using Spring for Apache Kafka, it is very easy to add a Spring based REST endpoint by using the spring-web technologies. Once there is a REST endpoint, then that can be used to query the state stores from any Kafka Streams instance, given the `HostInfo` where the key is hosted is known to the instance. 
diff --git a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/testing.adoc b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/testing.adoc index 052e5efa8a..735466beda 100644 --- a/spring-kafka-docs/src/main/antora/modules/ROOT/pages/testing.adoc +++ b/spring-kafka-docs/src/main/antora/modules/ROOT/pages/testing.adoc @@ -17,7 +17,7 @@ There are several techniques to configure the broker as discussed in the followi == KafkaTestUtils `org.springframework.kafka.test.utils.KafkaTestUtils` provides a number of static helper methods to consume records, retrieve various record offsets, and others. -Refer to its https://docs.spring.io/spring-kafka/docs/current/api/org/springframework/kafka/test/utils/KafkaTestUtils.html[Javadocs] for complete details. +Refer to its javadoc:org.springframework.kafka.test.utils.KafkaTestUtils[Javadocs] for complete details. [[junit]] == JUnit @@ -137,7 +137,7 @@ So, before running tests with an embedded Kafka on random ports, we can set `spr This is now the default value for this property (starting with version 3.0.10). With the `EmbeddedKafkaBroker.brokerProperties(Map)`, you can provide additional properties for the Kafka servers. -See https://kafka.apache.org/documentation/#brokerconfigs[Kafka Config] for more information about possible broker properties. +See {kafka-url}/documentation/#brokerconfigs[Kafka Config] for more information about possible broker properties. [[configuring-topics]] == Configuring Topics