
Commit 6e8716f

Revert "docs: update env vars acording to CLI output (#448)" (#468)
This reverts commit a090a51.
1 parent aa474df commit 6e8716f

7 files changed (+50 / -50 lines)

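For reference, the environment variable names this revert restores across the quickstarts are:

- `KAFKA_HOST` → `BOOTSTRAP_SERVER` (Quarkus and kcat examples); `KAFKA_BOOTSTRAP_SERVER` in the Node.js example
- `RHOAS_CLIENT_ID` / `RHOAS_CLIENT_SECRET` → `CLIENT_ID` / `CLIENT_SECRET` (Quarkus examples); `USER` / `PASSWORD` in the kcat example; `KAFKA_CLIENT_ID` / `KAFKA_CLIENT_SECRET` in the Node.js example
- `RHOAS_OAUTH_TOKEN_URL` → `OAUTH_TOKEN_ENDPOINT_URI` (Kafka SASL/OAUTHBEARER) and `OAUTH_SERVER_URL` (Apicurio auth); `RHOAS_OAUTH_REALM` → `OAUTH_REALM`
- `SERVICE_REGISTRY_URL` → `REGISTRY_URL`
- The Node.js configuration file is again named `.env` rather than `rhoas.env`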

code-examples/quarkus-kafka-quickstart/src/main/resources/application.properties

Lines changed: 4 additions & 4 deletions
@@ -21,14 +21,14 @@ quarkus.container-image.push=false
 ## ./mvnw quarkus:dev
 ## ./mvnw package -Dquarkus.profile=dev
 
-%dev.kafka.bootstrap.servers=${KAFKA_HOST}
+%dev.kafka.bootstrap.servers=${BOOTSTRAP_SERVER}
 %dev.kafka.security.protocol=SASL_SSL
 
 %dev.kafka.sasl.mechanism=OAUTHBEARER
 %dev.kafka.sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \
-oauth.client.id="${RHOAS_CLIENT_ID}" \
-oauth.client.secret="${RHOAS_CLIENT_SECRET}" \
-oauth.token.endpoint.uri="${RHOAS_OAUTH_TOKEN_URL}" ;
+oauth.client.id="${CLIENT_ID}" \
+oauth.client.secret="${CLIENT_SECRET}" \
+oauth.token.endpoint.uri="${OAUTH_TOKEN_ENDPOINT_URI}" ;
 %dev.kafka.sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler
 
 ## sbo-dev profile that can be used for local development when using
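With the restored names, the `%dev` profile above resolves its placeholders from the environment when the quickstart runs in dev mode (the file's own comments point to `./mvnw quarkus:dev`). A minimal sketch of supplying the values inline for a single run; all values are placeholders:

$ BOOTSTRAP_SERVER=<bootstrap_server> \
  CLIENT_ID=<client_id> \
  CLIENT_SECRET=<client_secret> \
  OAUTH_TOKEN_ENDPOINT_URI=<oauth_token_endpoint_uri> \
  ./mvnw quarkus:dev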

code-examples/quarkus-service-registry-quickstart/consumer/src/main/resources/application.properties

Lines changed: 9 additions & 9 deletions
@@ -4,23 +4,23 @@ mp.messaging.incoming.quotes.value.deserializer=io.apicurio.registry.serde.avro.
 mp.messaging.incoming.quotes.apicurio.registry.use-specific-avro-reader=true
 mp.messaging.incoming.quotes.apicurio.registry.avro-datum-provider=io.apicurio.registry.serde.avro.ReflectAvroDatumProvider
 
-%dev.mp.messaging.incoming.quotes.apicurio.auth.service.url=${RHOAS_OAUTH_TOKEN_URL:https://identity.api.openshift.com/auth}
-%dev.mp.messaging.incoming.quotes.apicurio.auth.realm=${RHOAS_OAUTH_REALM:rhoas}
-%dev.mp.messaging.incoming.quotes.apicurio.auth.client.id=${RHOAS_CLIENT_ID}
-%dev.mp.messaging.incoming.quotes.apicurio.auth.client.secret=${RHOAS_CLIENT_SECRET}
+%dev.mp.messaging.incoming.quotes.apicurio.auth.service.url=${OAUTH_SERVER_URL:https://identity.api.openshift.com/auth}
+%dev.mp.messaging.incoming.quotes.apicurio.auth.realm=${OAUTH_REALM:rhoas}
+%dev.mp.messaging.incoming.quotes.apicurio.auth.client.id=${CLIENT_ID}
+%dev.mp.messaging.incoming.quotes.apicurio.auth.client.secret=${CLIENT_SECRET}
 
-mp.messaging.connector.smallrye-kafka.apicurio.registry.url=${SERVICE_REGISTRY_URL}
+mp.messaging.connector.smallrye-kafka.apicurio.registry.url=${REGISTRY_URL}
 
 %test.quarkus.apicurio-registry.devservices.port=8888
 
 ##Kafka servers and auth configuration
 
-%dev.kafka.bootstrap.servers=${KAFKA_HOST}
+%dev.kafka.bootstrap.servers=${BOOTSTRAP_SERVER}
 %dev.kafka.security.protocol=SASL_SSL
 
 %dev.kafka.sasl.mechanism=OAUTHBEARER
 %dev.kafka.sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \
-oauth.client.id="${RHOAS_CLIENT_ID}" \
-oauth.client.secret="${RHOAS_CLIENT_SECRET}" \
-oauth.token.endpoint.uri="${RHOAS_OAUTH_TOKEN_URL}" ;
+oauth.client.id="${CLIENT_ID}" \
+oauth.client.secret="${CLIENT_SECRET}" \
+oauth.token.endpoint.uri="${OAUTH_TOKEN_ENDPOINT_URI}" ;
 %dev.kafka.sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler

code-examples/quarkus-service-registry-quickstart/producer/src/main/resources/application.properties

Lines changed: 9 additions & 9 deletions
@@ -10,23 +10,23 @@ mp.messaging.outgoing.quotes.value.serializer=io.apicurio.registry.serde.avro.Av
 mp.messaging.outgoing.quotes.key.serializer=org.apache.kafka.common.serialization.StringSerializer
 mp.messaging.outgoing.quotes.merge=true
 
-%dev.mp.messaging.outgoing.quotes.apicurio.auth.realm=${RHOAS_OAUTH_REALM:rhoas}
-%dev.mp.messaging.outgoing.quotes.apicurio.auth.service.url=${RHOAS_OAUTH_TOKEN_URL:https://identity.api.openshift.com/auth}
-%dev.mp.messaging.outgoing.quotes.apicurio.auth.client.id=${RHOAS_CLIENT_ID}
-%dev.mp.messaging.outgoing.quotes.apicurio.auth.client.secret=${RHOAS_CLIENT_SECRET}
+%dev.mp.messaging.outgoing.quotes.apicurio.auth.realm=${OAUTH_REALM:rhoas}
+%dev.mp.messaging.outgoing.quotes.apicurio.auth.service.url=${OAUTH_SERVER_URL:https://identity.api.openshift.com/auth}
+%dev.mp.messaging.outgoing.quotes.apicurio.auth.client.id=${CLIENT_ID}
+%dev.mp.messaging.outgoing.quotes.apicurio.auth.client.secret=${CLIENT_SECRET}
 
-mp.messaging.connector.smallrye-kafka.apicurio.registry.url=${SERVICE_REGISTRY_URL}
+mp.messaging.connector.smallrye-kafka.apicurio.registry.url=${REGISTRY_URL}
 %test.quarkus.apicurio-registry.devservices.port=8888
 
 
 ##Kafka servers and auth configuration
 
-%dev.kafka.bootstrap.servers=${KAFKA_HOST}
+%dev.kafka.bootstrap.servers=${BOOTSTRAP_SERVER}
 %dev.kafka.security.protocol=SASL_SSL
 
 %dev.kafka.sasl.mechanism=OAUTHBEARER
 %dev.kafka.sasl.jaas.config=org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required \
-oauth.client.id="${RHOAS_CLIENT_ID}" \
-oauth.client.secret="${RHOAS_CLIENT_SECRET}" \
-oauth.token.endpoint.uri="${RHOAS_OAUTH_TOKEN_URL}" ;
+oauth.client.id="${CLIENT_ID}" \
+oauth.client.secret="${CLIENT_SECRET}" \
+oauth.token.endpoint.uri="${OAUTH_TOKEN_ENDPOINT_URI}" ;
 %dev.kafka.sasl.login.callback.handler.class=io.strimzi.kafka.oauth.client.JaasClientOauthLoginCallbackHandler

docs/kafka/kcat-kafka/README.adoc

Lines changed: 9 additions & 9 deletions
@@ -130,9 +130,9 @@ endif::[]
 .Setting environment variables for server and credentials
 [source,subs="+quotes"]
 ----
-$ export KAFKA_HOST=__<bootstrap_server>__
-$ export RHOAS_CLIENT_ID=__<client_id>__
-$ export RHOAS_CLIENT_SECRET=__<client_secret>__
+$ export BOOTSTRAP_SERVER=__<bootstrap_server>__
+$ export USER=__<client_id>__
+$ export PASSWORD=__<client_secret>__
 ----
 --
 
@@ -156,10 +156,10 @@ This example uses the SASL/PLAIN authentication mechanism with the server and cr
 .Starting Kafkacat in producer mode
 [source]
 ----
-$ kafkacat -t my-first-kafka-topic -b "$KAFKA_HOST" \
+$ kafkacat -t my-first-kafka-topic -b "$BOOTSTRAP_SERVER" \
 -X security.protocol=SASL_SSL -X sasl.mechanisms=PLAIN \
--X sasl.username="$RHOAS_CLIENT_ID" \
--X sasl.password="$RHOAS_CLIENT_SECRET" -P
+-X sasl.username="$USER" \
+-X sasl.password="$PASSWORD" -P
 ----
 
 NOTE: {product-kafka} also supports the SASL/OAUTHBEARER mechanism for authentication, which is the recommended authentication mechanism to use. However, Kafkacat does not yet fully support OAUTHBEARER, so this example uses SASL/PLAIN.
@@ -205,10 +205,10 @@ This example uses the SASL/PLAIN authentication mechanism with the server and cr
 .Starting Kafkacat in consumer mode
 [source]
 ----
-$ kafkacat -t my-first-kafka-topic -b "$KAFKA_HOST" \
+$ kafkacat -t my-first-kafka-topic -b "$BOOTSTRAP_SERVER" \
 -X security.protocol=SASL_SSL -X sasl.mechanisms=PLAIN \
--X sasl.username="$RHOAS_CLIENT_ID" \
--X sasl.password="$RHOAS_CLIENT_SECRET" -C
+-X sasl.username="$USER" \
+-X sasl.password="$PASSWORD" -C
 
 First message
 Second message
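Because producer mode (`-P`) reads messages from standard input, the producer command can also be fed non-interactively, for example by piping a message in with the variables set earlier:

$ echo "First message" | kafkacat -t my-first-kafka-topic -b "$BOOTSTRAP_SERVER" \
  -X security.protocol=SASL_SSL -X sasl.mechanisms=PLAIN \
  -X sasl.username="$USER" -X sasl.password="$PASSWORD" -P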

docs/kafka/nodejs-kafka/README.adoc

Lines changed: 10 additions & 10 deletions
@@ -119,7 +119,7 @@ To enable your Node.js application to access a Kafka instance, you must configur
 * The generated credentials for your {product-kafka} service account
 * The Simple Authentication and Security Layer (SASL) mechanism that the client will use to authenticate with the Kafka instance
 
-In this task, you'll create a new configuration file called `rhoas.env`. In this file, you'll set the required bootstrap server and client credentials as environment variables.
+In this task, you'll create a new configuration file called `.env`. In this file, you'll set the required bootstrap server and client credentials as environment variables.
 
 .Prerequisites
 ifndef::qs[]
@@ -130,16 +130,16 @@ endif::[]
 
 .Procedure
 
-. In your IDE, create a new file. Save the file with the name `rhoas.env`, at the root level of the `reactive-example` directory for the cloned repository.
+. In your IDE, create a new file. Save the file with the name `.env`, at the root level of the `reactive-example` directory for the cloned repository.
 
-. In the `rhoas.env` file, add the lines shown in the example. These lines set the bootstrap server and client credentials as environment variables to be used by the Node.js application.
+. In the `.env` file, add the lines shown in the example. These lines set the bootstrap server and client credentials as environment variables to be used by the Node.js application.
 +
-.Setting environment variables in the rhoas.env file
+.Setting environment variables in the .env file
 [source,subs="+quotes"]
 ----
-KAFKA_HOST=__<bootstrap_server>__
-RHOAS_CLIENT_ID=__<client_id>__
-RHOAS_CLIENT_SECRET=__<client_secret>__
+KAFKA_BOOTSTRAP_SERVER=__<bootstrap_server>__
+KAFKA_CLIENT_ID=__<client_id>__
+KAFKA_CLIENT_SECRET=__<client_secret>__
 KAFKA_SASL_MECHANISM=plain
 ----
 +
@@ -157,7 +157,7 @@ endif::[]
 +
 In this case, observe that the Node.js application uses the SASL/PLAIN authentication method (that is, the value of `KAFKA_SASL_MECHANISM` is set to `plain`). This means that the application uses only the client ID and client secret to authenticate with the Kafka instance. The application doesn't require an authentication token.
 
-. Save the `rhoas.env` file.
+. Save the `.env` file.
 
 ifdef::qs[]
 .Verification
@@ -310,11 +310,11 @@ The output from both components confirms that they successfully connected to the
 
 . In your IDE, in the `producer-backend` directory of the repository that you cloned, open the `producer.js` file.
 +
-Observe that the producer component is configured to process environment variables from the `rhoas.env` file that you created. The component used the bootstrap server endpoint and client credentials stored in this file to connect to the Kafka instance.
+Observe that the producer component is configured to process environment variables from the `.env` file that you created. The component used the bootstrap server endpoint and client credentials stored in this file to connect to the Kafka instance.
 
 . In the `consumer-backend` directory, open the `consumer.js` file.
 +
-Observe that the consumer component is also configured to process environment variables from the `rhoas.env` file that you created.
+Observe that the consumer component is also configured to process environment variables from the `.env` file that you created.
 
 ifdef::qs[]
 .Verification
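As an alternative to creating the file in an IDE, the `.env` file can also be created from a terminal at the root of the `reactive-example` directory, using the same keys shown in this diff (values are placeholders), for example with a heredoc:

$ cat > .env <<EOF
KAFKA_BOOTSTRAP_SERVER=<bootstrap_server>
KAFKA_CLIENT_ID=<client_id>
KAFKA_CLIENT_SECRET=<client_secret>
KAFKA_SASL_MECHANISM=plain
EOF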

docs/kafka/quarkus-kafka/README.adoc

Lines changed: 4 additions & 4 deletions
@@ -130,10 +130,10 @@ endif::[]
 .Setting environment variables for server and credentials
 [source,subs="+quotes"]
 ----
-$ export KAFKA_HOST=__<bootstrap_server>__
-$ export RHOAS_CLIENT_ID=__<client_id>__
-$ export RHOAS_CLIENT_SECRET=__<client_secret>__
-$ export RHOAS_OAUTH_TOKEN_URL=__<oauth_token_endpoint_uri>__
+$ export BOOTSTRAP_SERVER=__<bootstrap_server>__
+$ export CLIENT_ID=__<client_id>__
+$ export CLIENT_SECRET=__<client_secret>__
+$ export OAUTH_TOKEN_ENDPOINT_URI=__<oauth_token_endpoint_uri>__
 ----
 --
 . In the Quarkus example application, review the `src/main/resources/application.properties` file to understand how the environment variables you set in the previous step are used in your application. This example uses the `dev` configuration profile in the `application.properties` file.
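After exporting the variables shown in this hunk, the `dev` profile in `application.properties` resolves them when the application starts in dev mode. A minimal sketch, assuming the quickstart is run with its Maven wrapper as the property-file comments above suggest:

$ cd code-examples/quarkus-kafka-quickstart
$ ./mvnw quarkus:dev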

docs/registry/quarkus-registry/README.adoc

Lines changed: 5 additions & 5 deletions
@@ -140,11 +140,11 @@ endif::[]
 .Setting environment variables for server and credentials
 [source,subs="+quotes"]
 ----
-$ export KAFKA_HOST=__<bootstrap_server>__
-$ export SERVICE_REGISTRY_URL=__<core_registry_url>__
-$ export RHOAS_OAUTH_TOKEN_URL=__<oauth_token_endpoint_uri>__
-$ export RHOAS_CLIENT_ID=__<client_id>__
-$ export RHOAS_CLIENT_SECRET=__<client_secret>__
+$ export BOOTSTRAP_SERVER=__<bootstrap_server>__
+$ export REGISTRY_URL=__<core_registry_url>__
+$ export OAUTH_TOKEN_ENDPOINT_URI=__<oauth_token_endpoint_uri>__
+$ export CLIENT_ID=__<client_id>__
+$ export CLIENT_SECRET=__<client_secret>__
 ----
 
 . In the Quarkus example application, review the `/src/main/resources/application.properties` files in the `consumer` and `producer` sub-folders to understand how the environment variables you set in the previous step are used. This example uses the `dev` configuration profile in the `application.properties` files.
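The producer and consumer are separate applications, so each is typically started in its own terminal after the variables above are exported. A sketch, assuming each sub-folder is a standard Quarkus module started with the Maven wrapper (adjust the wrapper path to match the quickstart's layout):

$ cd code-examples/quarkus-service-registry-quickstart/producer
$ ./mvnw quarkus:dev

and, in a second terminal:

$ cd code-examples/quarkus-service-registry-quickstart/consumer
$ ./mvnw quarkus:dev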
