
Commit 3584187

Add legacy setup with confluentinc images to run kafka stack with docker compose.
1 parent fc11fa9 commit 3584187

7 files changed: +216 −3 lines


README.md

Lines changed: 6 additions & 2 deletions
@@ -26,7 +26,11 @@ of Java 17 or later.

  Generally, this component is installed
  with [RADAR-Kubernetes](https://github.com/RADAR-base/RADAR-Kubernetes). It uses Docker
- image [radarbase/kafka-connect-rest-fitbit-source](https://hub.docker.com/r/radarbase/kafka-connect-rest-fitbit-source).
+ image [radarbase/kafka-connect-rest-fitbit-source](https://hub.docker.com/r/radarbase/kafka-connect-rest-fitbit-source),
+ which is built from the `kafka-connect-fitbit-source/Dockerfile`. The image is based on the Strimzi Kafka Connect image.
+
+ The Fitbit source connector can also be run with docker compose and the Confluent Kafka Connect image, using `kafka-connect-fitbit-source/Dockerfile-legacy`.
+

  First, [register a Fitbit App](https://dev.fitbit.com/apps) with Fitbit. It should be either a
  server app, for multiple users, or a personal app for a single user. With the server app, you need

@@ -39,7 +43,7 @@ For every Fitbit user you want access to, copy `docker/fitbit-user.yml.template`
  For automatic configuration for multiple users, please take a look at
  `scripts/REDCAP-FITBIT-AUTH-AUTO/README.md`.

- Copy `docker/source-fitbit.properties.template` to `docker/source-fitbit.properties` and enter
+ Copy `docker/legacy/source-fitbit.properties.template` to `docker/legacy/source-fitbit.properties` and enter
  your Fitbit App client ID and client secret. The following table shows the possible properties.

  <table class="data-table"><tbody>
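
In practice the legacy route boils down to filling in the template and bringing the stack up with compose. A minimal sketch of that workflow, using the paths and compose setup from this repository (the credential values themselves are yours to supply):

```bash
# Configure the connector for the legacy compose setup
cp docker/legacy/source-fitbit.properties.template docker/legacy/source-fitbit.properties
# ... enter your Fitbit App client ID and client secret in the copied file ...

# Build the legacy image and start the stack
docker compose up -d --build
```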

docker-compose.yml

Lines changed: 1 addition & 1 deletion
@@ -145,7 +145,7 @@ services:
   radar-fitbit-connector:
     build:
       context: .
-      dockerfile: ./kafka-connect-fitbit-source/Dockerfile
+      dockerfile: ./kafka-connect-fitbit-source/Dockerfile-legacy
     image: radarbase/radar-connect-fitbit-source
     restart: on-failure
     volumes:
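
Since only the dockerfile reference changes here, the connector service can be rebuilt and restarted on its own once docker-compose.yml is updated; a sketch using the service name from the definition above:

```bash
# Rebuild the Fitbit connector image from the legacy Dockerfile and restart the service
docker compose build radar-fitbit-connector
docker compose up -d radar-fitbit-connector
docker compose logs -f radar-fitbit-connector
```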

docker/legacy/README.md

Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
# Confluentinc image based docker setup (legacy)

Files in this directory are used by Dockerfile-legacy to build legacy docker images (Confluentinc-based) of the connectors,
as opposed to the new Strimzi-based images.

The legacy setup can be used to run the Kafka stack (Kafka, Zookeeper, Schema Registry and Kafka Connectors)
with docker compose (see [docker-compose.yml](../../docker-compose.yml)).
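
Once the stack is up, a quick smoke test is to query the REST endpoints of Kafka Connect and the Schema Registry. A minimal sketch, assuming the compose setup publishes the default ports (8083 for Kafka Connect, 8081 for the Schema Registry) to the host:

```bash
# List connectors registered with the Kafka Connect worker
curl -s http://localhost:8083/connectors

# List schema subjects registered in the Schema Registry
curl -s http://localhost:8081/subjects
```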

docker/legacy/ensure

Lines changed: 88 additions & 0 deletions
@@ -0,0 +1,88 @@
#!/bin/bash

if [ "$WAIT_FOR_KAFKA" != "1" ]; then
  echo "Starting without checking for Kafka availability"
  exit 0
fi

max_timeout=32

IS_TEMP=0

echo "===> Wait for infrastructure ..."

if [ -z "$COMMAND_CONFIG_FILE_PATH" ]; then
  COMMAND_CONFIG_FILE_PATH="$(mktemp)"
  IS_TEMP=1
fi

# Build a Kafka command-config file from the CONNECT_* environment variables.
if [ ! -f "$COMMAND_CONFIG_FILE_PATH" ] || [ $IS_TEMP = 1 ]; then
  while IFS='=' read -r -d '' n v; do
    if [[ "$n" == "CONNECT_"* ]]; then
      name="${n/CONNECT_/""}" # remove first "CONNECT_"
      name="${name,,}" # lower case
      name="${name//_/"."}" # replace all '_' with '.'
      echo "$name=$v" >> ${COMMAND_CONFIG_FILE_PATH}
    fi
  done < <(env -0)
fi

# Check if variables exist
if [ -z "$CONNECT_BOOTSTRAP_SERVERS" ]; then
  echo "CONNECT_BOOTSTRAP_SERVERS is not defined"
else
  KAFKA_BROKERS=${KAFKA_BROKERS:-3}

  tries=10
  timeout=1
  while true; do
    KAFKA_CHECK=$(kafka-broker-api-versions --bootstrap-server "$CONNECT_BOOTSTRAP_SERVERS" --command-config "${COMMAND_CONFIG_FILE_PATH}" | grep "(id: " | wc -l)

    if [ "$KAFKA_CHECK" -ge "$KAFKA_BROKERS" ]; then
      echo "Kafka brokers available."
      break
    fi

    tries=$((tries - 1))
    if [ ${tries} -eq 0 ]; then
      echo "FAILED: KAFKA BROKERS NOT READY."
      exit 5
    fi
    echo "Expected $KAFKA_BROKERS brokers but found only $KAFKA_CHECK. Waiting $timeout second(s) before retrying ..."
    sleep ${timeout}
    if [ ${timeout} -lt ${max_timeout} ]; then
      timeout=$((timeout * 2))
    fi
  done

  echo "Kafka is available."
fi

if [ -z "$CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL" ]; then
  echo "CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL is not defined"
else
  tries=10
  timeout=1
  while true; do
    if wget --spider -q "${CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL}/subjects" 2>/dev/null; then
      echo "Schema registry available."
      break
    fi
    tries=$((tries - 1))
    if [ $tries -eq 0 ]; then
      echo "FAILED TO REACH SCHEMA REGISTRY."
      exit 6
    fi
    echo "Failed to reach schema registry. Retrying in ${timeout} seconds."
    sleep ${timeout}
    if [ ${timeout} -lt ${max_timeout} ]; then
      timeout=$((timeout * 2))
    fi
  done

  echo "Schema registry is available."
fi

if [ $IS_TEMP = 1 ]; then
  /bin/rm -f "$COMMAND_CONFIG_FILE_PATH"
fi
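
The script is driven entirely by environment variables, so it can also be exercised outside the container. A sketch of the variables it reads (the hostnames and values below are illustrative placeholders, not taken from the repository):

```bash
# Illustrative values only; adjust to your environment.
export WAIT_FOR_KAFKA=1                        # anything else skips the checks entirely
export CONNECT_BOOTSTRAP_SERVERS=kafka-1:9092  # brokers probed via kafka-broker-api-versions
export KAFKA_BROKERS=1                         # minimum broker count to wait for (default 3)
export CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL=http://schema-registry:8081
# Optionally point at an existing Kafka client config instead of a generated temp file:
# export COMMAND_CONFIG_FILE_PATH=/etc/kafka/command.properties

./docker/legacy/ensure && echo "Infrastructure ready"
```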

docker/legacy/launch

Lines changed: 51 additions & 0 deletions
@@ -0,0 +1,51 @@
#!/usr/bin/env bash
#
# Copyright 2016 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


# Override this section from the script to include the com.sun.management.jmxremote.rmi.port property.
if [ -z "$KAFKA_JMX_OPTS" ]; then
  export KAFKA_JMX_OPTS="-Dcom.sun.management.jmxremote=true -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false "
fi

# The JMX client needs to be able to connect to java.rmi.server.hostname.
# The default for bridged n/w is the bridged IP so you will only be able to connect from another docker container.
# For host n/w, this is the IP that the hostname on the host resolves to.

# If you have more than one n/w configured, hostname -i gives you all the IPs,
# the default is to pick the first IP (or network).
export KAFKA_JMX_HOSTNAME=${KAFKA_JMX_HOSTNAME:-$(hostname -i | cut -d" " -f1)}

if [ "$KAFKA_JMX_PORT" ]; then
  # This ensures that the "if" section for JMX_PORT in the kafka launch script does not trigger.
  export JMX_PORT=$KAFKA_JMX_PORT
  export KAFKA_JMX_OPTS="$KAFKA_JMX_OPTS -Djava.rmi.server.hostname=$KAFKA_JMX_HOSTNAME -Dcom.sun.management.jmxremote.local.only=false -Dcom.sun.management.jmxremote.rmi.port=$JMX_PORT -Dcom.sun.management.jmxremote.port=$JMX_PORT"
fi

echo "===> Launching ${COMPONENT} ..."
# Add our jar to the classpath so that the custom classes can be loaded first.
# This also makes sure that the CLASSPATH does not start with ":/etc/..."
# Other jars are loaded via the plugin path.
if [ -z "$CLASSPATH" ]; then
  export CLASSPATH="/etc/kafka-connect/jars/*"
fi

if [ -z "$CONNECTOR_PROPERTY_FILE_PREFIX" ]; then
  # execute connector in distributed mode
  exec connect-distributed /etc/"${COMPONENT}"/"${COMPONENT}".properties
else
  # execute connector in standalone mode
  exec connect-standalone /etc/"${COMPONENT}"/"${COMPONENT}".properties /etc/"${COMPONENT}"/"${CONNECTOR_PROPERTY_FILE_PREFIX}"*.properties
fi
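
The launcher is likewise configured through environment variables. A hypothetical invocation (variable names come from the script above; the values are placeholders):

```bash
# Placeholder values for illustration only.
export COMPONENT=kafka-connect-fitbit-source   # selects /etc/${COMPONENT}/${COMPONENT}.properties
export KAFKA_JMX_PORT=9010                     # optional: exposes JMX (and RMI) on this port

# Leave CONNECTOR_PROPERTY_FILE_PREFIX unset to run connect-distributed;
# set it to run connect-standalone with the matching /etc/${COMPONENT}/<prefix>*.properties files.
export CONNECTOR_PROPERTY_FILE_PREFIX=source-fitbit

/etc/confluent/docker/launch
```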
File renamed without changes.
kafka-connect-fitbit-source/Dockerfile-legacy

Lines changed: 63 additions & 0 deletions
@@ -0,0 +1,63 @@
# Copyright 2018 The Hyve
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

FROM --platform=$BUILDPLATFORM gradle:8.9-jdk17 AS builder

RUN mkdir /code
WORKDIR /code

ENV GRADLE_USER_HOME=/code/.gradlecache \
    GRADLE_OPTS="-Dorg.gradle.vfs.watch=false -Djdk.lang.Process.launchMechanism=vfork"

COPY buildSrc /code/buildSrc
COPY ./build.gradle.kts ./settings.gradle.kts ./gradle.properties /code/
COPY kafka-connect-rest-source/build.gradle.kts /code/kafka-connect-rest-source/
COPY kafka-connect-fitbit-source/build.gradle.kts /code/kafka-connect-fitbit-source/

RUN gradle downloadDependencies copyDependencies

COPY ./kafka-connect-rest-source/src/ /code/kafka-connect-rest-source/src
COPY ./kafka-connect-fitbit-source/src/ /code/kafka-connect-fitbit-source/src

RUN gradle jar

FROM confluentinc/cp-kafka-connect-base:7.8.1

USER appuser

LABEL org.opencontainers.image.authors="[email protected]"

LABEL description="Kafka REST API Source connector"

ENV CONNECT_PLUGIN_PATH="/usr/share/java/kafka-connect/plugins" \
    WAIT_FOR_KAFKA="1"

# To isolate the classpath from the plugin path as recommended
COPY --from=builder /code/kafka-connect-rest-source/build/third-party/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-rest-source/
COPY --from=builder /code/kafka-connect-fitbit-source/build/third-party/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-fitbit-source/

COPY --from=builder /code/kafka-connect-rest-source/build/libs/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-rest-source/
COPY --from=builder /code/kafka-connect-rest-source/build/libs/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-fitbit-source/
COPY --from=builder /code/kafka-connect-fitbit-source/build/libs/*.jar ${CONNECT_PLUGIN_PATH}/kafka-connect-fitbit-source/

# Load topics validator
COPY --chown=appuser:appuser ./docker/legacy/ensure /etc/confluent/docker/ensure

# Load modified launcher
COPY --chown=appuser:appuser ./docker/legacy/launch /etc/confluent/docker/launch

# Overwrite the log4j configuration to include Sentry monitoring.
COPY ./docker/log4j.properties.template /etc/confluent/docker/log4j.properties.template
# Copy Sentry monitoring jars.
COPY --from=builder /code/kafka-connect-fitbit-source/build/third-party/sentry-* /etc/kafka-connect/jars
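
A sketch of how this image might be built and inspected locally; the image name and dockerfile path are taken from docker-compose.yml above, and the ls check assumes the plugin path declared in the ENV instruction:

```bash
# Build the legacy image from the repository root
docker build -f kafka-connect-fitbit-source/Dockerfile-legacy \
  -t radarbase/radar-connect-fitbit-source .

# Verify that the connector plugins landed in the configured plugin path
docker run --rm --entrypoint ls radarbase/radar-connect-fitbit-source \
  /usr/share/java/kafka-connect/plugins
```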
