Skip to content

Commit 26ce99f

Browse files
authored
Merge pull request #108 from jembi/PLAT-346-kafak-topic-partitioning
Plat 346 kafka topic partitioning
2 parents 27ec7e0 + 3123910 commit 26ce99f

File tree

10 files changed

+137
-14
lines changed

10 files changed

+137
-14
lines changed

.env.dev

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -123,6 +123,10 @@ KAFKA_CPU_LIMIT=0.8
123123
KAFKA_CPU_RESERVE=0.05
124124
KAFKA_MEMORY_LIMIT=3G
125125
KAFKA_MEMORY_RESERVE=500M
126+
KAFKA_PARTITIONS=3
127+
KAFKA_INSTANCES=1
128+
# Topics should be comma separated
129+
KAFKA_TOPICS=2xx,reprocess,3xx
126130

127131
# Kafdrop
128132

.env.local

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -74,3 +74,14 @@ JS_REPORT_LICENSE_KEY=
7474
# MAKE SURE YOU HAVE RUN 'set-permissions.sh' SCRIPT BEFORE AND AFTER RUNNING JS REPORT
7575
JS_REPORT_DEV_MOUNT=false
7676
JS_REPORT_PACKAGE_PATH=
77+
78+
# Message Bus - Kafka
79+
80+
KAFKA_PARTITIONS=1
81+
KAFKA_INSTANCES=1
82+
# Topics should be comma separated
83+
KAFKA_TOPICS=2xx,reprocess
84+
85+
# Kafdrop
86+
87+

.env.remote

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,3 +41,10 @@ POSTGRES_REPLICA_SET=postgres-1:5432,postgres-2:5432,postgres-3:5432
4141
MONGO_SET_COUNT=3
4242
OPENHIM_MONGO_URL=mongodb://mongo-1:27017,mongo-2:27017,mongo-3:27017/openhim?replicaSet=mongo-set
4343
OPENHIM_MONGO_ATNAURL=mongodb://mongo-1:27017,mongo-2:27017,mongo-3:27017/openhim?replicaSet=mongo-set
44+
45+
# Message Bus - Kafka
46+
47+
KAFKA_PARTITIONS=3
48+
KAFKA_INSTANCES=3
49+
# Topics should be comma separated
50+
KAFKA_TOPICS=2xx,reprocess
Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,10 @@
1+
version: '3.9'

services:
  # One-shot helper that blocks until Kafdrop answers on port 9013,
  # used by swarm.sh to wait for the Kafka stack before importing config.
  await-helper:
    image: jembi/await-helper:1.0.1
    deploy:
      replicas: 1
      restart_policy:
        condition: none # run once; do not restart on completion
    command: '-k http://kafdrop:9013'
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
version: '3.9'

services:
  # One-shot importer: waits for Kafdrop, then runs kafka.js to create
  # the topics listed in KAFKA_TOPICS.
  message-bus-kafka-config-importer:
    image: jembi/instantohie-config-importer
    deploy:
      restart_policy:
        condition: none # run to completion exactly once
    environment:
      KAFDROP_HOST: kafdrop
      KAFDROP_PORT: 9013
      KAFKA_PARTITIONS: ${KAFKA_PARTITIONS:-3}
      KAFKA_TOPICS: ${KAFKA_TOPICS}
    command: sh -c "wait-on -t 60000 http-get://kafdrop:9013/topic && node /kafka.js && echo 'success';"
    configs:
      - source: kafka.js
        target: /kafka.js

configs:
  kafka.js:
    file: ./kafka.js
    # Digest suffix forces swarm to pick up changed config content;
    # set by config::set_config_digests in swarm.sh.
    name: kafka-config.js-${kafka_config_DIGEST:?err}
    labels:
      name: ethiopia
Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
'use strict'
2+
3+
const http = require('http')
4+
5+
const KAFDROP_HOST = process.env.KAFDROP_HOST || 'kafdrop'
6+
const KAFDROP_PORT = process.env.KAFDROP_PORT || '9013'
7+
const KAFKA_PARTITIONS = process.env.KAFKA_PARTITIONS || 3
8+
const KAFKA_TOPICS= process.env.KAFKA_TOPICS
9+
10+
const createKafkaTopic = topic => {
11+
const options = {
12+
protocol: 'http:',
13+
hostname: KAFDROP_HOST,
14+
path: '/topic',
15+
port: KAFDROP_PORT,
16+
method: 'POST',
17+
headers: {
18+
'Content-Type': 'application/x-www-form-urlencoded'
19+
}
20+
}
21+
22+
const req = http.request(options, res => {
23+
if (res.statusCode != 200) {
24+
throw Error(`Failed to create topic - ${topic}`)
25+
}
26+
console.log(`Created topic - ${topic}`)
27+
})
28+
req.write(`name=${topic}&partitionsNumber=${KAFKA_PARTITIONS}&replicationFactor=1`)
29+
req.end()
30+
}
31+
32+
console.log('Creating kafka topics......................');
33+
34+
(() => {
35+
if (KAFKA_TOPICS) {
36+
KAFKA_TOPICS.split(',').forEach(topic => createKafkaTopic(topic))
37+
} else {
38+
console.log('Topics not created: KAFKA_TOPICS variable invalid')
39+
process.exit(1)
40+
}
41+
})();

message-bus-kafka/package-metadata.json

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,8 @@
88
"KAFKA_CPU_RESERVE": "0.05",
99
"KAFKA_MEMORY_LIMIT": "3G",
1010
"KAFKA_MEMORY_RESERVE": "500M",
11+
"KAFKA_PARTITIONS": "3",
12+
"KAFKA_TOPICS": "",
1113
"KAFDROP_CPU_LIMIT": "0.8",
1214
"KAFDROP_CPU_RESERVE": "0.05",
1315
"KAFDROP_MEMORY_LIMIT": "3G",

message-bus-kafka/swarm.sh

100644100755
Lines changed: 30 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
#!/bin/bash
22

3-
statefulNodes=${STATEFUL_NODES:-"cluster"}
3+
STATEFUL_NODES=${STATEFUL_NODES:-"cluster"}
44

55
COMPOSE_FILE_PATH=$(
66
cd "$(dirname "${BASH_SOURCE[0]}")" || exit
@@ -13,7 +13,10 @@ ROOT_PATH="${COMPOSE_FILE_PATH}/.."
1313
. "${ROOT_PATH}/utils/docker-utils.sh"
1414
. "${ROOT_PATH}/utils/log.sh"
1515

16-
if [[ $statefulNodes == "cluster" ]]; then
16+
readonly KAFKA_INSTANCES=${KAFKA_INSTANCES:-1}
17+
export KAFKA_INSTANCES
18+
19+
if [[ $STATEFUL_NODES == "cluster" ]]; then
1720
log info "Running Message Bus Kafka package in Cluster node mode"
1821
kafkaClusterComposeParam="-c ${COMPOSE_FILE_PATH}/docker-compose.cluster.yml"
1922
else
@@ -30,32 +33,46 @@ else
3033
fi
3134

3235
if [[ $1 == "init" ]] || [[ $1 == "up" ]]; then
36+
config::set_config_digests "${COMPOSE_FILE_PATH}"/importer/docker-compose.config.yml
37+
3338
try "docker stack deploy -c ${COMPOSE_FILE_PATH}/docker-compose.yml $kafkaClusterComposeParam $kafkaDevComposeParam instant" "Failed to deploy Message Bus Kafka"
39+
40+
config::await_service_running "kafka" "${COMPOSE_FILE_PATH}"/docker-compose.await-helper.yml "${KAFKA_INSTANCES}"
41+
42+
try "docker stack deploy -c ${COMPOSE_FILE_PATH}/importer/docker-compose.config.yml instant" "Failed to deploy Message Bus Kafka"
43+
44+
config::remove_stale_service_configs "${COMPOSE_FILE_PATH}"/importer/docker-compose.config.yml "ethiopia"
45+
config::remove_config_importer message-bus-kafka-config-importer
3446
elif [[ $1 == "down" ]]; then
3547
try "docker service scale instant_zookeeper-1=0 instant_kafdrop=0" "Failed to scale down zookeeper and kafdrop"
3648
# You cannot scale a global service so we have to remove it
3749
try "docker service rm instant_kafka" "Failed to remove kafka"
38-
if [[ $statefulNodes == "cluster" ]]; then
50+
if [[ $STATEFUL_NODES == "cluster" ]]; then
3951
try "docker service scale instant_zookeeper-2=0" "Failed to scale down zookeeper cluster"
4052
try "docker service scale instant_zookeeper-3=0" "Failed to scale down zookeeper cluster"
4153
fi
4254
elif [[ $1 == "destroy" ]]; then
43-
try "docker service rm instant_zookeeper-1 instant_kafka instant_kafdrop" "Failed to destroy kafka"
44-
4555
log info "Allow services to shut down before deleting volumes"
4656

47-
config::await_service_removed instant_zookeeper-1
48-
config::await_service_removed instant_kafka
49-
config::await_service_removed instant_kafdrop
57+
docker::service_destroy zookeeper-1
58+
docker::service_destroy kafka
59+
docker::service_destroy kafdrop
60+
61+
docker::try_remove_volume zookeeper-1-volume
62+
docker::try_remove_volume kafka-volume
5063

51-
try "docker volume rm instant_kafka-volume" "Failed to remove kafka volume"
52-
try "docker volume rm instant_zookeeper-1-volume" "Failed to remove zookeeper volume"
64+
if [[ $STATEFUL_NODES == "cluster" ]]; then
65+
docker::service_destroy zookeeper-2
66+
docker::service_destroy zookeeper-3
5367

54-
if [[ $statefulNodes == "cluster" ]]; then
55-
try "docker service rm instant_zookeeper-2" "Failed to remove zookeeper cluster volumes"
56-
try "docker service rm instant_zookeeper-3" "Failed to remove zookeeper cluster volumes"
68+
docker::try_remove_volume zookeeper-2-volume
69+
docker::try_remove_volume zookeeper-3-volume
5770
log notice "Volumes are only deleted on the host on which the command is run. Kafka volumes on other nodes are not deleted"
5871
fi
72+
73+
if ! docker service rm instant_message-bus-kafka-config-importer; then
74+
log warn "message-bus-kafka-config-importer not removed... it's possible the service has already been removed"
75+
fi
5976
else
6077
log error "Valid options are: init, up, down, or destroy"
6178
fi

utils/config-utils.sh

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -164,6 +164,12 @@ config::remove_config_importer() {
164164
local -r start_time=$(date +%s)
165165

166166
local config_importer_state
167+
168+
if [[ -z $(docker service ps instant_"$config_importer_service_name") ]]; then
169+
log info "instant_$config_importer_service_name service cannot be removed as it does not exist!"
170+
exit 0
171+
fi
172+
167173
config_importer_state=$(docker service ps instant_"$config_importer_service_name" --format "{{.CurrentState}}")
168174
until [[ $config_importer_state == *"Complete"* ]]; do
169175
config::timeout_check "$start_time" "$config_importer_service_name to run" "$exit_time" "$warning_time"
@@ -192,6 +198,7 @@ config::await_service_removed() {
192198
config::timeout_check "$start_time" "${SERVICE_NAME} to be removed"
193199
sleep 1
194200
done
201+
log info "Service $SERVICE_NAME successfully removed"
195202
}
196203

197204
# Waits for the provided service to join the network

utils/docker-utils.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -107,7 +107,7 @@ docker::try_remove_volume() {
107107
local start_time
108108
start_time=$(date +%s)
109109
until [[ -n "$(docker volume rm instant_"${VOLUME_NAME}" 2>/dev/null)" ]]; do
110-
config::timeout_check "${start_time}" "${VOLUME_NAME} to be removed" "10" "20"
110+
config::timeout_check "${start_time}" "${VOLUME_NAME} to be removed" "20" "10"
111111
sleep 1
112112
done
113113
overwrite "Waiting for volume ${VOLUME_NAME} to be removed... Done"

0 commit comments

Comments
 (0)