
Commit 76487c4

Merge pull request #2 from superstreamlabs/RND-1914-client-python-examples
client examples python
1 parent 6d81872 commit 76487c4

10 files changed, +1016 -1 lines changed
Lines changed: 110 additions & 0 deletions (aiokafka producer example: Aiven Kafka over SSL)
@@ -0,0 +1,110 @@
import os
import sys
import json
import ssl
import time
import asyncio
import logging
from aiokafka import AIOKafkaProducer

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(name)s - %(message)s",
    handlers=[logging.StreamHandler()]
)
logger = logging.getLogger("aiven.aiokafka.producer")

examples_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(examples_path)
from json_generator import generate_random_json

# === Aiven Kafka Bootstrap ===
BOOTSTRAP_SERVERS = ['your-service-name.aivencloud.com:PORT']

# === SSL Certificate Files from Aiven ZIP ===
SSL_CAFILE = "/path/to/ca.pem"              # Root CA file
SSL_CERTFILE = "/path/to/client.cert.pem"   # Client public certificate
SSL_KEYFILE = "/path/to/client.pk8.pem"     # Client private key


# === Producer Config ===
PRODUCER_NAME_1 = 'aiokafka-producer-1'
PRODUCER_NAME_2 = 'aiokafka-producer-2'
TOPICS_1 = ['example-topic', 'test-1']
TOPICS_2 = ['example-topic', 'test-2']


async def create_producer(client_id: str) -> AIOKafkaProducer:
    """Create and configure AIOKafkaProducer with SSL context"""
    logger.info(f"Creating AIOKafkaProducer for client_id={client_id}")

    ssl_ctx = ssl.create_default_context(cafile=SSL_CAFILE)
    ssl_ctx.load_cert_chain(certfile=SSL_CERTFILE, keyfile=SSL_KEYFILE)

    producer = AIOKafkaProducer(
        bootstrap_servers=BOOTSTRAP_SERVERS,
        client_id=client_id,
        security_protocol="SSL",
        ssl_context=ssl_ctx,
        key_serializer=lambda k: k.encode("utf-8"),
        value_serializer=lambda v: json.dumps(v).encode("utf-8")
    )
    await producer.start()
    return producer


async def send_messages_to_topics(producer, topics, producer_name, num_messages=50):
    """Send JSON messages to the given Kafka topics"""
    logger.info(f"Sending {num_messages} messages using {producer_name} to topics: {topics}")
    successful = 0
    failed = 0

    for i in range(num_messages):
        try:
            message = generate_random_json(min_size_kb=1)
            message["message_number"] = i + 1
            message["producer"] = producer_name
            key = f"msg-{i + 1}"

            for topic in topics:
                await producer.send_and_wait(topic, key=key, value=message)

            successful += 1
        except Exception as e:
            failed += 1
            logger.error(f"Failed to send message {i + 1}: {e}")

        await asyncio.sleep(0.01)

    logger.info(f"{producer_name} Summary: {successful} successful, {failed} failed")


async def main():
    producer1 = producer2 = None
    try:
        producer1 = await create_producer(PRODUCER_NAME_1)
        producer2 = await create_producer(PRODUCER_NAME_2)

        await send_messages_to_topics(producer1, TOPICS_1, PRODUCER_NAME_1)
        await send_messages_to_topics(producer2, TOPICS_2, PRODUCER_NAME_2)

    except Exception as e:
        logger.exception(f"Error during Kafka production: {e}")
    finally:
        logger.info("Shutting down producers...")
        if producer1:
            await producer1.stop()
            logger.info("Producer 1 closed")
        if producer2:
            await producer2.stop()
            logger.info("Producer 2 closed")

        logger.info("Sleeping for 10 minutes...")
        try:
            await asyncio.sleep(600)
        except asyncio.CancelledError:
            logger.info("Cancelled sleep")


if __name__ == "__main__":
    asyncio.run(main())
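
All three examples import generate_random_json from a shared json_generator module added elsewhere in this commit (its diff is not part of this excerpt). A minimal hypothetical sketch of such a helper, assuming only the call signature used above: it returns a dict whose JSON encoding is at least min_size_kb kilobytes.

import random
import string
import time

def generate_random_json(min_size_kb: int = 1) -> dict:
    """Hypothetical stand-in for the json_generator helper the examples import.

    Pads a dict with random ASCII so its JSON encoding is at least
    min_size_kb kilobytes; the real module in this commit may differ.
    """
    filler = "".join(random.choices(string.ascii_letters + string.digits,
                                    k=min_size_kb * 1024))
    return {
        "id": random.randint(1, 1_000_000),
        "timestamp": time.time(),
        "data": filler,
    }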
Lines changed: 106 additions & 0 deletions (aiokafka producer example: Confluent Cloud over SASL_SSL)
@@ -0,0 +1,106 @@
import asyncio
import json
import os
import ssl
import sys
import logging
from aiokafka import AIOKafkaProducer

logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(name)s - %(message)s"
)
logger = logging.getLogger("confluent.aiokafka")

examples_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(examples_path)
from json_generator import generate_random_json

# === Confluent Cloud Config ===
BOOTSTRAP_SERVERS = 'your-cluster-name.region.aws.confluent.cloud:9092'
SASL_USERNAME = 'your-confluent-api-key'
SASL_PASSWORD = 'your-confluent-api-secret'

# === Producer Config ===
PRODUCER_NAME_1 = 'aiokafka-confluent-producer-1'
PRODUCER_NAME_2 = 'aiokafka-confluent-producer-2'
TOPICS_1 = ['example-topic', 'test1']
TOPICS_2 = ['example-topic', 'test2']
# NOTE: max_batch_size is measured in bytes; 150 bytes is smaller than the
# ~1 KB payloads generated below, so each batch will likely hold a single
# message, effectively disabling batching.
BATCH_SIZE = 150
LINGER_MS = 10


async def create_producer(client_id):
    """Create a Confluent Cloud-compatible AIOKafkaProducer using SASL_SSL"""
    logger.info(f"Creating async producer for {client_id}")

    ssl_ctx = ssl.create_default_context()

    producer = AIOKafkaProducer(
        bootstrap_servers=BOOTSTRAP_SERVERS,
        client_id=client_id,
        security_protocol="SASL_SSL",
        ssl_context=ssl_ctx,
        sasl_mechanism="PLAIN",
        sasl_plain_username=SASL_USERNAME,
        sasl_plain_password=SASL_PASSWORD,
        key_serializer=lambda k: k.encode("utf-8"),
        value_serializer=lambda v: json.dumps(v).encode("utf-8"),
        linger_ms=LINGER_MS,
        max_batch_size=BATCH_SIZE,
    )
    await producer.start()
    return producer


async def send_messages_to_topics(producer, topics, producer_name, num_messages=50):
    successful = 0
    failed = 0

    logger.info(f"Sending {num_messages} messages from {producer_name} to {topics}")
    for i in range(num_messages):
        try:
            message = generate_random_json(min_size_kb=1)
            message['message_number'] = i + 1
            message['producer'] = producer_name
            key = f"msg-{i + 1}"

            for topic in topics:
                await producer.send_and_wait(topic, key=key, value=message)

            successful += 1
        except Exception as e:
            failed += 1
            logger.error(f"Failed to send message {i + 1}: {e}")

        await asyncio.sleep(0.01)

    logger.info(f"{producer_name} Summary: {successful} successful, {failed} failed")


async def main():
    producer1 = producer2 = None
    try:
        producer1 = await create_producer(PRODUCER_NAME_1)
        producer2 = await create_producer(PRODUCER_NAME_2)

        await send_messages_to_topics(producer1, TOPICS_1, PRODUCER_NAME_1)
        await send_messages_to_topics(producer2, TOPICS_2, PRODUCER_NAME_2)

    except Exception as e:
        logger.exception(f"Producer error: {e}")
    finally:
        if producer1:
            await producer1.stop()
            logger.info("Producer 1 closed")
        if producer2:
            await producer2.stop()
            logger.info("Producer 2 closed")

        logger.info("Sleeping for 10 minutes...")
        await asyncio.sleep(600)


if __name__ == "__main__":
    asyncio.run(main())
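
These producer examples have no consuming counterpart in this excerpt. For a quick end-to-end check, a hedged sketch of an AIOKafkaConsumer using the same placeholder Confluent Cloud credentials as the producer above; the group id is illustrative.

import asyncio
import json
import ssl
from aiokafka import AIOKafkaConsumer

async def consume(topic: str = "example-topic"):
    # Mirrors the producer's SASL_SSL settings; the credentials below are
    # placeholders, exactly as in the producer example.
    consumer = AIOKafkaConsumer(
        topic,
        bootstrap_servers='your-cluster-name.region.aws.confluent.cloud:9092',
        security_protocol="SASL_SSL",
        ssl_context=ssl.create_default_context(),
        sasl_mechanism="PLAIN",
        sasl_plain_username='your-confluent-api-key',
        sasl_plain_password='your-confluent-api-secret',
        group_id="example-consumer-group",   # illustrative group id
        value_deserializer=lambda v: json.loads(v.decode("utf-8")),
        auto_offset_reset="earliest",
    )
    await consumer.start()
    try:
        async for msg in consumer:
            print(msg.topic, msg.key, msg.value.get("message_number"))
    finally:
        await consumer.stop()

if __name__ == "__main__":
    asyncio.run(consume())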
Lines changed: 133 additions & 0 deletions (aiokafka producer example: AWS MSK with IAM/OAUTHBEARER auth)
@@ -0,0 +1,133 @@
import asyncio
import ssl
import logging
import os
import json
import sys
from aiokafka.abc import AbstractTokenProvider
from aiokafka import AIOKafkaProducer
from aws_msk_iam_sasl_signer import MSKAuthTokenProvider
from botocore.session import get_session

# --- Configuration Section ---
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s [%(levelname)s] %(name)s - %(message)s"
)
log = logging.getLogger("msk_iam_producer")

# === MSK Cluster Config ===
BOOTSTRAP_SERVERS = "b-1-public.your-msk-cluster.amazonaws.com:9198,b-2-public.your-msk-cluster.amazonaws.com:9198"
TOPICS = ["example-topic"]

# === AWS IAM Config ===
AWS_ACCESS_KEY_ID = None
AWS_SECRET_ACCESS_KEY = None
AWS_SESSION_TOKEN = None

# === Producer Config ===
CLIENT_ID = "iam-producer-instance-1"
PRODUCER_NAME = "iam-producer-instance-1"
MAX_BATCH_SIZE = 16384
LINGER_MS = 100

examples_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(examples_path)
from json_generator import generate_random_json


class MSKIAMTokenProvider(AbstractTokenProvider):
    """Custom TokenProvider that uses botocore to handle credential discovery."""
    def __init__(self, access_key=None, secret_key=None, session_token=None):
        self._botocore_session = get_session()
        if access_key and secret_key:
            log.info("Overwriting default credentials with keys provided directly in the script.")
            # The signer resolves credentials itself via the default AWS
            # chain, so explicit keys are exported through the environment
            # to make that chain pick them up.
            os.environ["AWS_ACCESS_KEY_ID"] = access_key
            os.environ["AWS_SECRET_ACCESS_KEY"] = secret_key
            if session_token:
                os.environ["AWS_SESSION_TOKEN"] = session_token
        else:
            log.info("Using botocore's default credential discovery.")

        self.region = self._botocore_session.get_config_variable('region')
        if not self.region:
            raise ValueError("AWS Region not found.")
        log.info(f"Token provider initialized for region: {self.region}")

    async def token(self) -> str:
        # generate_auth_token(region) returns (token, expiry); the second
        # positional parameter of this API is a debug flag, not a session,
        # so only the region is passed here.
        token, _ = MSKAuthTokenProvider.generate_auth_token(self.region)
        return token


async def create_producer(client_id, **aws_creds):
    """Creates and starts an MSK IAM-compatible AIOKafkaProducer."""
    log.info(f"Creating async producer with client.id: {client_id}")
    context = ssl.create_default_context()
    # NOTE: disabling hostname and certificate verification is convenient
    # for a demo but should not be done in production.
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE
    token_provider = MSKIAMTokenProvider(**aws_creds)

    producer = AIOKafkaProducer(
        bootstrap_servers=BOOTSTRAP_SERVERS,
        client_id=client_id,
        security_protocol="SASL_SSL",
        sasl_mechanism="OAUTHBEARER",
        sasl_oauth_token_provider=token_provider,
        ssl_context=context,
        key_serializer=lambda k: k.encode("utf-8"),
        value_serializer=lambda v: json.dumps(v).encode("utf-8"),
        max_batch_size=MAX_BATCH_SIZE,
        linger_ms=LINGER_MS,
    )
    await producer.start()
    return producer


async def send_messages_to_topics(producer, topics, producer_name, num_messages=50):
    """Generates and sends messages using the provided producer instance."""
    successful = 0
    failed = 0
    log.info(f"Sending {num_messages} messages from logical producer '{producer_name}' to topics: {topics}")

    for i in range(num_messages):
        try:
            message = generate_random_json(min_size_kb=1)
            message['message_number'] = i + 1
            message['producer'] = producer_name
            key = f"msg-{i + 1}"
            for topic in topics:
                await producer.send(topic, key=key, value=message)
            log.info(f"Queued message #{i + 1} with key '{key}' for all topics.")
            successful += 1
        except Exception as e:
            failed += 1
            log.error(f"Failed to queue message #{i + 1}: {e}")

    log.info("Flushing final messages...")
    await producer.flush()
    log.info(f"--- '{producer_name}' Summary: {successful} queued, {failed} failed ---")


async def main():
    """The main entry point for the script."""
    producer = None
    try:
        producer = await create_producer(
            CLIENT_ID,
            access_key=AWS_ACCESS_KEY_ID,
            secret_key=AWS_SECRET_ACCESS_KEY,
            session_token=AWS_SESSION_TOKEN
        )
        await send_messages_to_topics(producer, TOPICS, PRODUCER_NAME)
    except Exception as e:
        log.exception(f"A critical error occurred in main: {e}")
    finally:
        if producer:
            log.info("Attempting to stop the producer...")
            try:
                await producer.flush()
                await producer.stop()
                log.info("Producer stopped successfully.")
            except Exception as e:
                log.exception(f"An error occurred while stopping the producer: {e}")


if __name__ == "__main__":
    asyncio.run(main())
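
Before wiring the token provider into a producer, it can help to confirm that MSK IAM token generation works on its own. A small hedged check, assuming ambient AWS credentials and using "us-east-1" as an illustrative region:

from aws_msk_iam_sasl_signer import MSKAuthTokenProvider

# Resolves credentials via the default AWS chain and signs a token for
# the given region; generate_auth_token returns the token and its expiry.
token, expiry_ms = MSKAuthTokenProvider.generate_auth_token("us-east-1")
print("token length:", len(token), "expiry_ms:", expiry_ms)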
