
Commit c0e735e

Merge branch 'main' into develop

2 parents: 3982928 + 0aa9cdf

File tree: 7 files changed (+158 −48 lines)


README.md

Lines changed: 18 additions & 0 deletions

@@ -13,6 +13,24 @@ async def test() -> None:
     print("The best task ever!")
 ```

+## Non-obvious things
+
+You can configure the Kafka producer and consumer with the special methods `configure_producer` and `configure_consumer`.
+Example:
+```python
+from taskiq_aio_kafka import AioKafkaBroker
+
+broker = AioKafkaBroker(bootstrap_servers="localhost")
+
+# Configure the producer: you can set any parameter from the
+# base AIOKafkaProducer, except `loop` and `bootstrap_servers`.
+broker.configure_producer(request_timeout_ms=100000)
+
+# Configure the consumer: you can set any parameter from the
+# base AIOKafkaConsumer, except `loop` and `bootstrap_servers`.
+broker.configure_consumer(group_id="the best group ever.")
+```
+
 ## Configuration

 AioKafkaBroker parameters:
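For orientation beyond the README snippet, here is a minimal end-to-end sketch. It assumes taskiq's standard `@broker.task` decorator and `.kiq()` call (which are not part of this diff) and a Kafka broker reachable at `localhost`; the task itself is hypothetical:

```python
import asyncio

from taskiq_aio_kafka import AioKafkaBroker

broker = AioKafkaBroker(bootstrap_servers="localhost")

# Tuning must happen before startup(): the broker.py change below builds
# the AIOKafkaProducer/AIOKafkaConsumer inside startup() from these params.
broker.configure_producer(request_timeout_ms=100000)
broker.configure_consumer(group_id="example-group")


@broker.task
async def best_task_ever() -> None:
    """Hypothetical task used only for illustration."""
    print("The best task ever!")


async def main() -> None:
    await broker.startup()
    await best_task_ever.kiq()  # enqueue the task through Kafka
    await broker.shutdown()


asyncio.run(main())
```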

poetry.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default.

pyproject.toml

Lines changed: 1 addition & 0 deletions

@@ -27,6 +27,7 @@ packages = [{ include = "taskiq_aio_kafka" }]
 python = "^3.7"
 taskiq = "^0"
 aiokafka = "^0.8.0"
+pydantic = "^1.10.7"

 [tool.poetry.group.dev.dependencies]
 pytest = "^7.1.2"
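The new `pydantic = "^1.10.7"` pin matters because the parameter models introduced below rely on pydantic v1's `.dict()` method (pydantic v2 renamed it to `.model_dump()`). A minimal sketch of that mechanism, using a hypothetical stand-in model:

```python
from pydantic import BaseModel  # pydantic v1 API, matching the "^1.10.7" pin


class ProducerParams(BaseModel):
    """Hypothetical stand-in for KafkaProducerParameters."""

    request_timeout_ms: int = 40000
    linger_ms: int = 0


params = ProducerParams(request_timeout_ms=100000)
# .dict() turns the model into plain kwargs that can be splatted into
# AIOKafkaProducer(**params.dict()); overridden and default fields alike.
print(params.dict())  # {'request_timeout_ms': 100000, 'linger_ms': 0}
```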

taskiq_aio_kafka/broker.py

Lines changed: 51 additions & 25 deletions

@@ -1,14 +1,15 @@
 import asyncio
 import pickle  # noqa: S403
 from logging import getLogger
-from typing import AsyncGenerator, Callable, List, Optional, Set, TypeVar, Union
+from typing import Any, AsyncGenerator, Callable, List, Optional, Set, TypeVar, Union

 from aiokafka import AIOKafkaConsumer, AIOKafkaProducer
 from kafka.admin import KafkaAdminClient, NewTopic
 from taskiq import AsyncResultBackend, BrokerMessage
 from taskiq.abc.broker import AsyncBroker

 from taskiq_aio_kafka.exceptions import WrongAioKafkaBrokerParametersError
+from taskiq_aio_kafka.models import KafkaConsumerParameters, KafkaProducerParameters

 _T = TypeVar("_T")  # noqa: WPS111

@@ -86,21 +87,12 @@ def __init__(  # noqa: WPS211
             replication_factor=1,
         )

-        self._aiokafka_producer: AIOKafkaProducer = (
-            aiokafka_producer
-            or AIOKafkaProducer(
-                bootstrap_servers=self._bootstrap_servers,
-                loop=self._loop,
-            )
+        self._aiokafka_producer_params: KafkaProducerParameters = (
+            KafkaProducerParameters()
         )

-        self._aiokafka_consumer: AIOKafkaConsumer = (
-            aiokafka_consumer
-            or AIOKafkaConsumer(
-                self._kafka_topic.name,
-                bootstrap_servers=self._bootstrap_servers,
-                loop=self._loop,
-            )
+        self._aiokafka_consumer_params: KafkaConsumerParameters = (
+            KafkaConsumerParameters()
         )

         self._kafka_admin_client: KafkaAdminClient = (
@@ -118,6 +110,30 @@ def __init__(  # noqa: WPS211
         self._is_producer_started = False
         self._is_consumer_started = False

+    def configure_producer(self, **producer_parameters: Any) -> None:
+        """Configure kafka producer.
+
+        You can pass here any configuration parameters
+        accepted by the kafka producer.
+
+        :param producer_parameters: producer parameters kwargs.
+        """
+        self._aiokafka_producer_params = KafkaProducerParameters(
+            **producer_parameters,
+        )
+
+    def configure_consumer(self, **consumer_parameters: Any) -> None:
+        """Configure kafka consumer.
+
+        You can pass here any configuration parameters
+        accepted by the kafka consumer.
+
+        :param consumer_parameters: consumer parameters kwargs.
+        """
+        self._aiokafka_consumer_params = KafkaConsumerParameters(
+            **consumer_parameters,
+        )
+
     async def startup(self) -> None:
         """Setup AIOKafkaProducer, AIOKafkaConsumer and kafka topics.

@@ -127,18 +143,29 @@ async def startup(self) -> None:
         if there are no producer and consumer passed.
         """
         await super().startup()
-
-        is_topic_available: bool = bool(
-            self._kafka_admin_client.describe_topics([self._kafka_topic.name]),
+        available_condition: bool = (
+            self._kafka_topic.name not in self._kafka_admin_client.list_topics()
         )
-        if not is_topic_available:
+        if available_condition:
             self._kafka_admin_client.create_topics(
                 new_topics=[self._kafka_topic],
                 validate_only=False,
             )
-
+        self._aiokafka_producer = AIOKafkaProducer(
+            bootstrap_servers=self._bootstrap_servers,
+            loop=self._loop,
+            **self._aiokafka_producer_params.dict(),
+        )
         await self._aiokafka_producer.start()
+
         if self.is_worker_process:
+            self._aiokafka_consumer = AIOKafkaConsumer(
+                self._kafka_topic.name,
+                bootstrap_servers=self._bootstrap_servers,
+                loop=self._loop,
+                **self._aiokafka_consumer_params.dict(),
+            )
+
             await self._aiokafka_consumer.start()
             self._is_consumer_started = True

@@ -148,10 +175,10 @@ async def shutdown(self) -> None:
         """Close all connections on shutdown."""
         await super().shutdown()

-        if self._aiokafka_producer:
+        if self._is_producer_started:
             await self._aiokafka_producer.stop()

-        if self._aiokafka_consumer:
+        if self._is_consumer_started:
             await self._aiokafka_consumer.stop()

         topic_delete_condition: bool = all(
@@ -183,12 +210,11 @@ async def kick(self, message: BrokerMessage) -> None:
         if not self._is_producer_started:
             raise ValueError("Please run startup before kicking.")

-        kafka_message: bytes = pickle.dumps(message)
         topic_name: str = self._kafka_topic.name

-        await self._aiokafka_producer.send(
+        await self._aiokafka_producer.send(  # type: ignore
             topic=topic_name,
-            value=kafka_message,
+            value=message.message,
         )

     async def listen(
@@ -205,5 +231,5 @@ async def listen(
         if not self._is_consumer_started:
             raise ValueError("Please run startup before listening.")

-        async for raw_kafka_message in self._aiokafka_consumer:
+        async for raw_kafka_message in self._aiokafka_consumer:  # type: ignore
             yield raw_kafka_message.value
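The key behavioral change in `startup()` is that the producer and consumer are now constructed there from the stored parameter models, rather than in `__init__`. A minimal sketch of the resulting ordering constraint, assuming a Kafka broker at `localhost:9092`; everything except the `AioKafkaBroker` API shown in this diff is illustrative:

```python
import asyncio

from taskiq_aio_kafka import AioKafkaBroker


async def main() -> None:
    broker = AioKafkaBroker(bootstrap_servers="localhost:9092")

    # This only stores a KafkaProducerParameters model; no client exists yet.
    broker.configure_producer(enable_idempotence=True)

    # startup() creates the topic if it is missing, then builds
    # AIOKafkaProducer (and, in worker processes, AIOKafkaConsumer)
    # from the stored parameter models.
    await broker.startup()
    try:
        # Calling configure_producer() here would have no effect until the
        # broker is restarted, since the producer is already built.
        ...
    finally:
        await broker.shutdown()


asyncio.run(main())
```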

taskiq_aio_kafka/models.py

Lines changed: 76 additions & 0 deletions

@@ -0,0 +1,76 @@
+from typing import Any, Callable, Optional, Union
+
+from aiokafka import __version__
+from kafka.coordinator.assignors.roundrobin import RoundRobinPartitionAssignor
+from kafka.partitioner.default import DefaultPartitioner
+from pydantic import BaseModel
+
+
+class KafkaProducerParameters(BaseModel):
+    """Parameters to kafka producer."""
+
+    client_id: Optional[str] = None
+    metadata_max_age_ms: int = 300000
+    request_timeout_ms: int = 40000
+    api_version: str = "auto"
+    acks: Optional[Union[str, int]] = 1
+    key_serializer: Optional[Callable[..., bytes]] = None
+    value_serializer: Optional[Callable[..., bytes]] = None
+    compression_type: Optional[str] = None
+    max_batch_size: int = 16384
+    partitioner: Callable[..., Any] = DefaultPartitioner()
+    max_request_size: int = 1048576
+    linger_ms: int = 0
+    send_backoff_ms: int = 100
+    retry_backoff_ms: int = 100
+    security_protocol: str = "PLAINTEXT"
+    ssl_context: Optional[Any] = None
+    connections_max_idle_ms: int = 540000
+    enable_idempotence: bool = False
+    transactional_id: Optional[Any] = None
+    transaction_timeout_ms: int = 60000
+    sasl_mechanism: str = "PLAIN"
+    sasl_plain_password: Optional[str] = None
+    sasl_plain_username: Optional[str] = None
+    sasl_kerberos_service_name: Any = "kafka"
+    sasl_kerberos_domain_name: Any = None
+    sasl_oauth_token_provider: Any = None
+
+
+class KafkaConsumerParameters(BaseModel):
+    """Parameters to kafka consumer."""
+
+    client_id: str = "aiokafka-" + __version__  # noqa: WPS336
+    group_id: Optional[str] = None
+    key_deserializer: Optional[Callable[..., Any]] = None
+    value_deserializer: Optional[Callable[..., Any]] = None
+    fetch_max_wait_ms: int = 500
+    fetch_max_bytes: int = 52428800
+    fetch_min_bytes: int = 1
+    max_partition_fetch_bytes: int = 1 * 1024 * 1024  # noqa: WPS345
+    request_timeout_ms: int = 40 * 1000  # noqa: WPS432
+    retry_backoff_ms: int = 100
+    auto_offset_reset: str = "latest"
+    enable_auto_commit: bool = True
+    auto_commit_interval_ms: int = 5000
+    check_crcs: bool = True
+    metadata_max_age_ms: int = 5 * 60 * 1000
+    partition_assignment_strategy: Any = (RoundRobinPartitionAssignor,)
+    max_poll_interval_ms: int = 300000
+    rebalance_timeout_ms: Optional[int] = None
+    session_timeout_ms: int = 10000
+    heartbeat_interval_ms: int = 3000
+    consumer_timeout_ms: int = 200
+    max_poll_records: Optional[int] = None
+    ssl_context: Optional[Any] = None
+    security_protocol: str = "PLAINTEXT"
+    api_version: str = "auto"
+    exclude_internal_topics: bool = True
+    connections_max_idle_ms: int = 540000
+    isolation_level: str = "read_uncommitted"
+    sasl_mechanism: str = "PLAIN"
+    sasl_plain_password: Optional[str] = None
+    sasl_plain_username: Optional[str] = None
+    sasl_kerberos_service_name: Optional[str] = "kafka"
+    sasl_kerberos_domain_name: Optional[str] = None
+    sasl_oauth_token_provider: Optional[str] = None
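These models mirror aiokafka's constructor defaults, so a freshly constructed model splatted into the client reproduces stock behavior, and each `configure_*` kwarg simply overrides one field. A short sketch of that usage, with illustrative values:

```python
from taskiq_aio_kafka.models import KafkaConsumerParameters, KafkaProducerParameters

producer_params = KafkaProducerParameters(request_timeout_ms=100000)
consumer_params = KafkaConsumerParameters(group_id="workers")

# The broker splats these dicts into AIOKafkaProducer/AIOKafkaConsumer,
# which is why every default above must track aiokafka's own defaults.
print(producer_params.dict()["request_timeout_ms"])  # 100000
print(consumer_params.dict()["group_id"])  # workers
```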

tests/conftest.py

Lines changed: 5 additions & 1 deletion

@@ -1,5 +1,6 @@
 import os
 from typing import AsyncGenerator
+from uuid import uuid4

 import pytest
 from aiokafka import AIOKafkaConsumer, AIOKafkaProducer
@@ -35,7 +36,7 @@ def base_topic_name() -> str:

     :returns: topic name.
     """
-    return "taskiq_topic"
+    return uuid4().hex


 @pytest.fixture()
@@ -115,6 +116,7 @@ async def broker(
     kafka_url: str,
     test_kafka_producer: AIOKafkaProducer,
     test_kafka_consumer: AIOKafkaConsumer,
+    base_topic: NewTopic,
 ) -> AsyncGenerator[AioKafkaBroker, None]:
     """Yield new broker instance.

@@ -125,13 +127,15 @@ async def broker(
     :param kafka_url: url to kafka.
     :param test_kafka_producer: custom AIOKafkaProducer.
     :param test_kafka_consumer: custom AIOKafkaConsumer.
+    :param base_topic: base topic.

     :yields: broker.
     """
     broker = AioKafkaBroker(
         bootstrap_servers=kafka_url,
         aiokafka_producer=test_kafka_producer,
         aiokafka_consumer=test_kafka_consumer,
+        kafka_topic=base_topic,
         delete_topic_on_shutdown=True,
     )
     broker.is_worker_process = True
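The switch from the fixed `taskiq_topic` name to `uuid4().hex`, together with passing `base_topic` into the broker fixture alongside `delete_topic_on_shutdown=True`, isolates test runs from each other's leftover messages. The naming scheme in isolation (output is random per call):

```python
from uuid import uuid4

# Each call yields a fresh 32-character hex name, so repeated or
# concurrent test runs never share a Kafka topic.
print(uuid4().hex)  # e.g. '9f1c2ab34d5e6f708192a3b4c5d6e7f8'
print(uuid4().hex)  # a different name every time
```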

tests/test_broker.py

Lines changed: 6 additions & 21 deletions

@@ -48,12 +48,7 @@ async def test_kick_success(broker: AioKafkaBroker) -> None:
         get_first_task(broker),
         timeout=1,
     )
-    assert pickle.dumps(message_to_send) == received_message_bytes
-
-    received_message: BrokerMessage = pickle.loads(
-        received_message_bytes,
-    )
-    assert message_to_send == received_message
+    assert received_message_bytes == message_to_send.message


 @pytest.mark.anyio
@@ -78,14 +73,13 @@ async def test_startup(
         str
     ] = broker_without_arguments._kafka_admin_client.list_topics()  # noqa: WPS437

-    assert base_topic_name in all_kafka_topics
+    assert broker_without_arguments._kafka_topic.name in all_kafka_topics


 @pytest.mark.anyio
 async def test_listen(
     broker: AioKafkaBroker,
     test_kafka_producer: AIOKafkaProducer,
-    base_topic_name: str,
 ) -> None:
     """Test that message are read correctly.
@@ -94,8 +88,8 @@ async def test_listen(

     :param broker: current broker.
     :param test_kafka_producer: AIOKafkaProducer.
-    :param base_topic_name: topic name.
     """
+    await test_kafka_producer.start()
     task_id: str = uuid4().hex
     task_name: str = uuid4().hex
     message: bytes = pickle.dumps(uuid4().hex)
@@ -109,22 +103,13 @@ async def test_listen(
     )

     await test_kafka_producer.send(
-        topic=base_topic_name,
-        value=pickle.dumps(message_to_send),
+        topic=broker._kafka_topic.name,
+        value=message_to_send.message,
     )

     received_message_bytes: bytes = await asyncio.wait_for(
         get_first_task(broker),
         timeout=1,
     )

-    assert pickle.dumps(message_to_send) == received_message_bytes
-
-    received_message: BrokerMessage = pickle.loads(
-        received_message_bytes,
-    )
-
-    assert received_message.message == message
-    assert received_message.labels == labels
-    assert received_message.task_id == task_id
-    assert received_message.task_name == task_name
+    assert received_message_bytes == message_to_send.message
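The rewritten assertions reflect the wire-format change from `broker.py`: only `message.message` (the raw payload bytes) travels through Kafka, not a pickled `BrokerMessage`. A sketch of the consequence, assuming the `BrokerMessage` fields used in these tests:

```python
import pickle

from taskiq import BrokerMessage

payload: bytes = pickle.dumps("hello")
message = BrokerMessage(
    task_id="id",
    task_name="name",
    message=payload,
    labels={},
)

# Only the payload bytes are sent, so task_id, task_name, and labels
# are no longer recoverable from what the consumer yields.
assert message.message == payload
```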
