Commit 684ca89

TimPansino authored and hmstepanek committed
Kafka Expanded Version Testing (#629)
* Expand kafka testing range to older versions
* Remove confluent-kafka 1.5
* Remove confluent-kafka 1.5
* Fix flakey confluent-kafka tests
* Fixup: fix flakey tests
* Fixup: fix kafka-python flakey tests
* Fixup: fix kafka-python flakey tests
* Remove confluent-kafka 1.8 tests

  The following is an unresolved issue occurring in the setup of confluent-kafka 1.8.2: asweigart/PyGetWindow#9

Co-authored-by: Hannah Stepanek <[email protected]>
1 parent 8beb0cc commit 684ca89

File tree: 7 files changed, +87 −46 lines changed


tests/messagebroker_confluentkafka/conftest.py
Lines changed: 4 additions & 2 deletions

@@ -63,7 +63,7 @@ def skip_if_not_serializing(client_type):
 
 
 @pytest.fixture(scope="function")
-def producer(client_type, json_serializer):
+def producer(topic, client_type, json_serializer):
     from confluent_kafka import Producer, SerializingProducer
 
     if client_type == "cimpl":

@@ -86,7 +86,9 @@ def producer(client_type, json_serializer):
         )
 
     yield producer
-    producer.purge()
+
+    if hasattr(producer, "purge"):
+        producer.purge()
 
 
 @pytest.fixture(scope="function")
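
The guard around purge() is what lets one fixture serve every confluent-kafka version in the new matrix: clients that expose Producer.purge() drop any locally queued messages at teardown, while versions that lack the method simply skip the call. A minimal standalone sketch of that pattern (the fixture name and broker address are illustrative, not taken from the test suite):

import pytest


@pytest.fixture(scope="function")
def example_producer():
    # Hypothetical fixture illustrating the guarded-teardown pattern above.
    from confluent_kafka import Producer

    producer = Producer({"bootstrap.servers": "localhost:9092"})
    yield producer
    # Older confluent-kafka releases in the matrix may not implement purge(),
    # so only call it when it exists.
    if hasattr(producer, "purge"):
        producer.purge()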

tests/messagebroker_confluentkafka/test_consumer.py
Lines changed: 9 additions & 1 deletion

@@ -165,7 +165,15 @@ def _consume():
     @cache_kafka_consumer_headers()
     def _test():
         # Start the transaction but don't exit it.
-        consumer.poll(0.5)
+        # Keep polling until we get the record or the timeout is exceeded.
+        timeout = 10
+        attempts = 0
+        record = None
+        while not record and attempts < timeout:
+            record = consumer.poll(0.5)
+            if not record:
+                attempts += 1
+                continue
 
     _test()
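
This retry loop is the heart of the flakiness fix: a single consumer.poll(0.5) returns None whenever the broker has not delivered the test record yet, so the test now keeps polling, up to 10 attempts of 0.5 s each, before giving up. Pulled out as a helper, the pattern looks roughly like this (the helper is illustrative; the tests inline it instead):

def poll_until_record(consumer, max_attempts=10, poll_timeout=0.5):
    """Poll a confluent-kafka consumer until a record arrives or attempts run out."""
    record = None
    attempts = 0
    while not record and attempts < max_attempts:
        record = consumer.poll(poll_timeout)  # returns None if nothing arrived in time
        if not record:
            attempts += 1
    return record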

tests/messagebroker_confluentkafka/test_serialization.py
Lines changed: 14 additions & 4 deletions

@@ -107,8 +107,12 @@ def test_deserialization_errors(skip_if_not_serializing, monkeypatch, topic, pro
     @background_task()
     def test():
         with pytest.raises(error_cls):
-            record = consumer.poll(0.5)
-            assert record is not None, "No record consumed."
+            timeout = 10
+            attempts = 0
+            while attempts < timeout:
+                if not consumer.poll(0.5):
+                    attempts += 1
+                    continue
 
     test()

@@ -128,14 +132,20 @@ def _test():
         send_producer_message()
 
         record_count = 0
-        while True:
+
+        timeout = 10
+        attempts = 0
+        record = None
+        while not record and attempts < timeout:
             record = consumer.poll(0.5)
             if not record:
-                break
+                attempts += 1
+                continue
             assert not record.error()
 
             assert record.value() == {"foo": 1}
             record_count += 1
+        consumer.poll(0.5)  # Exit the transaction.
 
         assert record_count == 1, "Incorrect count of records consumed: %d. Expected 1." % record_count
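
The trailing consumer.poll(0.5) matters because of how the agent instruments consumers: judging by the comments in these tests ("Start the transaction but don't exit it." and "Exit the transaction."), the message transaction recorded for a consumed record is closed on the next poll() call, so the extra poll lets the validators see a completed transaction. A compact sketch of that consume-then-poll-again shape, assuming a consumer fixture like the ones above:

record = consumer.poll(0.5)   # instrumented poll: the message transaction starts here
assert record is not None and not record.error()
# ... assertions on record.key() / record.value() ...
consumer.poll(0.5)            # polling again ends the transaction started above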

tests/messagebroker_kafkapython/conftest.py
Lines changed: 9 additions & 8 deletions

@@ -138,11 +138,6 @@ def consumer(topic, producer, client_type, json_deserializer, json_callable_dese
         group_id="test",
     )
 
-    # The first time the kafka consumer is created and polled, it returns a StopIterator
-    # exception. To by-pass this, loop over the consumer before using it.
-    # NOTE: This seems to only happen in Python2.7.
-    for record in consumer:
-        pass
     yield consumer
     consumer.close()

@@ -230,9 +225,15 @@ def _test():
         send_producer_message()
 
         record_count = 0
-        for record in consumer:
-            assert deserialize(record.value) == {"foo": 1}
-            record_count += 1
+
+        timeout = 10
+        attempts = 0
+        record = None
+        while not record and attempts < timeout:
+            for record in consumer:
+                assert deserialize(record.value) == {"foo": 1}
+                record_count += 1
+            attempts += 1
 
         assert record_count == 1, "Incorrect count of records consumed: %d. Expected 1." % record_count
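
kafka-python consumers are driven by iteration rather than poll(): a KafkaConsumer created with consumer_timeout_ms simply stops yielding once that window passes without a message, which is presumably why the old Python 2.7 "prime the iterator" workaround could be dropped and the test now wraps the for-loop in the same bounded retry. A rough sketch of that shape, assuming a consumer configured with a short consumer_timeout_ms (topic name and broker address are placeholders):

from kafka import KafkaConsumer

consumer = KafkaConsumer(
    "example-topic",
    bootstrap_servers=["localhost:9092"],
    consumer_timeout_ms=500,  # iteration stops instead of blocking forever
)

records = []
attempts = 0
while not records and attempts < 10:
    for record in consumer:  # ends quietly once consumer_timeout_ms elapses
        records.append(record)
    attempts += 1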

tests/messagebroker_kafkapython/test_consumer.py
Lines changed: 14 additions & 10 deletions

@@ -14,25 +14,23 @@
 
 import pytest
 from conftest import cache_kafka_consumer_headers
-
-from newrelic.common.object_names import callable_name
-
 from testing_support.fixtures import (
+    reset_core_stats_engine,
     validate_attributes,
     validate_error_event_attributes_outside_transaction,
     validate_transaction_errors,
     validate_transaction_metrics,
-    reset_core_stats_engine,
-)
-from testing_support.validators.validate_transaction_count import (
-    validate_transaction_count,
 )
 from testing_support.validators.validate_distributed_trace_accepted import (
     validate_distributed_trace_accepted,
 )
+from testing_support.validators.validate_transaction_count import (
+    validate_transaction_count,
+)
 
 from newrelic.api.background_task import background_task
 from newrelic.api.transaction import end_of_transaction
+from newrelic.common.object_names import callable_name
 from newrelic.packages import six

@@ -117,8 +115,7 @@ def test_consumer_errors(get_consumer_record, consumer_next_raises):
 
     @reset_core_stats_engine()
     @validate_error_event_attributes_outside_transaction(
-        num_errors=1,
-        exact_attrs={"intrinsic": {"error.class": callable_name(exc_class)}, "agent": {}, "user": {}}
+        num_errors=1, exact_attrs={"intrinsic": {"error.class": callable_name(exc_class)}, "agent": {}, "user": {}}
     )
     def _test():
         with pytest.raises(exc_class):

@@ -160,7 +157,14 @@ def _consume():
     @cache_kafka_consumer_headers
     def _test():
         # Start the transaction but don't exit it.
-        next(consumer_iter)
+        timeout = 10
+        attempts = 0
+        record = None
+        while not record and attempts < timeout:
+            try:
+                record = next(consumer_iter)
+            except StopIteration:
+                attempts += 1
 
     _test()
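
When the same consumer is driven through an explicit iterator, the idle timeout surfaces as StopIteration from next() instead of the for-loop ending, hence the try/except form of the retry here. As a reusable sketch (the helper name is illustrative, not something the commit adds):

def next_record(consumer_iter, max_attempts=10):
    """Retry next() on a kafka-python consumer iterator until a record arrives."""
    record = None
    attempts = 0
    while record is None and attempts < max_attempts:
        try:
            record = next(consumer_iter)
        except StopIteration:  # raised when the consumer's timeout elapses with no message
            attempts += 1
    return record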

tests/messagebroker_kafkapython/test_serialization.py
Lines changed: 28 additions & 19 deletions

@@ -12,20 +12,20 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import json
+
 import pytest
 from testing_support.fixtures import (
+    reset_core_stats_engine,
+    validate_error_event_attributes_outside_transaction,
     validate_transaction_errors,
     validate_transaction_metrics,
-    validate_error_event_attributes_outside_transaction,
-    reset_core_stats_engine,
 )
 
 from newrelic.api.background_task import background_task
-from newrelic.packages import six
-
 from newrelic.common.object_names import callable_name
+from newrelic.packages import six
 
-import json
 
 def test_serialization_metrics(skip_if_not_serializing, topic, send_producer_message):
     txn_name = "test_serialization:test_serialization_metrics.<locals>.test" if six.PY3 else "test_serialization:test"

@@ -48,10 +48,13 @@ def test():
     test()
 
 
-@pytest.mark.parametrize("key,value", (
-    (object(), "A"),
-    ("A", object()),
-))
+@pytest.mark.parametrize(
+    "key,value",
+    (
+        (object(), "A"),
+        ("A", object()),
+    ),
+)
 def test_serialization_errors(skip_if_not_serializing, topic, producer, key, value):
     error_cls = TypeError

@@ -64,13 +67,16 @@ def test():
     test()
 
 
-@pytest.mark.parametrize("key,value", (
-    (b"%", b"{}"),
-    (b"{}", b"%"),
-))
+@pytest.mark.parametrize(
+    "key,value",
+    (
+        (b"%", b"{}"),
+        (b"{}", b"%"),
+    ),
+)
 def test_deserialization_errors(skip_if_not_serializing, monkeypatch, topic, producer, consumer, key, value):
     error_cls = json.decoder.JSONDecodeError if six.PY3 else ValueError
-
+
     # Remove serializers to cause intentional issues
     monkeypatch.setitem(producer.config, "value_serializer", None)
     monkeypatch.setitem(producer.config, "key_serializer", None)

@@ -80,13 +86,16 @@ def test_deserialization_errors(skip_if_not_serializing, monkeypatch, topic, pro
 
     @reset_core_stats_engine()
     @validate_error_event_attributes_outside_transaction(
-        num_errors=1,
-        exact_attrs={"intrinsic": {"error.class": callable_name(error_cls)}, "agent": {}, "user": {}}
+        num_errors=1, exact_attrs={"intrinsic": {"error.class": callable_name(error_cls)}, "agent": {}, "user": {}}
     )
     def test():
         with pytest.raises(error_cls):
-            for record in consumer:
-                pass
-            assert record is not None, "No record consumed."
+            timeout = 10
+            attempts = 0
+            record = None
+            while not record and attempts < timeout:
+                for record in consumer:
+                    pass
+                attempts += 1
 
     test()

tox.ini
Lines changed: 9 additions & 2 deletions

@@ -150,7 +150,9 @@ envlist =
     rabbitmq-messagebroker_pika-{py27,py37,py38,py39,pypy,pypy37}-pika0.13,
     rabbitmq-messagebroker_pika-{py37,py38,py39,py310,pypy37}-pikalatest,
     kafka-messagebroker_confluentkafka-{py27,py37,py38,py39,py310}-confluentkafkalatest,
+    kafka-messagebroker_confluentkafka-{py27,py39}-confluentkafka{0107,0106},
     kafka-messagebroker_kafkapython-{pypy,py27,py37,py38,pypy37}-kafkapythonlatest,
+    kafka-messagebroker_kafkapython-{py27,py38}-kafkapython{020001,020000,0104},
     python-template_mako-{py27,py37,py38,py39,py310}
 
 [pytest]

@@ -355,8 +357,13 @@ deps =
     messagebroker_pika-pikalatest: pika
     messagebroker_pika: tornado<5
     messagebroker_pika-{py27,pypy}: enum34
-    messagebroker_confluentkafka: confluent-kafka
-    messagebroker_kafkapython: kafka-python
+    messagebroker_confluentkafka-confluentkafkalatest: confluent-kafka
+    messagebroker_confluentkafka-confluentkafka0107: confluent-kafka<1.8
+    messagebroker_confluentkafka-confluentkafka0106: confluent-kafka<1.7
+    messagebroker_kafkapython-kafkapythonlatest: kafka-python
+    messagebroker_kafkapython-kafkapython020001: kafka-python<2.0.2
+    messagebroker_kafkapython-kafkapython020000: kafka-python<2.0.1
+    messagebroker_kafkapython-kafkapython0104: kafka-python<1.5
     template_mako: mako<1.2
 
 setenv =
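
With these factors in place each pinned range can be exercised on its own: assuming tox's usual factor matching, an environment such as kafka-messagebroker_confluentkafka-py39-confluentkafka0107 installs confluent-kafka<1.8, while the existing *latest environments keep tracking the newest client releases.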
