def test_e2e_read_messages(dd_environment, kafka_instance, check, dd_run_check, aggregator):
    """Test end-to-end integration: check can connect to real Kafka and emit events correctly.

    Runs the check against a real Kafka cluster (provided by the
    ``dd_environment`` fixture) and verifies that:
      * exactly one ``kafka_action_success`` event reaches the standard
        Datadog event aggregator,
      * both the action event and the individual message events are also
        sent to the ``data-streams-message`` event-platform track,
      * the action payload is identical on both tracks, and
      * the number of message events matches the ``messages_sent`` stat
        reported in the action payload.

    Skipped outside of E2E runs (``e2e_testing()`` is false).
    """
    if not e2e_testing():
        pytest.skip("E2E tests require dd_environment fixture")

    # Verify cluster is available before configuring the instance.
    cluster_id = common.get_cluster_id()
    assert cluster_id is not None, "Kafka cluster is not available"

    kafka_instance['read_messages']['cluster'] = cluster_id

    # Run the check
    check_instance = check(kafka_instance)
    dd_run_check(check_instance)

    # Verify standard Datadog event was emitted.
    action_events = [e for e in aggregator.events if 'kafka_action_' in e.get('event_type', '')]
    assert len(action_events) == 1, f"Expected 1 action event, got {len(action_events)}"
    assert action_events[0]['event_type'] == 'kafka_action_success'

    # Verify events sent to data-streams-message track. Action payloads
    # carry an 'action' key; per-message payloads carry a 'topic' key.
    data_streams_events = aggregator.get_event_platform_events("data-streams-message")
    action_ds_events = [e for e in data_streams_events if 'action' in e]
    message_events = [e for e in data_streams_events if 'topic' in e]

    # Verify both action and message events were sent.
    assert len(action_ds_events) == 1, f"Expected 1 action event in data streams, got {len(action_ds_events)}"
    assert len(message_events) > 0, "Expected at least one Kafka message event"

    # Verify action event payload is consistent between both tracks:
    # the standard event's msg_text is the JSON-encoded form of the
    # payload sent to the data-streams track.
    event_data = json.loads(action_events[0]['msg_text'])
    assert action_ds_events[0] == event_data, "Action event should have same payload in both tracks"

    # Verify message count matches the stats reported in the action payload.
    stats = event_data['stats']
    assert len(message_events) == stats['messages_sent'], "Message event count should match stats"