Skip to content

Commit c5bb38c

Browse files
committed
Tests for aggregation_id - #170
1 parent 69e40af commit c5bb38c

File tree

3 files changed

+125
-7
lines changed

3 files changed

+125
-7
lines changed

validation/graylog.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -61,8 +61,8 @@ def access_rest_api(self):
6161
def update_logging_alert_plugin_configuration(self):
6262
return self._api.update_logging_alert_plugin_configuration()
6363

64-
def update_alert_wizard_plugin_configuration(self, default_time=1, backlog_size=500, aggregation_time=0):
    """Forward an alert-wizard plugin configuration update to the REST API client.

    All parameters are passed through unchanged; see the API client for their meaning.
    """
    api = self._api
    return api.update_alert_wizard_plugin_configuration(
        default_time=default_time,
        backlog_size=backlog_size,
        aggregation_time=aggregation_time)
6666

6767
def get_alert_wizard_plugin_configuration(self):
6868
return self._api.get_alert_wizard_plugin_configuration()
@@ -179,4 +179,4 @@ def _has_aggregation_event(self):
179179

180180
def wait_until_new_event(self, initial_event_count, wait_duration):
    """Block until at least one new aggregation event has been produced.

    :param initial_event_count: event count observed before the action under test
    :param wait_duration: maximum time to wait, in minutes
    """
    # Use >= rather than ==: if several events arrive between two polls, the
    # count can jump past initial_event_count + 1 and a strict equality test
    # would wait forever even though new events did occur.
    def has_new_event():
        return self.get_events_count('aggregation-v1') >= initial_event_count + 1
    self._wait(has_new_event, 60 * wait_duration)

validation/graylog_rest_api.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -244,10 +244,9 @@ def get_alert_rules(self):
244244

245245
def update_logging_alert_plugin_configuration(self):
246246
configuration = {
247-
'aggregation_time': '1441',
248247
'alert_tag': 'LoggingAlert',
249248
'field_alert_id': 'id',
250-
'log_body': 'type: alert\nid: ${logging_alert.id}\nseverity: ${logging_alert.severity}\napp: graylog\nsubject: ${event_definition_title}\nbody: ${event_definition_description}\n${if backlog && backlog[0]} src: ${backlog[0].fields.src_ip}\nsrc_category: ${backlog[0].fields.src_category}\ndest: ${backlog[0].fields.dest_ip}\ndest_category: ${backlog[0].fields.dest_category}\n${end}',
249+
'log_body': 'type: alert\nid: ${event.fields.aggregation_id}\nseverity: ${logging_alert.severity}\napp: graylog\nsubject: ${event_definition_title}\nbody: ${event_definition_description}\n${if backlog && backlog[0]} src: ${backlog[0].fields.src_ip}\nsrc_category: ${backlog[0].fields.src_category}\ndest: ${backlog[0].fields.dest_ip}\ndest_category: ${backlog[0].fields.dest_category}\n${end}',
251250
'overflow_tag': 'LoggingOverflow',
252251
'separator': ' | '
253252
}
@@ -258,11 +257,12 @@ def get_alert_wizard_plugin_configuration(self):
258257
response = self._get('plugins/com.airbus_cyber_security.graylog.wizard/config')
259258
return response.json()
260259

261-
def update_alert_wizard_plugin_configuration(self, default_time=1, backlog_size=500):
260+
def update_alert_wizard_plugin_configuration(self, default_time=1, backlog_size=500, aggregation_time=0):
262261
configuration = {
263262
'default_values': {
264263
'matching_type': '',
265264
'threshold_type': '',
265+
'aggregation_time': aggregation_time,
266266
'time': default_time,
267267
'backlog': backlog_size
268268
},

validation/test.py

Lines changed: 119 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -83,7 +83,7 @@ def test_create_and_alert_rule_with_pipeline_condition_should_not_trigger_event_
8383
self._api.create_alert_rule_count('A', _PERIOD, stream=stream)
8484

8585
# Send a log with user=toto and source=sourceABC. It will be placed in the Stream because the pipeline function found the user in the list. So the rule will trigger but it is wrong because "source" is not equal to "source123"
86-
# Send a log with user=xxx and source=source123. It will be placed in the Stream beauce the only Stream rule is field "source" match exactly "source123". So the rule will trigger but it is wrong because "user" is not present in the list
86+
# Send a log with user=xxx and source=source123. It will be placed in the Stream because the only Stream rule is field "source" match exactly "source123". So the rule will trigger but it is wrong because "user" is not present in the list
8787
with self._graylog.access_gelf_input(self._gelf_input_identifier) as inputs:
8888
inputs.send({'host': 'source123'})
8989
aggregation_events_count = self._graylog.get_events_count('aggregation-v1')
@@ -170,3 +170,121 @@ def test_create_alert_rule_with_list_should_not_generate_event_on_substrings_of_
170170
for i in range(events['total_events']):
171171
print(events['events'][i])
172172
self.assertEqual(starting_events_count, self._graylog.get_events_count('aggregation-v1'))
173+
174+
def test_notification_should_generate_identifier_for_aggregation_id__issue170(self):
    """The notification should put an aggregation_id field on the generated event (issue #170)."""
    stream = {
        'field_rule': [{
            'field': 'x',
            'type': 1,
            'value': 'test_value'
        }],
        'matching_type': 'AND'
    }
    self._api.create_alert_rule_count('A', _PERIOD, stream=stream)

    # Send a log with _x=test_value so it matches the rule's stream
    with self._graylog.access_gelf_input(self._gelf_input_identifier) as inputs:
        inputs.send({'_x': 'test_value'})

    # Poll once per second, up to the rule period plus a safety margin
    events_count = 0
    for _ in range(60 * _PERIOD + 20):
        events_count = self._graylog.get_events_count('aggregation-v1')
        print(f'events count: {events_count}')
        if events_count > 0:
            break
        time.sleep(1)
    # Fail with a clear assertion message instead of an IndexError below
    # when no event arrived before the timeout.
    self.assertGreater(events_count, 0)

    events = self._graylog.get_events()

    aggregation_id = events['events'][0]['event']['fields']['aggregation_id']
    self.assertIsNotNone(aggregation_id)
200+
201+
def test_notification_should_generate_new_identifier_for_aggregation_id__issue170(self):
    """With aggregation_time=0, each triggered alert should get a fresh aggregation_id (issue #170)."""
    # Prepare config: no aggregation window, so identifiers must not be reused
    self._graylog.update_alert_wizard_plugin_configuration(aggregation_time=0)
    stream = {
        'field_rule': [{
            'field': 'x',
            'type': 1,
            'value': 'test_value'
        }],
        'matching_type': 'AND'
    }
    self._api.create_alert_rule_count('A', _PERIOD, stream=stream)

    with self._graylog.access_gelf_input(self._gelf_input_identifier) as inputs:
        # Send a message with _x=test_value
        inputs.send({'_x': 'test_value'})

        # Poll once per second, up to the rule period plus a safety margin
        for _ in range(60 * _PERIOD + 20):
            events_count = self._graylog.get_events_count('aggregation-v1')
            print(f'events count: {events_count}')
            if events_count > 0:
                break
            time.sleep(1)

        # Send a second message with _x=test_value
        inputs.send({'_x': 'test_value'})

        for _ in range(60 * _PERIOD + 20):
            events_count = self._graylog.get_events_count('aggregation-v1')
            print(f'events count: {events_count}')
            if events_count > 1:
                break
            time.sleep(1)

    events = self._graylog.get_events()

    # Check that the two events carry DIFFERENT aggregation identifiers
    # (the previous comment wrongly said "the same").
    aggregation_id_1 = events['events'][0]['event']['fields']['aggregation_id']
    aggregation_id_2 = events['events'][1]['event']['fields']['aggregation_id']

    # assertNotEquals is a deprecated alias; use assertNotEqual
    self.assertNotEqual(aggregation_id_1, aggregation_id_2)
242+
243+
def _poll_aggregation_events_until(self, threshold):
    # Poll the aggregation event count once per second until it reaches
    # the given threshold, bounded by the rule period plus a safety margin.
    for _attempt in range(60 * _PERIOD + 20):
        events_count = self._graylog.get_events_count('aggregation-v1')
        print(f'events count: {events_count}')
        if events_count >= threshold:
            break
        time.sleep(1)

def test_notification_should_reuse_identifier_for_aggregation_id__issue170(self):
    """Within the aggregation time window, successive alerts reuse the same aggregation_id (issue #170)."""
    # Prepare config: a 10-minute aggregation window, so identifiers are reused
    self._graylog.update_alert_wizard_plugin_configuration(aggregation_time=10)
    stream = {
        'field_rule': [{
            'field': 'x',
            'type': 1,
            'value': 'test_value'
        }],
        'matching_type': 'AND'
    }
    self._api.create_alert_rule_count('A', _PERIOD, stream=stream)

    with self._graylog.access_gelf_input(self._gelf_input_identifier) as inputs:
        # Send a message with _x=test_value
        inputs.send({'_x': 'test_value'})
        self._poll_aggregation_events_until(1)

        events = self._graylog.get_events()

        # Store aggregation_id
        aggregation_id = events['events'][0]['event']['fields']['aggregation_id']

        # Send a second message with _x=test_value
        inputs.send({'_x': 'test_value'})
        self._poll_aggregation_events_until(2)

    events = self._graylog.get_events()

    # Check if all aggregation_id are the same
    aggregation_id_1 = events['events'][0]['event']['fields']['aggregation_id']
    aggregation_id_2 = events['events'][1]['event']['fields']['aggregation_id']

    self.assertEqual(aggregation_id, aggregation_id_1)
    self.assertEqual(aggregation_id, aggregation_id_2)
290+

0 commit comments

Comments
 (0)