Skip to content

Commit e8fa869

Browse files
addressing some of Eric's comments
1 parent ec3a50f commit e8fa869

File tree

1 file changed

+29
-50
lines changed

1 file changed

+29
-50
lines changed

contentctl/objects/content_versioning_service.py

Lines changed: 29 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -1,21 +1,20 @@
1-
import time
2-
import uuid
31
import json
4-
import re
52
import logging
6-
from typing import Any, Callable
3+
import re
4+
import time
5+
import uuid
76
from functools import cached_property
7+
from typing import Any, Callable
88

9-
from pydantic import BaseModel, PrivateAttr, computed_field, Field
109
import splunklib.client as splunklib # type: ignore
10+
from pydantic import BaseModel, Field, PrivateAttr, computed_field
1111
from splunklib.binding import HTTPError, ResponseReader # type: ignore
1212
from splunklib.data import Record # type: ignore
1313

14-
from contentctl.objects.config import test_common, Infrastructure
15-
from contentctl.objects.detection import Detection
16-
from contentctl.objects.correlation_search import ResultIterator
1714
from contentctl.helper.utils import Utils
18-
15+
from contentctl.objects.config import Infrastructure, test_common
16+
from contentctl.objects.correlation_search import ResultIterator
17+
from contentctl.objects.detection import Detection
1918

2019
# TODO (cmcginley): suppress logging
2120
# Suppress logging by default; enable for local testing
@@ -151,8 +150,8 @@ def is_versioning_activated(self) -> bool:
151150
return bool(int(entry["content"]["versioning_activated"]))
152151
except KeyError as e:
153152
raise KeyError(
154-
"Cannot retrieve versioning status, unable to versioning status using the expected "
155-
f"keys: {e}"
153+
"Cannot retrieve versioning status, unable to determine versioning status using "
154+
f"the expected keys: {e}"
156155
) from e
157156
raise ValueError(
158157
"Cannot retrieve versioning status, unable to find an entry matching 'general' in the "
@@ -211,7 +210,7 @@ def is_cms_parser_enabled(self) -> bool:
211210

212211
def force_cms_parser(self) -> None:
213212
"""
214-
Force the cms_parser to being it's run being disabling and re-enabling it.
213+
Force the cms_parser to run by disabling and re-enabling it.
215214
"""
216215
# Get the data input entity
217216
cms_parser = self.service.input("data/inputs/cms_parser/main") # type: ignore
@@ -355,7 +354,9 @@ def validate_content_against_cms(self) -> None:
355354
# Init some counters and a mapping of detections to their names
356355
count = 100
357356
offset = 0
358-
remaining_detections = {x.name: x for x in self.detections}
357+
remaining_detections = {
358+
x.get_action_dot_correlationsearch_dot_label(self.global_config.app): x for x in self.detections
359+
}
359360
matched_detections: dict[str, Detection] = {}
360361

361362
# Create a filter for a specific memory error we're ok ignoring
@@ -377,40 +378,18 @@ def validate_content_against_cms(self) -> None:
377378
# Increment the offset for each result
378379
offset += 1
379380

380-
# Get the name of the search in the CMS event and attempt to use pattern matching
381-
# to strip the prefix and suffix used for the savedsearches.conf name so we can
382-
# compare to the detection
381+
# Get the name of the search in the CMS event
383382
cms_entry_name = cms_event["action.correlationsearch.label"]
384383
self.logger.info(
385384
f"[{self.infrastructure.instance_name}] {offset}: Matching cms_main entry "
386385
f"'{cms_entry_name}' against detections"
387386
)
388-
ptrn = re.compile(
389-
r"^"
390-
+ self.global_config.app.label
391-
+ r" - (?P<stripped_cms_entry_name>.+) - Rule$"
392-
)
393-
match = ptrn.match(cms_event["action.correlationsearch.label"])
394-
395-
# Report any errors extracting the detection name from the longer rule name
396-
if match is None:
397-
msg = (
398-
f"[{self.infrastructure.instance_name}] [{cms_entry_name}]: Entry in "
399-
"cms_main did not match the expected naming scheme; cannot compare to our "
400-
"detections."
401-
)
402-
self.logger.error(msg)
403-
exceptions.append(Exception(msg))
404-
continue
405-
406-
# Extract the detection name if matching was successful
407-
stripped_cms_entry_name = match.group("stripped_cms_entry_name")
408387

409388
# If CMS entry name matches one of the detections already matched, we've got an
410389
# unexpected repeated entry
411-
if stripped_cms_entry_name in matched_detections:
390+
if cms_entry_name in matched_detections:
412391
msg = (
413-
f"[{self.infrastructure.instance_name}] [{stripped_cms_entry_name}]: Detection "
392+
f"[{self.infrastructure.instance_name}] [{cms_entry_name}]: Detection "
414393
f"appears more than once in the cms_main index."
415394
)
416395
self.logger.error(msg)
@@ -419,18 +398,18 @@ def validate_content_against_cms(self) -> None:
419398

420399
# Iterate over the detections and compare the CMS entry name against each
421400
result_matches_detection = False
422-
for detection_name in remaining_detections:
401+
for detection_cs_label in remaining_detections:
423402
# If we find a match, break this loop, set the found flag and move the detection
424403
# from those that still need to be matched to those already matched
425-
if stripped_cms_entry_name == detection_name:
404+
if cms_entry_name == detection_cs_label:
426405
self.logger.info(
427406
f"[{self.infrastructure.instance_name}] {offset}: Successfully matched "
428-
f"cms_main entry against detection ('{detection_name}')!"
407+
f"cms_main entry against detection ('{detection_cs_label}')!"
429408
)
430409

431410
# Validate other fields of the cms_event against the detection
432411
exception = self.validate_detection_against_cms_event(
433-
cms_event, remaining_detections[detection_name]
412+
cms_event, remaining_detections[detection_cs_label]
434413
)
435414

436415
# Save the exception if validation failed
@@ -439,16 +418,16 @@ def validate_content_against_cms(self) -> None:
439418

440419
# Delete the matched detection and move it to the matched list
441420
result_matches_detection = True
442-
matched_detections[detection_name] = remaining_detections[
443-
detection_name
421+
matched_detections[detection_cs_label] = remaining_detections[
422+
detection_cs_label
444423
]
445-
del remaining_detections[detection_name]
424+
del remaining_detections[detection_cs_label]
446425
break
447426

448427
# Generate an exception if we couldn't match the CMS main entry to a detection
449428
if result_matches_detection is False:
450429
msg = (
451-
f"[{self.infrastructure.instance_name}] [{stripped_cms_entry_name}]: Could not "
430+
f"[{self.infrastructure.instance_name}] [{cms_entry_name}]: Could not "
452431
"match entry in cms_main against any of the expected detections."
453432
)
454433
self.logger.error(msg)
@@ -458,9 +437,9 @@ def validate_content_against_cms(self) -> None:
458437
# cms_main and there may have been a parsing issue with savedsearches.conf
459438
if len(remaining_detections) > 0:
460439
# Generate exceptions for the unmatched detections
461-
for detection_name in remaining_detections:
440+
for detection_cs_label in remaining_detections:
462441
msg = (
463-
f"[{self.infrastructure.instance_name}] [{detection_name}]: Detection not "
442+
f"[{self.infrastructure.instance_name}] [{detection_cs_label}]: Detection not "
464443
"found in cms_main; there may be an issue with savedsearches.conf"
465444
)
466445
self.logger.error(msg)
@@ -497,8 +476,8 @@ def validate_detection_against_cms_event(
497476
# TODO (PEX-509): validate additional fields between the cms_event and the detection
498477

499478
cms_uuid = uuid.UUID(cms_event["detection_id"])
500-
rule_name_from_detection = (
501-
f"{self.global_config.app.label} - {detection.name} - Rule"
479+
rule_name_from_detection = detection.get_action_dot_correlationsearch_dot_label(
480+
self.global_config.app
502481
)
503482

504483
# Compare the UUIDs

0 commit comments

Comments
 (0)