Skip to content

Commit 4f92434

Browse files
authored
Merge pull request #263 from splunk/obs_to_rba
DRAFT: new RBA Object - Step 3 - ESCU 5.0
2 parents d98b493 + 0f53c69 commit 4f92434

27 files changed

+1016
-762
lines changed

.github/workflows/test_against_escu.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ jobs:
3535
with:
3636
path: security_content
3737
repository: splunk/security_content
38+
ref: rba_migration
3839

3940
#Install the given version of Python we will test against
4041
- name: Install Required Python Version

contentctl/actions/build.py

Lines changed: 38 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -10,11 +10,11 @@
1010
from contentctl.output.conf_writer import ConfWriter
1111
from contentctl.output.api_json_output import ApiJsonOutput
1212
from contentctl.output.data_source_writer import DataSourceWriter
13-
from contentctl.objects.lookup import Lookup
13+
from contentctl.objects.lookup import CSVLookup, Lookup_Type
1414
import pathlib
1515
import json
1616
import datetime
17-
from typing import Union
17+
import uuid
1818

1919
from contentctl.objects.config import build
2020

@@ -34,27 +34,41 @@ def execute(self, input_dto: BuildInputDto) -> DirectorOutputDto:
3434
updated_conf_files:set[pathlib.Path] = set()
3535
conf_output = ConfOutput(input_dto.config)
3636

37+
38+
# Construct a path to a YML that does not actually exist.
39+
# We mock this "fake" path since the YML does not exist.
40+
# This ensures the checking for the existence of the CSV is correct
41+
data_sources_fake_yml_path = input_dto.config.getPackageDirectoryPath() / "lookups" / "data_sources.yml"
42+
3743
# Construct a special lookup whose CSV is created at runtime and
38-
# written directly into the output folder. It is created with model_construct,
39-
# not model_validate, because the CSV does not exist yet.
44+
# written directly into the lookups folder. We will delete this after a build,
45+
# assuming that it is successful.
4046
data_sources_lookup_csv_path = input_dto.config.getPackageDirectoryPath() / "lookups" / "data_sources.csv"
41-
DataSourceWriter.writeDataSourceCsv(input_dto.director_output_dto.data_sources, data_sources_lookup_csv_path)
42-
input_dto.director_output_dto.addContentToDictMappings(Lookup.model_construct(description= "A lookup file that will contain the data source objects for detections.",
43-
filename=data_sources_lookup_csv_path,
44-
name="data_sources"))
4547

48+
49+
50+
DataSourceWriter.writeDataSourceCsv(input_dto.director_output_dto.data_sources, data_sources_lookup_csv_path)
51+
input_dto.director_output_dto.addContentToDictMappings(CSVLookup.model_construct(name="data_sources",
52+
id=uuid.UUID("b45c1403-6e09-47b0-824f-cf6e44f15ac8"),
53+
version=1,
54+
author=input_dto.config.app.author_name,
55+
date = datetime.date.today(),
56+
description= "A lookup file that will contain the data source objects for detections.",
57+
lookup_type=Lookup_Type.csv,
58+
file_path=data_sources_fake_yml_path))
4659
updated_conf_files.update(conf_output.writeHeaders())
47-
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.detections, SecurityContentType.detections))
48-
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.stories, SecurityContentType.stories))
49-
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.baselines, SecurityContentType.baselines))
50-
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.investigations, SecurityContentType.investigations))
51-
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.lookups, SecurityContentType.lookups))
52-
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.macros, SecurityContentType.macros))
53-
updated_conf_files.update(conf_output.writeObjects(input_dto.director_output_dto.dashboards, SecurityContentType.dashboards))
60+
updated_conf_files.update(conf_output.writeLookups(input_dto.director_output_dto.lookups))
61+
updated_conf_files.update(conf_output.writeDetections(input_dto.director_output_dto.detections))
62+
updated_conf_files.update(conf_output.writeStories(input_dto.director_output_dto.stories))
63+
updated_conf_files.update(conf_output.writeBaselines(input_dto.director_output_dto.baselines))
64+
updated_conf_files.update(conf_output.writeInvestigations(input_dto.director_output_dto.investigations))
65+
updated_conf_files.update(conf_output.writeMacros(input_dto.director_output_dto.macros))
66+
updated_conf_files.update(conf_output.writeDashboards(input_dto.director_output_dto.dashboards))
5467
updated_conf_files.update(conf_output.writeMiscellaneousAppFiles())
5568

5669

5770

71+
5872
#Ensure that the conf file we just generated/update is syntactically valid
5973
for conf_file in updated_conf_files:
6074
ConfWriter.validateConfFile(conf_file)
@@ -67,17 +81,15 @@ def execute(self, input_dto: BuildInputDto) -> DirectorOutputDto:
6781
if input_dto.config.build_api:
6882
shutil.rmtree(input_dto.config.getAPIPath(), ignore_errors=True)
6983
input_dto.config.getAPIPath().mkdir(parents=True)
70-
api_json_output = ApiJsonOutput()
71-
for output_objects, output_type in [(input_dto.director_output_dto.detections, SecurityContentType.detections),
72-
(input_dto.director_output_dto.stories, SecurityContentType.stories),
73-
(input_dto.director_output_dto.baselines, SecurityContentType.baselines),
74-
(input_dto.director_output_dto.investigations, SecurityContentType.investigations),
75-
(input_dto.director_output_dto.lookups, SecurityContentType.lookups),
76-
(input_dto.director_output_dto.macros, SecurityContentType.macros),
77-
(input_dto.director_output_dto.deployments, SecurityContentType.deployments)]:
78-
api_json_output.writeObjects(output_objects, input_dto.config.getAPIPath(), input_dto.config.app.label, output_type )
79-
80-
84+
api_json_output = ApiJsonOutput(input_dto.config.getAPIPath(), input_dto.config.app.label)
85+
api_json_output.writeDetections(input_dto.director_output_dto.detections)
86+
api_json_output.writeStories(input_dto.director_output_dto.stories)
87+
api_json_output.writeBaselines(input_dto.director_output_dto.baselines)
88+
api_json_output.writeInvestigations(input_dto.director_output_dto.investigations)
89+
api_json_output.writeLookups(input_dto.director_output_dto.lookups)
90+
api_json_output.writeMacros(input_dto.director_output_dto.macros)
91+
api_json_output.writeDeployments(input_dto.director_output_dto.deployments)
92+
8193

8294
#create version file for sse api
8395
version_file = input_dto.config.getAPIPath()/"version.json"

contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructure.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1094,6 +1094,7 @@ def retry_search_until_timeout(
10941094
job = self.get_conn().search(query=search, **kwargs)
10951095
results = JSONResultsReader(job.results(output_mode="json"))
10961096

1097+
# TODO (cmcginley): @ljstella you're removing this ultimately, right?
10971098
# Consolidate a set of the distinct observable field names
10981099
observable_fields_set = set([o.name for o in detection.tags.observable]) # keeping this around for later
10991100
risk_object_fields_set = set([o.name for o in detection.tags.observable if "Victim" in o.role ]) # just the "Risk Objects"
@@ -1121,7 +1122,10 @@ def retry_search_until_timeout(
11211122
missing_risk_objects = risk_object_fields_set - results_fields_set
11221123
if len(missing_risk_objects) > 0:
11231124
# Report a failure in such cases
1124-
e = Exception(f"The observable field(s) {missing_risk_objects} are missing in the detection results")
1125+
e = Exception(
1126+
f"The risk object field(s) {missing_risk_objects} are missing in the "
1127+
"detection results"
1128+
)
11251129
test.result.set_job_content(
11261130
job.content,
11271131
self.infrastructure,
@@ -1137,6 +1141,8 @@ def retry_search_until_timeout(
11371141
# on a field. In this case, the field will appear but will not contain any values
11381142
current_empty_fields: set[str] = set()
11391143

1144+
# TODO (cmcginley): @ljstella is this something we're keeping for testing as
1145+
# well?
11401146
for field in observable_fields_set:
11411147
if result.get(field, 'null') == 'null':
11421148
if field in risk_object_fields_set:

contentctl/actions/validate.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
from contentctl.enrichments.attack_enrichment import AttackEnrichment
77
from contentctl.enrichments.cve_enrichment import CveEnrichment
88
from contentctl.objects.atomic import AtomicEnrichment
9+
from contentctl.objects.lookup import FileBackedLookup
910
from contentctl.helper.utils import Utils
1011
from contentctl.objects.data_source import DataSource
1112
from contentctl.helper.splunk_app import SplunkApp
@@ -64,7 +65,7 @@ def ensure_no_orphaned_files_in_lookups(self, repo_path:pathlib.Path, director_o
6465
lookupsDirectory = repo_path/"lookups"
6566

6667
# Get all of the files referenced by Lookups
67-
usedLookupFiles:list[pathlib.Path] = [lookup.filename for lookup in director_output_dto.lookups if lookup.filename is not None] + [lookup.file_path for lookup in director_output_dto.lookups if lookup.file_path is not None]
68+
usedLookupFiles:list[pathlib.Path] = [lookup.filename for lookup in director_output_dto.lookups if isinstance(lookup, FileBackedLookup)] + [lookup.file_path for lookup in director_output_dto.lookups if lookup.file_path is not None]
6869

6970
# Get all of the mlmodel and csv files in the lookups directory
7071
csvAndMlmodelFiles = Utils.get_security_content_files_from_directory(lookupsDirectory, allowedFileExtensions=[".yml",".csv",".mlmodel"], fileExtensionsToReturn=[".csv",".mlmodel"])

contentctl/input/director.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
from contentctl.objects.playbook import Playbook
1515
from contentctl.objects.deployment import Deployment
1616
from contentctl.objects.macro import Macro
17-
from contentctl.objects.lookup import Lookup
17+
from contentctl.objects.lookup import LookupAdapter, Lookup
1818
from contentctl.objects.atomic import AtomicEnrichment
1919
from contentctl.objects.security_content_object import SecurityContentObject
2020
from contentctl.objects.data_source import DataSource
@@ -58,13 +58,12 @@ def addContentToDictMappings(self, content: SecurityContentObject):
5858
f" - {content.file_path}\n"
5959
f" - {self.name_to_content_map[content_name].file_path}"
6060
)
61-
61+
6262
if content.id in self.uuid_to_content_map:
6363
raise ValueError(
6464
f"Duplicate id '{content.id}' with paths:\n"
6565
f" - {content.file_path}\n"
66-
f" - {self.uuid_to_content_map[content.id].file_path}"
67-
)
66+
f" - {self.uuid_to_content_map[content.id].file_path}")
6867

6968
if isinstance(content, Lookup):
7069
self.lookups.append(content)
@@ -157,7 +156,8 @@ def createSecurityContent(self, contentType: SecurityContentType) -> None:
157156
modelDict = YmlReader.load_file(file)
158157

159158
if contentType == SecurityContentType.lookups:
160-
lookup = Lookup.model_validate(modelDict, context={"output_dto":self.output_dto, "config":self.input_dto})
159+
lookup = LookupAdapter.validate_python(modelDict, context={"output_dto":self.output_dto, "config":self.input_dto})
160+
#lookup = Lookup.model_validate(modelDict, context={"output_dto":self.output_dto, "config":self.input_dto})
161161
self.output_dto.addContentToDictMappings(lookup)
162162

163163
elif contentType == SecurityContentType.macros:

contentctl/input/new_content_questions.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ def get_questions_detection(cls) -> list[dict[str,Any]]:
4848
{
4949
'type': 'checkbox',
5050
'message': 'Your data source',
51-
'name': 'data_source',
51+
'name': 'data_sources',
5252
#In the future, we should dynamically populate this from the DataSource Objects we have parsed from the data_sources directory
5353
'choices': sorted(DataSource._value2member_map_ )
5454

0 commit comments

Comments
 (0)