Commit 2f7314a

Merge pull request #30 from splunk/abstract_content_types
Abstract content types
2 parents 81b0d0c + 891afe4

File tree: 11 files changed, +259 −261 lines

contentctl/actions/detection_testing/infrastructures/DetectionTestingInfrastructure.py

Lines changed: 1 addition & 1 deletion
@@ -58,7 +58,7 @@ class DetectionTestingManagerOutputDto:
     start_time: Union[datetime.datetime, None] = None
     replay_index: str = "CONTENTCTL_TESTING_INDEX"
     replay_host: str = "CONTENTCTL_HOST"
-    timeout_seconds: int = 15
+    timeout_seconds: int = 60
     terminate: bool = False

contentctl/actions/inspect.py

Lines changed: 0 additions & 42 deletions
This file was deleted.

contentctl/contentctl.py

Lines changed: 2 additions & 18 deletions
@@ -17,7 +17,6 @@
 from contentctl.actions.new_content import NewContentInputDto, NewContent
 from contentctl.actions.doc_gen import DocGenInputDto, DocGen
 from contentctl.actions.initialize import Initialize, InitializeInputDto
-from contentctl.actions.inspect import InspectInputDto, Inspect
 from contentctl.actions.api_deploy import API_Deploy, API_DeployInputDto

 from contentctl.input.director import DirectorInputDto
@@ -116,14 +115,6 @@ def build(args, config:Union[Config,None]=None) -> DirectorOutputDto:
     return generate.execute(generate_input_dto)


-def inspect(args) -> None:
-    config=start(args)
-    app_path = pathlib.Path(config.build.path_root)/f"{config.build.name}.tar.gz"
-    input_dto = InspectInputDto(path=app_path)
-    i = Inspect()
-    i.execute(input_dto=input_dto)
-
-
 def api_deploy(args) -> None:
     config = start(args)
     deploy_input_dto = API_DeployInputDto(path=pathlib.Path(args.path), config=config)
@@ -170,6 +161,7 @@ def test(args: argparse.Namespace):
         local_path=str(pathlib.Path(config.build.path_root)/f"{config.build.name}.tar.gz"),
         description=config.build.description,
         splunkbase_path=None,
+        force_local=True
     )

     # We need to do this instead of appending to retrigger validation.
@@ -338,15 +330,7 @@ def main():

     reporting_parser.set_defaults(func=reporting)

-    inspect_parser.add_argument(
-        "-ap",
-        "--app_path",
-        required=False,
-        type=str,
-        default=None,
-        help="path to the Splunk app to be inspected",
-    )
-    inspect_parser.set_defaults(func=inspect)
+

     api_deploy_parser.set_defaults(func=api_deploy)

Lines changed: 157 additions & 0 deletions
@@ -0,0 +1,157 @@
import uuid
import string
import requests
import time
import sys

from pydantic import BaseModel, validator, root_validator, Extra
from dataclasses import dataclass
from typing import Union
from datetime import datetime, timedelta


from contentctl.objects.security_content_object import SecurityContentObject
from contentctl.objects.enums import AnalyticsType
from contentctl.objects.enums import DataModel
from contentctl.objects.enums import DetectionStatus
from contentctl.objects.detection_tags import DetectionTags
from contentctl.objects.config import ConfigDetectionConfiguration
from contentctl.objects.unit_test import UnitTest
from contentctl.objects.macro import Macro
from contentctl.objects.lookup import Lookup
from contentctl.objects.baseline import Baseline
from contentctl.objects.playbook import Playbook
from contentctl.helper.link_validator import LinkValidator
from contentctl.objects.enums import SecurityContentType


class Detection_Abstract(SecurityContentObject):
    contentType: SecurityContentType = SecurityContentType.detections
    type: str
    status: DetectionStatus
    data_source: list[str]
    search: Union[str, dict]
    how_to_implement: str
    known_false_positives: str
    check_references: bool = False
    references: list
    tags: DetectionTags
    tests: list[UnitTest] = []

    # enrichments
    datamodel: list = None
    deprecated: bool = None
    experimental: bool = None
    deployment: ConfigDetectionConfiguration = None
    annotations: dict = None
    risk: list = None
    playbooks: list[Playbook] = None
    baselines: list[Baseline] = None
    mappings: dict = None
    macros: list[Macro] = None
    lookups: list[Lookup] = None
    cve_enrichment: list = None
    splunk_app_enrichment: list = None
    file_path: str = None
    source: str = None
    nes_fields: str = None
    providing_technologies: list = None
    runtime: str = None

    class Config:
        use_enum_values = True

    @validator("type")
    def type_valid(cls, v, values):
        if v.lower() not in [el.name.lower() for el in AnalyticsType]:
            raise ValueError("not valid analytics type: " + values["name"])
        return v

    @validator('how_to_implement')
    def encode_error(cls, v, values, field):
        return SecurityContentObject.free_text_field_valid(cls, v, values, field)

    # @root_validator
    # def search_validation(cls, values):
    #     if 'ssa_' not in values['file_path']:
    #         if not '_filter' in values['search']:
    #             raise ValueError('filter macro missing in: ' + values["name"])
    #         if any(x in values['search'] for x in ['eventtype=', 'sourcetype=', ' source=', 'index=']):
    #             if not 'index=_internal' in values['search']:
    #                 raise ValueError('Use source macro instead of eventtype, sourcetype, source or index in detection: ' + values["name"])
    #     return values

    # disabled because of performance reasons
    # @validator('references')
    # def references_check(cls, v, values):
    #     return LinkValidator.check_references(v, values["name"])
    #     return v

    @validator("search")
    def search_validate(cls, v, values):
        # write search validator
        return v

    @validator("tests")
    def tests_validate(cls, v, values):
        if values.get("status", "") != DetectionStatus.production and not v:
            raise ValueError(
                "tests value is needed for production detection: " + values["name"]
            )
        return v

    @validator("experimental", always=True)
    def experimental_validate(cls, v, values):
        if DetectionStatus(values.get("status", "")) == DetectionStatus.experimental:
            return True
        return False

    @validator("deprecated", always=True)
    def deprecated_validate(cls, v, values):
        if DetectionStatus(values.get("status", "")) == DetectionStatus.deprecated:
            return True
        return False

    @validator("datamodel")
    def datamodel_valid(cls, v, values):
        for datamodel in v:
            if datamodel not in [el.name for el in DataModel]:
                raise ValueError("not valid data model: " + values["name"])
        return v

    def all_tests_successful(self) -> bool:
        if len(self.tests) == 0:
            return False
        for test in self.tests:
            if test.result is None or test.result.success == False:
                return False
        return True

    def get_summary(
        self,
        detection_fields: list[str] = ["name", "search"],
        test_model_fields: list[str] = ["success", "message"],
        test_job_fields: list[str] = ["resultCount", "runDuration"],
    ) -> dict:
        summary_dict = {}
        for field in detection_fields:
            summary_dict[field] = getattr(self, field)
        summary_dict["success"] = self.all_tests_successful()
        summary_dict["tests"] = []
        for test in self.tests:
            result: dict[str, Union[str, bool]] = {"name": test.name}
            if test.result is not None:
                result.update(
                    test.result.get_summary_dict(
                        model_fields=test_model_fields,
                        job_fields=test_job_fields,
                    )
                )
            else:
                result["success"] = False
                result["message"] = "RESULT WAS NONE"

            summary_dict["tests"].append(result)

        return summary_dict
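
Note: the experimental and deprecated fields above are never set directly in YAML; the always=True validators derive them from status. A minimal standalone sketch of that pattern, assuming pydantic v1 (which the imports above use); the Status and Item names are illustrative and not part of contentctl:

from enum import Enum
from pydantic import BaseModel, validator

class Status(str, Enum):
    production = "production"
    experimental = "experimental"
    deprecated = "deprecated"

class Item(BaseModel):
    status: Status
    # defaults of None make these Optional in pydantic v1; the validators below
    # overwrite them based on status, mirroring Detection_Abstract
    experimental: bool = None
    deprecated: bool = None

    @validator("experimental", always=True)
    def experimental_validate(cls, v, values):
        return values.get("status") == Status.experimental

    @validator("deprecated", always=True)
    def deprecated_validate(cls, v, values):
        return values.get("status") == Status.deprecated

print(Item(status="experimental"))
# status=<Status.experimental: 'experimental'> experimental=True deprecated=False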
Lines changed: 60 additions & 0 deletions
@@ -0,0 +1,60 @@
import abc
import string
import uuid
from datetime import datetime
from pydantic import BaseModel, validator, ValidationError
from contentctl.objects.enums import SecurityContentType


class SecurityContentObject_Abstract(BaseModel, abc.ABC):
    contentType: SecurityContentType
    name: str
    author: str = "UNKNOWN_AUTHOR"
    date: str = "1990-01-01"
    version: int = 99999
    id: str = None
    description: str = "UNKNOWN_DESCRIPTION"

    @validator('name')
    def name_max_length(cls, v):
        if len(v) > 67:
            print("LENGTH ERROR!")
            raise ValueError('name is longer then 67 chars: ' + v)
        return v

    @validator('name')
    def name_invalid_chars(cls, v):
        invalidChars = set(string.punctuation.replace("-", ""))
        if any(char in invalidChars for char in v):
            raise ValueError('invalid chars used in name: ' + v)
        return v

    @validator('id', always=True)
    def id_check(cls, v, values):
        try:
            uuid.UUID(str(v))
        except:
            #print(f"Generating missing uuid for {values['name']}")
            return str(uuid.uuid4())
            raise ValueError('uuid is not valid: ' + values["name"])
        return v

    @validator('date')
    def date_valid(cls, v, values):
        try:
            datetime.strptime(v, "%Y-%m-%d")
        except:
            raise ValueError('date is not in format YYYY-MM-DD: ' + values["name"])
        return v

    @staticmethod
    def free_text_field_valid(input_cls, v, values, field):
        try:
            v.encode('ascii')
        except UnicodeEncodeError:
            raise ValueError('encoding error in ' + field.name + ': ' + values["name"])
        return v

    @validator('description')
    def description_valid(cls, v, values, field):
        return SecurityContentObject_Abstract.free_text_field_valid(cls, v, values, field)
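
The id and date validators above are the ones most likely to fire on hand-written YAML: an invalid or missing id is silently replaced with a fresh UUID, while a malformed date raises. A minimal standalone sketch of that behaviour, again assuming pydantic v1; the Example class is illustrative and omits the other required fields such as contentType:

import uuid
from datetime import datetime
from pydantic import BaseModel, validator

class Example(BaseModel):
    name: str
    id: str = None            # Optional in pydantic v1; filled in by the validator
    date: str = "1990-01-01"

    @validator('id', always=True)
    def id_check(cls, v, values):
        try:
            uuid.UUID(str(v))
        except Exception:
            # missing or invalid id -> generate one, as the abstract class does
            return str(uuid.uuid4())
        return v

    @validator('date')
    def date_valid(cls, v, values):
        try:
            datetime.strptime(v, "%Y-%m-%d")
        except ValueError:
            raise ValueError('date is not in format YYYY-MM-DD: ' + values["name"])
        return v

print(Example(name="My Detection").id)   # prints an auto-generated UUID
# Example(name="Bad Date", date="2023/05/01") would raise a ValidationError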

contentctl/objects/app.py

Lines changed: 2 additions & 1 deletion
@@ -44,6 +44,7 @@ class App(BaseModel, extra=Extra.forbid):
     # This will be set via a function call and should not be provided in the YML
     # Note that this is the path relative to the container mount
     environment_path: str = ENVIRONMENT_PATH_NOT_SET
+    force_local: bool = False

     def configure_app_source_for_container(
         self,
@@ -57,7 +58,7 @@ def configure_app_source_for_container(
             splunkbase_username is not None and splunkbase_password is not None
         )

-        if splunkbase_creds_provided and self.splunkbase_path is not None:
+        if splunkbase_creds_provided and self.splunkbase_path is not None and not self.force_local:
             self.environment_path = self.splunkbase_path

         elif self.local_path is not None:

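
Taken together with the force_local=True argument now passed from contentctl.py, the effect is that contentctl test stages the locally built package even when Splunkbase credentials and a splunkbase_path are available. A rough standalone sketch of the selection order; the function name and error message are illustrative, not contentctl API:

def choose_app_source(creds_provided: bool, splunkbase_path, local_path, force_local: bool):
    # Mirrors the if/elif above: Splunkbase wins only when creds and a path exist
    # AND the app is not forced local; otherwise fall back to the local package.
    if creds_provided and splunkbase_path is not None and not force_local:
        return splunkbase_path
    elif local_path is not None:
        return local_path
    raise ValueError("no usable app source")

# contentctl test now behaves like this call: the local build is used.
print(choose_app_source(True, "some/splunkbase/path", "dist/my_app.tar.gz", force_local=True))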

contentctl/objects/constants.py

Lines changed: 1 addition & 1 deletion
@@ -103,7 +103,7 @@
     "File Name": 7,
     "File Hash": 8,
     "Process Name": 9,
-    "Ressource UID": 10,
+    "Resource UID": 10,
     "Endpoint": 20,
     "User": 21,
     "Email": 22,

