2 changes: 2 additions & 0 deletions .git-blame-ignore-revs
@@ -1 +1,3 @@
ff87bcaf1741e8ecf15cb8d401438592dfef3ba7 # Mass reformat with adoption of ruff
9d96a56b763ece491a07441dbe26176777a1964c # Add I rules to ruff
4eefa15b8afbb387b9fa5338dc45759e1915ef56 # Add E, W, and RUF
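These hashes mark formatting-only commits. Pointing `git blame` at this file — for example with `git blame --ignore-revs-file .git-blame-ignore-revs`, or once per clone via `git config blame.ignoreRevsFile .git-blame-ignore-revs` — skips them when attributing lines, and GitHub's blame view appears to pick the file up automatically when it sits at the repository root under this exact name.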
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -8,7 +8,7 @@ repos:
- id: detect-private-key
- id: forbid-submodules
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.11.2
rev: v0.12.5
hooks:
- id: ruff
args: [ --fix ]
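The bump of `ruff-pre-commit` from v0.11.2 to v0.12.5 is the kind of change `pre-commit autoupdate` typically produces; re-running the hooks locally with `pre-commit run --all-files` should apply the same `--fix` pass that the CI hook applies.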
8 changes: 5 additions & 3 deletions contentctl/actions/deploy_acs.py
@@ -1,7 +1,9 @@
from contentctl.objects.config import deploy_acs, StackType
from requests import post
import pprint

from requests import post

from contentctl.objects.config import StackType, deploy_acs


class Deploy:
def execute(self, config: deploy_acs, appinspect_token: str) -> None:
@@ -33,7 +35,7 @@ def execute(self, config: deploy_acs, appinspect_token: str) -> None:
raise Exception(f"Unsupported stack type: '{config.stack_type}'")
except Exception as e:
raise Exception(
f"Error installing to stack '{config.splunk_cloud_stack}' (stack_type='{config.stack_type}') via ACS:\n{str(e)}"
f"Error installing to stack '{config.splunk_cloud_stack}' (stack_type='{config.stack_type}') via ACS:\n{e!s}"
)

try:
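The recurring `{str(e)}` → `{e!s}` rewrites throughout this PR look like ruff's explicit-conversion-flag rule (RUF010, part of the RUF set recorded in the ignore-revs list above): the `!s` flag tells the f-string to call `str()` itself, so both spellings produce identical output. A minimal sketch (the exception message is invented):

```python
# Both f-strings format the exception the same way; ruff prefers the !s
# conversion flag over an explicit str() call inside the braces.
try:
    raise ValueError("bad stack type")
except Exception as e:
    assert f"{str(e)}" == f"{e!s}" == "bad stack type"
    print(f"Error installing via ACS:\n{e!s}")
```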
@@ -162,10 +162,10 @@ def sigint_handler(signum, frame):
print()
print(f"[{error_type}]:")
for error in errors[error_type]:
print(f"\t❌ {str(error)}")
print(f"\t❌ {error!s}")
if isinstance(error, ExceptionGroup):
for suberror in error.exceptions: # type: ignore
print(f"\t\t❌ {str(suberror)}") # type: ignore
print(f"\t\t❌ {suberror!s}") # type: ignore
print()

return self.output_dto
@@ -208,7 +208,7 @@ def create_DetectionTestingInfrastructureObjects(self):
except Exception as e:
raise Exception(
"Failed to pull docker container image "
f"[{self.input_dto.config.container_settings.full_image_path}]: {str(e)}"
f"[{self.input_dto.config.container_settings.full_image_path}]: {e!s}"
)

already_staged_container_files = False
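The error-reporting loop above walks `error.exceptions` when a collected error is an `ExceptionGroup`, a Python 3.11+ type. A small hedged sketch of that pattern, with made-up messages:

```python
# Sketch only: build a group of sub-errors and print them the way the loop above does.
group = ExceptionGroup(
    "Failed to pull docker container image",  # outer message
    [ValueError("bad tag"), TimeoutError("registry timeout")],
)
print(f"\t❌ {group!s}")
for suberror in group.exceptions:
    print(f"\t\t❌ {suberror!s}")
```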
8 changes: 4 additions & 4 deletions contentctl/actions/detection_testing/GitService.py
@@ -99,7 +99,7 @@ def getChanges(self, target_branch: str) -> List[Detection]:
updated_detections.add(detectionObject)
else:
raise Exception(
f"Error getting detection object for file {str(decoded_path)}"
f"Error getting detection object for file {decoded_path!s}"
)

elif (
@@ -111,7 +111,7 @@ def getChanges(self, target_branch: str) -> List[Detection]:
updated_macros.add(macroObject)
else:
raise Exception(
f"Error getting macro object for file {str(decoded_path)}"
f"Error getting macro object for file {decoded_path!s}"
)

elif (
@@ -125,7 +125,7 @@ def getChanges(self, target_branch: str) -> List[Detection]:
updated_datasources.add(datasourceObject)
else:
raise Exception(
f"Error getting data source object for file {str(decoded_path)}"
f"Error getting data source object for file {decoded_path!s}"
)

elif decoded_path.is_relative_to(self.config.path / "lookups"):
@@ -172,7 +172,7 @@ def getChanges(self, target_branch: str) -> List[Detection]:

else:
raise Exception(
f"Detected a changed file in the lookups/ directory '{str(decoded_path)}'.\n"
f"Detected a changed file in the lookups/ directory '{decoded_path!s}'.\n"
"Only files ending in .csv, .yml, or .mlmodel are supported in this "
"directory. This file must be removed from the lookups/ directory."
)
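The branch logic above routes each changed path by directory and extension; a hedged sketch of the same `pathlib` checks (the paths and the `content_root` variable are made up, with `content_root` standing in for `self.config.path`):

```python
from pathlib import Path

content_root = Path("my_content_pack")  # stands in for self.config.path
decoded_path = content_root / "lookups" / "suspicious_domains.csv"

# Route the file the way getChanges() appears to: by parent directory, then suffix.
if decoded_path.is_relative_to(content_root / "lookups"):
    if decoded_path.suffix not in (".csv", ".yml", ".mlmodel"):
        raise Exception(
            f"Detected a changed file in the lookups/ directory '{decoded_path!s}'.\n"
            "Only files ending in .csv, .yml, or .mlmodel are supported in this directory."
        )
```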
@@ -47,13 +47,13 @@
try:
results = parser.parse_args()
except Exception as e:
print(f"Error parsing arguments: {str(e)}")
print(f"Error parsing arguments: {e!s}")
exit(1)

try:
summary_info = json.loads(results.input_summary_file.read())
except Exception as e:
print(f"Error loading {results.input_summary_file.name} JSON file: {str(e)}")
print(f"Error loading {results.input_summary_file.name} JSON file: {e!s}")
sys.exit(1)

if "summary" not in summary_info:
@@ -75,7 +75,7 @@
RAW_BADGE_SVG.format(results.badge_string, "{:2.1f}%".format(pass_percent))
)
except Exception as e:
print(f"Error generating badge: {str(e)}")
print(f"Error generating badge: {e!s}")
sys.exit(1)


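For reference, the `"{:2.1f}%".format(...)` call above renders the pass rate with one decimal place before it is substituted into the SVG template. A tiny sketch — the summary field name and the percentage arithmetic are assumptions, since the summary schema isn't shown here:

```python
import json

# Hypothetical summary payload mirroring the key the script checks for.
summary_info = json.loads('{"summary": {"total_pass_rate": 0.9643}}')
if "summary" not in summary_info:
    raise SystemExit("missing 'summary' key")

pass_percent = summary_info["summary"]["total_pass_rate"] * 100  # field name is an assumption
print("{:2.1f}%".format(pass_percent))  # -> "96.4%"
```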
@@ -197,7 +197,7 @@ def setup(self):
self.check_for_teardown()

except Exception as e:
msg = f"[{self.get_name()}]: {str(e)}"
msg = f"[{self.get_name()}]: {e!s}"
self.finish()
if isinstance(e, ExceptionGroup):
raise ExceptionGroup(msg, e.exceptions) from e # type: ignore
@@ -310,7 +310,7 @@ def configure_hec(self):
return

except Exception as e:
raise (Exception(f"Failure creating HEC Endpoint: {str(e)}"))
raise (Exception(f"Failure creating HEC Endpoint: {e!s}"))

def get_all_indexes(self) -> None:
"""
@@ -327,7 +327,7 @@ def get_all_indexes(self) -> None:
# Retrieve all available indexes on the splunk instance
self.all_indexes_on_server = indexes
except Exception as e:
raise (Exception(f"Failure getting indexes: {str(e)}"))
raise (Exception(f"Failure getting indexes: {e!s}"))

def get_conn(self) -> client.Service:
try:
@@ -382,7 +382,7 @@ def connect_to_api(self, sleep_seconds: int = 5):
pass
except Exception as e:
self.pbar.write(
f"Error getting API connection (not quitting) '{type(e).__name__}': {str(e)}"
f"Error getting API connection (not quitting) '{type(e).__name__}': {e!s}"
)

for _ in range(sleep_seconds):
@@ -402,7 +402,7 @@ def create_replay_index(self):
pass
else:
raise Exception(
f"Error creating index {self.sync_obj.replay_index} - {str(e)}"
f"Error creating index {self.sync_obj.replay_index} - {e!s}"
)

def configure_imported_roles(
@@ -426,7 +426,7 @@ def configure_imported_roles(
)
return
except Exception as e:
msg = f"Error configuring roles: {str(e)}"
msg = f"Error configuring roles: {e!s}"
self.pbar.write(msg)
raise Exception(msg) from e

@@ -436,7 +436,7 @@ def configure_delete_indexes(self):
self.get_conn().post(endpoint, value=";".join(self.all_indexes_on_server))
except Exception as e:
self.pbar.write(
f"Error configuring deleteIndexesAllowed with '{self.all_indexes_on_server}': [{str(e)}]"
f"Error configuring deleteIndexesAllowed with '{self.all_indexes_on_server}': [{e!s}]"
)

def wait_for_conf_file(self, app_name: str, conf_file_name: str):
@@ -474,12 +474,12 @@ def configure_conf_file_datamodels(self, APP_NAME: str = "Splunk_SA_CIM"):
parser.read(custom_acceleration_datamodels)
if len(parser.keys()) > 1:
self.pbar.write(
f"Read {len(parser) - 1} custom datamodels from {str(custom_acceleration_datamodels)}!"
f"Read {len(parser) - 1} custom datamodels from {custom_acceleration_datamodels!s}!"
)

if not cim_acceleration_datamodels.is_file():
self.pbar.write(
f"******************************\nDATAMODEL ACCELERATION FILE {str(cim_acceleration_datamodels)} NOT "
f"******************************\nDATAMODEL ACCELERATION FILE {cim_acceleration_datamodels!s} NOT "
"FOUND. CIM DATAMODELS NOT ACCELERATED\n******************************\n"
)
else:
Expand All @@ -499,7 +499,7 @@ def configure_conf_file_datamodels(self, APP_NAME: str = "Splunk_SA_CIM"):

except Exception as e:
self.pbar.write(
f"Error creating the conf Datamodel {datamodel_name} key/value {name}/{value}: {str(e)}"
f"Error creating the conf Datamodel {datamodel_name} key/value {name}/{value}: {e!s}"
)

def execute(self):
@@ -528,9 +528,7 @@ def execute(self):
self.finish()
return
except Exception as e:
self.pbar.write(
f"Error testing detection: {type(e).__name__}: {str(e)}"
)
self.pbar.write(f"Error testing detection: {type(e).__name__}: {e!s}")
raise e
finally:
self.sync_obj.outputQueue.append(detection)
@@ -1365,7 +1363,7 @@ def delete_attack_data(self, attack_data_files: list[TestAttackData]):
except Exception as e:
raise (
Exception(
f"Trouble deleting data using the search {splunk_search}: {str(e)}"
f"Trouble deleting data using the search {splunk_search}: {e!s}"
)
)

@@ -1440,7 +1438,7 @@ def replay_attack_data_file(
except Exception as e:
raise (
Exception(
f"Could not download attack data file [{attack_data_file.data}]:{str(e)}"
f"Could not download attack data file [{attack_data_file.data}]:{e!s}"
)
)

@@ -1514,7 +1512,7 @@ def hec_raw_replay(
except Exception as e:
raise (
Exception(
f"There was an exception sending attack_data to HEC: {str(e)}"
f"There was an exception sending attack_data to HEC: {e!s}"
)
)

@@ -1558,7 +1556,7 @@ def hec_raw_replay(
)
)
except Exception as e:
raise (Exception(f"There was an exception in the post: {str(e)}"))
raise (Exception(f"There was an exception in the post: {e!s}"))

def status(self):
pass
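The `hec_raw_replay` error paths above wrap failures while posting attack data to Splunk's HTTP Event Collector. A hedged sketch of such a raw-endpoint post — the host, token, index, sourcetype, and payload are placeholders, not the project's exact request:

```python
import requests

splunk_host = "127.0.0.1"  # placeholder
hec_token = "00000000-0000-0000-0000-000000000000"  # placeholder

try:
    response = requests.post(
        f"https://{splunk_host}:8088/services/collector/raw",
        params={"index": "attack_data", "sourcetype": "XmlWinEventLog"},
        headers={"Authorization": f"Splunk {hec_token}"},
        data=b"raw event payload",
        verify=False,  # test containers typically use self-signed certs
    )
    response.raise_for_status()
except Exception as e:
    raise Exception(f"There was an exception sending attack_data to HEC: {e!s}")
```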
@@ -1,11 +1,12 @@
import docker
import docker.models.containers
import docker.models.resource
import docker.types

from contentctl.actions.detection_testing.infrastructures.DetectionTestingInfrastructure import (
DetectionTestingInfrastructure,
)
from contentctl.objects.config import test
import docker.models.resource
import docker.models.containers
import docker
import docker.types


class DetectionTestingInfrastructureContainer(DetectionTestingInfrastructure):
@@ -34,7 +35,7 @@ def finish(self):
self.removeContainer()
pass
except Exception as e:
raise (Exception(f"Error removing container: {str(e)}"))
raise (Exception(f"Error removing container: {e!s}"))
super().finish()

def get_name(self) -> str:
@@ -46,7 +47,7 @@ def get_docker_client(self):

return c
except Exception as e:
raise (Exception(f"Failed to get docker client: {str(e)}"))
raise (Exception(f"Failed to get docker client: {e!s}"))

def check_for_teardown(self):
try:
@@ -56,7 +57,7 @@ def check_for_teardown(self):
except Exception as e:
if self.sync_obj.terminate is not True:
self.pbar.write(
f"Error: could not get container [{self.get_name()}]: {str(e)}"
f"Error: could not get container [{self.get_name()}]: {e!s}"
)
self.sync_obj.terminate = True
else:
@@ -175,6 +176,6 @@ def removeContainer(self, removeVolumes: bool = True, forceRemove: bool = True):
except Exception as e:
raise (
Exception(
f"Could not remove Docker Container [{self.get_name()}]: {str(e)}"
f"Could not remove Docker Container [{self.get_name()}]: {e!s}"
)
)
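`removeContainer(removeVolumes=True, forceRemove=True)` above maps naturally onto the docker SDK's container API; a minimal sketch under the assumption that the container is looked up by name (the name is invented):

```python
import docker

client = docker.from_env()
try:
    container = client.containers.get("splunk_contentctl_1")  # invented name
    # v= removes anonymous volumes, force= kills a running container first.
    container.remove(v=True, force=True)
except Exception as e:
    raise Exception(f"Could not remove Docker Container [splunk_contentctl_1]: {e!s}")
```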
3 changes: 2 additions & 1 deletion contentctl/actions/detection_testing/progress_bar.py
@@ -1,7 +1,8 @@
import datetime
import time
from enum import StrEnum

from tqdm import tqdm
import datetime


class TestReportingType(StrEnum):
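The import shuffles in this and the surrounding files follow ruff's isort-style `I` rules (also recorded in the ignore-revs file): standard library first, then third-party, then first-party, with a blank line between groups and alphabetical order inside each. A sketch mirroring modules from the diffs above — it assumes `tqdm` and `contentctl` are installed:

```python
# Standard library
import datetime
import time
from enum import StrEnum

# Third-party
from tqdm import tqdm

# First-party
from contentctl.objects.config import test
```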
@@ -1,10 +1,11 @@
import time

import tqdm

from contentctl.actions.detection_testing.views.DetectionTestingView import (
DetectionTestingView,
)

import time
import tqdm


class DetectionTestingViewCLI(DetectionTestingView, arbitrary_types_allowed=True):
pbar: tqdm.tqdm = None
@@ -1,8 +1,10 @@
import pathlib

import yaml

from contentctl.actions.detection_testing.views.DetectionTestingView import (
DetectionTestingView,
)
import pathlib
import yaml

OUTPUT_FOLDER = "test_results"
OUTPUT_FILENAME = "summary.yml"
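The file view above writes its results to `test_results/summary.yml`; a hedged sketch of that output step, assuming PyYAML is installed (the summary dict shape is invented):

```python
import pathlib

import yaml

OUTPUT_FOLDER = "test_results"
OUTPUT_FILENAME = "summary.yml"

summary = {"summary": {"success": True, "total_detections": 3}}  # invented shape
output_dir = pathlib.Path(OUTPUT_FOLDER)
output_dir.mkdir(parents=True, exist_ok=True)
with (output_dir / OUTPUT_FILENAME).open("w") as f:
    yaml.safe_dump(summary, f, sort_keys=False)
```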
@@ -1,9 +1,9 @@
import webbrowser
from threading import Thread
from wsgiref.simple_server import WSGIRequestHandler, make_server

from bottle import template, Bottle, ServerAdapter
from wsgiref.simple_server import make_server, WSGIRequestHandler
import jinja2
import webbrowser
from bottle import Bottle, ServerAdapter, template
from pydantic import ConfigDict

from contentctl.actions.detection_testing.views.DetectionTestingView import (
@@ -39,8 +39,8 @@
{% for containerName, data in currentTestingQueue.items() %}
<tr>
<td>{{ containerName }}</td>
<td>{{ data["name"] }}</td>
<td>{{ data["search"] }}</td>
<td>{{ data["name"] }}</td>
<td>{{ data["search"] }}</td>
</tr>
{% endfor %}
</tbody>
@@ -69,7 +69,7 @@
{% else %}
<td style="font-weight: bold;background-color: #ff9999"><b>False</b></td>
{% endif %}

</tr>
{% endfor %}
{% endfor %}
@@ -118,7 +118,7 @@ def setup(self):
try:
webbrowser.open(f"http://{self.server.host}:{DEFAULT_WEB_UI_PORT}")
except Exception as e:
print(f"Could not open webbrowser for status page: {str(e)}")
print(f"Could not open webbrowser for status page: {e!s}")

def stop(self):
if self.server.server is None:
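The web view combines `bottle`, `wsgiref`, `jinja2`, and `webbrowser` to serve a live status page and open it locally. A self-contained hedged sketch of that wiring — the port value, route, and template below are stand-ins, not the project's actual page (only the `DEFAULT_WEB_UI_PORT` name appears in the source):

```python
import webbrowser
from threading import Thread
from wsgiref.simple_server import WSGIRequestHandler, make_server

from bottle import Bottle, template

DEFAULT_WEB_UI_PORT = 7999  # stand-in value

app = Bottle()

@app.route("/")
def status_page() -> str:
    # bottle's SimpleTemplate stands in here for the jinja2 template in the real view.
    return template("<h1>Testing status: {{state}}</h1>", state="running")

class QuietHandler(WSGIRequestHandler):
    def log_message(self, format, *args):  # silence per-request logging
        pass

server = make_server("localhost", DEFAULT_WEB_UI_PORT, app, handler_class=QuietHandler)
Thread(target=server.serve_forever, daemon=True).start()  # daemon thread stops with the process
try:
    webbrowser.open(f"http://localhost:{DEFAULT_WEB_UI_PORT}")
except Exception as e:
    print(f"Could not open webbrowser for status page: {e!s}")
```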
3 changes: 1 addition & 2 deletions contentctl/actions/doc_gen.py
@@ -1,8 +1,7 @@
import os

from dataclasses import dataclass

from contentctl.input.director import DirectorInputDto, Director, DirectorOutputDto
from contentctl.input.director import Director, DirectorInputDto, DirectorOutputDto
from contentctl.output.doc_md_output import DocMdOutput

