Commit 97490ff

bug fix in html & black formatter

Parent: 115dabc

12 files changed: +206 -185 lines


src/__init__.py

Lines changed: 2 additions & 1 deletion
@@ -1,6 +1,7 @@
 import os
 import yaml
 import json
+from logging import Logger
 from slugify import slugify
 from src.thirdparty import triageutils as triageutils
 from src.thirdparty.logging import get_logger
@@ -73,7 +74,7 @@ def _ParseMapFile(self):
         with open(os.path.join("config", "mapping.json")) as mapp:
             return json.load(mapp)

-    def run(self):
+    def run(self, logger: Logger):
         """Main entry point of the plugin"""
         raise NotImplementedError("[BasePlugin] run() needs to be overriden")
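For context, the new signature means every plugin now receives its logger explicitly when run() is called. A minimal sketch of a conforming plugin follows; it assumes BasePlugin is importable from src and that it exposes an error() helper wrapping its logger (the later hunks in this commit replace self.logger.error with self.error, which suggests such a wrapper, but it is not shown here), so treat the details as illustrative rather than as the project's actual code.

from logging import Logger

from src import BasePlugin  # hypothetical import path for the base class


class ExamplePlugin(BasePlugin):
    """Illustrative plugin honouring the new run(self, logger) contract."""

    def run(self, logger: Logger):
        # The logger is now an explicit argument instead of an implicit attribute.
        logger.info("[example] starting triage step")
        try:
            ...  # plugin-specific triage work would go here
        except Exception as ex:
            # Mirrors the self.error(...) pattern used throughout this commit.
            self.error(f"[example] {ex}")
            raise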

src/plugins/generaptor.py

Lines changed: 28 additions & 19 deletions
@@ -88,8 +88,8 @@ def __init__(self, conf: dict):
         self.filebeat_dir = os.path.join(self.generaptor_dir, "filebeat")
         triageutils.create_directory_path(path=self.filebeat_dir, logger=self.logger)

-        self.activitiescache_share = os.path.join(
-            self.generaptor_dir, "ActivitiesCache"
+        self.activitiescache_share = Path(
+            os.path.join(self.generaptor_dir, "ActivitiesCache")
         )
         triageutils.create_directory_path(
             path=self.activitiescache_share, logger=self.logger
@@ -319,7 +319,7 @@ def generate_plaso_timeline(self, logger: Logger):
                 logger=self.logger,
             )
         except Exception as ex:
-            self.logger.error(f"[generate_plaso_timeline] {ex}")
+            self.error(f"[generate_plaso_timeline] {ex}")

     @triageutils.LOG
     def generate_psort_timeline(self, plasofile: str, logger: Logger) -> str:
@@ -368,7 +368,7 @@ def generate_psort_timeline(self, plasofile: str, logger: Logger) -> str:
             s_file = os.path.join(self.plaso_folder, f"psort-{self.hostname}.jsonl")
             return s_file
         except Exception as ex:
-            self.logger.error(f"[generate_psort_timeline] {ex}")
+            self.error(f"[generate_psort_timeline] {ex}")
             return ""

     @triageutils.LOG
@@ -408,8 +408,8 @@ def get_evtx(self, evtx_folder: Path, logger: Logger) -> list:
         if not evtx_folder:
             raise Exception("No evtx folder")
         records.extend(
-            triageutils.search_files(
-                src=evtx_folder, pattern=".evtx", logger=self.logger
+            triageutils.search_files_by_extension(
+                dir=evtx_folder, extension=".evtx", logger=self.logger
             )
         )
         if len(records):
@@ -695,12 +695,20 @@ def generaptor_parse_evtx(self, logger: Logger):
                 if self.is_logstash_active:
                     _file_infos = triageutils.get_file_informations(filepath=_f)
                     _analytics = triageutils.generate_analytics(logger=self.logger)
-                    _analytics["log"]["file"]["eventcount"] = _res.get("nb_events_read", 0)
-                    _analytics["log"]["file"]["eventsent"] = _res.get("nb_events_sent", 0)
+                    _analytics["log"]["file"]["eventcount"] = _res.get(
+                        "nb_events_read", 0
+                    )
+                    _analytics["log"]["file"]["eventsent"] = _res.get(
+                        "nb_events_sent", 0
+                    )
                     _analytics["log"]["file"]["path"] = str(_f)
                     _analytics["log"]["file"]["size"] = _file_infos.get("fileSize", 0)
-                    _analytics["log"]["file"]["lastaccessed"] = _file_infos.get("lastAccessTime", 0)
-                    _analytics["log"]["file"]["creation"] = _file_infos.get("creationTime", 0)
+                    _analytics["log"]["file"]["lastaccessed"] = _file_infos.get(
+                        "lastAccessTime", 0
+                    )
+                    _analytics["log"]["file"]["creation"] = _file_infos.get(
+                        "creationTime", 0
+                    )
                     _analytics["csirt"]["client"] = self.clientname
                     _analytics["csirt"]["hostname"] = self.hostname
                     _analytics["csirt"]["application"] = "generaptor_parse_evtx"
@@ -742,7 +750,7 @@ def generaptor_parse_mft(self, logger: Logger):
                 )
                 _analyzer.analyze()
             else:
-                self.logger.error(f"[generaptor_parse_mft] No $MFT found")
+                self.error(f"[generaptor_parse_mft] No $MFT found")
         except Exception as ex:
             self.error(f"[generaptor_parse_mft] {str(ex)}")
             raise ex
@@ -765,7 +773,7 @@ def generaptor_parse_usnjrnl(self, logger: Logger):
                 )
                 _analyzer.analyze()
             else:
-                self.logger.error(f"[generaptor_parse_usnjrnl] No $UsnJrnl%3A$J found")
+                self.error(f"[generaptor_parse_usnjrnl] No $UsnJrnl%3A$J found")
         except Exception as ex:
             self.error(f"[generaptor_parse_usnjrnl] {str(ex)}")
             raise ex
@@ -864,17 +872,18 @@ def generaptor_get_consolehost_history(self, logger: Logger):
             ):
                 self.info(f"[generaptor_get_consolehost_history] Parse: {_f}")
                 try:
-                    _username = _f.parts[_f.parts.index('Users')+1]
+                    _username = _f.parts[_f.parts.index("Users") + 1]
                 except Exception as errorname:
                     self.error(f"{errorname}")
                     _username = time.time()
                 _dst = self.psreadline_dir / Path(f"{_username}")
-                triageutils.copy_file(src=_f, dst=_dst, overwrite=True, logger=self.logger)
+                triageutils.copy_file(
+                    src=_f, dst=_dst, overwrite=True, logger=self.logger
+                )
         except Exception as ex:
             self.error(f"[generaptor_get_consolehost_history] {str(ex)}")
             raise ex

-
     @triageutils.LOG
     def run(self, logger: Logger):
         """Fonction principale qui exécute tout le triage de generaptor
@@ -898,10 +907,10 @@ def run(self, logger: Logger):
             if self.is_logstash_active:
                 self.ymlcreator(logger=self.logger)
                 self.check_docker_image(
-                        image_name=self.docker_images["filebeat"]["image"],
-                        tag=self.docker_images["filebeat"]["tag"],
-                        logger=self.logger,
-                    )
+                    image_name=self.docker_images["filebeat"]["image"],
+                    tag=self.docker_images["filebeat"]["tag"],
+                    logger=self.logger,
+                )
                 self.generaptor_filebeat(logger=self.logger)
             if self.config["run"]["generaptor"]["timeline"]:
                 self.info("[generaptor] Run PLASO")
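The get_evtx hunk above swaps triageutils.search_files(src=..., pattern=...) for triageutils.search_files_by_extension(dir=..., extension=...). The project's own implementation of that helper is not part of this diff; the sketch below only illustrates the kind of function the new call site implies, with the keyword names taken from the hunk and everything else assumed.

import logging
from pathlib import Path
from typing import Optional


def search_files_by_extension(
    dir: Path, extension: str, logger: Optional[logging.Logger] = None
) -> list:
    """Recursively collect files under `dir` whose name ends with `extension`."""
    matches = [p for p in Path(dir).rglob(f"*{extension}") if p.is_file()]
    if logger:
        logger.info("found %d '%s' files under %s", len(matches), extension, dir)
    return matches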

src/plugins/hayabusa.py

Lines changed: 16 additions & 8 deletions
@@ -22,17 +22,23 @@ def __init__(self, conf: dict):
                 )
             )
             if _evtx_folder:
-                self.hayabusa_dir = _evtx_folder.parent
+                self.evtx_dir = _evtx_folder.parent
+                self.hayabusa_dir = Path(
+                    os.path.join(self.upload_dir, self.hostname, "Hayabusa")
+                )
+                triageutils.create_directory_path(
+                    path=self.hayabusa_dir, logger=self.logger
+                )
             else:
                 self.error("[HAYABUSA] No evtx folder")
                 raise Exception("[HAYABUSA] No evtx folder")
-            self.output_json = f"{os.path.join(self.hayabusa_dir,self.clientname)}_HAYABUSA_SIGMA.jsonl"
+            self.output_json = f"{self.hayabusa_dir}/HAYABUSA_SIGMA.jsonl"
         except Exception as ex:
             self.error(f"[init] {ex}")
             raise ex

     @triageutils.LOG
-    def exec_hayabusa(self, log_folder=None, logger=None):
+    def exec_hayabusa(self, log_folder: Path, logger=None):
         """Exécution du binaire hayabusa sur un dossier

         Args:
@@ -41,8 +47,6 @@ def exec_hayabusa(self, log_folder=None, logger=None):

         """
         try:
-            if not log_folder:
-                log_folder = self.hayabusa_dir
             cmd = [
                 self.hayabusa_bin_path,
                 "json-timeline",
@@ -142,8 +146,12 @@ def send_analytics_to_elk(self, event_sent: int, logger=None):
             _analytics["log"]["file"]["eventsent"] = event_sent
             _analytics["log"]["file"]["path"] = self.output_json.name
             _analytics["log"]["file"]["size"] = _file_infos.get("fileSize", 0)
-            _analytics["log"]["file"]["lastaccessed"] = _file_infos.get("lastAccessTime", 0)
-            _analytics["log"]["file"]["creation"] = _file_infos.get("creationTime", 0)
+            _analytics["log"]["file"]["lastaccessed"] = _file_infos.get(
+                "lastAccessTime", 0
+            )
+            _analytics["log"]["file"]["creation"] = _file_infos.get(
+                "creationTime", 0
+            )
             _analytics["csirt"]["client"] = self.clientname
             _analytics["csirt"]["hostname"] = self.hostname
             _analytics["csirt"]["application"] = "hayabusa"
@@ -167,7 +175,7 @@ def run(self, logger=None):

         """
         try:
-            self.exec_hayabusa(logger=self.logger)
+            self.exec_hayabusa(log_folder=self.evtx_dir, logger=self.logger)
             if self.is_logstash_active:
                 _event_sent = self.send_to_elk(logger=self.logger)
                 self.send_analytics_to_elk(event_sent=_event_sent, logger=self.logger)

src/plugins/kape.py

Lines changed: 9 additions & 7 deletions
@@ -854,12 +854,14 @@ def kape_get_consolehost_history(self, logger: Logger):
             ):
                 self.info(f"[kape_get_consolehost_history] Parse: {_f}")
                 try:
-                    _username = _f.parts[_f.parts.index('Users')+1]
+                    _username = _f.parts[_f.parts.index("Users") + 1]
                 except Exception as errorname:
                     self.error(f"{errorname}")
                     _username = time.time()
                 _dst = self.psreadline_dir / Path(f"{_username}")
-                triageutils.copy_file(src=_f, dst=_dst, overwrite=True, logger=self.logger)
+                triageutils.copy_file(
+                    src=_f, dst=_dst, overwrite=True, logger=self.logger
+                )
         except Exception as ex:
             self.error(f"[kape_get_consolehost_history] {str(ex)}")
             raise ex
@@ -943,11 +945,11 @@ def run(self, logger: Logger):
             except Exception as err_reg:
                 self.error(f"[kape ERROR] {str(err_reg)}")
         if self.config["run"]["kape"].get("psreadline", False):
-                self.info("[kape] Run PSReadline")
-                try:
-                    self.kape_get_consolehost_history(logger=self.logger)
-                except Exception as err_reg:
-                    self.error(f"[kape ERROR] {str(err_reg)}")
+            self.info("[kape] Run PSReadline")
+            try:
+                self.kape_get_consolehost_history(logger=self.logger)
+            except Exception as err_reg:
+                self.error(f"[kape ERROR] {str(err_reg)}")
         if self.config["run"]["kape"].get("iis", False):
             try:
                 self.info("[KAPE] Run IIS")

src/plugins/o365.py

Lines changed: 2 additions & 2 deletions
@@ -10,7 +10,7 @@ class Plugin(BasePlugin):
     O365 plugin pour triage
     """

-    def __init__(self, conf=None):
+    def __init__(self, conf: dict):
         super().__init__(config=conf)
         self.o365_dir = os.path.join(self.upload_dir, self.hostname, "o365")
         triageutils.create_directory_path(path=self.o365_dir, logger=self.logger)
@@ -77,7 +77,7 @@ def o365_get_csv_files(self, logger=None) -> list:
         return records

     @triageutils.LOG
-    def o365_send_json_results(self, json_file=None, logger=None):
+    def o365_send_json_results(self, json_file: Path, logger=None):
         """Fonction qui envoie les résultats json o365 vers ELK"""
         try:
             try:

src/plugins/orc.py

Lines changed: 4 additions & 3 deletions
@@ -77,9 +77,10 @@ def kill_docker_container(self, logger: Logger):
         _docker.close()

     @triageutils.LOG
-    def rename_orc_file(self, filepath: Path, logger: Logger):
+    def rename_orc_file(self, filepath: Path, logger: Logger, LOGLEVEL: str ="NOLOG"):
         """
         Rename file by keeping only real file name
+        logger and LOGLEVEL are used by LOG decorator

         return:
             Path: file's new path
@@ -99,7 +100,7 @@ def rename_orc_file(self, filepath: Path, logger: Logger):
         _new_path = Path(_path) / Path(_new_name)

         if triageutils.file_exists(file=_new_path, LOGLEVEL="NOLOG"):
-            self.info(f"[rename_orc_file] File exists !")
+            #self.info(f"[rename_orc_file] File exists !")
             _parent = Path(_path) / Path(str(round(time.time() * 1000)))
             triageutils.create_directory_path(path=_parent, LOGLEVEL="NOLOG")
             _new_path = _parent / Path(_new_name).name
@@ -152,7 +153,7 @@ def extract_all_7z(self, logger: Logger):
             for _file in triageutils.search_files_by_extension_generator(
                 src=self.orc_dir, extension=".data", logger=self.logger
             ):
-                records.append(self.rename_orc_file(filepath=_file, logger=self.logger))
+                records.append(self.rename_orc_file(filepath=_file, logger=self.logger, LOGLEVEL="NOLOG"))
             return records
         except Exception as ex:
             self.error(f"[extract_all_7z] {str(ex)}")
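rename_orc_file now accepts a LOGLEVEL argument whose only stated purpose, per the added docstring line, is to be consumed by the @triageutils.LOG decorator. The decorator itself is not included in this commit; the sketch below shows one plausible shape for such a decorator (all names and behaviour are assumptions), just to make clear how passing LOGLEVEL="NOLOG" at the call site in extract_all_7z can silence per-call logging without the wrapped function ever reading the value.

import functools
import logging


def LOG(func):
    """Hypothetical entry/exit logging decorator controlled by a LOGLEVEL kwarg."""

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # Use the caller-supplied logger when present, otherwise a module logger.
        logger = kwargs.get("logger") or logging.getLogger(func.__module__)
        silent = kwargs.get("LOGLEVEL") == "NOLOG"
        if not silent:
            logger.info("enter %s", func.__name__)
        result = func(*args, **kwargs)
        if not silent:
            logger.info("exit %s", func.__name__)
        return result

    return wrapper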
