
Commit 0b86543

add new artefact powershell history file

committed
1 parent 602f288 commit 0b86543
10 files changed: 218 additions, 90 deletions


README.md

Lines changed: 2 additions & 0 deletions
@@ -64,6 +64,7 @@ OU
 - Parsing MPLog
 - Parsing Windows10 Timeline (ActivitiesCache)
 - Parsing $Recycle.Bin
+- Récupération des fichiers d'historique Powershell des utilisateurs

 #### GENERAPTOR Windows

@@ -84,6 +85,7 @@ OU
 - Parsing MPLog
 - Parsing Windows10 Timeline (ActivitiesCache)
 - Parsing $Recycle.Bin
+- Récupération des fichiers d'historique Powershell des utilisateurs

 #### DFIR-ORC

src/plugins/generaptor.py

Lines changed: 53 additions & 15 deletions
@@ -1,6 +1,7 @@
 import os
 import docker
 import yaml
+import time
 from src.thirdparty import triageutils as triageutils
 from src.thirdparty.AESCipher import AESCipher
 from src.thirdparty.ParseEVTX import ParseEVTX
@@ -97,6 +98,9 @@ def __init__(self, conf: dict):
         self.recyclebin_dir = Path(os.path.join(self.generaptor_dir, "RecycleBin"))
         triageutils.create_directory_path(path=self.recyclebin_dir, logger=self.logger)

+        self.psreadline_dir = Path(os.path.join(self.generaptor_dir, "PSReadLine"))
+        triageutils.create_directory_path(path=self.psreadline_dir, logger=self.logger)
+
         self.log_dirs = (
             dict()
         )  # for filebeat volumes: ex {apache: "/home/user/.../elk/apache"}
@@ -689,11 +693,17 @@ def generaptor_parse_evtx(self, logger: Logger):

             # send analytics info
             if self.is_logstash_active:
-                _analytics = triageutils.get_file_informations(filepath=_f)
-                _analytics["numberOfLogRecords"] = _res.get("nb_events_read", 0)
-                _analytics["numberOfEventSent"] = _res.get("nb_events_sent", 0)
-                _analytics["hostname"] = self.hostname
-                _analytics["logfilename"] = _res.get("file", "")
+                _file_infos = triageutils.get_file_informations(filepath=_f)
+                _analytics = triageutils.generate_analytics(logger=self.logger)
+                _analytics["log"]["file"]["eventcount"] = _res.get("nb_events_read", 0)
+                _analytics["log"]["file"]["eventsent"] = _res.get("nb_events_sent", 0)
+                _analytics["log"]["file"]["path"] = str(_f)
+                _analytics["log"]["file"]["size"] = _file_infos.get("fileSize", 0)
+                _analytics["log"]["file"]["lastaccessed"] = _file_infos.get("lastAccessTime", 0)
+                _analytics["log"]["file"]["creation"] = _file_infos.get("creationTime", 0)
+                _analytics["csirt"]["client"] = self.clientname
+                _analytics["csirt"]["hostname"] = self.hostname
+                _analytics["csirt"]["application"] = "generaptor_parse_evtx"
                 triageutils.send_data_to_elk(
                     data=_analytics,
                     ip=_ip,
@@ -843,6 +853,28 @@ def generaptor_parse_recyclebin(self, logger: Logger):
         except Exception as ex:
             self.error(f"[generaptor_parse_recyclebin] {ex}")

+    @triageutils.LOG
+    def generaptor_get_consolehost_history(self, logger: Logger):
+        try:
+            for _f in triageutils.search_files_generator(
+                src=self.zip_destination,
+                pattern="ConsoleHost_history.txt",
+                patterninpath="PSReadLine",
+                strict=True,
+            ):
+                self.info(f"[generaptor_get_consolehost_history] Parse: {_f}")
+                try:
+                    _username = _f.parts[_f.parts.index('Users')+1]
+                except Exception as errorname:
+                    self.error(f"{errorname}")
+                    _username = time.time()
+                _dst = self.psreadline_dir / Path(f"{_username}")
+                triageutils.copy_file(src=_f, dst=_dst, overwrite=True, logger=self.logger)
+        except Exception as ex:
+            self.error(f"[generaptor_get_consolehost_history] {str(ex)}")
+            raise ex
+
+
     @triageutils.LOG
     def run(self, logger: Logger):
         """Fonction principale qui exécute tout le triage de generaptor
@@ -891,7 +923,7 @@ def run(self, logger: Logger):
             except Exception as copy_err:
                 self.error(f"[RUN] {copy_err}")
                 pass
-        if self.config["run"]["generaptor"]["evtx"]:
+        if self.config["run"]["generaptor"].get("evtx", False):
            self.info("[generaptor] Run EVTX")
            if self.config["run"]["generaptor"]["winlogbeat"]:
                evtx_logs = self.get_evtx(
@@ -903,57 +935,63 @@ def run(self, logger: Logger):
                )
            else:
                self.generaptor_parse_evtx(logger=self.logger)
-        if self.config["run"]["generaptor"]["registry"]:
+        if self.config["run"]["generaptor"].get("registry", False):
            self.info("[generaptor] Run Registry")
            try:
                self.generaptor_parse_registry(logger=self.logger)
            except Exception as err_reg:
                self.error(f"[generaptor ERROR] {str(err_reg)}")
-        if self.config["run"]["generaptor"]["mft"]:
+        if self.config["run"]["generaptor"].get("mft", False):
            self.info("[generaptor] Run MFT")
            try:
                self.generaptor_parse_mft(logger=self.logger)
            except Exception as err_reg:
                self.error(f"[generaptor ERROR] {str(err_reg)}")
-        if self.config["run"]["generaptor"]["usnjrnl"]:
+        if self.config["run"]["generaptor"].get("usnjrnl", False):
            self.info("[generaptor] Run UsnJrnl")
            try:
                self.generaptor_parse_usnjrnl(logger=self.logger)
            except Exception as err_reg:
                self.error(f"[generaptor ERROR] {str(err_reg)}")
-        if self.config["run"]["generaptor"]["prefetch"]:
+        if self.config["run"]["generaptor"].get("prefetch", False):
            self.info("[generaptor] Run Prefetch")
            try:
                self.generaptor_parse_prefetch(logger=self.logger)
            except Exception as err_reg:
                self.error(f"[generaptor ERROR] {str(err_reg)}")
-        if self.config["run"]["generaptor"]["mplog"]:
+        if self.config["run"]["generaptor"].get("mplog", False):
            self.info("[generaptor] Run MPLog")
            try:
                self.generaptor_parse_mplog(logger=self.logger)
            except Exception as err_reg:
                self.error(f"[generaptor ERROR] {str(err_reg)}")
-        if self.config["run"]["generaptor"]["activitiescache"]:
+        if self.config["run"]["generaptor"].get("activitiescache", False):
            self.info("[generaptor] Run ActivitiesCache")
            try:
                self.generaptor_parse_activitiescache(logger=self.logger)
            except Exception as err_reg:
                self.error(f"[generaptor ERROR] {str(err_reg)}")
-        if self.config["run"]["generaptor"]["recyclebin"]:
+        if self.config["run"]["generaptor"].get("recyclebin", False):
            self.info("[generaptor] Run Recycle Bin")
            try:
                self.generaptor_parse_recyclebin(logger=self.logger)
            except Exception as err_reg:
                self.error(f"[generaptor ERROR] {str(err_reg)}")
-        if self.config["run"]["generaptor"]["iis"]:
+        if self.config["run"]["generaptor"].get("psreadline", False):
+            self.info("[generaptor] Run PSReadline")
+            try:
+                self.generaptor_get_consolehost_history(logger=self.logger)
+            except Exception as err_reg:
+                self.error(f"[generaptor ERROR] {str(err_reg)}")
+        if self.config["run"]["generaptor"].get("iis", False):
            self.info("[generaptor] Run IIS")
            try:
                res = self.get_iis_logs(logger=self.logger)
            except Exception as err_reg:
                self.error(f"[generaptor ERROR] {str(err_reg)}")
            if self.is_logstash_active:
                self.send_iis_logs(iis_logs=res, logger=self.logger)
-        if self.config["run"]["generaptor"]["timeline"]:
+        if self.config["run"]["generaptor"].get("timeline", False):
            self.info("[generaptor] Run PLASO")
            self.check_docker_image(
                image_name=self.docker_images["plaso"]["image"],
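
For context, a minimal sketch (not part of the commit) of how the new generaptor_get_consolehost_history / kape_get_consolehost_history helpers derive a user name from a collected ConsoleHost_history.txt path: the path component right after "Users" is taken as the user name, and a timestamp is the fallback when that component cannot be found. The function name and sample path below are illustrative only.

```python
from pathlib import Path
import time

def username_from_history_path(history_file: Path) -> str:
    # ConsoleHost_history.txt normally lives under
    # C:\Users\<user>\AppData\Roaming\Microsoft\Windows\PowerShell\PSReadLine\
    # so the component right after "Users" is treated as the user name.
    parts = history_file.parts
    try:
        return parts[parts.index("Users") + 1]
    except (ValueError, IndexError):
        # Same spirit as the plugin's fallback: a timestamp avoids name
        # collisions when the collected path has no "Users" component.
        return str(time.time())

# Illustrative path, as it might appear inside an extracted collection:
p = Path("collection/Users/alice/AppData/Roaming/Microsoft/Windows/PowerShell/PSReadLine/ConsoleHost_history.txt")
print(username_from_history_path(p))  # -> alice
```

In the plugin itself the matching file is then copied into self.psreadline_dir under that user name via triageutils.copy_file, so each user's PowerShell history ends up as a separate artefact.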

src/plugins/hayabusa.py

Lines changed: 11 additions & 5 deletions
@@ -134,13 +134,19 @@ def send_analytics_to_elk(self, event_sent: int, logger=None):
         with open(self.output_json, "r") as jsonl_f:
             _total_events = len(jsonl_f.readlines())
         self.output_json = Path(self.output_json)
-        _analytics = triageutils.get_file_informations(
+        _file_infos = triageutils.get_file_informations(
             filepath=self.output_json
         )
-        _analytics["numberOfLogRecords"] = _total_events
-        _analytics["numberOfEventSent"] = event_sent
-        _analytics["hostname"] = self.hostname
-        _analytics["logfilename"] = self.output_json.name
+        _analytics = triageutils.generate_analytics(logger=self.logger)
+        _analytics["log"]["file"]["eventcount"] = _total_events
+        _analytics["log"]["file"]["eventsent"] = event_sent
+        _analytics["log"]["file"]["path"] = self.output_json.name
+        _analytics["log"]["file"]["size"] = _file_infos.get("fileSize", 0)
+        _analytics["log"]["file"]["lastaccessed"] = _file_infos.get("lastAccessTime", 0)
+        _analytics["log"]["file"]["creation"] = _file_infos.get("creationTime", 0)
+        _analytics["csirt"]["client"] = self.clientname
+        _analytics["csirt"]["hostname"] = self.hostname
+        _analytics["csirt"]["application"] = "hayabusa"
         triageutils.send_data_to_elk(
             data=_analytics,
             ip=ip,
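
The analytics refactor above assumes triageutils.generate_analytics() returns a nested document with log.file.* and csirt.* fields; the commit does not show that function, so the skeleton below is only an inference from the keys the new code populates, not its actual implementation.

```python
# Assumed shape only: inferred from the fields set in this commit,
# not copied from triageutils.generate_analytics() itself.
def analytics_skeleton() -> dict:
    return {
        "log": {
            "file": {
                "eventcount": 0,    # events read from the log file
                "eventsent": 0,     # events actually shipped to ELK
                "path": "",
                "size": 0,
                "lastaccessed": 0,
                "creation": 0,
            }
        },
        "csirt": {
            "client": "",       # self.clientname
            "hostname": "",     # self.hostname
            "application": "",  # e.g. "hayabusa", "generaptor_parse_evtx"
        },
    }
```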

src/plugins/kape.py

Lines changed: 38 additions & 10 deletions
@@ -1,6 +1,7 @@
 import subprocess
 import os
 import json
+import time
 from typing import Optional
 from itertools import islice
 from datetime import datetime, timezone
@@ -839,6 +840,27 @@ def kape_parse_recyclebin(self, logger: Logger):
         except Exception as ex:
             self.error(f"[kape_parse_recyclebin] {ex}")

+    @triageutils.LOG
+    def kape_get_consolehost_history(self, logger: Logger):
+        try:
+            for _f in triageutils.search_files_generator(
+                src=self.zip_destination,
+                pattern="ConsoleHost_history.txt",
+                patterninpath="PSReadLine",
+                strict=True,
+            ):
+                self.info(f"[kape_get_consolehost_history] Parse: {_f}")
+                try:
+                    _username = _f.parts[_f.parts.index('Users')+1]
+                except Exception as errorname:
+                    self.error(f"{errorname}")
+                    _username = time.time()
+                _dst = self.psreadline_dir / Path(f"{_username}")
+                triageutils.copy_file(src=_f, dst=_dst, overwrite=True, logger=self.logger)
+        except Exception as ex:
+            self.error(f"[kape_get_consolehost_history] {str(ex)}")
+            raise ex
+
     @triageutils.LOG
     def run(self, logger: Logger):
         """Fonction principale qui exécute tout le triage de kape
@@ -861,7 +883,7 @@ def run(self, logger: Logger):
             )
         except Exception as ex:
             self.error(f"[Kape ERROR] {str(ex)}")
-        if self.config["run"]["kape"]["evtx"]:
+        if self.config["run"]["kape"].get("evtx", False):
            self.info("[KAPE] Run EVTX")
            try:
                if self.config["run"]["kape"]["winlogbeat"]:
@@ -875,57 +897,63 @@ def run(self, logger: Logger):
                self.info("[kape] EVTX process done")
            except Exception as ex:
                self.error(f"[Kape ERROR] {str(ex)}")
-        if self.config["run"]["kape"]["registry"]:
+        if self.config["run"]["kape"].get("registry", False):
            try:
                self.info("[KAPE] Run Registry")
                self.kape_parse_registry(logger=self.logger)
            except Exception as ex:
                self.error(f"[Kape ERROR] {str(ex)}")
-        if self.config["run"]["kape"]["mft"]:
+        if self.config["run"]["kape"].get("mft", False):
            try:
                self.info("[KAPE] Run MFT")
                self.kape_parse_mft(logger=self.logger)
            except Exception as ex:
                self.error(f"[Kape ERROR] {str(ex)}")
-        if self.config["run"]["kape"]["usnjrnl"]:
+        if self.config["run"]["kape"].get("usnjrnl", False):
            try:
                self.info("[KAPE] Run UsnJrnl")
                self.kape_parse_usnjrnl(logger=self.logger)
            except Exception as ex:
                self.error(f"[Kape ERROR] {str(ex)}")
-        if self.config["run"]["kape"]["prefetch"]:
+        if self.config["run"]["kape"].get("prefetch", False):
            try:
                self.info("[kape] Run Prefetch")
                self.kape_parse_prefetch(logger=self.logger)
            except Exception as ex:
                self.error(f"[Kape ERROR] {str(ex)}")
-        if self.config["run"]["kape"]["mplog"]:
+        if self.config["run"]["kape"].get("mplog", False):
            try:
                self.info("[kape] Run MPLog")
                self.kape_parse_mplog(logger=self.logger)
            except Exception as ex:
                self.error(f"[Kape ERROR] {str(ex)}")
-        if self.config["run"]["kape"]["activitiescache"]:
+        if self.config["run"]["kape"].get("activitiescache", False):
            try:
                self.info("[kape] Run ActivitiesCache")
                self.kape_parse_activitiescache(logger=self.logger)
            except Exception as ex:
                self.error(f"[Kape ERROR] {str(ex)}")
-        if self.config["run"]["kape"]["recyclebin"]:
+        if self.config["run"]["kape"].get("recyclebin", False):
            self.info("[kape] Run Recycle Bin")
            try:
                self.kape_parse_recyclebin(logger=self.logger)
            except Exception as err_reg:
                self.error(f"[kape ERROR] {str(err_reg)}")
-        if self.config["run"]["kape"]["iis"]:
+        if self.config["run"]["kape"].get("psreadline", False):
+            self.info("[kape] Run PSReadline")
+            try:
+                self.kape_get_consolehost_history(logger=self.logger)
+            except Exception as err_reg:
+                self.error(f"[kape ERROR] {str(err_reg)}")
+        if self.config["run"]["kape"].get("iis", False):
            try:
                self.info("[KAPE] Run IIS")
                res = self.get_iis_logs(logger=self.logger)
                if self.is_logstash_active:
                    self.send_iis_logs(iis_logs=res, logger=self.logger)
            except Exception as ex:
                self.error(f"[Kape ERROR] {str(ex)}")
-        if self.config["run"]["kape"]["timeline"]:
+        if self.config["run"]["kape"].get("timeline", False):
            self.info("[KAPE] Run PLASO")
            self.check_docker_image(
                image_name=self.docker_images["plaso"]["image"],
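
The run() changes in both plugins replace direct key lookups with dict.get(key, False), so a triage configuration written before this commit (which has no psreadline key) no longer raises KeyError; the step is simply skipped. A small illustration with a made-up config snippet follows; the real settings come from the project's YAML file.

```python
# Made-up config snippet, not the project's actual configuration.
config = {"run": {"kape": {"evtx": True, "registry": True}}}

# Old style: config["run"]["kape"]["psreadline"] -> KeyError on older configs.
# New style: a missing option just means the step is disabled.
if config["run"]["kape"].get("psreadline", False):
    print("collect PSReadLine ConsoleHost_history.txt files")
else:
    print("psreadline not configured, step skipped")
```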

src/plugins/standalone.py

Lines changed: 22 additions & 12 deletions
@@ -67,13 +67,17 @@ def standalone_hayabusa(self, logger=None):
         # Send analytics
         if self.is_logstash_active:
             self.standalone_input_file = Path(self.standalone_input_file)
-            _analytics = triageutils.get_file_informations(
-                filepath=self.standalone_input_file
-            )
-            _analytics["numberOfLogRecords"] = len(json_data)
-            _analytics["numberOfEventSent"] = _event_sent
-            _analytics["hostname"] = self.hostname
-            _analytics["logfilename"] = self.standalone_input_file.name
+            _file_infos = triageutils.get_file_informations(filepath=self.standalone_input_file)
+            _analytics = triageutils.generate_analytics(logger=self.logger)
+            _analytics["log"]["file"]["eventcount"] = len(json_data)
+            _analytics["log"]["file"]["eventsent"] = _event_sent
+            _analytics["log"]["file"]["path"] = self.standalone_input_file.name
+            _analytics["log"]["file"]["size"] = _file_infos.get("fileSize", 0)
+            _analytics["log"]["file"]["lastaccessed"] = _file_infos.get("lastAccessTime", 0)
+            _analytics["log"]["file"]["creation"] = _file_infos.get("creationTime", 0)
+            _analytics["csirt"]["client"] = self.clientname
+            _analytics["csirt"]["hostname"] = self.hostname
+            _analytics["csirt"]["application"] = "standalone_hayabusa"
             triageutils.send_data_to_elk(
                 data=_analytics,
                 ip=ip,
@@ -421,11 +425,17 @@ def run(self, logger=None):

             # send analytics info
             if self.is_logstash_active:
-                _analytics = triageutils.get_file_informations(filepath=_f)
-                _analytics["numberOfLogRecords"] = _res.get("nb_events_read", 0)
-                _analytics["numberOfEventSent"] = _res.get("nb_events_sent", 0)
-                _analytics["hostname"] = self.hostname
-                _analytics["logfilename"] = _res.get("file", "")
+                _file_infos = triageutils.get_file_informations(filepath=_f)
+                _analytics = triageutils.generate_analytics(logger=self.logger)
+                _analytics["log"]["file"]["eventcount"] = _res.get("nb_events_read", 0)
+                _analytics["log"]["file"]["eventsent"] = _res.get("nb_events_sent", 0)
+                _analytics["log"]["file"]["path"] = _res.get("file", "")
+                _analytics["log"]["file"]["size"] = _file_infos.get("fileSize", 0)
+                _analytics["log"]["file"]["lastaccessed"] = _file_infos.get("lastAccessTime", 0)
+                _analytics["log"]["file"]["creation"] = _file_infos.get("creationTime", 0)
+                _analytics["csirt"]["client"] = self.clientname
+                _analytics["csirt"]["hostname"] = self.hostname
+                _analytics["csirt"]["application"] = "standalone_parse_evtx"
                 triageutils.send_data_to_elk(
                     data=_analytics,
                     ip=_ip,
