@@ -19,7 +19,7 @@
ALERTS_DATA_TABLE,
COMPANIES,
ENDPOINTS,
-ALERT_GRAPH_STATISTICS_FUNC_NAME,
+ALERT_GRAPH_STATISTICS_FUNC_NAME
)


@@ -96,7 +96,7 @@ def get_risk_vector_data(
data_to_post = []
risk_vector_data = []
checkpoint_key = "{}".format(company_guid)
-checkpoint_data = self.checkpoint_obj.get_last_data(state)
+checkpoint_data = self.checkpoint_obj.get_last_data(state, table_name=table_name)
last_data = self.checkpoint_obj.get_endpoint_last_data(
checkpoint_data, endpoint, checkpoint_key
)
@@ -124,9 +124,8 @@ def get_risk_vector_data(
self.send_data_to_sentinel(
risk_vector_data, table_name, company_name, endpoint
)
-# data_to_post = str(data_to_post)
self.checkpoint_obj.save_checkpoint(
-state, checkpoint_data, endpoint, checkpoint_key, data_to_post
+state, checkpoint_data, endpoint, "{}_{}".format(table_name, "Checkpoint"), checkpoint_key, data_to_post
)
except BitSightException:
raise BitSightException()
@@ -183,7 +182,7 @@ def get_diligence_historical_statistics_details(self, company_name, company_guid
post_data = []
checkpoint_key = "{}".format(company_guid)
checkpoint_data = self.checkpoint_obj.get_last_data(
-self.diligence_historical_statistics_state
+self.diligence_historical_statistics_state, table_name=DILIGENCE_HISTORICAL_STATISTICS_TABLE
)
last_data = self.checkpoint_obj.get_endpoint_last_data(
checkpoint_data, "diligence_historical-statistics", company_guid
@@ -218,11 +217,11 @@ def get_diligence_historical_statistics_details(self, company_name, company_guid
company_name,
"diligence historical statistics",
)
-# checkpoint_data_to_post = str(checkpoint_data_to_post)
self.checkpoint_obj.save_checkpoint(
self.diligence_historical_statistics_state,
checkpoint_data,
"diligence_historical-statistics",
"{}_{}".format(DILIGENCE_HISTORICAL_STATISTICS_TABLE, "Checkpoint"),
checkpoint_key,
checkpoint_data_to_post,
)
@@ -254,7 +253,7 @@ def get_graph_data(self, company_name, company_guid):
rating_diff = None
last_date = None
checkpoint_key = "{}".format(company_guid)
-checkpoint_data = self.checkpoint_obj.get_last_data(self.graph_state)
+checkpoint_data = self.checkpoint_obj.get_last_data(self.graph_state, table_name=GRAPH_DATA_TABLE)
last_data = self.checkpoint_obj.get_endpoint_last_data(
checkpoint_data, "graph_data", company_guid
)
@@ -307,6 +306,7 @@ def get_graph_data(self, company_name, company_guid):
self.graph_state,
checkpoint_data,
"graph_data",
"{}_{}".format(GRAPH_DATA_TABLE, "Checkpoint"),
checkpoint_key,
data_to_post,
)
@@ -340,7 +340,7 @@ def get_alerts_details(self, company_name, company_guid):
try:
data_to_post = None
checkpoint_key = "{}".format(company_guid)
-checkpoint_data = self.checkpoint_obj.get_last_data(self.alerts_state)
+checkpoint_data = self.checkpoint_obj.get_last_data(self.alerts_state, table_name=ALERTS_DATA_TABLE)
last_date = self.checkpoint_obj.get_endpoint_last_data(
checkpoint_data, "alerts_data", company_guid
)
@@ -403,6 +403,7 @@ def get_alerts_details(self, company_name, company_guid):
self.alerts_state,
checkpoint_data,
"alerts_data",
"{}_{}".format(ALERTS_DATA_TABLE, "Checkpoint"),
checkpoint_key,
data_to_post,
)
@@ -422,7 +423,7 @@ def get_all_copmanies_alerts_graph_statisctics_details(
company_names (list): List of company names.
"""
fetching_index = self.get_last_data_index(
-company_names, self.checkpoint_obj, self.company_state
+company_names, self.checkpoint_obj, self.company_state, table_name="{}_{}".format(ALERTS_DATA_TABLE, "Statistics")
)
for company_index in range(fetching_index + 1, len(logs_data)):
company_name = logs_data[company_index].get("name_s")
@@ -443,6 +444,7 @@
self.company_state,
company_name,
"statisctics_company",
"{}_{}".format(ALERTS_DATA_TABLE, "Statistics_Company_Checkpoint"),
company_name_flag=True,
)

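Across this file, every save_checkpoint call gains a new positional argument built as "<data table>_Checkpoint", and every get_last_data call gains the matching table_name. The sketch below shows what a signature consuming that argument could look like; the CheckpointManager itself is not part of this diff, so the persistence helper (`ingest`) and the exact behaviour are assumptions, not the connector's actual implementation.

```python
# Hedged sketch only: CheckpointManager lives in SharedCode/utils.py and is not shown in this PR.
# `state` is assumed to expose a post() method; `ingest` stands in for the Sentinel ingestion call.
import json
from typing import Any, Callable, Dict, List


def save_checkpoint_sketch(
    state: Any,
    checkpoint_data: Dict[str, Dict[str, Any]],
    endpoint: str,
    checkpoint_table: str,
    checkpoint_key: str,
    data_to_post: Any,
    ingest: Callable[[str, List[dict]], None] = lambda table, rows: None,
) -> None:
    """Persist the checkpoint to the state file and mirror it into a <Table>_Checkpoint table."""
    checkpoint_data.setdefault(endpoint, {})[checkpoint_key] = data_to_post
    state.post(json.dumps(checkpoint_data))  # existing behaviour: keep the checkpoint state up to date
    # Assumed new behaviour implied by the extra argument: also write the key/value pair to a custom
    # log table so the KQL constants added in consts.py can rebuild state if the file is corrupted.
    ingest(checkpoint_table, [{"Key": checkpoint_key, "Value": str(data_to_post)}])
```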
Binary file not shown.
@@ -78,7 +78,7 @@ def get_all_companies_breaches_details(self, company_names, logs_data):
"""
count_companies = 0
fetching_index = self.get_last_data_index(
-company_names, self.checkpoint_obj, self.breach_company_state
+company_names, self.checkpoint_obj, self.breach_company_state, table_name=consts.BREACHES_TABLE_NAME
)
for company_index in range(fetching_index + 1, len(logs_data)):
company_name = logs_data[company_index].get("name_s")
@@ -98,6 +98,7 @@ def get_all_companies_breaches_details(self, company_names, logs_data):
self.breach_company_state,
company_name,
"breaches",
"{}_{}".format(consts.BREACHES_TABLE_NAME, "Company_Checkpoint"),
company_name_flag=True,
)
applogger.info(
@@ -171,7 +172,7 @@ def get_breaches_data(self, company_name, company_guid):
)
)
return
-last_data = self.checkpoint_obj.get_last_data(self.breaches_details_state)
+last_data = self.checkpoint_obj.get_last_data(self.breaches_details_state, table_name=consts.BREACHES_TABLE_NAME)
last_checkpoint_company = self.checkpoint_obj.get_endpoint_last_data(
last_data, "breaches", company_guid
)
@@ -188,6 +189,7 @@
self.breaches_details_state,
last_data,
"breaches",
"{}_{}".format(consts.BREACHES_TABLE_NAME, "Checkpoint"),
checkpoint_key,
checkpoint_date,
)
@@ -11,7 +11,7 @@
COMPANIES_RATING_DETAILS_TABLE_NAME,
COMPANY_DETAIL_TABLE_NAME,
ENDPOINTS,
-COMPANY_DETAILS_FUNC_NAME,
+COMPANY_DETAILS_FUNC_NAME
)
from ..SharedCode.get_logs_data import get_logs_data
from ..SharedCode.logger import applogger
@@ -61,10 +61,10 @@ def get_company_details(self, company_name, company_guid):
post_data_ratings = []
checkpoint_key = "{}".format(company_guid)
checkpoint_data_company = self.checkpoint_obj.get_last_data(
-self.company_detail_state
+self.company_detail_state, table_name=COMPANY_DETAIL_TABLE_NAME
)
checkpoint_data_company_ratings = self.checkpoint_obj.get_last_data(
-self.company_rating_state
+self.company_rating_state, table_name=COMPANIES_RATING_DETAILS_TABLE_NAME
)
last_data_company_details = self.checkpoint_obj.get_endpoint_last_data(
checkpoint_data_company, "companies_details", checkpoint_key
@@ -114,6 +114,7 @@ def get_company_details(self, company_name, company_guid):
self.company_rating_state,
checkpoint_data_company_ratings,
"companies_ratings_details",
"{}_{}".format(COMPANIES_RATING_DETAILS_TABLE_NAME, "Checkpoint"),
checkpoint_key,
data_to_post,
)
@@ -137,6 +138,7 @@ def get_company_details(self, company_name, company_guid):
self.company_detail_state,
checkpoint_data_company,
"companies_details",
"{}_{}".format(COMPANY_DETAIL_TABLE_NAME, "Checkpoint"),
checkpoint_key,
data_to_post,
)
@@ -166,7 +168,7 @@ def get_all_copmanies_details(self, logs_data, company_names):
"""
count_companies = 0
fetching_index = self.get_last_data_index(
-company_names, self.checkpoint_obj, self.company_state
+company_names, self.checkpoint_obj, self.company_state, table_name=COMPANY_DETAIL_TABLE_NAME
)
for company_index in range(fetching_index + 1, len(logs_data)):
company_name = logs_data[company_index].get("name_s")
@@ -186,6 +188,7 @@ def get_all_copmanies_details(self, logs_data, company_names):
self.company_state,
company_name,
"portfolio_company",
"{}_{}".format(COMPANY_DETAIL_TABLE_NAME, "Company_Checkpoint"),
company_name_flag=True,
)
applogger.info(
@@ -13,6 +13,7 @@
FINDINGS_TABLE_NAME,
COMPANIES,
ENDPOINTS,
+FINDING_DETAILS_QUERY
)


@@ -40,7 +41,7 @@ def __init__(self, start_time) -> None:
def get_all_copmanies_findings_details(self, logs_data, company_names):
count_companies = 0
fetching_index = self.get_last_data_index(
-company_names, self.checkpoint_obj, self.company_state
+company_names, self.checkpoint_obj, self.company_state, table_name=FINDINGS_TABLE_NAME
)
for company_index in range(fetching_index + 1, len(logs_data)):
company_name = logs_data[company_index].get("name_s")
@@ -63,6 +64,7 @@ def get_all_copmanies_findings_details(self, logs_data, company_names):
self.company_state,
company_name,
"findings_company",
"{}_{}".format(FINDINGS_TABLE_NAME, "Company_Checkpoint"),
company_name_flag=True,
)

@@ -125,7 +127,7 @@ def get_findings_details(self, company_name, company_guid):
{"risk_category": "Compromised Systems"},
{"risk_category": "User Behavior"},
]
-last_data = self.checkpoint_obj.get_last_data(self.findings_state)
+last_data = self.checkpoint_obj.get_last_data(self.findings_state, table_name=FINDINGS_TABLE_NAME, checkpoint_query=FINDING_DETAILS_QUERY)
findings_url = self.base_url + self.findings_endpoint_path.format(
company_guid
)
@@ -173,6 +175,7 @@ def get_findings_details(self, company_name, company_guid):
self.findings_state,
last_data,
"findings_details",
"{}_{}".format(FINDINGS_TABLE_NAME, "Checkpoint"),
checkpoint_key,
str(data_to_post.date()),
)
@@ -196,6 +199,13 @@ def get_findings_details(self, company_name, company_guid):
c_data["next1"] = self.get_bitsight_data(findings_url, params)
next_link = c_data["next1"].get("links").get("next")
length_results = len(c_data.get("next1").get("results"))
+if length_results == 0:
+applogger.info(
+'BitSight: No new findings found for {} on page {} ({})'.format(
+company_name, page, risk
+)
+)
+break
applogger.info(
"BitSight: Got {} findings for {} on page {}".format(
length_results, company_name, page
@@ -222,6 +232,7 @@
self.findings_state,
last_data,
"findings_details",
"{}_{}".format(FINDINGS_TABLE_NAME, "Checkpoint"),
checkpoint_key,
str(data_to_post.date()),
)
@@ -83,7 +83,7 @@ def get_all_companies_findings_summary_details(self, company_names, logs_data):
"""
count_companies = 0
fetching_index = self.get_last_data_index(
-company_names, self.checkpoint_obj, self.findings_summary_company_state
+company_names, self.checkpoint_obj, self.findings_summary_company_state, table_name=consts.FINDINGS_SUMMARY_TABLE_NAME
)
for company_index in range(fetching_index + 1, len(logs_data)):
company_name = logs_data[company_index].get("name_s")
@@ -103,6 +103,7 @@ def get_all_companies_findings_summary_details(self, company_names, logs_data):
self.findings_summary_company_state,
company_name,
"findings_summary",
"{}_{}".format(consts.FINDINGS_SUMMARY_TABLE_NAME, "Company_Checkpoint"),
company_name_flag=True,
)
applogger.info(
@@ -232,7 +233,7 @@ def create_findings_summary_data(
company_guid (str): GUID of the company.
"""
last_data = self.checkpoint_obj.get_last_data(
-self.findings_summary_details_state
+self.findings_summary_details_state, table_name=consts.FINDINGS_SUMMARY_TABLE_NAME
)
last_checkpoint_company = self.checkpoint_obj.get_endpoint_last_data(
last_data, "findings_summary", company_guid
@@ -276,6 +277,7 @@ def create_findings_summary_data(
self.findings_summary_details_state,
last_data,
"findings_summary",
"{}_{}".format(consts.FINDINGS_SUMMARY_TABLE_NAME, "Checkpoint"),
checkpoint_key,
last_checkpoint_company,
)
@@ -9,7 +9,7 @@
COMPANIES_TABLE_NAME,
ENDPOINTS,
LOGS_STARTS_WITH,
-PORTFOLIO_PAGE_SIZE,
+PORTFOLIO_PAGE_SIZE
)
from ..SharedCode.get_logs_data import get_logs_data
from ..SharedCode.logger import applogger
@@ -8,7 +8,7 @@
from ..SharedCode.azure_sentinel import MicrosoftSentinel
from .bitsight_exception import BitSightException
from .utils import CheckpointManager
-from .consts import API_TOKEN, BASE_URL, LOGS_STARTS_WITH
+from .consts import API_TOKEN, BASE_URL, LOGS_STARTS_WITH, COMPANY_FETCH_QUERY
from .logger import applogger


@@ -36,7 +36,6 @@ def check_environment_var_exist(self, environment_var):
Returns:
bool: True if all environment variables are set, False otherwise.
"""
-__method_name = inspect.currentframe().f_code.co_name
try:
applogger.debug(
"BitSight: check_environment_var_exist: started checking existence of all custom environment variable"
@@ -86,20 +85,21 @@ def generate_auth_token(self):
raise BitSightException()

def get_last_data_index(
-self, company_names, checkpoint_obj: CheckpointManager, company_state
+self, company_names, checkpoint_obj: CheckpointManager, company_state, table_name
):
"""Get the index for fetching last data.

Args:
company_names (list): List of company names.
checkpoint_obj (CheckpointManager): CheckpointManager object.
company_state (str): State of the company.
+table_name (str): Table name from which data should be fetched if the checkpoint file is corrupted.

Returns:
int: Index for fetching last data.
"""
last_company_name = checkpoint_obj.get_last_data(
-company_state, company_name_flag=True
+company_state, company_name_flag=True, table_name=table_name, checkpoint_query=COMPANY_FETCH_QUERY
)
fetching_index = -1
if last_company_name is not None:
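The new table_name and checkpoint_query arguments are documented above as the recovery path when the checkpoint file is corrupted. The sketch below shows one way get_last_data could use them, assuming the query constant is formatted with the table name and run against Log Analytics; `run_query` is a placeholder for whatever query helper the shared code uses, and the column names follow the Key_s/Value_s projection of COMPANY_FETCH_QUERY.

```python
# Hedged sketch only: the real CheckpointManager.get_last_data is not shown in this diff.
# `run_query` is a placeholder for the shared query helper; the control flow is an assumption.
import json
from typing import Any, Callable, Dict, List, Optional


def get_last_data_sketch(
    state: Any,
    table_name: str,
    checkpoint_query: str,
    run_query: Callable[[str], List[Dict[str, str]]] = lambda q: [],
    company_name_flag: bool = False,
) -> Optional[Any]:
    """Read the checkpoint state, falling back to a table query when the file is unreadable."""
    try:
        raw = state.get()  # existing behaviour: read the checkpoint from the state file
        return json.loads(raw) if raw else None
    except (ValueError, TypeError):
        # Assumed recovery path: query "<table_name>_CL" with the supplied KQL constant
        # (here COMPANY_FETCH_QUERY) and rebuild the checkpoint from the returned rows.
        rows = run_query(checkpoint_query.format(table_name))
        if company_name_flag:
            return rows[-1]["Value_s"] if rows else None
        return {row["Key_s"]: row["Value_s"] for row in rows}
```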
@@ -52,3 +52,20 @@
FINDINGS_FUNC_NAME = "Findings:"
COMPANY_DETAILS_FUNC_NAME = "Company Details:"
ALERT_GRAPH_STATISTICS_FUNC_NAME = "Alerts-Graph-statistics Details:"
+PORTFOLIO_COMPANY_QUERY = """{}_CL
+| summarize arg_max(TimeGenerated, *) by guid_g
+| sort by name_s asc
+| project name_s, guid_g""".format(
+COMPANIES_TABLE_NAME
+)
+FINDING_DETAILS_QUERY = """{}_CL
+| summarize arg_max(TimeGenerated, *) by Key_s
+| sort by Key_s asc
+| project Key_s, Value_s"""
+CHECKPOINT_DATA_QUERY = """{}_CL
+| summarize arg_max(TimeGenerated, *) by Key_g
+| sort by Key_g asc
+| project Key_g, Value_s"""
+COMPANY_FETCH_QUERY = """{}_CL
+| summarize arg_max(TimeGenerated, *) by Key_s
+| project Key_s, Value_s"""
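Note that only PORTFOLIO_COMPANY_QUERY is bound to a table immediately; the other three constants keep their {} placeholder, which suggests the caller formats them with the relevant table name at lookup time. A small illustrative example follows (the table name shown is hypothetical; the real values come from the *_TABLE constants):

```python
# Illustrative only: binding a concrete checkpoint table to one of the parameterised query constants.
CHECKPOINT_DATA_QUERY = """{}_CL
| summarize arg_max(TimeGenerated, *) by Key_g
| sort by Key_g asc
| project Key_g, Value_s"""

table = "{}_{}".format("BitSightGraphData", "Checkpoint")  # hypothetical table name
query = CHECKPOINT_DATA_QUERY.format(table)
print(query)  # queries BitSightGraphData_Checkpoint_CL for the latest Value_s per Key_g
```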