@@ -66,7 +66,7 @@ def generate_log_entries(audit_events, headAuditEventTime):
    logging.getLogger().debug("End Function - Generate log entries")
    return aus

-def get_audit_events(data_safe_cl,l_compartment_id, l_sort_order, l_limit, l_sort_by, l_compartment_id_in_subtree, l_access_level, l_scim_query, headerTimeCollected):
+def get_audit_events(data_safe_cl,l_compartment_id, l_sort_order, l_limit, l_sort_by, l_compartment_id_in_subtree, l_access_level, l_scim_query, headerTimeCollected, l_max_auditevents):
    logging.getLogger().debug("get DB Audit Events from DataSafe")
    try:
        audit_events_response = data_safe_cl.list_audit_events(
@@ -83,7 +83,7 @@ def get_audit_events(data_safe_cl,l_compartment_id, l_sort_order, l_limit, l_sor
        ds_audit = pd.DataFrame()
        ds_audit = pd.json_normalize(to_dict(audit_events_response.data), record_path='items')
        #Paging audit events
-        while audit_events_response.has_next_page:
+        while (audit_events_response.has_next_page and len(ds_audit) < l_max_auditevents):
            audit_events_response = data_safe_cl.list_audit_events(
                compartment_id=l_compartment_id,
                sort_order=l_sort_order,
@@ -97,6 +97,7 @@ def get_audit_events(data_safe_cl,l_compartment_id, l_sort_order, l_limit, l_sor
            #Add audit events in pandas Dataframe
            ds_audit = pd.concat([ds_audit, pd.json_normalize(to_dict(audit_events_response.data), record_path='items')], verify_integrity=True, ignore_index=True)
            logging.getLogger().info("Paging List audit events from Data Safe")
+        logging.getLogger().info("Number of audit events imported: %s", len(ds_audit))
        if (not ds_audit.empty):
            #To Camel Dataframe Headers
            ds_audit.columns = map(to_camel_case, ds_audit.columns)
@@ -271,14 +272,15 @@ def main(ctx):
    limit = 10000
    access_level = "ACCESSIBLE"
    sort_by = "timeCollected"
-    sort_order = "DESC"
+    sort_order = "ASC"
    compartment_id_in_subtree = True
    headerTimeCollected = "timeCollected"
    headerAuditEventTime = "auditEventTime"
    cursor_file_name = "cursor.json"
    lock_file_name = "lock.json"
    lastAuditEventRecordTime_attr = "lastAuditEventRecordTime"
    ds_dbaudit_events = pd.DataFrame()
+    max_auditevents = 50000

    try:
        logging.getLogger().info("function start")
@@ -347,7 +349,7 @@ def main(ctx):
            logging.getLogger().debug("Generate SCIM Query Done")
            # Step 8: Get DB Audit Events from DataSafe
            logging.getLogger().debug("get DB Audit Events from DataSafe")
-            ds_dbaudit_events = get_audit_events(data_safe_client, ociDataSafeCompartmentOCID, sort_order, limit, sort_by, compartment_id_in_subtree, access_level, scim_query, headerTimeCollected)
+            ds_dbaudit_events = get_audit_events(data_safe_client, ociDataSafeCompartmentOCID, sort_order, limit, sort_by, compartment_id_in_subtree, access_level, scim_query, headerTimeCollected, max_auditevents)
            if not ds_dbaudit_events.empty:
                # Step 9: Get Last Event time DB Audit Collected
                lastdbauditeventcolletcted = ds_dbaudit_events[headerTimeCollected].iloc[0]
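
For reference, a minimal, self-contained sketch of the capped paging pattern this change introduces (not the repository's full function): it assumes an already-configured oci.data_safe.DataSafeClient, reuses the keyword arguments visible in the diff above, and relies on the standard OCI SDK pagination attributes has_next_page / next_page together with the page parameter. The function name fetch_capped_audit_events and its default values are illustrative only.

import logging

import pandas as pd
from oci.util import to_dict


def fetch_capped_audit_events(client, compartment_id, scim_query, max_auditevents=50000, limit=10000):
    # First page of Data Safe audit events (same keyword arguments as in the diff).
    response = client.list_audit_events(
        compartment_id=compartment_id,
        limit=limit,
        sort_by="timeCollected",
        sort_order="ASC",
        compartment_id_in_subtree=True,
        access_level="ACCESSIBLE",
        scim_query=scim_query,
    )
    events = pd.json_normalize(to_dict(response.data), record_path="items")

    # Keep paging only while the service reports more pages AND the local cap
    # (max_auditevents) has not been reached -- the condition added in this change.
    while response.has_next_page and len(events) < max_auditevents:
        response = client.list_audit_events(
            compartment_id=compartment_id,
            limit=limit,
            sort_by="timeCollected",
            sort_order="ASC",
            compartment_id_in_subtree=True,
            access_level="ACCESSIBLE",
            scim_query=scim_query,
            page=response.next_page,  # standard OCI SDK pagination token
        )
        events = pd.concat(
            [events, pd.json_normalize(to_dict(response.data), record_path="items")],
            ignore_index=True,
        )

    logging.getLogger().info("Number of audit events imported: %s", len(events))
    return events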