Commit f515704

committed: update some typos
1 parent ce26d62 commit f515704

File tree (1 file changed: 21 additions, 23 deletions)

  • security/security-design/fn-datasafe-dbaudit-to-oci-logging/terraform/function/oci-datasafe-audit-to-logging

security/security-design/fn-datasafe-dbaudit-to-oci-logging/terraform/function/oci-datasafe-audit-to-logging/func.py

Lines changed: 21 additions & 23 deletions
@@ -18,7 +18,7 @@
 from fdk import response
 from datetime import datetime, timedelta
 
-#Set Logging level.
+# Set Logging level.
 logger = logging.getLogger()
 logger.setLevel(logging.INFO)
 
@@ -90,10 +90,10 @@ def get_audit_events(data_safe_cl,l_compartment_id, l_sort_order, l_limit, l_sor
             scim_query=l_scim_query
         )
 
-        #Add audit events in pandas Dataframe.
+        # Add audit events in pandas Dataframe.
         ds_audit = pd.DataFrame()
         ds_audit=pd.json_normalize(to_dict(audit_events_response.data), record_path='items')
-        #Paging audit events
+        # Paging audit events
         while (audit_events_response.has_next_page and len(ds_audit) < l_max_auditevents):
             audit_events_response = data_safe_cl.list_audit_events(
                 compartment_id=l_compartment_id,
@@ -105,18 +105,18 @@ def get_audit_events(data_safe_cl,l_compartment_id, l_sort_order, l_limit, l_sor
                 scim_query=l_scim_query,
                 page=audit_events_response.next_page
             )
-            #Add audit events in pandas Dataframe.
+            # Add audit events in pandas Dataframe.
             ds_audit=pd.concat([ds_audit,pd.json_normalize(to_dict(audit_events_response.data), record_path='items')],verify_integrity=True, ignore_index=True)
             logger.info("Paging List audit events from Data Safe.")
         logger.info("Number of audit events imported: %s.", len(ds_audit))
         if (not ds_audit.empty):
-            #To Camel Dataframe Headers.
+            # To Camel Dataframe Headers.
             ds_audit.columns = map(to_camel_case, ds_audit.columns)
-            #Convert timeCollected column in datatime format.
+            # Convert timeCollected column in datatime format.
             ds_audit[headerTimeCollected] = pd.to_datetime(ds_audit[headerTimeCollected], format='mixed', yearfirst=True, utc=True)
-            #Sort DataFrame by 'timeCollected' value.
+            # Sort DataFrame by 'timeCollected' value.
             ds_audit = ds_audit.sort_values(by=headerTimeCollected, ascending=False, ignore_index=True)
-            #Rebuild index of DataFrame.
+            # Rebuild index of DataFrame.
             ds_audit = ds_audit.reset_index(drop=True)
     except Exception as e:
         logger.error("List Audits from Data Safe Error: %s.", e)
@@ -159,7 +159,7 @@ def check_object_from_bucket(l_bucketName, l_objectName, ol_client):
         )
         if list_objects_response.data.objects:
             for o in list_objects_response.data.objects:
-                #File already present in bucket.
+                # File already present in bucket.
                 logger.debug("ObjectName: " + str(o.name) + ".")
                 if o.name == l_objectName:
                     logger.debug("Object file " + l_objectName + " is in a Bucket.")
@@ -168,7 +168,7 @@ def check_object_from_bucket(l_bucketName, l_objectName, ol_client):
                     logger.debug("Object file " + l_objectName + " is not present in a Bucket.")
                     fileExist = False
         else:
-            #File is not present in bucket.
+            # File is not present in bucket.
             logger.debug("Object file " + l_objectName + " is not present in a Bucket.")
             fileExist = False
     except Exception as e:
@@ -179,15 +179,15 @@ def check_object_from_bucket(l_bucketName, l_objectName, ol_client):
     return fileExist
 
 def check_file_lock_bucket(lo_bucketName, lo_objectName, lo_client, lo_fntimeout, lo_current_time):
-    #Check if lock file is in OCI ObjectStorage/Bucket.
+    # Check if lock file is in OCI ObjectStorage/Bucket.
     logger.debug("Check lock file is in a Bucket")
     lockFilePresent = False
     lockFileExist = check_object_from_bucket(lo_bucketName, lo_objectName, lo_client)
     if lockFileExist:
         logger.debug("Check if lock file last modified time is > of fn execution.")
-        #Get Last Modified Time lock file from bucket.
+        # Get Last Modified Time lock file from bucket.
         oblastmodifiedtime_str = get_object_last_modified_time_from_bucket(lo_bucketName, lo_objectName, lo_client)
-        #Covert data in datatimeFormat date: Thu, 15 Jun 2023 10:25:17 GMT.
+        # Covert data in datatimeFormat date: Thu, 15 Jun 2023 10:25:17 GMT.
         oblastmodifiedtime = datetime.strptime(oblastmodifiedtime_str, '%a, %d %b %Y %H:%M:%S %Z')
         oblastmodifiedtime = oblastmodifiedtime + lo_fntimeout
 
@@ -204,7 +204,7 @@ def check_file_lock_bucket(lo_bucketName, lo_objectName, lo_client, lo_fntimeout
     return lockFilePresent
 
 def get_object_from_bucket(r_bucketName, r_objectName, or_client, r_lastAuditEventRecordTime_attr):
-    #Get cursor file with last execution in OCI ObjectStorage/Bucket.
+    # Get cursor file with last execution in OCI ObjectStorage/Bucket.
     logger.debug("Get object " + r_objectName + " file from Bucket.")
     r_namespace = or_client.get_namespace().data
     try:
@@ -220,7 +220,7 @@ def get_object_from_bucket(r_bucketName, r_objectName, or_client, r_lastAuditEve
     return r_lastexecutionupdatime
 
 def delete_object_from_bucket(d_bucketName, d_objectName, d_client):
-    #Delete object from bucket in OCI ObjectStorage/Bucket.
+    # Delete object from bucket in OCI ObjectStorage/Bucket.
     logger.debug("Delete object " + d_objectName + " Bucket.")
     d_namespace = d_client.get_namespace().data
     try:
@@ -233,7 +233,7 @@ def delete_object_from_bucket(d_bucketName, d_objectName, d_client):
 
 
 def get_object_last_modified_time_from_bucket(h_bucketName, h_objectName, h_client):
-    #Get last_modified_time header from file in bucket.
+    # Get last_modified_time header from file in bucket.
     logger.debug("Get last_modified_time header from object " + h_objectName + " in bucket.")
     h_namespace = h_client.get_namespace().data
     try:
@@ -275,7 +275,7 @@ def generate_scim_query(q_last_time_collected, q_actual_time):
     return scim_query
 
 def main(ctx):
-    #Initializing Variables.
+    # Initializing Variables.
     limit = 10000
     access_level = "ACCESSIBLE"
     sort_by = "timeCollected"
@@ -287,7 +287,7 @@ def main(ctx):
     lock_file_name = "lock.json"
     lastAuditEventRecordTime_attr = "lastAuditEventRecordTime"
     ds_dbaudit_events = pd.DataFrame()
-    #Maximun number of audit events collected for each execution. The value 50000 is specific with function timeout equal to 5 mins.
+    # Maximun number of audit events collected for each execution. The value 50000 is specific with function timeout equal to 5 mins.
    max_auditevents = 50000
    try:
        logger.info("Function start.")
@@ -306,7 +306,7 @@ def main(ctx):
             "OCI OS BucketName: " + ociOSTrackerBucketName + "."
         )
 
-        #Calculate fn timeout configured for manage validity of lock file.
+        # Calculate fn timeout configured for manage validity of lock file.
         fndeadlinetime_str = ctx.Deadline()
         fndeadlinetime = datetime.strptime(fndeadlinetime_str, '%Y-%m-%dT%H:%M:%SZ')
         current_time_t = datetime.utcnow()
@@ -331,7 +331,7 @@ def main(ctx):
         logger.debug("Manage file lock in Bucket.")
         check_file_lock = check_file_lock_bucket(ociOSTrackerBucketName, lock_file_name, os_client, fntimeout, current_time_t)
         if check_file_lock:
-            #File lock is present and valid other fn execution is active.
+            # File lock is present and valid other fn execution is active.
             logger.info("File Lock is valid other fn session is yet in execution.")
         else:
             # Step 4: Check if exist file cursor in ObjectStorage/Bucket.
@@ -414,6 +414,4 @@ def handler(ctx, data: io.BytesIO = None):
         pass
     return response.Response(
         ctx, status_code=401, response_data=json.dumps({"error": "exception"})
-    )
-
-
+    )
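For context on the paging pattern these hunks touch, below is a minimal, hypothetical sketch of pulling Data Safe audit events page by page into a pandas DataFrame, following the calls visible in the diff (list_audit_events, has_next_page/next_page, pd.json_normalize, pd.concat). The client setup, the placeholder compartment OCID, and the 50000-event cap are illustrative assumptions, not the committed function's exact configuration.

# Minimal sketch, not the committed function: page Data Safe audit events into a DataFrame.
import oci
import pandas as pd
from oci.util import to_dict

config = oci.config.from_file()                    # assumes a local OCI config profile
data_safe_cl = oci.data_safe.DataSafeClient(config)

compartment_id = "ocid1.compartment.oc1..example"  # hypothetical placeholder OCID
max_auditevents = 50000                            # cap per run, as in the function

audit_events_response = data_safe_cl.list_audit_events(
    compartment_id=compartment_id,
    sort_by="timeCollected",
    sort_order="DESC",
    limit=10000,
)
# Flatten the first page of results into a DataFrame.
ds_audit = pd.json_normalize(to_dict(audit_events_response.data), record_path="items")

# Keep requesting pages until the collection is exhausted or the cap is reached.
while audit_events_response.has_next_page and len(ds_audit) < max_auditevents:
    audit_events_response = data_safe_cl.list_audit_events(
        compartment_id=compartment_id,
        sort_by="timeCollected",
        sort_order="DESC",
        limit=10000,
        page=audit_events_response.next_page,
    )
    ds_audit = pd.concat(
        [ds_audit, pd.json_normalize(to_dict(audit_events_response.data), record_path="items")],
        ignore_index=True,
    )

print(f"Number of audit events imported: {len(ds_audit)}")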
