Skip to content

Commit 77029d8

Browse files
committed
add new tools, add new status checks, move from tabs to sidebar
1 parent f28a40f commit 77029d8

File tree

3 files changed

+273
-1
lines changed

3 files changed

+273
-1
lines changed

cloudfs.py

Lines changed: 114 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,114 @@
1+
#!/usr/bin/python
2+
import sys
3+
import csv
4+
import urllib3
5+
import logging
6+
7+
from pathlib import Path
8+
from getpass import getpass
9+
from cterasdk import GlobalAdmin, portal_types, CTERAException, config, tojsonstr
10+
from cterasdk.lib.filesystem import FileSystem
11+
12+
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
13+
14+
def ask(prompt):
    """Keep prompting on stdin until the user enters a non-empty value."""
    while True:
        answer = input(prompt)
        if answer:
            return answer
19+
20+
def create_folders(global_admin, filepath):
    """Provision CloudFS structures on a CTERA Portal from a CSV file.

    Args:
        global_admin: an authenticated cterasdk ``GlobalAdmin`` session.
        filepath: path to a CSV file with the columns ``folder_group``,
            ``cloud_drive_folder``, ``cloud_drive_folder_description``,
            ``user_owner``, ``zone``, ``zone_description`` and
            ``deduplication_method``.

    Each provisioning step (folder group, cloud drive folder, zone,
    zone membership) is attempted independently; failures are logged as
    warnings and processing continues with the next step/row.
    """
    logger = logging.getLogger()
    try:
        with open(filepath, 'r') as csv_file:
            csv_reader = csv.DictReader(csv_file, delimiter=',')
            for row in csv_reader:
                try:
                    folder_group = row['folder_group']
                    cloud_drive_folder = row['cloud_drive_folder']
                    cloud_drive_folder_description = row['cloud_drive_folder_description']
                    user_owner = row['user_owner']
                    zone_name = row['zone']
                    zone_description = row['zone_description']
                    deduplication_method_type = row['deduplication_method']

                    # "DOMAIN\user" denotes a domain account; anything else is a local user.
                    idx = user_owner.rfind('\\')
                    if idx > 0:
                        domain, user = user_owner.split('\\')
                        owner = portal_types.UserAccount(user, domain)
                    else:
                        owner = portal_types.UserAccount(user_owner)

                    try:
                        global_admin.cloudfs.mkfg(folder_group, owner, deduplication_method_type)
                    except CTERAException as error:
                        # Logger.warning(), not the deprecated Logger.warn()
                        logger.warning('Failed creating folder group. %s', {'name': folder_group, 'error': tojsonstr(error, False)})

                    try:
                        global_admin.cloudfs.mkdir(cloud_drive_folder, folder_group, owner, winacls=True, description=cloud_drive_folder_description)
                    except CTERAException as error:
                        logger.warning('Failed creating cloud drive folder. %s', {'name': cloud_drive_folder, 'error': tojsonstr(error, False)})

                    try:
                        global_admin.zones.add(zone_name, description=zone_description)  # add zone
                    except CTERAException as error:
                        logger.warning('Failed creating zone. %s', {'name': zone_name, 'error': tojsonstr(error, False)})

                    try:
                        cloudfs_folder_helper = portal_types.CloudFSFolderFindingHelper(cloud_drive_folder, owner)
                        logger.info('Adding cloud folders to zone. %s', {'zone': zone_name, 'cloud_drive_folders': [cloud_drive_folder]})
                        global_admin.zones.add_folders(zone_name, [cloudfs_folder_helper])  # add folders
                    except CTERAException as error:
                        logger.warning('Failed adding folders to zone. %s', {'zone': zone_name, 'error': tojsonstr(error, False)})
                except CTERAException as error:
                    print(error)
                    input()  # pause so the operator can read the error before continuing
        global_admin.logout()
    except KeyboardInterrupt:
        pass
75+
76+
def usage():
    """Print a blank line followed by the command-line usage string."""
    print()
    print(f'Usage: {sys.argv[0]} <csv>')
79+
80+
if __name__ == "__main__":

    config.http['ssl'] = 'Trust'  # ignore certificate errors connecting to CTERA Portal
    config.Logging.get().setLevel(logging.INFO)  # enable debug: logging.DEBUG

    args = sys.argv
    if len(args) < 2:
        logging.getLogger().error('You did not specify an input file. Exiting.')
        usage()
        sys.exit(1)  # sys.exit, not quit(); exit non-zero on error

    if len(args) > 2:
        logging.getLogger().error('Too many arguments.')
        usage()
        sys.exit(1)

    filesystem = FileSystem.instance()
    try:
        filepath = args[1]
        logging.getLogger().debug('Looking for input file. %s', {'filepath': filepath})
        info = FileSystem.get_local_file_info(filepath)  # look for config file
        logging.getLogger().debug('Found input file. %s', {'name': info['name'], 'size': info['size']})
    except CTERAException as error:
        # the original format string had no %s placeholder, so the filepath dict was dropped
        logging.getLogger().error('Could not find input file. %s', {'filepath': filepath})
        usage()
        sys.exit(1)

    try:
        # create_folders() requires an authenticated GlobalAdmin session as its
        # first argument; the original called it with the filepath only, which
        # raised TypeError. Prompt for credentials and log in here instead.
        address = ask('Enter CTERA Portal address: ')
        username = ask('Username: ')
        password = getpass('Password: ')
        global_admin = GlobalAdmin(address)
        global_admin.login(username, password)
        logging.getLogger().info('Populating CloudFS Structure on CTERA Portal.')
        create_folders(global_admin, filepath)
        logging.getLogger().info('Completed.')
    except CTERAException as error:
        logging.getLogger().fatal(error.message)
    except KeyboardInterrupt:
        logging.getLogger().fatal('Cancelled by user.')

requirements.txt

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,6 @@
1-
cterasdk
1+
cterasdk
22
Gooey
3+
itsdangerous==2.0.1
4+
Flask==2.1.0
5+
Werkzeug==2.0.1
6+
flask-socketio==4.3.2

smb_audit.py

Lines changed: 154 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,154 @@
1+
import sys
2+
import os
3+
import pandas as pd
4+
from pandas.api.types import CategoricalDtype
5+
import matplotlib.pyplot as plt
6+
from datetime import datetime
7+
import feather
8+
import math
9+
import glob
10+
import logging
11+
12+
def convert_size(size_bytes):
    """Render a byte count as a human-readable string, e.g. ``1.5 KB``."""
    if size_bytes == 0:
        return "0B"
    units = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
    # pick the largest power of 1024 that fits
    exponent = int(math.floor(math.log(size_bytes, 1024)))
    scaled = round(size_bytes / math.pow(1024, exponent), 2)
    return f"{scaled} {units[exponent]}"
20+
21+
def extract_epoch(seq):
    """Concatenate every digit character of *seq* and parse the result as an int."""
    empty = type(seq)()  # '' for str — keeps the join type-generic like the original
    digits = empty.join(ch for ch in seq if ch.isdigit())
    return int(digits)
24+
25+
def convert_time(epoch_time):
    """Convert a string containing an epoch timestamp into a local datetime."""
    epoch_seconds = extract_epoch(epoch_time)
    return datetime.fromtimestamp(epoch_seconds)
27+
28+
def show_ftr_details(df):
    """Log the time span covered by an audit DataFrame (expects a ``local_time`` column)."""
    newest = df.local_time.max()
    oldest = df.local_time.min()
    logging.info('Most recent time of dataset: ' + str(newest))
    logging.info('Oldest time of dataset: ' + str(oldest))
    logging.info('Time between start and end of dataset: ' + str(newest - oldest))
    logging.info("=====================================================")
33+
34+
35+
def parse_audit(source_directory, output_file):
    """Parse SMB audit logs into a single feather file.

    Reads every ``audit.*.log`` file in *source_directory* (pipe-delimited),
    keeps the result/user/operation/time/share/path columns, concatenates
    them into one DataFrame and writes it to ``<output_file>.ftr``.
    Logs the compression ratio and the time span of the dataset.
    """
    pd.set_option('display.max_rows', None)
    # Raw audit log layout; only a subset of columns is actually loaded (see usecols).
    col_names = ["col0", "col1", "col2", "col3", "result", "col5", "user", "col7", "smb_operation_type", "utc_time", "local_time", "col11", "share", "path", "col14", "col15", "col16", "col17"]
    small_dfs = []

    all_files = glob.glob(os.path.join(source_directory, "audit.*.log"))
    total_size = 0

    # Accumulate the raw input size so the final log line can report the ratio.
    for file in all_files:
        total_size += os.path.getsize(file)
    logging.info(all_files)
    # Lazy generator of per-file DataFrames; local_time strings are converted via convert_time.
    # NOTE(review): date_parser is deprecated in pandas 2.x — confirm the pinned pandas version.
    df_from_each_file = (pd.read_csv(f, sep='|', names=col_names, index_col=False, low_memory=False, parse_dates=['local_time'], date_parser=convert_time, usecols=[4, 6, 8, 10, 12, 13]) for f in all_files)

    concatenated_df = pd.concat(df_from_each_file, copy=False)
    #concatenated_df.info()
    #logging.info(concatenated_df.memory_usage(deep=True) / 1e6)

    # Categorical dtype shrinks the repetitive operation-type column considerably.
    concatenated_df.smb_operation_type = concatenated_df.smb_operation_type.astype('category')
    concatenated_df.info()
    logging.info(concatenated_df.memory_usage(deep=True) / 1e6)


    logging.info('Creating feather file at: ' + str(output_file) + ".ftr")
    # reset_index(): feather requires a default RangeIndex.
    concatenated_df.reset_index().to_feather(output_file + ".ftr")
    #if (make_csv):
    # csv_file_name = output_file + ".csv"
    # logging.info('Creating output file at: ' + csv_file_name)
    # concatenated_df.reset_index().to_csv(csv_file_name, index = False, compression = 'gzip')
    # os.rename(csv_file_name, csv_file_name + ".gz")

    ftr_size = os.path.getsize(output_file + ".ftr")
    logging.info("Completed parsing " + str(convert_size(total_size)) + " to " + str(convert_size(ftr_size)))
    logging.info("=====================================================")
    logging.info('Most recent time of dataset: ' + str(concatenated_df.local_time.max()))
    logging.info('Oldest time of dataset: ' + str(concatenated_df.local_time.min()))
    logging.info('Time between start and end of dataset: ' + str(concatenated_df.local_time.max() - concatenated_df.local_time.min()))
    logging.info("=====================================================")
72+
73+
def summarize_audit(ftr_file, time_interval):
    """Summarize a parsed audit feather file: operation totals, top talkers
    and a plot of operation counts bucketed by *time_interval* (e.g. '60min')."""
    separator = "====================================================="
    df = pd.read_feather(ftr_file)
    logging.info('Loading FTR file at: ' + str(ftr_file))
    logging.info(separator)
    show_ftr_details(df)
    logging.info('Totals per SMB operation type for this dataset:\n' + str(df['smb_operation_type'].value_counts()))
    logging.info(separator)
    # Top-10 breakdowns for each dimension of interest.
    for column, label in (('user', 'users'), ('share', 'shares'), ('path', 'paths')):
        logging.info('Top 10 %s for this dataset:\n%s' % (label, df[column].value_counts().nlargest(10)))
        logging.info(separator)

    plt.rc('legend', fontsize=6)
    # One line per operation type, bucketed on local_time.
    counts = df.groupby([df.local_time.dt.floor(time_interval), 'smb_operation_type']).size()
    axes = counts.unstack().plot(colormap='nipy_spectral', x_compat=True)
    axes.legend(loc='best')
    plt.show()
94+
95+
def search_audit(ftr_file, search_field, search_string, show_smb_ops):
    """Log every audit row where *search_field* contains *search_string*
    and the SMB operation type is one of *show_smb_ops*."""
    df = pd.read_feather(ftr_file)
    logging.info('Loading FTR file at: ' + str(ftr_file))
    logging.info("=====================================================")
    show_ftr_details(df)
    field_match = df[search_field].str.contains(search_string)
    op_match = df['smb_operation_type'].isin(show_smb_ops)
    logging.info(df[field_match & op_match].to_string())
103+
104+
def smb_audit(args):
    """Dispatch parsed command-line arguments to Parse / Summarize / Search.

    Expected attributes of *args*:
        is_debug: when truthy, log the full command line and every setting.
        function: one of 'Parse', 'Summarize' or 'Search'.
        Per-function options: source_directory/output_file (Parse),
        ftr_file/time_interval (Summarize), ftr_file/search_field/
        search_string plus one boolean per SMB operation type (Search) —
        a set flag EXCLUDES that operation from the search results.
    """
    # Attribute names on args match these exactly (mixed case preserved).
    smb_op_names = (
        'ACEChanged', 'ACLAdded', 'ACLDeleted', 'AclDenied', 'chown',
        'create', 'createDenied', 'delete', 'deleteDenied', 'move',
        'open', 'OpenDenied', 'setattrib', 'setdacl', 'write',
    )
    try:
        if args.is_debug:
            logging.debug("Usage:\n{0}\n".format(" ".join([x for x in sys.argv])))
            logging.debug("")
            logging.debug("All settings used:")
            for k, v in sorted(vars(args).items()):
                logging.debug("{0}: {1}".format(k, v))

        if args.function == 'Parse':
            parse_audit(args.source_directory, args.output_file)

        if args.function == "Summarize":
            summarize_audit(args.ftr_file, args.time_interval)

        if args.function == "Search":
            # Data-driven replacement for fifteen copy-pasted if-statements:
            # include every op whose exclusion flag is not set.
            show_smb_ops = ['op=' + name for name in smb_op_names if not getattr(args, name)]
            search_audit(args.ftr_file, args.search_field, args.search_string, show_smb_ops)
    except KeyboardInterrupt:
        logging.getLogger().fatal('Cancelled by user.')

0 commit comments

Comments
 (0)