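"""Collect OCI instance metadata from remote hosts over SSH.

For each host (a single hostname/IP or one per line in an input file), the
script connects with SSH key authentication, queries the OCI instance metadata
service (IMDS v2) endpoints for instance, host, and VNIC details, and writes
the selected fields as CSV to STDOUT or to a file given with --csv.
"""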
import argparse
import os
import sys
import socket
import multiprocessing
import paramiko
import csv
import json

specific_fieldnames = ['displayName', 'hostname', 'privateIp', 'networkBlockId', 'rackid', 'ociAdName', 'id']

def is_valid_file(parser, arg):
    if not os.path.exists(arg):
        parser.error(f"The file {arg} does not exist!")
    else:
        return arg

def is_valid_hostname(parser, arg):
    try:
        socket.gethostbyname(arg)
        return arg
    except socket.error:
        parser.error(f"Invalid hostname or IP address: {arg}")

def json_to_stdout(flattened_results):
    # Write the collected records to STDOUT in CSV format
    writer = csv.DictWriter(sys.stdout, fieldnames=specific_fieldnames)
    writer.writeheader()
    for data in flattened_results:
        writer.writerow(data)

def json_to_csv(flattened_results, csv_file):
    # Write the collected records to a CSV file
    with open(csv_file, mode='w', newline='') as file:
        writer = csv.DictWriter(file, fieldnames=specific_fieldnames)
        writer.writeheader()

        for data in flattened_results:
            writer.writerow(data)

def process_entry(entry, username):
    # Private key used for SSH key pair authentication
    ssh_key = "/home/" + username + "/.ssh/id_rsa"

    # SSH connection details
    ssh_host = entry
    ssh_user = username

    # Create SSH client
    ssh_client = paramiko.SSHClient()
    ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    parsed_data_list = []

    try:
        # Connect to the SSH server using key pair authentication
        ssh_client.connect(ssh_host, username=ssh_user, key_filename=ssh_key)

        # Query the OCI instance metadata service (IMDS v2) from the remote host
        stdin, stdout, stderr = ssh_client.exec_command('curl -H "Authorization: Bearer Oracle" -L http://169.254.169.254/opc/v2/instance/')
        output = stdout.read().decode()
        parsed_instance = json.loads(output)

        stdin, stdout, stderr = ssh_client.exec_command('curl -H "Authorization: Bearer Oracle" -L http://169.254.169.254/opc/v2/host/')
        output = stdout.read().decode()
        parsed_host = json.loads(output)

        stdin, stdout, stderr = ssh_client.exec_command('curl -H "Authorization: Bearer Oracle" -L http://169.254.169.254/opc/v2/vnics/')
        output = stdout.read().decode()
        list_of_vnics = json.loads(output)
        first_vnic = list_of_vnics[0]

        # Merge instance, host, and primary VNIC metadata into a single record
        parsed_data = {**parsed_instance, **parsed_host, **first_vnic}

        # Extract the required fields from parsed_data
        extracted_data = {field: parsed_data.get(field, "") for field in specific_fieldnames}
        parsed_data_list.append(extracted_data)

    except socket.error as e:
        print(f"Error occurred while connecting to {ssh_host}: {e}")
    except paramiko.AuthenticationException as e:
        print(f"Authentication error occurred while connecting to {ssh_host}: {e}")
    except paramiko.SSHException as e:
        print(f"SSH error occurred while connecting to {ssh_host}: {e}")
    except Exception as e:
        print(f"Error occurred while processing {ssh_host}: {e}")

    finally:
        # Close SSH connection
        ssh_client.close()

    # On failure the list is left empty, so callers can flatten results
    # without having to check for None
    return parsed_data_list
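
# Illustrative only: on success, process_entry returns a one-element list shaped
# like the record below (values are placeholders, not real metadata); on failure
# it returns an empty list.
#   [{'displayName': 'example-node-1', 'hostname': 'example-node-1',
#     'privateIp': '10.0.0.2', 'networkBlockId': 'example-block',
#     'rackid': 'example-rack', 'ociAdName': 'example-AD-1',
#     'id': 'ocid1.instance.oc1..example'}]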

def process_entry_wrapper(args):
    # Unpack the (entry, username) tuple passed through pool.map
    entry, username = args
    return process_entry(entry, username)

def main():
    parser = argparse.ArgumentParser(description="Process a file of hosts or a single hostname/IP address and optionally generate a CSV file of results.")
    parser.add_argument('input', metavar='input', type=str, help='Input file or hostname/IP address')
    parser.add_argument('--output-dir', metavar='output_dir', type=str, default='.', help='Output directory to save files (default: current directory)')
    parser.add_argument('--username', metavar='username', type=str, help='Username to use for the SSH connection; defaults to the login username')
    parser.add_argument('--csv', metavar='csv', type=str, help='Path of the CSV file to generate; if omitted, results are written to STDOUT')
    args = parser.parse_args()

    if not args.username:
        args.username = os.getlogin()

    if os.path.isfile(args.input):
        print(f"Processing file: {args.input}")
        with open(args.input, 'r') as file:
            entries = [line.strip() for line in file if line.strip()]

        # Create a pool of worker processes
        pool = multiprocessing.Pool()

        # Run process_entry on each entry in parallel
        results = pool.map(process_entry_wrapper, [(entry, args.username) for entry in entries])
        flattened_results = [item for sublist in results for item in sublist]

        # Close the pool to release resources
        pool.close()
        pool.join()

        # Write the results to a CSV file or to STDOUT
        if args.csv:
            json_to_csv(flattened_results, args.csv)
        else:
            json_to_stdout(flattened_results)

    else:
        print(f"Processing hostname/IP: {args.input}")
        result = process_entry(args.input, args.username)

        # Write the results to a CSV file or to STDOUT
        if args.csv:
            json_to_csv(result, args.csv)
        else:
            json_to_stdout(result)


if __name__ == "__main__":
    main()
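
# Example invocations (the script filename and host list are hypothetical):
#   python collect_host_metadata.py hosts.txt --username opc --csv inventory.csv
#   python collect_host_metadata.py 10.0.0.5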