|
2 | 2 |
|
3 | 3 | import sys |
4 | 4 | import os |
| 5 | +import geoip2.database |
| 6 | +import socket |
| 7 | +import json |
| 8 | +import fcntl |
| 9 | +import influxdb_client |
| 10 | +from influxdb_client.client.write_api import SYNCHRONOUS |
| 11 | +import time |
| 12 | +from datetime import datetime, timedelta |
| 13 | + |
# Startup banner, then pick up which measurement this data point belongs to.
print ('**************** start *********************')
# argv[5] carries the measurement (bucket) name chosen by the caller.
measurement_name = str(sys.argv[5])
print ('Measurement-name: '+measurement_name)
8 | 17 |
|
9 | | -# argv1 = outsideip, agrv2 = Domain, argv3 length, argv4 tragetip, sys.argv[5] bucketname, sys.argv[6] date, sys.argv[7] asn, sys.argv[8] abuse |
10 | | - |
11 | | - |
| 18 | +# argv[1] = outsideip, agrv[2] = Domain, argv[3] length, argv[4] tragetip, sys.argv[5] bucketname, sys.argv[6] date, sys.argv[7] asn, sys.argv[8] statuscode |
12 | 19 |
|
# Configuration for Persistent Data
DATA_DIR = "/data"
CACHE_FILE = os.path.join(DATA_DIR, "abuseip_cache.json")  # on-disk JSON cache of AbuseIPDB lookups
CACHE_EXPIRATION_HOURS = 48  # re-query an IP via the API after this many hours
INFLUX_TOKEN_FILE = os.path.join(DATA_DIR, "influxdb-token.txt")  # read only when INFLUX_TOKEN env var is unset
ABUSEIP_KEY_FILE = os.path.join(DATA_DIR, "abuseipdb-key.txt")  # checked BEFORE the ABUSEIP_KEY env var
13 | 26 |
|
14 | | -abuseip_key = os.getenv('ABUSEIP_KEY') |
15 | | -if abuseip_key is not None: |
16 | | - import requests |
17 | | - import json |
18 | | - url = 'https://api.abuseipdb.com/api/v2/check' |
19 | | - querystring = { |
20 | | - 'ipAddress': str(sys.argv[1]), |
21 | | - 'maxAgeInDays': '90' |
22 | | - } |
23 | | - headers = { |
24 | | - 'Accept': 'application/json', |
25 | | - 'Key': abuseip_key |
26 | | - } |
27 | | - |
28 | | - response = requests.request(method='GET', url=url, headers=headers, params=querystring) |
29 | | - abuseip_response = json.loads(response.text) |
30 | | - abuseConfidenceScore = str(abuseip_response["data"]["abuseConfidenceScore"]) |
31 | | - totalReports = str(abuseip_response["data"]["totalReports"]) |
32 | | - #print(json.dumps(abuseip_response, sort_keys=True, indent=4)) |
# Abuseipdb cache
# Ensure the data directory exists. exist_ok=True avoids the
# check-then-create race (TOCTOU) when several instances of this
# script start at the same time.
os.makedirs(DATA_DIR, exist_ok=True)
33 | 31 |
|
# Function to load the cache from the file
def load_cache():
    """Return the cached AbuseIPDB data as a dict ({} when no usable cache exists).

    Takes a shared flock so multiple readers can proceed while a writer is
    blocked. A corrupted cache file is renamed aside (never deleted) and an
    empty cache is returned so the script can continue.
    """
    if not os.path.exists(CACHE_FILE):
        return {}
    try:
        with open(CACHE_FILE, 'r') as f:
            fcntl.flock(f.fileno(), fcntl.LOCK_SH)  # Shared lock: allows multiple readers
            try:
                return json.load(f)
            finally:
                # Unlock even when json.load raises (the original skipped
                # this on error and relied on close() releasing the lock).
                fcntl.flock(f.fileno(), fcntl.LOCK_UN)
    except (json.JSONDecodeError, OSError) as e:  # IOError is an alias of OSError
        print(f"Error loading cache: {e}. Renaming corrupted file and starting new cache.")
        # Rename the corrupted file to avoid overwriting it
        corrupted_file = CACHE_FILE + ".corrupted"
        try:
            os.rename(CACHE_FILE, corrupted_file)
            print(f"Corrupted cache renamed to {corrupted_file}. You can inspect/recover it manually.")
        except OSError as rename_error:
            print(f"Failed to rename corrupted file: {rename_error}. Proceeding with empty cache.")
        return {}  # Start fresh
| 52 | + |
# Function to save the cache to the file
def save_cache(cache_data):
    """Atomically persist *cache_data* (a dict) to CACHE_FILE as JSON.

    The previous implementation opened CACHE_FILE with mode 'w', which
    truncates the file BEFORE the exclusive lock is acquired -- a concurrent
    shared-lock reader could observe an empty or partial file (exactly the
    corruption load_cache() has to guard against). Writing to a temp file
    and os.replace()-ing it into place is atomic on POSIX, so readers
    always see a complete file.
    """
    tmp_file = CACHE_FILE + ".tmp"
    try:
        with open(tmp_file, 'w') as f:
            fcntl.flock(f.fileno(), fcntl.LOCK_EX)  # Exclusive lock while writing
            json.dump(cache_data, f, indent=4)
            f.flush()
            os.fsync(f.fileno())  # make sure the data hits disk before the swap
        os.replace(tmp_file, CACHE_FILE)  # atomic swap into place
    except OSError as e:  # IOError is an alias of OSError
        print(f"Error saving cache: {e}")
| 62 | + |
# Function to get AbuseIPDB info, using the cache
def get_abuseip_info(ip_address):
    """Return the AbuseIPDB "data" dict for *ip_address*, or None.

    Results are cached in CACHE_FILE for CACHE_EXPIRATION_HOURS. Returns
    None when no API key is configured, the HTTP request fails, or the API
    response carries no "data" object.
    """
    cache = load_cache()
    current_time = time.time()
    expiry_seconds = CACHE_EXPIRATION_HOURS * 3600

    # Serve from cache when a fresh, non-expired entry exists.
    entry = cache.get(ip_address)
    if entry is not None and current_time - entry.get('timestamp', 0) < expiry_seconds:
        print(f"Cache HIT for IP: {ip_address}")
        return entry['data']

    # Not cached (or expired): resolve the API key. The key file on disk
    # takes precedence over the ABUSEIP_KEY environment variable.
    print(f"Cache MISS for IP: {ip_address}. Fetching from API.")
    if os.path.exists(ABUSEIP_KEY_FILE):
        with open(ABUSEIP_KEY_FILE, 'r') as key_file:
            abuseip_key = key_file.read().strip()
    elif os.getenv('ABUSEIP_KEY') is not None:
        abuseip_key = os.getenv('ABUSEIP_KEY')
    else:
        return None

    try:
        import requests  # local import: only needed once a key is configured
        url = 'https://api.abuseipdb.com/api/v2/check'
        querystring = {'ipAddress': ip_address, 'maxAgeInDays': '90'}
        headers = {'Accept': 'application/json', 'Key': abuseip_key}

        response = requests.request(method='GET', url=url, headers=headers, params=querystring)
        response.raise_for_status()  # Will raise an exception for HTTP error codes

        api_data = response.json().get("data")
        if api_data:
            # Drop expired entries first so the cache file cannot grow
            # without bound, then store the fresh result with its timestamp.
            cache = {ip: e for ip, e in cache.items()
                     if current_time - e.get('timestamp', 0) < expiry_seconds}
            cache[ip_address] = {'timestamp': current_time, 'data': api_data}
            save_cache(cache)
            return api_data
        return None  # API answered, but carried no "data" object
    except requests.exceptions.RequestException as e:
        print(f"API request failed: {e}")
        return None
    except json.JSONDecodeError:
        print("Failed to parse API response.")
        return None
| 107 | + |
# Defaults of "0" keep the later tag/field writes valid when no
# AbuseIPDB key is configured or the lookup fails.
abuseConfidenceScore = "0"
totalReports = "0"

# Resolve the API key: a key file in the data directory takes precedence
# over the ABUSEIP_KEY environment variable (abuseip_key stays None when
# neither source is available).
if os.path.exists(ABUSEIP_KEY_FILE):
    with open(ABUSEIP_KEY_FILE, 'r') as key_fh:
        abuseip_key = key_fh.read().strip()
else:
    abuseip_key = os.getenv('ABUSEIP_KEY')

if abuseip_key:
    lookup = get_abuseip_info(sys.argv[1])
    if lookup:
        abuseConfidenceScore = str(lookup.get("abuseConfidenceScore", "0"))
        totalReports = str(lookup.get("totalReports", "0"))
36 | 123 |
|
37 | | -import geoip2.database |
38 | | -import socket |
39 | | - |
40 | 124 | # IP gets infos from the DB |
41 | 125 | reader = geoip2.database.Reader('/geolite/GeoLite2-City.mmdb') |
42 | 126 | response = reader.city(str(sys.argv[1])) |
|
Zip = response.postal.code
IP = str(sys.argv[1])  # outside (client) IP, already geo-resolved above
Domain = str(sys.argv[2])  # requested domain
length = int(sys.argv[3])  # per the argv comment above: request "length" -- TODO confirm units with caller
Target = str(sys.argv[4])  # proxied target IP
statuscode = int(sys.argv[8])  # HTTP status code of the request
reader.close()
56 | 141 |
|
57 | | -import sys |
58 | | -import geoip2.database |
59 | | - |
| 142 | +asn = str(sys.argv[7]) |
60 | 143 | if asn == 'true': |
61 | 144 | try: |
62 | 145 | reader = geoip2.database.Reader('/geolite/GeoLite2-ASN.mmdb') |
|
69 | 152 | finally: |
70 | 153 | reader.close() |
71 | 154 |
|
72 | | - |
73 | 155 | # print to log |
74 | 156 | print (Country) |
75 | 157 | print (State) |
|
print ('Outside IP: ', IP)
print ('Target IP: ', Target)
print ('Domain: ', Domain)
print ('Statuscode ', statuscode)
# Abuse stats are only meaningful (non-default) when an API key was found.
if abuseip_key:
    print("abuseConfidenceScore: " + abuseConfidenceScore)
    print("totalReports: " + totalReports)
89 | 172 |
|
90 | | -import influxdb_client |
91 | | -from influxdb_client.client.write_api import SYNCHRONOUS |
92 | | - |
93 | | - |
# influx configuration - edit these
npmhome = "/root/.config/NPMGRAF"  # NOTE(review): appears unused in this script -- confirm before removing
ifhost = os.getenv('INFLUX_HOST')  # InfluxDB server URL
ifbucket = os.getenv('INFLUX_BUCKET')  # destination bucket
iforg = os.getenv('INFLUX_ORG')  # InfluxDB organization
99 | | -iftoken = os.getenv('INFLUX_TOKEN') |
100 | 178 |
|
# Resolve the InfluxDB token: the INFLUX_TOKEN environment variable wins,
# otherwise fall back to the token file in the data directory. Without a
# token nothing can be written, so exit with a non-zero status.
if os.getenv('INFLUX_TOKEN') is not None:
    iftoken = os.getenv('INFLUX_TOKEN')
elif os.path.exists(INFLUX_TOKEN_FILE):
    with open(INFLUX_TOKEN_FILE, 'r') as file:
        iftoken = file.read().strip()
else:
    print('No InfluxDB Token found.')
    print('Please add the Token. Exiting now.')
    sys.exit(1)
| 188 | + |
# take a timestamp for this measurement
# Convert the access-log time ("30/May/2023:14:16:48 +0000") to ISO-8601
# ("2023-05-30T14:16:48+00:00") for the InfluxDB point. datetime.strptime
# replaces the hand-rolled slicing + month map, whose `.get(..., '12')`
# fallback silently turned any unrecognized month into December; strptime
# raises on malformed input instead of writing wrong timestamps.
# NOTE(review): %b matches English month abbreviations under the default
# C locale -- confirm the container never sets a non-English LC_TIME.
oldtime = str(sys.argv[6])
time_str = datetime.strptime(oldtime, '%d/%b/%Y:%H:%M:%S %z').isoformat()
print('Measurement Time: ', time_str)
133 | 197 |
|
134 | 198 | ifclient = influxdb_client.InfluxDBClient( |
135 | 199 | url=ifhost, |
|
point.tag("Target", Target)
if asn =='true':
    point.tag("Asn", Asn)
if abuseip_key:
    # NOTE(review): score/report counts as tags can create high tag
    # cardinality in InfluxDB; the same values are also written as fields.
    point.tag("abuseConfidenceScore", abuseConfidenceScore)
    point.tag("totalReports", totalReports)
158 | 222 |
|
|
if asn =='true':
    point.field("Asn", Asn)
point.field("Name", Country)
point.field("length", length)  # from argv[3]
point.field("statuscode", statuscode)  # HTTP status code from argv[8]
point.field("metric", 1)  # constant: one point per processed request
if abuseip_key:
    point.field("abuseConfidenceScore", abuseConfidenceScore)
    point.field("totalReports", totalReports)

point.time(time_str)  # timestamp converted from the access-log entry above
177 | 242 |
|
178 | 243 | write_api.write(bucket=ifbucket, org=iforg, record=point) |
179 | 244 |
|
|
0 commit comments