Skip to content

Commit 09656b1

Browse files
authored
release v3.0.0
1 parent 73e946a commit 09656b1

13 files changed

+3754
-165
lines changed

Dockerfile

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# Stage 1: Build environment
2-
FROM python:3.13-slim AS builder
2+
FROM python:3.14-slim AS builder
33

4-
LABEL maintainer="npmgrafstats@smilebasti.myhome-server.de"
4+
LABEL maintainer="[email protected]"
55

66
# Setup home folder
77
RUN mkdir -p /root/.config/NPMGRAF
@@ -23,21 +23,22 @@ COPY requirements.txt /root/.config/NPMGRAF/requirements.txt
2323
RUN pip install --no-cache-dir -r /root/.config/NPMGRAF/requirements.txt
2424

2525
# Stage 2: Runtime environment
26-
FROM python:3.13-slim
26+
FROM python:3.14-slim
2727

2828
# Setup home folder
2929
RUN mkdir -p /root/.config/NPMGRAF
3030

3131
# Install curl in the final image
32-
RUN apt-get update && apt-get install -y --no-install-recommends \
33-
curl \
34-
&& rm -rf /var/lib/apt/lists/*
32+
RUN apt-get update && \
33+
apt-get install -y --no-install-recommends curl && \
34+
apt-get clean && \
35+
rm -rf /var/lib/apt/lists/*
3536

3637
# Copy the installed grepcidr binary from the builder stage
3738
COPY --from=builder /usr/local/bin/grepcidr /usr/local/bin/grepcidr
3839

3940
# Copy installed Python packages from the builder stage
40-
COPY --from=builder /usr/local/lib/python3.13/site-packages /usr/local/lib/python3.13/site-packages
41+
COPY --from=builder /usr/local/lib/python3.14/site-packages /usr/local/lib/python3.14/site-packages
4142

4243
# Copy Python scripts and set permissions
4344
COPY Getipinfo.py /root/.config/NPMGRAF/Getipinfo.py

Dockerfile-arm

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
FROM python:3
22

3-
LABEL maintainer="npmgrafstats@smilebasti.myhome-server.de"
3+
LABEL maintainer="[email protected]"
44

55
## setup home folder
66
RUN mkdir -p /root/.config/NPMGRAF
@@ -14,10 +14,6 @@ RUN git clone https://github.com/ryantig/grepcidr.git /opt/grepcidr \
1414
&& apt-get clean \
1515
&& rm -rf /var/lib/apt/lists/*
1616

17-
RUN apt-get remove git build-essential -y\
18-
&& rm -rf /var/cache/apk/* \
19-
&& apt-get autoremove -y && apt-get clean
20-
2117
COPY requirements.txt /root/.config/NPMGRAF/requirements.txt
2218
RUN pip install --no-cache-dir -r /root/.config/NPMGRAF/requirements.txt
2319

Getipinfo.py

Lines changed: 135 additions & 70 deletions
Original file line numberDiff line numberDiff line change
@@ -2,41 +2,125 @@
22

33
import sys
44
import os
5+
import geoip2.database
6+
import socket
7+
import json
8+
import fcntl
9+
import influxdb_client
10+
from influxdb_client.client.write_api import SYNCHRONOUS
11+
import time
12+
from datetime import datetime, timedelta
13+
514
print ('**************** start *********************')
615
measurement_name = (sys.argv[5]) # get measurement from argv
716
print ('Measurement-name: '+measurement_name)
817

9-
# argv1 = outsideip, argv2 = Domain, argv3 length, argv4 targetip, sys.argv[5] bucketname, sys.argv[6] date, sys.argv[7] asn, sys.argv[8] abuse
10-
11-
18+
# argv[1] = outsideip, argv[2] = Domain, argv[3] length, argv[4] targetip, sys.argv[5] bucketname, sys.argv[6] date, sys.argv[7] asn, sys.argv[8] statuscode
1219

20+
# Configuration for Persistent Data
21+
DATA_DIR = "/data"
22+
CACHE_FILE = os.path.join(DATA_DIR, "abuseip_cache.json")
23+
CACHE_EXPIRATION_HOURS = 48
24+
INFLUX_TOKEN_FILE = os.path.join(DATA_DIR, "influxdb-token.txt")
25+
ABUSEIP_KEY_FILE = os.path.join(DATA_DIR, "abuseipdb-key.txt")
1326

14-
abuseip_key = os.getenv('ABUSEIP_KEY')
15-
if abuseip_key is not None:
16-
import requests
17-
import json
18-
url = 'https://api.abuseipdb.com/api/v2/check'
19-
querystring = {
20-
'ipAddress': str(sys.argv[1]),
21-
'maxAgeInDays': '90'
22-
}
23-
headers = {
24-
'Accept': 'application/json',
25-
'Key': abuseip_key
26-
}
27-
28-
response = requests.request(method='GET', url=url, headers=headers, params=querystring)
29-
abuseip_response = json.loads(response.text)
30-
abuseConfidenceScore = str(abuseip_response["data"]["abuseConfidenceScore"])
31-
totalReports = str(abuseip_response["data"]["totalReports"])
32-
#print(json.dumps(abuseip_response, sort_keys=True, indent=4))
27+
# Abuseipdb cache
28+
# Ensure the data directory exists
29+
if not os.path.exists(DATA_DIR):
30+
os.makedirs(DATA_DIR)
3331

32+
# Function to load the cache from the file
33+
def load_cache():
34+
if not os.path.exists(CACHE_FILE):
35+
return {}
36+
try:
37+
with open(CACHE_FILE, 'r') as f:
38+
fcntl.flock(f.fileno(), fcntl.LOCK_SH) # Shared lock: allows multiple readers
39+
data = json.load(f)
40+
fcntl.flock(f.fileno(), fcntl.LOCK_UN) # Unlock
41+
return data
42+
except (json.JSONDecodeError, IOError, OSError) as e:
43+
print(f"Error loading cache: {e}. Renaming corrupted file and starting new cache.")
44+
# Rename the corrupted file to avoid overwriting it
45+
corrupted_file = CACHE_FILE + ".corrupted"
46+
try:
47+
os.rename(CACHE_FILE, corrupted_file)
48+
print(f"Corrupted cache renamed to {corrupted_file}. You can inspect/recover it manually.")
49+
except OSError as rename_error:
50+
print(f"Failed to rename corrupted file: {rename_error}. Proceeding with empty cache.")
51+
return {} # Start fresh
52+
53+
# Function to save the cache to the file
54+
def save_cache(cache_data):
55+
try:
56+
with open(CACHE_FILE, 'w') as f:
57+
fcntl.flock(f.fileno(), fcntl.LOCK_EX) # Exclusive lock: blocks all other access
58+
json.dump(cache_data, f, indent=4)
59+
fcntl.flock(f.fileno(), fcntl.LOCK_UN) # Unlock
60+
except (IOError, OSError) as e:
61+
print(f"Error saving cache: {e}")
62+
63+
# Function to get AbuseIPDB info, using the cache
64+
def get_abuseip_info(ip_address):
65+
cache = load_cache()
66+
current_time = time.time()
67+
68+
# Check if a valid, non-expired entry exists in the cache
69+
if ip_address in cache:
70+
entry = cache[ip_address]
71+
entry_time = entry.get('timestamp', 0)
72+
if current_time - entry_time < CACHE_EXPIRATION_HOURS * 3600:
73+
print(f"Cache HIT for IP: {ip_address}")
74+
return entry['data']
75+
76+
# If not in cache or expired, fetch from API (Cache MISS)
77+
print(f"Cache MISS for IP: {ip_address}. Fetching from API.")
78+
if os.path.exists(ABUSEIP_KEY_FILE):
79+
with open(ABUSEIP_KEY_FILE, 'r') as file:
80+
abuseip_key = file.read().strip()
81+
elif os.getenv('ABUSEIP_KEY') is not None:
82+
abuseip_key = os.getenv('ABUSEIP_KEY')
83+
else:
84+
return None
3485

35-
asn = str(sys.argv[7])
86+
try:
87+
import requests
88+
url = 'https://api.abuseipdb.com/api/v2/check'
89+
querystring = {'ipAddress': ip_address, 'maxAgeInDays': '90'}
90+
headers = {'Accept': 'application/json', 'Key': abuseip_key}
91+
92+
response = requests.request(method='GET', url=url, headers=headers, params=querystring)
93+
response.raise_for_status() # Will raise an exception for HTTP error codes
94+
95+
api_data = response.json().get("data")
96+
if api_data:
97+
# Update cache with new data and timestamp
98+
cache[ip_address] = {'timestamp': current_time, 'data': api_data}
99+
save_cache(cache)
100+
return api_data
101+
except requests.exceptions.RequestException as e:
102+
print(f"API request failed: {e}")
103+
return None
104+
except json.JSONDecodeError:
105+
print("Failed to parse API response.")
106+
return None
107+
108+
abuseConfidenceScore = "0"
109+
totalReports = "0"
110+
abuseip_key = None
111+
112+
if os.path.exists(ABUSEIP_KEY_FILE):
113+
with open(ABUSEIP_KEY_FILE, 'r') as file:
114+
abuseip_key = file.read().strip()
115+
elif os.getenv('ABUSEIP_KEY') is not None:
116+
abuseip_key = os.getenv('ABUSEIP_KEY')
117+
118+
if abuseip_key:
119+
abuseip_data = get_abuseip_info(sys.argv[1])
120+
if abuseip_data:
121+
abuseConfidenceScore = str(abuseip_data.get("abuseConfidenceScore", "0"))
122+
totalReports = str(abuseip_data.get("totalReports", "0"))
36123

37-
import geoip2.database
38-
import socket
39-
40124
# IP gets infos from the DB
41125
reader = geoip2.database.Reader('/geolite/GeoLite2-City.mmdb')
42126
response = reader.city(str(sys.argv[1]))
@@ -50,13 +134,12 @@
50134
Zip = response.postal.code
51135
IP = str(sys.argv[1])
52136
Domain = str(sys.argv[2])
53-
duration = int(sys.argv[3])
137+
length = int(sys.argv[3])
54138
Target = str(sys.argv[4])
139+
statuscode = int(sys.argv[8])
55140
reader.close()
56141

57-
import sys
58-
import geoip2.database
59-
142+
asn = str(sys.argv[7])
60143
if asn == 'true':
61144
try:
62145
reader = geoip2.database.Reader('/geolite/GeoLite2-ASN.mmdb')
@@ -69,7 +152,6 @@
69152
finally:
70153
reader.close()
71154

72-
73155
# print to log
74156
print (Country)
75157
print (State)
@@ -83,53 +165,35 @@
83165
print ('Outside IP: ', IP)
84166
print ('Target IP: ', Target)
85167
print ('Domain: ', Domain)
86-
if abuseip_key is not None:
168+
print ('Statuscode ', statuscode)
169+
if abuseip_key:
87170
print("abuseConfidenceScore: " + abuseConfidenceScore)
88171
print("totalReports: " + totalReports)
89172

90-
import influxdb_client
91-
from influxdb_client.client.write_api import SYNCHRONOUS
92-
93-
94173
# influx configuration - edit these
95174
npmhome = "/root/.config/NPMGRAF"
96175
ifhost = os.getenv('INFLUX_HOST')
97176
ifbucket = os.getenv('INFLUX_BUCKET')
98177
iforg = os.getenv('INFLUX_ORG')
99-
iftoken = os.getenv('INFLUX_TOKEN')
100178

179+
if os.getenv('INFLUX_TOKEN') is not None:
180+
iftoken = os.getenv('INFLUX_TOKEN')
181+
elif os.path.exists(INFLUX_TOKEN_FILE):
182+
with open(INFLUX_TOKEN_FILE, 'r') as file:
183+
iftoken = file.read().strip()
184+
else:
185+
print('No InfluxDB Token found.')
186+
print('Please add the Token. Exiting now.')
187+
sys.exit(1)
188+
101189
# take a timestamp for this measurement
102-
oldtime = str(sys.argv[6]) #30/May/2023:14:16:48 +0000 to 2009-11-10T23:00:00.123456Z
190+
oldtime = str(sys.argv[6]) #30/May/2023:14:16:48 +0000 to 2009-11-10T23:00:00+00:00 (+00:00 is Timezone)
103191
#transform month
104-
month = oldtime[3:6]
105-
if month == 'Jan':
106-
month = '01'
107-
elif month =='Feb':
108-
month = '02'
109-
elif month =='Mar':
110-
month = '03'
111-
elif month =='Apr':
112-
month = '04'
113-
elif month =='May':
114-
month = '05'
115-
elif month =='Jun':
116-
month = '06'
117-
elif month =='Jul':
118-
month = '07'
119-
elif month =='Aug':
120-
month = '08'
121-
elif month =='Sep':
122-
month = '09'
123-
elif month =='Oct':
124-
month = '10'
125-
elif month =='Nov':
126-
month = '11'
127-
else:
128-
month = '12'
129-
192+
month_map = {'Jan': '01', 'Feb': '02', 'Mar': '03', 'Apr': '04', 'May': '05', 'Jun': '06', 'Jul': '07', 'Aug': '08', 'Sep': '09', 'Oct': '10', 'Nov': '11', 'Dec': '12'}
193+
month = month_map.get(oldtime[3:6], '12')
130194
# build new time
131-
time=oldtime[7:11]+'-'+month+'-'+oldtime[0:2]+'T'+oldtime[12:20]+oldtime[21:24]+':'+oldtime[24:26]
132-
print('Measurement Time: ', time)
195+
time_str = f"{oldtime[7:11]}-{month}-{oldtime[0:2]}T{oldtime[12:20]}{oldtime[21:24]}:{oldtime[24:26]}"
196+
print('Measurement Time: ', time_str)
133197

134198
ifclient = influxdb_client.InfluxDBClient(
135199
url=ifhost,
@@ -152,7 +216,7 @@
152216
point.tag("Target", Target)
153217
if asn =='true':
154218
point.tag("Asn", Asn)
155-
if abuseip_key is not None:
219+
if abuseip_key:
156220
point.tag("abuseConfidenceScore", abuseConfidenceScore)
157221
point.tag("totalReports", totalReports)
158222

@@ -167,13 +231,14 @@
167231
if asn =='true':
168232
point.field("Asn", Asn)
169233
point.field("Name", Country)
170-
point.field("duration", duration)
234+
point.field("length", length)
235+
point.field("statuscode", statuscode)
171236
point.field("metric", 1)
172-
if abuseip_key is not None:
237+
if abuseip_key:
173238
point.field("abuseConfidenceScore", abuseConfidenceScore)
174239
point.field("totalReports", totalReports)
175240

176-
point.time(time)
241+
point.time(time_str)
177242

178243
write_api.write(bucket=ifbucket, org=iforg, record=point)
179244

0 commit comments

Comments
 (0)