-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathquick_benchmark.py
More file actions
79 lines (60 loc) · 2.45 KB
/
quick_benchmark.py
File metadata and controls
79 lines (60 loc) · 2.45 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
#!/usr/bin/env python3
"""Quick performance benchmark for Sentinel Shield"""
import sys
import os
import time
import statistics
# Add src to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'src'))
from shield.core.piimasker import shield
from shield.models.schemas import MaskRequest, ProcessingMode
def run_quick_benchmark(iterations: int = 10, throughput_duration: float = 5.0) -> None:
    """Run a quick latency and throughput benchmark against Sentinel Shield.

    Prints per-test-case average and p95 latency for each processing mode,
    then a sustained-throughput figure (requests/second).

    Args:
        iterations: Timed calls per test case in the latency section.
        throughput_duration: Seconds to run the throughput loop.
    """
    print("🛡️ SENTINEL SHIELD QUICK BENCHMARK")
    print("=" * 40)

    # (label, input text) pairs covering simple → complex PII payloads.
    test_cases = [
        ("Simple Email", "Contact john@example.com"),
        ("Medium PII", "Email: user@test.com, Phone: (555) 123-4567"),
        ("Complex Document", "Customer service@company.com ordered item #12345. Card: 4532-1234-5678-9012. API: sk_abcdef123456")
    ]

    # Latency: time each test case in each processing mode.
    for mode in [ProcessingMode.SPEED, ProcessingMode.BALANCED]:
        print(f"\n{mode.value.upper()} MODE:")
        print("-" * 20)

        latencies = []
        for name, text in test_cases:
            request = MaskRequest(text=text, mode=mode)

            times = []
            for _ in range(iterations):
                start = time.perf_counter()
                shield.mask(request)
                times.append((time.perf_counter() - start) * 1000)  # ms

            avg_time = statistics.mean(times)
            # Nearest-rank p95. (The original indexed sorted(times)[8],
            # which for 10 samples is the 90th percentile, not p95.)
            ordered = sorted(times)
            p95_time = ordered[min(len(ordered) - 1, int(0.95 * len(ordered)))]
            latencies.append(avg_time)

            print(f"{name:15}: {avg_time:6.2f}ms avg, {p95_time:6.2f}ms p95")

        overall_avg = statistics.mean(latencies)
        print(f"Overall Average: {overall_avg:.2f}ms")

    # Throughput: how many masking requests complete in a fixed window.
    print(f"\nTHROUGHPUT TEST:")
    print("-" * 20)

    test_text = "Contact support@company.com or call (555) 123-4567"
    request = MaskRequest(text=test_text, mode=ProcessingMode.BALANCED)

    # Warm up so one-time initialization cost doesn't skew the measurement.
    for _ in range(5):
        shield.mask(request)

    # perf_counter is monotonic; time.time() can jump (e.g. NTP adjustment)
    # mid-benchmark and corrupt the elapsed-time measurement.
    start_time = time.perf_counter()
    requests_completed = 0
    while time.perf_counter() - start_time < throughput_duration:
        shield.mask(request)
        requests_completed += 1

    elapsed = time.perf_counter() - start_time
    throughput = requests_completed / elapsed
    print(f"Completed {requests_completed} requests in {elapsed:.2f} seconds")
    print(f"Throughput: {throughput:.1f} requests/second")
# Run the benchmark only when executed as a script, not on import.
if __name__ == "__main__":
    run_quick_benchmark()