-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathmassive_algorithm_analyzer.py
More file actions
86 lines (67 loc) · 3.11 KB
/
massive_algorithm_analyzer.py
File metadata and controls
86 lines (67 loc) · 3.11 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
from agent_bfs import *
from agent_astar import *
from agent_astar2 import *
import os
class Analyzer():
    """Run an agent's search many times and report averaged statistics.

    The agent and this analyzer communicate through ``analyzer_data.txt``
    in this script's directory: each call to
    ``agent.search(seed=..., massive_analyzer=True)`` is expected to append
    a 6-line record to that file (see ``_read_records``).
    """

    def __init__(self, agent, n_samples):
        # Agent under analysis; must expose search(seed=..., massive_analyzer=...)
        self.agent = agent
        # Number of search runs to sample
        self.n_samples = n_samples

    # Function to generate large, well-dispersed pseudo-random seeds
    def seed_function(self, x):
        """Map an integer sample index ``x`` to a deterministic seed.

        The same index always yields the same seed, so analysis runs are
        reproducible.
        """
        # Large prime constants used to spread the outputs
        a = 15485863
        b = 982451653
        c = 961748941
        # Non-linear polynomial keeps seeds for consecutive indices far apart
        result = (a * x ** 5 + b * x ** 3 + c * x) % (10**10)
        # Ensure the seed is at least 10**6 (i.e. has 7+ digits)
        return result + 10**6 if result < 10**6 else result

    def _read_records(self, file_path):
        """Parse analyzer_data.txt into (scores, times, expanded_nodes, depths).

        Records are 6 lines long; within each record, lines 2-5 hold the
        score, elapsed time, expanded-node count and depth respectively
        (line 1 and line 6 are presumably a header/separator written by the
        agent -- they are skipped). Incomplete trailing records are ignored.
        """
        scores, times, expanded_nodes, depths = [], [], [], []
        with open(file_path, 'r') as data_file:
            lines = data_file.readlines()
        for i in range(0, len(lines), 6):
            # Only accept a record whose four data lines are all present
            if i + 4 < len(lines):
                scores.append(float(lines[i+1].strip()))
                times.append(float(lines[i+2].strip()))
                expanded_nodes.append(int(lines[i+3].strip()))
                depths.append(int(lines[i+4].strip()))
        return scores, times, expanded_nodes, depths

    # Analyze an agent
    def analyze(self):
        """Run ``n_samples`` searches, then parse the data file and print averages.

        Prints average score, time, expanded nodes and depth, plus how many
        runs produced no data (interpreted as timeouts). Returns None.
        """
        # Portable path construction; the original doubled backslashes by
        # hand, which only worked on Windows (and only by accident, since
        # Windows tolerates repeated separators).
        script_directory = os.path.dirname(os.path.abspath(__file__))
        file_path = os.path.join(script_directory, "analyzer_data.txt")
        # Create or truncate the data file
        with open(file_path, 'w') as data_file:
            data_file.write("")
        # Run the search n_samples times; each run appends to the data file
        for i in range(self.n_samples):
            print(f"Sample {i+1}/{self.n_samples}")
            self.agent.search(seed=self.seed_function(i+1), massive_analyzer=True)
        # Read back and aggregate the generated data
        scores, times, expanded_nodes, depths = self._read_records(file_path)
        if not scores:  # Every run timed out (no records were written)
            print("\nNo data to analyze, probably due to timeouts.")
            return
        # Averages over the completed runs only
        avg_score = sum(scores) / len(scores)
        avg_time = sum(times) / len(times)
        avg_expanded_nodes = sum(expanded_nodes) / len(expanded_nodes)
        avg_depth = sum(depths) / len(depths)
        # Report results; runs with no record count as interrupted/timeout
        print("\nAnalysis completed.\n")
        print(f"{self.n_samples - len(scores)} / {self.n_samples} interrupted searches due to timeouts.\n")
        print(f"Average Score: {avg_score}")
        print(f"Average Time: {avg_time}")
        print(f"Average Expanded Nodes: {avg_expanded_nodes}")
        print(f"Average Depth: {avg_depth}")
if __name__ == '__main__':
    # Agent selected for analysis, capped at 120 seconds per search
    chosen_agent = AgentASTAR(time_out=120)
    # How many search runs to average over
    sample_count = 30
    Analyzer(chosen_agent, sample_count).analyze()