Skip to content

Commit 4a6fb6f

Browse files
committed
Update process_parallel.py
1 parent 79a9e9e commit 4a6fb6f

File tree

1 file changed

+7
-11
lines changed

1 file changed

+7
-11
lines changed

openevolve/process_parallel.py

Lines changed: 7 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -15,6 +15,7 @@
1515

1616
from openevolve.config import Config
1717
from openevolve.database import Program, ProgramDatabase
18+
from openevolve.utils.metrics_utils import safe_numeric_average
1819

1920
logger = logging.getLogger(__name__)
2021

@@ -145,8 +146,6 @@ def _run_iteration_worker(
145146
]
146147

147148
# Sort by metrics for top programs
148-
from openevolve.utils.metrics_utils import safe_numeric_average
149-
150149
island_programs.sort(
151150
key=lambda p: p.metrics.get("combined_score", safe_numeric_average(p.metrics)),
152151
reverse=True,
@@ -530,8 +529,6 @@ async def run_evolution(
530529
"combined_score" not in child_program.metrics
531530
and not self._warned_about_combined_score
532531
):
533-
from openevolve.utils.metrics_utils import safe_numeric_average
534-
535532
avg_score = safe_numeric_average(child_program.metrics)
536533
logger.warning(
537534
f"⚠️ No 'combined_score' metric found in evaluation results. "
@@ -580,14 +577,13 @@ async def run_evolution(
580577
current_score = None
581578
if self.config.early_stopping_metric in child_program.metrics:
582579
current_score = child_program.metrics[self.config.early_stopping_metric]
580+
elif self.config.early_stopping_metric == "combined_score":
581+
# Default metric not found, use safe average (standard pattern)
582+
current_score = safe_numeric_average(child_program.metrics)
583583
else:
584-
# Fall back to average of numeric metrics if specified metric doesn't exist
585-
numeric_metrics = [
586-
v for v in child_program.metrics.values()
587-
if isinstance(v, (int, float)) and not isinstance(v, bool)
588-
]
589-
if numeric_metrics:
590-
current_score = sum(numeric_metrics) / len(numeric_metrics)
584+
# User specified a custom metric that doesn't exist
585+
logger.warning(f"Early stopping metric '{self.config.early_stopping_metric}' not found, using safe numeric average")
586+
current_score = safe_numeric_average(child_program.metrics)
591587

592588
if current_score is not None and isinstance(current_score, (int, float)):
593589
# Check for improvement

0 commit comments

Comments
 (0)