@@ -15,6 +15,7 @@
 
 from openevolve.config import Config
 from openevolve.database import Program, ProgramDatabase
+from openevolve.utils.metrics_utils import safe_numeric_average
 
 logger = logging.getLogger(__name__)
 
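For context: `safe_numeric_average` itself is not part of this diff. Below is a minimal sketch of what it presumably does, mirroring the inline fallback this PR deletes from the early-stopping branch further down; the exact signature and the 0.0 default for an empty metrics dict are assumptions.

```python
# Sketch only -- the real helper lives in openevolve/utils/metrics_utils.py
# and is not shown in this diff. Mirrors the inline fallback removed below;
# the 0.0 return for an empty or non-numeric metrics dict is an assumption.
def safe_numeric_average(metrics: dict) -> float:
    numeric = [
        v
        for v in metrics.values()
        # bool is a subclass of int, so exclude it explicitly
        if isinstance(v, (int, float)) and not isinstance(v, bool)
    ]
    return sum(numeric) / len(numeric) if numeric else 0.0
```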
@@ -145,8 +146,6 @@ def _run_iteration_worker(
     ]
 
     # Sort by metrics for top programs
-    from openevolve.utils.metrics_utils import safe_numeric_average
-
     island_programs.sort(
         key=lambda p: p.metrics.get("combined_score", safe_numeric_average(p.metrics)),
         reverse=True,
@@ -530,8 +529,6 @@ async def run_evolution(
                 "combined_score" not in child_program.metrics
                 and not self._warned_about_combined_score
             ):
-                from openevolve.utils.metrics_utils import safe_numeric_average
-
                 avg_score = safe_numeric_average(child_program.metrics)
                 logger.warning(
                     f"⚠️ No 'combined_score' metric found in evaluation results. "
@@ -580,14 +577,13 @@ async def run_evolution(
             current_score = None
             if self.config.early_stopping_metric in child_program.metrics:
                 current_score = child_program.metrics[self.config.early_stopping_metric]
+            elif self.config.early_stopping_metric == "combined_score":
+                # Default metric not found, use safe average (standard pattern)
+                current_score = safe_numeric_average(child_program.metrics)
             else:
-                # Fall back to average of numeric metrics if specified metric doesn't exist
-                numeric_metrics = [
-                    v for v in child_program.metrics.values()
-                    if isinstance(v, (int, float)) and not isinstance(v, bool)
-                ]
-                if numeric_metrics:
-                    current_score = sum(numeric_metrics) / len(numeric_metrics)
+                # User specified a custom metric that doesn't exist
+                logger.warning(f"Early stopping metric '{self.config.early_stopping_metric}' not found, using safe numeric average")
+                current_score = safe_numeric_average(child_program.metrics)
 
             if current_score is not None and isinstance(current_score, (int, float)):
                 # Check for improvement
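To summarize the new branch order for the early-stopping score (exact metric, then the `combined_score` default, then warn-and-average), here is a condensed sketch with hypothetical inputs; the function name is illustrative, not part of the codebase.

```python
import logging

logger = logging.getLogger(__name__)

def resolve_early_stopping_score(metric_name: str, metrics: dict) -> float:
    """Condensed restatement of the branch above; names are illustrative."""
    if metric_name in metrics:
        # 1. The configured metric exists: use it directly.
        return metrics[metric_name]
    if metric_name == "combined_score":
        # 2. Missing, but it is the default metric: fall back silently.
        return safe_numeric_average(metrics)
    # 3. A user-specified metric is missing: warn, then fall back.
    logger.warning(f"Early stopping metric '{metric_name}' not found, using safe numeric average")
    return safe_numeric_average(metrics)

metrics = {"accuracy": 0.5, "latency": 0.75}
assert resolve_early_stopping_score("accuracy", metrics) == 0.5
assert resolve_early_stopping_score("combined_score", metrics) == 0.625
```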