Skip to content

Commit 92c7f7c

Browse files
authored
Merge pull request #120 from SuhailB/updated-parallel-iterations
Updated parallel iterations
2 parents 2b988d5 + 7dcfda8 commit 92c7f7c

File tree

8 files changed

+588
-217
lines changed

8 files changed

+588
-217
lines changed

openevolve/cli.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -126,6 +126,7 @@ async def main_async() -> int:
126126
best_program = await openevolve.run(
127127
iterations=args.iterations,
128128
target_score=args.target_score,
129+
checkpoint_path=args.checkpoint,
129130
)
130131

131132
# Get the checkpoint path

openevolve/config.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,10 @@ class LLMConfig(LLMModelConfig):
5656
retry_delay: int = 5
5757

5858
# n-model configuration for evolution LLM ensemble
59-
models: List[LLMModelConfig] = field(default_factory=lambda: [LLMModelConfig()])
59+
models: List[LLMModelConfig] = field(default_factory=lambda: [
60+
LLMModelConfig(name="gpt-4o-mini", weight=0.8),
61+
LLMModelConfig(name="gpt-4o", weight=0.2)
62+
])
6063

6164
# n-model configuration for evaluator LLM ensemble
6265
evaluator_models: List[LLMModelConfig] = field(default_factory=lambda: [])
@@ -195,7 +198,7 @@ class EvaluatorConfig:
195198
cascade_thresholds: List[float] = field(default_factory=lambda: [0.5, 0.75, 0.9])
196199

197200
# Parallel evaluation
198-
parallel_evaluations: int = 4
201+
parallel_evaluations: int = 1
199202
distributed: bool = False
200203

201204
# LLM-based feedback
@@ -217,6 +220,7 @@ class Config:
217220
log_level: str = "INFO"
218221
log_dir: Optional[str] = None
219222
random_seed: Optional[int] = 42
223+
language: str = None
220224

221225
# Component configurations
222226
llm: LLMConfig = field(default_factory=LLMConfig)
@@ -361,4 +365,4 @@ def load_config(config_path: Optional[Union[str, Path]] = None) -> Config:
361365
# Make the system message available to the individual models, in case it is not provided from the prompt sampler
362366
config.llm.update_model_params({"system_message": config.prompt.system_message})
363367

364-
return config
368+
return config

0 commit comments

Comments (0)