+from metagpt.actions.action import Action
+from metagpt.config2 import Config
+from metagpt.const import TEST_DATA_PATH
+from metagpt.context import Context
+from metagpt.provider.llm_provider_registry import create_llm_instance
+from metagpt.roles.role import Role
+
+
+def test_set_llm():
+    config1 = Config.default()
+    config2 = Config.default()
+    config2.llm.model = "gpt-3.5-turbo"
+
+    # An Action resolves its LLM config from the Context it was created with.
+    context = Context(config=config1)
+    act = Action(context=context)
+    assert act.config.llm.model == config1.llm.model
+
+    # Assigning an LLM instance directly overrides the one derived from the context.
+    llm2 = create_llm_instance(config2.llm)
+    act.llm = llm2
+    assert act.llm.model == llm2.model
+
+    # The explicitly assigned LLM is kept after the Action is attached to a Role.
+    role = Role(context=context)
+    role.set_actions([act])
+    assert act.llm.model == llm2.model
+
+    # Replacing an Action's config makes it resolve its LLM from the new config.
+    role1 = Role(context=context)
+    act1 = Action(context=context)
+    assert act1.config.llm.model == config1.llm.model
+    act1.config = config2
+    role1.set_actions([act1])
+    assert act1.llm.model == llm2.model
+
+    # Multiple LLMs: an Action can select a named model via `llm_name_or_type`,
+    # while the config's default `llm` entry stays unchanged.
+    config3_path = TEST_DATA_PATH / "config/config2_multi_llm.yaml"
+    dict3 = Config.read_yaml(config3_path)
+    config3 = Config(**dict3)
+    context3 = Context(config=config3)
+    role3 = Role(context=context3)
+    act3 = Action(context=context3, llm_name_or_type="YOUR_MODEL_NAME_1")
+    assert act3.config.llm.model == "gpt-3.5-turbo"
+    assert act3.llm.model == "gpt-4-turbo"
+    role3.set_actions([act3])
+    assert act3.config.llm.model == "gpt-3.5-turbo"
+    assert act3.llm.model == "gpt-4-turbo"
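Not part of the diff: the multi-LLM branch above reads a config2_multi_llm.yaml fixture under TEST_DATA_PATH / "config" that is not shown here. Below is a minimal sketch of the data that fixture would need to yield from Config.read_yaml for the assertions to pass, assuming the schema pairs a default llm entry with a models mapping keyed by name; the models key and the per-model fields are assumptions, not copied from the repository.

# Hypothetical contents of config2_multi_llm.yaml, written as the dict that
# Config.read_yaml would return; field names and placeholder values are assumptions.
multi_llm_dict = {
    "llm": {
        "api_type": "openai",
        "model": "gpt-3.5-turbo",    # default model -> act3.config.llm.model
        "api_key": "sk-placeholder",
    },
    "models": {
        "YOUR_MODEL_NAME_1": {
            "api_type": "openai",
            "model": "gpt-4-turbo",  # selected via llm_name_or_type="YOUR_MODEL_NAME_1"
            "api_key": "sk-placeholder",
        },
    },
}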