diff --git a/README.md b/README.md index 46a731ea..5c1902d7 100644 --- a/README.md +++ b/README.md @@ -229,6 +229,27 @@ if __name__ == "__main__": > \[!TIP\] > For more detailed instructions and additional configuration options, check out the [documentation](https://docs.oasis.camel-ai.org/). +### Using MiniMax as the LLM Provider + +OASIS supports [MiniMax](https://www.minimaxi.com/) models via the built-in `create_minimax_model()` helper. MiniMax offers an OpenAI-compatible API with models such as **MiniMax-M2.7** (1M context window) and **MiniMax-M2.7-highspeed** (faster variant). + +1. Set your MiniMax API key: + +```bash +export MINIMAX_API_KEY=<your-minimax-api-key> +``` + +2. Use `create_minimax_model()` in your simulation: + +```python +from oasis.minimax import create_minimax_model + +minimax_model = create_minimax_model("MiniMax-M2.7") +# Use minimax_model the same way as any other CAMEL model backend +``` + +See [`examples/reddit_simulation_minimax.py`](examples/reddit_simulation_minimax.py) for a complete simulation example. + ### More Tutorials To discover how to create profiles for large-scale users, as well as how to visualize and analyze social simulation data once your experiment concludes, please refer to [More Tutorials](examples/experiment/user_generation_visualization.md) for detailed guidance. diff --git a/examples/reddit_simulation_minimax.py b/examples/reddit_simulation_minimax.py new file mode 100644 index 00000000..a97d76ce --- /dev/null +++ b/examples/reddit_simulation_minimax.py @@ -0,0 +1,101 @@ +# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. =========== +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. =========== +"""Reddit simulation powered by MiniMax LLM. + +This example demonstrates how to run an OASIS Reddit simulation using +MiniMax's ``MiniMax-M2.7`` model via the OpenAI-compatible API. + +Prerequisites: + 1. ``pip install camel-oasis`` + 2. Set ``MINIMAX_API_KEY`` in your environment. + 3. Place Reddit agent profiles in ``./data/reddit/user_data_36.json``. + (Download from https://github.com/camel-ai/oasis/blob/main/data/reddit/user_data_36.json) +""" + +import asyncio +import os + +import oasis +from oasis import ActionType, LLMAction, ManualAction, generate_reddit_agent_graph +from oasis.minimax import create_minimax_model + + +async def main(): + # Create a MiniMax model for all agents. + # The MINIMAX_API_KEY environment variable must be set. 
+ minimax_model = create_minimax_model("MiniMax-M2.7") + + # Define the available actions for the agents + available_actions = ActionType.get_default_reddit_actions() + + agent_graph = await generate_reddit_agent_graph( + profile_path="./data/reddit/user_data_36.json", + model=minimax_model, + available_actions=available_actions, + ) + + # Define the path to the database + db_path = "./data/reddit_simulation_minimax.db" + os.environ["OASIS_DB_PATH"] = os.path.abspath(db_path) + + # Delete the old database + if os.path.exists(db_path): + os.remove(db_path) + + # Make the environment + env = oasis.make( + agent_graph=agent_graph, + platform=oasis.DefaultPlatformType.REDDIT, + database_path=db_path, + ) + + # Run the environment + await env.reset() + + actions_1 = {} + actions_1[env.agent_graph.get_agent(0)] = [ + ManualAction( + action_type=ActionType.CREATE_POST, + action_args={"content": "Hello, world!"}, + ), + ManualAction( + action_type=ActionType.CREATE_COMMENT, + action_args={ + "post_id": "1", + "content": "Welcome to the OASIS World!", + }, + ), + ] + actions_1[env.agent_graph.get_agent(1)] = ManualAction( + action_type=ActionType.CREATE_COMMENT, + action_args={ + "post_id": "1", + "content": "I like the OASIS world.", + }, + ) + await env.step(actions_1) + + actions_2 = { + agent: LLMAction() for _, agent in env.agent_graph.get_agents() + } + + # Perform the actions + await env.step(actions_2) + + # Close the environment + await env.close() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/oasis/__init__.py b/oasis/__init__.py index ede59569..f8d76f9b 100644 --- a/oasis/__init__.py +++ b/oasis/__init__.py @@ -22,10 +22,12 @@ from oasis.social_platform.config import UserInfo from oasis.social_platform.platform import Platform from oasis.social_platform.typing import ActionType, DefaultPlatformType +from oasis.minimax import create_minimax_model from oasis.testing.show_db import print_db_contents __all__ = [ "make", "Platform", "ActionType", 
"DefaultPlatformType", "ManualAction", "LLMAction", "print_db_contents", "AgentGraph", "SocialAgent", "UserInfo", - "generate_reddit_agent_graph", "generate_twitter_agent_graph" + "generate_reddit_agent_graph", "generate_twitter_agent_graph", + "create_minimax_model" ] diff --git a/oasis/minimax.py b/oasis/minimax.py new file mode 100644 index 00000000..9b82516e --- /dev/null +++ b/oasis/minimax.py @@ -0,0 +1,119 @@ +# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. =========== +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. =========== +"""MiniMax model integration for OASIS. + +This module provides a convenience function for creating MiniMax LLM models +using CAMEL's ``ModelFactory`` with the ``OPENAI_COMPATIBLE_MODEL`` platform +type. MiniMax offers an OpenAI-compatible API at ``https://api.minimax.io/v1``. + +Available models: + +* **MiniMax-M2.7** -- Latest flagship model with 1M context window. +* **MiniMax-M2.7-highspeed** -- Faster variant for latency-sensitive workloads. + +Usage:: + + from oasis.minimax import create_minimax_model + + model = create_minimax_model("MiniMax-M2.7") + +The ``MINIMAX_API_KEY`` environment variable must be set. 
+""" + +from __future__ import annotations + +import os +from typing import Any, Dict, Optional + +from camel.models import BaseModelBackend, ModelFactory +from camel.types import ModelPlatformType + +MINIMAX_API_BASE_URL = "https://api.minimax.io/v1" + +MINIMAX_MODELS: Dict[str, Dict[str, Any]] = { + "MiniMax-M2.7": { + "description": "Flagship model with 1M context window", + "context_length": 1_000_000, + }, + "MiniMax-M2.7-highspeed": { + "description": "Faster variant for latency-sensitive workloads", + "context_length": 1_000_000, + }, +} + + +def create_minimax_model( + model_type: str = "MiniMax-M2.7", + api_key: Optional[str] = None, + url: Optional[str] = None, + model_config_dict: Optional[Dict[str, Any]] = None, + **kwargs: Any, +) -> BaseModelBackend: + """Create a MiniMax model backend via CAMEL's ``ModelFactory``. + + MiniMax provides an OpenAI-compatible API, so this function uses + ``ModelPlatformType.OPENAI_COMPATIBLE_MODEL`` under the hood. + + Args: + model_type: MiniMax model identifier. Defaults to ``"MiniMax-M2.7"``. + Supported values: ``"MiniMax-M2.7"``, + ``"MiniMax-M2.7-highspeed"``. + api_key: MiniMax API key. If *None*, reads from the + ``MINIMAX_API_KEY`` environment variable. + url: API base URL. Defaults to ``https://api.minimax.io/v1``. + model_config_dict: Extra model configuration passed to CAMEL's + ``ModelFactory.create()``. Temperature is automatically + clamped to the MiniMax-supported range ``(0.0, 1.0]``. + **kwargs: Additional keyword arguments forwarded to + ``ModelFactory.create()``. + + Returns: + A CAMEL ``BaseModelBackend`` instance configured for MiniMax. + + Raises: + ValueError: If *model_type* is not a recognized MiniMax model. + ValueError: If no API key is provided or found in the environment. + """ + if model_type not in MINIMAX_MODELS: + raise ValueError( + f"Unknown MiniMax model: {model_type!r}. 
" + f"Supported models: {list(MINIMAX_MODELS.keys())}" + ) + + resolved_key = api_key or os.environ.get("MINIMAX_API_KEY") + if not resolved_key: + raise ValueError( + "MiniMax API key is required. Pass it via the 'api_key' argument " + "or set the MINIMAX_API_KEY environment variable." + ) + + resolved_url = url or MINIMAX_API_BASE_URL + + # Apply temperature clamping for MiniMax (must be in (0.0, 1.0]) + config = dict(model_config_dict or {}) + if "temperature" in config: + temp = config["temperature"] + if temp <= 0.0: + config["temperature"] = 0.01 + elif temp > 1.0: + config["temperature"] = 1.0 + + return ModelFactory.create( + model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL, + model_type=model_type, + api_key=resolved_key, + url=resolved_url, + model_config_dict=config or None, + **kwargs, + ) diff --git a/test/agent/test_minimax_provider.py b/test/agent/test_minimax_provider.py new file mode 100644 index 00000000..4261b44c --- /dev/null +++ b/test/agent/test_minimax_provider.py @@ -0,0 +1,307 @@ +# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. =========== +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. 
=========== +"""Unit and integration tests for MiniMax provider support.""" + +import os +from unittest.mock import MagicMock, patch + +import pytest +from camel.models import BaseModelBackend +from camel.types import ModelPlatformType + +from oasis.minimax import ( + MINIMAX_API_BASE_URL, + MINIMAX_MODELS, + create_minimax_model, +) + + +# --------------------------------------------------------------------------- +# Unit tests +# --------------------------------------------------------------------------- + + +class TestMiniMaxModels: + """Tests for the MiniMax model constants.""" + + def test_models_dict_has_expected_models(self): + assert "MiniMax-M2.7" in MINIMAX_MODELS + assert "MiniMax-M2.7-highspeed" in MINIMAX_MODELS + + def test_models_have_description(self): + for model_name, info in MINIMAX_MODELS.items(): + assert "description" in info, f"{model_name} missing description" + assert isinstance(info["description"], str) + + def test_models_have_context_length(self): + for model_name, info in MINIMAX_MODELS.items(): + assert "context_length" in info, ( + f"{model_name} missing context_length" + ) + assert info["context_length"] > 0 + + def test_api_base_url(self): + assert MINIMAX_API_BASE_URL == "https://api.minimax.io/v1" + + +class TestCreateMiniMaxModelValidation: + """Tests for argument validation in create_minimax_model().""" + + def test_unknown_model_raises(self): + with pytest.raises(ValueError, match="Unknown MiniMax model"): + create_minimax_model("nonexistent-model", api_key="test-key") + + def test_missing_api_key_raises(self): + env = os.environ.copy() + env.pop("MINIMAX_API_KEY", None) + with patch.dict(os.environ, env, clear=True): + with pytest.raises(ValueError, match="API key is required"): + create_minimax_model("MiniMax-M2.7") + + def test_api_key_from_env(self): + with patch.dict(os.environ, {"MINIMAX_API_KEY": "env-key"}): + with patch( + "oasis.minimax.ModelFactory.create" + ) as mock_create: + mock_create.return_value = 
MagicMock(spec=BaseModelBackend) + model = create_minimax_model("MiniMax-M2.7") + mock_create.assert_called_once() + call_kwargs = mock_create.call_args + assert call_kwargs.kwargs["api_key"] == "env-key" + assert model is not None + + def test_explicit_api_key_overrides_env(self): + with patch.dict(os.environ, {"MINIMAX_API_KEY": "env-key"}): + with patch( + "oasis.minimax.ModelFactory.create" + ) as mock_create: + mock_create.return_value = MagicMock(spec=BaseModelBackend) + create_minimax_model("MiniMax-M2.7", api_key="explicit-key") + call_kwargs = mock_create.call_args + assert call_kwargs.kwargs["api_key"] == "explicit-key" + + +class TestCreateMiniMaxModelFactory: + """Tests for correct ModelFactory.create() invocation.""" + + @patch("oasis.minimax.ModelFactory.create") + def test_default_model(self, mock_create): + mock_create.return_value = MagicMock(spec=BaseModelBackend) + create_minimax_model(api_key="test-key") + mock_create.assert_called_once_with( + model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL, + model_type="MiniMax-M2.7", + api_key="test-key", + url=MINIMAX_API_BASE_URL, + model_config_dict=None, + ) + + @patch("oasis.minimax.ModelFactory.create") + def test_highspeed_model(self, mock_create): + mock_create.return_value = MagicMock(spec=BaseModelBackend) + create_minimax_model("MiniMax-M2.7-highspeed", api_key="test-key") + call_kwargs = mock_create.call_args + assert call_kwargs.kwargs["model_type"] == "MiniMax-M2.7-highspeed" + + @patch("oasis.minimax.ModelFactory.create") + def test_custom_url(self, mock_create): + mock_create.return_value = MagicMock(spec=BaseModelBackend) + custom_url = "https://custom.minimax.io/v1" + create_minimax_model(api_key="test-key", url=custom_url) + call_kwargs = mock_create.call_args + assert call_kwargs.kwargs["url"] == custom_url + + @patch("oasis.minimax.ModelFactory.create") + def test_returns_base_model_backend(self, mock_create): + mock_backend = MagicMock(spec=BaseModelBackend) + 
mock_create.return_value = mock_backend + result = create_minimax_model(api_key="test-key") + assert result is mock_backend + + +class TestTemperatureClamping: + """Tests for MiniMax temperature constraints.""" + + @patch("oasis.minimax.ModelFactory.create") + def test_zero_temperature_clamped(self, mock_create): + mock_create.return_value = MagicMock(spec=BaseModelBackend) + create_minimax_model( + api_key="test-key", + model_config_dict={"temperature": 0.0}, + ) + call_kwargs = mock_create.call_args + config = call_kwargs.kwargs["model_config_dict"] + assert config["temperature"] == 0.01 + + @patch("oasis.minimax.ModelFactory.create") + def test_negative_temperature_clamped(self, mock_create): + mock_create.return_value = MagicMock(spec=BaseModelBackend) + create_minimax_model( + api_key="test-key", + model_config_dict={"temperature": -1.0}, + ) + call_kwargs = mock_create.call_args + config = call_kwargs.kwargs["model_config_dict"] + assert config["temperature"] == 0.01 + + @patch("oasis.minimax.ModelFactory.create") + def test_high_temperature_clamped(self, mock_create): + mock_create.return_value = MagicMock(spec=BaseModelBackend) + create_minimax_model( + api_key="test-key", + model_config_dict={"temperature": 2.0}, + ) + call_kwargs = mock_create.call_args + config = call_kwargs.kwargs["model_config_dict"] + assert config["temperature"] == 1.0 + + @patch("oasis.minimax.ModelFactory.create") + def test_valid_temperature_unchanged(self, mock_create): + mock_create.return_value = MagicMock(spec=BaseModelBackend) + create_minimax_model( + api_key="test-key", + model_config_dict={"temperature": 0.7}, + ) + call_kwargs = mock_create.call_args + config = call_kwargs.kwargs["model_config_dict"] + assert config["temperature"] == 0.7 + + @patch("oasis.minimax.ModelFactory.create") + def test_no_temperature_no_config(self, mock_create): + mock_create.return_value = MagicMock(spec=BaseModelBackend) + create_minimax_model(api_key="test-key") + call_kwargs = 
mock_create.call_args + assert call_kwargs.kwargs["model_config_dict"] is None + + +class TestOasisImports: + """Tests that MiniMax helpers are accessible from the oasis package.""" + + def test_import_from_oasis(self): + from oasis import create_minimax_model as fn + assert callable(fn) + + def test_import_from_oasis_minimax(self): + from oasis.minimax import ( + MINIMAX_API_BASE_URL, + MINIMAX_MODELS, + create_minimax_model, + ) + assert callable(create_minimax_model) + assert isinstance(MINIMAX_MODELS, dict) + assert isinstance(MINIMAX_API_BASE_URL, str) + + +class TestSocialAgentWithMiniMax: + """Tests that SocialAgent accepts a MiniMax model backend.""" + + @patch("oasis.minimax.ModelFactory.create") + def test_agent_accepts_minimax_model(self, mock_create): + mock_backend = MagicMock(spec=BaseModelBackend) + mock_backend.model_type = "MiniMax-M2.7" + mock_create.return_value = mock_backend + + from oasis import SocialAgent, ActionType + from oasis.social_platform.config import UserInfo + + model = create_minimax_model(api_key="test-key") + agent = SocialAgent( + agent_id=0, + user_info=UserInfo( + user_name="test_user", + name="Test User", + description="A test user", + profile=None, + recsys_type="reddit", + ), + model=model, + available_actions=[ActionType.CREATE_POST, ActionType.DO_NOTHING], + ) + assert agent is not None + assert agent.social_agent_id == 0 + + +# --------------------------------------------------------------------------- +# Integration tests (require MINIMAX_API_KEY) +# --------------------------------------------------------------------------- + + +@pytest.mark.skipif( + not os.environ.get("MINIMAX_API_KEY"), + reason="MINIMAX_API_KEY not set", +) +class TestMiniMaxIntegration: + """Integration tests that call the real MiniMax API.""" + + def test_create_model_real(self): + model = create_minimax_model("MiniMax-M2.7") + assert isinstance(model, BaseModelBackend) + + def test_create_highspeed_model_real(self): + model = 
create_minimax_model("MiniMax-M2.7-highspeed") + assert isinstance(model, BaseModelBackend) + + @pytest.mark.asyncio + async def test_agent_with_minimax_model(self): + import asyncio + from oasis import ( + ActionType, AgentGraph, ManualAction, SocialAgent, UserInfo, + ) + from oasis.social_platform.channel import Channel + from oasis.social_platform.platform import Platform + + model = create_minimax_model("MiniMax-M2.7") + channel = Channel() + + test_db = os.path.join( + os.path.dirname(__file__), "test_minimax_integration.db" + ) + if os.path.exists(test_db): + os.remove(test_db) + + try: + infra = Platform( + db_path=test_db, channel=channel, recsys_type="reddit" + ) + task = asyncio.create_task(infra.running()) + + agent = SocialAgent( + agent_id=0, + user_info=UserInfo( + user_name="minimax_user", + name="MiniMax Tester", + description="An agent using MiniMax M2.7", + profile=None, + recsys_type="reddit", + ), + channel=channel, + model=model, + available_actions=[ + ActionType.CREATE_POST, + ActionType.DO_NOTHING, + ], + ) + + await agent.env.action.sign_up( + "minimax_user", "MiniMax Tester", "Testing MiniMax." + ) + await agent.env.action.create_post( + "Hello from MiniMax-M2.7!" + ) + + await channel.write_to_receive_queue((None, None, "exit")) + await task + finally: + if os.path.exists(test_db): + os.remove(test_db)