Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 21 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -229,6 +229,27 @@ if __name__ == "__main__":
> \[!TIP\]
> For more detailed instructions and additional configuration options, check out the [documentation](https://docs.oasis.camel-ai.org/).

### Using MiniMax as the LLM Provider

OASIS supports [MiniMax](https://www.minimaxi.com/) models via the built-in `create_minimax_model()` helper. MiniMax offers an OpenAI-compatible API with models such as **MiniMax-M2.7** (1M context window) and **MiniMax-M2.7-highspeed** (faster variant).

1. Set your MiniMax API key:

```bash
export MINIMAX_API_KEY=<your MiniMax API key>
```

2. Use `create_minimax_model()` in your simulation:

```python
from oasis.minimax import create_minimax_model

minimax_model = create_minimax_model("MiniMax-M2.7")
# Use minimax_model the same way as any other CAMEL model backend
```

See [`examples/reddit_simulation_minimax.py`](examples/reddit_simulation_minimax.py) for a complete simulation example.

Comment on lines +232 to +252
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We plan to include only the most representative documentation in the README.md. Could you please move this section to our full documentation? For example, you could add this content to https://github.com/camel-ai/oasis/blob/main/docs/key_modules/models.mdx.

### More Tutorials

To discover how to create profiles for large-scale users, as well as how to visualize and analyze social simulation data once your experiment concludes, please refer to [More Tutorials](examples/experiment/user_generation_visualization.md) for detailed guidance.
Expand Down
101 changes: 101 additions & 0 deletions examples/reddit_simulation_minimax.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
"""Reddit simulation powered by MiniMax LLM.

This example demonstrates how to run an OASIS Reddit simulation using
MiniMax's ``MiniMax-M2.7`` model via the OpenAI-compatible API.

Prerequisites:
1. ``pip install camel-oasis``
2. Set ``MINIMAX_API_KEY`` in your environment.
3. Place Reddit agent profiles in ``./data/reddit/user_data_36.json``.
(Download from https://github.com/camel-ai/oasis/blob/main/data/reddit/user_data_36.json)
"""

import asyncio
import os

import oasis
from oasis import ActionType, LLMAction, ManualAction, generate_reddit_agent_graph
from oasis.minimax import create_minimax_model


async def main():
    """Drive a small Reddit simulation where every agent runs on MiniMax.

    The ``MINIMAX_API_KEY`` environment variable must be set before running.
    """
    # One shared MiniMax backend serves all agents in the graph.
    model = create_minimax_model("MiniMax-M2.7")

    # Agents choose from the standard Reddit action set.
    actions = ActionType.get_default_reddit_actions()

    agent_graph = await generate_reddit_agent_graph(
        profile_path="./data/reddit/user_data_36.json",
        model=model,
        available_actions=actions,
    )

    # Point the platform at a fresh database file, removing any stale run.
    db_path = "./data/reddit_simulation_minimax.db"
    os.environ["OASIS_DB_PATH"] = os.path.abspath(db_path)
    if os.path.exists(db_path):
        os.remove(db_path)

    # Build the simulation environment on the Reddit platform.
    env = oasis.make(
        agent_graph=agent_graph,
        platform=oasis.DefaultPlatformType.REDDIT,
        database_path=db_path,
    )

    await env.reset()

    # First step: seed the platform with scripted (manual) activity.
    seed_actions = {
        env.agent_graph.get_agent(0): [
            ManualAction(
                action_type=ActionType.CREATE_POST,
                action_args={"content": "Hello, world!"},
            ),
            ManualAction(
                action_type=ActionType.CREATE_COMMENT,
                action_args={
                    "post_id": "1",
                    "content": "Welcome to the OASIS World!",
                },
            ),
        ],
        env.agent_graph.get_agent(1): ManualAction(
            action_type=ActionType.CREATE_COMMENT,
            action_args={
                "post_id": "1",
                "content": "I like the OASIS world.",
            },
        ),
    }
    await env.step(seed_actions)

    # Second step: every agent acts autonomously via the LLM.
    llm_actions = {
        agent: LLMAction()
        for _, agent in env.agent_graph.get_agents()
    }
    await env.step(llm_actions)

    # Flush and close the environment.
    await env.close()


# Entry point: run the async simulation driver on a fresh event loop.
if __name__ == "__main__":
    asyncio.run(main())
4 changes: 3 additions & 1 deletion oasis/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,10 +22,12 @@
from oasis.social_platform.config import UserInfo
from oasis.social_platform.platform import Platform
from oasis.social_platform.typing import ActionType, DefaultPlatformType
from oasis.minimax import create_minimax_model
from oasis.testing.show_db import print_db_contents

# Public API surface re-exported by ``oasis``.
__all__ = [
    "make",
    "Platform",
    "ActionType",
    "DefaultPlatformType",
    "ManualAction",
    "LLMAction",
    "print_db_contents",
    "AgentGraph",
    "SocialAgent",
    "UserInfo",
    "generate_reddit_agent_graph",
    "generate_twitter_agent_graph",
    "create_minimax_model",
]
119 changes: 119 additions & 0 deletions oasis/minimax.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
"""MiniMax model integration for OASIS.

This module provides a convenience function for creating MiniMax LLM models
using CAMEL's ``ModelFactory`` with the ``OPENAI_COMPATIBLE_MODEL`` platform
type. MiniMax offers an OpenAI-compatible API at ``https://api.minimax.io/v1``.

Available models:

* **MiniMax-M2.7** -- Latest flagship model with 1M context window.
* **MiniMax-M2.7-highspeed** -- Faster variant for latency-sensitive workloads.

Usage::

    from oasis.minimax import create_minimax_model

model = create_minimax_model("MiniMax-M2.7")

The ``MINIMAX_API_KEY`` environment variable must be set.
"""

from __future__ import annotations

import os
from typing import Any, Dict, Optional

from camel.models import BaseModelBackend, ModelFactory
from camel.types import ModelPlatformType

# Base URL of MiniMax's OpenAI-compatible REST endpoint.
MINIMAX_API_BASE_URL = "https://api.minimax.io/v1"

# Catalog of supported MiniMax models. Keys are the valid ``model_type``
# values accepted by ``create_minimax_model``; values hold descriptive
# metadata (not consumed programmatically elsewhere in this module).
MINIMAX_MODELS: Dict[str, Dict[str, Any]] = {
    "MiniMax-M2.7": {
        "description": "Flagship model with 1M context window",
        "context_length": 1_000_000,
    },
    "MiniMax-M2.7-highspeed": {
        "description": "Faster variant for latency-sensitive workloads",
        "context_length": 1_000_000,
    },
}


def create_minimax_model(
    model_type: str = "MiniMax-M2.7",
    api_key: Optional[str] = None,
    url: Optional[str] = None,
    model_config_dict: Optional[Dict[str, Any]] = None,
    **kwargs: Any,
) -> BaseModelBackend:
    """Build a MiniMax model backend through CAMEL's ``ModelFactory``.

    Since MiniMax exposes an OpenAI-compatible API, the backend is created
    with ``ModelPlatformType.OPENAI_COMPATIBLE_MODEL``.

    Args:
        model_type: MiniMax model identifier. Defaults to ``"MiniMax-M2.7"``.
            Must be one of the keys of ``MINIMAX_MODELS``.
        api_key: MiniMax API key; falls back to the ``MINIMAX_API_KEY``
            environment variable when *None*.
        url: API base URL; falls back to ``MINIMAX_API_BASE_URL``.
        model_config_dict: Extra model configuration forwarded to
            ``ModelFactory.create()``. Any ``temperature`` outside the
            MiniMax-supported range ``(0.0, 1.0]`` is clamped into it.
        **kwargs: Additional keyword arguments forwarded to
            ``ModelFactory.create()``.

    Returns:
        A CAMEL ``BaseModelBackend`` configured for MiniMax.

    Raises:
        ValueError: If *model_type* is not a known MiniMax model, or no
            API key was provided and none is set in the environment.
    """
    # Reject model names we do not recognize up front.
    if model_type not in MINIMAX_MODELS:
        supported = list(MINIMAX_MODELS.keys())
        raise ValueError(
            f"Unknown MiniMax model: {model_type!r}. "
            f"Supported models: {supported}"
        )

    # Explicit argument wins; otherwise read the key from the environment.
    key = api_key if api_key else os.environ.get("MINIMAX_API_KEY")
    if not key:
        raise ValueError(
            "MiniMax API key is required. Pass it via the 'api_key' argument "
            "or set the MINIMAX_API_KEY environment variable."
        )

    base_url = url if url else MINIMAX_API_BASE_URL

    # Work on a copy so the caller's dict is never mutated; clamp any
    # temperature into MiniMax's supported range (0.0, 1.0].
    config = {**(model_config_dict or {})}
    if "temperature" in config:
        t = config["temperature"]
        config["temperature"] = 0.01 if t <= 0.0 else (1.0 if t > 1.0 else t)

    return ModelFactory.create(
        model_platform=ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
        model_type=model_type,
        api_key=key,
        url=base_url,
        model_config_dict=config if config else None,
        **kwargs,
    )
Loading
Loading