import asyncio
import os

from coagent.agents import ChatAgent, ChatMessage, ModelClient
from coagent.core import AgentSpec, new, set_stderr_logger
from coagent.runtimes import LocalRuntime


# DeepSeek's API is OpenAI-compatible, so the model is addressed through the
# "openai/" provider prefix while api_base points at DeepSeek's endpoint.
# The API key is read from the DEEPSEEK_API_KEY environment variable.
client = ModelClient(
    model="openai/deepseek-reasoner",
    api_base="https://api.deepseek.com/v1",
    api_key=os.getenv("DEEPSEEK_API_KEY"),
)


# A ChatAgent spec backed by the client above; it must be registered with a
# runtime before it can receive messages.
deepseek_reasoner = AgentSpec("deepseek_reasoner", new(ChatAgent, client=client))


async def main():
    async with LocalRuntime() as runtime:
        await runtime.register(deepseek_reasoner)

        # Messages are sent as encoded bytes; stream=True yields the reply
        # as a sequence of encoded ChatMessage chunks.
        result = await deepseek_reasoner.run(
            ChatMessage(
                role="user", content="9.11 and 9.8, which is greater?"
            ).encode(),
            stream=True,
        )

        # deepseek-reasoner emits its chain of thought in reasoning_content
        # before the final answer arrives in content. Wrap the reasoning in
        # <think> ... </think> tags so the two phases stay visible.
        reasoning_started = False
        reasoning_stopped = False
        async for chunk in result:
            msg = ChatMessage.decode(chunk)
            if msg.reasoning_content:
                if not reasoning_started:
                    print("<think>", flush=True)
                    reasoning_started = True
                print(msg.reasoning_content, end="", flush=True)
            if msg.content:
                # Only close the tag if one was opened; a model that emits no
                # reasoning would otherwise print a stray </think>.
                if reasoning_started and not reasoning_stopped:
                    print("</think>", flush=True)
                    reasoning_stopped = True
                print(msg.content, end="", flush=True)
        print(flush=True)  # finish the last streamed line with a newline
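

# For comparison, a minimal non-streaming sketch. It assumes that run()
# without stream=True resolves to a single encoded ChatMessage -- an
# assumption extrapolated from the streaming call above, not confirmed
# behavior of the library.
async def main_non_streaming():
    async with LocalRuntime() as runtime:
        await runtime.register(deepseek_reasoner)
        raw = await deepseek_reasoner.run(
            ChatMessage(
                role="user", content="9.11 and 9.8, which is greater?"
            ).encode()
        )
        msg = ChatMessage.decode(raw)
        if msg.reasoning_content:
            print(f"<think>\n{msg.reasoning_content}\n</think>")
        print(msg.content)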


if __name__ == "__main__":
    set_stderr_logger()  # route coagent's log output to stderr
    asyncio.run(main())
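
# Usage sketch (the filename here is hypothetical):
#   export DEEPSEEK_API_KEY=sk-...
#   python deepseek_reasoner.py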