File tree Expand file tree Collapse file tree 3 files changed +196
-0
lines changed
examples/structured-outputs Expand file tree Collapse file tree 3 files changed +196
-0
lines changed Original file line number Diff line number Diff line change 1+ # Structured Outputs
2+
This example demonstrates how to enable structured outputs when chatting with an agent. As a comparison, please also refer to the [original example from Ollama][1].
4+
To learn more about structured outputs, see [OpenAI's Structured Outputs][2].
6+
7+
8+ ## Prerequisites
9+
- Install `coagent` (see [Installation](../../README.md#installation)).
- Start a NATS server (see [Distributed](../../README.md#distributed)).
12+
13+
14+ ## Quick Start
15+
Run [llama3.1][3] by using [Ollama][4]:
17+
18+ ``` bash
19+ ollama run llama3.1
20+ ```
21+
22+ ### Run the local agent
23+
24+ Run the agent as a script:
25+
26+ ``` bash
27+ python examples/structured-outputs/local_agent.py
28+ ```
29+
30+ ### Run the daemon agent
31+
32+ Run the agent as a daemon:
33+
34+ ``` bash
35+ python examples/structured-outputs/daemon_agent.py
36+ ```
37+
Then communicate with the agent using the `coagent` CLI:
39+
40+ ``` bash
41+ coagent structured -H type:StructuredOutput --chat -d ' {
42+ "input": {
43+ "role": "user",
44+ "content": "I have two friends. The first is Ollama 22 years old busy saving the world, and the second is Alonso 23 years old and wants to hang out. Return a list of friends in JSON format"
45+ },
46+ "output_schema": {
47+ "type": "json_schema",
48+ "json_schema": {
49+ "name": "FriendList",
50+ "strict": true,
51+ "schema": {
52+ "type": "object",
53+ "properties": {
54+ "friends": {
55+ "items": {
56+ "$ref": "#/$defs/FriendInfo"
57+ },
58+ "type": "array"
59+ }
60+ },
61+ "required": [
62+ "friends"
63+ ],
64+ "$defs": {
65+ "FriendInfo": {
66+ "type": "object",
67+ "properties": {
68+ "name": {
69+ "type": "string"
70+ },
71+ "age": {
72+ "type": "integer"
73+ },
74+ "is_available": {
75+ "type": "boolean"
76+ }
77+ },
78+ "required": [
79+ "name",
80+ "age",
81+ "is_available"
82+ ]
83+ }
84+ }
85+ }
86+ }
87+ }
88+ }'
89+ ```
90+
[1]: https://github.com/ollama/ollama/blob/main/docs/openai.md#structured-outputs
[2]: https://platform.openai.com/docs/guides/structured-outputs
[3]: https://ollama.com/library/llama3.1
[4]: https://github.com/ollama/ollama
Original file line number Diff line number Diff line change 1+ import asyncio
2+
3+ from coagent .agents import ChatAgent , ModelClient
4+ from coagent .core import AgentSpec , idle_loop , new , set_stderr_logger
5+ from coagent .runtimes import NATSRuntime
6+ from pydantic import BaseModel
7+
8+
class FriendInfo(BaseModel):
    """Schema for one friend record in the model's structured output.

    NOTE(review): this model is not referenced elsewhere in this daemon
    script; it appears to mirror the JSON schema passed via the `coagent`
    CLI in the example README — confirm it is intentionally kept for
    reference.
    """

    # Friend's name.
    name: str
    # Age in years.
    age: int
    # Availability flag (see the example prompt in the README).
    is_available: bool
13+
14+
class FriendList(BaseModel):
    """Top-level structured-output schema: a list of FriendInfo entries.

    NOTE(review): unused in this daemon script; the equivalent JSON schema
    is supplied by the CLI caller per the README — confirm it is kept only
    for reference.
    """

    # The collection of extracted friends.
    friends: list[FriendInfo]
17+
18+
# Model client targeting a local Ollama server through its OpenAI-compatible
# endpoint (port 11434).
client = ModelClient(
    model="openai/llama3.1",
    api_base="http://localhost:11434/v1",
    # Placeholder key; presumably not validated by the local Ollama server —
    # confirm against Ollama's OpenAI-compatibility docs.
    api_key="ollama",
)
24+
25+
# Agent spec named "structured": a plain ChatAgent backed by the model client
# above. The name is what the `coagent` CLI addresses (see the README).
structured = AgentSpec(
    "structured",
    new(
        ChatAgent,
        client=client,
    ),
)
33+
34+
async def main():
    """Register the `structured` agent on a NATS runtime and serve forever."""
    server_url = "nats://127.0.0.1:4222"
    async with NATSRuntime.from_servers(server_url) as rt:
        await rt.register(structured)
        # Block until interrupted so the daemon keeps handling requests.
        await idle_loop()
39+
40+
if __name__ == "__main__":
    # Route log output to stderr before starting the event loop.
    set_stderr_logger()
    asyncio.run(main())
Original file line number Diff line number Diff line change 1+ import asyncio
2+
3+ from coagent .agents import ChatAgent , ChatMessage , ModelClient , StructuredOutput
4+ from coagent .core import AgentSpec , new , set_stderr_logger
5+ from coagent .runtimes import LocalRuntime
6+ from pydantic import BaseModel
7+
8+
class FriendInfo(BaseModel):
    """Schema for one friend record in the model's structured output."""

    # Friend's name.
    name: str
    # Age in years.
    age: int
    # Availability flag (see the example prompt below).
    is_available: bool
13+
14+
class FriendList(BaseModel):
    """Top-level structured-output schema passed to the agent as output_type."""

    # The collection of extracted friends.
    friends: list[FriendInfo]
17+
18+
# Model client targeting a local Ollama server through its OpenAI-compatible
# endpoint (port 11434).
client = ModelClient(
    model="openai/llama3.1",
    api_base="http://localhost:11434/v1",
    # Placeholder key; presumably not validated by the local Ollama server —
    # confirm against Ollama's OpenAI-compatibility docs.
    api_key="ollama",
)
24+
25+
# Agent spec named "structured": a plain ChatAgent backed by the model client
# above. The structured-output request is sent to it in main().
structured = AgentSpec(
    "structured",
    new(
        ChatAgent,
        client=client,
    ),
)
33+
34+
async def main():
    """Run a one-shot structured-output chat against the local agent.

    Registers the agent on an in-process runtime, sends a prompt together
    with the FriendList output type, and streams the reply to stdout.
    """
    async with LocalRuntime() as rt:
        await rt.register(structured)

        prompt = ChatMessage(
            role="user",
            content=(
                "I have two friends. The first is Ollama 22 years old busy "
                "saving the world, and the second is Alonso 23 years old and "
                "wants to hang out. Return a list of friends in JSON format"
            ),
        )
        request = StructuredOutput(input=prompt, output_type=FriendList)
        stream = await structured.run(request.encode(), stream=True)

        # Print each streamed chunk as it arrives, without buffering.
        async for raw in stream:
            reply = ChatMessage.decode(raw)
            print(reply.content, end="", flush=True)
55+
56+
if __name__ == "__main__":
    # Route log output to stderr before starting the event loop.
    set_stderr_logger()
    asyncio.run(main())
You can’t perform that action at this time.
0 commit comments