
Commit dad9e1c

examples: add gpt-oss tools (#549)
1 parent fe91357 commit dad9e1c

File tree: 3 files changed, +151 −0 lines


examples/README.md

Lines changed: 4 additions & 0 deletions
@@ -27,6 +27,10 @@ See [ollama/docs/api.md](https://github.com/ollama/ollama/blob/main/docs/api.md)
 - [async-tools.py](async-tools.py)
 - [multi-tool.py](multi-tool.py) - Using multiple tools, with thinking enabled
 
+#### gpt-oss
+- [gpt-oss-tools.py](gpt-oss-tools.py) - Using tools with gpt-oss
+- [gpt-oss-tools-stream.py](gpt-oss-tools-stream.py) - Using tools with gpt-oss, with streaming enabled
+
 
 ### Multimodal with Images - Chat with a multimodal (image chat) model
 - [multimodal-chat.py](multimodal-chat.py)

examples/gpt-oss-tools-stream.py

Lines changed: 77 additions & 0 deletions
import random
from typing import Iterator

from ollama import chat
from ollama._types import ChatResponse


def get_weather(city: str) -> str:
  """
  Get the current temperature for a city

  Args:
    city (str): The name of the city

  Returns:
    str: The current temperature
  """
  temperatures = list(range(-10, 35))

  temp = random.choice(temperatures)

  return f'The temperature in {city} is {temp}°C'


def get_weather_conditions(city: str) -> str:
  """
  Get the weather conditions for a city

  Args:
    city (str): The name of the city

  Returns:
    str: The current weather conditions
  """
  conditions = ['sunny', 'cloudy', 'rainy', 'snowy', 'foggy']
  return random.choice(conditions)


available_tools = {'get_weather': get_weather, 'get_weather_conditions': get_weather_conditions}

messages = [{'role': 'user', 'content': 'What is the weather like in London? What are the conditions in Toronto?'}]


model = 'gpt-oss:20b'
# gpt-oss can call tools while "thinking"
# a loop is needed to call the tools and get the results
final = True
while True:
  response_stream: Iterator[ChatResponse] = chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions], stream=True)

  for chunk in response_stream:
    if chunk.message.content:
      if not (chunk.message.thinking or chunk.message.thinking == '') and final:
        print('\nFinal result: ')
        final = False
      print(chunk.message.content, end='', flush=True)
    if chunk.message.thinking:
      print(chunk.message.thinking, end='', flush=True)

  print()

  if chunk.message.tool_calls:
    for tool_call in chunk.message.tool_calls:
      function_to_call = available_tools.get(tool_call.function.name)
      if function_to_call:
        print('\nCalling tool: ', tool_call.function.name, 'with arguments: ', tool_call.function.arguments)
        result = function_to_call(**tool_call.function.arguments)
        print('Tool result: ', result + '\n')

        messages.append(chunk.message)
        messages.append({'role': 'tool', 'content': result, 'tool_name': tool_call.function.name})
      else:
        print(f'Tool {tool_call.function.name} not found')

  else:
    # no more tool calls, we can stop the loop
    break
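The streaming loop above always routes a tool result back to the model as a 'tool' message before calling chat() again. As a minimal, standalone sketch of that single round trip (the {'city': 'London'} argument dict is an assumed example value, not output from a real gpt-oss call):

# Standalone sketch of one tool-call round trip; the argument dict below is an
# assumed example value, whereas the real loop reads tool_call.function.arguments.
import random


def get_weather(city: str) -> str:
  return f'The temperature in {city} is {random.choice(range(-10, 35))}°C'


available_tools = {'get_weather': get_weather}
messages = [{'role': 'user', 'content': 'What is the weather like in London?'}]

tool_args = {'city': 'London'}  # normally parsed from the model's tool call
result = available_tools['get_weather'](**tool_args)

# The result is appended as a 'tool' message so the next chat() call can use it.
messages.append({'role': 'tool', 'content': result, 'tool_name': 'get_weather'})
print(messages[-1])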

examples/gpt-oss-tools.py

Lines changed: 70 additions & 0 deletions
import random

from ollama import chat
from ollama._types import ChatResponse


def get_weather(city: str) -> str:
  """
  Get the current temperature for a city

  Args:
    city (str): The name of the city

  Returns:
    str: The current temperature
  """
  temperatures = list(range(-10, 35))

  temp = random.choice(temperatures)

  return f'The temperature in {city} is {temp}°C'


def get_weather_conditions(city: str) -> str:
  """
  Get the weather conditions for a city

  Args:
    city (str): The name of the city

  Returns:
    str: The current weather conditions
  """
  conditions = ['sunny', 'cloudy', 'rainy', 'snowy', 'foggy']
  return random.choice(conditions)


available_tools = {'get_weather': get_weather, 'get_weather_conditions': get_weather_conditions}

messages = [{'role': 'user', 'content': 'What is the weather like in London? What are the conditions in Toronto?'}]


model = 'gpt-oss:20b'
# gpt-oss can call tools while "thinking"
# a loop is needed to call the tools and get the results
while True:
  response: ChatResponse = chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions])

  if response.message.content:
    print('Content: ')
    print(response.message.content + '\n')
  if response.message.thinking:
    print('Thinking: ')
    print(response.message.thinking + '\n')

  if response.message.tool_calls:
    for tool_call in response.message.tool_calls:
      function_to_call = available_tools.get(tool_call.function.name)
      if function_to_call:
        result = function_to_call(**tool_call.function.arguments)
        print('Result from tool call name: ', tool_call.function.name, 'with arguments: ', tool_call.function.arguments, 'result: ', result + '\n')

        messages.append(response.message)
        messages.append({'role': 'tool', 'content': result, 'tool_name': tool_call.function.name})
      else:
        print(f'Tool {tool_call.function.name} not found')

  else:
    # no more tool calls, we can stop the loop
    break
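Both examples dispatch through the available_tools dict and pass the same functions to chat() via the tools parameter, so extending them is mostly a matter of adding one more function. A sketch with a hypothetical get_humidity tool (not part of this commit):

# Hypothetical extra tool, shown only to illustrate how the examples can be extended.
import random


def get_humidity(city: str) -> str:
  """
  Get the current humidity for a city (hypothetical tool, not part of this commit)

  Args:
    city (str): The name of the city

  Returns:
    str: The current humidity
  """
  return f'The humidity in {city} is {random.choice(range(20, 90))}%'


# In either example this would be registered and passed to chat() the same way:
#   available_tools['get_humidity'] = get_humidity
#   chat(model=model, messages=messages, tools=[get_weather, get_weather_conditions, get_humidity])
print(get_humidity('Toronto'))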
