-
Notifications
You must be signed in to change notification settings - Fork 1.6k
Expand file tree
/
Copy pathollama_chat_client.py
More file actions
53 lines (40 loc) · 1.63 KB
/
ollama_chat_client.py
File metadata and controls
53 lines (40 loc) · 1.63 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
# Copyright (c) Microsoft. All rights reserved.
"""
Ollama Chat Client Example

This sample demonstrates using the native Ollama Chat Client directly.
Ensure to install Ollama and have a model running locally before running the sample.
Not all Models support function calling, to test function calling try llama3.2
Set the model to use via the OLLAMA_MODEL environment variable or modify the code below.
https://ollama.com/
"""
# NOTE: the docstring above must be the module's first statement (PEP 257) —
# previously it sat after load_dotenv() and was silently discarded.

# Standard library
import asyncio
from datetime import datetime

# Third-party / framework
from agent_framework import Message, tool
from agent_framework.ollama import OllamaChatClient
from dotenv import load_dotenv

# Load environment variables (e.g. OLLAMA_MODEL) from a .env file, if present.
# This runs at import time, so main() below sees the configured environment.
load_dotenv()

# NOTE: approval_mode="never_require" is for sample brevity. Use "always_require" in production;
# see samples/02-agents/tools/function_tool_with_approval.py
# and samples/02-agents/tools/function_tool_with_approval_and_sessions.py.
@tool(approval_mode="never_require")
def get_time():
    """Return the current local time as a human-readable sentence."""
    # Format as 12-hour clock with AM/PM marker, e.g. "03:07 PM".
    stamp = datetime.now().strftime('%I:%M %p')
    return f"The current time is {stamp}."
async def main() -> None:
    """Ask a locally-running Ollama model for the time, exposing get_time as a tool.

    Flip ``use_streaming`` to True to print the reply incrementally instead of
    waiting for the full response.
    """
    client = OllamaChatClient()
    user_text = "What time is it? Use a tool call"
    conversation = [Message(role="user", contents=[user_text])]
    use_streaming = False

    print(f"User: {user_text}")
    if not use_streaming:
        # Single-shot request: await the complete assistant reply.
        reply = await client.get_response(conversation, tools=get_time)
        print(f"Assistant: {reply}")
    else:
        # Streaming request: print each chunk as it arrives, then a newline.
        print("Assistant: ", end="")
        async for piece in client.get_response(conversation, tools=get_time, stream=True):
            text = str(piece)
            if text:
                print(text, end="")
        print("")
# Script entry point: start a fresh event loop and run the async demo.
if __name__ == "__main__":
    asyncio.run(main())