forked from Arize-ai/openinference
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathchat.py
More file actions
28 lines (19 loc) · 664 Bytes
/
chat.py
File metadata and controls
28 lines (19 loc) · 664 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
import asyncio
import sys
import traceback
from beeai_framework.adapters.ollama import OllamaChatModel
from beeai_framework.backend import UserMessage
from beeai_framework.errors import FrameworkError
from examples.setup import setup_observability
# Install tracing/observability hooks before any model calls are made.
# NOTE(review): this runs at import time as a module-level side effect —
# presumably intentional for an example script; verify if reused as a library.
setup_observability()
# The single user prompt sent to the model in main().
prompt = "Hello, How are you?"
async def main() -> None:
    """Send the module-level prompt to a local Ollama llama3.1 model and print the reply."""
    model = OllamaChatModel("llama3.1")
    # Stream the completion, capped at 10 tokens, then print the collected text.
    reply = await model.run([UserMessage(prompt)], stream=True, max_tokens=10)
    text = reply.get_text_content()
    print("LLM 🤖 : ", text)
if __name__ == "__main__":
    # Run the async entry point; on framework errors, show the traceback
    # and exit with the framework's human-readable explanation.
    try:
        asyncio.run(main())
    except FrameworkError as err:
        traceback.print_exc()
        sys.exit(err.explain())