Skip to content

Commit 5fbcccf

Browse files
authored
fix(sample-app): Update google genai package (#3358)
1 parent a7a2745 commit 5fbcccf

File tree

1 file changed

+27
-14
lines changed

1 file changed

+27
-14
lines changed

packages/sample-app/sample_app/gemini.py

Lines changed: 27 additions & 14 deletions
Original file line number | Diff line number | Diff line change
@@ -1,21 +1,21 @@
11
import os
22
import asyncio
3-
import google.generativeai as genai
3+
import google.genai as genai
44
from traceloop.sdk import Traceloop
55
from traceloop.sdk.decorators import workflow
66

77
# Initialize Traceloop instrumentation before any model calls are made.
Traceloop.init(app_name="gemini_example")

# The new google-genai SDK is client-based (no global genai.configure()):
# create one shared client; the API key is read from the environment.
client = genai.Client(api_key=os.environ.get("GENAI_API_KEY"))
1010

1111

1212
@workflow("predict")
def predict_text() -> str:
    """Single-shot ideation example against a Gemini model.

    Returns:
        The model's text response.
    """
    prompt = "Give me ten interview questions for the role of program manager."

    # One synchronous generation call through the shared client.
    result = client.models.generate_content(
        model="gemini-1.5-pro-002",
        contents=prompt,
    )

    return result.text
@@ -25,24 +25,37 @@ def predict_text() -> str:
2525
async def async_predict_text() -> str:
    """Async ideation example against a Gemini model.

    Uses the client's async surface (``client.aio``) so the request is
    awaited instead of blocking the running event loop.

    Returns:
        The model's text response.
    """
    # NOTE(review): the previous version called the synchronous
    # client.models.generate_content from an async function, which blocks
    # the event loop and silently loses the async behavior the old
    # generate_content_async call had. client.aio mirrors the sync API
    # with awaitable methods.
    response = await client.aio.models.generate_content(
        model="gemini-1.5-pro-002",
        contents="Give me ten interview questions for the role of program manager.",
    )

    return response.text
3434

3535

3636
@workflow("chat")
def chat() -> str:
    """Two-turn chat example with manually threaded conversation history."""
    model_name = "gemini-1.5-pro-002"
    opening_prompt = "Hello, how are you?"

    # Opening turn of the conversation.
    first_reply = client.models.generate_content(
        model=model_name,
        contents=opening_prompt,
    )

    # Follow-up turn: replay the full history so the model keeps context.
    history = [
        {"role": "user", "parts": [{"text": opening_prompt}]},
        {"role": "model", "parts": [{"text": first_reply.text}]},
        {"role": "user", "parts": [{"text": "What is the capital of France?"}]},
    ]

    second_reply = client.models.generate_content(
        model=model_name,
        contents=history,
    )

    return second_reply.text
4659

4760

4861
if __name__ == "__main__":

0 commit comments

Comments (0)