
Commit 7e18985

added gemini api provider
1 parent 957fb4f commit 7e18985

File tree

1 file changed: +71 −0 lines changed

Lines changed: 71 additions & 0 deletions
@@ -0,0 +1,71 @@
import os
import google.generativeai as genai
from typing import Sequence, Any

from .ai_service import AIProvider, Character, Message


class GeminiAIProvider:
    def __init__(self, api_key: str, model_name: str = 'gemini-2.5-flash'):
        genai.configure(api_key=api_key)
        self.model = genai.GenerativeModel(model_name)
        self.model_name = model_name

    def _format_history_for_gemini(self, history: Sequence[Message]) -> list[dict[str, Any]]:
        gemini_history = []
        for message in history:
            role = 'user' if message.sender == 'user' else 'model'
            gemini_history.append({'role': role, 'parts': [message.content]})
        return gemini_history

    def get_completion(self, character: Character, history: Sequence[Message]) -> str:
        if not history:
            return "Error: No conversation history."

        last_message = history[-1]

        if last_message.sender != 'user':
            # Fallback: the Gemini chat API expects the prompt to be the latest
            # user turn, so find the most recent user message and treat everything
            # before it as context.
            print(f"Warning: Gemini provider expects last message from user, but got {last_message.sender}.")
            last_user_message_content = None
            context_history_for_api = []
            found_last_user = False

            for msg in reversed(history):
                if msg.sender == 'user' and not found_last_user:
                    last_user_message_content = msg.content
                    found_last_user = True
                elif found_last_user:
                    role = 'user' if msg.sender == 'user' else 'model'
                    context_history_for_api.append({'role': role, 'parts': [msg.content]})

            if not found_last_user or last_user_message_content is None:
                return "Error: No user message found in history for Gemini."

            # The loop walked the history backwards, so restore chronological order.
            context_history_for_api.reverse()

            formatted_context_history = context_history_for_api
            current_user_prompt = last_user_message_content

        else:
            formatted_context_history = self._format_history_for_gemini(history[:-1])
            current_user_prompt = last_message.content

        system_instruction = character.system_prompt

        try:
            # google.generativeai takes system_instruction on the model itself,
            # not on start_chat, so build a per-request model that carries the
            # character's system prompt.
            model = genai.GenerativeModel(self.model_name,
                                          system_instruction=system_instruction)
            chat = model.start_chat(history=formatted_context_history)
            response = chat.send_message(current_user_prompt)
            ai_response_text = response.text

            if not ai_response_text:
                print("Warning: Gemini API returned no text.")
                return "I cannot generate a response for that request."

            return ai_response_text

        except Exception as e:
            print(f"Error calling Gemini API: {e}")
            return "Sorry, I encountered an error with the AI service."
