Skip to content

Commit 38e2621

Browse files
committed
ruff
1 parent 392677a commit 38e2621

File tree

1 file changed: +6 additions, −5 deletions

agentops/llms/providers/gemini.py

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
 from agentops.log_config import logger
 from agentops.singleton import singleton
 
+
 @singleton
 class GeminiProvider(BaseProvider):
     original_generate_content = None
@@ -27,9 +28,7 @@ def __init__(self, client=None):
         super().__init__(client)
         self._provider_name = "Gemini"
 
-    def handle_response(
-        self, response, kwargs, init_timestamp, session: Optional[Session] = None
-    ) -> dict:
+    def handle_response(self, response, kwargs, init_timestamp, session: Optional[Session] = None) -> dict:
         """Handle responses from Gemini API for both sync and streaming modes.
 
Args:
@@ -45,7 +44,7 @@ def handle_response(
         llm_event = LLMEvent(init_timestamp=init_timestamp, params=kwargs)
         if session is not None:
             llm_event.session_id = session.session_id
-
+
         accumulated_content = ""
 
         def handle_stream_chunk(chunk):
@@ -82,10 +81,12 @@ def handle_stream_chunk(chunk):
 
         # For streaming responses
         if kwargs.get("stream", False):
+
             def generator():
                 for chunk in response:
                     handle_stream_chunk(chunk)
                     yield chunk
+
             return generator()
 
         # For synchronous responses
# For synchronous responses
@@ -190,4 +191,4 @@ def undo_override(self):
 
         if self.original_generate_content_async is not None:
             genai.GenerativeModel.generate_content_async = self.original_generate_content_async
-            self.original_generate_content_async = None
+            self.original_generate_content_async = None

(NOTE: the removed and added lines are textually identical here — presumably a whitespace-only change such as a trailing end-of-file newline added by the formatter; verify against the raw diff.)

0 commit comments

Comments (0)