Skip to content

Commit 47940fd

Browse files
committed
WIP Gemini Live Pipecat Flows support
1 parent c3b29fc commit 47940fd

File tree

2 files changed

+49
-6
lines changed

2 files changed

+49
-6
lines changed

src/pipecat/adapters/services/gemini_adapter.py

Lines changed: 43 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -53,19 +53,32 @@ def id_for_llm_specific_messages(self) -> str:
5353
"""Get the identifier used in LLMSpecificMessage instances for Google."""
5454
return "google"
5555

56-
def get_llm_invocation_params(
    self, context: LLMContext, *, strip_function_messages: bool = False
) -> GeminiLLMInvocationParams:
    """Get Gemini-specific LLM invocation parameters from a universal LLM context.

    Args:
        context: The LLM context containing messages, tools, etc.
        strip_function_messages: If True, filter out function_call and function_response
            parts from messages. This is needed for Gemini Live (at least with
            "models/gemini-2.5-flash-native-audio-preview-12-2025", the default at
            the time of this writing) which cannot handle function-call-related
            messages when initializing conversation history.
            See https://stackoverflow.com/a/79851394.

    Returns:
        Dictionary of parameters for Gemini's API.
    """
    converted = self._from_universal_context_messages(self.get_messages(context))

    # Optionally drop function-call-related content before handing history to Gemini.
    history = (
        self._strip_function_messages(converted.messages)
        if strip_function_messages
        else converted.messages
    )

    return {
        "system_instruction": converted.system_instruction,
        "messages": history,
        # NOTE: LLMContext's tools are guaranteed to be a ToolsSchema (or NOT_GIVEN)
        "tools": self.from_standard_tools(context.tools),
    }
@@ -668,3 +681,29 @@ def _thought_signature_inline_data_bookmark_matches_part(
668681
return True
669682

670683
return False
684+
685+
def _strip_function_messages(self, messages: List[Content]) -> List[Content]:
686+
"""Strip function_call and function_response parts from messages.
687+
688+
Args:
689+
messages: List of Content messages to filter.
690+
691+
Returns:
692+
List of Content messages with function-related parts removed.
693+
"""
694+
filtered_messages = []
695+
for msg in messages:
696+
if msg.parts:
697+
filtered_parts = [
698+
part
699+
for part in msg.parts
700+
if not (
701+
getattr(part, "function_call", None)
702+
or getattr(part, "function_response", None)
703+
)
704+
]
705+
if filtered_parts:
706+
filtered_messages.append(Content(role=msg.role, parts=filtered_parts))
707+
else:
708+
filtered_messages.append(msg)
709+
return filtered_messages

src/pipecat/services/google/gemini_live/llm.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1444,7 +1444,9 @@ async def _create_initial_response(self):
14441444
return
14451445

14461446
adapter: GeminiLLMAdapter = self.get_llm_adapter()
1447-
messages = adapter.get_llm_invocation_params(self._context).get("messages", [])
1447+
messages = adapter.get_llm_invocation_params(
1448+
self._context, strip_function_messages=True
1449+
).get("messages", [])
14481450
if not messages:
14491451
return
14501452

@@ -1474,7 +1476,9 @@ async def _create_single_response(self, messages_list):
14741476
# in the right format
14751477
context = LLMContext(messages=messages_list)
14761478
adapter: GeminiLLMAdapter = self.get_llm_adapter()
1477-
messages = adapter.get_llm_invocation_params(context).get("messages", [])
1479+
messages = adapter.get_llm_invocation_params(context, strip_function_messages=True).get(
1480+
"messages", []
1481+
)
14781482

14791483
if not messages:
14801484
return

0 commit comments

Comments
 (0)