
Commit 114db93

google-genai-bot authored and copybara-github committed
ADK changes
PiperOrigin-RevId: 794403729
1 parent e2518dc commit 114db93

File tree

22 files changed: +54 additions, -134 deletions


contributing/samples/langchain_structured_tool_agent/agent.py

Lines changed: 0 additions & 4 deletions

@@ -15,8 +15,6 @@
 """
 This agent aims to test the Langchain tool with Langchain's StructuredTool
 """
-from __future__ import annotations
-
 from google.adk.agents.llm_agent import Agent
 from google.adk.tools.langchain_tool import LangchainTool
 from langchain.tools import tool
@@ -25,13 +23,11 @@


 async def add(x, y) -> int:
-  """Adds two numbers."""
   return x + y


 @tool
 def minus(x, y) -> int:
-  """Minus two numbers."""
   return x - y


src/google/adk/agents/llm_agent.py

Lines changed: 1 addition & 2 deletions

@@ -109,8 +109,7 @@


 async def _convert_tool_union_to_tools(
-    tool_union: ToolUnion,
-    ctx: ReadonlyContext,
+    tool_union: ToolUnion, ctx: ReadonlyContext
 ) -> list[BaseTool]:
   if isinstance(tool_union, BaseTool):
     return [tool_union]

src/google/adk/flows/llm_flows/base_llm_flow.py

Lines changed: 2 additions & 4 deletions

@@ -73,7 +73,7 @@ async def run_live(
       invocation_context: InvocationContext,
   ) -> AsyncGenerator[Event, None]:
     """Runs the flow using live api."""
-    llm_request = LlmRequest(live_connect_config=types.LiveConnectConfig())
+    llm_request = LlmRequest()
     event_id = Event.new_id()

     # Preprocess before calling the LLM.
@@ -373,9 +373,7 @@ async def _run_one_step_async(
       yield event

   async def _preprocess_async(
-      self,
-      invocation_context: InvocationContext,
-      llm_request: LlmRequest,
+      self, invocation_context: InvocationContext, llm_request: LlmRequest
   ) -> AsyncGenerator[Event, None]:
     from ...agents.llm_agent import LlmAgent

src/google/adk/flows/llm_flows/basic.py

Lines changed: 24 additions & 25 deletions

@@ -57,31 +57,30 @@ async def run_async(
     if agent.output_schema and not agent.tools:
       llm_request.set_output_schema(agent.output_schema)

-    if llm_request.live_connect_config:
-      llm_request.live_connect_config.response_modalities = (
-          invocation_context.run_config.response_modalities
-      )
-      llm_request.live_connect_config.speech_config = (
-          invocation_context.run_config.speech_config
-      )
-      llm_request.live_connect_config.output_audio_transcription = (
-          invocation_context.run_config.output_audio_transcription
-      )
-      llm_request.live_connect_config.input_audio_transcription = (
-          invocation_context.run_config.input_audio_transcription
-      )
-      llm_request.live_connect_config.realtime_input_config = (
-          invocation_context.run_config.realtime_input_config
-      )
-      llm_request.live_connect_config.enable_affective_dialog = (
-          invocation_context.run_config.enable_affective_dialog
-      )
-      llm_request.live_connect_config.proactivity = (
-          invocation_context.run_config.proactivity
-      )
-      llm_request.live_connect_config.session_resumption = (
-          invocation_context.run_config.session_resumption
-      )
+    llm_request.live_connect_config.response_modalities = (
+        invocation_context.run_config.response_modalities
+    )
+    llm_request.live_connect_config.speech_config = (
+        invocation_context.run_config.speech_config
+    )
+    llm_request.live_connect_config.output_audio_transcription = (
+        invocation_context.run_config.output_audio_transcription
+    )
+    llm_request.live_connect_config.input_audio_transcription = (
+        invocation_context.run_config.input_audio_transcription
+    )
+    llm_request.live_connect_config.realtime_input_config = (
+        invocation_context.run_config.realtime_input_config
+    )
+    llm_request.live_connect_config.enable_affective_dialog = (
+        invocation_context.run_config.enable_affective_dialog
+    )
+    llm_request.live_connect_config.proactivity = (
+        invocation_context.run_config.proactivity
+    )
+    llm_request.live_connect_config.session_resumption = (
+        invocation_context.run_config.session_resumption
+    )

     # TODO: handle tool append here, instead of in BaseTool.process_llm_request.

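Since LlmRequest.live_connect_config is now always constructed (see the llm_request.py change below), the old guard is unnecessary and the flow copies the live settings unconditionally. As an illustrative sketch only (not the code the commit uses), the eight RunConfig fields touched here map one-to-one onto LiveConnectConfig attributes of the same name:

# Illustrative only: the field names come from the diff above; the real code
# assigns each attribute explicitly rather than looping.
_LIVE_FIELDS = (
    "response_modalities",
    "speech_config",
    "output_audio_transcription",
    "input_audio_transcription",
    "realtime_input_config",
    "enable_affective_dialog",
    "proactivity",
    "session_resumption",
)

def copy_live_settings(run_config, live_connect_config) -> None:
  """Copies each live setting from a RunConfig onto a LiveConnectConfig."""
  for name in _LIVE_FIELDS:
    setattr(live_connect_config, name, getattr(run_config, name))
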
src/google/adk/models/llm_request.py

Lines changed: 4 additions & 24 deletions

@@ -14,9 +14,6 @@

 from __future__ import annotations

-from collections.abc import AsyncGenerator as ABCAsyncGenerator
-import inspect
-from typing import get_origin
 from typing import Optional

 from google.genai import types
@@ -25,7 +22,6 @@
 from pydantic import Field

 from ..tools.base_tool import BaseTool
-from ..tools.function_tool import FunctionTool


 def _find_tool_with_function_declarations(
@@ -70,13 +66,13 @@ class LlmRequest(BaseModel):
   config: types.GenerateContentConfig = Field(
       default_factory=types.GenerateContentConfig
   )
+  live_connect_config: types.LiveConnectConfig = Field(
+      default_factory=types.LiveConnectConfig
+  )
   """Additional config for the generate content request.

   tools in generate_content_config should not be set.
   """
-  live_connect_config: Optional[types.LiveConnectConfig] = None
-  """Live connection config.
-  """
   tools_dict: dict[str, BaseTool] = Field(default_factory=dict, exclude=True)
   """The tools dictionary."""

@@ -103,23 +99,7 @@ def append_tools(self, tools: list[BaseTool]) -> None:
       return
     declarations = []
     for tool in tools:
-      if self.live_connect_config is not None:
-        # ignore response for tools that returns AsyncGenerator that the model
-        # can't understand yet even though the model can't handle it, streaming
-        # tools can handle it.
-        # to check type, use typing.collections.abc.AsyncGenerator and not
-        # typing.AsyncGenerator
-        is_async_generator_return = False
-        if isinstance(tool, FunctionTool):
-          signature = inspect.signature(tool.func)
-          is_async_generator_return = (
-              get_origin(signature.return_annotation) is ABCAsyncGenerator
-          )
-        declaration = tool._get_declaration(
-            ignore_return_declaration=is_async_generator_return
-        )
-      else:
-        declaration = tool._get_declaration()
+      declaration = tool._get_declaration()
       if declaration:
         declarations.append(declaration)
         self.tools_dict[tool.name] = tool

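Two things change here: live_connect_config moves from an Optional field defaulting to None to a field with a default_factory, and append_tools no longer inspects return annotations, so every tool goes through the plain tool._get_declaration() path. A minimal sketch of the observable effect, assuming only what the diff shows (LlmRequest is constructible with no arguments, as base_llm_flow.py now does):

from google.adk.models.llm_request import LlmRequest
from google.genai import types

req = LlmRequest()
# With the default_factory the attribute is always a LiveConnectConfig
# instance rather than None, so callers can set fields on it directly.
assert isinstance(req.live_connect_config, types.LiveConnectConfig)
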
src/google/adk/tools/_automatic_function_calling_util.py

Lines changed: 3 additions & 8 deletions

@@ -195,7 +195,6 @@ def build_function_declaration(
     func: Union[Callable, BaseModel],
     ignore_params: Optional[list[str]] = None,
     variant: GoogleLLMVariant = GoogleLLMVariant.GEMINI_API,
-    ignore_return_declaration: bool = False,
 ) -> types.FunctionDeclaration:
   signature = inspect.signature(func)
   should_update_signature = False
@@ -233,11 +232,9 @@
   new_func.__annotations__ = func.__annotations__

   return (
-      from_function_with_options(func, variant, ignore_return_declaration)
+      from_function_with_options(func, variant)
       if not should_update_signature
-      else from_function_with_options(
-          new_func, variant, ignore_return_declaration
-      )
+      else from_function_with_options(new_func, variant)
   )


@@ -296,7 +293,6 @@ def build_function_declaration_util(
 def from_function_with_options(
     func: Callable,
     variant: GoogleLLMVariant = GoogleLLMVariant.GEMINI_API,
-    ignore_return_declaration: bool = False,
 ) -> 'types.FunctionDeclaration':

   parameters_properties = {}
@@ -328,8 +324,7 @@
           declaration.parameters
       )
   )
-
-  if variant == GoogleLLMVariant.GEMINI_API or ignore_return_declaration:
+  if variant == GoogleLLMVariant.GEMINI_API:
     return declaration

   return_annotation = inspect.signature(func).return_annotation

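With ignore_return_declaration removed, the only remaining switch in from_function_with_options is the API variant: the GEMINI_API branch still returns the declaration without a response schema. A rough usage sketch under that assumption (the helper lives in a private module, so the import path simply mirrors the file path above):

from google.adk.tools._automatic_function_calling_util import (
    from_function_with_options,
)
from google.adk.utils.variant_utils import GoogleLLMVariant

def add(x: int, y: int) -> int:
  """Adds two numbers."""
  return x + y

# Two-argument form after this change; there is no ignore_return_declaration
# flag any more, and GEMINI_API skips the return/response schema as before.
declaration = from_function_with_options(add, GoogleLLMVariant.GEMINI_API)
print(declaration.name)
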
src/google/adk/tools/agent_tool.py

Lines changed: 1 addition & 4 deletions

@@ -15,7 +15,6 @@
 from __future__ import annotations

 from typing import Any
-from typing import Optional
 from typing import TYPE_CHECKING

 from google.genai import types
@@ -62,9 +61,7 @@ def populate_name(cls, data: Any) -> Any:
     return data

   @override
-  def _get_declaration(
-      self, ignore_return_declaration: bool = False
-  ) -> Optional[types.FunctionDeclaration]:
+  def _get_declaration(self) -> types.FunctionDeclaration:
     from ..agents.llm_agent import LlmAgent
     from ..utils.variant_utils import GoogleLLMVariant

src/google/adk/tools/application_integration_tool/integration_connector_tool.py

Lines changed: 3 additions & 5 deletions

@@ -20,7 +20,7 @@
 from typing import Optional
 from typing import Union

-from google.genai import types
+from google.genai.types import FunctionDeclaration
 from typing_extensions import override

 from ...auth.auth_credential import AuthCredential
@@ -115,9 +115,7 @@ def __init__(
     self._auth_credential = auth_credential

   @override
-  def _get_declaration(
-      self, ignore_return_declaration: bool = False
-  ) -> Optional[types.FunctionDeclaration]:
+  def _get_declaration(self) -> FunctionDeclaration:
     """Returns the function declaration in the Gemini Schema format."""
     schema_dict = self._rest_api_tool._operation_parser.get_json_schema()
     for field in self.EXCLUDE_FIELDS:
@@ -128,7 +126,7 @@ def _get_declaration(
         schema_dict['required'].remove(field)

     parameters = _to_gemini_schema(schema_dict)
-    function_decl = types.FunctionDeclaration(
+    function_decl = FunctionDeclaration(
         name=self.name, description=self.description, parameters=parameters
     )
     return function_decl

src/google/adk/tools/base_tool.py

Lines changed: 1 addition & 3 deletions

@@ -78,9 +78,7 @@ def __init__(
     self.is_long_running = is_long_running
     self.custom_metadata = custom_metadata

-  def _get_declaration(
-      self, ignore_return_declaration: bool = False
-  ) -> Optional[types.FunctionDeclaration]:
+  def _get_declaration(self) -> Optional[types.FunctionDeclaration]:
     """Gets the OpenAPI specification of this tool in the form of a FunctionDeclaration.

     NOTE:

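Every _get_declaration override in this commit now matches the base class's one-argument signature. A hypothetical subclass sketch showing the shape of such an override (EchoTool is not part of the commit; run_async and other overrides are omitted):

from typing import Optional

from google.adk.tools.base_tool import BaseTool
from google.genai import types
from typing_extensions import override


class EchoTool(BaseTool):  # hypothetical example, not from this commit
  def __init__(self):
    super().__init__(name="echo", description="Echoes its input back.")

  @override
  def _get_declaration(self) -> Optional[types.FunctionDeclaration]:
    # Simplified signature: no ignore_return_declaration parameter.
    return types.FunctionDeclaration(
        name=self.name,
        description=self.description,
        parameters=types.Schema(
            type=types.Type.OBJECT,
            properties={"text": types.Schema(type=types.Type.STRING)},
        ),
    )
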
src/google/adk/tools/crewai_tool.py

Lines changed: 1 addition & 5 deletions

@@ -14,8 +14,6 @@

 from __future__ import annotations

-from typing import Optional
-
 from google.genai import types
 from typing_extensions import override

@@ -64,9 +62,7 @@ def __init__(self, tool: CrewaiBaseTool, *, name: str, description: str):
     self.description = tool.description

   @override
-  def _get_declaration(
-      self, ignore_return_declaration: bool = False
-  ) -> Optional[types.FunctionDeclaration]:
+  def _get_declaration(self) -> types.FunctionDeclaration:
     """Build the function declaration for the tool."""
     function_declaration = _automatic_function_calling_util.build_function_declaration_for_params_for_crewai(
         False,
