Skip to content

fix: Ensure user content is included in LLM requests when provided by… #2458

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/google/adk/flows/llm_flows/contents.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,12 +223,12 @@ def _get_contents(
for event in events:
if (
not event.content
or not event.content.role
or not event.content.parts
or event.content.parts[0].text == ''
):
# Skip events that have no content, no parts, or whose first part has
# empty text.
# Events that carry user content but lack a role are NOT skipped —
# e.g. events that exist purely to mutate session state.

continue
Expand Down
6 changes: 6 additions & 0 deletions src/google/adk/models/base_llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,12 @@ def _maybe_append_user_content(self, llm_request: LlmRequest):
)
return

# Insert user role for the content where the user message exists
# but not the role
if (llm_request.contents[-1].parts):
llm_request.contents[-1].role = "user"
return

# Insert a user content to preserve user intent and to avoid empty
# model response.
if llm_request.contents[-1].role != 'user':
Expand Down
48 changes: 48 additions & 0 deletions tests/unittests/models/test_base_llm.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
from google.genai import types
from google.adk.models.llm_request import LlmRequest
from google.adk.models.lite_llm import _get_completion_inputs


@pytest.mark.parametrize(
    "content_kwargs",
    [
        # Case 1: the role is explicitly set to "user".
        {"role": "user", "parts": [types.Part(text="This is an input text from user.")]},
        # Case 2: the role is omitted; it must still be normalized to "user".
        {"parts": [types.Part(text="This is an input text from user.")]},
    ],
)
def test_user_content_role_defaults_to_user(content_kwargs):
    """User-provided messages must reach the LLM with role 'user'.

    `_get_completion_inputs` should normalize messages so that an explicit
    'user' role and a missing role are treated equivalently, and the user
    text itself is preserved in the outgoing messages.
    """
    request = LlmRequest(
        contents=[types.Content(**content_kwargs)],
        config=types.GenerateContentConfig(),
    )

    messages, _, _, _ = _get_completion_inputs(request)

    # Every outgoing message must carry the 'user' role.
    roles = [msg.get("role") for msg in messages]
    assert all(role == "user" for role in roles), (
        f"Expected role 'user' but got {messages}"
    )

    # The original user text must survive normalization unchanged.
    texts = [msg.get("content") or "" for msg in messages]
    assert any(text == "This is an input text from user." for text in texts), (
        f"Expected the user text to be preserved, but got {messages}"
    )