Skip to content

Using event_stream_handler creates leading empty text message part when using gpt-oss family models in AWS Bedrock #3211

@daryllimyt

Description

@daryllimyt

Initial Checks

Description

Running the script below outputs the following:
Without `event_stream_handler`, we see the expected message parts:

> uv run scripts/repro/async_gpt_oss_bedrock.py
[
  {
    "parts": [
      {
        "content": "Hi",
        "timestamp": "2025-10-21T15:51:57.221192Z",
        "part_kind": "user-prompt"
      }
    ],
    "instructions": "You are a helpful assistant.",
    "kind": "request"
  },
  {
    "parts": [
      {
        "content": "We need to respond. The user says \"Hi\". Just greet.",
        "id": null,
        "signature": null,
        "provider_name": null,
        "part_kind": "thinking"
      },
      {
        "content": "Hello! How can I assist you today?",
        "id": null,
        "part_kind": "text"
      }
    ],
    "usage": {
      "input_tokens": 80,
      "cache_write_tokens": 0,
      "cache_read_tokens": 0,
      "output_tokens": 33,
      "input_audio_tokens": 0,
      "cache_audio_read_tokens": 0,
      "output_audio_tokens": 0,
      "details": {}
    },
    "model_name": "openai.gpt-oss-120b-1:0",
    "timestamp": "2025-10-21T15:51:57.971203Z",
    "kind": "response",
    "provider_name": "bedrock",
    "provider_details": {
      "finish_reason": "end_turn"
    },
    "provider_response_id": "3058a041-a467-44ad-8778-916b45faaaa1",
    "finish_reason": "stop"
  }
]

With `event_stream_handler`, we get an empty leading text part:

> uv run scripts/repro/async_gpt_oss_bedrock.py
[
  {
    "parts": [
      {
        "content": "Hi",
        "timestamp": "2025-10-21T15:52:54.808647Z",
        "part_kind": "user-prompt"
      }
    ],
    "instructions": "You are a helpful assistant.",
    "kind": "request"
  },
  {
    "parts": [
      {
        "content": "",
        "id": null,
        "part_kind": "text"
      },
      {
        "content": "We have a simple greeting. Should respond politely.",
        "id": null,
        "signature": null,
        "provider_name": null,
        "part_kind": "thinking"
      },
      {
        "content": "Hello! How can I help you today?",
        "id": null,
        "part_kind": "text"
      }
    ],
    "usage": {
      "input_tokens": 80,
      "cache_write_tokens": 0,
      "cache_read_tokens": 0,
      "output_tokens": 29,
      "input_audio_tokens": 0,
      "cache_audio_read_tokens": 0,
      "output_audio_tokens": 0,
      "details": {}
    },
    "model_name": "openai.gpt-oss-120b-1:0",
    "timestamp": "2025-10-21T15:52:55.441247Z",
    "kind": "response",
    "provider_name": "bedrock",
    "provider_details": {
      "finish_reason": "end_turn"
    },
    "provider_response_id": "2c7d7992-ed53-4583-a133-2b968012c631",
    "finish_reason": "stop"
  }
]

The empty leading text part causes Bedrock's ConverseStream request validation to fail on the next turn, which blocks continuing the conversation.

Example Code

#!/usr/bin/env python3
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "pydantic-ai-slim[openai,anthropic,bedrock]==1.2.0",
#     "python-dotenv==1.1.1",
# ]
# ///
import asyncio
import os
from collections.abc import AsyncIterable

from dotenv import load_dotenv
from pydantic_ai import Agent, RunContext
from pydantic_ai.messages import AgentStreamEvent
from pydantic_ai.models.bedrock import BedrockConverseModel
from pydantic_ai.providers.bedrock import BedrockProvider
from pydantic_core import to_json

# Pull AWS credentials/region from a local .env file into the environment,
# if one is present.
load_dotenv()

model_name = "openai.gpt-oss-120b-1:0"

# Any of these may be None when the variable is unset; presumably the
# provider then falls back to the default AWS credential chain — confirm
# against BedrockProvider's docs if that matters for your setup.
access_key_id = os.getenv("AWS_ACCESS_KEY_ID")
secret_access_key = os.getenv("AWS_SECRET_ACCESS_KEY")
region = os.getenv("AWS_REGION")

# Build the provider first, then wrap it in the Converse model; this is
# behaviorally identical to constructing it inline.
provider = BedrockProvider(
    aws_access_key_id=access_key_id,
    aws_secret_access_key=secret_access_key,
    region_name=region,
)
model = BedrockConverseModel(model_name=model_name, provider=provider)


async def noop(context: RunContext[None], events: AsyncIterable[AgentStreamEvent]):
    """No-op event-stream handler: ignores all events and returns None.

    Merely registering a handler (even this one) is enough to trigger the
    spurious empty leading text part reported in this issue.
    """


async def main():
    """Run a single-turn conversation and print the resulting messages.

    The agent is configured with a no-op ``event_stream_handler`` — the
    condition under which the empty leading text part appears.
    """
    agent = Agent(
        model=model,
        instructions="You are a helpful assistant.",
        event_stream_handler=noop,
    )

    result = await agent.run("Hi")

    # Serialize the new messages as indented JSON so the part structure
    # (and any empty leading text part) is visible.
    messages = result.new_messages()
    print(to_json(messages, indent=2).decode())


# Script entry point: run the async repro end-to-end.
if __name__ == "__main__":
    asyncio.run(main())

Python, Pydantic AI & LLM client version

#!/usr/bin/env python3
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "pydantic-ai-slim[openai,anthropic,bedrock]==1.2.0",
#     "python-dotenv==1.1.1",
# ]
# ///

Metadata

Metadata

Assignees

No one assigned

    Labels

    bug — Something isn't working

    Type

    No type

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions