From 4dbdad79b946f7da89fa728c4951fd6aa667b018 Mon Sep 17 00:00:00 2001 From: Wouter Doppenberg Date: Fri, 15 Aug 2025 10:43:35 +0200 Subject: [PATCH 1/8] Added generics for state & forwarded props; poetry -> uv; linting Changelog: changed --- python-sdk/.gitignore | 1 + python-sdk/.pre-commit-config.yaml | 41 +++++++++++++++++++++++++++++ python-sdk/ag_ui/__init__.py | 0 python-sdk/ag_ui/core/__init__.py | 2 -- python-sdk/ag_ui/core/events.py | 18 ++++++------- python-sdk/ag_ui/core/types.py | 20 +++++++------- python-sdk/ag_ui/encoder/encoder.py | 2 +- python-sdk/pyproject.toml | 33 ++++++++++++++++------- python-sdk/tests/test_events.py | 5 ++-- 9 files changed, 87 insertions(+), 35 deletions(-) create mode 100644 python-sdk/.pre-commit-config.yaml create mode 100644 python-sdk/ag_ui/__init__.py diff --git a/python-sdk/.gitignore b/python-sdk/.gitignore index 5d9e5de22..69af64f9c 100644 --- a/python-sdk/.gitignore +++ b/python-sdk/.gitignore @@ -63,3 +63,4 @@ venv.bak/ # Project specific .DS_Store +/uv.lock diff --git a/python-sdk/.pre-commit-config.yaml b/python-sdk/.pre-commit-config.yaml new file mode 100644 index 000000000..5b1742428 --- /dev/null +++ b/python-sdk/.pre-commit-config.yaml @@ -0,0 +1,41 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: trailing-whitespace + exclude_types: [ jupyter ] + - id: end-of-file-fixer + exclude_types: [ jupyter ] + - id: check-docstring-first + - id: debug-statements + - id: check-ast + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: v0.11.8 + hooks: + - id: ruff + args: [ + --fix + ] + - id: ruff-format + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.15.0 + hooks: + - id: mypy + args: [ + --python-version=3.12, + --disallow-untyped-calls, + --disallow-untyped-defs, + --disallow-incomplete-defs, + --check-untyped-defs, + --no-implicit-optional, + --warn-redundant-casts, + --ignore-missing-imports, + ] + additional_dependencies: + - 
"types-pytz" + exclude_types: [ jupyter ] + exclude: "tests" + - repo: https://github.com/kynan/nbstripout + rev: 0.8.1 + hooks: + - id: nbstripout diff --git a/python-sdk/ag_ui/__init__.py b/python-sdk/ag_ui/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/python-sdk/ag_ui/core/__init__.py b/python-sdk/ag_ui/core/__init__.py index 7e909ad5b..59410aec0 100644 --- a/python-sdk/ag_ui/core/__init__.py +++ b/python-sdk/ag_ui/core/__init__.py @@ -46,7 +46,6 @@ Context, Tool, RunAgentInput, - State ) __all__ = [ @@ -92,5 +91,4 @@ "Context", "Tool", "RunAgentInput", - "State" ] diff --git a/python-sdk/ag_ui/core/events.py b/python-sdk/ag_ui/core/events.py index 16dfdccca..a42c2888d 100644 --- a/python-sdk/ag_ui/core/events.py +++ b/python-sdk/ag_ui/core/events.py @@ -3,11 +3,11 @@ """ from enum import Enum -from typing import Annotated, Any, List, Literal, Optional, Union +from typing import Annotated, List, Literal, Optional, Union, Generic from pydantic import Field -from .types import ConfiguredBaseModel, Message, State +from .types import ConfiguredBaseModel, Message, AgentStateT, JSONValue class EventType(str, Enum): @@ -46,7 +46,7 @@ class BaseEvent(ConfiguredBaseModel): """ type: EventType timestamp: Optional[int] = None - raw_event: Optional[Any] = None + raw_event: Optional[JSONValue] = None class TextMessageStartEvent(BaseEvent): @@ -161,12 +161,12 @@ class ThinkingEndEvent(BaseEvent): """ type: Literal[EventType.THINKING_END] = EventType.THINKING_END # pyright: ignore[reportIncompatibleVariableOverride] -class StateSnapshotEvent(BaseEvent): +class StateSnapshotEvent(BaseEvent, Generic[AgentStateT]): """ Event containing a snapshot of the state. """ type: Literal[EventType.STATE_SNAPSHOT] = EventType.STATE_SNAPSHOT # pyright: ignore[reportIncompatibleVariableOverride] - snapshot: State + snapshot: AgentStateT class StateDeltaEvent(BaseEvent): @@ -174,7 +174,7 @@ class StateDeltaEvent(BaseEvent): Event containing a delta of the state. 
""" type: Literal[EventType.STATE_DELTA] = EventType.STATE_DELTA # pyright: ignore[reportIncompatibleVariableOverride] - delta: List[Any] # JSON Patch (RFC 6902) + delta: JSONValue # JSON Patch (RFC 6902) class MessagesSnapshotEvent(BaseEvent): @@ -190,7 +190,7 @@ class RawEvent(BaseEvent): Event containing a raw event. """ type: Literal[EventType.RAW] = EventType.RAW # pyright: ignore[reportIncompatibleVariableOverride] - event: Any + event: JSONValue source: Optional[str] = None @@ -200,7 +200,7 @@ class CustomEvent(BaseEvent): """ type: Literal[EventType.CUSTOM] = EventType.CUSTOM # pyright: ignore[reportIncompatibleVariableOverride] name: str - value: Any + value: JSONValue class RunStartedEvent(BaseEvent): @@ -219,7 +219,7 @@ class RunFinishedEvent(BaseEvent): type: Literal[EventType.RUN_FINISHED] = EventType.RUN_FINISHED # pyright: ignore[reportIncompatibleVariableOverride] thread_id: str run_id: str - result: Optional[Any] = None + result: JSONValue = None class RunErrorEvent(BaseEvent): diff --git a/python-sdk/ag_ui/core/types.py b/python-sdk/ag_ui/core/types.py index 47b7ae182..efa1c03df 100644 --- a/python-sdk/ag_ui/core/types.py +++ b/python-sdk/ag_ui/core/types.py @@ -2,11 +2,16 @@ This module contains the types for the Agent User Interaction Protocol Python SDK. """ -from typing import Annotated, Any, List, Literal, Optional, Union +from typing import Annotated, Any, List, Literal, Optional, Union, Generic +from typing_extensions import TypeVar from pydantic import BaseModel, ConfigDict, Field from pydantic.alias_generators import to_camel +JSONValue = Union[str, int, float, bool, None, dict[str, Any], list[Any]] +AgentStateT = TypeVar('AgentStateT', default=JSONValue, contravariant=True) +FwdPropsT = TypeVar('FwdPropsT', default=JSONValue, contravariant=True) + class ConfiguredBaseModel(BaseModel): """ @@ -51,7 +56,6 @@ class DeveloperMessage(BaseMessage): A developer message. 
""" role: Literal["developer"] = "developer" # pyright: ignore[reportIncompatibleVariableOverride] - content: str class SystemMessage(BaseMessage): @@ -59,7 +63,6 @@ class SystemMessage(BaseMessage): A system message. """ role: Literal["system"] = "system" # pyright: ignore[reportIncompatibleVariableOverride] - content: str class AssistantMessage(BaseMessage): @@ -75,7 +78,6 @@ class UserMessage(BaseMessage): A user message. """ role: Literal["user"] = "user" # pyright: ignore[reportIncompatibleVariableOverride] - content: str class ToolMessage(ConfiguredBaseModel): @@ -114,18 +116,14 @@ class Tool(ConfiguredBaseModel): parameters: Any # JSON Schema for the tool parameters -class RunAgentInput(ConfiguredBaseModel): +class RunAgentInput(ConfiguredBaseModel, Generic[AgentStateT, FwdPropsT]): """ Input for running an agent. """ thread_id: str run_id: str - state: Any + state: AgentStateT messages: List[Message] tools: List[Tool] context: List[Context] - forwarded_props: Any - - -# State can be any type -State = Any + forwarded_props: FwdPropsT diff --git a/python-sdk/ag_ui/encoder/encoder.py b/python-sdk/ag_ui/encoder/encoder.py index f840e3bb8..2cfe88392 100644 --- a/python-sdk/ag_ui/encoder/encoder.py +++ b/python-sdk/ag_ui/encoder/encoder.py @@ -10,7 +10,7 @@ class EventEncoder: """ Encodes Agent User Interaction events. 
""" - def __init__(self, accept: str = None): + def __init__(self, accept: str | None = None): pass def get_content_type(self) -> str: diff --git a/python-sdk/pyproject.toml b/python-sdk/pyproject.toml index 42c02bf45..15b8b9649 100644 --- a/python-sdk/pyproject.toml +++ b/python-sdk/pyproject.toml @@ -1,15 +1,30 @@ -[tool.poetry] +[project] name = "ag-ui-protocol" -version = "0.1.8" +version = "0.1.9" description = "" -authors = ["Markus Ecker "] +authors = [ + { name = "Markus Ecker", email = "markus.ecker@gmail.com" }, +] readme = "README.md" -packages = [{include = "ag_ui", from = "."}] -[tool.poetry.dependencies] -python = "^3.9" -pydantic = "^2.11.2" +requires-python = ">=3.9,<4.0" +dependencies = [ + "pydantic>=2.11.2,<3.0.0", +] +packages = [ + { include = "ag_ui", from = "ag_ui" } +] + +[tool.hatch.build.targets.wheel] +packages = ["ag_ui"] [build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" +requires = ["hatchling"] +build-backend = "hatchling.build" + +[dependency-groups] +dev = [ + "mypy>=1.17.1", + "pyright>=1.1.403", + "ruff>=0.12.9", +] diff --git a/python-sdk/tests/test_events.py b/python-sdk/tests/test_events.py index c73a2537c..e413275c7 100644 --- a/python-sdk/tests/test_events.py +++ b/python-sdk/tests/test_events.py @@ -1,9 +1,8 @@ import unittest -import json from datetime import datetime -from pydantic import ValidationError, TypeAdapter +from pydantic import TypeAdapter -from ag_ui.core.types import Message, UserMessage, AssistantMessage, FunctionCall, ToolCall +from ag_ui.core.types import UserMessage, AssistantMessage, FunctionCall, ToolCall from ag_ui.core.events import ( EventType, BaseEvent, From df5f0d34aafd142c38b1ad3fca3d118330d4b0ff Mon Sep 17 00:00:00 2001 From: Wouter Doppenberg Date: Fri, 15 Aug 2025 10:45:39 +0200 Subject: [PATCH 2/8] pre-commit fixes --- python-sdk/.pre-commit-config.yaml | 19 +- python-sdk/ag_ui/core/__init__.py | 2 +- python-sdk/ag_ui/core/events.py | 50 +++++- 
python-sdk/ag_ui/core/types.py | 20 ++- python-sdk/ag_ui/encoder/encoder.py | 2 + python-sdk/tests/test_encoder.py | 100 ++++++----- python-sdk/tests/test_events.py | 263 ++++++++++++---------------- python-sdk/tests/test_types.py | 227 ++++++++++-------------- 8 files changed, 331 insertions(+), 352 deletions(-) diff --git a/python-sdk/.pre-commit-config.yaml b/python-sdk/.pre-commit-config.yaml index 5b1742428..f52033e00 100644 --- a/python-sdk/.pre-commit-config.yaml +++ b/python-sdk/.pre-commit-config.yaml @@ -1,14 +1,4 @@ repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 - hooks: - - id: trailing-whitespace - exclude_types: [ jupyter ] - - id: end-of-file-fixer - exclude_types: [ jupyter ] - - id: check-docstring-first - - id: debug-statements - - id: check-ast - repo: https://github.com/charliermarsh/ruff-pre-commit rev: v0.11.8 hooks: @@ -16,11 +6,14 @@ repos: args: [ --fix ] + files: ^python-sdk/ - id: ruff-format + files: ^python-sdk/ - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.15.0 hooks: - id: mypy + files: ^python-sdk/ args: [ --python-version=3.12, --disallow-untyped-calls, @@ -34,8 +27,4 @@ repos: additional_dependencies: - "types-pytz" exclude_types: [ jupyter ] - exclude: "tests" - - repo: https://github.com/kynan/nbstripout - rev: 0.8.1 - hooks: - - id: nbstripout + exclude: "tests" \ No newline at end of file diff --git a/python-sdk/ag_ui/core/__init__.py b/python-sdk/ag_ui/core/__init__.py index 59410aec0..a545ee13d 100644 --- a/python-sdk/ag_ui/core/__init__.py +++ b/python-sdk/ag_ui/core/__init__.py @@ -29,7 +29,7 @@ RunErrorEvent, StepStartedEvent, StepFinishedEvent, - Event + Event, ) from ag_ui.core.types import ( diff --git a/python-sdk/ag_ui/core/events.py b/python-sdk/ag_ui/core/events.py index a42c2888d..b0b2bab40 100644 --- a/python-sdk/ag_ui/core/events.py +++ b/python-sdk/ag_ui/core/events.py @@ -14,6 +14,7 @@ class EventType(str, Enum): """ The type of event. 
""" + TEXT_MESSAGE_START = "TEXT_MESSAGE_START" TEXT_MESSAGE_CONTENT = "TEXT_MESSAGE_CONTENT" TEXT_MESSAGE_END = "TEXT_MESSAGE_END" @@ -44,6 +45,7 @@ class BaseEvent(ConfiguredBaseModel): """ Base event for all events in the Agent User Interaction Protocol. """ + type: EventType timestamp: Optional[int] = None raw_event: Optional[JSONValue] = None @@ -53,6 +55,7 @@ class TextMessageStartEvent(BaseEvent): """ Event indicating the start of a text message. """ + type: Literal[EventType.TEXT_MESSAGE_START] = EventType.TEXT_MESSAGE_START # pyright: ignore[reportIncompatibleVariableOverride] message_id: str role: Literal["assistant"] = "assistant" @@ -62,6 +65,7 @@ class TextMessageContentEvent(BaseEvent): """ Event containing a piece of text message content. """ + type: Literal[EventType.TEXT_MESSAGE_CONTENT] = EventType.TEXT_MESSAGE_CONTENT # pyright: ignore[reportIncompatibleVariableOverride] message_id: str delta: str = Field(min_length=1) @@ -71,41 +75,58 @@ class TextMessageEndEvent(BaseEvent): """ Event indicating the end of a text message. """ + type: Literal[EventType.TEXT_MESSAGE_END] = EventType.TEXT_MESSAGE_END # pyright: ignore[reportIncompatibleVariableOverride] message_id: str + class TextMessageChunkEvent(BaseEvent): """ Event containing a chunk of text message content. """ + type: Literal[EventType.TEXT_MESSAGE_CHUNK] = EventType.TEXT_MESSAGE_CHUNK # pyright: ignore[reportIncompatibleVariableOverride] message_id: Optional[str] = None role: Optional[Literal["assistant"]] = None delta: Optional[str] = None + class ThinkingTextMessageStartEvent(BaseEvent): """ Event indicating the start of a thinking text message. 
""" - type: Literal[EventType.THINKING_TEXT_MESSAGE_START] = EventType.THINKING_TEXT_MESSAGE_START # pyright: ignore[reportIncompatibleVariableOverride] + + type: Literal[EventType.THINKING_TEXT_MESSAGE_START] = ( + EventType.THINKING_TEXT_MESSAGE_START + ) # pyright: ignore[reportIncompatibleVariableOverride] + class ThinkingTextMessageContentEvent(BaseEvent): """ Event indicating a piece of a thinking text message. """ - type: Literal[EventType.THINKING_TEXT_MESSAGE_CONTENT] = EventType.THINKING_TEXT_MESSAGE_CONTENT # pyright: ignore[reportIncompatibleVariableOverride] + + type: Literal[EventType.THINKING_TEXT_MESSAGE_CONTENT] = ( + EventType.THINKING_TEXT_MESSAGE_CONTENT + ) # pyright: ignore[reportIncompatibleVariableOverride] delta: str = Field(min_length=1) + class ThinkingTextMessageEndEvent(BaseEvent): """ Event indicating the end of a thinking text message. """ - type: Literal[EventType.THINKING_TEXT_MESSAGE_END] = EventType.THINKING_TEXT_MESSAGE_END # pyright: ignore[reportIncompatibleVariableOverride] + + type: Literal[EventType.THINKING_TEXT_MESSAGE_END] = ( + EventType.THINKING_TEXT_MESSAGE_END + ) # pyright: ignore[reportIncompatibleVariableOverride] + class ToolCallStartEvent(BaseEvent): """ Event indicating the start of a tool call. """ + type: Literal[EventType.TOOL_CALL_START] = EventType.TOOL_CALL_START # pyright: ignore[reportIncompatibleVariableOverride] tool_call_id: str tool_call_name: str @@ -116,6 +137,7 @@ class ToolCallArgsEvent(BaseEvent): """ Event containing tool call arguments. """ + type: Literal[EventType.TOOL_CALL_ARGS] = EventType.TOOL_CALL_ARGS # pyright: ignore[reportIncompatibleVariableOverride] tool_call_id: str delta: str @@ -125,46 +147,57 @@ class ToolCallEndEvent(BaseEvent): """ Event indicating the end of a tool call. 
""" + type: Literal[EventType.TOOL_CALL_END] = EventType.TOOL_CALL_END # pyright: ignore[reportIncompatibleVariableOverride] tool_call_id: str + class ToolCallChunkEvent(BaseEvent): """ Event containing a chunk of tool call content. """ + type: Literal[EventType.TOOL_CALL_CHUNK] = EventType.TOOL_CALL_CHUNK # pyright: ignore[reportIncompatibleVariableOverride] tool_call_id: Optional[str] = None tool_call_name: Optional[str] = None parent_message_id: Optional[str] = None delta: Optional[str] = None + class ToolCallResultEvent(BaseEvent): """ Event containing the result of a tool call. """ + message_id: str type: Literal[EventType.TOOL_CALL_RESULT] = EventType.TOOL_CALL_RESULT # pyright: ignore[reportIncompatibleVariableOverride] tool_call_id: str content: str role: Optional[Literal["tool"]] = None + class ThinkingStartEvent(BaseEvent): """ Event indicating the start of a thinking step event. """ + type: Literal[EventType.THINKING_START] = EventType.THINKING_START # pyright: ignore[reportIncompatibleVariableOverride] title: Optional[str] = None + class ThinkingEndEvent(BaseEvent): """ Event indicating the end of a thinking step event. """ + type: Literal[EventType.THINKING_END] = EventType.THINKING_END # pyright: ignore[reportIncompatibleVariableOverride] + class StateSnapshotEvent(BaseEvent, Generic[AgentStateT]): """ Event containing a snapshot of the state. """ + type: Literal[EventType.STATE_SNAPSHOT] = EventType.STATE_SNAPSHOT # pyright: ignore[reportIncompatibleVariableOverride] snapshot: AgentStateT @@ -173,6 +206,7 @@ class StateDeltaEvent(BaseEvent): """ Event containing a delta of the state. """ + type: Literal[EventType.STATE_DELTA] = EventType.STATE_DELTA # pyright: ignore[reportIncompatibleVariableOverride] delta: JSONValue # JSON Patch (RFC 6902) @@ -181,6 +215,7 @@ class MessagesSnapshotEvent(BaseEvent): """ Event containing a snapshot of the messages. 
""" + type: Literal[EventType.MESSAGES_SNAPSHOT] = EventType.MESSAGES_SNAPSHOT # pyright: ignore[reportIncompatibleVariableOverride] messages: List[Message] @@ -189,6 +224,7 @@ class RawEvent(BaseEvent): """ Event containing a raw event. """ + type: Literal[EventType.RAW] = EventType.RAW # pyright: ignore[reportIncompatibleVariableOverride] event: JSONValue source: Optional[str] = None @@ -198,6 +234,7 @@ class CustomEvent(BaseEvent): """ Event containing a custom event. """ + type: Literal[EventType.CUSTOM] = EventType.CUSTOM # pyright: ignore[reportIncompatibleVariableOverride] name: str value: JSONValue @@ -207,6 +244,7 @@ class RunStartedEvent(BaseEvent): """ Event indicating that a run has started. """ + type: Literal[EventType.RUN_STARTED] = EventType.RUN_STARTED # pyright: ignore[reportIncompatibleVariableOverride] thread_id: str run_id: str @@ -216,6 +254,7 @@ class RunFinishedEvent(BaseEvent): """ Event indicating that a run has finished. """ + type: Literal[EventType.RUN_FINISHED] = EventType.RUN_FINISHED # pyright: ignore[reportIncompatibleVariableOverride] thread_id: str run_id: str @@ -226,6 +265,7 @@ class RunErrorEvent(BaseEvent): """ Event indicating that a run has encountered an error. """ + type: Literal[EventType.RUN_ERROR] = EventType.RUN_ERROR # pyright: ignore[reportIncompatibleVariableOverride] message: str code: Optional[str] = None @@ -235,6 +275,7 @@ class StepStartedEvent(BaseEvent): """ Event indicating that a step has started. """ + type: Literal[EventType.STEP_STARTED] = EventType.STEP_STARTED # pyright: ignore[reportIncompatibleVariableOverride] step_name: str @@ -243,6 +284,7 @@ class StepFinishedEvent(BaseEvent): """ Event indicating that a step has finished. 
""" + type: Literal[EventType.STEP_FINISHED] = EventType.STEP_FINISHED # pyright: ignore[reportIncompatibleVariableOverride] step_name: str @@ -269,5 +311,5 @@ class StepFinishedEvent(BaseEvent): StepStartedEvent, StepFinishedEvent, ], - Field(discriminator="type") + Field(discriminator="type"), ] diff --git a/python-sdk/ag_ui/core/types.py b/python-sdk/ag_ui/core/types.py index efa1c03df..824f0bd70 100644 --- a/python-sdk/ag_ui/core/types.py +++ b/python-sdk/ag_ui/core/types.py @@ -9,14 +9,15 @@ from pydantic.alias_generators import to_camel JSONValue = Union[str, int, float, bool, None, dict[str, Any], list[Any]] -AgentStateT = TypeVar('AgentStateT', default=JSONValue, contravariant=True) -FwdPropsT = TypeVar('FwdPropsT', default=JSONValue, contravariant=True) +AgentStateT = TypeVar("AgentStateT", default=JSONValue, contravariant=True) +FwdPropsT = TypeVar("FwdPropsT", default=JSONValue, contravariant=True) class ConfiguredBaseModel(BaseModel): """ A configurable base model. """ + model_config = ConfigDict( extra="forbid", alias_generator=to_camel, @@ -28,6 +29,7 @@ class FunctionCall(ConfiguredBaseModel): """ Name and arguments of a function call. """ + name: str arguments: str @@ -36,6 +38,7 @@ class ToolCall(ConfiguredBaseModel): """ A tool call, modelled after OpenAI tool calls. """ + id: str type: Literal["function"] = "function" # pyright: ignore[reportIncompatibleVariableOverride] function: FunctionCall @@ -45,6 +48,7 @@ class BaseMessage(ConfiguredBaseModel): """ A base message, modelled after OpenAI messages. """ + id: str role: str content: Optional[str] = None @@ -55,6 +59,7 @@ class DeveloperMessage(BaseMessage): """ A developer message. """ + role: Literal["developer"] = "developer" # pyright: ignore[reportIncompatibleVariableOverride] @@ -62,6 +67,7 @@ class SystemMessage(BaseMessage): """ A system message. 
""" + role: Literal["system"] = "system" # pyright: ignore[reportIncompatibleVariableOverride] @@ -69,6 +75,7 @@ class AssistantMessage(BaseMessage): """ An assistant message. """ + role: Literal["assistant"] = "assistant" # pyright: ignore[reportIncompatibleVariableOverride] tool_calls: Optional[List[ToolCall]] = None @@ -77,13 +84,15 @@ class UserMessage(BaseMessage): """ A user message. """ - role: Literal["user"] = "user" # pyright: ignore[reportIncompatibleVariableOverride] + + role: Literal["user"] = "user" # pyright: ignore[reportIncompatibleVariableOverride] class ToolMessage(ConfiguredBaseModel): """ A tool result message. """ + id: str role: Literal["tool"] = "tool" content: str @@ -93,7 +102,7 @@ class ToolMessage(ConfiguredBaseModel): Message = Annotated[ Union[DeveloperMessage, SystemMessage, AssistantMessage, UserMessage, ToolMessage], - Field(discriminator="role") + Field(discriminator="role"), ] Role = Literal["developer", "system", "assistant", "user", "tool"] @@ -103,6 +112,7 @@ class Context(ConfiguredBaseModel): """ Additional context for the agent. """ + description: str value: str @@ -111,6 +121,7 @@ class Tool(ConfiguredBaseModel): """ A tool definition. """ + name: str description: str parameters: Any # JSON Schema for the tool parameters @@ -120,6 +131,7 @@ class RunAgentInput(ConfiguredBaseModel, Generic[AgentStateT, FwdPropsT]): """ Input for running an agent. """ + thread_id: str run_id: str state: AgentStateT diff --git a/python-sdk/ag_ui/encoder/encoder.py b/python-sdk/ag_ui/encoder/encoder.py index 2cfe88392..a30957568 100644 --- a/python-sdk/ag_ui/encoder/encoder.py +++ b/python-sdk/ag_ui/encoder/encoder.py @@ -6,10 +6,12 @@ AGUI_MEDIA_TYPE = "application/vnd.ag-ui.event+proto" + class EventEncoder: """ Encodes Agent User Interaction events. 
""" + def __init__(self, accept: str | None = None): pass diff --git a/python-sdk/tests/test_encoder.py b/python-sdk/tests/test_encoder.py index 2d466c5a4..4c2766888 100644 --- a/python-sdk/tests/test_encoder.py +++ b/python-sdk/tests/test_encoder.py @@ -3,7 +3,12 @@ from datetime import datetime from ag_ui.encoder.encoder import EventEncoder, AGUI_MEDIA_TYPE -from ag_ui.core.events import BaseEvent, EventType, TextMessageContentEvent, ToolCallStartEvent +from ag_ui.core.events import ( + BaseEvent, + EventType, + TextMessageContentEvent, + ToolCallStartEvent, +) class TestEventEncoder(unittest.TestCase): @@ -23,15 +28,17 @@ def test_encode_method(self): # Create a test event timestamp = int(datetime.now().timestamp() * 1000) event = BaseEvent(type=EventType.RAW, timestamp=timestamp) - + # Create encoder and encode event encoder = EventEncoder() encoded = encoder.encode(event) - + # The encode method calls encode_sse, so the result should be in SSE format - expected = f"data: {event.model_dump_json(by_alias=True, exclude_none=True)}\n\n" + expected = ( + f"data: {event.model_dump_json(by_alias=True, exclude_none=True)}\n\n" + ) self.assertEqual(encoded, expected) - + # Verify that camelCase is used in the encoded output self.assertIn('"type":', encoded) self.assertIn('"timestamp":', encoded) @@ -43,29 +50,29 @@ def test_encode_sse_method(self): """Test the encode_sse method""" # Create a test event with specific data event = TextMessageContentEvent( - message_id="msg_123", - delta="Hello, world!", - timestamp=1648214400000 + message_id="msg_123", delta="Hello, world!", timestamp=1648214400000 ) - + # Create encoder and encode event to SSE encoder = EventEncoder() encoded_sse = encoder._encode_sse(event) - + # Verify the format is correct for SSE (data: [json]\n\n) self.assertTrue(encoded_sse.startswith("data: ")) self.assertTrue(encoded_sse.endswith("\n\n")) - + # Extract and verify the JSON content json_content = encoded_sse[6:-2] # Remove "data: " prefix and 
"\n\n" suffix decoded = json.loads(json_content) - + # Check that all fields were properly encoded self.assertEqual(decoded["type"], "TEXT_MESSAGE_CONTENT") - self.assertEqual(decoded["messageId"], "msg_123") # Check snake_case converted to camelCase + self.assertEqual( + decoded["messageId"], "msg_123" + ) # Check snake_case converted to camelCase self.assertEqual(decoded["delta"], "Hello, world!") self.assertEqual(decoded["timestamp"], 1648214400000) - + # Verify that snake_case has been converted to camelCase self.assertIn("messageId", decoded) # camelCase key exists self.assertNotIn("message_id", decoded) # snake_case key doesn't exist @@ -74,77 +81,79 @@ def test_encode_with_different_event_types(self): """Test encoding different types of events""" # Create encoder encoder = EventEncoder() - + # Test with a basic BaseEvent base_event = BaseEvent(type=EventType.RAW, timestamp=1648214400000) encoded_base = encoder.encode(base_event) self.assertIn('"type":"RAW"', encoded_base) - + # Test with a more complex event content_event = TextMessageContentEvent( message_id="msg_456", delta="Testing different events", - timestamp=1648214400000 + timestamp=1648214400000, ) encoded_content = encoder.encode(content_event) - + # Verify correct encoding and camelCase conversion self.assertIn('"type":"TEXT_MESSAGE_CONTENT"', encoded_content) - self.assertIn('"messageId":"msg_456"', encoded_content) # Check snake_case converted to camelCase + self.assertIn( + '"messageId":"msg_456"', encoded_content + ) # Check snake_case converted to camelCase self.assertIn('"delta":"Testing different events"', encoded_content) - + # Extract JSON and verify camelCase conversion json_content = encoded_content.split("data: ")[1].rstrip("\n\n") decoded = json.loads(json_content) - + # Verify messageId is camelCase (not message_id) self.assertIn("messageId", decoded) self.assertNotIn("message_id", decoded) - + def test_null_value_exclusion(self): """Test that fields with None values are excluded 
from the JSON output""" # Create an event with some fields set to None event = BaseEvent( type=EventType.RAW, timestamp=1648214400000, - raw_event=None # Explicitly set to None + raw_event=None, # Explicitly set to None ) - + # Create encoder and encode event encoder = EventEncoder() encoded = encoder.encode(event) - + # Extract JSON json_content = encoded.split("data: ")[1].rstrip("\n\n") decoded = json.loads(json_content) - + # Verify fields that are present self.assertIn("type", decoded) self.assertIn("timestamp", decoded) - + # Verify null fields are excluded self.assertNotIn("rawEvent", decoded) - + # Test with another event that has optional fields # Create event with some optional fields set to None event_with_optional = ToolCallStartEvent( tool_call_id="call_123", tool_call_name="test_tool", parent_message_id=None, # Optional field explicitly set to None - timestamp=1648214400000 + timestamp=1648214400000, ) - + encoded_optional = encoder.encode(event_with_optional) json_content_optional = encoded_optional.split("data: ")[1].rstrip("\n\n") decoded_optional = json.loads(json_content_optional) - + # Required fields should be present self.assertIn("toolCallId", decoded_optional) self.assertIn("toolCallName", decoded_optional) - + # Optional field with None value should be excluded self.assertNotIn("parentMessageId", decoded_optional) - + def test_round_trip_serialization(self): """Test that events can be serialized to JSON with camelCase and deserialized back correctly""" # Create a complex event with multiple fields @@ -152,12 +161,12 @@ def test_round_trip_serialization(self): tool_call_id="call_abc123", tool_call_name="search_tool", parent_message_id="msg_parent_456", - timestamp=1648214400000 + timestamp=1648214400000, ) - + # Serialize to JSON with camelCase fields json_str = original_event.model_dump_json(by_alias=True) - + # Verify JSON uses camelCase json_data = json.loads(json_str) self.assertIn("toolCallId", json_data) @@ -166,19 +175,20 @@ def 
test_round_trip_serialization(self): self.assertNotIn("tool_call_id", json_data) self.assertNotIn("tool_call_name", json_data) self.assertNotIn("parent_message_id", json_data) - + # Deserialize back to an event deserialized_event = ToolCallStartEvent.model_validate_json(json_str) - + # Verify the deserialized event is equivalent to the original self.assertEqual(deserialized_event.type, original_event.type) self.assertEqual(deserialized_event.tool_call_id, original_event.tool_call_id) - self.assertEqual(deserialized_event.tool_call_name, original_event.tool_call_name) - self.assertEqual(deserialized_event.parent_message_id, original_event.parent_message_id) - self.assertEqual(deserialized_event.timestamp, original_event.timestamp) - - # Verify complete equality using model_dump self.assertEqual( - original_event.model_dump(), - deserialized_event.model_dump() + deserialized_event.tool_call_name, original_event.tool_call_name ) + self.assertEqual( + deserialized_event.parent_message_id, original_event.parent_message_id + ) + self.assertEqual(deserialized_event.timestamp, original_event.timestamp) + + # Verify complete equality using model_dump + self.assertEqual(original_event.model_dump(), deserialized_event.model_dump()) diff --git a/python-sdk/tests/test_events.py b/python-sdk/tests/test_events.py index e413275c7..1745f61f9 100644 --- a/python-sdk/tests/test_events.py +++ b/python-sdk/tests/test_events.py @@ -22,7 +22,7 @@ RunErrorEvent, StepStartedEvent, StepFinishedEvent, - Event + Event, ) @@ -47,13 +47,10 @@ def test_base_event_creation(self): def test_text_message_start(self): """Test creating and serializing a TextMessageStartEvent event""" - event = TextMessageStartEvent( - message_id="msg_123", - timestamp=1648214400000 - ) + event = TextMessageStartEvent(message_id="msg_123", timestamp=1648214400000) self.assertEqual(event.message_id, "msg_123") self.assertEqual(event.role, "assistant") - + # Test serialization serialized = event.model_dump(by_alias=True) 
self.assertEqual(serialized["type"], "TEXT_MESSAGE_START") @@ -63,13 +60,11 @@ def test_text_message_start(self): def test_text_message_content(self): """Test creating and serializing a TextMessageContentEvent event""" event = TextMessageContentEvent( - message_id="msg_123", - delta="Hello, world!", - timestamp=1648214400000 + message_id="msg_123", delta="Hello, world!", timestamp=1648214400000 ) self.assertEqual(event.message_id, "msg_123") self.assertEqual(event.delta, "Hello, world!") - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "TEXT_MESSAGE_CONTENT") @@ -78,12 +73,9 @@ def test_text_message_content(self): def test_text_message_end(self): """Test creating and serializing a TextMessageEndEvent event""" - event = TextMessageEndEvent( - message_id="msg_123", - timestamp=1648214400000 - ) + event = TextMessageEndEvent(message_id="msg_123", timestamp=1648214400000) self.assertEqual(event.message_id, "msg_123") - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "TEXT_MESSAGE_END") @@ -95,12 +87,12 @@ def test_tool_call_start(self): tool_call_id="call_123", tool_call_name="get_weather", parent_message_id="msg_456", - timestamp=1648214400000 + timestamp=1648214400000, ) self.assertEqual(event.tool_call_id, "call_123") self.assertEqual(event.tool_call_name, "get_weather") self.assertEqual(event.parent_message_id, "msg_456") - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "TOOL_CALL_START") @@ -113,11 +105,11 @@ def test_tool_call_args(self): event = ToolCallArgsEvent( tool_call_id="call_123", delta='{"location": "New York"}', - timestamp=1648214400000 + timestamp=1648214400000, ) self.assertEqual(event.tool_call_id, "call_123") self.assertEqual(event.delta, '{"location": "New York"}') - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], 
"TOOL_CALL_ARGS") @@ -126,12 +118,9 @@ def test_tool_call_args(self): def test_tool_call_end(self): """Test creating and serializing a ToolCallEndEvent event""" - event = ToolCallEndEvent( - tool_call_id="call_123", - timestamp=1648214400000 - ) + event = ToolCallEndEvent(tool_call_id="call_123", timestamp=1648214400000) self.assertEqual(event.tool_call_id, "call_123") - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "TOOL_CALL_END") @@ -140,12 +129,9 @@ def test_tool_call_end(self): def test_state_snapshot(self): """Test creating and serializing a StateSnapshotEvent event""" state = {"conversation_state": "active", "user_info": {"name": "John"}} - event = StateSnapshotEvent( - snapshot=state, - timestamp=1648214400000 - ) + event = StateSnapshotEvent(snapshot=state, timestamp=1648214400000) self.assertEqual(event.snapshot, state) - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "STATE_SNAPSHOT") @@ -157,14 +143,11 @@ def test_state_delta(self): # JSON Patch format delta = [ {"op": "replace", "path": "/conversation_state", "value": "paused"}, - {"op": "add", "path": "/user_info/age", "value": 30} + {"op": "add", "path": "/user_info/age", "value": 30}, ] - event = StateDeltaEvent( - delta=delta, - timestamp=1648214400000 - ) + event = StateDeltaEvent(delta=delta, timestamp=1648214400000) self.assertEqual(event.delta, delta) - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "STATE_DELTA") @@ -176,42 +159,40 @@ def test_messages_snapshot(self): """Test creating and serializing a MessagesSnapshotEvent event""" messages = [ UserMessage(id="user_1", content="Hello"), - AssistantMessage(id="asst_1", content="Hi there", tool_calls=[ - ToolCall( - id="call_1", - function=FunctionCall( - name="get_weather", - arguments='{"location": "New York"}' + AssistantMessage( + id="asst_1", + content="Hi there", + 
tool_calls=[ + ToolCall( + id="call_1", + function=FunctionCall( + name="get_weather", arguments='{"location": "New York"}' + ), ) - ) - ]) + ], + ), ] - event = MessagesSnapshotEvent( - messages=messages, - timestamp=1648214400000 - ) + event = MessagesSnapshotEvent(messages=messages, timestamp=1648214400000) self.assertEqual(len(event.messages), 2) self.assertEqual(event.messages[0].id, "user_1") self.assertEqual(event.messages[1].tool_calls[0].function.name, "get_weather") - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "MESSAGES_SNAPSHOT") self.assertEqual(len(serialized["messages"]), 2) self.assertEqual(serialized["messages"][0]["role"], "user") - self.assertEqual(serialized["messages"][1]["toolCalls"][0]["function"]["name"], "get_weather") + self.assertEqual( + serialized["messages"][1]["toolCalls"][0]["function"]["name"], "get_weather" + ) def test_raw_event(self): """Test creating and serializing a RawEvent""" raw_data = {"origin": "server", "data": {"key": "value"}} - event = RawEvent( - event=raw_data, - source="api", - timestamp=1648214400000 - ) + event = RawEvent(event=raw_data, source="api", timestamp=1648214400000) self.assertEqual(event.event, raw_data) self.assertEqual(event.source, "api") - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "RAW") @@ -223,11 +204,11 @@ def test_custom_event(self): event = CustomEvent( name="user_action", value={"action": "click", "element": "button"}, - timestamp=1648214400000 + timestamp=1648214400000, ) self.assertEqual(event.name, "user_action") self.assertEqual(event.value["action"], "click") - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "CUSTOM") @@ -237,13 +218,11 @@ def test_custom_event(self): def test_run_started(self): """Test creating and serializing a RunStartedEvent event""" event = RunStartedEvent( - thread_id="thread_123", - 
run_id="run_456", - timestamp=1648214400000 + thread_id="thread_123", run_id="run_456", timestamp=1648214400000 ) self.assertEqual(event.thread_id, "thread_123") self.assertEqual(event.run_id, "run_456") - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "RUN_STARTED") @@ -253,13 +232,11 @@ def test_run_started(self): def test_run_finished(self): """Test creating and serializing a RunFinishedEvent event""" event = RunFinishedEvent( - thread_id="thread_123", - run_id="run_456", - timestamp=1648214400000 + thread_id="thread_123", run_id="run_456", timestamp=1648214400000 ) self.assertEqual(event.thread_id, "thread_123") self.assertEqual(event.run_id, "run_456") - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "RUN_FINISHED") @@ -271,11 +248,11 @@ def test_run_error(self): event = RunErrorEvent( message="An error occurred during execution", code="ERROR_001", - timestamp=1648214400000 + timestamp=1648214400000, ) self.assertEqual(event.message, "An error occurred during execution") self.assertEqual(event.code, "ERROR_001") - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "RUN_ERROR") @@ -284,12 +261,9 @@ def test_run_error(self): def test_step_started(self): """Test creating and serializing a StepStartedEvent event""" - event = StepStartedEvent( - step_name="process_data", - timestamp=1648214400000 - ) + event = StepStartedEvent(step_name="process_data", timestamp=1648214400000) self.assertEqual(event.step_name, "process_data") - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "STEP_STARTED") @@ -297,12 +271,9 @@ def test_step_started(self): def test_step_finished(self): """Test creating and serializing a StepFinishedEvent event""" - event = StepFinishedEvent( - step_name="process_data", - timestamp=1648214400000 - ) + event = 
StepFinishedEvent(step_name="process_data", timestamp=1648214400000) self.assertEqual(event.step_name, "process_data") - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "STEP_FINISHED") @@ -311,48 +282,48 @@ def test_step_finished(self): def test_event_union_deserialization(self): """Test the Event union type correctly deserializes different event types""" event_adapter = TypeAdapter(Event) - + # Test different event types event_data = [ { "type": "TEXT_MESSAGE_START", "messageId": "msg_start", "role": "assistant", - "timestamp": 1648214400000 + "timestamp": 1648214400000, }, { "type": "TEXT_MESSAGE_CONTENT", "messageId": "msg_content", "delta": "Hello!", - "timestamp": 1648214400000 + "timestamp": 1648214400000, }, { "type": "TOOL_CALL_START", "toolCallId": "call_start", "toolCallName": "get_info", - "timestamp": 1648214400000 + "timestamp": 1648214400000, }, { "type": "STATE_SNAPSHOT", "snapshot": {"status": "active"}, - "timestamp": 1648214400000 + "timestamp": 1648214400000, }, { "type": "RUN_ERROR", "message": "Error occurred", "code": "ERR_001", - "timestamp": 1648214400000 - } + "timestamp": 1648214400000, + }, ] - + expected_types = [ TextMessageStartEvent, TextMessageContentEvent, ToolCallStartEvent, StateSnapshotEvent, - RunErrorEvent + RunErrorEvent, ] - + for data, expected_type in zip(event_data, expected_types): event = event_adapter.validate_python(data) self.assertIsInstance(event, expected_type) @@ -365,7 +336,7 @@ def test_validation_constraints(self): with self.assertRaises(ValueError): TextMessageContentEvent( message_id="msg_123", - delta="" # Empty delta, should fail + delta="", # Empty delta, should fail ) def test_serialization_round_trip(self): @@ -375,57 +346,54 @@ def test_serialization_round_trip(self): TextMessageStartEvent( message_id="msg_123", ), - TextMessageContentEvent( - message_id="msg_123", - delta="Hello, world!" 
- ), - ToolCallStartEvent( - tool_call_id="call_123", - tool_call_name="get_weather" - ), - StateSnapshotEvent( - snapshot={"status": "active"} - ), - MessagesSnapshotEvent( - messages=[ - UserMessage(id="user_1", content="Hello") - ] - ), - RunStartedEvent( - thread_id="thread_123", - run_id="run_456" - ) + TextMessageContentEvent(message_id="msg_123", delta="Hello, world!"), + ToolCallStartEvent(tool_call_id="call_123", tool_call_name="get_weather"), + StateSnapshotEvent(snapshot={"status": "active"}), + MessagesSnapshotEvent(messages=[UserMessage(id="user_1", content="Hello")]), + RunStartedEvent(thread_id="thread_123", run_id="run_456"), ] - + event_adapter = TypeAdapter(Event) - + # Test round trip for each event for original_event in events: # Serialize to JSON json_str = original_event.model_dump_json(by_alias=True) - + # Deserialize back to object deserialized_event = event_adapter.validate_json(json_str) - + # Verify the types match self.assertIsInstance(deserialized_event, type(original_event)) self.assertEqual(deserialized_event.type, original_event.type) - + # Verify event-specific fields if isinstance(original_event, TextMessageStartEvent): - self.assertEqual(deserialized_event.message_id, original_event.message_id) + self.assertEqual( + deserialized_event.message_id, original_event.message_id + ) self.assertEqual(deserialized_event.role, original_event.role) elif isinstance(original_event, TextMessageContentEvent): - self.assertEqual(deserialized_event.message_id, original_event.message_id) + self.assertEqual( + deserialized_event.message_id, original_event.message_id + ) self.assertEqual(deserialized_event.delta, original_event.delta) elif isinstance(original_event, ToolCallStartEvent): - self.assertEqual(deserialized_event.tool_call_id, original_event.tool_call_id) - self.assertEqual(deserialized_event.tool_call_name, original_event.tool_call_name) + self.assertEqual( + deserialized_event.tool_call_id, original_event.tool_call_id + ) + 
self.assertEqual( + deserialized_event.tool_call_name, original_event.tool_call_name + ) elif isinstance(original_event, StateSnapshotEvent): self.assertEqual(deserialized_event.snapshot, original_event.snapshot) elif isinstance(original_event, MessagesSnapshotEvent): - self.assertEqual(len(deserialized_event.messages), len(original_event.messages)) - self.assertEqual(deserialized_event.messages[0].id, original_event.messages[0].id) + self.assertEqual( + len(deserialized_event.messages), len(original_event.messages) + ) + self.assertEqual( + deserialized_event.messages[0].id, original_event.messages[0].id + ) elif isinstance(original_event, RunStartedEvent): self.assertEqual(deserialized_event.thread_id, original_event.thread_id) self.assertEqual(deserialized_event.run_id, original_event.run_id) @@ -434,16 +402,16 @@ def test_raw_event_with_null_source(self): """Test RawEvent with null source""" event = RawEvent( event={"data": "test"}, - source=None # Explicit None + source=None, # Explicit None ) self.assertIsNone(event.source) - + # Test serialization serialized = event.model_dump(by_alias=True) self.assertEqual(serialized["type"], "RAW") self.assertEqual(serialized["event"]["data"], "test") self.assertIsNone(serialized["source"]) - + # Test round-trip event_adapter = TypeAdapter(Event) json_str = event.model_dump_json(by_alias=True) @@ -460,44 +428,39 @@ def test_complex_nested_event_structures(self): "preferences": { "theme": "dark", "notifications": True, - "filters": ["news", "social", "tech"] - } + "filters": ["news", "social", "tech"], + }, }, "stats": { "messages": 42, - "interactions": { - "clicks": 18, - "searches": 7 - } - } + "interactions": {"clicks": 18, "searches": 7}, + }, }, "active_tools": ["search", "calculator", "weather"], - "settings": { - "language": "en", - "timezone": "UTC-5" - } + "settings": {"language": "en", "timezone": "UTC-5"}, } - - event = StateSnapshotEvent( - snapshot=complex_state, - timestamp=1648214400000 - ) - + + event = 
StateSnapshotEvent(snapshot=complex_state, timestamp=1648214400000) + # Verify complex state structure self.assertEqual(event.snapshot["session"]["user"]["id"], "user_123") - self.assertEqual(event.snapshot["session"]["user"]["preferences"]["theme"], "dark") - self.assertEqual(event.snapshot["session"]["stats"]["interactions"]["searches"], 7) + self.assertEqual( + event.snapshot["session"]["user"]["preferences"]["theme"], "dark" + ) + self.assertEqual( + event.snapshot["session"]["stats"]["interactions"]["searches"], 7 + ) self.assertEqual(event.snapshot["active_tools"][1], "calculator") - + # Test serialization and deserialization event_adapter = TypeAdapter(Event) json_str = event.model_dump_json(by_alias=True) deserialized = event_adapter.validate_json(json_str) - + # Verify structure is preserved self.assertEqual( deserialized.snapshot["session"]["user"]["preferences"]["filters"], - ["news", "social", "tech"] + ["news", "social", "tech"], ) self.assertEqual(deserialized.snapshot["settings"]["timezone"], "UTC-5") @@ -505,21 +468,19 @@ def test_event_with_unicode_and_special_chars(self): """Test events with Unicode and special characters""" # Text with Unicode and special characters text = "Hello 你好 こんにちは 안녕하세요 👋 🌍 \n\t\"'\\/<>{}[]" - + event = TextMessageContentEvent( - message_id="msg_unicode", - delta=text, - timestamp=1648214400000 + message_id="msg_unicode", delta=text, timestamp=1648214400000 ) - + # Verify text is stored correctly self.assertEqual(event.delta, text) - + # Test serialization and deserialization event_adapter = TypeAdapter(Event) json_str = event.model_dump_json(by_alias=True) deserialized = event_adapter.validate_json(json_str) - + # Verify Unicode and special characters are preserved self.assertEqual(deserialized.delta, text) diff --git a/python-sdk/tests/test_types.py b/python-sdk/tests/test_types.py index e534aa5ab..6ad77a7e1 100644 --- a/python-sdk/tests/test_types.py +++ b/python-sdk/tests/test_types.py @@ -11,7 +11,7 @@ UserMessage, 
ToolMessage, Message, - RunAgentInput + RunAgentInput, ) @@ -26,10 +26,7 @@ def test_function_call_creation(self): def test_message_serialization(self): """Test serialization of a basic message""" - user_msg = UserMessage( - id="msg_123", - content="Hello, world!" - ) + user_msg = UserMessage(id="msg_123", content="Hello, world!") serialized = user_msg.model_dump(by_alias=True) self.assertEqual(serialized["id"], "msg_123") self.assertEqual(serialized["role"], "user") @@ -38,8 +35,7 @@ def test_message_serialization(self): def test_tool_call_serialization(self): """Test camel case serialization for ConfiguredBaseModel subclasses""" tool_call = ToolCall( - id="call_123", - function=FunctionCall(name="test_function", arguments="{}") + id="call_123", function=FunctionCall(name="test_function", arguments="{}") ) serialized = tool_call.model_dump(by_alias=True) # Should convert function to camelCase @@ -48,9 +44,7 @@ def test_tool_call_serialization(self): def test_tool_message_camel_case(self): """Test camel case serialization for ToolMessage""" tool_msg = ToolMessage( - id="tool_123", - content="Tool result", - tool_call_id="call_456" + id="tool_123", content="Tool result", tool_call_id="call_456" ) serialized = tool_msg.model_dump(by_alias=True) self.assertIn("toolCallId", serialized) @@ -63,7 +57,7 @@ def test_parse_camel_case_json_tool_message(self): "id": "tool_789", "role": "tool", "content": "Result from tool", - "toolCallId": "call_123" # camelCase field name + "toolCallId": "call_123", # camelCase field name } # Parse the JSON data into a ToolMessage instance @@ -81,10 +75,7 @@ def test_parse_camel_case_json_function_call(self): json_data = { "id": "call_abc", "type": "function", - "function": { - "name": "get_weather", - "arguments": '{"location":"New York"}' - } + "function": {"name": "get_weather", "arguments": '{"location":"New York"}'}, } # Parse JSON into a ToolCall instance @@ -98,20 +89,14 @@ def test_parse_camel_case_json_function_call(self): def 
test_developer_message(self): """Test creating and serializing a developer message""" - msg = DeveloperMessage( - id="dev_123", - content="Developer note" - ) + msg = DeveloperMessage(id="dev_123", content="Developer note") serialized = msg.model_dump(by_alias=True) self.assertEqual(serialized["role"], "developer") self.assertEqual(serialized["content"], "Developer note") def test_system_message(self): """Test creating and serializing a system message""" - msg = SystemMessage( - id="sys_123", - content="System instruction" - ) + msg = SystemMessage(id="sys_123", content="System instruction") serialized = msg.model_dump(by_alias=True) self.assertEqual(serialized["role"], "system") self.assertEqual(serialized["content"], "System instruction") @@ -120,12 +105,10 @@ def test_assistant_message(self): """Test creating and serializing an assistant message with tool calls""" tool_call = ToolCall( id="call_456", - function=FunctionCall(name="get_data", arguments='{"param": "value"}') + function=FunctionCall(name="get_data", arguments='{"param": "value"}'), ) msg = AssistantMessage( - id="asst_123", - content="Assistant response", - tool_calls=[tool_call] + id="asst_123", content="Assistant response", tool_calls=[tool_call] ) serialized = msg.model_dump(by_alias=True) self.assertEqual(serialized["role"], "assistant") @@ -135,10 +118,7 @@ def test_assistant_message(self): def test_user_message(self): """Test creating and serializing a user message""" - msg = UserMessage( - id="user_123", - content="User query" - ) + msg = UserMessage(id="user_123", content="User query") serialized = msg.model_dump(by_alias=True) self.assertEqual(serialized["role"], "user") self.assertEqual(serialized["content"], "User query") @@ -155,11 +135,11 @@ def test_message_union_deserialization(self): {"id": "asst_789", "role": "assistant", "content": "Assistant response"}, {"id": "user_101", "role": "user", "content": "User query"}, { - "id": "tool_202", - "role": "tool", - "content": "Tool result", 
- "toolCallId": "call_303" - } + "id": "tool_202", + "role": "tool", + "content": "Tool result", + "toolCallId": "call_303", + }, ] expected_types = [ @@ -167,7 +147,7 @@ def test_message_union_deserialization(self): SystemMessage, AssistantMessage, UserMessage, - ToolMessage + ToolMessage, ] for data, expected_type in zip(message_data, expected_types): @@ -192,10 +172,10 @@ def test_message_union_with_tool_calls(self): "type": "function", "function": { "name": "search_data", - "arguments": '{"query": "python"}' - } + "arguments": '{"query": "python"}', + }, } - ] + ], } msg = message_adapter.validate_python(data) @@ -215,19 +195,19 @@ def test_run_agent_input_deserialization(self): { "id": "sys_001", "role": "system", - "content": "You are a helpful assistant." + "content": "You are a helpful assistant.", }, # User message { "id": "user_001", "role": "user", - "content": "Can you help me analyze this data?" + "content": "Can you help me analyze this data?", }, # Developer message { "id": "dev_001", "role": "developer", - "content": "The assistant should provide a detailed analysis." + "content": "The assistant should provide a detailed analysis.", }, # Assistant message with tool calls { @@ -240,24 +220,24 @@ def test_run_agent_input_deserialization(self): "type": "function", "function": { "name": "analyze_data", - "arguments": '{"dataset": "sales_2023", "metrics": ["mean", "median"]}' # pylint: disable=line-too-long - } + "arguments": '{"dataset": "sales_2023", "metrics": ["mean", "median"]}', # pylint: disable=line-too-long + }, } - ] + ], }, # Tool message responding to tool call { "id": "tool_001", "role": "tool", "content": '{"mean": 42.5, "median": 38.0}', - "toolCallId": "call_001" + "toolCallId": "call_001", }, # Another user message { "id": "user_002", "role": "user", - "content": "Can you explain these results?" 
- } + "content": "Can you explain these results?", + }, ], "tools": [ { @@ -267,10 +247,10 @@ def test_run_agent_input_deserialization(self): "type": "object", "properties": { "dataset": {"type": "string"}, - "metrics": {"type": "array", "items": {"type": "string"}} + "metrics": {"type": "array", "items": {"type": "string"}}, }, - "required": ["dataset"] - } + "required": ["dataset"], + }, }, { "name": "fetch_data", @@ -279,26 +259,23 @@ def test_run_agent_input_deserialization(self): "type": "object", "properties": { "source": {"type": "string"}, - "query": {"type": "string"} + "query": {"type": "string"}, }, - "required": ["source", "query"] - } - } + "required": ["source", "query"], + }, + }, ], "context": [ { "description": "User preferences", - "value": '{"theme": "dark", "language": "English"}' + "value": '{"theme": "dark", "language": "English"}', }, - { - "description": "Environment", - "value": "production" - } + {"description": "Environment", "value": "production"}, ], "forwardedProps": { "api_version": "v1", - "custom_settings": {"max_tokens": 500} - } + "custom_settings": {"max_tokens": 500}, + }, } # Deserialize using TypeAdapter @@ -319,8 +296,12 @@ def test_run_agent_input_deserialization(self): self.assertIsInstance(run_agent_input.messages[5], UserMessage) # Verify specific message content - self.assertEqual(run_agent_input.messages[0].content, "You are a helpful assistant.") - self.assertEqual(run_agent_input.messages[1].content, "Can you help me analyze this data?") + self.assertEqual( + run_agent_input.messages[0].content, "You are a helpful assistant." + ) + self.assertEqual( + run_agent_input.messages[1].content, "Can you help me analyze this data?" 
+ ) # Verify assistant message with tool call assistant_msg = run_agent_input.messages[3] @@ -344,7 +325,9 @@ def test_run_agent_input_deserialization(self): # Verify forwarded props self.assertEqual(run_agent_input.forwarded_props["api_version"], "v1") - self.assertEqual(run_agent_input.forwarded_props["custom_settings"]["max_tokens"], 500) + self.assertEqual( + run_agent_input.forwarded_props["custom_settings"]["max_tokens"], 500 + ) def test_validation_errors(self): """Test validation errors for various message types""" @@ -354,7 +337,7 @@ def test_validation_errors(self): invalid_role_data = { "id": "msg_123", "role": "invalid_role", # Invalid role - "content": "Hello" + "content": "Hello", } with self.assertRaises(ValidationError): message_adapter.validate_python(invalid_role_data) @@ -363,7 +346,7 @@ def test_validation_errors(self): missing_id_data = { # Missing "id" field "role": "user", - "content": "Hello" + "content": "Hello", } with self.assertRaises(ValidationError): UserMessage.model_validate(missing_id_data) @@ -373,7 +356,7 @@ def test_validation_errors(self): "id": "msg_456", "role": "user", "content": "Hello", - "extra_field": "This shouldn't be here" # Extra field + "extra_field": "This shouldn't be here", # Extra field } with self.assertRaises(ValidationError): UserMessage.model_validate(extra_field_data) @@ -396,9 +379,9 @@ def test_empty_collections(self): "runId": "run_empty", "state": {}, "messages": [], # Empty messages - "tools": [], # Empty tools - "context": [], # Empty context - "forwardedProps": {} + "tools": [], # Empty tools + "context": [], # Empty context + "forwardedProps": {}, } # Deserialize and verify @@ -423,26 +406,26 @@ def test_multiple_tool_calls(self): "type": "function", "function": { "name": "get_weather", - "arguments": '{"location": "New York"}' - } + "arguments": '{"location": "New York"}', + }, }, { "id": "call_2", "type": "function", "function": { "name": "search_database", - "arguments": '{"query": "recent 
sales"}' - } + "arguments": '{"query": "recent sales"}', + }, }, { "id": "call_3", "type": "function", "function": { "name": "calculate", - "arguments": '{"operation": "sum", "values": [1, 2, 3, 4, 5]}' - } - } - ] + "arguments": '{"operation": "sum", "values": [1, 2, 3, 4, 5]}', + }, + }, + ], } # Deserialize and verify @@ -471,13 +454,9 @@ def test_serialization_round_trip(self): { "id": "sys_rt", "role": "system", - "content": "You are a helpful assistant." - }, - { - "id": "user_rt", - "role": "user", - "content": "Help me with my task." + "content": "You are a helpful assistant.", }, + {"id": "user_rt", "role": "user", "content": "Help me with my task."}, { "id": "asst_rt", "role": "assistant", @@ -486,33 +465,20 @@ def test_serialization_round_trip(self): { "id": "call_rt", "type": "function", - "function": { - "name": "get_task_info", - "arguments": "{}" - } + "function": {"name": "get_task_info", "arguments": "{}"}, } - ] - } + ], + }, ], "tools": [ { "name": "get_task_info", "description": "Get task information", - "parameters": { - "type": "object", - "properties": {} - } - } - ], - "context": [ - { - "description": "Session", - "value": "123456" + "parameters": {"type": "object", "properties": {}}, } ], - "forwardedProps": { - "timestamp": 1648214400 - } + "context": [{"description": "Session", "value": "123456"}], + "forwardedProps": {"timestamp": 1648214400}, } # Deserialize @@ -538,7 +504,7 @@ def test_serialization_round_trip(self): self.assertEqual(len(deserialized_obj.messages[2].tool_calls), 1) self.assertEqual( deserialized_obj.messages[2].tool_calls[0].function.name, - original_obj.messages[2].tool_calls[0].function.name + original_obj.messages[2].tool_calls[0].function.name, ) def test_content_edge_cases(self): @@ -548,7 +514,7 @@ def test_content_edge_cases(self): empty_content_data = { "id": "msg_empty", "role": "user", - "content": "" # Empty string + "content": "", # Empty string } empty_msg = UserMessage.model_validate(empty_content_data) 
self.assertEqual(empty_msg.content, "") @@ -562,12 +528,9 @@ def test_content_edge_cases(self): { "id": "call_null", "type": "function", - "function": { - "name": "get_data", - "arguments": "{}" - } + "function": {"name": "get_data", "arguments": "{}"}, } - ] + ], } null_msg = AssistantMessage.model_validate(null_content_data) self.assertIsNone(null_msg.content) @@ -577,17 +540,19 @@ def test_content_edge_cases(self): large_content_data = { "id": "msg_large", "role": "user", - "content": large_content + "content": large_content, } large_msg = UserMessage.model_validate(large_content_data) self.assertEqual(len(large_msg.content), 10000) # Test content with special characters - special_chars = "Special chars: 你好 こんにちは 안녕하세요 👋 🌍 \n\t\"'\\/<>{}[]" + special_chars = ( + "Special chars: 你好 こんにちは 안녕하세요 👋 🌍 \n\t\"'\\/<>{}[]" + ) special_chars_data = { "id": "msg_special", "role": "user", - "content": special_chars + "content": special_chars, } special_msg = UserMessage.model_validate(special_chars_data) self.assertEqual(special_msg.content, special_chars) @@ -599,7 +564,7 @@ def test_name_field_handling(self): "id": "user_named", "role": "user", "content": "Hello", - "name": "John" + "name": "John", } user_msg = UserMessage.model_validate(user_with_name_data) self.assertEqual(user_msg.name, "John") @@ -609,7 +574,7 @@ def test_name_field_handling(self): "id": "asst_named", "role": "assistant", "content": "Hello", - "name": "AI Assistant" + "name": "AI Assistant", } assistant_msg = AssistantMessage.model_validate(assistant_with_name_data) self.assertEqual(assistant_msg.name, "AI Assistant") @@ -633,7 +598,7 @@ def test_state_variations(self): "messages": [], "tools": [], "context": [], - "forwardedProps": {} + "forwardedProps": {}, } scalar_input = RunAgentInput.model_validate(scalar_state_data) self.assertEqual(scalar_input.state, "ACTIVE") @@ -647,19 +612,13 @@ def test_state_variations(self): "preferences": { "theme": "dark", "notifications": True, - "filters": 
["important", "urgent"] - } + "filters": ["important", "urgent"], + }, }, - "metrics": { - "requests": 42, - "tokens": { - "input": 1024, - "output": 2048 - } - } + "metrics": {"requests": 42, "tokens": {"input": 1024, "output": 2048}}, }, "timestamp": 1648214400, - "version": "1.0.0" + "version": "1.0.0", } complex_state_data = { @@ -669,15 +628,19 @@ def test_state_variations(self): "messages": [], "tools": [], "context": [], - "forwardedProps": {} + "forwardedProps": {}, } complex_input = RunAgentInput.model_validate(complex_state_data) # Verify nested state structure is preserved self.assertEqual(complex_input.state["session"]["id"], "sess_123") self.assertEqual(complex_input.state["session"]["user"]["id"], "user_456") - self.assertEqual(complex_input.state["session"]["user"]["preferences"]["theme"], "dark") - self.assertEqual(complex_input.state["session"]["metrics"]["tokens"]["output"], 2048) + self.assertEqual( + complex_input.state["session"]["user"]["preferences"]["theme"], "dark" + ) + self.assertEqual( + complex_input.state["session"]["metrics"]["tokens"]["output"], 2048 + ) self.assertEqual(complex_input.state["version"], "1.0.0") # Verify serialization round-trip works with complex state @@ -685,7 +648,7 @@ def test_state_variations(self): deserialized = RunAgentInput.model_validate(serialized) self.assertEqual( deserialized.state["session"]["user"]["preferences"]["filters"], - ["important", "urgent"] + ["important", "urgent"], ) From 42f0d19bba55b3bcff286a7b232e090fbf392416 Mon Sep 17 00:00:00 2001 From: Wouter Doppenberg Date: Fri, 15 Aug 2025 10:47:24 +0200 Subject: [PATCH 3/8] CI schema error fix --- .github/workflows/test.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ddfd0bfd9..c1bbdcbee 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -2,9 +2,13 @@ name: test on: push: - branches: main + branches: [ + "main" + ] pull_request: - 
branches: main + branches: [ + "main" + ] jobs: python: From f147fad877d94a93568a86e7d194ca0a483ab497 Mon Sep 17 00:00:00 2001 From: Wouter Doppenberg Date: Fri, 15 Aug 2025 10:50:02 +0200 Subject: [PATCH 4/8] Python 3.9 compatibility changes; pre-commit updates --- python-sdk/.pre-commit-config.yaml | 6 +++--- python-sdk/ag_ui/encoder/encoder.py | 4 +++- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/python-sdk/.pre-commit-config.yaml b/python-sdk/.pre-commit-config.yaml index f52033e00..a0e329a04 100644 --- a/python-sdk/.pre-commit-config.yaml +++ b/python-sdk/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/charliermarsh/ruff-pre-commit - rev: v0.11.8 + rev: v0.12.9 hooks: - id: ruff args: [ @@ -10,12 +10,12 @@ repos: - id: ruff-format files: ^python-sdk/ - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.15.0 + rev: v1.17.1 hooks: - id: mypy files: ^python-sdk/ args: [ - --python-version=3.12, + --python-version=3.9, --disallow-untyped-calls, --disallow-untyped-defs, --disallow-incomplete-defs, diff --git a/python-sdk/ag_ui/encoder/encoder.py b/python-sdk/ag_ui/encoder/encoder.py index a30957568..227a552d6 100644 --- a/python-sdk/ag_ui/encoder/encoder.py +++ b/python-sdk/ag_ui/encoder/encoder.py @@ -2,6 +2,8 @@ This module contains the EventEncoder class """ +from typing import Union + from ag_ui.core.events import BaseEvent AGUI_MEDIA_TYPE = "application/vnd.ag-ui.event+proto" @@ -12,7 +14,7 @@ class EventEncoder: Encodes Agent User Interaction events. 
""" - def __init__(self, accept: str | None = None): + def __init__(self, accept: Union[str, None] = None): pass def get_content_type(self) -> str: From 2a240a15febc15ef93e1fdab65d1d2b1f88a5c9d Mon Sep 17 00:00:00 2001 From: Wouter Doppenberg Date: Fri, 15 Aug 2025 10:55:29 +0200 Subject: [PATCH 5/8] AgentStateT -> StateT --- python-sdk/ag_ui/core/events.py | 6 +++--- python-sdk/ag_ui/core/types.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/python-sdk/ag_ui/core/events.py b/python-sdk/ag_ui/core/events.py index b0b2bab40..256df6778 100644 --- a/python-sdk/ag_ui/core/events.py +++ b/python-sdk/ag_ui/core/events.py @@ -7,7 +7,7 @@ from pydantic import Field -from .types import ConfiguredBaseModel, Message, AgentStateT, JSONValue +from .types import ConfiguredBaseModel, Message, StateT, JSONValue class EventType(str, Enum): @@ -193,13 +193,13 @@ class ThinkingEndEvent(BaseEvent): type: Literal[EventType.THINKING_END] = EventType.THINKING_END # pyright: ignore[reportIncompatibleVariableOverride] -class StateSnapshotEvent(BaseEvent, Generic[AgentStateT]): +class StateSnapshotEvent(BaseEvent, Generic[StateT]): """ Event containing a snapshot of the state. 
""" type: Literal[EventType.STATE_SNAPSHOT] = EventType.STATE_SNAPSHOT # pyright: ignore[reportIncompatibleVariableOverride] - snapshot: AgentStateT + snapshot: StateT class StateDeltaEvent(BaseEvent): diff --git a/python-sdk/ag_ui/core/types.py b/python-sdk/ag_ui/core/types.py index 824f0bd70..6aaa13729 100644 --- a/python-sdk/ag_ui/core/types.py +++ b/python-sdk/ag_ui/core/types.py @@ -9,7 +9,7 @@ from pydantic.alias_generators import to_camel JSONValue = Union[str, int, float, bool, None, dict[str, Any], list[Any]] -AgentStateT = TypeVar("AgentStateT", default=JSONValue, contravariant=True) +StateT = TypeVar("StateT", default=JSONValue, contravariant=True) FwdPropsT = TypeVar("FwdPropsT", default=JSONValue, contravariant=True) @@ -127,14 +127,14 @@ class Tool(ConfiguredBaseModel): parameters: Any # JSON Schema for the tool parameters -class RunAgentInput(ConfiguredBaseModel, Generic[AgentStateT, FwdPropsT]): +class RunAgentInput(ConfiguredBaseModel, Generic[StateT, FwdPropsT]): """ Input for running an agent. 
""" thread_id: str run_id: str - state: AgentStateT + state: StateT messages: List[Message] tools: List[Tool] context: List[Context] From 0d04958260b2dbc6c161d9b0aa2fafe446827ae7 Mon Sep 17 00:00:00 2001 From: Wouter Doppenberg Date: Fri, 15 Aug 2025 11:05:57 +0200 Subject: [PATCH 6/8] `ag-ui-protocol` -> `ag-ui` --- python-sdk/pyproject.toml | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/python-sdk/pyproject.toml b/python-sdk/pyproject.toml index 15b8b9649..dbc7fe56c 100644 --- a/python-sdk/pyproject.toml +++ b/python-sdk/pyproject.toml @@ -1,5 +1,5 @@ [project] -name = "ag-ui-protocol" +name = "ag-ui" version = "0.1.9" description = "" authors = [ @@ -10,13 +10,6 @@ requires-python = ">=3.9,<4.0" dependencies = [ "pydantic>=2.11.2,<3.0.0", ] -packages = [ - { include = "ag_ui", from = "ag_ui" } -] - -[tool.hatch.build.targets.wheel] -packages = ["ag_ui"] - [build-system] requires = ["hatchling"] From 99f5e684bb03c793617af3439ca41ad2e5677d8a Mon Sep 17 00:00:00 2001 From: Wouter Doppenberg Date: Wed, 17 Sep 2025 22:07:33 +0200 Subject: [PATCH 7/8] Revert --- .github/workflows/dojo-e2e.yml | 128 +++- .gitignore | 2 + README.md | 30 +- docs/concepts/events.mdx | 24 +- docs/development/roadmap.mdx | 16 +- docs/introduction.mdx | 17 +- docs/quickstart/applications.mdx | 7 + docs/quickstart/server.mdx | 6 +- python-sdk/ag_ui/core/__init__.py | 3 +- python-sdk/ag_ui/core/events.py | 7 +- python-sdk/ag_ui/core/types.py | 2 - typescript-sdk/.gitignore | 1 + typescript-sdk/apps/dojo/e2e/package.json | 3 +- .../e2e/pages/agnoPages/AgenticChatPage.ts | 67 -- .../e2e/pages/agnoPages/ToolBaseGenUIPage.ts | 114 --- .../e2e/pages/crewAIPages/AgenticChatPage.ts | 120 ---- .../crewAIPages/PredictiveStateUpdatesPage.ts | 12 +- .../e2e/pages/crewAIPages/SharedStatePage.ts | 74 -- .../pages/crewAIPages/ToolBaseGenUIPage.ts | 114 --- .../langGraphFastAPIPages/AgenticChatPage.ts | 120 ---- .../langGraphFastAPIPages/SharedStatePage.ts | 74 -- 
.../ToolBaseGenUIPage.ts | 123 ---- .../pages/langGraphPages/AgenticChatPage.ts | 120 ---- .../pages/langGraphPages/SharedStatePage.ts | 74 -- .../pages/langGraphPages/ToolBaseGenUIPage.ts | 123 ---- .../pages/llamaIndexPages/AgenticChatPage.ts | 120 ---- .../pages/llamaIndexPages/SharedStatePage.ts | 74 -- .../mastraAgentLocalPages/AgenticChatPage.ts | 120 ---- .../mastraAgentLocalPages/SharedStatePage.ts | 74 -- .../ToolBaseGenUIPage.ts | 114 --- .../e2e/pages/mastraPages/AgenticChatPage.ts | 79 -- .../pages/mastraPages/ToolBaseGenUIPage.ts | 114 --- .../middlewareStarterPages/AgenticChatPage.ts | 120 ---- .../pages/pydanticAIPages/AgenticChatPage.ts | 120 ---- .../PredictiveStateUpdatesPage.ts | 12 +- .../pages/pydanticAIPages/SharedStatePage.ts | 74 -- .../pydanticAIPages/ToolBaseGenUIPage.ts | 114 --- .../AgenticChatPage.ts | 120 ---- .../SharedStatePage.ts | 74 -- .../ToolBaseGenUIPage.ts | 116 --- .../serverStarterPages/AgenticChatPage.ts | 120 ---- .../pages/vercelAISdkPages/AgenticChatPage.ts | 120 ---- .../apps/dojo/e2e/playwright.config.ts | 8 +- .../tests/agnoTests/agenticChatPage.spec.ts | 12 +- .../agnoTests/toolBasedGenUIPage.spec.ts | 6 +- .../tests/crewAITests/agenticChatPage.spec.ts | 4 +- .../tests/crewAITests/agenticGenUI.spec.ts | 6 +- .../predictvieStateUpdatePage.spec.ts | 84 --- .../tests/crewAITests/sharedStatePage.spec.ts | 8 +- .../crewAITests/toolBasedGenUIPage.spec.ts | 2 +- .../agenticChatPage.spec.ts | 4 +- .../agenticGenUI.spec.ts | 2 +- .../predictvieStateUpdatePage.spec.ts | 96 --- .../sharedStatePage.spec.ts | 10 +- .../toolBasedGenUIPage.spec.ts | 6 +- .../langgraphTests/agenticChatPage.spec.ts | 4 +- .../predictvieStateUpdatePage.spec.ts | 96 --- .../langgraphTests/sharedStatePage.spec.ts | 8 +- .../langgraphTests/toolBasedGenUIPage.spec.ts | 6 +- .../llamaIndexTests/agenticChatPage.spec.ts | 4 +- .../llamaIndexTests/agenticGenUI.spec.ts | 2 + .../llamaIndexTests/sharedStatePage.spec.ts | 8 +- .../agenticChatPage.spec.ts | 4 +- 
.../sharedStatePage.spec.ts | 8 +- .../toolBasedGenUIPage.spec.ts | 6 +- .../tests/mastraTests/agenticChatPage.spec.ts | 4 +- .../mastraTests/toolBasedGenUIPage.spec.ts | 4 +- .../agenticChatPage.spec.ts | 2 +- .../pydanticAITests/agenticChatPage.spec.ts | 4 +- .../pydanticAITests/agenticGenUI.spec.ts | 11 +- .../humanInTheLoopPage.spec.ts | 6 +- .../predictvieStateUpdatePage.spec.ts | 84 --- .../pydanticAITests/sharedStatePage.spec.ts | 8 +- .../toolBasedGenUIPage.spec.ts | 6 +- .../agenticChatPage.spec.ts | 8 +- .../predictvieStateUpdatePage.spec.ts | 82 --- .../sharedStatePage.spec.ts | 21 +- .../toolBasedGenUIPage.spec.ts | 2 +- .../agenticChatPage.spec.ts | 2 +- .../vercelAISdkTests/agenticChatPage.spec.ts | 4 +- typescript-sdk/apps/dojo/e2e2/.gitignore | 7 - typescript-sdk/apps/dojo/e2e2/package.json | 14 - .../apps/dojo/e2e2/playwright.config.ts | 79 -- typescript-sdk/apps/dojo/e2e2/pnpm-lock.yaml | 67 -- .../apps/dojo/e2e2/pnpm-workspace.yaml | 2 - .../dojo/e2e2/tests/agno-agentic-chat.spec.ts | 22 - .../e2e2/tests/crewai-agentic-chat.spec.ts | 22 - .../e2e2/tests/langgraph-agentic-chat.spec.ts | 22 - .../langgraph-fastapi-agentic-chat.spec.ts | 22 - .../langgraph-typescript-agentic-chat.spec.ts | 22 - .../tests/llama-index-agentic-chat.spec.ts | 22 - .../e2e2/tests/mastra-agentic-chat.spec.ts | 22 - .../e2e2/tests/pydantic-agentic-chat.spec.ts | 22 - .../tests/vercel-ai-sdk-agentic-chat.spec.ts | 22 - .../dojo/scripts/generate-content-json.ts | 8 +- .../apps/dojo/scripts/prep-dojo-everything.js | 183 ++--- .../apps/dojo/scripts/run-dojo-everything.js | 259 +++---- typescript-sdk/apps/dojo/src/agents.ts | 30 +- .../app/[integrationId]/feature/layout.tsx | 14 +- .../feature/tool_based_generative_ui/page.tsx | 528 +++++++------- typescript-sdk/apps/dojo/src/app/layout.tsx | 5 +- .../dojo/src/components/sidebar/sidebar.tsx | 36 +- .../dojo/src/components/theme-provider.tsx | 13 +- typescript-sdk/apps/dojo/src/config.ts | 6 + typescript-sdk/apps/dojo/src/env.ts 
| 4 +- typescript-sdk/apps/dojo/src/files.json | 236 +++++- typescript-sdk/apps/dojo/src/mastra/index.ts | 35 +- typescript-sdk/apps/dojo/src/menu.ts | 91 ++- .../apps/dojo/src/types/integration.ts | 3 +- typescript-sdk/apps/dojo/tsconfig.json | 2 +- .../integrations/agno/examples/pyproject.toml | 3 +- .../agno/examples/server/__init__.py | 3 +- .../agno/examples/server/api/agentic_chat.py | 14 +- .../server/api/tool_based_generative_ui.py | 35 +- typescript-sdk/integrations/agno/package.json | 7 +- .../integrations/crewai/package.json | 7 +- .../langgraph/examples/python/agents/dojo.py | 12 + .../agents/tool_based_generative_ui/agent.py | 80 +-- .../langgraph/examples/python/langgraph.json | 3 +- .../langgraph/examples/python/poetry.lock | 8 +- .../langgraph/examples/python/pyproject.toml | 2 +- .../examples/typescript/langgraph.json | 3 +- .../agents/tool_based_generative_ui/agent.ts | 99 +-- .../integrations/langgraph/package.json | 11 +- .../langgraph/python/ag_ui_langgraph/agent.py | 167 ++++- .../python/ag_ui_langgraph/endpoint.py | 12 +- .../langgraph/python/ag_ui_langgraph/types.py | 3 +- .../langgraph/python/ag_ui_langgraph/utils.py | 59 ++ .../langgraph/python/pyproject.toml | 2 +- .../integrations/langgraph/src/agent.ts | 291 +++++--- .../integrations/langgraph/src/types.ts | 27 +- .../integrations/llamaindex/package.json | 7 +- .../mastra/agents/tool-based-generative-ui.ts | 15 - .../integrations/mastra/package.json | 42 +- .../integrations/mastra/src/mastra.ts | 5 +- .../integrations/mastra/src/utils.ts | 59 +- .../server/api/agentic_generative_ui.py | 1 + .../examples/server/api/human_in_the_loop.py | 1 + .../integrations/pydantic-ai/package.json | 7 +- .../integrations/vercel-ai-sdk/package.json | 5 +- typescript-sdk/package.json | 2 + typescript-sdk/packages/cli/package.json | 2 +- typescript-sdk/packages/client/package.json | 2 +- .../agent/__tests__/legacy-bridged.test.ts | 42 +- .../packages/client/src/agent/index.ts | 3 +- 
.../packages/client/src/apply/default.ts | 114 ++- .../packages/client/src/chunks/transform.ts | 2 +- .../packages/client/src/legacy/convert.ts | 14 +- .../packages/client/src/legacy/types.ts | 1 + .../verify/__tests__/verify.events.test.ts | 16 +- .../verify/__tests__/verify.lifecycle.test.ts | 2 +- .../__tests__/verify.text-messages.test.ts | 673 ++++++------------ .../__tests__/verify.tool-calls.test.ts | 577 ++++++++++----- .../packages/client/src/verify/verify.ts | 201 +++--- typescript-sdk/packages/core/package.json | 2 +- typescript-sdk/packages/core/src/events.ts | 16 +- typescript-sdk/packages/encoder/package.json | 2 +- typescript-sdk/packages/proto/package.json | 2 +- typescript-sdk/pnpm-lock.yaml | 170 +++-- typescript-sdk/turbo.json | 5 + 160 files changed, 2695 insertions(+), 5671 deletions(-) delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/agnoPages/AgenticChatPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/agnoPages/ToolBaseGenUIPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/crewAIPages/AgenticChatPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/crewAIPages/SharedStatePage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/crewAIPages/ToolBaseGenUIPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/langGraphFastAPIPages/AgenticChatPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/langGraphFastAPIPages/SharedStatePage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/langGraphFastAPIPages/ToolBaseGenUIPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/langGraphPages/AgenticChatPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/langGraphPages/SharedStatePage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/langGraphPages/ToolBaseGenUIPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/llamaIndexPages/AgenticChatPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/llamaIndexPages/SharedStatePage.ts delete mode 100644 
typescript-sdk/apps/dojo/e2e/pages/mastraAgentLocalPages/AgenticChatPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/mastraAgentLocalPages/SharedStatePage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/mastraAgentLocalPages/ToolBaseGenUIPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/mastraPages/AgenticChatPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/mastraPages/ToolBaseGenUIPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/middlewareStarterPages/AgenticChatPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/AgenticChatPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/SharedStatePage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/ToolBaseGenUIPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/serverStarterAllFeaturesPages/AgenticChatPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/serverStarterAllFeaturesPages/SharedStatePage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/serverStarterAllFeaturesPages/ToolBaseGenUIPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/serverStarterPages/AgenticChatPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/pages/vercelAISdkPages/AgenticChatPage.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/tests/crewAITests/predictvieStateUpdatePage.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/predictvieStateUpdatePage.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/tests/langgraphTests/predictvieStateUpdatePage.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/predictvieStateUpdatePage.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/predictvieStateUpdatePage.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e2/.gitignore delete mode 100644 typescript-sdk/apps/dojo/e2e2/package.json delete mode 100644 
typescript-sdk/apps/dojo/e2e2/playwright.config.ts delete mode 100644 typescript-sdk/apps/dojo/e2e2/pnpm-lock.yaml delete mode 100644 typescript-sdk/apps/dojo/e2e2/pnpm-workspace.yaml delete mode 100644 typescript-sdk/apps/dojo/e2e2/tests/agno-agentic-chat.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e2/tests/crewai-agentic-chat.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e2/tests/langgraph-agentic-chat.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e2/tests/langgraph-fastapi-agentic-chat.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e2/tests/langgraph-typescript-agentic-chat.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e2/tests/llama-index-agentic-chat.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e2/tests/mastra-agentic-chat.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e2/tests/pydantic-agentic-chat.spec.ts delete mode 100644 typescript-sdk/apps/dojo/e2e2/tests/vercel-ai-sdk-agentic-chat.spec.ts diff --git a/.github/workflows/dojo-e2e.yml b/.github/workflows/dojo-e2e.yml index f0597e633..eed4944c9 100644 --- a/.github/workflows/dojo-e2e.yml +++ b/.github/workflows/dojo-e2e.yml @@ -8,8 +8,60 @@ on: jobs: e2e: - name: E2E Tests - runs-on: depot-ubuntu-latest-8 + name: ${{ matrix.suite }} + runs-on: depot-ubuntu-24.04 + strategy: + fail-fast: false + matrix: + include: + - suite: agno + test_path: tests/agnoTests + services: ["dojo","agno"] + wait_on: http://localhost:9999,tcp:localhost:8002 + - suite: crew-ai + test_path: tests/crewAITests + services: ["dojo","crew-ai"] + wait_on: http://localhost:9999,tcp:localhost:8003 + - suite: langgraph + test_path: tests/langgraphTests + services: ["dojo","langgraph-platform-python","langgraph-platform-typescript"] + wait_on: http://localhost:9999,tcp:localhost:8005,tcp:localhost:8006 + - suite: langgraph-fastapi + test_path: tests/langgraphFastAPITests + services: ["dojo","langgraph-fastapi"] + wait_on: http://localhost:9999,tcp:localhost:8004 + - suite: llama-index 
+ test_path: tests/llamaIndexTests + services: ["dojo","llama-index"] + wait_on: http://localhost:9999,tcp:localhost:8007 + - suite: mastra + test_path: tests/mastraTests + services: ["dojo","mastra"] + wait_on: http://localhost:9999,tcp:localhost:8008 + - suite: mastra-agent-local + test_path: tests/mastraAgentLocalTests + services: ["dojo"] + wait_on: http://localhost:9999 + - suite: middleware-starter + test_path: tests/middlewareStarterTests + services: ["dojo"] + wait_on: http://localhost:9999 + - suite: pydantic-ai + test_path: tests/pydanticAITests + services: ["dojo","pydantic-ai"] + wait_on: http://localhost:9999,tcp:localhost:8009 + - suite: server-starter + test_path: tests/serverStarterTests + services: ["dojo","server-starter"] + wait_on: http://localhost:9999,tcp:localhost:8000 + - suite: server-starter-all + test_path: tests/serverStarterAllFeaturesTests + services: ["dojo","server-starter-all"] + wait_on: http://localhost:9999,tcp:localhost:8001 + - suite: vercel-ai-sdk + test_path: tests/vercelAISdkTests + services: ["dojo"] + wait_on: http://localhost:9999 steps: - name: Checkout code @@ -25,6 +77,33 @@ jobs: with: version: 10.13.1 + # Now that pnpm is available, cache its store to speed installs + - name: Resolve pnpm store path + id: pnpm-store + run: echo "STORE_PATH=$(pnpm store path --silent)" >> $GITHUB_ENV + + - name: Cache pnpm store + uses: actions/cache@v4 + with: + path: ${{ env.STORE_PATH }} + key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} + restore-keys: | + ${{ runner.os }}-pnpm-store- + + # Cache Python tool caches and virtualenvs; restore only to avoid long saves + - name: Cache Python dependencies (restore-only) + id: cache-python + uses: actions/cache/restore@v4 + with: + path: | + ~/.cache/pip + ~/.cache/pypoetry + ~/.cache/uv + **/.venv + key: ${{ runner.os }}-pydeps-${{ hashFiles('**/poetry.lock', '**/pyproject.toml') }} + restore-keys: | + ${{ runner.os }}-pydeps- + - name: Install Poetry uses: 
snok/install-poetry@v1 with: @@ -35,21 +114,14 @@ jobs: - name: Install uv uses: astral-sh/setup-uv@v6 - - name: Setup pnpm cache - uses: actions/cache@v4 - with: - path: ~/.local/share/pnpm/store - key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} - restore-keys: | - ${{ runner.os }}-pnpm-store- - - name: Install dependencies working-directory: typescript-sdk run: pnpm install --frozen-lockfile - name: Prepare dojo for e2e working-directory: typescript-sdk/apps/dojo - run: node ./scripts/prep-dojo-everything.js -e2e + if: ${{ join(matrix.services, ',') != '' }} + run: node ./scripts/prep-dojo-everything.js --only ${{ join(matrix.services, ',') }} - name: Install e2e dependencies working-directory: typescript-sdk/apps/dojo/e2e @@ -61,6 +133,7 @@ jobs: env: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} LANGSMITH_API_KEY: ${{ secrets.LANGSMITH_API_KEY }} + if: ${{ contains(join(matrix.services, ','), 'langgraph-fastapi') || contains(join(matrix.services, ','), 'langgraph-platform-python') || contains(join(matrix.services, ','), 'langgraph-platform-typescript') }} run: | echo "OPENAI_API_KEY=${OPENAI_API_KEY}" > examples/python/.env echo "LANGSMITH_API_KEY=${LANGSMITH_API_KEY}" >> examples/python/.env @@ -74,33 +147,28 @@ jobs: env: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} LANGSMITH_API_KEY: ${{ secrets.LANGSMITH_API_KEY }} + if: ${{ join(matrix.services, ',') != '' && contains(join(matrix.services, ','), 'dojo') }} with: run: | - node ../scripts/run-dojo-everything.js + node ../scripts/run-dojo-everything.js --only ${{ join(matrix.services, ',') }} working-directory: typescript-sdk/apps/dojo/e2e - wait-on: | - http://localhost:9999 - tcp:localhost:8000 - tcp:localhost:8001 - tcp:localhost:8002 - tcp:localhost:8003 - tcp:localhost:8004 - tcp:localhost:8005 - tcp:localhost:8006 - tcp:localhost:8007 - tcp:localhost:8008 - tcp:localhost:8009 - - - name: Run tests + wait-on: ${{ matrix.wait_on }} + wait-for: 300000 + + - name: Run tests – ${{ 
matrix.suite }} working-directory: typescript-sdk/apps/dojo/e2e env: BASE_URL: http://localhost:9999 - run: pnpm test + PLAYWRIGHT_SUITE: ${{ matrix.suite }} + run: | + pnpm test -- ${{ matrix.test_path }} - - name: Upload traces + - name: Upload traces – ${{ matrix.suite }} if: always() # Uploads artifacts even if tests fail uses: actions/upload-artifact@v4 with: - name: playwright-traces - path: typescript-sdk/apps/dojo/e2e/test-results/ + name: ${{ matrix.suite }}-playwright-traces + path: | + typescript-sdk/apps/dojo/e2e/test-results/${{ matrix.suite }}/**/* + typescript-sdk/apps/dojo/e2e/playwright-report/**/* retention-days: 7 diff --git a/.gitignore b/.gitignore index 1d74e2196..abb94deaa 100644 --- a/.gitignore +++ b/.gitignore @@ -1 +1,3 @@ +**/.claude/settings.local.json .vscode/ +.idea/ diff --git a/README.md b/README.md index c4f13d30d..afeed2c92 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,6 @@ Built for simplicity and flexibility, it enables seamless integration between AI --- -[📅 Upcoming Event: August 6th - AG-UI + Mastra: Build a Project Management Canvas](https://lu.ma/94688z7e)
@@ -68,9 +67,8 @@ AG-UI is complementary to the other 2 top agentic protocols - AG-UI brings agents into user-facing applications
- + The Agent Protocol Stack
- ## 🚀 Features @@ -86,22 +84,22 @@ AG-UI is complementary to the other 2 top agentic protocols AG-UI was born from CopilotKit's initial partnership with LangGraph and CrewAI - and brings the incredibly popular agent-user-interactivity infrastructure to the wider agentic ecosystem. -| Framework | Status | AG-UI Resources | Integrations | +| Framework | Status | AG-UI Resources | Integrations | | ------------------------------------------------------------------ | ------------------------ | ---------------------------------------------------------------------------- | ------------------------ | | No-framework | ✅ Supported | ➡️ Docs coming soon | | -| [LangGraph](https://www.langchain.com/langgraph) | ✅ Supported | ➡️ [Demo](https://v0-langgraph-land.vercel.app/) | Partnership | -| [CrewAI](https://crewai.com/) | ✅ Supported | ➡️ [Demo](https://v0-crew-land.vercel.app/) | Partnership | -| [Mastra](https://mastra.ai/) | ✅ Supported | ➡️ [Demo](https://v0-mastra-land.vercel.app/) | 1st party | -| [AG2](https://ag2.ai/) | ✅ Supported | ➡️ [Demo](https://v0-ag2-land.vercel.app/) | 1st party | -| [Agno](https://github.com/agno-agi/agno) | ✅ Supported | ➡️ [Docs](https://docs.copilotkit.ai/agno) | 1st party | -| [LlamaIndex](https://github.com/run-llama/llama_index) | ✅ Supported | ➡️ [Docs](https://docs.copilotkit.ai/llamaindex) | 1st party | -| [Pydantic AI](https://github.com/pydantic/pydantic-ai) | ✅ Supported | ➡️ [Docs](https://docs.copilotkit.ai/pydantic-ai) | 1st party | +| [LangGraph](https://www.langchain.com/langgraph) | ✅ Supported | ➡️ [Docs](https://docs.copilotkit.ai/langgraph/) [Demos](https://dojo.ag-ui.com/langgraph-fastapi/feature/shared_state) | Partnership | +| [CrewAI](https://crewai.com/) | ✅ Supported | ➡️ [Docs](https://docs.copilotkit.ai/crewai-flows) [Demos](https://dojo.ag-ui.com/crewai/feature/shared_state) | Partnership | +| [Mastra](https://mastra.ai/) | ✅ Supported | ➡️ [Docs](https://docs.copilotkit.ai/mastra/) 
[Demos](https://dojo.ag-ui.com/mastra/feature/shared_state) | 1st party | +| [AG2](https://ag2.ai/) | ✅ Supported | ➡️ [Docs](https://docs.copilotkit.ai/ag2/) [Demos](https://dojo.ag-ui.com/ag2/feature/shared_state) | 1st party | +| [Agno](https://github.com/agno-agi/agno) | ✅ Supported | ➡️ [Docs](https://docs.copilotkit.ai/agno/) [Demos](https://dojo.ag-ui.com/agno/feature/shared_state) | 1st party | +| [LlamaIndex](https://github.com/run-llama/llama_index) | ✅ Supported | ➡️ [Docs](https://docs.copilotkit.ai/llamaindex/) [Demos](https://dojo.ag-ui.com/llamaindex/feature/shared_state) | 1st party | +| [Pydantic AI](https://github.com/pydantic/pydantic-ai) | ✅ Supported | ➡️ [Docs](https://docs.copilotkit.ai/pydantic-ai/) [Demos](https://dojo.ag-ui.com/pydantic-ai/feature/shared_state) | 1st party | +| [Google ADK](https://google.github.io/adk-docs/get-started/) | 🛠️ [PR](https://github.com/ag-ui-protocol/ag-ui/pull/274) | – | 1st party | +| [AWS Bedrock Agents](https://aws.amazon.com/bedrock/agents/) | 🛠️ In Progress | – | 1st party | +| [AWS Strands Agents](https://github.com/strands-agents/sdk-python) | 🛠️ In Progress | – | 1st Party | | [Vercel AI SDK](https://github.com/vercel/ai) | 🛠️ In Progress | – | Community | -| [Google ADK](https://google.github.io/adk-docs/get-started/) | 🛠️ In Progress | – | Community | | [OpenAI Agent SDK](https://openai.github.io/openai-agents-python/) | 🛠️ In Progress | – | Community | -| [AWS Bedrock Agents](https://aws.amazon.com/bedrock/agents/) | 🛠️ In Progress | – | 1st party | | [Cloudflare Agents](https://developers.cloudflare.com/agents/) | 💡 Open to Contributions | – | Community | -| [Strands Agents SDK](https://github.com/strands-agents/sdk-python) | 💡 Open to Contributions | – | Community | [View all supported frameworks →](https://ag-ui.com/frameworks) @@ -131,7 +129,7 @@ https://agui-demo.vercel.app/ ## 🧩 AG-UI Showcase: The AG-UI Dojo (Building-Blocks Viewer) -The [AG-UI 
Dojo](https://copilotkit-feature-viewer.vercel.app/) showcases many of the building blocks that AG-UI supports ([AG-UI Dojo Source Code](https://github.com/ag-ui-protocol/ag-ui/tree/main/typescript-sdk/apps/dojo)). +The [AG-UI Dojo](https://dojo.ag-ui.com/langgraph-fastapi/feature/shared_state) showcases many of the building blocks that AG-UI supports ([AG-UI Dojo Source Code](https://github.com/ag-ui-protocol/ag-ui/tree/main/typescript-sdk/apps/dojo)) with each Agent Framework integration. The building blocks are designed to be simple and focused -- between 50-200 lines of code. @@ -142,7 +140,7 @@ https://github.com/user-attachments/assets/a67d3d54-36b2-4c7a-ac69-a0ca01365d5b Check out the [Contributing guide](https://github.com/ag-ui-protocol/ag-ui/blob/main/CONTRIBUTING.md) -- **[Weekely AG-UI Working Group](https://lu.ma/CopilotKit?k=c)** +- **[Bi-Weekely AG-UI Working Group](https://lu.ma/CopilotKit?k=c)** 📅 Follow the CopilotKit Luma Events Calendar ## Roadmap diff --git a/docs/concepts/events.mdx b/docs/concepts/events.mdx index f7b88b9a9..d35cecd9a 100644 --- a/docs/concepts/events.mdx +++ b/docs/concepts/events.mdx @@ -194,10 +194,10 @@ for an incoming message, such as creating a new message bubble with a loading indicator. The `role` property identifies whether the message is coming from the assistant or potentially another participant in the conversation. -| Property | Description | -| ----------- | ---------------------------------------------- | -| `messageId` | Unique identifier for the message | -| `role` | Role of the message sender (e.g., "assistant") | +| Property | Description | +| ----------- | --------------------------------------------------------------------------------- | +| `messageId` | Unique identifier for the message | +| `role` | Role of the message sender ("developer", "system", "assistant", "user", "tool") | ### TextMessageContent @@ -231,6 +231,22 @@ automatic scrolling to ensure the full message is visible. 
| ----------- | -------------------------------------- | | `messageId` | Matches the ID from `TextMessageStart` | +### TextMessageChunk + +A self-contained text message event that combines start, content, and end. + +The `TextMessageChunk` event provides a convenient way to send complete text messages +in a single event instead of the three-event sequence (start, content, end). This is +particularly useful for simple messages or when the entire content is available at once. +The event includes both the message metadata and content, making it more efficient for +non-streaming scenarios. + +| Property | Description | +| ----------- | ------------------------------------------------------------------------------------- | +| `messageId` | Optional unique identifier for the message | +| `role` | Optional role of the sender ("developer", "system", "assistant", "user", "tool") | +| `delta` | Optional text content of the message | + ## Tool Call Events These events represent the lifecycle of tool calls made by agents. Tool calls diff --git a/docs/development/roadmap.mdx b/docs/development/roadmap.mdx index bff76ce7a..e5c188f71 100644 --- a/docs/development/roadmap.mdx +++ b/docs/development/roadmap.mdx @@ -3,17 +3,11 @@ title: Roadmap description: Our plans for evolving Agent User Interaction Protocol --- -The Agent User Interaction Protocol is rapidly evolving. This page outlines our -current thinking on key priorities and future direction. - - - The ideas presented here are not commitments—we may solve these challenges - differently than described, or some may not materialize at all. This is also - not an _exhaustive_ list; we may incorporate work that isn't mentioned here. - +You can follow the progress of the AG-UI Protocol on our +[public roadmap](https://github.com/orgs/ag-ui-protocol/projects/1). ## Get Involved -We welcome community participation in shaping AG-UI's future. 
Visit our -[GitHub Discussions](https://github.com/orgs/ag-ui-protocol/discussions) to join -the conversation and contribute your ideas. +If you’d like to contribute ideas, feature requests, or bug reports to +the roadmap, please see the [Contributing Guide](https://github.com/ag-ui-protocol/ag-ui/blob/main/CONTRIBUTING.md) +for details on how to get involved. diff --git a/docs/introduction.mdx b/docs/introduction.mdx index 7ef26cb96..161dfe0a4 100644 --- a/docs/introduction.mdx +++ b/docs/introduction.mdx @@ -29,16 +29,13 @@ AG-UI provides: AG-UI has been integrated with several popular agent frameworks, making it easy to adopt regardless of your preferred tooling: -- **[LangGraph](https://docs.copilotkit.ai/coagents)**: Build agent-native - applications with shared state and human-in-the-loop workflows using - LangGraph's powerful orchestration capabilities. -- **[CrewAI Flows](https://docs.copilotkit.ai/crewai-flows)**: Create sequential - multi-agent workflows with well-defined stages and process control. -- **[CrewAI Crews](https://docs.copilotkit.ai/crewai-crews)**: Design - collaborative agent teams with specialized roles and inter-agent - communication. -- **[Mastra](/mastra)**: Leverage TypeScript for building strongly-typed agent - implementations with enhanced developer experience. +- **[LangGraph](https://docs.copilotkit.ai/coagents)**: Build agent-native applications with shared state and human-in-the-loop workflows using LangGraph's powerful orchestration capabilities. +- **[Mastra](/mastra)**: Leverage TypeScript for building strongly-typed agent implementations with enhanced developer experience. +- **[Pydantic AI](https://docs.copilotkit.ai/pydantic-ai)**: Painlessly build production grade agentic applications and workflows using fully type-safe Python. +- **[CrewAI Flows](https://docs.copilotkit.ai/crewai-flows)**: Create sequential multi-agent workflows with well-defined stages and process control. 
+- **[CrewAI Crews](https://docs.copilotkit.ai/crewai-crews)**: Design collaborative agent teams with specialized roles and inter-agent communication. +- **[Agno](https://docs.copilotkit.ai/agno)**: Build, run and manage secure multi-agent systems in your cloud with Agno's AgentOS. +- **[LlamaIndex](https://docs.copilotkit.ai/llamaindex)**: A simple, flexible framework for building agentic generative AI applications that allow large language models to work with your data in any format. - **[AG2](/ag2)**: Utilize the open-source AgentOS for scalable, production-ready agent deployments. diff --git a/docs/quickstart/applications.mdx b/docs/quickstart/applications.mdx index d2febc723..8f71ed2fa 100644 --- a/docs/quickstart/applications.mdx +++ b/docs/quickstart/applications.mdx @@ -26,3 +26,10 @@ npx create-ag-ui-app@latest src="https://copilotkit-public-assets.s3.us-east-1.amazonaws.com/docs/ag-ui/quickstart.gif" /> +Once the setup is done, start the server with + +```sh +npm run dev +``` + +For the copilotkit example you can head to http://localhost:3000/copilotkit to see the app in action. diff --git a/docs/quickstart/server.mdx b/docs/quickstart/server.mdx index cc7883299..552952342 100644 --- a/docs/quickstart/server.mdx +++ b/docs/quickstart/server.mdx @@ -174,7 +174,7 @@ Open `apps/dojo/package.json` and add the package `@ag-ui/openai-server`: Now let's see your work in action. First, start your Python server: ```bash -cd integrations/openai/server/python +cd integrations/openai-server/server/python poetry install && poetry run dev ``` @@ -197,7 +197,7 @@ world!** for now. Here's what's happening with that stub server: ```python -# integrations/openai/server/python/example_server/__init__.py +# integrations/openai-server/server/python/example_server/__init__.py @app.post("/") async def agentic_chat_endpoint(input_data: RunAgentInput, request: Request): """Agentic chat endpoint""" @@ -268,7 +268,7 @@ OpenAI. 
First, we need the OpenAI SDK: ```bash -cd integrations/openai/server/python +cd integrations/openai-server/server/python poetry add openai ``` diff --git a/python-sdk/ag_ui/core/__init__.py b/python-sdk/ag_ui/core/__init__.py index a545ee13d..d726d984d 100644 --- a/python-sdk/ag_ui/core/__init__.py +++ b/python-sdk/ag_ui/core/__init__.py @@ -42,7 +42,6 @@ UserMessage, ToolMessage, Message, - Role, Context, Tool, RunAgentInput, @@ -87,7 +86,7 @@ "UserMessage", "ToolMessage", "Message", - "Role", + "TextMessageRole", "Context", "Tool", "RunAgentInput", diff --git a/python-sdk/ag_ui/core/events.py b/python-sdk/ag_ui/core/events.py index 256df6778..91b84ab3f 100644 --- a/python-sdk/ag_ui/core/events.py +++ b/python-sdk/ag_ui/core/events.py @@ -9,6 +9,9 @@ from .types import ConfiguredBaseModel, Message, StateT, JSONValue +# Text messages can have any role except "tool" +TextMessageRole = Literal["developer", "system", "assistant", "user"] + class EventType(str, Enum): """ @@ -58,7 +61,7 @@ class TextMessageStartEvent(BaseEvent): type: Literal[EventType.TEXT_MESSAGE_START] = EventType.TEXT_MESSAGE_START # pyright: ignore[reportIncompatibleVariableOverride] message_id: str - role: Literal["assistant"] = "assistant" + role: TextMessageRole = "assistant" class TextMessageContentEvent(BaseEvent): @@ -87,7 +90,7 @@ class TextMessageChunkEvent(BaseEvent): type: Literal[EventType.TEXT_MESSAGE_CHUNK] = EventType.TEXT_MESSAGE_CHUNK # pyright: ignore[reportIncompatibleVariableOverride] message_id: Optional[str] = None - role: Optional[Literal["assistant"]] = None + role: Optional[TextMessageRole] = None delta: Optional[str] = None diff --git a/python-sdk/ag_ui/core/types.py b/python-sdk/ag_ui/core/types.py index 6aaa13729..ba2cedf08 100644 --- a/python-sdk/ag_ui/core/types.py +++ b/python-sdk/ag_ui/core/types.py @@ -105,8 +105,6 @@ class ToolMessage(ConfiguredBaseModel): Field(discriminator="role"), ] -Role = Literal["developer", "system", "assistant", "user", "tool"] - class 
Context(ConfiguredBaseModel): """ diff --git a/typescript-sdk/.gitignore b/typescript-sdk/.gitignore index 5c8126db7..fcaeddba0 100644 --- a/typescript-sdk/.gitignore +++ b/typescript-sdk/.gitignore @@ -44,4 +44,5 @@ packages/proto/src/generated **/**/.langgraph_api # Python +venv __pycache__/ diff --git a/typescript-sdk/apps/dojo/e2e/package.json b/typescript-sdk/apps/dojo/e2e/package.json index bf2af8e93..8ea54b975 100644 --- a/typescript-sdk/apps/dojo/e2e/package.json +++ b/typescript-sdk/apps/dojo/e2e/package.json @@ -6,7 +6,8 @@ "scripts": { "postinstall": "playwright install --with-deps", "test": "playwright test", - "test:ui": "playwright test --ui" + "test:ui": "playwright test --ui", + "report": "playwright show-report" }, "devDependencies": { "@playwright/test": "^1.43.1", diff --git a/typescript-sdk/apps/dojo/e2e/pages/agnoPages/AgenticChatPage.ts b/typescript-sdk/apps/dojo/e2e/pages/agnoPages/AgenticChatPage.ts deleted file mode 100644 index c46329528..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/agnoPages/AgenticChatPage.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { Page, Locator, expect } from "@playwright/test"; - -export class AgenticChatPage { - readonly page: Page; - readonly openChatButton: Locator; - readonly agentGreeting: Locator; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - - constructor(page: Page) { - this.page = page; - this.openChatButton = page.getByRole("button", { - name: /chat/i, - }); - this.agentGreeting = page - .getByText("Hi, I'm an agent. Want to chat?"); - this.chatInput = page - .getByRole("textbox", { name: "Type a message..." 
}) - .or(page.getByRole("textbox")) - .or(page.locator('input[type="text"]')) - .or(page.locator('textarea')); - this.sendButton = page - .locator('[data-test-id="copilot-chat-ready"]') - .or(page.getByRole("button", { name: /send/i })) - .or(page.locator('button[type="submit"]')); - this.agentMessage = page - .locator(".copilotKitAssistantMessage"); - this.userMessage = page - .locator(".copilotKitUserMessage"); - } - - async openChat() { - try { - await this.openChatButton.click({ timeout: 3000 }); - } catch (error) { - // Chat might already be open - } - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - try { - await this.sendButton.click(); - } catch (error) { - await this.chatInput.press("Enter"); - } - } - - async assertUserMessageVisible(text: string | RegExp) { - await expect(this.userMessage.getByText(text)).toBeVisible(); - } - - async assertAgentReplyVisible(expectedText: RegExp) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage", { - hasText: expectedText, - }); - await expect(agentMessage.last()).toBeVisible({ timeout: 10000 }); - } - - async assertAgentReplyContains(expectedText: string) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage").last(); - await expect(agentMessage).toContainText(expectedText, { timeout: 10000 }); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/agnoPages/ToolBaseGenUIPage.ts b/typescript-sdk/apps/dojo/e2e/pages/agnoPages/ToolBaseGenUIPage.ts deleted file mode 100644 index f2d648a5e..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/agnoPages/ToolBaseGenUIPage.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class ToolBaseGenUIPage { - readonly page: Page; - readonly haikuAgentIntro: Locator; - readonly messageBox: Locator; - readonly sendButton: Locator; - readonly applyButton: Locator; - readonly appliedButton: Locator; - 
readonly haikuBlock: Locator; - readonly japaneseLines: Locator; - - constructor(page: Page) { - this.page = page; - this.haikuAgentIntro = page.getByText("I'm a haiku generator 👋. How can I help you?"); - this.messageBox = page.getByRole('textbox', { name: 'Type a message...' }); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.haikuBlock = page.locator('[data-testid="haiku-card"]'); - this.applyButton = page.getByRole('button', { name: 'Apply' }); - this.japaneseLines = page.locator('[data-testid="haiku-line"]'); - } - - async generateHaiku(message: string) { - await this.messageBox.click(); - await this.messageBox.fill(message); - await this.sendButton.click(); - } - - async checkGeneratedHaiku() { - await this.page.locator('[data-testid="haiku-card"]').last().isVisible(); - const mostRecentCard = this.page.locator('[data-testid="haiku-card"]').last(); - await mostRecentCard.locator('[data-testid="haiku-line"]').first().waitFor({ state: 'visible', timeout: 10000 }); - } - - async extractChatHaikuContent(page: Page): Promise { - await page.waitForTimeout(3000); - await page.locator('[data-testid="haiku-card"]').first().waitFor({ state: 'visible' }); - const allHaikuCards = page.locator('[data-testid="haiku-card"]'); - const cardCount = await allHaikuCards.count(); - let chatHaikuContainer; - let chatHaikuLines; - - for (let cardIndex = cardCount - 1; cardIndex >= 0; cardIndex--) { - chatHaikuContainer = allHaikuCards.nth(cardIndex); - chatHaikuLines = chatHaikuContainer.locator('[data-testid="haiku-line"]'); - const linesCount = await chatHaikuLines.count(); - - if (linesCount > 0) { - try { - await chatHaikuLines.first().waitFor({ state: 'visible', timeout: 5000 }); - break; - } catch (error) { - continue; - } - } - } - - if (!chatHaikuLines) { - throw new Error('No haiku cards with visible lines found'); - } - - const count = await chatHaikuLines.count(); - const lines: string[] = []; - - for (let i = 0; i < count; i++) { - const 
haikuLine = chatHaikuLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - - const chatHaikuContent = lines.join('').replace(/\s/g, ''); - return chatHaikuContent; - } - - async extractMainDisplayHaikuContent(page: Page): Promise { - const mainDisplayLines = page.locator('[data-testid="main-haiku-line"]'); - const mainCount = await mainDisplayLines.count(); - const lines: string[] = []; - - if (mainCount > 0) { - for (let i = 0; i < mainCount; i++) { - const haikuLine = mainDisplayLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - } - - const mainHaikuContent = lines.join('').replace(/\s/g, ''); - return mainHaikuContent; - } - - async checkHaikuDisplay(page: Page): Promise { - const chatHaikuContent = await this.extractChatHaikuContent(page); - - await page.waitForTimeout(5000); - - const mainHaikuContent = await this.extractMainDisplayHaikuContent(page); - - if (mainHaikuContent === '') { - expect(chatHaikuContent.length).toBeGreaterThan(0); - return; - } - - if (chatHaikuContent === mainHaikuContent) { - expect(mainHaikuContent).toBe(chatHaikuContent); - } else { - await page.waitForTimeout(3000); - - const updatedMainContent = await this.extractMainDisplayHaikuContent(page); - - expect(updatedMainContent).toBe(chatHaikuContent); - } - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/AgenticChatPage.ts b/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/AgenticChatPage.ts deleted file mode 100644 index 85be9da44..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/AgenticChatPage.ts +++ /dev/null @@ -1,120 +0,0 @@ -import { Page, Locator, expect } from "@playwright/test"; - -export class AgenticChatPage { - readonly page: Page; - readonly openChatButton: Locator; - readonly agentGreeting: Locator; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly 
chatBackground: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - - constructor(page: Page) { - this.page = page; - this.openChatButton = page.getByRole("button", { - name: /chat/i, - }); - this.agentGreeting = page - .getByText("Hi, I'm an agent. Want to chat?"); - this.chatInput = page - .getByRole("textbox", { name: "Type a message..." }) - .or(page.getByRole("textbox")) - .or(page.locator('input[type="text"]')) - .or(page.locator('textarea')); - this.sendButton = page - .locator('[data-test-id="copilot-chat-ready"]') - .or(page.getByRole("button", { name: /send/i })) - .or(page.locator('button[type="submit"]')); - this.chatBackground = page - .locator('div[style*="background"]') - .or(page.locator('.flex.justify-center.items-center.h-full.w-full')) - .or(page.locator('body')); - this.agentMessage = page - .locator(".copilotKitAssistantMessage"); - this.userMessage = page - .locator(".copilotKitUserMessage"); - } - - async openChat() { - try { - await this.openChatButton.click({ timeout: 3000 }); - } catch (error) { - // Chat might already be open - } - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - try { - await this.sendButton.click(); - } catch (error) { - await this.chatInput.press("Enter"); - } - } - - async getBackground( - property: "backgroundColor" | "backgroundImage" = "backgroundColor" -): Promise { - // Wait a bit for background to apply - await this.page.waitForTimeout(500); - - // Try multiple selectors for the background element - const selectors = [ - 'div[style*="background"]', - 'div[style*="background-color"]', - '.flex.justify-center.items-center.h-full.w-full', - 'div.flex.justify-center.items-center.h-full.w-full', - '[class*="bg-"]', - 'div[class*="background"]' - ]; - - for (const selector of selectors) { - try { - const element = this.page.locator(selector).first(); - if (await element.isVisible({ timeout: 1000 })) { - const value = await 
element.evaluate( - (el, prop) => { - // Check inline style first - if (el.style.background) return el.style.background; - if (el.style.backgroundColor) return el.style.backgroundColor; - // Then computed style - return getComputedStyle(el)[prop as any]; - }, - property - ); - if (value && value !== "rgba(0, 0, 0, 0)" && value !== "transparent") { - console.log(`[${selector}] ${property}: ${value}`); - return value; - } - } - } catch (e) { - continue; - } - } - - // Fallback to original element - const value = await this.chatBackground.first().evaluate( - (el, prop) => getComputedStyle(el)[prop as any], - property - ); - console.log(`[Fallback] ${property}: ${value}`); - return value; -} - - async getGradientButtonByName(name: string | RegExp) { - return this.page.getByRole("button", { name }); - } - - async assertUserMessageVisible(text: string | RegExp) { - await expect(this.userMessage.getByText(text)).toBeVisible(); - } - - async assertAgentReplyVisible(expectedText: RegExp) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage", { - hasText: expectedText, - }); - await expect(agentMessage.last()).toBeVisible({ timeout: 10000 }); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/PredictiveStateUpdatesPage.ts b/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/PredictiveStateUpdatesPage.ts index b7dcd9c3f..005bee3a7 100644 --- a/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/PredictiveStateUpdatesPage.ts +++ b/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/PredictiveStateUpdatesPage.ts @@ -30,7 +30,7 @@ export class PredictiveStateUpdatesPage { this.userMessage = page.locator('.copilotKitUserMessage'); } - async openChat() { + async openChat() { await this.agentGreeting.isVisible(); } @@ -54,13 +54,13 @@ export class PredictiveStateUpdatesPage { } async getUserApproval() { - await this.userApprovalModal.isVisible(); + await this.userApprovalModal.last().isVisible(); await this.getButton(this.page, 
"Confirm"); - const acceptedLabel = this.userApprovalModal.locator('text=✓ Accepted'); + const acceptedLabel = this.userApprovalModal.last().locator('text=✓ Accepted'); } async getUserRejection() { - await this.userApprovalModal.isVisible(); + await this.userApprovalModal.last().isVisible(); await this.getButton(this.page, "Reject"); const rejectedLabel = await this.getStatusLabelOfButton(this.page, "✕ Rejected"); await rejectedLabel.isVisible(); @@ -85,7 +85,7 @@ export class PredictiveStateUpdatesPage { 'div.tiptap em', 'div.tiptap s' ]; - + let count = 0; for (const selector of highlightSelectors) { count = await this.page.locator(selector).count(); @@ -93,7 +93,7 @@ export class PredictiveStateUpdatesPage { break; } } - + if (count > 0) { expect(count).toBeGreaterThan(0); } else { diff --git a/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/SharedStatePage.ts b/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/SharedStatePage.ts deleted file mode 100644 index 807b61bd6..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/SharedStatePage.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class SharedStatePage { - readonly page: Page; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly agentGreeting: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - readonly promptResponseLoader: Locator; - readonly ingredientCards: Locator; - readonly instructionsContainer: Locator; - readonly addIngredient: Locator; - - constructor(page: Page) { - this.page = page; - // Remove iframe references and use actual greeting text - this.agentGreeting = page.getByText("Hi 👋 How can I help with your recipe?"); - this.chatInput = page.getByRole('textbox', { name: 'Type a message...' 
}); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.promptResponseLoader = page.getByRole('button', { name: 'Please Wait...', disabled: true }); - this.instructionsContainer = page.locator('.instructions-container'); - this.addIngredient = page.getByRole('button', { name: '+ Add Ingredient' }); - this.agentMessage = page.locator('.copilotKitAssistantMessage'); - this.userMessage = page.locator('.copilotKitUserMessage'); - } - - async openChat() { - await this.agentGreeting.isVisible(); - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - await this.sendButton.click(); - } - - async loader() { - const timeout = (ms) => new Promise((_, reject) => { - setTimeout(() => reject(new Error("Timeout waiting for promptResponseLoader to become visible")), ms); - }); - - await Promise.race([ - this.promptResponseLoader.isVisible(), - timeout(5000) // 5 seconds timeout - ]); - } - - async getIngredientCard(name) { - return this.page.locator(`.ingredient-card:has(input.ingredient-name-input[value="${name}"])`); - } - - async addNewIngredient(placeholderText) { - this.addIngredient.click(); - this.page.locator(`input[placeholder="${placeholderText}"]`); - } - - async getInstructionItems(containerLocator) { - const count = await containerLocator.locator('.instruction-item').count(); - if (count <= 0) { - throw new Error('No instruction items found in the container.'); - } - console.log(`✅ Found ${count} instruction items.`); - return count; - } - - async assertAgentReplyVisible(expectedText: RegExp) { - await expect(this.agentMessage.getByText(expectedText)).toBeVisible(); - } - - async assertUserMessageVisible(message: string) { - await expect(this.page.getByText(message)).toBeVisible(); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/ToolBaseGenUIPage.ts b/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/ToolBaseGenUIPage.ts deleted file 
mode 100644 index f2d648a5e..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/crewAIPages/ToolBaseGenUIPage.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class ToolBaseGenUIPage { - readonly page: Page; - readonly haikuAgentIntro: Locator; - readonly messageBox: Locator; - readonly sendButton: Locator; - readonly applyButton: Locator; - readonly appliedButton: Locator; - readonly haikuBlock: Locator; - readonly japaneseLines: Locator; - - constructor(page: Page) { - this.page = page; - this.haikuAgentIntro = page.getByText("I'm a haiku generator 👋. How can I help you?"); - this.messageBox = page.getByRole('textbox', { name: 'Type a message...' }); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.haikuBlock = page.locator('[data-testid="haiku-card"]'); - this.applyButton = page.getByRole('button', { name: 'Apply' }); - this.japaneseLines = page.locator('[data-testid="haiku-line"]'); - } - - async generateHaiku(message: string) { - await this.messageBox.click(); - await this.messageBox.fill(message); - await this.sendButton.click(); - } - - async checkGeneratedHaiku() { - await this.page.locator('[data-testid="haiku-card"]').last().isVisible(); - const mostRecentCard = this.page.locator('[data-testid="haiku-card"]').last(); - await mostRecentCard.locator('[data-testid="haiku-line"]').first().waitFor({ state: 'visible', timeout: 10000 }); - } - - async extractChatHaikuContent(page: Page): Promise { - await page.waitForTimeout(3000); - await page.locator('[data-testid="haiku-card"]').first().waitFor({ state: 'visible' }); - const allHaikuCards = page.locator('[data-testid="haiku-card"]'); - const cardCount = await allHaikuCards.count(); - let chatHaikuContainer; - let chatHaikuLines; - - for (let cardIndex = cardCount - 1; cardIndex >= 0; cardIndex--) { - chatHaikuContainer = allHaikuCards.nth(cardIndex); - chatHaikuLines = 
chatHaikuContainer.locator('[data-testid="haiku-line"]'); - const linesCount = await chatHaikuLines.count(); - - if (linesCount > 0) { - try { - await chatHaikuLines.first().waitFor({ state: 'visible', timeout: 5000 }); - break; - } catch (error) { - continue; - } - } - } - - if (!chatHaikuLines) { - throw new Error('No haiku cards with visible lines found'); - } - - const count = await chatHaikuLines.count(); - const lines: string[] = []; - - for (let i = 0; i < count; i++) { - const haikuLine = chatHaikuLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - - const chatHaikuContent = lines.join('').replace(/\s/g, ''); - return chatHaikuContent; - } - - async extractMainDisplayHaikuContent(page: Page): Promise { - const mainDisplayLines = page.locator('[data-testid="main-haiku-line"]'); - const mainCount = await mainDisplayLines.count(); - const lines: string[] = []; - - if (mainCount > 0) { - for (let i = 0; i < mainCount; i++) { - const haikuLine = mainDisplayLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - } - - const mainHaikuContent = lines.join('').replace(/\s/g, ''); - return mainHaikuContent; - } - - async checkHaikuDisplay(page: Page): Promise { - const chatHaikuContent = await this.extractChatHaikuContent(page); - - await page.waitForTimeout(5000); - - const mainHaikuContent = await this.extractMainDisplayHaikuContent(page); - - if (mainHaikuContent === '') { - expect(chatHaikuContent.length).toBeGreaterThan(0); - return; - } - - if (chatHaikuContent === mainHaikuContent) { - expect(mainHaikuContent).toBe(chatHaikuContent); - } else { - await page.waitForTimeout(3000); - - const updatedMainContent = await this.extractMainDisplayHaikuContent(page); - - expect(updatedMainContent).toBe(chatHaikuContent); - } - } -} \ No newline at end of file diff --git 
a/typescript-sdk/apps/dojo/e2e/pages/langGraphFastAPIPages/AgenticChatPage.ts b/typescript-sdk/apps/dojo/e2e/pages/langGraphFastAPIPages/AgenticChatPage.ts deleted file mode 100644 index 85be9da44..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/langGraphFastAPIPages/AgenticChatPage.ts +++ /dev/null @@ -1,120 +0,0 @@ -import { Page, Locator, expect } from "@playwright/test"; - -export class AgenticChatPage { - readonly page: Page; - readonly openChatButton: Locator; - readonly agentGreeting: Locator; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly chatBackground: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - - constructor(page: Page) { - this.page = page; - this.openChatButton = page.getByRole("button", { - name: /chat/i, - }); - this.agentGreeting = page - .getByText("Hi, I'm an agent. Want to chat?"); - this.chatInput = page - .getByRole("textbox", { name: "Type a message..." }) - .or(page.getByRole("textbox")) - .or(page.locator('input[type="text"]')) - .or(page.locator('textarea')); - this.sendButton = page - .locator('[data-test-id="copilot-chat-ready"]') - .or(page.getByRole("button", { name: /send/i })) - .or(page.locator('button[type="submit"]')); - this.chatBackground = page - .locator('div[style*="background"]') - .or(page.locator('.flex.justify-center.items-center.h-full.w-full')) - .or(page.locator('body')); - this.agentMessage = page - .locator(".copilotKitAssistantMessage"); - this.userMessage = page - .locator(".copilotKitUserMessage"); - } - - async openChat() { - try { - await this.openChatButton.click({ timeout: 3000 }); - } catch (error) { - // Chat might already be open - } - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - try { - await this.sendButton.click(); - } catch (error) { - await this.chatInput.press("Enter"); - } - } - - async getBackground( - property: "backgroundColor" | "backgroundImage" = "backgroundColor" 
-): Promise { - // Wait a bit for background to apply - await this.page.waitForTimeout(500); - - // Try multiple selectors for the background element - const selectors = [ - 'div[style*="background"]', - 'div[style*="background-color"]', - '.flex.justify-center.items-center.h-full.w-full', - 'div.flex.justify-center.items-center.h-full.w-full', - '[class*="bg-"]', - 'div[class*="background"]' - ]; - - for (const selector of selectors) { - try { - const element = this.page.locator(selector).first(); - if (await element.isVisible({ timeout: 1000 })) { - const value = await element.evaluate( - (el, prop) => { - // Check inline style first - if (el.style.background) return el.style.background; - if (el.style.backgroundColor) return el.style.backgroundColor; - // Then computed style - return getComputedStyle(el)[prop as any]; - }, - property - ); - if (value && value !== "rgba(0, 0, 0, 0)" && value !== "transparent") { - console.log(`[${selector}] ${property}: ${value}`); - return value; - } - } - } catch (e) { - continue; - } - } - - // Fallback to original element - const value = await this.chatBackground.first().evaluate( - (el, prop) => getComputedStyle(el)[prop as any], - property - ); - console.log(`[Fallback] ${property}: ${value}`); - return value; -} - - async getGradientButtonByName(name: string | RegExp) { - return this.page.getByRole("button", { name }); - } - - async assertUserMessageVisible(text: string | RegExp) { - await expect(this.userMessage.getByText(text)).toBeVisible(); - } - - async assertAgentReplyVisible(expectedText: RegExp) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage", { - hasText: expectedText, - }); - await expect(agentMessage.last()).toBeVisible({ timeout: 10000 }); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/langGraphFastAPIPages/SharedStatePage.ts b/typescript-sdk/apps/dojo/e2e/pages/langGraphFastAPIPages/SharedStatePage.ts deleted file mode 100644 index 
807b61bd6..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/langGraphFastAPIPages/SharedStatePage.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class SharedStatePage { - readonly page: Page; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly agentGreeting: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - readonly promptResponseLoader: Locator; - readonly ingredientCards: Locator; - readonly instructionsContainer: Locator; - readonly addIngredient: Locator; - - constructor(page: Page) { - this.page = page; - // Remove iframe references and use actual greeting text - this.agentGreeting = page.getByText("Hi 👋 How can I help with your recipe?"); - this.chatInput = page.getByRole('textbox', { name: 'Type a message...' }); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.promptResponseLoader = page.getByRole('button', { name: 'Please Wait...', disabled: true }); - this.instructionsContainer = page.locator('.instructions-container'); - this.addIngredient = page.getByRole('button', { name: '+ Add Ingredient' }); - this.agentMessage = page.locator('.copilotKitAssistantMessage'); - this.userMessage = page.locator('.copilotKitUserMessage'); - } - - async openChat() { - await this.agentGreeting.isVisible(); - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - await this.sendButton.click(); - } - - async loader() { - const timeout = (ms) => new Promise((_, reject) => { - setTimeout(() => reject(new Error("Timeout waiting for promptResponseLoader to become visible")), ms); - }); - - await Promise.race([ - this.promptResponseLoader.isVisible(), - timeout(5000) // 5 seconds timeout - ]); - } - - async getIngredientCard(name) { - return this.page.locator(`.ingredient-card:has(input.ingredient-name-input[value="${name}"])`); - } - - async addNewIngredient(placeholderText) { - 
this.addIngredient.click(); - this.page.locator(`input[placeholder="${placeholderText}"]`); - } - - async getInstructionItems(containerLocator) { - const count = await containerLocator.locator('.instruction-item').count(); - if (count <= 0) { - throw new Error('No instruction items found in the container.'); - } - console.log(`✅ Found ${count} instruction items.`); - return count; - } - - async assertAgentReplyVisible(expectedText: RegExp) { - await expect(this.agentMessage.getByText(expectedText)).toBeVisible(); - } - - async assertUserMessageVisible(message: string) { - await expect(this.page.getByText(message)).toBeVisible(); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/langGraphFastAPIPages/ToolBaseGenUIPage.ts b/typescript-sdk/apps/dojo/e2e/pages/langGraphFastAPIPages/ToolBaseGenUIPage.ts deleted file mode 100644 index 71ec39310..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/langGraphFastAPIPages/ToolBaseGenUIPage.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class ToolBaseGenUIPage { - readonly page: Page; - readonly haikuAgentIntro: Locator; - readonly messageBox: Locator; - readonly sendButton: Locator; - readonly applyButton: Locator; - readonly appliedButton: Locator; - readonly haikuBlock: Locator; - readonly japaneseLines: Locator; - - constructor(page: Page) { - this.page = page; - this.haikuAgentIntro = page.getByText("I'm a haiku generator 👋. How can I help you?"); - this.messageBox = page.getByRole('textbox', { name: 'Type a message...' 
}); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.haikuBlock = page.locator('[data-testid="haiku-card"]'); - this.applyButton = page.getByRole('button', { name: 'Apply' }); - this.japaneseLines = page.locator('[data-testid="haiku-line"]'); - } - - async checkGeneratedHaiku() { - await this.page.locator('[data-testid="haiku-card"]').last().isVisible(); - const mostRecentCard = this.page.locator('[data-testid="haiku-card"]').last(); - await mostRecentCard.locator('[data-testid="haiku-line"]').first().waitFor({ state: 'visible', timeout: 10000 }); - } - - async extractChatHaikuContent(page: Page): Promise { - await page.waitForTimeout(3000); - await page.locator('[data-testid="haiku-card"]').first().waitFor({ state: 'visible' }); - const allHaikuCards = page.locator('[data-testid="haiku-card"]'); - const cardCount = await allHaikuCards.count(); - - const expectedCardCount = await this.getExpectedHaikuCount(); - - if (cardCount < expectedCardCount) { - throw new Error(`Expected ${expectedCardCount} haiku cards but found ${cardCount} - haiku generation may have failed`); - } - - const mostRecentCard = allHaikuCards.last(); - const chatHaikuLines = mostRecentCard.locator('[data-testid="haiku-line"]'); - const linesCount = await chatHaikuLines.count(); - - if (linesCount === 0) { - throw new Error('Most recent haiku card has no visible lines - haiku generation failed'); - } - - try { - await chatHaikuLines.first().waitFor({ state: 'visible', timeout: 5000 }); - } catch (error) { - throw new Error('Most recent haiku card lines are not visible - haiku generation failed'); - } - - const count = await chatHaikuLines.count(); - const lines: string[] = []; - - for (let i = 0; i < count; i++) { - const haikuLine = chatHaikuLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - - const chatHaikuContent = lines.join('').replace(/\s/g, ''); - return chatHaikuContent; - } - - private 
haikuGenerationCount = 0; - - async generateHaiku(message: string) { - await this.messageBox.click(); - await this.messageBox.fill(message); - await this.sendButton.click(); - this.haikuGenerationCount++; - } - - async getExpectedHaikuCount(): Promise { - return this.haikuGenerationCount; - } - - async extractMainDisplayHaikuContent(page: Page): Promise { - const mainDisplayLines = page.locator('[data-testid="main-haiku-line"]'); - const mainCount = await mainDisplayLines.count(); - const lines: string[] = []; - - if (mainCount > 0) { - for (let i = 0; i < mainCount; i++) { - const haikuLine = mainDisplayLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - } - - const mainHaikuContent = lines.join('').replace(/\s/g, ''); - return mainHaikuContent; - } - - async checkHaikuDisplay(page: Page): Promise { - const chatHaikuContent = await this.extractChatHaikuContent(page); - - await page.waitForTimeout(5000); - - const mainHaikuContent = await this.extractMainDisplayHaikuContent(page); - - if (mainHaikuContent === '') { - throw new Error('Main display haiku content is empty - haiku was not properly generated or applied'); - } - - if (chatHaikuContent === mainHaikuContent) { - expect(mainHaikuContent).toBe(chatHaikuContent); - } else { - await page.waitForTimeout(3000); - - const updatedMainContent = await this.extractMainDisplayHaikuContent(page); - - if (updatedMainContent === '') { - throw new Error('Main display haiku content is still empty after additional wait - haiku generation failed'); - } - - expect(updatedMainContent).toBe(chatHaikuContent); - } - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/langGraphPages/AgenticChatPage.ts b/typescript-sdk/apps/dojo/e2e/pages/langGraphPages/AgenticChatPage.ts deleted file mode 100644 index 85be9da44..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/langGraphPages/AgenticChatPage.ts +++ /dev/null @@ -1,120 +0,0 @@ 
-import { Page, Locator, expect } from "@playwright/test"; - -export class AgenticChatPage { - readonly page: Page; - readonly openChatButton: Locator; - readonly agentGreeting: Locator; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly chatBackground: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - - constructor(page: Page) { - this.page = page; - this.openChatButton = page.getByRole("button", { - name: /chat/i, - }); - this.agentGreeting = page - .getByText("Hi, I'm an agent. Want to chat?"); - this.chatInput = page - .getByRole("textbox", { name: "Type a message..." }) - .or(page.getByRole("textbox")) - .or(page.locator('input[type="text"]')) - .or(page.locator('textarea')); - this.sendButton = page - .locator('[data-test-id="copilot-chat-ready"]') - .or(page.getByRole("button", { name: /send/i })) - .or(page.locator('button[type="submit"]')); - this.chatBackground = page - .locator('div[style*="background"]') - .or(page.locator('.flex.justify-center.items-center.h-full.w-full')) - .or(page.locator('body')); - this.agentMessage = page - .locator(".copilotKitAssistantMessage"); - this.userMessage = page - .locator(".copilotKitUserMessage"); - } - - async openChat() { - try { - await this.openChatButton.click({ timeout: 3000 }); - } catch (error) { - // Chat might already be open - } - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - try { - await this.sendButton.click(); - } catch (error) { - await this.chatInput.press("Enter"); - } - } - - async getBackground( - property: "backgroundColor" | "backgroundImage" = "backgroundColor" -): Promise { - // Wait a bit for background to apply - await this.page.waitForTimeout(500); - - // Try multiple selectors for the background element - const selectors = [ - 'div[style*="background"]', - 'div[style*="background-color"]', - '.flex.justify-center.items-center.h-full.w-full', - 
'div.flex.justify-center.items-center.h-full.w-full', - '[class*="bg-"]', - 'div[class*="background"]' - ]; - - for (const selector of selectors) { - try { - const element = this.page.locator(selector).first(); - if (await element.isVisible({ timeout: 1000 })) { - const value = await element.evaluate( - (el, prop) => { - // Check inline style first - if (el.style.background) return el.style.background; - if (el.style.backgroundColor) return el.style.backgroundColor; - // Then computed style - return getComputedStyle(el)[prop as any]; - }, - property - ); - if (value && value !== "rgba(0, 0, 0, 0)" && value !== "transparent") { - console.log(`[${selector}] ${property}: ${value}`); - return value; - } - } - } catch (e) { - continue; - } - } - - // Fallback to original element - const value = await this.chatBackground.first().evaluate( - (el, prop) => getComputedStyle(el)[prop as any], - property - ); - console.log(`[Fallback] ${property}: ${value}`); - return value; -} - - async getGradientButtonByName(name: string | RegExp) { - return this.page.getByRole("button", { name }); - } - - async assertUserMessageVisible(text: string | RegExp) { - await expect(this.userMessage.getByText(text)).toBeVisible(); - } - - async assertAgentReplyVisible(expectedText: RegExp) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage", { - hasText: expectedText, - }); - await expect(agentMessage.last()).toBeVisible({ timeout: 10000 }); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/langGraphPages/SharedStatePage.ts b/typescript-sdk/apps/dojo/e2e/pages/langGraphPages/SharedStatePage.ts deleted file mode 100644 index 807b61bd6..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/langGraphPages/SharedStatePage.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class SharedStatePage { - readonly page: Page; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly 
agentGreeting: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - readonly promptResponseLoader: Locator; - readonly ingredientCards: Locator; - readonly instructionsContainer: Locator; - readonly addIngredient: Locator; - - constructor(page: Page) { - this.page = page; - // Remove iframe references and use actual greeting text - this.agentGreeting = page.getByText("Hi 👋 How can I help with your recipe?"); - this.chatInput = page.getByRole('textbox', { name: 'Type a message...' }); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.promptResponseLoader = page.getByRole('button', { name: 'Please Wait...', disabled: true }); - this.instructionsContainer = page.locator('.instructions-container'); - this.addIngredient = page.getByRole('button', { name: '+ Add Ingredient' }); - this.agentMessage = page.locator('.copilotKitAssistantMessage'); - this.userMessage = page.locator('.copilotKitUserMessage'); - } - - async openChat() { - await this.agentGreeting.isVisible(); - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - await this.sendButton.click(); - } - - async loader() { - const timeout = (ms) => new Promise((_, reject) => { - setTimeout(() => reject(new Error("Timeout waiting for promptResponseLoader to become visible")), ms); - }); - - await Promise.race([ - this.promptResponseLoader.isVisible(), - timeout(5000) // 5 seconds timeout - ]); - } - - async getIngredientCard(name) { - return this.page.locator(`.ingredient-card:has(input.ingredient-name-input[value="${name}"])`); - } - - async addNewIngredient(placeholderText) { - this.addIngredient.click(); - this.page.locator(`input[placeholder="${placeholderText}"]`); - } - - async getInstructionItems(containerLocator) { - const count = await containerLocator.locator('.instruction-item').count(); - if (count <= 0) { - throw new Error('No instruction items found in the container.'); - } - 
console.log(`✅ Found ${count} instruction items.`); - return count; - } - - async assertAgentReplyVisible(expectedText: RegExp) { - await expect(this.agentMessage.getByText(expectedText)).toBeVisible(); - } - - async assertUserMessageVisible(message: string) { - await expect(this.page.getByText(message)).toBeVisible(); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/langGraphPages/ToolBaseGenUIPage.ts b/typescript-sdk/apps/dojo/e2e/pages/langGraphPages/ToolBaseGenUIPage.ts deleted file mode 100644 index 71ec39310..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/langGraphPages/ToolBaseGenUIPage.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class ToolBaseGenUIPage { - readonly page: Page; - readonly haikuAgentIntro: Locator; - readonly messageBox: Locator; - readonly sendButton: Locator; - readonly applyButton: Locator; - readonly appliedButton: Locator; - readonly haikuBlock: Locator; - readonly japaneseLines: Locator; - - constructor(page: Page) { - this.page = page; - this.haikuAgentIntro = page.getByText("I'm a haiku generator 👋. How can I help you?"); - this.messageBox = page.getByRole('textbox', { name: 'Type a message...' 
}); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.haikuBlock = page.locator('[data-testid="haiku-card"]'); - this.applyButton = page.getByRole('button', { name: 'Apply' }); - this.japaneseLines = page.locator('[data-testid="haiku-line"]'); - } - - async checkGeneratedHaiku() { - await this.page.locator('[data-testid="haiku-card"]').last().isVisible(); - const mostRecentCard = this.page.locator('[data-testid="haiku-card"]').last(); - await mostRecentCard.locator('[data-testid="haiku-line"]').first().waitFor({ state: 'visible', timeout: 10000 }); - } - - async extractChatHaikuContent(page: Page): Promise { - await page.waitForTimeout(3000); - await page.locator('[data-testid="haiku-card"]').first().waitFor({ state: 'visible' }); - const allHaikuCards = page.locator('[data-testid="haiku-card"]'); - const cardCount = await allHaikuCards.count(); - - const expectedCardCount = await this.getExpectedHaikuCount(); - - if (cardCount < expectedCardCount) { - throw new Error(`Expected ${expectedCardCount} haiku cards but found ${cardCount} - haiku generation may have failed`); - } - - const mostRecentCard = allHaikuCards.last(); - const chatHaikuLines = mostRecentCard.locator('[data-testid="haiku-line"]'); - const linesCount = await chatHaikuLines.count(); - - if (linesCount === 0) { - throw new Error('Most recent haiku card has no visible lines - haiku generation failed'); - } - - try { - await chatHaikuLines.first().waitFor({ state: 'visible', timeout: 5000 }); - } catch (error) { - throw new Error('Most recent haiku card lines are not visible - haiku generation failed'); - } - - const count = await chatHaikuLines.count(); - const lines: string[] = []; - - for (let i = 0; i < count; i++) { - const haikuLine = chatHaikuLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - - const chatHaikuContent = lines.join('').replace(/\s/g, ''); - return chatHaikuContent; - } - - private 
haikuGenerationCount = 0; - - async generateHaiku(message: string) { - await this.messageBox.click(); - await this.messageBox.fill(message); - await this.sendButton.click(); - this.haikuGenerationCount++; - } - - async getExpectedHaikuCount(): Promise { - return this.haikuGenerationCount; - } - - async extractMainDisplayHaikuContent(page: Page): Promise { - const mainDisplayLines = page.locator('[data-testid="main-haiku-line"]'); - const mainCount = await mainDisplayLines.count(); - const lines: string[] = []; - - if (mainCount > 0) { - for (let i = 0; i < mainCount; i++) { - const haikuLine = mainDisplayLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - } - - const mainHaikuContent = lines.join('').replace(/\s/g, ''); - return mainHaikuContent; - } - - async checkHaikuDisplay(page: Page): Promise { - const chatHaikuContent = await this.extractChatHaikuContent(page); - - await page.waitForTimeout(5000); - - const mainHaikuContent = await this.extractMainDisplayHaikuContent(page); - - if (mainHaikuContent === '') { - throw new Error('Main display haiku content is empty - haiku was not properly generated or applied'); - } - - if (chatHaikuContent === mainHaikuContent) { - expect(mainHaikuContent).toBe(chatHaikuContent); - } else { - await page.waitForTimeout(3000); - - const updatedMainContent = await this.extractMainDisplayHaikuContent(page); - - if (updatedMainContent === '') { - throw new Error('Main display haiku content is still empty after additional wait - haiku generation failed'); - } - - expect(updatedMainContent).toBe(chatHaikuContent); - } - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/llamaIndexPages/AgenticChatPage.ts b/typescript-sdk/apps/dojo/e2e/pages/llamaIndexPages/AgenticChatPage.ts deleted file mode 100644 index 85be9da44..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/llamaIndexPages/AgenticChatPage.ts +++ /dev/null @@ -1,120 +0,0 @@ 
-import { Page, Locator, expect } from "@playwright/test"; - -export class AgenticChatPage { - readonly page: Page; - readonly openChatButton: Locator; - readonly agentGreeting: Locator; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly chatBackground: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - - constructor(page: Page) { - this.page = page; - this.openChatButton = page.getByRole("button", { - name: /chat/i, - }); - this.agentGreeting = page - .getByText("Hi, I'm an agent. Want to chat?"); - this.chatInput = page - .getByRole("textbox", { name: "Type a message..." }) - .or(page.getByRole("textbox")) - .or(page.locator('input[type="text"]')) - .or(page.locator('textarea')); - this.sendButton = page - .locator('[data-test-id="copilot-chat-ready"]') - .or(page.getByRole("button", { name: /send/i })) - .or(page.locator('button[type="submit"]')); - this.chatBackground = page - .locator('div[style*="background"]') - .or(page.locator('.flex.justify-center.items-center.h-full.w-full')) - .or(page.locator('body')); - this.agentMessage = page - .locator(".copilotKitAssistantMessage"); - this.userMessage = page - .locator(".copilotKitUserMessage"); - } - - async openChat() { - try { - await this.openChatButton.click({ timeout: 3000 }); - } catch (error) { - // Chat might already be open - } - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - try { - await this.sendButton.click(); - } catch (error) { - await this.chatInput.press("Enter"); - } - } - - async getBackground( - property: "backgroundColor" | "backgroundImage" = "backgroundColor" -): Promise { - // Wait a bit for background to apply - await this.page.waitForTimeout(500); - - // Try multiple selectors for the background element - const selectors = [ - 'div[style*="background"]', - 'div[style*="background-color"]', - '.flex.justify-center.items-center.h-full.w-full', - 
'div.flex.justify-center.items-center.h-full.w-full', - '[class*="bg-"]', - 'div[class*="background"]' - ]; - - for (const selector of selectors) { - try { - const element = this.page.locator(selector).first(); - if (await element.isVisible({ timeout: 1000 })) { - const value = await element.evaluate( - (el, prop) => { - // Check inline style first - if (el.style.background) return el.style.background; - if (el.style.backgroundColor) return el.style.backgroundColor; - // Then computed style - return getComputedStyle(el)[prop as any]; - }, - property - ); - if (value && value !== "rgba(0, 0, 0, 0)" && value !== "transparent") { - console.log(`[${selector}] ${property}: ${value}`); - return value; - } - } - } catch (e) { - continue; - } - } - - // Fallback to original element - const value = await this.chatBackground.first().evaluate( - (el, prop) => getComputedStyle(el)[prop as any], - property - ); - console.log(`[Fallback] ${property}: ${value}`); - return value; -} - - async getGradientButtonByName(name: string | RegExp) { - return this.page.getByRole("button", { name }); - } - - async assertUserMessageVisible(text: string | RegExp) { - await expect(this.userMessage.getByText(text)).toBeVisible(); - } - - async assertAgentReplyVisible(expectedText: RegExp) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage", { - hasText: expectedText, - }); - await expect(agentMessage.last()).toBeVisible({ timeout: 10000 }); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/llamaIndexPages/SharedStatePage.ts b/typescript-sdk/apps/dojo/e2e/pages/llamaIndexPages/SharedStatePage.ts deleted file mode 100644 index 807b61bd6..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/llamaIndexPages/SharedStatePage.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class SharedStatePage { - readonly page: Page; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly 
agentGreeting: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - readonly promptResponseLoader: Locator; - readonly ingredientCards: Locator; - readonly instructionsContainer: Locator; - readonly addIngredient: Locator; - - constructor(page: Page) { - this.page = page; - // Remove iframe references and use actual greeting text - this.agentGreeting = page.getByText("Hi 👋 How can I help with your recipe?"); - this.chatInput = page.getByRole('textbox', { name: 'Type a message...' }); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.promptResponseLoader = page.getByRole('button', { name: 'Please Wait...', disabled: true }); - this.instructionsContainer = page.locator('.instructions-container'); - this.addIngredient = page.getByRole('button', { name: '+ Add Ingredient' }); - this.agentMessage = page.locator('.copilotKitAssistantMessage'); - this.userMessage = page.locator('.copilotKitUserMessage'); - } - - async openChat() { - await this.agentGreeting.isVisible(); - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - await this.sendButton.click(); - } - - async loader() { - const timeout = (ms) => new Promise((_, reject) => { - setTimeout(() => reject(new Error("Timeout waiting for promptResponseLoader to become visible")), ms); - }); - - await Promise.race([ - this.promptResponseLoader.isVisible(), - timeout(5000) // 5 seconds timeout - ]); - } - - async getIngredientCard(name) { - return this.page.locator(`.ingredient-card:has(input.ingredient-name-input[value="${name}"])`); - } - - async addNewIngredient(placeholderText) { - this.addIngredient.click(); - this.page.locator(`input[placeholder="${placeholderText}"]`); - } - - async getInstructionItems(containerLocator) { - const count = await containerLocator.locator('.instruction-item').count(); - if (count <= 0) { - throw new Error('No instruction items found in the container.'); - } - 
console.log(`✅ Found ${count} instruction items.`); - return count; - } - - async assertAgentReplyVisible(expectedText: RegExp) { - await expect(this.agentMessage.getByText(expectedText)).toBeVisible(); - } - - async assertUserMessageVisible(message: string) { - await expect(this.page.getByText(message)).toBeVisible(); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/mastraAgentLocalPages/AgenticChatPage.ts b/typescript-sdk/apps/dojo/e2e/pages/mastraAgentLocalPages/AgenticChatPage.ts deleted file mode 100644 index 85be9da44..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/mastraAgentLocalPages/AgenticChatPage.ts +++ /dev/null @@ -1,120 +0,0 @@ -import { Page, Locator, expect } from "@playwright/test"; - -export class AgenticChatPage { - readonly page: Page; - readonly openChatButton: Locator; - readonly agentGreeting: Locator; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly chatBackground: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - - constructor(page: Page) { - this.page = page; - this.openChatButton = page.getByRole("button", { - name: /chat/i, - }); - this.agentGreeting = page - .getByText("Hi, I'm an agent. Want to chat?"); - this.chatInput = page - .getByRole("textbox", { name: "Type a message..." 
}) - .or(page.getByRole("textbox")) - .or(page.locator('input[type="text"]')) - .or(page.locator('textarea')); - this.sendButton = page - .locator('[data-test-id="copilot-chat-ready"]') - .or(page.getByRole("button", { name: /send/i })) - .or(page.locator('button[type="submit"]')); - this.chatBackground = page - .locator('div[style*="background"]') - .or(page.locator('.flex.justify-center.items-center.h-full.w-full')) - .or(page.locator('body')); - this.agentMessage = page - .locator(".copilotKitAssistantMessage"); - this.userMessage = page - .locator(".copilotKitUserMessage"); - } - - async openChat() { - try { - await this.openChatButton.click({ timeout: 3000 }); - } catch (error) { - // Chat might already be open - } - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - try { - await this.sendButton.click(); - } catch (error) { - await this.chatInput.press("Enter"); - } - } - - async getBackground( - property: "backgroundColor" | "backgroundImage" = "backgroundColor" -): Promise { - // Wait a bit for background to apply - await this.page.waitForTimeout(500); - - // Try multiple selectors for the background element - const selectors = [ - 'div[style*="background"]', - 'div[style*="background-color"]', - '.flex.justify-center.items-center.h-full.w-full', - 'div.flex.justify-center.items-center.h-full.w-full', - '[class*="bg-"]', - 'div[class*="background"]' - ]; - - for (const selector of selectors) { - try { - const element = this.page.locator(selector).first(); - if (await element.isVisible({ timeout: 1000 })) { - const value = await element.evaluate( - (el, prop) => { - // Check inline style first - if (el.style.background) return el.style.background; - if (el.style.backgroundColor) return el.style.backgroundColor; - // Then computed style - return getComputedStyle(el)[prop as any]; - }, - property - ); - if (value && value !== "rgba(0, 0, 0, 0)" && value !== "transparent") { - 
console.log(`[${selector}] ${property}: ${value}`); - return value; - } - } - } catch (e) { - continue; - } - } - - // Fallback to original element - const value = await this.chatBackground.first().evaluate( - (el, prop) => getComputedStyle(el)[prop as any], - property - ); - console.log(`[Fallback] ${property}: ${value}`); - return value; -} - - async getGradientButtonByName(name: string | RegExp) { - return this.page.getByRole("button", { name }); - } - - async assertUserMessageVisible(text: string | RegExp) { - await expect(this.userMessage.getByText(text)).toBeVisible(); - } - - async assertAgentReplyVisible(expectedText: RegExp) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage", { - hasText: expectedText, - }); - await expect(agentMessage.last()).toBeVisible({ timeout: 10000 }); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/mastraAgentLocalPages/SharedStatePage.ts b/typescript-sdk/apps/dojo/e2e/pages/mastraAgentLocalPages/SharedStatePage.ts deleted file mode 100644 index 807b61bd6..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/mastraAgentLocalPages/SharedStatePage.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class SharedStatePage { - readonly page: Page; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly agentGreeting: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - readonly promptResponseLoader: Locator; - readonly ingredientCards: Locator; - readonly instructionsContainer: Locator; - readonly addIngredient: Locator; - - constructor(page: Page) { - this.page = page; - // Remove iframe references and use actual greeting text - this.agentGreeting = page.getByText("Hi 👋 How can I help with your recipe?"); - this.chatInput = page.getByRole('textbox', { name: 'Type a message...' 
}); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.promptResponseLoader = page.getByRole('button', { name: 'Please Wait...', disabled: true }); - this.instructionsContainer = page.locator('.instructions-container'); - this.addIngredient = page.getByRole('button', { name: '+ Add Ingredient' }); - this.agentMessage = page.locator('.copilotKitAssistantMessage'); - this.userMessage = page.locator('.copilotKitUserMessage'); - } - - async openChat() { - await this.agentGreeting.isVisible(); - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - await this.sendButton.click(); - } - - async loader() { - const timeout = (ms) => new Promise((_, reject) => { - setTimeout(() => reject(new Error("Timeout waiting for promptResponseLoader to become visible")), ms); - }); - - await Promise.race([ - this.promptResponseLoader.isVisible(), - timeout(5000) // 5 seconds timeout - ]); - } - - async getIngredientCard(name) { - return this.page.locator(`.ingredient-card:has(input.ingredient-name-input[value="${name}"])`); - } - - async addNewIngredient(placeholderText) { - this.addIngredient.click(); - this.page.locator(`input[placeholder="${placeholderText}"]`); - } - - async getInstructionItems(containerLocator) { - const count = await containerLocator.locator('.instruction-item').count(); - if (count <= 0) { - throw new Error('No instruction items found in the container.'); - } - console.log(`✅ Found ${count} instruction items.`); - return count; - } - - async assertAgentReplyVisible(expectedText: RegExp) { - await expect(this.agentMessage.getByText(expectedText)).toBeVisible(); - } - - async assertUserMessageVisible(message: string) { - await expect(this.page.getByText(message)).toBeVisible(); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/mastraAgentLocalPages/ToolBaseGenUIPage.ts 
b/typescript-sdk/apps/dojo/e2e/pages/mastraAgentLocalPages/ToolBaseGenUIPage.ts deleted file mode 100644 index f2d648a5e..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/mastraAgentLocalPages/ToolBaseGenUIPage.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class ToolBaseGenUIPage { - readonly page: Page; - readonly haikuAgentIntro: Locator; - readonly messageBox: Locator; - readonly sendButton: Locator; - readonly applyButton: Locator; - readonly appliedButton: Locator; - readonly haikuBlock: Locator; - readonly japaneseLines: Locator; - - constructor(page: Page) { - this.page = page; - this.haikuAgentIntro = page.getByText("I'm a haiku generator 👋. How can I help you?"); - this.messageBox = page.getByRole('textbox', { name: 'Type a message...' }); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.haikuBlock = page.locator('[data-testid="haiku-card"]'); - this.applyButton = page.getByRole('button', { name: 'Apply' }); - this.japaneseLines = page.locator('[data-testid="haiku-line"]'); - } - - async generateHaiku(message: string) { - await this.messageBox.click(); - await this.messageBox.fill(message); - await this.sendButton.click(); - } - - async checkGeneratedHaiku() { - await this.page.locator('[data-testid="haiku-card"]').last().isVisible(); - const mostRecentCard = this.page.locator('[data-testid="haiku-card"]').last(); - await mostRecentCard.locator('[data-testid="haiku-line"]').first().waitFor({ state: 'visible', timeout: 10000 }); - } - - async extractChatHaikuContent(page: Page): Promise { - await page.waitForTimeout(3000); - await page.locator('[data-testid="haiku-card"]').first().waitFor({ state: 'visible' }); - const allHaikuCards = page.locator('[data-testid="haiku-card"]'); - const cardCount = await allHaikuCards.count(); - let chatHaikuContainer; - let chatHaikuLines; - - for (let cardIndex = cardCount - 1; cardIndex >= 0; cardIndex--) { - chatHaikuContainer = 
allHaikuCards.nth(cardIndex); - chatHaikuLines = chatHaikuContainer.locator('[data-testid="haiku-line"]'); - const linesCount = await chatHaikuLines.count(); - - if (linesCount > 0) { - try { - await chatHaikuLines.first().waitFor({ state: 'visible', timeout: 5000 }); - break; - } catch (error) { - continue; - } - } - } - - if (!chatHaikuLines) { - throw new Error('No haiku cards with visible lines found'); - } - - const count = await chatHaikuLines.count(); - const lines: string[] = []; - - for (let i = 0; i < count; i++) { - const haikuLine = chatHaikuLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - - const chatHaikuContent = lines.join('').replace(/\s/g, ''); - return chatHaikuContent; - } - - async extractMainDisplayHaikuContent(page: Page): Promise { - const mainDisplayLines = page.locator('[data-testid="main-haiku-line"]'); - const mainCount = await mainDisplayLines.count(); - const lines: string[] = []; - - if (mainCount > 0) { - for (let i = 0; i < mainCount; i++) { - const haikuLine = mainDisplayLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - } - - const mainHaikuContent = lines.join('').replace(/\s/g, ''); - return mainHaikuContent; - } - - async checkHaikuDisplay(page: Page): Promise { - const chatHaikuContent = await this.extractChatHaikuContent(page); - - await page.waitForTimeout(5000); - - const mainHaikuContent = await this.extractMainDisplayHaikuContent(page); - - if (mainHaikuContent === '') { - expect(chatHaikuContent.length).toBeGreaterThan(0); - return; - } - - if (chatHaikuContent === mainHaikuContent) { - expect(mainHaikuContent).toBe(chatHaikuContent); - } else { - await page.waitForTimeout(3000); - - const updatedMainContent = await this.extractMainDisplayHaikuContent(page); - - expect(updatedMainContent).toBe(chatHaikuContent); - } - } -} \ No newline at end of file diff --git 
a/typescript-sdk/apps/dojo/e2e/pages/mastraPages/AgenticChatPage.ts b/typescript-sdk/apps/dojo/e2e/pages/mastraPages/AgenticChatPage.ts deleted file mode 100644 index 18fa245b4..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/mastraPages/AgenticChatPage.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { Page, Locator, expect } from "@playwright/test"; - -export class AgenticChatPage { - readonly page: Page; - readonly openChatButton: Locator; - readonly agentGreeting: Locator; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - - constructor(page: Page) { - this.page = page; - this.openChatButton = page.getByRole("button", { - name: /chat/i, - }); - this.agentGreeting = page - .getByText("Hi, I'm an agent. Want to chat?"); - this.chatInput = page - .getByRole("textbox", { name: "Type a message..." }) - .or(page.getByRole("textbox")) - .or(page.locator('input[type="text"]')) - .or(page.locator('textarea')); - this.sendButton = page - .locator('[data-test-id="copilot-chat-ready"]') - .or(page.getByRole("button", { name: /send/i })) - .or(page.locator('button[type="submit"]')); - this.agentMessage = page - .locator(".copilotKitAssistantMessage"); - this.userMessage = page - .locator(".copilotKitUserMessage"); - } - - async openChat() { - try { - await this.openChatButton.click({ timeout: 3000 }); - } catch (error) { - // Chat might already be open - } - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - try { - await this.sendButton.click(); - } catch (error) { - await this.chatInput.press("Enter"); - } - } - - async assertUserMessageVisible(text: string | RegExp) { - await expect(this.userMessage.getByText(text)).toBeVisible(); - } - - async assertAgentReplyVisible(expectedText: RegExp) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage", { - hasText: expectedText, - }); - await 
expect(agentMessage.last()).toBeVisible({ timeout: 10000 }); - } - - async assertWeatherResponseStructure() { - const agentMessage = this.page.locator(".copilotKitAssistantMessage").last(); - - // Check for main weather response structure - await expect(agentMessage).toContainText("The current weather in Islamabad is as follows:", { timeout: 10000 }); - - // Check for temperature information - await expect(agentMessage).toContainText("Temperature:", { timeout: 5000 }); - // Check for humidity - await expect(agentMessage).toContainText("Humidity:", { timeout: 5000 }); - - // Check for wind speed - await expect(agentMessage).toContainText("Wind Speed:", { timeout: 5000 }); - // Check for conditions - await expect(agentMessage).toContainText("Conditions:", { timeout: 5000 }); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/mastraPages/ToolBaseGenUIPage.ts b/typescript-sdk/apps/dojo/e2e/pages/mastraPages/ToolBaseGenUIPage.ts deleted file mode 100644 index f2d648a5e..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/mastraPages/ToolBaseGenUIPage.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class ToolBaseGenUIPage { - readonly page: Page; - readonly haikuAgentIntro: Locator; - readonly messageBox: Locator; - readonly sendButton: Locator; - readonly applyButton: Locator; - readonly appliedButton: Locator; - readonly haikuBlock: Locator; - readonly japaneseLines: Locator; - - constructor(page: Page) { - this.page = page; - this.haikuAgentIntro = page.getByText("I'm a haiku generator 👋. How can I help you?"); - this.messageBox = page.getByRole('textbox', { name: 'Type a message...' 
}); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.haikuBlock = page.locator('[data-testid="haiku-card"]'); - this.applyButton = page.getByRole('button', { name: 'Apply' }); - this.japaneseLines = page.locator('[data-testid="haiku-line"]'); - } - - async generateHaiku(message: string) { - await this.messageBox.click(); - await this.messageBox.fill(message); - await this.sendButton.click(); - } - - async checkGeneratedHaiku() { - await this.page.locator('[data-testid="haiku-card"]').last().isVisible(); - const mostRecentCard = this.page.locator('[data-testid="haiku-card"]').last(); - await mostRecentCard.locator('[data-testid="haiku-line"]').first().waitFor({ state: 'visible', timeout: 10000 }); - } - - async extractChatHaikuContent(page: Page): Promise { - await page.waitForTimeout(3000); - await page.locator('[data-testid="haiku-card"]').first().waitFor({ state: 'visible' }); - const allHaikuCards = page.locator('[data-testid="haiku-card"]'); - const cardCount = await allHaikuCards.count(); - let chatHaikuContainer; - let chatHaikuLines; - - for (let cardIndex = cardCount - 1; cardIndex >= 0; cardIndex--) { - chatHaikuContainer = allHaikuCards.nth(cardIndex); - chatHaikuLines = chatHaikuContainer.locator('[data-testid="haiku-line"]'); - const linesCount = await chatHaikuLines.count(); - - if (linesCount > 0) { - try { - await chatHaikuLines.first().waitFor({ state: 'visible', timeout: 5000 }); - break; - } catch (error) { - continue; - } - } - } - - if (!chatHaikuLines) { - throw new Error('No haiku cards with visible lines found'); - } - - const count = await chatHaikuLines.count(); - const lines: string[] = []; - - for (let i = 0; i < count; i++) { - const haikuLine = chatHaikuLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - - const chatHaikuContent = lines.join('').replace(/\s/g, ''); - return chatHaikuContent; - } - - async 
extractMainDisplayHaikuContent(page: Page): Promise { - const mainDisplayLines = page.locator('[data-testid="main-haiku-line"]'); - const mainCount = await mainDisplayLines.count(); - const lines: string[] = []; - - if (mainCount > 0) { - for (let i = 0; i < mainCount; i++) { - const haikuLine = mainDisplayLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - } - - const mainHaikuContent = lines.join('').replace(/\s/g, ''); - return mainHaikuContent; - } - - async checkHaikuDisplay(page: Page): Promise { - const chatHaikuContent = await this.extractChatHaikuContent(page); - - await page.waitForTimeout(5000); - - const mainHaikuContent = await this.extractMainDisplayHaikuContent(page); - - if (mainHaikuContent === '') { - expect(chatHaikuContent.length).toBeGreaterThan(0); - return; - } - - if (chatHaikuContent === mainHaikuContent) { - expect(mainHaikuContent).toBe(chatHaikuContent); - } else { - await page.waitForTimeout(3000); - - const updatedMainContent = await this.extractMainDisplayHaikuContent(page); - - expect(updatedMainContent).toBe(chatHaikuContent); - } - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/middlewareStarterPages/AgenticChatPage.ts b/typescript-sdk/apps/dojo/e2e/pages/middlewareStarterPages/AgenticChatPage.ts deleted file mode 100644 index 85be9da44..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/middlewareStarterPages/AgenticChatPage.ts +++ /dev/null @@ -1,120 +0,0 @@ -import { Page, Locator, expect } from "@playwright/test"; - -export class AgenticChatPage { - readonly page: Page; - readonly openChatButton: Locator; - readonly agentGreeting: Locator; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly chatBackground: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - - constructor(page: Page) { - this.page = page; - this.openChatButton = page.getByRole("button", { - name: /chat/i, - }); - 
this.agentGreeting = page - .getByText("Hi, I'm an agent. Want to chat?"); - this.chatInput = page - .getByRole("textbox", { name: "Type a message..." }) - .or(page.getByRole("textbox")) - .or(page.locator('input[type="text"]')) - .or(page.locator('textarea')); - this.sendButton = page - .locator('[data-test-id="copilot-chat-ready"]') - .or(page.getByRole("button", { name: /send/i })) - .or(page.locator('button[type="submit"]')); - this.chatBackground = page - .locator('div[style*="background"]') - .or(page.locator('.flex.justify-center.items-center.h-full.w-full')) - .or(page.locator('body')); - this.agentMessage = page - .locator(".copilotKitAssistantMessage"); - this.userMessage = page - .locator(".copilotKitUserMessage"); - } - - async openChat() { - try { - await this.openChatButton.click({ timeout: 3000 }); - } catch (error) { - // Chat might already be open - } - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - try { - await this.sendButton.click(); - } catch (error) { - await this.chatInput.press("Enter"); - } - } - - async getBackground( - property: "backgroundColor" | "backgroundImage" = "backgroundColor" -): Promise { - // Wait a bit for background to apply - await this.page.waitForTimeout(500); - - // Try multiple selectors for the background element - const selectors = [ - 'div[style*="background"]', - 'div[style*="background-color"]', - '.flex.justify-center.items-center.h-full.w-full', - 'div.flex.justify-center.items-center.h-full.w-full', - '[class*="bg-"]', - 'div[class*="background"]' - ]; - - for (const selector of selectors) { - try { - const element = this.page.locator(selector).first(); - if (await element.isVisible({ timeout: 1000 })) { - const value = await element.evaluate( - (el, prop) => { - // Check inline style first - if (el.style.background) return el.style.background; - if (el.style.backgroundColor) return el.style.backgroundColor; - // Then computed style - return 
getComputedStyle(el)[prop as any]; - }, - property - ); - if (value && value !== "rgba(0, 0, 0, 0)" && value !== "transparent") { - console.log(`[${selector}] ${property}: ${value}`); - return value; - } - } - } catch (e) { - continue; - } - } - - // Fallback to original element - const value = await this.chatBackground.first().evaluate( - (el, prop) => getComputedStyle(el)[prop as any], - property - ); - console.log(`[Fallback] ${property}: ${value}`); - return value; -} - - async getGradientButtonByName(name: string | RegExp) { - return this.page.getByRole("button", { name }); - } - - async assertUserMessageVisible(text: string | RegExp) { - await expect(this.userMessage.getByText(text)).toBeVisible(); - } - - async assertAgentReplyVisible(expectedText: RegExp) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage", { - hasText: expectedText, - }); - await expect(agentMessage.last()).toBeVisible({ timeout: 10000 }); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/AgenticChatPage.ts b/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/AgenticChatPage.ts deleted file mode 100644 index 85be9da44..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/AgenticChatPage.ts +++ /dev/null @@ -1,120 +0,0 @@ -import { Page, Locator, expect } from "@playwright/test"; - -export class AgenticChatPage { - readonly page: Page; - readonly openChatButton: Locator; - readonly agentGreeting: Locator; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly chatBackground: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - - constructor(page: Page) { - this.page = page; - this.openChatButton = page.getByRole("button", { - name: /chat/i, - }); - this.agentGreeting = page - .getByText("Hi, I'm an agent. Want to chat?"); - this.chatInput = page - .getByRole("textbox", { name: "Type a message..." 
}) - .or(page.getByRole("textbox")) - .or(page.locator('input[type="text"]')) - .or(page.locator('textarea')); - this.sendButton = page - .locator('[data-test-id="copilot-chat-ready"]') - .or(page.getByRole("button", { name: /send/i })) - .or(page.locator('button[type="submit"]')); - this.chatBackground = page - .locator('div[style*="background"]') - .or(page.locator('.flex.justify-center.items-center.h-full.w-full')) - .or(page.locator('body')); - this.agentMessage = page - .locator(".copilotKitAssistantMessage"); - this.userMessage = page - .locator(".copilotKitUserMessage"); - } - - async openChat() { - try { - await this.openChatButton.click({ timeout: 3000 }); - } catch (error) { - // Chat might already be open - } - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - try { - await this.sendButton.click(); - } catch (error) { - await this.chatInput.press("Enter"); - } - } - - async getBackground( - property: "backgroundColor" | "backgroundImage" = "backgroundColor" -): Promise { - // Wait a bit for background to apply - await this.page.waitForTimeout(500); - - // Try multiple selectors for the background element - const selectors = [ - 'div[style*="background"]', - 'div[style*="background-color"]', - '.flex.justify-center.items-center.h-full.w-full', - 'div.flex.justify-center.items-center.h-full.w-full', - '[class*="bg-"]', - 'div[class*="background"]' - ]; - - for (const selector of selectors) { - try { - const element = this.page.locator(selector).first(); - if (await element.isVisible({ timeout: 1000 })) { - const value = await element.evaluate( - (el, prop) => { - // Check inline style first - if (el.style.background) return el.style.background; - if (el.style.backgroundColor) return el.style.backgroundColor; - // Then computed style - return getComputedStyle(el)[prop as any]; - }, - property - ); - if (value && value !== "rgba(0, 0, 0, 0)" && value !== "transparent") { - 
console.log(`[${selector}] ${property}: ${value}`); - return value; - } - } - } catch (e) { - continue; - } - } - - // Fallback to original element - const value = await this.chatBackground.first().evaluate( - (el, prop) => getComputedStyle(el)[prop as any], - property - ); - console.log(`[Fallback] ${property}: ${value}`); - return value; -} - - async getGradientButtonByName(name: string | RegExp) { - return this.page.getByRole("button", { name }); - } - - async assertUserMessageVisible(text: string | RegExp) { - await expect(this.userMessage.getByText(text)).toBeVisible(); - } - - async assertAgentReplyVisible(expectedText: RegExp) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage", { - hasText: expectedText, - }); - await expect(agentMessage.last()).toBeVisible({ timeout: 10000 }); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/PredictiveStateUpdatesPage.ts b/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/PredictiveStateUpdatesPage.ts index b7dcd9c3f..005bee3a7 100644 --- a/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/PredictiveStateUpdatesPage.ts +++ b/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/PredictiveStateUpdatesPage.ts @@ -30,7 +30,7 @@ export class PredictiveStateUpdatesPage { this.userMessage = page.locator('.copilotKitUserMessage'); } - async openChat() { + async openChat() { await this.agentGreeting.isVisible(); } @@ -54,13 +54,13 @@ export class PredictiveStateUpdatesPage { } async getUserApproval() { - await this.userApprovalModal.isVisible(); + await this.userApprovalModal.last().isVisible(); await this.getButton(this.page, "Confirm"); - const acceptedLabel = this.userApprovalModal.locator('text=✓ Accepted'); + const acceptedLabel = this.userApprovalModal.last().locator('text=✓ Accepted'); } async getUserRejection() { - await this.userApprovalModal.isVisible(); + await this.userApprovalModal.last().isVisible(); await this.getButton(this.page, "Reject"); const 
rejectedLabel = await this.getStatusLabelOfButton(this.page, "✕ Rejected"); await rejectedLabel.isVisible(); @@ -85,7 +85,7 @@ export class PredictiveStateUpdatesPage { 'div.tiptap em', 'div.tiptap s' ]; - + let count = 0; for (const selector of highlightSelectors) { count = await this.page.locator(selector).count(); @@ -93,7 +93,7 @@ export class PredictiveStateUpdatesPage { break; } } - + if (count > 0) { expect(count).toBeGreaterThan(0); } else { diff --git a/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/SharedStatePage.ts b/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/SharedStatePage.ts deleted file mode 100644 index 807b61bd6..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/SharedStatePage.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class SharedStatePage { - readonly page: Page; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly agentGreeting: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - readonly promptResponseLoader: Locator; - readonly ingredientCards: Locator; - readonly instructionsContainer: Locator; - readonly addIngredient: Locator; - - constructor(page: Page) { - this.page = page; - // Remove iframe references and use actual greeting text - this.agentGreeting = page.getByText("Hi 👋 How can I help with your recipe?"); - this.chatInput = page.getByRole('textbox', { name: 'Type a message...' 
}); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.promptResponseLoader = page.getByRole('button', { name: 'Please Wait...', disabled: true }); - this.instructionsContainer = page.locator('.instructions-container'); - this.addIngredient = page.getByRole('button', { name: '+ Add Ingredient' }); - this.agentMessage = page.locator('.copilotKitAssistantMessage'); - this.userMessage = page.locator('.copilotKitUserMessage'); - } - - async openChat() { - await this.agentGreeting.isVisible(); - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - await this.sendButton.click(); - } - - async loader() { - const timeout = (ms) => new Promise((_, reject) => { - setTimeout(() => reject(new Error("Timeout waiting for promptResponseLoader to become visible")), ms); - }); - - await Promise.race([ - this.promptResponseLoader.isVisible(), - timeout(5000) // 5 seconds timeout - ]); - } - - async getIngredientCard(name) { - return this.page.locator(`.ingredient-card:has(input.ingredient-name-input[value="${name}"])`); - } - - async addNewIngredient(placeholderText) { - this.addIngredient.click(); - this.page.locator(`input[placeholder="${placeholderText}"]`); - } - - async getInstructionItems(containerLocator) { - const count = await containerLocator.locator('.instruction-item').count(); - if (count <= 0) { - throw new Error('No instruction items found in the container.'); - } - console.log(`✅ Found ${count} instruction items.`); - return count; - } - - async assertAgentReplyVisible(expectedText: RegExp) { - await expect(this.agentMessage.getByText(expectedText)).toBeVisible(); - } - - async assertUserMessageVisible(message: string) { - await expect(this.page.getByText(message)).toBeVisible(); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/ToolBaseGenUIPage.ts b/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/ToolBaseGenUIPage.ts deleted 
file mode 100644 index f2d648a5e..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/pydanticAIPages/ToolBaseGenUIPage.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class ToolBaseGenUIPage { - readonly page: Page; - readonly haikuAgentIntro: Locator; - readonly messageBox: Locator; - readonly sendButton: Locator; - readonly applyButton: Locator; - readonly appliedButton: Locator; - readonly haikuBlock: Locator; - readonly japaneseLines: Locator; - - constructor(page: Page) { - this.page = page; - this.haikuAgentIntro = page.getByText("I'm a haiku generator 👋. How can I help you?"); - this.messageBox = page.getByRole('textbox', { name: 'Type a message...' }); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.haikuBlock = page.locator('[data-testid="haiku-card"]'); - this.applyButton = page.getByRole('button', { name: 'Apply' }); - this.japaneseLines = page.locator('[data-testid="haiku-line"]'); - } - - async generateHaiku(message: string) { - await this.messageBox.click(); - await this.messageBox.fill(message); - await this.sendButton.click(); - } - - async checkGeneratedHaiku() { - await this.page.locator('[data-testid="haiku-card"]').last().isVisible(); - const mostRecentCard = this.page.locator('[data-testid="haiku-card"]').last(); - await mostRecentCard.locator('[data-testid="haiku-line"]').first().waitFor({ state: 'visible', timeout: 10000 }); - } - - async extractChatHaikuContent(page: Page): Promise { - await page.waitForTimeout(3000); - await page.locator('[data-testid="haiku-card"]').first().waitFor({ state: 'visible' }); - const allHaikuCards = page.locator('[data-testid="haiku-card"]'); - const cardCount = await allHaikuCards.count(); - let chatHaikuContainer; - let chatHaikuLines; - - for (let cardIndex = cardCount - 1; cardIndex >= 0; cardIndex--) { - chatHaikuContainer = allHaikuCards.nth(cardIndex); - chatHaikuLines = 
chatHaikuContainer.locator('[data-testid="haiku-line"]'); - const linesCount = await chatHaikuLines.count(); - - if (linesCount > 0) { - try { - await chatHaikuLines.first().waitFor({ state: 'visible', timeout: 5000 }); - break; - } catch (error) { - continue; - } - } - } - - if (!chatHaikuLines) { - throw new Error('No haiku cards with visible lines found'); - } - - const count = await chatHaikuLines.count(); - const lines: string[] = []; - - for (let i = 0; i < count; i++) { - const haikuLine = chatHaikuLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - - const chatHaikuContent = lines.join('').replace(/\s/g, ''); - return chatHaikuContent; - } - - async extractMainDisplayHaikuContent(page: Page): Promise { - const mainDisplayLines = page.locator('[data-testid="main-haiku-line"]'); - const mainCount = await mainDisplayLines.count(); - const lines: string[] = []; - - if (mainCount > 0) { - for (let i = 0; i < mainCount; i++) { - const haikuLine = mainDisplayLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - } - - const mainHaikuContent = lines.join('').replace(/\s/g, ''); - return mainHaikuContent; - } - - async checkHaikuDisplay(page: Page): Promise { - const chatHaikuContent = await this.extractChatHaikuContent(page); - - await page.waitForTimeout(5000); - - const mainHaikuContent = await this.extractMainDisplayHaikuContent(page); - - if (mainHaikuContent === '') { - expect(chatHaikuContent.length).toBeGreaterThan(0); - return; - } - - if (chatHaikuContent === mainHaikuContent) { - expect(mainHaikuContent).toBe(chatHaikuContent); - } else { - await page.waitForTimeout(3000); - - const updatedMainContent = await this.extractMainDisplayHaikuContent(page); - - expect(updatedMainContent).toBe(chatHaikuContent); - } - } -} \ No newline at end of file diff --git 
a/typescript-sdk/apps/dojo/e2e/pages/serverStarterAllFeaturesPages/AgenticChatPage.ts b/typescript-sdk/apps/dojo/e2e/pages/serverStarterAllFeaturesPages/AgenticChatPage.ts deleted file mode 100644 index 85be9da44..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/serverStarterAllFeaturesPages/AgenticChatPage.ts +++ /dev/null @@ -1,120 +0,0 @@ -import { Page, Locator, expect } from "@playwright/test"; - -export class AgenticChatPage { - readonly page: Page; - readonly openChatButton: Locator; - readonly agentGreeting: Locator; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly chatBackground: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - - constructor(page: Page) { - this.page = page; - this.openChatButton = page.getByRole("button", { - name: /chat/i, - }); - this.agentGreeting = page - .getByText("Hi, I'm an agent. Want to chat?"); - this.chatInput = page - .getByRole("textbox", { name: "Type a message..." }) - .or(page.getByRole("textbox")) - .or(page.locator('input[type="text"]')) - .or(page.locator('textarea')); - this.sendButton = page - .locator('[data-test-id="copilot-chat-ready"]') - .or(page.getByRole("button", { name: /send/i })) - .or(page.locator('button[type="submit"]')); - this.chatBackground = page - .locator('div[style*="background"]') - .or(page.locator('.flex.justify-center.items-center.h-full.w-full')) - .or(page.locator('body')); - this.agentMessage = page - .locator(".copilotKitAssistantMessage"); - this.userMessage = page - .locator(".copilotKitUserMessage"); - } - - async openChat() { - try { - await this.openChatButton.click({ timeout: 3000 }); - } catch (error) { - // Chat might already be open - } - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - try { - await this.sendButton.click(); - } catch (error) { - await this.chatInput.press("Enter"); - } - } - - async getBackground( - property: "backgroundColor" | 
"backgroundImage" = "backgroundColor" -): Promise { - // Wait a bit for background to apply - await this.page.waitForTimeout(500); - - // Try multiple selectors for the background element - const selectors = [ - 'div[style*="background"]', - 'div[style*="background-color"]', - '.flex.justify-center.items-center.h-full.w-full', - 'div.flex.justify-center.items-center.h-full.w-full', - '[class*="bg-"]', - 'div[class*="background"]' - ]; - - for (const selector of selectors) { - try { - const element = this.page.locator(selector).first(); - if (await element.isVisible({ timeout: 1000 })) { - const value = await element.evaluate( - (el, prop) => { - // Check inline style first - if (el.style.background) return el.style.background; - if (el.style.backgroundColor) return el.style.backgroundColor; - // Then computed style - return getComputedStyle(el)[prop as any]; - }, - property - ); - if (value && value !== "rgba(0, 0, 0, 0)" && value !== "transparent") { - console.log(`[${selector}] ${property}: ${value}`); - return value; - } - } - } catch (e) { - continue; - } - } - - // Fallback to original element - const value = await this.chatBackground.first().evaluate( - (el, prop) => getComputedStyle(el)[prop as any], - property - ); - console.log(`[Fallback] ${property}: ${value}`); - return value; -} - - async getGradientButtonByName(name: string | RegExp) { - return this.page.getByRole("button", { name }); - } - - async assertUserMessageVisible(text: string | RegExp) { - await expect(this.userMessage.getByText(text)).toBeVisible(); - } - - async assertAgentReplyVisible(expectedText: RegExp) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage", { - hasText: expectedText, - }); - await expect(agentMessage.last()).toBeVisible({ timeout: 10000 }); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/serverStarterAllFeaturesPages/SharedStatePage.ts 
b/typescript-sdk/apps/dojo/e2e/pages/serverStarterAllFeaturesPages/SharedStatePage.ts deleted file mode 100644 index 807b61bd6..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/serverStarterAllFeaturesPages/SharedStatePage.ts +++ /dev/null @@ -1,74 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class SharedStatePage { - readonly page: Page; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly agentGreeting: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - readonly promptResponseLoader: Locator; - readonly ingredientCards: Locator; - readonly instructionsContainer: Locator; - readonly addIngredient: Locator; - - constructor(page: Page) { - this.page = page; - // Remove iframe references and use actual greeting text - this.agentGreeting = page.getByText("Hi 👋 How can I help with your recipe?"); - this.chatInput = page.getByRole('textbox', { name: 'Type a message...' }); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.promptResponseLoader = page.getByRole('button', { name: 'Please Wait...', disabled: true }); - this.instructionsContainer = page.locator('.instructions-container'); - this.addIngredient = page.getByRole('button', { name: '+ Add Ingredient' }); - this.agentMessage = page.locator('.copilotKitAssistantMessage'); - this.userMessage = page.locator('.copilotKitUserMessage'); - } - - async openChat() { - await this.agentGreeting.isVisible(); - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - await this.sendButton.click(); - } - - async loader() { - const timeout = (ms) => new Promise((_, reject) => { - setTimeout(() => reject(new Error("Timeout waiting for promptResponseLoader to become visible")), ms); - }); - - await Promise.race([ - this.promptResponseLoader.isVisible(), - timeout(5000) // 5 seconds timeout - ]); - } - - async getIngredientCard(name) { - return 
this.page.locator(`.ingredient-card:has(input.ingredient-name-input[value="${name}"])`); - } - - async addNewIngredient(placeholderText) { - this.addIngredient.click(); - this.page.locator(`input[placeholder="${placeholderText}"]`); - } - - async getInstructionItems(containerLocator) { - const count = await containerLocator.locator('.instruction-item').count(); - if (count <= 0) { - throw new Error('No instruction items found in the container.'); - } - console.log(`✅ Found ${count} instruction items.`); - return count; - } - - async assertAgentReplyVisible(expectedText: RegExp) { - await expect(this.agentMessage.getByText(expectedText)).toBeVisible(); - } - - async assertUserMessageVisible(message: string) { - await expect(this.page.getByText(message)).toBeVisible(); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/serverStarterAllFeaturesPages/ToolBaseGenUIPage.ts b/typescript-sdk/apps/dojo/e2e/pages/serverStarterAllFeaturesPages/ToolBaseGenUIPage.ts deleted file mode 100644 index a5c452e24..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/serverStarterAllFeaturesPages/ToolBaseGenUIPage.ts +++ /dev/null @@ -1,116 +0,0 @@ -import { Page, Locator, expect } from '@playwright/test'; - -export class ToolBaseGenUIPage { - readonly page: Page; - readonly haikuAgentIntro: Locator; - readonly messageBox: Locator; - readonly sendButton: Locator; - readonly applyButton: Locator; - readonly appliedButton: Locator; - readonly haikuBlock: Locator; - readonly japaneseLines: Locator; - private haikuGenerationCount = 0; - - constructor(page: Page) { - this.page = page; - this.haikuAgentIntro = page.getByText("I'm a haiku generator 👋. How can I help you?"); - this.messageBox = page.getByRole('textbox', { name: 'Type a message...' 
}); - this.sendButton = page.locator('[data-test-id="copilot-chat-ready"]'); - this.haikuBlock = page.locator('[data-testid="haiku-card"]'); - this.applyButton = page.getByRole('button', { name: 'Apply' }); - this.japaneseLines = page.locator('[data-testid="haiku-line"]'); - } - - async generateHaiku(message: string) { - await this.messageBox.click(); - await this.messageBox.fill(message); - await this.sendButton.click(); - this.haikuGenerationCount++; - } - - async checkGeneratedHaiku() { - const cardWithContent = this.page.locator('[data-testid="haiku-card"]:has([data-testid="haiku-line"])').last(); - await cardWithContent.waitFor({ state: 'visible', timeout: 10000 }); - await cardWithContent.locator('[data-testid="haiku-line"]').first().waitFor({ state: 'visible', timeout: 10000 }); - } - - async extractChatHaikuContent(page: Page): Promise { - await page.waitForTimeout(3000); - await page.locator('[data-testid="haiku-card"]').first().waitFor({ state: 'visible' }); - const allHaikuCards = page.locator('[data-testid="haiku-card"]'); - const cardCount = await allHaikuCards.count(); - let chatHaikuContainer; - let chatHaikuLines; - - for (let cardIndex = cardCount - 1; cardIndex >= 0; cardIndex--) { - chatHaikuContainer = allHaikuCards.nth(cardIndex); - chatHaikuLines = chatHaikuContainer.locator('[data-testid="haiku-line"]'); - const linesCount = await chatHaikuLines.count(); - - if (linesCount > 0) { - try { - await chatHaikuLines.first().waitFor({ state: 'visible', timeout: 5000 }); - break; - } catch (error) { - continue; - } - } - } - - if (!chatHaikuLines) { - throw new Error('No haiku cards with visible lines found'); - } - - const count = await chatHaikuLines.count(); - const lines: string[] = []; - - for (let i = 0; i < count; i++) { - const haikuLine = chatHaikuLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - - const chatHaikuContent = lines.join('').replace(/\s/g, ''); - return 
chatHaikuContent; - } - - async extractMainDisplayHaikuContent(page: Page): Promise { - const mainDisplayLines = page.locator('[data-testid="main-haiku-line"]'); - const mainCount = await mainDisplayLines.count(); - const lines: string[] = []; - - if (mainCount > 0) { - for (let i = 0; i < mainCount; i++) { - const haikuLine = mainDisplayLines.nth(i); - const japaneseText = await haikuLine.locator('p').first().innerText(); - lines.push(japaneseText); - } - } - - const mainHaikuContent = lines.join('').replace(/\s/g, ''); - return mainHaikuContent; - } - - async checkHaikuDisplay(page: Page): Promise { - const chatHaikuContent = await this.extractChatHaikuContent(page); - - await page.waitForTimeout(5000); - - const mainHaikuContent = await this.extractMainDisplayHaikuContent(page); - - if (mainHaikuContent === '') { - expect(chatHaikuContent.length).toBeGreaterThan(0); - return; - } - - if (chatHaikuContent === mainHaikuContent) { - expect(mainHaikuContent).toBe(chatHaikuContent); - } else { - await page.waitForTimeout(3000); - - const updatedMainContent = await this.extractMainDisplayHaikuContent(page); - - expect(updatedMainContent).toBe(chatHaikuContent); - } - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/serverStarterPages/AgenticChatPage.ts b/typescript-sdk/apps/dojo/e2e/pages/serverStarterPages/AgenticChatPage.ts deleted file mode 100644 index 85be9da44..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/serverStarterPages/AgenticChatPage.ts +++ /dev/null @@ -1,120 +0,0 @@ -import { Page, Locator, expect } from "@playwright/test"; - -export class AgenticChatPage { - readonly page: Page; - readonly openChatButton: Locator; - readonly agentGreeting: Locator; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly chatBackground: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - - constructor(page: Page) { - this.page = page; - this.openChatButton = page.getByRole("button", { - 
name: /chat/i, - }); - this.agentGreeting = page - .getByText("Hi, I'm an agent. Want to chat?"); - this.chatInput = page - .getByRole("textbox", { name: "Type a message..." }) - .or(page.getByRole("textbox")) - .or(page.locator('input[type="text"]')) - .or(page.locator('textarea')); - this.sendButton = page - .locator('[data-test-id="copilot-chat-ready"]') - .or(page.getByRole("button", { name: /send/i })) - .or(page.locator('button[type="submit"]')); - this.chatBackground = page - .locator('div[style*="background"]') - .or(page.locator('.flex.justify-center.items-center.h-full.w-full')) - .or(page.locator('body')); - this.agentMessage = page - .locator(".copilotKitAssistantMessage"); - this.userMessage = page - .locator(".copilotKitUserMessage"); - } - - async openChat() { - try { - await this.openChatButton.click({ timeout: 3000 }); - } catch (error) { - // Chat might already be open - } - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - try { - await this.sendButton.click(); - } catch (error) { - await this.chatInput.press("Enter"); - } - } - - async getBackground( - property: "backgroundColor" | "backgroundImage" = "backgroundColor" -): Promise { - // Wait a bit for background to apply - await this.page.waitForTimeout(500); - - // Try multiple selectors for the background element - const selectors = [ - 'div[style*="background"]', - 'div[style*="background-color"]', - '.flex.justify-center.items-center.h-full.w-full', - 'div.flex.justify-center.items-center.h-full.w-full', - '[class*="bg-"]', - 'div[class*="background"]' - ]; - - for (const selector of selectors) { - try { - const element = this.page.locator(selector).first(); - if (await element.isVisible({ timeout: 1000 })) { - const value = await element.evaluate( - (el, prop) => { - // Check inline style first - if (el.style.background) return el.style.background; - if (el.style.backgroundColor) return el.style.backgroundColor; - // Then 
computed style - return getComputedStyle(el)[prop as any]; - }, - property - ); - if (value && value !== "rgba(0, 0, 0, 0)" && value !== "transparent") { - console.log(`[${selector}] ${property}: ${value}`); - return value; - } - } - } catch (e) { - continue; - } - } - - // Fallback to original element - const value = await this.chatBackground.first().evaluate( - (el, prop) => getComputedStyle(el)[prop as any], - property - ); - console.log(`[Fallback] ${property}: ${value}`); - return value; -} - - async getGradientButtonByName(name: string | RegExp) { - return this.page.getByRole("button", { name }); - } - - async assertUserMessageVisible(text: string | RegExp) { - await expect(this.userMessage.getByText(text)).toBeVisible(); - } - - async assertAgentReplyVisible(expectedText: RegExp) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage", { - hasText: expectedText, - }); - await expect(agentMessage.last()).toBeVisible({ timeout: 10000 }); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/pages/vercelAISdkPages/AgenticChatPage.ts b/typescript-sdk/apps/dojo/e2e/pages/vercelAISdkPages/AgenticChatPage.ts deleted file mode 100644 index 85be9da44..000000000 --- a/typescript-sdk/apps/dojo/e2e/pages/vercelAISdkPages/AgenticChatPage.ts +++ /dev/null @@ -1,120 +0,0 @@ -import { Page, Locator, expect } from "@playwright/test"; - -export class AgenticChatPage { - readonly page: Page; - readonly openChatButton: Locator; - readonly agentGreeting: Locator; - readonly chatInput: Locator; - readonly sendButton: Locator; - readonly chatBackground: Locator; - readonly agentMessage: Locator; - readonly userMessage: Locator; - - constructor(page: Page) { - this.page = page; - this.openChatButton = page.getByRole("button", { - name: /chat/i, - }); - this.agentGreeting = page - .getByText("Hi, I'm an agent. Want to chat?"); - this.chatInput = page - .getByRole("textbox", { name: "Type a message..." 
}) - .or(page.getByRole("textbox")) - .or(page.locator('input[type="text"]')) - .or(page.locator('textarea')); - this.sendButton = page - .locator('[data-test-id="copilot-chat-ready"]') - .or(page.getByRole("button", { name: /send/i })) - .or(page.locator('button[type="submit"]')); - this.chatBackground = page - .locator('div[style*="background"]') - .or(page.locator('.flex.justify-center.items-center.h-full.w-full')) - .or(page.locator('body')); - this.agentMessage = page - .locator(".copilotKitAssistantMessage"); - this.userMessage = page - .locator(".copilotKitUserMessage"); - } - - async openChat() { - try { - await this.openChatButton.click({ timeout: 3000 }); - } catch (error) { - // Chat might already be open - } - } - - async sendMessage(message: string) { - await this.chatInput.click(); - await this.chatInput.fill(message); - try { - await this.sendButton.click(); - } catch (error) { - await this.chatInput.press("Enter"); - } - } - - async getBackground( - property: "backgroundColor" | "backgroundImage" = "backgroundColor" -): Promise { - // Wait a bit for background to apply - await this.page.waitForTimeout(500); - - // Try multiple selectors for the background element - const selectors = [ - 'div[style*="background"]', - 'div[style*="background-color"]', - '.flex.justify-center.items-center.h-full.w-full', - 'div.flex.justify-center.items-center.h-full.w-full', - '[class*="bg-"]', - 'div[class*="background"]' - ]; - - for (const selector of selectors) { - try { - const element = this.page.locator(selector).first(); - if (await element.isVisible({ timeout: 1000 })) { - const value = await element.evaluate( - (el, prop) => { - // Check inline style first - if (el.style.background) return el.style.background; - if (el.style.backgroundColor) return el.style.backgroundColor; - // Then computed style - return getComputedStyle(el)[prop as any]; - }, - property - ); - if (value && value !== "rgba(0, 0, 0, 0)" && value !== "transparent") { - 
console.log(`[${selector}] ${property}: ${value}`); - return value; - } - } - } catch (e) { - continue; - } - } - - // Fallback to original element - const value = await this.chatBackground.first().evaluate( - (el, prop) => getComputedStyle(el)[prop as any], - property - ); - console.log(`[Fallback] ${property}: ${value}`); - return value; -} - - async getGradientButtonByName(name: string | RegExp) { - return this.page.getByRole("button", { name }); - } - - async assertUserMessageVisible(text: string | RegExp) { - await expect(this.userMessage.getByText(text)).toBeVisible(); - } - - async assertAgentReplyVisible(expectedText: RegExp) { - const agentMessage = this.page.locator(".copilotKitAssistantMessage", { - hasText: expectedText, - }); - await expect(agentMessage.last()).toBeVisible({ timeout: 10000 }); - } -} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/playwright.config.ts b/typescript-sdk/apps/dojo/e2e/playwright.config.ts index ccd077d20..7571ff1e2 100644 --- a/typescript-sdk/apps/dojo/e2e/playwright.config.ts +++ b/typescript-sdk/apps/dojo/e2e/playwright.config.ts @@ -46,8 +46,10 @@ function getBaseUrl(): string { export default defineConfig({ timeout: process.env.CI ? 300_000 : 120_000, // 5min in CI, 2min locally for AI tests testDir: "./tests", - retries: process.env.CI ? 1 : 0, // More retries for flaky AI tests in CI, 0 for local - fullyParallel: true, + retries: process.env.CI ? 3 : 0, // More retries for flaky AI tests in CI, 0 for local + // Make this sequential for now to avoid race conditions + workers: process.env.CI ? 1 : undefined, + fullyParallel: process.env.CI ? 
false : true, use: { headless: true, viewport: { width: 1280, height: 720 }, @@ -64,7 +66,7 @@ export default defineConfig({ baseURL: getBaseUrl(), }, expect: { - timeout: 90_000, // 1.5 minutes for AI-generated content to appear + timeout: 120_000, // 2 minutes for AI-generated content to appear }, // Test isolation between each test projects: [ diff --git a/typescript-sdk/apps/dojo/e2e/tests/agnoTests/agenticChatPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/agnoTests/agenticChatPage.spec.ts index bd84be4b3..c91995c25 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/agnoTests/agenticChatPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/agnoTests/agenticChatPage.spec.ts @@ -4,7 +4,9 @@ import { waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { AgenticChatPage } from "../../pages/agnoPages/AgenticChatPage"; +import { AgenticChatPage } from "../../featurePages/AgenticChatPage"; + +const appleAsk = "What is the current stock price of AAPL? Please respond in the format of 'The current stock price of Apple Inc. 
(AAPL) is {{price}}'" test("[Agno] Agentic Chat sends and receives a greeting message", async ({ page, @@ -40,8 +42,8 @@ test("[Agno] Agentic Chat provides stock price information", async ({ await chat.agentGreeting.waitFor({ state: "visible" }); // Ask for AAPL stock price - await chat.sendMessage("What is the current stock price of AAPL"); - await chat.assertUserMessageVisible("What is the current stock price of AAPL"); + await chat.sendMessage(appleAsk); + await chat.assertUserMessageVisible(appleAsk); await waitForAIResponse(page); // Check if the response contains the expected stock price information @@ -63,8 +65,8 @@ test("[Agno] Agentic Chat retains memory of previous questions", async ({ // First question await chat.sendMessage("Hi"); - await chat.sendMessage("What is the current stock price of AAPL"); - await chat.assertUserMessageVisible("What is the current stock price of AAPL"); + await chat.sendMessage(appleAsk); + await chat.assertUserMessageVisible(appleAsk); await waitForAIResponse(page); await chat.assertAgentReplyContains("The current stock price of Apple Inc. 
(AAPL) is"); diff --git a/typescript-sdk/apps/dojo/e2e/tests/agnoTests/toolBasedGenUIPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/agnoTests/toolBasedGenUIPage.spec.ts index 27078ba32..10380d93a 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/agnoTests/toolBasedGenUIPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/agnoTests/toolBasedGenUIPage.spec.ts @@ -1,10 +1,10 @@ import { test, expect } from "@playwright/test"; -import { ToolBaseGenUIPage } from "../../pages/agnoPages/ToolBaseGenUIPage"; +import { ToolBaseGenUIPage } from "../../featurePages/ToolBaseGenUIPage"; const pageURL = "/agno/feature/tool_based_generative_ui"; -test.fixme('[Agno] Haiku generation and display verification', async ({ +test('[Agno] Haiku generation and display verification', async ({ page, }) => { await page.goto(pageURL); @@ -17,7 +17,7 @@ test.fixme('[Agno] Haiku generation and display verification', async ({ await genAIAgent.checkHaikuDisplay(page); }); -test.fixme('[Agno] Haiku generation and UI consistency for two different prompts', async ({ +test('[Agno] Haiku generation and UI consistency for two different prompts', async ({ page, }) => { await page.goto(pageURL); diff --git a/typescript-sdk/apps/dojo/e2e/tests/crewAITests/agenticChatPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/crewAITests/agenticChatPage.spec.ts index fedc8075a..cb6820d30 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/crewAITests/agenticChatPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/crewAITests/agenticChatPage.spec.ts @@ -4,7 +4,7 @@ import { waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { AgenticChatPage } from "../../pages/crewAIPages/AgenticChatPage"; +import { AgenticChatPage } from "../../featurePages/AgenticChatPage"; test("[CrewAI] Agentic Chat sends and receives a message", async ({ page, @@ -26,7 +26,7 @@ test("[CrewAI] Agentic Chat sends and receives a message", async ({ }); }); -test.fixme("[CrewAI] Agentic Chat changes background on 
message and reset", async ({ +test("[CrewAI] Agentic Chat changes background on message and reset", async ({ page, }) => { await retryOnAIFailure(async () => { diff --git a/typescript-sdk/apps/dojo/e2e/tests/crewAITests/agenticGenUI.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/crewAITests/agenticGenUI.spec.ts index 7b29ff884..19296c236 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/crewAITests/agenticGenUI.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/crewAITests/agenticGenUI.spec.ts @@ -2,7 +2,8 @@ import { test, expect } from "@playwright/test"; import { AgenticGenUIPage } from "../../pages/crewAIPages/AgenticUIGenPage"; test.describe("Agent Generative UI Feature", () => { - test.fixme("[CrewAI] should interact with the chat to get a planner on prompt", async ({ + // Flaky + test("[CrewAI] should interact with the chat to get a planner on prompt", async ({ page, }) => { const genUIAgent = new AgenticGenUIPage(page); @@ -33,7 +34,8 @@ test.describe("Agent Generative UI Feature", () => { ); }); - test.fixme("[CrewAI] should interact with the chat using predefined prompts and perform steps", async ({ + // Flaky + test("[CrewAI] should interact with the chat using predefined prompts and perform steps", async ({ page, }) => { const genUIAgent = new AgenticGenUIPage(page); diff --git a/typescript-sdk/apps/dojo/e2e/tests/crewAITests/predictvieStateUpdatePage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/crewAITests/predictvieStateUpdatePage.spec.ts deleted file mode 100644 index e676e5043..000000000 --- a/typescript-sdk/apps/dojo/e2e/tests/crewAITests/predictvieStateUpdatePage.spec.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { - test, - expect, - waitForAIResponse, - retryOnAIFailure, -} from "../../test-isolation-helper"; -import { PredictiveStateUpdatesPage } from "../../pages/crewAIPages/PredictiveStateUpdatesPage"; - -test.describe("Predictive Status Updates Feature", () => { - test.fixme("[CrewAI] should interact with agent and approve asked changes", async ({ - 
page, - }) => { - await retryOnAIFailure(async () => { - const predictiveStateUpdates = new PredictiveStateUpdatesPage(page); - - // Update URL to new domain - await page.goto( - "/crewai/feature/predictive_state_updates" - ); - - await predictiveStateUpdates.openChat(); - await predictiveStateUpdates.sendMessage( - "Give me a story for a dragon called Atlantis in document" - ); - await waitForAIResponse(page); - await predictiveStateUpdates.getPredictiveResponse(); - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.isVisible(); - const dragonName = await predictiveStateUpdates.verifyAgentResponse( - "Atlantis" - ); - expect(dragonName).not.toBeNull(); - - // Send update to change the dragon name - await predictiveStateUpdates.sendMessage("Change dragon name to Lola"); - await waitForAIResponse(page); - await predictiveStateUpdates.verifyHighlightedText(); - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.nth(1).isVisible(); - const dragonNameNew = await predictiveStateUpdates.verifyAgentResponse( - "Lola" - ); - expect(dragonNameNew).not.toBe(dragonName); - }); - }); - - test.fixme("[CrewAI] should interact with agent and reject asked changes", async ({ - page, - }) => { - await retryOnAIFailure(async () => { - const predictiveStateUpdates = new PredictiveStateUpdatesPage(page); - - // Update URL to new domain - await page.goto( - "/crewai/feature/predictive_state_updates" - ); - - await predictiveStateUpdates.openChat(); - - await predictiveStateUpdates.sendMessage( - "Give me a story for a dragon called called Atlantis in document" - ); - await predictiveStateUpdates.getPredictiveResponse(); - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.isVisible(); - const dragonName = await predictiveStateUpdates.verifyAgentResponse( - "Atlantis" - ); - expect(dragonName).not.toBeNull(); - - // Send update to 
change the dragon name - await predictiveStateUpdates.sendMessage("Change dragon name to Lola"); - await waitForAIResponse(page); - await predictiveStateUpdates.verifyHighlightedText(); - await predictiveStateUpdates.getUserRejection(); - await predictiveStateUpdates.rejectedChangesResponse.isVisible(); - const dragonNameAfterRejection = await predictiveStateUpdates.verifyAgentResponse( - "Atlantis" - ); - expect(dragonNameAfterRejection).toBe(dragonName); - expect(dragonNameAfterRejection).not.toBe("Lola"); - }); - }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/tests/crewAITests/sharedStatePage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/crewAITests/sharedStatePage.spec.ts index b406b0ae5..847d3e569 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/crewAITests/sharedStatePage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/crewAITests/sharedStatePage.spec.ts @@ -1,5 +1,5 @@ import { test, expect } from "@playwright/test"; -import { SharedStatePage } from "../../pages/crewAIPages/SharedStatePage"; +import { SharedStatePage } from "../../featurePages/SharedStatePage"; test.describe("Shared State Feature", () => { test("[CrewAI] should interact with the chat to get a recipe on prompt", async ({ @@ -13,9 +13,9 @@ test.describe("Shared State Feature", () => { ); await sharedStateAgent.openChat(); - await sharedStateAgent.sendMessage("give me recipe for pasta"); + await sharedStateAgent.sendMessage('Please give me a pasta recipe of your choosing, but one of the ingredients should be "Pasta"'); await sharedStateAgent.loader(); - await sharedStateAgent.getIngredientCard(/Pasta/); + await sharedStateAgent.awaitIngredientCard('Pasta'); await sharedStateAgent.getInstructionItems( sharedStateAgent.instructionsContainer ); @@ -34,7 +34,7 @@ test.describe("Shared State Feature", () => { // Add new ingredient via UI await sharedStateAgent.addIngredient.click(); - + // Fill in the new ingredient details const newIngredientCard = 
page.locator('.ingredient-card').last(); await newIngredientCard.locator('.ingredient-name-input').fill('Potatoes'); diff --git a/typescript-sdk/apps/dojo/e2e/tests/crewAITests/toolBasedGenUIPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/crewAITests/toolBasedGenUIPage.spec.ts index c98a8a3c5..398a1b80b 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/crewAITests/toolBasedGenUIPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/crewAITests/toolBasedGenUIPage.spec.ts @@ -1,5 +1,5 @@ import { test, expect } from "@playwright/test"; -import { ToolBaseGenUIPage } from "../../pages/crewAIPages/ToolBaseGenUIPage"; +import { ToolBaseGenUIPage } from "../../featurePages/ToolBaseGenUIPage"; const pageURL = "/crewai/feature/tool_based_generative_ui"; diff --git a/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/agenticChatPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/agenticChatPage.spec.ts index 7806f9544..2ac0d2853 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/agenticChatPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/agenticChatPage.spec.ts @@ -4,7 +4,7 @@ import { waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { AgenticChatPage } from "../../pages/langGraphFastAPIPages/AgenticChatPage"; +import { AgenticChatPage } from "../../featurePages/AgenticChatPage"; test("[LangGraph FastAPI] Agentic Chat sends and receives a message", async ({ page, @@ -42,7 +42,7 @@ test("[LangGraph FastAPI] Agentic Chat changes background on message and reset", // Store initial background color const initialBackground = await chat.getBackground(); console.log("Initial background color:", initialBackground); - + // 1. 
Send message to change background to blue await chat.sendMessage("Hi change the background color to blue"); await chat.assertUserMessageVisible( diff --git a/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/agenticGenUI.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/agenticGenUI.spec.ts index 2ffda9f37..66b8ab3dd 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/agenticGenUI.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/agenticGenUI.spec.ts @@ -35,7 +35,7 @@ test.describe("Agent Generative UI Feature", () => { ); }); - test.fixme("[LangGraph FastAPI] should interact with the chat using predefined prompts and perform steps", async ({ + test("[LangGraph FastAPI] should interact with the chat using predefined prompts and perform steps", async ({ page, }) => { const genUIAgent = new AgenticGenUIPage(page); diff --git a/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/predictvieStateUpdatePage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/predictvieStateUpdatePage.spec.ts deleted file mode 100644 index 3ca0f0e37..000000000 --- a/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/predictvieStateUpdatePage.spec.ts +++ /dev/null @@ -1,96 +0,0 @@ -import { - test, - expect, - waitForAIResponse, - retryOnAIFailure, -} from "../../test-isolation-helper"; -import { PredictiveStateUpdatesPage } from "../../pages/langGraphFastAPIPages/PredictiveStateUpdatesPage"; - -test.describe("Predictive Status Updates Feature", () => { - test.fixme("[LangGraph FastAPI] should interact with agent and approve asked changes", async ({ - page, - }) => { - await retryOnAIFailure(async () => { - const predictiveStateUpdates = new PredictiveStateUpdatesPage(page); - - await page.goto( - "/langgraph-fastapi/feature/predictive_state_updates" - ); - - await predictiveStateUpdates.openChat(); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.sendMessage( - "Give me a story for 
a dragon called Atlantis in document" - ); - await waitForAIResponse(page); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.getPredictiveResponse(); - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.isVisible(); - const dragonName = await predictiveStateUpdates.verifyAgentResponse( - "Atlantis" - ); - expect(dragonName).not.toBeNull(); - - await page.waitForTimeout(3000); - - await predictiveStateUpdates.sendMessage("Change dragon name to Lola"); - await waitForAIResponse(page); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.verifyHighlightedText(); - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.isVisible(); - const dragonNameNew = await predictiveStateUpdates.verifyAgentResponse( - "Lola" - ); - expect(dragonNameNew).not.toBe(dragonName); - }); - }); - - test.fixme("[LangGraph FastAPI] should interact with agent and reject asked changes", async ({ - page, - }) => { - await retryOnAIFailure(async () => { - const predictiveStateUpdates = new PredictiveStateUpdatesPage(page); - - await page.goto( - "/langgraph-fastapi/feature/predictive_state_updates" - ); - - await predictiveStateUpdates.openChat(); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.sendMessage( - "Give me a story for a dragon called Atlantis in document" - ); - await waitForAIResponse(page); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.getPredictiveResponse(); - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.isVisible(); - const dragonName = await predictiveStateUpdates.verifyAgentResponse( - "Atlantis" - ); - expect(dragonName).not.toBeNull(); - - await page.waitForTimeout(3000); - - await predictiveStateUpdates.sendMessage("Change dragon name to Lola"); - await waitForAIResponse(page); - await page.waitForTimeout(2000); - - await 
predictiveStateUpdates.verifyHighlightedText(); - await predictiveStateUpdates.getUserRejection(); - await predictiveStateUpdates.rejectedChangesResponse.isVisible(); - const dragonNameAfterRejection = await predictiveStateUpdates.verifyAgentResponse( - "Atlantis" - ); - expect(dragonNameAfterRejection).toBe(dragonName); - expect(dragonNameAfterRejection).not.toBe("Lola"); - }); - }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/sharedStatePage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/sharedStatePage.spec.ts index c5213667d..bbd02d14b 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/sharedStatePage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/sharedStatePage.spec.ts @@ -1,5 +1,5 @@ import { test, expect } from "@playwright/test"; -import { SharedStatePage } from "../../pages/langGraphFastAPIPages/SharedStatePage"; +import { SharedStatePage } from "../../featurePages/SharedStatePage"; test.describe("Shared State Feature", () => { test("[LangGraph FastAPI] should interact with the chat to get a recipe on prompt", async ({ @@ -13,15 +13,15 @@ test.describe("Shared State Feature", () => { ); await sharedStateAgent.openChat(); - await sharedStateAgent.sendMessage("give me recipe for pasta"); + await sharedStateAgent.sendMessage('Please give me a pasta recipe of your choosing, but one of the ingredients should be "Pasta"'); await sharedStateAgent.loader(); - await sharedStateAgent.getIngredientCard(/Pasta/); + await sharedStateAgent.awaitIngredientCard('Pasta'); await sharedStateAgent.getInstructionItems( sharedStateAgent.instructionsContainer ); }); - test.fixme("[LangGraph FastAPI] should share state between UI and chat", async ({ + test("[LangGraph FastAPI] should share state between UI and chat", async ({ page, }) => { const sharedStateAgent = new SharedStatePage(page); @@ -44,7 +44,7 @@ test.describe("Shared State Feature", () => { await 
page.waitForTimeout(1000); // Ask chat for all ingredients - await sharedStateAgent.sendMessage("Give me all the ingredients"); + await sharedStateAgent.sendMessage("Give me all the ingredients, also list them in your message"); await sharedStateAgent.loader(); // Verify chat response includes both existing and new ingredients diff --git a/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/toolBasedGenUIPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/toolBasedGenUIPage.spec.ts index 38560a9df..053d4715e 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/toolBasedGenUIPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/langgraphFastAPITests/toolBasedGenUIPage.spec.ts @@ -1,10 +1,10 @@ import { test, expect } from "@playwright/test"; -import { ToolBaseGenUIPage } from "../../pages/langGraphFastAPIPages/ToolBaseGenUIPage"; +import { ToolBaseGenUIPage } from "../../featurePages/ToolBaseGenUIPage"; const pageURL = "/langgraph-fastapi/feature/tool_based_generative_ui"; -test.fixme('[LangGraph FastAPI] Haiku generation and display verification', async ({ +test('[LangGraph FastAPI] Haiku generation and display verification', async ({ page, }) => { await page.goto(pageURL); @@ -17,7 +17,7 @@ test.fixme('[LangGraph FastAPI] Haiku generation and display verification', asyn await genAIAgent.checkHaikuDisplay(page); }); -test.fixme('[LangGraph FastAPI] Haiku generation and UI consistency for two different prompts', async ({ +test('[LangGraph FastAPI] Haiku generation and UI consistency for two different prompts', async ({ page, }) => { await page.goto(pageURL); diff --git a/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/agenticChatPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/agenticChatPage.spec.ts index 0a8b225f4..c39e5a92e 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/agenticChatPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/agenticChatPage.spec.ts @@ -4,7 +4,7 @@ 
import { waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { AgenticChatPage } from "../../pages/langGraphPages/AgenticChatPage"; +import { AgenticChatPage } from "../../featurePages/AgenticChatPage"; test("[LangGraph] Agentic Chat sends and receives a message", async ({ page, @@ -42,7 +42,7 @@ test("[LangGraph] Agentic Chat changes background on message and reset", async ( // Store initial background color const initialBackground = await chat.getBackground(); console.log("Initial background color:", initialBackground); - + // 1. Send message to change background to blue await chat.sendMessage("Hi change the background color to blue"); await chat.assertUserMessageVisible( diff --git a/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/predictvieStateUpdatePage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/predictvieStateUpdatePage.spec.ts deleted file mode 100644 index ac3833ad2..000000000 --- a/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/predictvieStateUpdatePage.spec.ts +++ /dev/null @@ -1,96 +0,0 @@ -import { - test, - expect, - waitForAIResponse, - retryOnAIFailure, -} from "../../test-isolation-helper"; -import { PredictiveStateUpdatesPage } from "../../pages/langGraphPages/PredictiveStateUpdatesPage"; - -test.describe("Predictive Status Updates Feature", () => { - test.fixme("[LangGraph] should interact with agent and approve asked changes", async ({ - page, - }) => { - await retryOnAIFailure(async () => { - const predictiveStateUpdates = new PredictiveStateUpdatesPage(page); - - await page.goto( - "/langgraph/feature/predictive_state_updates" - ); - - await predictiveStateUpdates.openChat(); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.sendMessage( - "Give me a story for a dragon called Atlantis in document" - ); - await waitForAIResponse(page); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.getPredictiveResponse(); - await 
predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.isVisible(); - const dragonName = await predictiveStateUpdates.verifyAgentResponse( - "Atlantis" - ); - expect(dragonName).not.toBeNull(); - - await page.waitForTimeout(3000); - - await predictiveStateUpdates.sendMessage("Change dragon name to Lola"); - await waitForAIResponse(page); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.verifyHighlightedText(); - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.isVisible(); - const dragonNameNew = await predictiveStateUpdates.verifyAgentResponse( - "Lola" - ); - expect(dragonNameNew).not.toBe(dragonName); - }); - }); - - test.fixme("[LangGraph] should interact with agent and reject asked changes", async ({ - page, - }) => { - await retryOnAIFailure(async () => { - const predictiveStateUpdates = new PredictiveStateUpdatesPage(page); - - await page.goto( - "/langgraph/feature/predictive_state_updates" - ); - - await predictiveStateUpdates.openChat(); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.sendMessage( - "Give me a story for a dragon called Atlantis in document" - ); - await waitForAIResponse(page); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.getPredictiveResponse(); - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.isVisible(); - const dragonName = await predictiveStateUpdates.verifyAgentResponse( - "Atlantis" - ); - expect(dragonName).not.toBeNull(); - - await page.waitForTimeout(3000); - - await predictiveStateUpdates.sendMessage("Change dragon name to Lola"); - await waitForAIResponse(page); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.verifyHighlightedText(); - await predictiveStateUpdates.getUserRejection(); - await predictiveStateUpdates.rejectedChangesResponse.isVisible(); - const dragonNameAfterRejection = await 
predictiveStateUpdates.verifyAgentResponse( - "Atlantis" - ); - expect(dragonNameAfterRejection).toBe(dragonName); - expect(dragonNameAfterRejection).not.toBe("Lola"); - }); - }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/sharedStatePage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/sharedStatePage.spec.ts index ed758be36..2cccfcf88 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/sharedStatePage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/sharedStatePage.spec.ts @@ -1,5 +1,5 @@ import { test, expect } from "@playwright/test"; -import { SharedStatePage } from "../../pages/langGraphPages/SharedStatePage"; +import { SharedStatePage } from "../../featurePages/SharedStatePage"; test.describe("Shared State Feature", () => { test("[LangGraph] should interact with the chat to get a recipe on prompt", async ({ @@ -13,9 +13,9 @@ test.describe("Shared State Feature", () => { ); await sharedStateAgent.openChat(); - await sharedStateAgent.sendMessage("give me recipe for pasta"); + await sharedStateAgent.sendMessage('Please give me a pasta recipe of your choosing, but one of the ingredients should be "Pasta"'); await sharedStateAgent.loader(); - await sharedStateAgent.getIngredientCard(/Pasta/); + await sharedStateAgent.awaitIngredientCard('Pasta'); await sharedStateAgent.getInstructionItems( sharedStateAgent.instructionsContainer ); @@ -34,7 +34,7 @@ test.describe("Shared State Feature", () => { // Add new ingredient via UI await sharedStateAgent.addIngredient.click(); - + // Fill in the new ingredient details const newIngredientCard = page.locator('.ingredient-card').last(); await newIngredientCard.locator('.ingredient-name-input').fill('Potatoes'); diff --git a/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/toolBasedGenUIPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/toolBasedGenUIPage.spec.ts index 676bcb364..13d7d7e3d 100644 --- 
a/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/toolBasedGenUIPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/langgraphTests/toolBasedGenUIPage.spec.ts @@ -1,10 +1,10 @@ import { test, expect } from "@playwright/test"; -import { ToolBaseGenUIPage } from "../../pages/langGraphPages/ToolBaseGenUIPage"; +import { ToolBaseGenUIPage } from "../../featurePages/ToolBaseGenUIPage"; const pageURL = "/langgraph/feature/tool_based_generative_ui"; -test.fixme('[LangGraph] Haiku generation and display verification', async ({ +test('[LangGraph] Haiku generation and display verification', async ({ page, }) => { await page.goto(pageURL); @@ -17,7 +17,7 @@ test.fixme('[LangGraph] Haiku generation and display verification', async ({ await genAIAgent.checkHaikuDisplay(page); }); -test.fixme('[LangGraph] Haiku generation and UI consistency for two different prompts', async ({ +test('[LangGraph] Haiku generation and UI consistency for two different prompts', async ({ page, }) => { await page.goto(pageURL); diff --git a/typescript-sdk/apps/dojo/e2e/tests/llamaIndexTests/agenticChatPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/llamaIndexTests/agenticChatPage.spec.ts index 75a730c8d..0a9a262c2 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/llamaIndexTests/agenticChatPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/llamaIndexTests/agenticChatPage.spec.ts @@ -4,7 +4,7 @@ import { waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { AgenticChatPage } from "../../pages/llamaIndexPages/AgenticChatPage"; +import { AgenticChatPage } from "../../featurePages/AgenticChatPage"; test("[LlamaIndex] Agentic Chat sends and receives a message", async ({ page, @@ -42,7 +42,7 @@ test("[LlamaIndex] Agentic Chat changes background on message and reset", async // Store initial background color const initialBackground = await chat.getBackground(); console.log("Initial background color:", initialBackground); - + // 1. 
Send message to change background to blue await chat.sendMessage("Hi change the background color to blue"); await chat.assertUserMessageVisible( diff --git a/typescript-sdk/apps/dojo/e2e/tests/llamaIndexTests/agenticGenUI.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/llamaIndexTests/agenticGenUI.spec.ts index ca16782c4..26eee678d 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/llamaIndexTests/agenticGenUI.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/llamaIndexTests/agenticGenUI.spec.ts @@ -2,6 +2,7 @@ import { test, expect } from "@playwright/test"; import { AgenticGenUIPage } from "../../pages/llamaIndexPages/AgenticUIGenPage"; test.describe("Agent Generative UI Feature", () => { + // Fails. Issue with integration or something. test.fixme("[LlamaIndex] should interact with the chat to get a planner on prompt", async ({ page, }) => { @@ -35,6 +36,7 @@ test.describe("Agent Generative UI Feature", () => { ); }); + // Fails. Issue with integration or something. test.fixme("[LlamaIndex] should interact with the chat using predefined prompts and perform steps", async ({ page, }) => { diff --git a/typescript-sdk/apps/dojo/e2e/tests/llamaIndexTests/sharedStatePage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/llamaIndexTests/sharedStatePage.spec.ts index 8ab2b0875..b0d9740d7 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/llamaIndexTests/sharedStatePage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/llamaIndexTests/sharedStatePage.spec.ts @@ -1,5 +1,5 @@ import { test, expect } from "@playwright/test"; -import { SharedStatePage } from "../../pages/llamaIndexPages/SharedStatePage"; +import { SharedStatePage } from "../../featurePages/SharedStatePage"; test.describe("Shared State Feature", () => { test("[LlamaIndex] should interact with the chat to get a recipe on prompt", async ({ @@ -13,9 +13,9 @@ test.describe("Shared State Feature", () => { ); await sharedStateAgent.openChat(); - await sharedStateAgent.sendMessage("give me recipe for pasta"); + await 
sharedStateAgent.sendMessage('Please give me a pasta recipe of your choosing, but one of the ingredients should be "Pasta"'); await sharedStateAgent.loader(); - await sharedStateAgent.getIngredientCard(/Pasta/); + await sharedStateAgent.awaitIngredientCard('Pasta'); await sharedStateAgent.getInstructionItems( sharedStateAgent.instructionsContainer ); @@ -34,7 +34,7 @@ test.describe("Shared State Feature", () => { // Add new ingredient via UI await sharedStateAgent.addIngredient.click(); - + // Fill in the new ingredient details const newIngredientCard = page.locator('.ingredient-card').last(); await newIngredientCard.locator('.ingredient-name-input').fill('Potatoes'); diff --git a/typescript-sdk/apps/dojo/e2e/tests/mastraAgentLocalTests/agenticChatPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/mastraAgentLocalTests/agenticChatPage.spec.ts index 8826a05f1..2c3c2ac35 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/mastraAgentLocalTests/agenticChatPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/mastraAgentLocalTests/agenticChatPage.spec.ts @@ -4,7 +4,7 @@ import { waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { AgenticChatPage } from "../../pages/llamaIndexPages/AgenticChatPage"; +import { AgenticChatPage } from "../../featurePages/AgenticChatPage"; test("[MastraAgentLocal] Agentic Chat sends and receives a message", async ({ page, @@ -42,7 +42,7 @@ test("[MastraAgentLocal] Agentic Chat changes background on message and reset", // Store initial background color const initialBackground = await chat.getBackground(); console.log("Initial background color:", initialBackground); - + // 1. 
Send message to change background to blue await chat.sendMessage("Hi change the background color to blue"); await chat.assertUserMessageVisible( diff --git a/typescript-sdk/apps/dojo/e2e/tests/mastraAgentLocalTests/sharedStatePage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/mastraAgentLocalTests/sharedStatePage.spec.ts index 5da363d8e..16ff216ea 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/mastraAgentLocalTests/sharedStatePage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/mastraAgentLocalTests/sharedStatePage.spec.ts @@ -1,5 +1,5 @@ import { test, expect } from "@playwright/test"; -import { SharedStatePage } from "../../pages/mastraAgentLocalPages/SharedStatePage"; +import { SharedStatePage } from "../../featurePages/SharedStatePage"; test.describe("Shared State Feature", () => { test("[MastraAgentLocal] should interact with the chat to get a recipe on prompt", async ({ @@ -13,9 +13,9 @@ test.describe("Shared State Feature", () => { ); await sharedStateAgent.openChat(); - await sharedStateAgent.sendMessage("give me recipe for pasta"); + await sharedStateAgent.sendMessage('Please give me a pasta recipe of your choosing, but one of the ingredients should be "Pasta"'); await sharedStateAgent.loader(); - await sharedStateAgent.getIngredientCard(/Pasta/); + await sharedStateAgent.awaitIngredientCard('Pasta'); await sharedStateAgent.getInstructionItems( sharedStateAgent.instructionsContainer ); @@ -34,7 +34,7 @@ test.describe("Shared State Feature", () => { // Add new ingredient via UI await sharedStateAgent.addIngredient.click(); - + // Fill in the new ingredient details const newIngredientCard = page.locator('.ingredient-card').last(); await newIngredientCard.locator('.ingredient-name-input').fill('Potatoes'); diff --git a/typescript-sdk/apps/dojo/e2e/tests/mastraAgentLocalTests/toolBasedGenUIPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/mastraAgentLocalTests/toolBasedGenUIPage.spec.ts index c5f32c1f9..c73388faf 100644 --- 
a/typescript-sdk/apps/dojo/e2e/tests/mastraAgentLocalTests/toolBasedGenUIPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/mastraAgentLocalTests/toolBasedGenUIPage.spec.ts @@ -1,10 +1,10 @@ import { test, expect } from "@playwright/test"; -import { ToolBaseGenUIPage } from "../../pages/mastraAgentLocalPages/ToolBaseGenUIPage"; +import { ToolBaseGenUIPage } from "../../featurePages/ToolBaseGenUIPage"; const pageURL = "/mastra-agent-local/feature/tool_based_generative_ui"; -test.fixme('[Mastra Agent Local] Haiku generation and display verification', async ({ +test('[Mastra Agent Local] Haiku generation and display verification', async ({ page, }) => { await page.goto(pageURL); @@ -17,7 +17,7 @@ test.fixme('[Mastra Agent Local] Haiku generation and display verification', asy await genAIAgent.checkHaikuDisplay(page); }); -test.fixme('[Mastra Agent Local] Haiku generation and UI consistency for two different prompts', async ({ +test('[Mastra Agent Local] Haiku generation and UI consistency for two different prompts', async ({ page, }) => { await page.goto(pageURL); diff --git a/typescript-sdk/apps/dojo/e2e/tests/mastraTests/agenticChatPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/mastraTests/agenticChatPage.spec.ts index f22220243..e8b910165 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/mastraTests/agenticChatPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/mastraTests/agenticChatPage.spec.ts @@ -4,7 +4,7 @@ import { waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { AgenticChatPage } from "../../pages/mastraPages/AgenticChatPage"; +import { AgenticChatPage } from "../../featurePages/AgenticChatPage"; test("[Mastra] Agentic Chat sends and receives a greeting message", async ({ page, @@ -71,7 +71,7 @@ test("[Mastra] Agentic Chat retains memory of previous questions", async ({ await chat.sendMessage("What was my first question"); await chat.assertUserMessageVisible("What was my first question"); await 
waitForAIResponse(page); - + // Check if the agent remembers the first question about weather await chat.assertAgentReplyVisible(/weather|Islamabad/i); }); diff --git a/typescript-sdk/apps/dojo/e2e/tests/mastraTests/toolBasedGenUIPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/mastraTests/toolBasedGenUIPage.spec.ts index 2c2845211..d9787d8e5 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/mastraTests/toolBasedGenUIPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/mastraTests/toolBasedGenUIPage.spec.ts @@ -1,9 +1,10 @@ import { test, expect } from "@playwright/test"; -import { ToolBaseGenUIPage } from "../../pages/mastraPages/ToolBaseGenUIPage"; +import { ToolBaseGenUIPage } from "../../featurePages/ToolBaseGenUIPage"; const pageURL = "/mastra/feature/tool_based_generative_ui"; +// Fails. Not a test issue, issue with the integration or cpk. test.fixme('[Mastra] Haiku generation and display verification', async ({ page, }) => { @@ -17,6 +18,7 @@ test.fixme('[Mastra] Haiku generation and display verification', async ({ await genAIAgent.checkHaikuDisplay(page); }); +// Fails. Not a test issue, issue with the integration or cpk. 
test.fixme('[Mastra] Haiku generation and UI consistency for two different prompts', async ({ page, }) => { diff --git a/typescript-sdk/apps/dojo/e2e/tests/middlewareStarterTests/agenticChatPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/middlewareStarterTests/agenticChatPage.spec.ts index 6e598d8ad..5666a33c0 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/middlewareStarterTests/agenticChatPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/middlewareStarterTests/agenticChatPage.spec.ts @@ -4,7 +4,7 @@ import { waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { AgenticChatPage } from "../../pages/middlewareStarterPages/AgenticChatPage"; +import { AgenticChatPage } from "../../featurePages/AgenticChatPage"; test("[Middleware Starter] Testing Agentic Chat", async ({ page, diff --git a/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/agenticChatPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/agenticChatPage.spec.ts index 8c5b9d4c3..df8ab753f 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/agenticChatPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/agenticChatPage.spec.ts @@ -4,7 +4,7 @@ import { waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { AgenticChatPage } from "../../pages/pydanticAIPages/AgenticChatPage"; +import { AgenticChatPage } from "../../featurePages/AgenticChatPage"; test("[PydanticAI] Agentic Chat sends and receives a message", async ({ page, @@ -42,7 +42,7 @@ test("[PydanticAI] Agentic Chat changes background on message and reset", async // Store initial background color const initialBackground = await chat.getBackground(); console.log("Initial background color:", initialBackground); - + // 1. 
Send message to change background to blue await chat.sendMessage("Hi change the background color to blue"); await chat.assertUserMessageVisible( diff --git a/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/agenticGenUI.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/agenticGenUI.spec.ts index 0a7a722a6..a10f14356 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/agenticGenUI.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/agenticGenUI.spec.ts @@ -2,6 +2,7 @@ import { test, expect } from "@playwright/test"; import { AgenticGenUIPage } from "../../pages/pydanticAIPages/AgenticUIGenPage"; test.describe("Agent Generative UI Feature", () => { + // Flaky. Sometimes the steps render but never process. test("[PydanticAI] should interact with the chat to get a planner on prompt", async ({ page, }) => { @@ -20,13 +21,13 @@ test.describe("Agent Generative UI Feature", () => { await genUIAgent.sendButton.click(); await expect(genUIAgent.agentPlannerContainer).toBeVisible({ timeout: 15000 }); await genUIAgent.plan(); - + await page.waitForFunction( () => { const messages = Array.from(document.querySelectorAll('.copilotKitAssistantMessage')); const lastMessage = messages[messages.length - 1]; const content = lastMessage?.textContent?.trim() || ''; - + return messages.length >= 3 && content.length > 0; }, { timeout: 30000 } @@ -49,16 +50,16 @@ test.describe("Agent Generative UI Feature", () => { await genUIAgent.sendMessage("Go to Mars"); await genUIAgent.sendButton.click(); - + await expect(genUIAgent.agentPlannerContainer).toBeVisible({ timeout: 15000 }); await genUIAgent.plan(); - + await page.waitForFunction( () => { const messages = Array.from(document.querySelectorAll('.copilotKitAssistantMessage')); const lastMessage = messages[messages.length - 1]; const content = lastMessage?.textContent?.trim() || ''; - + return messages.length >= 3 && content.length > 0; }, { timeout: 30000 } diff --git 
a/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/humanInTheLoopPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/humanInTheLoopPage.spec.ts index efd0c162b..faf8b4416 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/humanInTheLoopPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/humanInTheLoopPage.spec.ts @@ -27,7 +27,7 @@ test.describe("Human in the Loop Feature", () => { await page.waitForTimeout(5000); await humanInLoop.uncheckItem(itemText); await humanInLoop.performSteps(); - + await page.waitForFunction( () => { const messages = Array.from(document.querySelectorAll('.copilotKitAssistantMessage')); @@ -70,13 +70,13 @@ test.describe("Human in the Loop Feature", () => { await page.waitForTimeout(5000); await humanInLoop.uncheckItem(uncheckedItem); await humanInLoop.performSteps(); - + await page.waitForFunction( () => { const messages = Array.from(document.querySelectorAll('.copilotKitAssistantMessage')); const lastMessage = messages[messages.length - 1]; const content = lastMessage?.textContent?.trim() || ''; - + return messages.length >= 3 && content.length > 0; }, { timeout: 30000 } diff --git a/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/predictvieStateUpdatePage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/predictvieStateUpdatePage.spec.ts deleted file mode 100644 index 0221871aa..000000000 --- a/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/predictvieStateUpdatePage.spec.ts +++ /dev/null @@ -1,84 +0,0 @@ -import { - test, - expect, - waitForAIResponse, - retryOnAIFailure, -} from "../../test-isolation-helper"; -import { PredictiveStateUpdatesPage } from "../../pages/pydanticAIPages/PredictiveStateUpdatesPage"; - -test.describe("Predictive Status Updates Feature", () => { - test.fixme("[PydanticAI] should interact with agent and approve asked changes", async ({ - page, - }) => { - await retryOnAIFailure(async () => { - const predictiveStateUpdates = new 
PredictiveStateUpdatesPage(page); - - // Update URL to new domain - await page.goto( - "/pydantic-ai/feature/predictive_state_updates" - ); - - await predictiveStateUpdates.openChat(); - await predictiveStateUpdates.sendMessage( - "Give me a story for a dragon called Atlantis in document" - ); - await waitForAIResponse(page); - await predictiveStateUpdates.getPredictiveResponse(); - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.isVisible(); - const dragonName = await predictiveStateUpdates.verifyAgentResponse( - "Atlantis" - ); - expect(dragonName).not.toBeNull(); - - // Send update to change the dragon name - await predictiveStateUpdates.sendMessage("Change dragon name to Lola"); - await waitForAIResponse(page); - await predictiveStateUpdates.verifyHighlightedText(); - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.nth(1).isVisible(); - const dragonNameNew = await predictiveStateUpdates.verifyAgentResponse( - "Lola" - ); - expect(dragonNameNew).not.toBe(dragonName); - }); - }); - - test.fixme("[PydanticAI] should interact with agent and reject asked changes", async ({ - page, - }) => { - await retryOnAIFailure(async () => { - const predictiveStateUpdates = new PredictiveStateUpdatesPage(page); - - // Update URL to new domain - await page.goto( - "/pydantic-ai/feature/predictive_state_updates" - ); - - await predictiveStateUpdates.openChat(); - - await predictiveStateUpdates.sendMessage( - "Give me a story for a dragon called called Atlantis in document" - ); - await predictiveStateUpdates.getPredictiveResponse(); - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.isVisible(); - const dragonName = await predictiveStateUpdates.verifyAgentResponse( - "Atlantis" - ); - expect(dragonName).not.toBeNull(); - - // Send update to change the dragon name - await predictiveStateUpdates.sendMessage("Change 
dragon name to Lola"); - await waitForAIResponse(page); - await predictiveStateUpdates.verifyHighlightedText(); - await predictiveStateUpdates.getUserRejection(); - await predictiveStateUpdates.rejectedChangesResponse.isVisible(); - const dragonNameAfterRejection = await predictiveStateUpdates.verifyAgentResponse( - "Atlantis" - ); - expect(dragonNameAfterRejection).toBe(dragonName); - expect(dragonNameAfterRejection).not.toBe("Lola"); - }); - }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/sharedStatePage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/sharedStatePage.spec.ts index 5be1b4dda..51aae72de 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/sharedStatePage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/sharedStatePage.spec.ts @@ -1,5 +1,5 @@ import { test, expect } from "@playwright/test"; -import { SharedStatePage } from "../../pages/pydanticAIPages/SharedStatePage"; +import { SharedStatePage } from "../../featurePages/SharedStatePage"; test.describe("Shared State Feature", () => { test("[PydanticAI] should interact with the chat to get a recipe on prompt", async ({ @@ -13,9 +13,9 @@ test.describe("Shared State Feature", () => { ); await sharedStateAgent.openChat(); - await sharedStateAgent.sendMessage("give me recipe for pasta"); + await sharedStateAgent.sendMessage('Please give me a pasta recipe of your choosing, but one of the ingredients should be "Pasta"'); await sharedStateAgent.loader(); - await sharedStateAgent.getIngredientCard(/Pasta/); + await sharedStateAgent.awaitIngredientCard('Pasta'); await sharedStateAgent.getInstructionItems( sharedStateAgent.instructionsContainer ); @@ -34,7 +34,7 @@ test.describe("Shared State Feature", () => { // Add new ingredient via UI await sharedStateAgent.addIngredient.click(); - + // Fill in the new ingredient details const newIngredientCard = page.locator('.ingredient-card').last(); await 
newIngredientCard.locator('.ingredient-name-input').fill('Potatoes'); diff --git a/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/toolBasedGenUIPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/toolBasedGenUIPage.spec.ts index aeee3755a..3a21d88bc 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/toolBasedGenUIPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/pydanticAITests/toolBasedGenUIPage.spec.ts @@ -1,10 +1,10 @@ import { test, expect } from "@playwright/test"; -import { ToolBaseGenUIPage } from "../../pages/pydanticAIPages/ToolBaseGenUIPage"; +import { ToolBaseGenUIPage } from "../../featurePages/ToolBaseGenUIPage"; const pageURL = "/pydantic-ai/feature/tool_based_generative_ui"; -test.fixme('[PydanticAI] Haiku generation and display verification', async ({ +test('[PydanticAI] Haiku generation and display verification', async ({ page, }) => { await page.goto(pageURL); @@ -17,7 +17,7 @@ test.fixme('[PydanticAI] Haiku generation and display verification', async ({ await genAIAgent.checkHaikuDisplay(page); }); -test.fixme('[PydanticAI] Haiku generation and UI consistency for two different prompts', async ({ +test('[PydanticAI] Haiku generation and UI consistency for two different prompts', async ({ page, }) => { await page.goto(pageURL); diff --git a/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/agenticChatPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/agenticChatPage.spec.ts index 30d6ca45c..00f423305 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/agenticChatPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/agenticChatPage.spec.ts @@ -4,7 +4,7 @@ import { waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { AgenticChatPage } from "../../pages/serverStarterAllFeaturesPages/AgenticChatPage"; +import { AgenticChatPage } from "../../featurePages/AgenticChatPage"; test("[Server 
Starter all features] Agentic Chat displays countdown from 10 to 1 with tick mark", async ({ page, @@ -24,13 +24,13 @@ test("[Server Starter all features] Agentic Chat displays countdown from 10 to 1 const countdownMessage = page .locator('.copilotKitAssistantMessage') .filter({ hasText: 'counting down:' }); - + await expect(countdownMessage).toBeVisible({ timeout: 30000 }); - + // Wait for countdown to complete by checking for the tick mark await expect(countdownMessage.locator('.copilotKitMarkdownElement')) .toContainText('✓', { timeout: 15000 }); - + const countdownText = await countdownMessage .locator('.copilotKitMarkdownElement') .textContent(); diff --git a/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/predictvieStateUpdatePage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/predictvieStateUpdatePage.spec.ts deleted file mode 100644 index a5584c22a..000000000 --- a/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/predictvieStateUpdatePage.spec.ts +++ /dev/null @@ -1,82 +0,0 @@ -import { test, expect, waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { PredictiveStateUpdatesPage } from "../../pages/serverStarterAllFeaturesPages/PredictiveStateUpdatesPage"; - -test.describe("Predictive Status Updates Feature", () => { - test.fixme("[Server Starter all features] should interact with agent and approve asked changes", async ({ page, }) => { - await retryOnAIFailure(async () => { - const predictiveStateUpdates = new PredictiveStateUpdatesPage(page); - - await page.goto( - "/server-starter-all-features/feature/predictive_state_updates" - ); - - await predictiveStateUpdates.openChat(); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.sendMessage("Hi"); - await waitForAIResponse(page); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.getPredictiveResponse(); - await predictiveStateUpdates.getUserApproval(); - await 
predictiveStateUpdates.confirmedChangesResponse.isVisible(); - - const originalContent = await predictiveStateUpdates.getResponseContent(); - expect(originalContent).not.toBeNull(); - - await page.waitForTimeout(3000); - - await predictiveStateUpdates.sendMessage("Change the dog name"); - await waitForAIResponse(page); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.verifyHighlightedText(); - - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.isVisible(); - - const updatedContent = await predictiveStateUpdates.getResponseContent(); - - expect(updatedContent).not.toBe(originalContent); - }); - }); - - test.fixme("[Server Starter all features] should interact with agent and reject asked changes", async ({ page, }) => { - await retryOnAIFailure(async () => { - const predictiveStateUpdates = new PredictiveStateUpdatesPage(page); - - await page.goto( - "/server-starter-all-features/feature/predictive_state_updates" - ); - - await predictiveStateUpdates.openChat(); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.sendMessage("Hi"); - await waitForAIResponse(page); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.getPredictiveResponse(); - await predictiveStateUpdates.getUserApproval(); - await predictiveStateUpdates.confirmedChangesResponse.isVisible(); - - const originalContent = await predictiveStateUpdates.getResponseContent(); - expect(originalContent).not.toBeNull(); - - await page.waitForTimeout(3000); - - await predictiveStateUpdates.sendMessage("Change the dog name"); - await waitForAIResponse(page); - await page.waitForTimeout(2000); - - await predictiveStateUpdates.verifyHighlightedText(); - - await predictiveStateUpdates.getUserRejection(); - await predictiveStateUpdates.rejectedChangesResponse.isVisible(); - - const currentContent = await predictiveStateUpdates.getResponseContent(); - - expect(currentContent).toBe(originalContent); - }); - }); -}); 
\ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/sharedStatePage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/sharedStatePage.spec.ts index 3729c1e14..5700d9b8b 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/sharedStatePage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/sharedStatePage.spec.ts @@ -1,5 +1,5 @@ import { test, expect } from "@playwright/test"; -import { SharedStatePage } from "../../pages/serverStarterAllFeaturesPages/SharedStatePage"; +import { SharedStatePage } from "../../featurePages/SharedStatePage"; test.describe("Shared State Feature", () => { test("[Server Starter all features] should interact with the chat to get a recipe on prompt", async ({ @@ -13,15 +13,16 @@ test.describe("Shared State Feature", () => { ); await sharedStateAgent.openChat(); - await sharedStateAgent.sendMessage("give me recipe for pasta"); + await sharedStateAgent.sendMessage('Please give me a pasta recipe of your choosing, but one of the ingredients should be "Pasta"'); await sharedStateAgent.loader(); - await sharedStateAgent.getIngredientCard(/Pasta/); + await sharedStateAgent.awaitIngredientCard('Salt'); await sharedStateAgent.getInstructionItems( sharedStateAgent.instructionsContainer ); }); - test.fixme("[Server Starter all features] should share state between UI and chat", async ({ + // Fails. 
Issue with the test, most likely + test("[Server Starter all features] should share state between UI and chat", async ({ page, }) => { const sharedStateAgent = new SharedStatePage(page); @@ -47,10 +48,12 @@ test.describe("Shared State Feature", () => { await sharedStateAgent.sendMessage("Give me all the ingredients"); await sharedStateAgent.loader(); - // Verify chat response includes both existing and new ingredients - await expect(sharedStateAgent.agentMessage.getByText(/Potatoes/)).toBeVisible(); - await expect(sharedStateAgent.agentMessage.getByText(/12/)).toBeVisible(); - await expect(sharedStateAgent.agentMessage.getByText(/Carrots/)).toBeVisible(); - await expect(sharedStateAgent.agentMessage.getByText(/All-Purpose Flour/)).toBeVisible(); + // Verify hardcoded ingredients + await sharedStateAgent.awaitIngredientCard('chicken breast'); + await sharedStateAgent.awaitIngredientCard('chili powder'); + await sharedStateAgent.awaitIngredientCard('Salt'); + await sharedStateAgent.awaitIngredientCard('Lettuce leaves'); + + expect(await sharedStateAgent.getInstructionItems(sharedStateAgent.instructionsContainer)).toBe(3); }); }); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/toolBasedGenUIPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/toolBasedGenUIPage.spec.ts index 56080aac0..34598e4ce 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/toolBasedGenUIPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/serverStarterAllFeaturesTests/toolBasedGenUIPage.spec.ts @@ -1,5 +1,5 @@ import { test, expect } from "@playwright/test"; -import { ToolBaseGenUIPage } from "../../pages/serverStarterAllFeaturesPages/ToolBaseGenUIPage"; +import { ToolBaseGenUIPage } from "../../featurePages/ToolBaseGenUIPage"; const pageURL = "/server-starter-all-features/feature/tool_based_generative_ui"; diff --git 
a/typescript-sdk/apps/dojo/e2e/tests/serverStarterTests/agenticChatPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/serverStarterTests/agenticChatPage.spec.ts index 9d4e0f9c7..a83903c87 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/serverStarterTests/agenticChatPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/serverStarterTests/agenticChatPage.spec.ts @@ -4,7 +4,7 @@ import { waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { AgenticChatPage } from "../../pages/serverStarterPages/AgenticChatPage"; +import { AgenticChatPage } from "../../featurePages/AgenticChatPage"; test("[Server Starter] Testing Agentic Chat", async ({ page, diff --git a/typescript-sdk/apps/dojo/e2e/tests/vercelAISdkTests/agenticChatPage.spec.ts b/typescript-sdk/apps/dojo/e2e/tests/vercelAISdkTests/agenticChatPage.spec.ts index 265d3deb1..640fb9f9f 100644 --- a/typescript-sdk/apps/dojo/e2e/tests/vercelAISdkTests/agenticChatPage.spec.ts +++ b/typescript-sdk/apps/dojo/e2e/tests/vercelAISdkTests/agenticChatPage.spec.ts @@ -4,7 +4,7 @@ import { waitForAIResponse, retryOnAIFailure, } from "../../test-isolation-helper"; -import { AgenticChatPage } from "../../pages/vercelAISdkPages/AgenticChatPage"; +import { AgenticChatPage } from "../../featurePages/AgenticChatPage"; test("[verceAISdkPages] Agentic Chat sends and receives a message", async ({ page, @@ -42,7 +42,7 @@ test("[Vercel AI SDK] Agentic Chat changes background on message and reset", asy // Store initial background color const initialBackground = await chat.getBackground(); console.log("Initial background color:", initialBackground); - + // 1. 
Send message to change background to blue await chat.sendMessage("Hi change the background color to blue"); await chat.assertUserMessageVisible( diff --git a/typescript-sdk/apps/dojo/e2e2/.gitignore b/typescript-sdk/apps/dojo/e2e2/.gitignore deleted file mode 100644 index 58786aac7..000000000 --- a/typescript-sdk/apps/dojo/e2e2/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ - -# Playwright -node_modules/ -/test-results/ -/playwright-report/ -/blob-report/ -/playwright/.cache/ diff --git a/typescript-sdk/apps/dojo/e2e2/package.json b/typescript-sdk/apps/dojo/e2e2/package.json deleted file mode 100644 index e15283561..000000000 --- a/typescript-sdk/apps/dojo/e2e2/package.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "name": "e2e2", - "version": "1.0.0", - "description": "", - "main": "index.js", - "scripts": {}, - "keywords": [], - "author": "", - "license": "ISC", - "devDependencies": { - "@playwright/test": "^1.54.2", - "@types/node": "^24.1.0" - } -} diff --git a/typescript-sdk/apps/dojo/e2e2/playwright.config.ts b/typescript-sdk/apps/dojo/e2e2/playwright.config.ts deleted file mode 100644 index 390646d4d..000000000 --- a/typescript-sdk/apps/dojo/e2e2/playwright.config.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { defineConfig, devices } from '@playwright/test'; - -/** - * Read environment variables from file. - * https://github.com/motdotla/dotenv - */ -// import dotenv from 'dotenv'; -// import path from 'path'; -// dotenv.config({ path: path.resolve(__dirname, '.env') }); - -/** - * See https://playwright.dev/docs/test-configuration. - */ -export default defineConfig({ - testDir: './tests', - /* Run tests in files in parallel */ - fullyParallel: true, - /* Fail the build on CI if you accidentally left test.only in the source code. */ - forbidOnly: !!process.env.CI, - /* Retry on CI only */ - retries: process.env.CI ? 2 : 0, - /* Opt out of parallel tests on CI. */ - workers: process.env.CI ? 1 : undefined, - /* Reporter to use. 
See https://playwright.dev/docs/test-reporters */ - reporter: 'html', - /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ - use: { - /* Base URL to use in actions like `await page.goto('/')`. */ - // baseURL: 'http://localhost:3000', - - /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */ - trace: 'on-first-retry', - }, - - /* Configure projects for major browsers */ - projects: [ - { - name: 'chromium', - use: { ...devices['Desktop Chrome'] }, - }, - - { - name: 'firefox', - use: { ...devices['Desktop Firefox'] }, - }, - - { - name: 'webkit', - use: { ...devices['Desktop Safari'] }, - }, - - /* Test against mobile viewports. */ - // { - // name: 'Mobile Chrome', - // use: { ...devices['Pixel 5'] }, - // }, - // { - // name: 'Mobile Safari', - // use: { ...devices['iPhone 12'] }, - // }, - - /* Test against branded browsers. */ - // { - // name: 'Microsoft Edge', - // use: { ...devices['Desktop Edge'], channel: 'msedge' }, - // }, - // { - // name: 'Google Chrome', - // use: { ...devices['Desktop Chrome'], channel: 'chrome' }, - // }, - ], - - /* Run your local dev server before starting the tests */ - // webServer: { - // command: 'npm run start', - // url: 'http://localhost:3000', - // reuseExistingServer: !process.env.CI, - // }, -}); diff --git a/typescript-sdk/apps/dojo/e2e2/pnpm-lock.yaml b/typescript-sdk/apps/dojo/e2e2/pnpm-lock.yaml deleted file mode 100644 index 26f24d9ed..000000000 --- a/typescript-sdk/apps/dojo/e2e2/pnpm-lock.yaml +++ /dev/null @@ -1,67 +0,0 @@ -lockfileVersion: '9.0' - -settings: - autoInstallPeers: true - excludeLinksFromLockfile: false - -importers: - - .: - devDependencies: - '@playwright/test': - specifier: ^1.54.2 - version: 1.54.2 - '@types/node': - specifier: ^24.1.0 - version: 24.1.0 - -packages: - - '@playwright/test@1.54.2': - resolution: {integrity: 
sha512-A+znathYxPf+72riFd1r1ovOLqsIIB0jKIoPjyK2kqEIe30/6jF6BC7QNluHuwUmsD2tv1XZVugN8GqfTMOxsA==} - engines: {node: '>=18'} - hasBin: true - - '@types/node@24.1.0': - resolution: {integrity: sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w==} - - fsevents@2.3.2: - resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} - engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} - os: [darwin] - - playwright-core@1.54.2: - resolution: {integrity: sha512-n5r4HFbMmWsB4twG7tJLDN9gmBUeSPcsBZiWSE4DnYz9mJMAFqr2ID7+eGC9kpEnxExJ1epttwR59LEWCk8mtA==} - engines: {node: '>=18'} - hasBin: true - - playwright@1.54.2: - resolution: {integrity: sha512-Hu/BMoA1NAdRUuulyvQC0pEqZ4vQbGfn8f7wPXcnqQmM+zct9UliKxsIkLNmz/ku7LElUNqmaiv1TG/aL5ACsw==} - engines: {node: '>=18'} - hasBin: true - - undici-types@7.8.0: - resolution: {integrity: sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==} - -snapshots: - - '@playwright/test@1.54.2': - dependencies: - playwright: 1.54.2 - - '@types/node@24.1.0': - dependencies: - undici-types: 7.8.0 - - fsevents@2.3.2: - optional: true - - playwright-core@1.54.2: {} - - playwright@1.54.2: - dependencies: - playwright-core: 1.54.2 - optionalDependencies: - fsevents: 2.3.2 - - undici-types@7.8.0: {} diff --git a/typescript-sdk/apps/dojo/e2e2/pnpm-workspace.yaml b/typescript-sdk/apps/dojo/e2e2/pnpm-workspace.yaml deleted file mode 100644 index e4aab11a2..000000000 --- a/typescript-sdk/apps/dojo/e2e2/pnpm-workspace.yaml +++ /dev/null @@ -1,2 +0,0 @@ -packages: - - '.' 
\ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e2/tests/agno-agentic-chat.spec.ts b/typescript-sdk/apps/dojo/e2e2/tests/agno-agentic-chat.spec.ts deleted file mode 100644 index e556bc3ff..000000000 --- a/typescript-sdk/apps/dojo/e2e2/tests/agno-agentic-chat.spec.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { test, expect } from '@playwright/test'; - -test('renders initial message', async ({ page }) => { - await page.goto('http://localhost:9999/agno/feature/agentic_chat'); - - await expect(page.getByText('Hi, I\'m an agent. Want to chat?')).toBeVisible(); -}); - -test('responds to user message', async ({ page }) => { - await page.goto('http://localhost:9999/agno/feature/agentic_chat'); - - const textarea = page.getByPlaceholder('Type a message...'); - textarea.fill('How many sides are in a square? Please answer in one word. Do not use any punctuation, just the number in word form.'); - await page.keyboard.press('Enter'); - - page.locator('.copilotKitInputControls button.copilotKitInputControlButton').click(); - - await expect(page.locator('.copilotKitMessage')).toHaveCount(3); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage')).toHaveCount(2); - await expect(page.locator('.copilotKitMessage.copilotKitUserMessage')).toHaveCount(1); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage').last()).toHaveText('four', { ignoreCase: true }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e2/tests/crewai-agentic-chat.spec.ts b/typescript-sdk/apps/dojo/e2e2/tests/crewai-agentic-chat.spec.ts deleted file mode 100644 index 24ca33b95..000000000 --- a/typescript-sdk/apps/dojo/e2e2/tests/crewai-agentic-chat.spec.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { test, expect } from '@playwright/test'; - -test('renders initial message', async ({ page }) => { - await page.goto('http://localhost:9999/crewai/feature/agentic_chat'); - - await expect(page.getByText('Hi, I\'m an agent. 
Want to chat?')).toBeVisible(); -}); - -test('responds to user message', async ({ page }) => { - await page.goto('http://localhost:9999/crewai/feature/agentic_chat'); - - const textarea = page.getByPlaceholder('Type a message...'); - textarea.fill('How many sides are in a square? Please answer in one word. Do not use any punctuation, just the number in word form.'); - await page.keyboard.press('Enter'); - - page.locator('.copilotKitInputControls button.copilotKitInputControlButton').click(); - - await expect(page.locator('.copilotKitMessage')).toHaveCount(3); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage')).toHaveCount(2); - await expect(page.locator('.copilotKitMessage.copilotKitUserMessage')).toHaveCount(1); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage').last()).toHaveText('four', { ignoreCase: true }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e2/tests/langgraph-agentic-chat.spec.ts b/typescript-sdk/apps/dojo/e2e2/tests/langgraph-agentic-chat.spec.ts deleted file mode 100644 index 0d421f0dd..000000000 --- a/typescript-sdk/apps/dojo/e2e2/tests/langgraph-agentic-chat.spec.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { test, expect } from '@playwright/test'; - -test('renders initial message', async ({ page }) => { - await page.goto('http://localhost:9999/langgraph/feature/agentic_chat'); - - await expect(page.getByText('Hi, I\'m an agent. Want to chat?')).toBeVisible(); -}); - -test('responds to user message', async ({ page }) => { - await page.goto('http://localhost:9999/langgraph/feature/agentic_chat'); - - const textarea = page.getByPlaceholder('Type a message...'); - textarea.fill('How many sides are in a square? Please answer in one word. 
Do not use any punctuation, just the number in word form.'); - await page.keyboard.press('Enter'); - - page.locator('.copilotKitInputControls button.copilotKitInputControlButton').click(); - - await expect(page.locator('.copilotKitMessage')).toHaveCount(3); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage')).toHaveCount(2); - await expect(page.locator('.copilotKitMessage.copilotKitUserMessage')).toHaveCount(1); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage').last()).toHaveText('four', { ignoreCase: true }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e2/tests/langgraph-fastapi-agentic-chat.spec.ts b/typescript-sdk/apps/dojo/e2e2/tests/langgraph-fastapi-agentic-chat.spec.ts deleted file mode 100644 index 45aae48d7..000000000 --- a/typescript-sdk/apps/dojo/e2e2/tests/langgraph-fastapi-agentic-chat.spec.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { test, expect } from '@playwright/test'; - -test('renders initial message', async ({ page }) => { - await page.goto('http://localhost:9999/langgraph-fastapi/feature/agentic_chat'); - - await expect(page.getByText('Hi, I\'m an agent. Want to chat?')).toBeVisible(); -}); - -test('responds to user message', async ({ page }) => { - await page.goto('http://localhost:9999/langgraph-fastapi/feature/agentic_chat'); - - const textarea = page.getByPlaceholder('Type a message...'); - textarea.fill('How many sides are in a square? Please answer in one word. 
Do not use any punctuation, just the number in word form.'); - await page.keyboard.press('Enter'); - - page.locator('.copilotKitInputControls button.copilotKitInputControlButton').click(); - - await expect(page.locator('.copilotKitMessage')).toHaveCount(3); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage')).toHaveCount(2); - await expect(page.locator('.copilotKitMessage.copilotKitUserMessage')).toHaveCount(1); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage').last()).toHaveText('four', { ignoreCase: true }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e2/tests/langgraph-typescript-agentic-chat.spec.ts b/typescript-sdk/apps/dojo/e2e2/tests/langgraph-typescript-agentic-chat.spec.ts deleted file mode 100644 index 011efd3ff..000000000 --- a/typescript-sdk/apps/dojo/e2e2/tests/langgraph-typescript-agentic-chat.spec.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { test, expect } from '@playwright/test'; - -test('renders initial message', async ({ page }) => { - await page.goto('http://localhost:9999/langgraph-typescript/feature/agentic_chat'); - - await expect(page.getByText('Hi, I\'m an agent. Want to chat?')).toBeVisible(); -}); - -test('responds to user message', async ({ page }) => { - await page.goto('http://localhost:9999/langgraph-typescript/feature/agentic_chat'); - - const textarea = page.getByPlaceholder('Type a message...'); - textarea.fill('How many sides are in a square? Please answer in one word. 
Do not use any punctuation, just the number in word form.'); - await page.keyboard.press('Enter'); - - page.locator('.copilotKitInputControls button.copilotKitInputControlButton').click(); - - await expect(page.locator('.copilotKitMessage')).toHaveCount(3); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage')).toHaveCount(2); - await expect(page.locator('.copilotKitMessage.copilotKitUserMessage')).toHaveCount(1); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage').last()).toHaveText('four', { ignoreCase: true }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e2/tests/llama-index-agentic-chat.spec.ts b/typescript-sdk/apps/dojo/e2e2/tests/llama-index-agentic-chat.spec.ts deleted file mode 100644 index cf2871800..000000000 --- a/typescript-sdk/apps/dojo/e2e2/tests/llama-index-agentic-chat.spec.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { test, expect } from '@playwright/test'; - -test('renders initial message', async ({ page }) => { - await page.goto('http://localhost:9999/llama-index/feature/agentic_chat'); - - await expect(page.getByText('Hi, I\'m an agent. Want to chat?')).toBeVisible(); -}); - -test('responds to user message', async ({ page }) => { - await page.goto('http://localhost:9999/llama-index/feature/agentic_chat'); - - const textarea = page.getByPlaceholder('Type a message...'); - textarea.fill('How many sides are in a square? Please answer in one word. 
Do not use any punctuation, just the number in word form.'); - await page.keyboard.press('Enter'); - - page.locator('.copilotKitInputControls button.copilotKitInputControlButton').click(); - - await expect(page.locator('.copilotKitMessage')).toHaveCount(3); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage')).toHaveCount(2); - await expect(page.locator('.copilotKitMessage.copilotKitUserMessage')).toHaveCount(1); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage').last()).toHaveText('four', { ignoreCase: true }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e2/tests/mastra-agentic-chat.spec.ts b/typescript-sdk/apps/dojo/e2e2/tests/mastra-agentic-chat.spec.ts deleted file mode 100644 index 604b0652e..000000000 --- a/typescript-sdk/apps/dojo/e2e2/tests/mastra-agentic-chat.spec.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { test, expect } from '@playwright/test'; - -test('renders initial message', async ({ page }) => { - await page.goto('http://localhost:9999/mastra/feature/agentic_chat'); - - await expect(page.getByText('Hi, I\'m an agent. Want to chat?')).toBeVisible(); -}); - -test('responds to user message', async ({ page }) => { - await page.goto('http://localhost:9999/mastra/feature/agentic_chat'); - - const textarea = page.getByPlaceholder('Type a message...'); - textarea.fill('How many sides are in a square? Please answer in one word. 
Do not use any punctuation, just the number in word form.'); - await page.keyboard.press('Enter'); - - page.locator('.copilotKitInputControls button.copilotKitInputControlButton').click(); - - await expect(page.locator('.copilotKitMessage')).toHaveCount(3); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage')).toHaveCount(2); - await expect(page.locator('.copilotKitMessage.copilotKitUserMessage')).toHaveCount(1); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage').last()).toHaveText('four', { ignoreCase: true }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e2/tests/pydantic-agentic-chat.spec.ts b/typescript-sdk/apps/dojo/e2e2/tests/pydantic-agentic-chat.spec.ts deleted file mode 100644 index 1f26ad2cf..000000000 --- a/typescript-sdk/apps/dojo/e2e2/tests/pydantic-agentic-chat.spec.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { test, expect } from '@playwright/test'; - -test('renders initial message', async ({ page }) => { - await page.goto('http://localhost:9999/pydantic-ai/feature/agentic_chat'); - - await expect(page.getByText('Hi, I\'m an agent. Want to chat?')).toBeVisible(); -}); - -test('responds to user message', async ({ page }) => { - await page.goto('http://localhost:9999/pydantic-ai/feature/agentic_chat'); - - const textarea = page.getByPlaceholder('Type a message...'); - textarea.fill('How many sides are in a square? Please answer in one word. 
Do not use any punctuation, just the number in word form.'); - await page.keyboard.press('Enter'); - - page.locator('.copilotKitInputControls button.copilotKitInputControlButton').click(); - - await expect(page.locator('.copilotKitMessage')).toHaveCount(3); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage')).toHaveCount(2); - await expect(page.locator('.copilotKitMessage.copilotKitUserMessage')).toHaveCount(1); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage').last()).toHaveText('four', { ignoreCase: true }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/e2e2/tests/vercel-ai-sdk-agentic-chat.spec.ts b/typescript-sdk/apps/dojo/e2e2/tests/vercel-ai-sdk-agentic-chat.spec.ts deleted file mode 100644 index 49640fde8..000000000 --- a/typescript-sdk/apps/dojo/e2e2/tests/vercel-ai-sdk-agentic-chat.spec.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { test, expect } from '@playwright/test'; - -test('renders initial message', async ({ page }) => { - await page.goto('http://localhost:9999/vercel-ai-sdk/feature/agentic_chat'); - - await expect(page.getByText('Hi, I\'m an agent. Want to chat?')).toBeVisible(); -}); - -test('responds to user message', async ({ page }) => { - await page.goto('http://localhost:9999/vercel-ai-sdk/feature/agentic_chat'); - - const textarea = page.getByPlaceholder('Type a message...'); - textarea.fill('How many sides are in a square? Please answer in one word. 
Do not use any punctuation, just the number in word form.'); - await page.keyboard.press('Enter'); - - page.locator('.copilotKitInputControls button.copilotKitInputControlButton').click(); - - await expect(page.locator('.copilotKitMessage')).toHaveCount(3); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage')).toHaveCount(2); - await expect(page.locator('.copilotKitMessage.copilotKitUserMessage')).toHaveCount(1); - await expect(page.locator('.copilotKitMessage.copilotKitAssistantMessage').last()).toHaveText('four', { ignoreCase: true }); -}); \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/scripts/generate-content-json.ts b/typescript-sdk/apps/dojo/scripts/generate-content-json.ts index b24119081..d0a29c236 100644 --- a/typescript-sdk/apps/dojo/scripts/generate-content-json.ts +++ b/typescript-sdk/apps/dojo/scripts/generate-content-json.ts @@ -147,7 +147,7 @@ const agentFilesMapper: Record Record { return agentKeys.reduce((acc, agentId) => ({ ...acc, - [agentId]: [`https://github.com/pydantic/pydantic-ai/blob/main/examples/pydantic_ai_examples/ag_ui/api/${agentId}.py`] + [agentId]: [path.join(__dirname, integrationsFolderPath, `/pydantic-ai/examples/server/api/${agentId}.py`)] }), {}) }, 'server-starter': () => ({ @@ -204,6 +204,12 @@ const agentFilesMapper: Record Record { + return agentKeys.reduce((acc, agentId) => ({ + ...acc, + [agentId]: [path.join(__dirname, integrationsFolderPath, `/adk-middleware/examples/server/api/${agentId}.py`)] + }), {}) } } diff --git a/typescript-sdk/apps/dojo/scripts/prep-dojo-everything.js b/typescript-sdk/apps/dojo/scripts/prep-dojo-everything.js index bf4faf463..d801168ef 100755 --- a/typescript-sdk/apps/dojo/scripts/prep-dojo-everything.js +++ b/typescript-sdk/apps/dojo/scripts/prep-dojo-everything.js @@ -9,17 +9,36 @@ const args = process.argv.slice(2); const showHelp = args.includes('--help') || args.includes('-h'); const dryRun = args.includes('--dry-run'); +// selection controls 
+function parseList(flag) { + const idx = args.indexOf(flag); + if (idx !== -1 && args[idx + 1]) { + return args[idx + 1] + .split(',') + .map((s) => s.trim()) + .filter(Boolean); + } + return null; +} + +const onlyList = parseList('--only') || parseList('--include'); +const excludeList = parseList('--exclude') || []; + if (showHelp) { console.log(` Usage: node prep-dojo-everything.js [options] Options: --dry-run Show what would be installed without actually running + --only list Comma-separated services to include (defaults to all) + --exclude list Comma-separated services to exclude --help, -h Show this help message Examples: node prep-dojo-everything.js node prep-dojo-everything.js --dry-run + node prep-dojo-everything.js --only dojo,agno + node prep-dojo-everything.js --exclude crew-ai,mastra `); process.exit(0); } @@ -29,78 +48,67 @@ const integrationsRoot = path.join(gitRoot, 'typescript-sdk', 'integrations'); -// Server Starter -const serverStarter = { - command: 'poetry install', - name: 'Server Starter', - cwd: path.join(integrationsRoot, 'server-starter/server/python'), -} - -// Server Starter All Features -const serverStarterAllFeatures = { - command: 'poetry install', - name: 'Server AF', - cwd: path.join(integrationsRoot, 'server-starter-all-features/server/python'), -} - -// Agno -const agno = { - command: 'uv sync', - name: 'Agno', - cwd: path.join(integrationsRoot, 'agno/examples'), -} - -// CrewAI -const crewai = { - command: 'poetry install', - name: 'CrewAI', - cwd: path.join(integrationsRoot, 'crewai/python'), -} - -// Langgraph (FastAPI) -const langgraphFastapi = { - command: 'poetry install', - name: 'LG FastAPI', - cwd: path.join(integrationsRoot, 'langgraph/examples/python'), - env: { - POETRY_VIRTUALENVS_IN_PROJECT: "false" - } -} - -// Langgraph (Platorm {typescript}) -const langgraphPlatformTypescript = { - command: 'pnpm install', - name: 'LG Platform TS', - cwd: path.join(integrationsRoot, 'langgraph/examples/typescript/'), -} - -// 
Llama Index -const llamaIndex = { - command: 'uv sync', - name: 'Llama Index', - cwd: path.join(integrationsRoot, 'llamaindex/server-py'), -} - -// Mastra -const mastra = { - command: 'npm install', - name: 'Mastra', - cwd: path.join(integrationsRoot, 'mastra/example'), -} - -// Pydantic AI -const pydanticAi = { - command: 'uv sync', - name: 'Pydantic AI', - cwd: path.join(integrationsRoot, 'pydantic-ai/examples'), -} - -// THE ACTUAL DOJO -const dojo = { - command: 'pnpm install --no-frozen-lockfile && pnpm build --filter=demo-viewer...', - name: 'Dojo', - cwd: path.join(gitRoot, 'typescript-sdk'), -} +// Define all prep targets keyed by a stable id +const ALL_TARGETS = { + 'server-starter': { + command: 'poetry install', + name: 'Server Starter', + cwd: path.join(integrationsRoot, 'server-starter/server/python'), + }, + 'server-starter-all': { + command: 'poetry install', + name: 'Server AF', + cwd: path.join(integrationsRoot, 'server-starter-all-features/server/python'), + }, + 'agno': { + command: 'uv sync', + name: 'Agno', + cwd: path.join(integrationsRoot, 'agno/examples'), + }, + 'crew-ai': { + command: 'poetry install', + name: 'CrewAI', + cwd: path.join(integrationsRoot, 'crewai/python'), + }, + 'langgraph-fastapi': { + command: 'poetry install', + name: 'LG FastAPI', + cwd: path.join(integrationsRoot, 'langgraph/examples/python'), + env: { + POETRY_VIRTUALENVS_IN_PROJECT: "false", + }, + }, + 'langgraph-platform-typescript': { + command: 'pnpm install', + name: 'LG Platform TS', + cwd: path.join(integrationsRoot, 'langgraph/examples/typescript/'), + }, + 'llama-index': { + command: 'uv sync', + name: 'Llama Index', + cwd: path.join(integrationsRoot, 'llamaindex/server-py'), + }, + 'mastra': { + command: 'npm install', + name: 'Mastra', + cwd: path.join(integrationsRoot, 'mastra/example'), + }, + 'pydantic-ai': { + command: 'uv sync', + name: 'Pydantic AI', + cwd: path.join(integrationsRoot, 'pydantic-ai/examples'), + }, + 'adk-middleware': { + command: 
'uv sync', + name: 'ADK Middleware', + cwd: path.join(integrationsRoot, 'adk-middleware/examples'), + }, + 'dojo': { + command: 'pnpm install --no-frozen-lockfile && pnpm build --filter=demo-viewer...', + name: 'Dojo', + cwd: path.join(gitRoot, 'typescript-sdk'), + }, +}; function printDryRunServices(procs) { console.log('Dry run - would install dependencies for the following services:'); @@ -113,18 +121,25 @@ function printDryRunServices(procs) { } async function main() { - const procs = [ - serverStarter, - serverStarterAllFeatures, - agno, - crewai, - langgraphFastapi, - langgraphPlatformTypescript, - llamaIndex, - mastra, - pydanticAi, - dojo - ]; + // determine selection + let selectedKeys = Object.keys(ALL_TARGETS); + if (onlyList && onlyList.length) { + selectedKeys = onlyList; + } + if (excludeList && excludeList.length) { + selectedKeys = selectedKeys.filter((k) => !excludeList.includes(k)); + } + + // Build procs list, warning on unknown keys + const procs = []; + for (const key of selectedKeys) { + const target = ALL_TARGETS[key]; + if (!target) { + console.warn(`Skipping unknown service: ${key}`); + continue; + } + procs.push(target); + } if (dryRun) { printDryRunServices(procs); diff --git a/typescript-sdk/apps/dojo/scripts/run-dojo-everything.js b/typescript-sdk/apps/dojo/scripts/run-dojo-everything.js index ec945ec00..1d52082b4 100755 --- a/typescript-sdk/apps/dojo/scripts/run-dojo-everything.js +++ b/typescript-sdk/apps/dojo/scripts/run-dojo-everything.js @@ -9,17 +9,35 @@ const args = process.argv.slice(2); const showHelp = args.includes('--help') || args.includes('-h'); const dryRun = args.includes('--dry-run'); +function parseList(flag) { + const idx = args.indexOf(flag); + if (idx !== -1 && args[idx + 1]) { + return args[idx + 1] + .split(',') + .map((s) => s.trim()) + .filter(Boolean); + } + return null; +} + +const onlyList = parseList('--only') || parseList('--include'); +const excludeList = parseList('--exclude') || []; + if (showHelp) { 
console.log(` Usage: node run-dojo-everything.js [options] Options: --dry-run Show what would be started without actually running + --only list Comma-separated services to include (defaults to all) + --exclude list Comma-separated services to exclude --help, -h Show this help message Examples: node run-dojo-everything.js node run-dojo-everything.js --dry-run + node run-dojo-everything.js --only dojo,server-starter + node run-dojo-everything.js --exclude crew-ai,mastra `); process.exit(0); } @@ -27,123 +45,98 @@ Examples: const gitRoot = execSync('git rev-parse --show-toplevel', { encoding: 'utf-8' }).trim(); const integrationsRoot = path.join(gitRoot, 'typescript-sdk', 'integrations'); -// Server Starter -const serverStarter = { - command: 'poetry run dev', - name: 'Server Starter', - cwd: path.join(integrationsRoot, 'server-starter/server/python'), - env: {PORT: 8000}, -} - -// Server Starter All Features -const serverStarterAllFeatures = { - command: 'poetry run dev', - name: 'Server AF', - cwd: path.join(integrationsRoot, 'server-starter-all-features/server/python'), - env: {PORT: 8001}, -} - -// Agno -const agno = { - command: 'uv run dev', - name: 'Agno', - cwd: path.join(integrationsRoot, 'agno/examples'), - env: {PORT: 8002}, -} - -// CrewAI -const crewai = { - command: 'poetry run dev', - name: 'CrewAI', - cwd: path.join(integrationsRoot, 'crewai/python'), - env: {PORT: 8003}, -} - -// Langgraph (FastAPI) -const langgraphFastapi = { - command: 'poetry run dev', - name: 'LG FastAPI', - cwd: path.join(integrationsRoot, 'langgraph/examples/python'), - env: { - PORT: 8004, - POETRY_VIRTUALENVS_IN_PROJECT: "false" +// Define all runnable services keyed by a stable id +const ALL_SERVICES = { + 'server-starter': { + command: 'poetry run dev', + name: 'Server Starter', + cwd: path.join(integrationsRoot, 'server-starter/server/python'), + env: { PORT: 8000 }, }, -} - -// Langgraph (Platform {python}) -const langgraphPlatformPython = { - command: 'pnpx 
@langchain/langgraph-cli@latest dev --no-browser --port 8005', - name: 'LG Platform Py', - cwd: path.join(integrationsRoot, 'langgraph/examples/python'), - env: {PORT: 8005}, -} - -// Langgraph (Platform {typescript}) -const langgraphPlatformTypescript = { - command: 'pnpx @langchain/langgraph-cli@latest dev --no-browser --port 8006', - name: 'LG Platform TS', - cwd: path.join(integrationsRoot, 'langgraph/examples/typescript/'), - env: {PORT: 8006}, -} - -// Llama Index -const llamaIndex = { - command: 'uv run dev', - name: 'Llama Index', - cwd: path.join(integrationsRoot, 'llamaindex/server-py'), - env: {PORT: 8007}, -} - -// Mastra -const mastra = { - command: 'npm run dev', - name: 'Mastra', - cwd: path.join(integrationsRoot, 'mastra/example'), - env: {PORT: 8008}, -} - -// Pydantic AI -const pydanticAi = { - command: 'uv run dev', - name: 'Pydantic AI', - cwd: path.join(integrationsRoot, 'pydantic-ai/examples'), - env: {PORT: 8009}, -} - -// THE ACTUAL DOJO -const dojo = { - command: 'pnpm run start', - name: 'Dojo', - cwd: path.join(gitRoot, 'typescript-sdk/apps/dojo'), - env: { - PORT: 9999, - SERVER_STARTER_URL: 'http://localhost:8000', - SERVER_STARTER_ALL_FEATURES_URL: 'http://localhost:8001', - AGNO_URL: 'http://localhost:8002', - CREW_AI_URL: 'http://localhost:8003', - LANGGRAPH_FAST_API_URL: 'http://localhost:8004', - LANGGRAPH_PYTHON_URL: 'http://localhost:8005', - LANGGRAPH_TYPESCRIPT_URL: 'http://localhost:8006', - LLAMA_INDEX_URL: 'http://localhost:8007', - MASTRA_URL: 'http://localhost:8008', - PYDANTIC_AI_URL: 'http://localhost:8009', - NEXT_PUBLIC_CUSTOM_DOMAIN_TITLE: 'cpkdojo.local___CopilotKit Feature Viewer', - } -} - -const procs = [ - serverStarter, - serverStarterAllFeatures, - agno, - crewai, - langgraphFastapi, - langgraphPlatformPython, - langgraphPlatformTypescript, - llamaIndex, - mastra, - pydanticAi, - dojo -]; + 'server-starter-all': { + command: 'poetry run dev', + name: 'Server AF', + cwd: path.join(integrationsRoot, 
'server-starter-all-features/server/python'), + env: { PORT: 8001 }, + }, + 'agno': { + command: 'uv run dev', + name: 'Agno', + cwd: path.join(integrationsRoot, 'agno/examples'), + env: { PORT: 8002 }, + }, + 'crew-ai': { + command: 'poetry run dev', + name: 'CrewAI', + cwd: path.join(integrationsRoot, 'crewai/python'), + env: { PORT: 8003 }, + }, + 'langgraph-fastapi': { + command: 'poetry run dev', + name: 'LG FastAPI', + cwd: path.join(integrationsRoot, 'langgraph/examples/python'), + env: { + PORT: 8004, + POETRY_VIRTUALENVS_IN_PROJECT: 'false', + }, + }, + 'langgraph-platform-python': { + command: 'pnpx @langchain/langgraph-cli@latest dev --no-browser --host 127.0.0.1 --port 8005', + name: 'LG Platform Py', + cwd: path.join(integrationsRoot, 'langgraph/examples/python'), + env: { PORT: 8005 }, + }, + 'langgraph-platform-typescript': { + command: 'pnpx @langchain/langgraph-cli@latest dev --no-browser --host 127.0.0.1 --port 8006', + name: 'LG Platform TS', + cwd: path.join(integrationsRoot, 'langgraph/examples/typescript/'), + env: { PORT: 8006 }, + }, + 'llama-index': { + command: 'uv run dev', + name: 'Llama Index', + cwd: path.join(integrationsRoot, 'llamaindex/server-py'), + env: { PORT: 8007 }, + }, + 'mastra': { + command: 'npm run dev', + name: 'Mastra', + cwd: path.join(integrationsRoot, 'mastra/example'), + env: { PORT: 8008 }, + }, + 'pydantic-ai': { + command: 'uv run dev', + name: 'Pydantic AI', + cwd: path.join(integrationsRoot, 'pydantic-ai/examples'), + env: { PORT: 8009 }, + }, + 'adk-middleware': { + command: 'uv run dev', + name: 'ADK Middleware', + cwd: path.join(integrationsRoot, 'adk-middleware/examples'), + env: { PORT: 8010 }, + }, + 'dojo': { + command: 'pnpm run start', + name: 'Dojo', + cwd: path.join(gitRoot, 'typescript-sdk/apps/dojo'), + env: { + PORT: 9999, + SERVER_STARTER_URL: 'http://localhost:8000', + SERVER_STARTER_ALL_FEATURES_URL: 'http://localhost:8001', + AGNO_URL: 'http://localhost:8002', + CREW_AI_URL: 
'http://localhost:8003', + LANGGRAPH_FAST_API_URL: 'http://localhost:8004', + LANGGRAPH_PYTHON_URL: 'http://localhost:8005', + LANGGRAPH_TYPESCRIPT_URL: 'http://localhost:8006', + LLAMA_INDEX_URL: 'http://localhost:8007', + MASTRA_URL: 'http://localhost:8008', + PYDANTIC_AI_URL: 'http://localhost:8009', + ADK_MIDDLEWARE_URL: 'http://localhost:8010', + NEXT_PUBLIC_CUSTOM_DOMAIN_TITLE: 'cpkdojo.local___CopilotKit Feature Viewer', + }, + }, +}; function printDryRunServices(procs) { console.log('Dry run - would start the following services:'); @@ -164,18 +157,40 @@ function printDryRunServices(procs) { } async function main() { + // determine selection + let selectedKeys = Object.keys(ALL_SERVICES); + if (onlyList && onlyList.length) { + selectedKeys = onlyList; + } + if (excludeList && excludeList.length) { + selectedKeys = selectedKeys.filter((k) => !excludeList.includes(k)); + } + + // Build processes, warn for unknown keys + const procs = []; + for (const key of selectedKeys) { + const svc = ALL_SERVICES[key]; + if (!svc) { + console.warn(`Skipping unknown service: ${key}`); + continue; + } + procs.push(svc); + } + if (dryRun) { printDryRunServices(procs); } - console.log('Starting services: ', procs.map(p => p.name).join(', ')); + console.log('Starting services: ', procs.map((p) => p.name).join(', ')); - const {result} = concurrently(procs, {killOthersOn: ['failure', 'success']}); + const { result } = concurrently(procs, { killOthersOn: ['failure', 'success'] }); - result.then(() => process.exit(0)).catch((err) => { - console.error(err); - process.exit(1); - }); + result + .then(() => process.exit(0)) + .catch((err) => { + console.error(err); + process.exit(1); + }); } main(); diff --git a/typescript-sdk/apps/dojo/src/agents.ts b/typescript-sdk/apps/dojo/src/agents.ts index 5e2e85044..ba84a99c5 100644 --- a/typescript-sdk/apps/dojo/src/agents.ts +++ b/typescript-sdk/apps/dojo/src/agents.ts @@ -39,9 +39,10 @@ export const agentsIntegrations: 
AgentIntegrationConfig[] = [ human_in_the_loop: new PydanticAIAgent({ url: `${envVars.pydanticAIUrl}/human_in_the_loop/`, }), - predictive_state_updates: new PydanticAIAgent({ - url: `${envVars.pydanticAIUrl}/predictive_state_updates/`, - }), + // Disabled until we can figure out why production builds break + // predictive_state_updates: new PydanticAIAgent({ + // url: `${envVars.pydanticAIUrl}/predictive_state_updates/`, + // }), shared_state: new PydanticAIAgent({ url: `${envVars.pydanticAIUrl}/shared_state/`, }), @@ -59,6 +60,18 @@ export const agentsIntegrations: AgentIntegrationConfig[] = [ }; }, }, + { + id: "adk-middleware", + agents: async () => { + return { + agentic_chat: new ServerStarterAgent({ url: `${envVars.adkMiddlewareUrl}/chat` }), + tool_based_generative_ui: new ServerStarterAgent({ url: `${envVars.adkMiddlewareUrl}/adk-tool-based-generative-ui` }), + human_in_the_loop: new ServerStarterAgent({ url: `${envVars.adkMiddlewareUrl}/adk-human-in-loop-agent` }), + shared_state: new ServerStarterAgent({ url: `${envVars.adkMiddlewareUrl}/adk-shared-state-agent` }), + // predictive_state_updates: new ServerStarterAgent({ url: `${envVars.adkMiddlewareUrl}/adk-predictive-state-agent` }), + }; + }, + }, { id: "server-starter-all-features", agents: async () => { @@ -141,6 +154,10 @@ export const agentsIntegrations: AgentIntegrationConfig[] = [ agentic_chat_reasoning: new LangGraphHttpAgent({ url: `${envVars.langgraphPythonUrl}/agent/agentic_chat_reasoning`, }), + subgraphs: new LangGraphAgent({ + deploymentUrl: envVars.langgraphPythonUrl, + graphId: "subgraphs", + }), }; }, }, @@ -169,6 +186,9 @@ export const agentsIntegrations: AgentIntegrationConfig[] = [ agentic_chat_reasoning: new LangGraphHttpAgent({ url: `${envVars.langgraphFastApiUrl}/agent/agentic_chat_reasoning`, }), + subgraphs: new LangGraphHttpAgent({ + url: `${envVars.langgraphFastApiUrl}/agent/subgraphs`, + }), }; }, }, @@ -199,6 +219,10 @@ export const agentsIntegrations: 
AgentIntegrationConfig[] = [ tool_based_generative_ui: new LangGraphAgent({ deploymentUrl: envVars.langgraphTypescriptUrl, graphId: "tool_based_generative_ui", + }), + subgraphs: new LangGraphAgent({ + deploymentUrl: envVars.langgraphTypescriptUrl, + graphId: "subgraphs", }) }; }, diff --git a/typescript-sdk/apps/dojo/src/app/[integrationId]/feature/layout.tsx b/typescript-sdk/apps/dojo/src/app/[integrationId]/feature/layout.tsx index 3a2bc8ddb..bd13869fa 100644 --- a/typescript-sdk/apps/dojo/src/app/[integrationId]/feature/layout.tsx +++ b/typescript-sdk/apps/dojo/src/app/[integrationId]/feature/layout.tsx @@ -34,8 +34,10 @@ export default function FeatureLayout({ children, params }: Props) { const files = (filesJSON as FilesJsonType)[`${integrationId}::${featureId}`] || []; - const readme = files.find(file => file.name.includes('.mdx')); - const codeFiles = files.filter(file => !file.name.includes('.mdx')); + const readme = files.find((file) => file?.name?.includes(".mdx")) || null; + const codeFiles = files.filter( + (file) => file && Object.keys(file).length > 0 && !file.name?.includes(".mdx"), + ); const content = useMemo(() => { @@ -55,5 +57,11 @@ export default function FeatureLayout({ children, params }: Props) { } }, [children, codeFiles, readme, view]) - return
{content}
; + return ( +
+
+ {content} +
+
+ ); } diff --git a/typescript-sdk/apps/dojo/src/app/[integrationId]/feature/tool_based_generative_ui/page.tsx b/typescript-sdk/apps/dojo/src/app/[integrationId]/feature/tool_based_generative_ui/page.tsx index cd7e19a77..e8387c8f7 100644 --- a/typescript-sdk/apps/dojo/src/app/[integrationId]/feature/tool_based_generative_ui/page.tsx +++ b/typescript-sdk/apps/dojo/src/app/[integrationId]/feature/tool_based_generative_ui/page.tsx @@ -14,31 +14,23 @@ interface ToolBasedGenerativeUIProps { }>; } -interface GenerateHaiku{ - japanese : string[] | [], - english : string[] | [], - image_names : string[] | [], - selectedImage : string | null, +interface GenerateHaiku { + japanese: string[] | [], + english: string[] | [], + image_names: string[] | [], + selectedImage: string | null, } -interface HaikuCardProps{ - generatedHaiku : GenerateHaiku | Partial - setHaikus : Dispatch> - haikus : GenerateHaiku[] +interface HaikuCardProps { + generatedHaiku: GenerateHaiku | Partial + setHaikus: Dispatch> + haikus: GenerateHaiku[] } export default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) { const { integrationId } = React.use(params); const { isMobile } = useMobileView(); - const defaultChatHeight = 50 - const { - isChatOpen, - setChatHeight, - setIsChatOpen, - isDragging, - chatHeight, - handleDragStart - } = useMobileChat(defaultChatHeight) + const chatTitle = 'Haiku Generator' const chatDescription = 'Ask me to create haikus' @@ -52,12 +44,6 @@ export default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIP >
@@ -74,92 +60,105 @@ export default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIP )} {/* Mobile Pull-Up Chat */} - {isMobile && ( - <> - {/* Chat Toggle Button */} -
-
-
{ - if (!isChatOpen) { - setChatHeight(defaultChatHeight); // Reset to good default when opening - } - setIsChatOpen(!isChatOpen); - }} - > -
-
-
{chatTitle}
-
{chatDescription}
-
-
-
- - - -
-
+ {isMobile && } +
+ + ); +} + +function MobileChat({ chatTitle, chatDescription, initialLabel }: { chatTitle: string, chatDescription: string, initialLabel: string }) { + const defaultChatHeight = 50 + + const { + isChatOpen, + setChatHeight, + setIsChatOpen, + isDragging, + chatHeight, + handleDragStart + } = useMobileChat(defaultChatHeight) + return ( + <> + {/* Chat Toggle Button */} +
+
+
{ + if (!isChatOpen) { + setChatHeight(defaultChatHeight); // Reset to good default when opening + } + setIsChatOpen(!isChatOpen); + }} + > +
+
+
{chatTitle}
+
{chatDescription}
+
+
+ + + +
+
+
- {/* Pull-Up Chat Container */} -
- {/* Drag Handle Bar */} -
-
-
- - {/* Chat Header */} -
-
-
-

{chatTitle}

-
- -
-
+ {/* Pull-Up Chat Container */} +
+ {/* Drag Handle Bar */} +
+
+
- {/* Chat Content - Flexible container for messages and input */} -
- -
+ {/* Chat Header */} +
+
+
+

{chatTitle}

+ +
+
- {/* Backdrop */} - {isChatOpen && ( -
setIsChatOpen(false)} - /> - )} - - )} + {/* Chat Content - Flexible container for messages and input */} +
+ +
- - ); + + {/* Backdrop */} + {isChatOpen && ( +
setIsChatOpen(false)} + /> + )} + + ) } const VALID_IMAGE_NAMES = [ @@ -175,14 +174,45 @@ const VALID_IMAGE_NAMES = [ "Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg" ]; -function HaikuCard({generatedHaiku, setHaikus, haikus} : HaikuCardProps) { +function getRandomImage(): string { + return VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)]; +} + +const validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => { + if (!rawNames || rawNames.length !== 3) { + return null; + } + + const correctedNames: string[] = []; + const usedValidNames = new Set(); + + for (const name of rawNames) { + if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) { + correctedNames.push(name); + usedValidNames.add(name); + if (correctedNames.length === 3) break; + } + } + + while (correctedNames.length < 3) { + const nextImage = getRandomImage(); + if (!usedValidNames.has(nextImage)) { + correctedNames.push(nextImage); + usedValidNames.add(nextImage); + } + } + + return correctedNames.slice(0, 3); +}; + +function HaikuCard({ generatedHaiku, setHaikus, haikus }: HaikuCardProps) { return ( -
+
{generatedHaiku?.japanese?.map((line, index) => ( -
+

{line}

{generatedHaiku.english?.[index]} @@ -250,48 +280,8 @@ function Haiku() { const [activeIndex, setActiveIndex] = useState(0); const [isJustApplied, setIsJustApplied] = useState(false); - const validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => { - if (!rawNames || rawNames.length !== 3) { - return null; - } - - const correctedNames: string[] = []; - const usedValidNames = new Set(); - - for (const name of rawNames) { - if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) { - correctedNames.push(name); - usedValidNames.add(name); - if (correctedNames.length === 3) break; - } - } - - if (correctedNames.length < 3) { - const availableFallbacks = VALID_IMAGE_NAMES.filter(name => !usedValidNames.has(name)); - for (let i = availableFallbacks.length - 1; i > 0; i--) { - const j = Math.floor(Math.random() * (i + 1)); - [availableFallbacks[i], availableFallbacks[j]] = [availableFallbacks[j], availableFallbacks[i]]; - } - - while (correctedNames.length < 3 && availableFallbacks.length > 0) { - const fallbackName = availableFallbacks.pop(); - if (fallbackName) { - correctedNames.push(fallbackName); - } - } - } - - while (correctedNames.length < 3 && VALID_IMAGE_NAMES.length > 0) { - const fallbackName = VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)]; - correctedNames.push(fallbackName); - } - - return correctedNames.slice(0, 3); - }; - useCopilotAction({ name: "generate_haiku", - available: "frontend", parameters: [ { name: "japanese", @@ -304,7 +294,7 @@ function Haiku() { { name: "image_names", type: "string[]", - description: "Names of 3 relevant images", + description: `Names of 3 relevant images selected from the following: \n -${VALID_IMAGE_NAMES.join('\n -')}`, }, ], followUp: false, @@ -316,7 +306,7 @@ function Haiku() { image_names: finalCorrectedImages || [], selectedImage: finalCorrectedImages?.[0] || null, }; - setHaikus(prev => [...prev, newHaiku]); + setHaikus(prev => [newHaiku, 
...prev].filter(h => h.english[0] !== "A placeholder verse—")); setActiveIndex(haikus.length - 1); setIsJustApplied(true); setTimeout(() => setIsJustApplied(false), 600); @@ -329,135 +319,141 @@ function Haiku() { }, }, [haikus]); - const generatedHaikus = useMemo(() => ( - haikus.filter((haiku) => haiku.english[0] !== "A placeholder verse—") - ), [haikus]); - const { isMobile } = useMobileView(); return (

- {/* Thumbnail List */} - {Boolean(generatedHaikus.length) && !isMobile && ( -
- {generatedHaikus.map((haiku, index) => ( -
setActiveIndex(index)} - > - {haiku.japanese.map((line, lineIndex) => ( -
-

{line}

-

{haiku.english?.[lineIndex]}

-
- ))} - {haiku.image_names && haiku.image_names.length === 3 && ( -
- {haiku.image_names.map((imageName, imgIndex) => ( - {imageName - ))} -
- )} -
- ))} -
- )} + {/* Main Display */} -
+
- {haikus.filter((_haiku: Haiku, index: number) => { - if (haikus.length == 1) return true; - else return index == activeIndex + 1; - }).map((haiku, index) => ( -
- {haiku.japanese.map((line, lineIndex) => ( -
( + (haikus.length == 1 || index == activeIndex) && ( + +
+ {haiku.japanese.map((line, lineIndex) => ( +
-

+

- {line} -

-

+ {line} +

+

- {haiku.english?.[lineIndex]} -

-
- ))} - {haiku.image_names && haiku.image_names.length === 3 && ( -
+ {haiku.english?.[lineIndex]} +

+
+ ))} + {haiku.image_names && haiku.image_names.length === 3 && ( +
- {haiku.image_names.map((imageName, imgIndex) => ( - {imageName - ))} -
- )} -
+ }`}> + {haiku.image_names.map((imageName, imgIndex) => ( + {imageName setHaikus((prevHaikus) => { + return prevHaikus.map((h, idx) => { + if (idx === index) { + return { ...h, selectedImage: imageName } + } else { + return { ...h } + } + }) + })} + /> + ))} +
+ )} +
+ ) ))}
); } + +function Thumbnails({ haikus, activeIndex, setActiveIndex, isMobile }: { haikus: Haiku[], activeIndex: number, setActiveIndex: (index: number) => void, isMobile: boolean }) { + if (haikus.length == 0 || isMobile) { return null } + return ( +
+ {haikus.map((haiku, index) => ( +
setActiveIndex(index)} + > + {haiku.japanese.map((line, lineIndex) => ( +
+

{line}

+

{haiku.english?.[lineIndex]}

+
+ ))} + {haiku.image_names && haiku.image_names.length === 3 && ( +
+ {haiku.image_names.map((imageName, imgIndex) => ( + {imageName + ))} +
+ )} +
+ ))} +
+ ) + +} \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/src/app/layout.tsx b/typescript-sdk/apps/dojo/src/app/layout.tsx index e50d64878..33145d123 100644 --- a/typescript-sdk/apps/dojo/src/app/layout.tsx +++ b/typescript-sdk/apps/dojo/src/app/layout.tsx @@ -32,8 +32,9 @@ export default function RootLayout({ diff --git a/typescript-sdk/apps/dojo/src/components/sidebar/sidebar.tsx b/typescript-sdk/apps/dojo/src/components/sidebar/sidebar.tsx index bd069df3b..d2b1a83c9 100644 --- a/typescript-sdk/apps/dojo/src/components/sidebar/sidebar.tsx +++ b/typescript-sdk/apps/dojo/src/components/sidebar/sidebar.tsx @@ -21,6 +21,7 @@ import { Feature } from "@/types/integration"; import { useURLParams } from "@/contexts/url-params-context"; import { View } from "@/types/interface"; import { getTitleForCurrentDomain } from "@/utils/domain-config"; +import { useTheme } from "next-themes"; interface SidebarProps { isMobile?: boolean; @@ -30,8 +31,9 @@ interface SidebarProps { export function Sidebar({ isMobile, onMobileClose }: SidebarProps) { const router = useRouter(); const pathname = usePathname(); + const { theme, setTheme } = useTheme(); + const isDarkTheme = theme === "dark" const { view, frameworkPickerHidden, viewPickerHidden, featurePickerHidden, setView} = useURLParams(); - const [isDarkTheme, setIsDarkTheme] = useState(false); // Extract the current integration ID from the pathname const pathParts = pathname.split("/"); @@ -67,38 +69,6 @@ export function Sidebar({ isMobile, onMobileClose }: SidebarProps) { router.push(`/${integrationId}`); }; - // Check for dark mode using media query - useEffect(() => { - // Check if we're in the browser - if (typeof window !== "undefined") { - // Initial check - setIsDarkTheme(window.matchMedia("(prefers-color-scheme: dark)").matches); - - // Listen for changes - const mediaQuery = window.matchMedia("(prefers-color-scheme: dark)"); - const handleChange = (e: MediaQueryListEvent) => { - setIsDarkTheme(e.matches); - 
}; - - mediaQuery.addEventListener("change", handleChange); - - // Also check for .dark class which is added by next-themes - const observer = new MutationObserver(() => { - setIsDarkTheme(document.documentElement.classList.contains("dark")); - }); - - observer.observe(document.documentElement, { - attributes: true, - attributeFilter: ["class"], - }); - - return () => { - mediaQuery.removeEventListener("change", handleChange); - observer.disconnect(); - }; - } - }, []); - const tabClass = `cursor-pointer flex-1 h-8 px-2 text-sm text-primary shadow-none bg-none border-none font-medium gap-1 rounded-lg data-[state=active]:bg-white data-[state=active]:text-primary data-[state=active]:shadow-none` return ( diff --git a/typescript-sdk/apps/dojo/src/components/theme-provider.tsx b/typescript-sdk/apps/dojo/src/components/theme-provider.tsx index ae7a50260..9c7db5d49 100644 --- a/typescript-sdk/apps/dojo/src/components/theme-provider.tsx +++ b/typescript-sdk/apps/dojo/src/components/theme-provider.tsx @@ -1,17 +1,8 @@ "use client"; import * as React from "react"; -import { ThemeProvider as NextThemesProvider } from "next-themes"; +import { ThemeProvider as NextThemesProvider, ThemeProviderProps as NextThemeProviderProps } from "next-themes"; -type ThemeProviderProps = { - children: React.ReactNode; - attribute?: string; - defaultTheme?: string; - enableSystem?: boolean; - disableTransitionOnChange?: boolean; -}; - -export function ThemeProvider({ children, ...props }: ThemeProviderProps) { - // @ts-expect-error -- ignore +export function ThemeProvider({ children, ...props }: NextThemeProviderProps) { return {children}; } diff --git a/typescript-sdk/apps/dojo/src/config.ts b/typescript-sdk/apps/dojo/src/config.ts index 198cb4eb2..2dfa78a1e 100644 --- a/typescript-sdk/apps/dojo/src/config.ts +++ b/typescript-sdk/apps/dojo/src/config.ts @@ -59,6 +59,12 @@ export const featureConfig: FeatureConfig[] = [ description: "Chat with a reasoning Copilot and call frontend tools", 
tags: ["Chat", "Tools", "Streaming", "Reasoning"], }), + createFeatureConfig({ + id: "subgraphs", + name: "Subgraphs", + description: "Have your tasks performed by multiple agents, working together", + tags: ["Chat", "Multi-agent architecture", "Streaming", "Subgraphs"], + }), ]; export default featureConfig; diff --git a/typescript-sdk/apps/dojo/src/env.ts b/typescript-sdk/apps/dojo/src/env.ts index 2e9a6a66a..d39498c84 100644 --- a/typescript-sdk/apps/dojo/src/env.ts +++ b/typescript-sdk/apps/dojo/src/env.ts @@ -9,6 +9,7 @@ type envVars = { llamaIndexUrl: string; crewAiUrl: string; pydanticAIUrl: string; + adkMiddlewareUrl: string; customDomainTitle: Record; } @@ -27,11 +28,12 @@ export default function getEnvVars(): envVars { mastraUrl: process.env.MASTRA_URL || 'http://localhost:4111', langgraphPythonUrl: process.env.LANGGRAPH_PYTHON_URL || 'http://localhost:2024', langgraphFastApiUrl: process.env.LANGGRAPH_FAST_API_URL || 'http://localhost:8000', - langgraphTypescriptUrl: process.env.LANGGRAPH_TYPESCRIPT_URL || 'http://localhost:8000', + langgraphTypescriptUrl: process.env.LANGGRAPH_TYPESCRIPT_URL || 'http://localhost:2024', agnoUrl: process.env.AGNO_URL || 'http://localhost:9001', llamaIndexUrl: process.env.LLAMA_INDEX_URL || 'http://localhost:9000', crewAiUrl: process.env.CREW_AI_URL || 'http://localhost:9002', pydanticAIUrl: process.env.PYDANTIC_AI_URL || 'http://localhost:9000', + adkMiddlewareUrl: process.env.ADK_MIDDLEWARE_URL || 'http://localhost:8000', customDomainTitle: customDomainTitle, } } \ No newline at end of file diff --git a/typescript-sdk/apps/dojo/src/files.json b/typescript-sdk/apps/dojo/src/files.json index 4949e57b6..0658d496f 100644 --- a/typescript-sdk/apps/dojo/src/files.json +++ b/typescript-sdk/apps/dojo/src/files.json @@ -72,7 +72,7 @@ }, { "name": "agentic_generative_ui.py", - "content": "\"\"\"Agentic Generative UI feature.\"\"\"\n\nfrom __future__ import annotations\n\nfrom textwrap import dedent\nfrom typing import Any, 
Literal\n\nfrom pydantic import BaseModel, Field\n\nfrom ag_ui.core import EventType, StateDeltaEvent, StateSnapshotEvent\nfrom pydantic_ai import Agent\n\nStepStatus = Literal['pending', 'completed']\n\n\nclass Step(BaseModel):\n \"\"\"Represents a step in a plan.\"\"\"\n\n description: str = Field(description='The description of the step')\n status: StepStatus = Field(\n default='pending',\n description='The status of the step (e.g., pending, completed)',\n )\n\n\nclass Plan(BaseModel):\n \"\"\"Represents a plan with multiple steps.\"\"\"\n\n steps: list[Step] = Field(default_factory=list, description='The steps in the plan')\n\n\nclass JSONPatchOp(BaseModel):\n \"\"\"A class representing a JSON Patch operation (RFC 6902).\"\"\"\n\n op: Literal['add', 'remove', 'replace', 'move', 'copy', 'test'] = Field(\n description='The operation to perform: add, remove, replace, move, copy, or test',\n )\n path: str = Field(description='JSON Pointer (RFC 6901) to the target location')\n value: Any = Field(\n default=None,\n description='The value to apply (for add, replace operations)',\n )\n from_: str | None = Field(\n default=None,\n alias='from',\n description='Source path (for move, copy operations)',\n )\n\n\nagent = Agent(\n 'openai:gpt-4o-mini',\n instructions=dedent(\n \"\"\"\n When planning use tools only, without any other messages.\n IMPORTANT:\n - Use the `create_plan` tool to set the initial state of the steps\n - Use the `update_plan_step` tool to update the status of each step\n - Do NOT repeat the plan or summarise it in a message\n - Do NOT confirm the creation or updates in a message\n - Do NOT ask the user for additional information or next steps\n\n Only one plan can be active at a time, so do not call the `create_plan` tool\n again until all the steps in current plan are completed.\n \"\"\"\n ),\n)\n\n\n@agent.tool_plain\nasync def create_plan(steps: list[str]) -> StateSnapshotEvent:\n \"\"\"Create a plan with multiple steps.\n\n Args:\n steps: List of 
step descriptions to create the plan.\n\n Returns:\n StateSnapshotEvent containing the initial state of the steps.\n \"\"\"\n plan: Plan = Plan(\n steps=[Step(description=step) for step in steps],\n )\n return StateSnapshotEvent(\n type=EventType.STATE_SNAPSHOT,\n snapshot=plan.model_dump(),\n )\n\n\n@agent.tool_plain\nasync def update_plan_step(\n index: int, description: str | None = None, status: StepStatus | None = None\n) -> StateDeltaEvent:\n \"\"\"Update the plan with new steps or changes.\n\n Args:\n index: The index of the step to update.\n description: The new description for the step.\n status: The new status for the step.\n\n Returns:\n StateDeltaEvent containing the changes made to the plan.\n \"\"\"\n changes: list[JSONPatchOp] = []\n if description is not None:\n changes.append(\n JSONPatchOp(\n op='replace', path=f'/steps/{index}/description', value=description\n )\n )\n if status is not None:\n changes.append(\n JSONPatchOp(op='replace', path=f'/steps/{index}/status', value=status)\n )\n return StateDeltaEvent(\n type=EventType.STATE_DELTA,\n delta=changes,\n )\n\n\napp = agent.to_ag_ui()\n", + "content": "\"\"\"Agentic Generative UI feature.\"\"\"\n\nfrom __future__ import annotations\n\nfrom textwrap import dedent\nfrom typing import Any, Literal\n\nfrom pydantic import BaseModel, Field\n\nfrom ag_ui.core import EventType, StateDeltaEvent, StateSnapshotEvent\nfrom pydantic_ai import Agent\n\nStepStatus = Literal['pending', 'completed']\n\n\nclass Step(BaseModel):\n \"\"\"Represents a step in a plan.\"\"\"\n\n description: str = Field(description='The description of the step')\n status: StepStatus = Field(\n default='pending',\n description='The status of the step (e.g., pending, completed)',\n )\n\n\nclass Plan(BaseModel):\n \"\"\"Represents a plan with multiple steps.\"\"\"\n\n steps: list[Step] = Field(default_factory=list, description='The steps in the plan')\n\n\nclass JSONPatchOp(BaseModel):\n \"\"\"A class representing a JSON Patch 
operation (RFC 6902).\"\"\"\n\n op: Literal['add', 'remove', 'replace', 'move', 'copy', 'test'] = Field(\n description='The operation to perform: add, remove, replace, move, copy, or test',\n )\n path: str = Field(description='JSON Pointer (RFC 6901) to the target location')\n value: Any = Field(\n default=None,\n description='The value to apply (for add, replace operations)',\n )\n from_: str | None = Field(\n default=None,\n alias='from',\n description='Source path (for move, copy operations)',\n )\n\n\nagent = Agent(\n 'openai:gpt-4o-mini',\n instructions=dedent(\n \"\"\"\n When planning use tools only, without any other messages.\n IMPORTANT:\n - Use the `create_plan` tool to set the initial state of the steps\n - Use the `update_plan_step` tool to update the status of each step\n - Do NOT repeat the plan or summarise it in a message\n - Do NOT confirm the creation or updates in a message\n - Do NOT ask the user for additional information or next steps\n - Do NOT leave a plan hanging, always complete the plan via `update_plan_step` if one is ongoing.\n\n Only one plan can be active at a time, so do not call the `create_plan` tool\n again until all the steps in current plan are completed.\n \"\"\"\n ),\n)\n\n\n@agent.tool_plain\nasync def create_plan(steps: list[str]) -> StateSnapshotEvent:\n \"\"\"Create a plan with multiple steps.\n\n Args:\n steps: List of step descriptions to create the plan.\n\n Returns:\n StateSnapshotEvent containing the initial state of the steps.\n \"\"\"\n plan: Plan = Plan(\n steps=[Step(description=step) for step in steps],\n )\n return StateSnapshotEvent(\n type=EventType.STATE_SNAPSHOT,\n snapshot=plan.model_dump(),\n )\n\n\n@agent.tool_plain\nasync def update_plan_step(\n index: int, description: str | None = None, status: StepStatus | None = None\n) -> StateDeltaEvent:\n \"\"\"Update the plan with new steps or changes.\n\n Args:\n index: The index of the step to update.\n description: The new description for the step.\n status: 
The new status for the step.\n\n Returns:\n StateDeltaEvent containing the changes made to the plan.\n \"\"\"\n changes: list[JSONPatchOp] = []\n if description is not None:\n changes.append(\n JSONPatchOp(\n op='replace', path=f'/steps/{index}/description', value=description\n )\n )\n if status is not None:\n changes.append(\n JSONPatchOp(op='replace', path=f'/steps/{index}/status', value=status)\n )\n return StateDeltaEvent(\n type=EventType.STATE_DELTA,\n delta=changes,\n )\n\n\napp = agent.to_ag_ui()\n", "language": "python", "type": "file" } @@ -98,90 +98,90 @@ }, { "name": "human_in_the_loop.py", - "content": "\"\"\"Human in the Loop Feature.\n\nNo special handling is required for this feature.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom textwrap import dedent\n\nfrom pydantic_ai import Agent\n\nagent = Agent(\n 'openai:gpt-4o-mini',\n instructions=dedent(\n \"\"\"\n When planning tasks use tools only, without any other messages.\n IMPORTANT:\n - Use the `generate_task_steps` tool to display the suggested steps to the user\n - Never repeat the plan, or send a message detailing steps\n - If accepted, confirm the creation of the plan and the number of selected (enabled) steps only\n - If not accepted, ask the user for more information, DO NOT use the `generate_task_steps` tool again\n \"\"\"\n ),\n)\n\napp = agent.to_ag_ui()\n", + "content": "\"\"\"Human in the Loop Feature.\n\nNo special handling is required for this feature.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom textwrap import dedent\n\nfrom pydantic_ai import Agent\n\nagent = Agent(\n 'openai:gpt-4o-mini',\n instructions=dedent(\n \"\"\"\n When planning tasks use tools only, without any other messages.\n IMPORTANT:\n - Use the `generate_task_steps` tool to display the suggested steps to the user\n - Do not call the `generate_task_steps` twice in a row, ever.\n - Never repeat the plan, or send a message detailing steps\n - If accepted, confirm the creation of the plan and the number 
of selected (enabled) steps only\n - If not accepted, ask the user for more information, DO NOT use the `generate_task_steps` tool again\n \"\"\"\n ),\n)\n\napp = agent.to_ag_ui()\n", "language": "python", "type": "file" } ], - "pydantic-ai::predictive_state_updates": [ + "pydantic-ai::shared_state": [ { "name": "page.tsx", - "content": "\"use client\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\n\nimport MarkdownIt from \"markdown-it\";\nimport React from \"react\";\n\nimport { diffWords } from \"diff\";\nimport { useEditor, EditorContent } from \"@tiptap/react\";\nimport StarterKit from \"@tiptap/starter-kit\";\nimport { useEffect, useState } from \"react\";\nimport { CopilotKit, useCoAgent, useCopilotAction, useCopilotChat } from \"@copilotkit/react-core\";\nimport { CopilotChat, CopilotSidebar } from \"@copilotkit/react-ui\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\nconst extensions = [StarterKit];\n\ninterface PredictiveStateUpdatesProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\nexport default function PredictiveStateUpdates({ params }: PredictiveStateUpdatesProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n const chatTitle = 'AI Document Editor'\n const chatDescription = 'Ask me to create or edit a document'\n const initialLabel = 'Hi 👋 How can I help with your document?'\n\n return (\n \n \n {isMobile ? (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n
\n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n
\n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n
\n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n ) : (\n \n )}\n \n
\n \n );\n}\n\ninterface AgentState {\n document: string;\n}\n\nconst DocumentEditor = () => {\n const editor = useEditor({\n extensions,\n immediatelyRender: false,\n editorProps: {\n attributes: { class: \"min-h-screen p-10\" },\n },\n });\n const [placeholderVisible, setPlaceholderVisible] = useState(false);\n const [currentDocument, setCurrentDocument] = useState(\"\");\n const { isLoading } = useCopilotChat();\n\n const {\n state: agentState,\n setState: setAgentState,\n nodeName,\n } = useCoAgent({\n name: \"predictive_state_updates\",\n initialState: {\n document: \"\",\n },\n });\n\n useEffect(() => {\n if (isLoading) {\n setCurrentDocument(editor?.getText() || \"\");\n }\n editor?.setEditable(!isLoading);\n }, [isLoading]);\n\n useEffect(() => {\n if (nodeName == \"end\") {\n // set the text one final time when loading is done\n if (currentDocument.trim().length > 0 && currentDocument !== agentState?.document) {\n const newDocument = agentState?.document || \"\";\n const diff = diffPartialText(currentDocument, newDocument, true);\n const markdown = fromMarkdown(diff);\n editor?.commands.setContent(markdown);\n }\n }\n }, [nodeName]);\n\n useEffect(() => {\n if (isLoading) {\n if (currentDocument.trim().length > 0) {\n const newDocument = agentState?.document || \"\";\n const diff = diffPartialText(currentDocument, newDocument);\n const markdown = fromMarkdown(diff);\n editor?.commands.setContent(markdown);\n } else {\n const markdown = fromMarkdown(agentState?.document || \"\");\n editor?.commands.setContent(markdown);\n }\n }\n }, [agentState?.document]);\n\n const text = editor?.getText() || \"\";\n\n useEffect(() => {\n setPlaceholderVisible(text.length === 0);\n\n if (!isLoading) {\n setCurrentDocument(text);\n setAgentState({\n document: text,\n });\n }\n }, [text]);\n\n // TODO(steve): Remove this when all agents have been updated to use write_document tool.\n useCopilotAction({\n name: \"confirm_changes\",\n renderAndWaitForResponse: ({ args, 
respond, status }) => (\n {\n editor?.commands.setContent(fromMarkdown(currentDocument));\n setAgentState({ document: currentDocument });\n }}\n onConfirm={() => {\n editor?.commands.setContent(fromMarkdown(agentState?.document || \"\"));\n setCurrentDocument(agentState?.document || \"\");\n setAgentState({ document: agentState?.document || \"\" });\n }}\n />\n ),\n }, [agentState?.document]);\n\n // Action to write the document.\n useCopilotAction({\n name: \"write_document\",\n description: `Present the proposed changes to the user for review`,\n parameters: [\n {\n name: \"document\",\n type: \"string\",\n description: \"The full updated document in markdown format\",\n },\n ],\n renderAndWaitForResponse({ args, status, respond }) {\n if (status === \"executing\") {\n return (\n {\n editor?.commands.setContent(fromMarkdown(currentDocument));\n setAgentState({ document: currentDocument });\n }}\n onConfirm={() => {\n editor?.commands.setContent(fromMarkdown(agentState?.document || \"\"));\n setCurrentDocument(agentState?.document || \"\");\n setAgentState({ document: agentState?.document || \"\" });\n }}\n />\n );\n }\n return <>;\n },\n }, [agentState?.document]);\n\n return (\n
\n {placeholderVisible && (\n
\n Write whatever you want here in Markdown format...\n
\n )}\n \n
\n );\n};\n\ninterface ConfirmChangesProps {\n args: any;\n respond: any;\n status: any;\n onReject: () => void;\n onConfirm: () => void;\n}\n\nfunction ConfirmChanges({ args, respond, status, onReject, onConfirm }: ConfirmChangesProps) {\n const [accepted, setAccepted] = useState(null);\n return (\n
\n

Confirm Changes

\n

Do you want to accept the changes?

\n {accepted === null && (\n
\n {\n if (respond) {\n setAccepted(false);\n onReject();\n respond({ accepted: false });\n }\n }}\n >\n Reject\n \n {\n if (respond) {\n setAccepted(true);\n onConfirm();\n respond({ accepted: true });\n }\n }}\n >\n Confirm\n \n
\n )}\n {accepted !== null && (\n
\n
\n {accepted ? \"✓ Accepted\" : \"✗ Rejected\"}\n
\n
\n )}\n
\n );\n}\n\nfunction fromMarkdown(text: string) {\n const md = new MarkdownIt({\n typographer: true,\n html: true,\n });\n\n return md.render(text);\n}\n\nfunction diffPartialText(oldText: string, newText: string, isComplete: boolean = false) {\n let oldTextToCompare = oldText;\n if (oldText.length > newText.length && !isComplete) {\n // make oldText shorter\n oldTextToCompare = oldText.slice(0, newText.length);\n }\n\n const changes = diffWords(oldTextToCompare, newText);\n\n let result = \"\";\n changes.forEach((part) => {\n if (part.added) {\n result += `${part.value}`;\n } else if (part.removed) {\n result += `${part.value}`;\n } else {\n result += part.value;\n }\n });\n\n if (oldText.length > newText.length && !isComplete) {\n result += oldText.slice(newText.length);\n }\n\n return result;\n}\n\nfunction isAlpha(text: string) {\n return /[a-zA-Z\\u00C0-\\u017F]/.test(text.trim());\n}\n", + "content": "\"use client\";\nimport { CopilotKit, useCoAgent, useCopilotChat } from \"@copilotkit/react-core\";\nimport { CopilotChat, CopilotSidebar } from \"@copilotkit/react-ui\";\nimport React, { useState, useEffect, useRef } from \"react\";\nimport { Role, TextMessage } from \"@copilotkit/runtime-client-gql\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface SharedStateProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\nexport default function SharedState({ params }: SharedStateProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n\n const chatTitle = 'AI Recipe Assistant'\n const chatDescription = 'Ask me to craft recipes'\n const initialLabel = 'Hi 👋 How can I help with your recipe?'\n\n 
return (\n \n \n \n {isMobile ? (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n
\n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n
\n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n
\n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n ) : (\n \n )}\n
\n \n );\n}\n\nenum SkillLevel {\n BEGINNER = \"Beginner\",\n INTERMEDIATE = \"Intermediate\",\n ADVANCED = \"Advanced\",\n}\n\nenum CookingTime {\n FiveMin = \"5 min\",\n FifteenMin = \"15 min\",\n ThirtyMin = \"30 min\",\n FortyFiveMin = \"45 min\",\n SixtyPlusMin = \"60+ min\",\n}\n\nconst cookingTimeValues = [\n { label: CookingTime.FiveMin, value: 0 },\n { label: CookingTime.FifteenMin, value: 1 },\n { label: CookingTime.ThirtyMin, value: 2 },\n { label: CookingTime.FortyFiveMin, value: 3 },\n { label: CookingTime.SixtyPlusMin, value: 4 },\n];\n\nenum SpecialPreferences {\n HighProtein = \"High Protein\",\n LowCarb = \"Low Carb\",\n Spicy = \"Spicy\",\n BudgetFriendly = \"Budget-Friendly\",\n OnePotMeal = \"One-Pot Meal\",\n Vegetarian = \"Vegetarian\",\n Vegan = \"Vegan\",\n}\n\ninterface Ingredient {\n icon: string;\n name: string;\n amount: string;\n}\n\ninterface Recipe {\n title: string;\n skill_level: SkillLevel;\n cooking_time: CookingTime;\n special_preferences: string[];\n ingredients: Ingredient[];\n instructions: string[];\n}\n\ninterface RecipeAgentState {\n recipe: Recipe;\n}\n\nconst INITIAL_STATE: RecipeAgentState = {\n recipe: {\n title: \"Make Your Recipe\",\n skill_level: SkillLevel.INTERMEDIATE,\n cooking_time: CookingTime.FortyFiveMin,\n special_preferences: [],\n ingredients: [\n { icon: \"🥕\", name: \"Carrots\", amount: \"3 large, grated\" },\n { icon: \"🌾\", name: \"All-Purpose Flour\", amount: \"2 cups\" },\n ],\n instructions: [\"Preheat oven to 350°F (175°C)\"],\n },\n};\n\nfunction Recipe() {\n const { isMobile } = useMobileView();\n const { state: agentState, setState: setAgentState } = useCoAgent({\n name: \"shared_state\",\n initialState: INITIAL_STATE,\n });\n\n const [recipe, setRecipe] = useState(INITIAL_STATE.recipe);\n const { appendMessage, isLoading } = useCopilotChat();\n const [editingInstructionIndex, setEditingInstructionIndex] = useState(null);\n const newInstructionRef = useRef(null);\n\n const updateRecipe = 
(partialRecipe: Partial) => {\n setAgentState({\n ...agentState,\n recipe: {\n ...recipe,\n ...partialRecipe,\n },\n });\n setRecipe({\n ...recipe,\n ...partialRecipe,\n });\n };\n\n const newRecipeState = { ...recipe };\n const newChangedKeys = [];\n const changedKeysRef = useRef([]);\n\n for (const key in recipe) {\n if (\n agentState &&\n agentState.recipe &&\n (agentState.recipe as any)[key] !== undefined &&\n (agentState.recipe as any)[key] !== null\n ) {\n let agentValue = (agentState.recipe as any)[key];\n const recipeValue = (recipe as any)[key];\n\n // Check if agentValue is a string and replace \\n with actual newlines\n if (typeof agentValue === \"string\") {\n agentValue = agentValue.replace(/\\\\n/g, \"\\n\");\n }\n\n if (JSON.stringify(agentValue) !== JSON.stringify(recipeValue)) {\n (newRecipeState as any)[key] = agentValue;\n newChangedKeys.push(key);\n }\n }\n }\n\n if (newChangedKeys.length > 0) {\n changedKeysRef.current = newChangedKeys;\n } else if (!isLoading) {\n changedKeysRef.current = [];\n }\n\n useEffect(() => {\n setRecipe(newRecipeState);\n }, [JSON.stringify(newRecipeState)]);\n\n const handleTitleChange = (event: React.ChangeEvent) => {\n updateRecipe({\n title: event.target.value,\n });\n };\n\n const handleSkillLevelChange = (event: React.ChangeEvent) => {\n updateRecipe({\n skill_level: event.target.value as SkillLevel,\n });\n };\n\n const handleDietaryChange = (preference: string, checked: boolean) => {\n if (checked) {\n updateRecipe({\n special_preferences: [...recipe.special_preferences, preference],\n });\n } else {\n updateRecipe({\n special_preferences: recipe.special_preferences.filter((p) => p !== preference),\n });\n }\n };\n\n const handleCookingTimeChange = (event: React.ChangeEvent) => {\n updateRecipe({\n cooking_time: cookingTimeValues[Number(event.target.value)].label,\n });\n };\n\n const addIngredient = () => {\n // Pick a random food emoji from our valid list\n updateRecipe({\n ingredients: 
[...recipe.ingredients, { icon: \"🍴\", name: \"\", amount: \"\" }],\n });\n };\n\n const updateIngredient = (index: number, field: keyof Ingredient, value: string) => {\n const updatedIngredients = [...recipe.ingredients];\n updatedIngredients[index] = {\n ...updatedIngredients[index],\n [field]: value,\n };\n updateRecipe({ ingredients: updatedIngredients });\n };\n\n const removeIngredient = (index: number) => {\n const updatedIngredients = [...recipe.ingredients];\n updatedIngredients.splice(index, 1);\n updateRecipe({ ingredients: updatedIngredients });\n };\n\n const addInstruction = () => {\n const newIndex = recipe.instructions.length;\n updateRecipe({\n instructions: [...recipe.instructions, \"\"],\n });\n // Set the new instruction as the editing one\n setEditingInstructionIndex(newIndex);\n\n // Focus the new instruction after render\n setTimeout(() => {\n const textareas = document.querySelectorAll(\".instructions-container textarea\");\n const newTextarea = textareas[textareas.length - 1] as HTMLTextAreaElement;\n if (newTextarea) {\n newTextarea.focus();\n }\n }, 50);\n };\n\n const updateInstruction = (index: number, value: string) => {\n const updatedInstructions = [...recipe.instructions];\n updatedInstructions[index] = value;\n updateRecipe({ instructions: updatedInstructions });\n };\n\n const removeInstruction = (index: number) => {\n const updatedInstructions = [...recipe.instructions];\n updatedInstructions.splice(index, 1);\n updateRecipe({ instructions: updatedInstructions });\n };\n\n // Simplified icon handler that defaults to a fork/knife for any problematic icons\n const getProperIcon = (icon: string | undefined): string => {\n // If icon is undefined return the default\n if (!icon) {\n return \"🍴\";\n }\n\n return icon;\n };\n\n return (\n
\n {/* Recipe Title */}\n
\n \n\n
\n
\n 🕒\n t.label === recipe.cooking_time)?.value || 3}\n onChange={handleCookingTimeChange}\n style={{\n backgroundImage:\n \"url(\\\"data:image/svg+xml;charset=UTF-8,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none' stroke='%23555' stroke-width='2' stroke-linecap='round' stroke-linejoin='round'%3e%3cpolyline points='6 9 12 15 18 9'%3e%3c/polyline%3e%3c/svg%3e\\\")\",\n backgroundRepeat: \"no-repeat\",\n backgroundPosition: \"right 0px center\",\n backgroundSize: \"12px\",\n appearance: \"none\",\n WebkitAppearance: \"none\",\n }}\n >\n {cookingTimeValues.map((time) => (\n \n ))}\n \n
\n\n
\n 🏆\n \n {Object.values(SkillLevel).map((level) => (\n \n ))}\n \n
\n
\n
\n\n {/* Dietary Preferences */}\n
\n {changedKeysRef.current.includes(\"special_preferences\") && }\n

Dietary Preferences

\n
\n {Object.values(SpecialPreferences).map((option) => (\n \n ))}\n
\n
\n\n {/* Ingredients */}\n
\n {changedKeysRef.current.includes(\"ingredients\") && }\n
\n

Ingredients

\n \n + Add Ingredient\n \n
\n \n {recipe.ingredients.map((ingredient, index) => (\n
\n
{getProperIcon(ingredient.icon)}
\n
\n updateIngredient(index, \"name\", e.target.value)}\n placeholder=\"Ingredient name\"\n className=\"ingredient-name-input\"\n />\n updateIngredient(index, \"amount\", e.target.value)}\n placeholder=\"Amount\"\n className=\"ingredient-amount-input\"\n />\n
\n removeIngredient(index)}\n aria-label=\"Remove ingredient\"\n >\n ×\n \n
\n ))}\n
\n
\n\n {/* Instructions */}\n
\n {changedKeysRef.current.includes(\"instructions\") && }\n
\n

Instructions

\n \n
\n
\n {recipe.instructions.map((instruction, index) => (\n
\n {/* Number Circle */}\n
{index + 1}
\n\n {/* Vertical Line */}\n {index < recipe.instructions.length - 1 &&
}\n\n {/* Instruction Content */}\n setEditingInstructionIndex(index)}\n >\n updateInstruction(index, e.target.value)}\n placeholder={!instruction ? \"Enter cooking instruction...\" : \"\"}\n onFocus={() => setEditingInstructionIndex(index)}\n onBlur={(e) => {\n // Only blur if clicking outside this instruction\n if (!e.relatedTarget || !e.currentTarget.contains(e.relatedTarget as Node)) {\n setEditingInstructionIndex(null);\n }\n }}\n />\n\n {/* Delete Button (only visible on hover) */}\n {\n e.stopPropagation(); // Prevent triggering parent onClick\n removeInstruction(index);\n }}\n aria-label=\"Remove instruction\"\n >\n ×\n \n
\n
\n ))}\n
\n
\n\n {/* Improve with AI Button */}\n
\n {\n if (!isLoading) {\n appendMessage(\n new TextMessage({\n content: \"Improve the recipe\",\n role: Role.User,\n }),\n );\n }\n }}\n disabled={isLoading}\n >\n {isLoading ? \"Please Wait...\" : \"Improve with AI\"}\n \n
\n \n );\n}\n\nfunction Ping() {\n return (\n \n \n \n \n );\n}\n", "language": "typescript", "type": "file" }, { "name": "style.css", - "content": "/* Basic editor styles */\n.tiptap-container {\n height: 100vh; /* Full viewport height */\n width: 100vw; /* Full viewport width */\n display: flex;\n flex-direction: column;\n}\n\n.tiptap {\n flex: 1; /* Take up remaining space */\n overflow: auto; /* Allow scrolling if content overflows */\n}\n\n.tiptap :first-child {\n margin-top: 0;\n}\n\n/* List styles */\n.tiptap ul,\n.tiptap ol {\n padding: 0 1rem;\n margin: 1.25rem 1rem 1.25rem 0.4rem;\n}\n\n.tiptap ul li p,\n.tiptap ol li p {\n margin-top: 0.25em;\n margin-bottom: 0.25em;\n}\n\n/* Heading styles */\n.tiptap h1,\n.tiptap h2,\n.tiptap h3,\n.tiptap h4,\n.tiptap h5,\n.tiptap h6 {\n line-height: 1.1;\n margin-top: 2.5rem;\n text-wrap: pretty;\n font-weight: bold;\n}\n\n.tiptap h1,\n.tiptap h2,\n.tiptap h3,\n.tiptap h4,\n.tiptap h5,\n.tiptap h6 {\n margin-top: 3.5rem;\n margin-bottom: 1.5rem;\n}\n\n.tiptap p {\n margin-bottom: 1rem;\n}\n\n.tiptap h1 {\n font-size: 1.4rem;\n}\n\n.tiptap h2 {\n font-size: 1.2rem;\n}\n\n.tiptap h3 {\n font-size: 1.1rem;\n}\n\n.tiptap h4,\n.tiptap h5,\n.tiptap h6 {\n font-size: 1rem;\n}\n\n/* Code and preformatted text styles */\n.tiptap code {\n background-color: var(--purple-light);\n border-radius: 0.4rem;\n color: var(--black);\n font-size: 0.85rem;\n padding: 0.25em 0.3em;\n}\n\n.tiptap pre {\n background: var(--black);\n border-radius: 0.5rem;\n color: var(--white);\n font-family: \"JetBrainsMono\", monospace;\n margin: 1.5rem 0;\n padding: 0.75rem 1rem;\n}\n\n.tiptap pre code {\n background: none;\n color: inherit;\n font-size: 0.8rem;\n padding: 0;\n}\n\n.tiptap blockquote {\n border-left: 3px solid var(--gray-3);\n margin: 1.5rem 0;\n padding-left: 1rem;\n}\n\n.tiptap hr {\n border: none;\n border-top: 1px solid var(--gray-2);\n margin: 2rem 0;\n}\n\n.tiptap s {\n background-color: #f9818150;\n padding: 2px;\n font-weight: 
bold;\n color: rgba(0, 0, 0, 0.7);\n}\n\n.tiptap em {\n background-color: #b2f2bb;\n padding: 2px;\n font-weight: bold;\n font-style: normal;\n}\n\n.copilotKitWindow {\n box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);\n}\n\n", + "content": ".copilotKitWindow {\n box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);\n}\n\n.copilotKitHeader {\n border-top-left-radius: 5px !important;\n background-color: #fff;\n color: #000;\n border-bottom: 0px;\n}\n\n/* Recipe App Styles */\n.app-container {\n min-height: 100vh;\n width: 100%;\n display: flex;\n align-items: center;\n justify-content: center;\n background-size: cover;\n background-position: center;\n background-repeat: no-repeat;\n background-attachment: fixed;\n position: relative;\n overflow: auto;\n}\n\n.recipe-card {\n background-color: rgba(255, 255, 255, 0.97);\n border-radius: 16px;\n box-shadow: 0 15px 30px rgba(0, 0, 0, 0.25), 0 5px 15px rgba(0, 0, 0, 0.15);\n width: 100%;\n max-width: 750px;\n margin: 20px auto;\n padding: 14px 32px;\n position: relative;\n z-index: 1;\n backdrop-filter: blur(5px);\n border: 1px solid rgba(255, 255, 255, 0.3);\n transition: transform 0.2s ease, box-shadow 0.2s ease;\n animation: fadeIn 0.5s ease-out forwards;\n box-sizing: border-box;\n overflow: hidden;\n}\n\n.recipe-card:hover {\n transform: translateY(-5px);\n box-shadow: 0 20px 40px rgba(0, 0, 0, 0.3), 0 10px 20px rgba(0, 0, 0, 0.2);\n}\n\n/* Recipe Header */\n.recipe-header {\n margin-bottom: 24px;\n}\n\n.recipe-title-input {\n width: 100%;\n font-size: 24px;\n font-weight: bold;\n border: none;\n outline: none;\n padding: 8px 0;\n margin-bottom: 0px;\n}\n\n.recipe-meta {\n display: flex;\n align-items: center;\n gap: 20px;\n margin-top: 5px;\n margin-bottom: 14px;\n}\n\n.meta-item {\n display: flex;\n align-items: center;\n gap: 8px;\n color: #555;\n}\n\n.meta-icon {\n font-size: 20px;\n color: #777;\n}\n\n.meta-text {\n font-size: 15px;\n}\n\n/* Recipe Meta Selects */\n.meta-item select {\n border: none;\n background: 
transparent;\n font-size: 15px;\n color: #555;\n cursor: pointer;\n outline: none;\n padding-right: 18px;\n transition: color 0.2s, transform 0.1s;\n font-weight: 500;\n}\n\n.meta-item select:hover,\n.meta-item select:focus {\n color: #FF5722;\n}\n\n.meta-item select:active {\n transform: scale(0.98);\n}\n\n.meta-item select option {\n color: #333;\n background-color: white;\n font-weight: normal;\n padding: 8px;\n}\n\n/* Section Container */\n.section-container {\n margin-bottom: 20px;\n position: relative;\n width: 100%;\n}\n\n.section-title {\n font-size: 20px;\n font-weight: 700;\n margin-bottom: 20px;\n color: #333;\n position: relative;\n display: inline-block;\n}\n\n.section-title:after {\n content: \"\";\n position: absolute;\n bottom: -8px;\n left: 0;\n width: 40px;\n height: 3px;\n background-color: #ff7043;\n border-radius: 3px;\n}\n\n/* Dietary Preferences */\n.dietary-options {\n display: flex;\n flex-wrap: wrap;\n gap: 10px 16px;\n margin-bottom: 16px;\n width: 100%;\n}\n\n.dietary-option {\n display: flex;\n align-items: center;\n gap: 6px;\n font-size: 14px;\n cursor: pointer;\n margin-bottom: 4px;\n}\n\n.dietary-option input {\n cursor: pointer;\n}\n\n/* Ingredients */\n.ingredients-container {\n display: flex;\n flex-wrap: wrap;\n gap: 10px;\n margin-bottom: 15px;\n width: 100%;\n box-sizing: border-box;\n}\n\n.ingredient-card {\n display: flex;\n align-items: center;\n background-color: rgba(255, 255, 255, 0.9);\n border-radius: 12px;\n padding: 12px;\n margin-bottom: 10px;\n box-shadow: 0 4px 10px rgba(0, 0, 0, 0.08);\n position: relative;\n transition: all 0.2s ease;\n border: 1px solid rgba(240, 240, 240, 0.8);\n width: calc(33.333% - 7px);\n box-sizing: border-box;\n}\n\n.ingredient-card:hover {\n transform: translateY(-2px);\n box-shadow: 0 6px 15px rgba(0, 0, 0, 0.12);\n}\n\n.ingredient-card .remove-button {\n position: absolute;\n right: 10px;\n top: 10px;\n background: none;\n border: none;\n color: #ccc;\n font-size: 16px;\n cursor: 
pointer;\n display: none;\n padding: 0;\n width: 24px;\n height: 24px;\n line-height: 1;\n}\n\n.ingredient-card:hover .remove-button {\n display: block;\n}\n\n.ingredient-icon {\n font-size: 24px;\n margin-right: 12px;\n display: flex;\n align-items: center;\n justify-content: center;\n width: 40px;\n height: 40px;\n background-color: #f7f7f7;\n border-radius: 50%;\n flex-shrink: 0;\n}\n\n.ingredient-content {\n flex: 1;\n display: flex;\n flex-direction: column;\n gap: 3px;\n min-width: 0;\n}\n\n.ingredient-name-input,\n.ingredient-amount-input {\n border: none;\n background: transparent;\n outline: none;\n width: 100%;\n padding: 0;\n text-overflow: ellipsis;\n overflow: hidden;\n white-space: nowrap;\n}\n\n.ingredient-name-input {\n font-weight: 500;\n font-size: 14px;\n}\n\n.ingredient-amount-input {\n font-size: 13px;\n color: #666;\n}\n\n.ingredient-name-input::placeholder,\n.ingredient-amount-input::placeholder {\n color: #aaa;\n}\n\n.remove-button {\n background: none;\n border: none;\n color: #999;\n font-size: 20px;\n cursor: pointer;\n padding: 0;\n width: 28px;\n height: 28px;\n display: flex;\n align-items: center;\n justify-content: center;\n margin-left: 10px;\n}\n\n.remove-button:hover {\n color: #FF5722;\n}\n\n/* Instructions */\n.instructions-container {\n display: flex;\n flex-direction: column;\n gap: 6px;\n position: relative;\n margin-bottom: 12px;\n width: 100%;\n}\n\n.instruction-item {\n position: relative;\n display: flex;\n width: 100%;\n box-sizing: border-box;\n margin-bottom: 8px;\n align-items: flex-start;\n}\n\n.instruction-number {\n display: flex;\n align-items: center;\n justify-content: center;\n min-width: 26px;\n height: 26px;\n background-color: #ff7043;\n color: white;\n border-radius: 50%;\n font-weight: 600;\n flex-shrink: 0;\n box-shadow: 0 2px 4px rgba(255, 112, 67, 0.3);\n z-index: 1;\n font-size: 13px;\n margin-top: 2px;\n}\n\n.instruction-line {\n position: absolute;\n left: 13px; /* Half of the number circle width 
*/\n top: 22px;\n bottom: -18px;\n width: 2px;\n background: linear-gradient(to bottom, #ff7043 60%, rgba(255, 112, 67, 0.4));\n z-index: 0;\n}\n\n.instruction-content {\n background-color: white;\n border-radius: 10px;\n padding: 10px 14px;\n margin-left: 12px;\n flex-grow: 1;\n transition: all 0.2s ease;\n box-shadow: 0 2px 6px rgba(0, 0, 0, 0.08);\n border: 1px solid rgba(240, 240, 240, 0.8);\n position: relative;\n width: calc(100% - 38px);\n box-sizing: border-box;\n display: flex;\n align-items: center;\n}\n\n.instruction-content-editing {\n background-color: #fff9f6;\n box-shadow: 0 6px 16px rgba(0, 0, 0, 0.12), 0 0 0 2px rgba(255, 112, 67, 0.2);\n}\n\n.instruction-content:hover {\n transform: translateY(-2px);\n box-shadow: 0 6px 16px rgba(0, 0, 0, 0.12);\n}\n\n.instruction-textarea {\n width: 100%;\n background: transparent;\n border: none;\n resize: vertical;\n font-family: inherit;\n font-size: 14px;\n line-height: 1.4;\n min-height: 20px;\n outline: none;\n padding: 0;\n margin: 0;\n}\n\n.instruction-delete-btn {\n position: absolute;\n background: none;\n border: none;\n color: #ccc;\n font-size: 16px;\n cursor: pointer;\n display: none;\n padding: 0;\n width: 20px;\n height: 20px;\n line-height: 1;\n top: 50%;\n transform: translateY(-50%);\n right: 8px;\n}\n\n.instruction-content:hover .instruction-delete-btn {\n display: flex;\n align-items: center;\n justify-content: center;\n}\n\n/* Action Button */\n.action-container {\n display: flex;\n justify-content: center;\n margin-top: 40px;\n padding-bottom: 20px;\n position: relative;\n}\n\n.improve-button {\n background-color: #ff7043;\n border: none;\n color: white;\n border-radius: 30px;\n font-size: 18px;\n font-weight: 600;\n padding: 14px 28px;\n cursor: pointer;\n transition: all 0.3s ease;\n box-shadow: 0 4px 15px rgba(255, 112, 67, 0.4);\n display: flex;\n align-items: center;\n justify-content: center;\n text-align: center;\n position: relative;\n min-width: 180px;\n}\n\n.improve-button:hover 
{\n background-color: #ff5722;\n transform: translateY(-2px);\n box-shadow: 0 8px 20px rgba(255, 112, 67, 0.5);\n}\n\n.improve-button.loading {\n background-color: #ff7043;\n opacity: 0.8;\n cursor: not-allowed;\n padding-left: 42px; /* Reduced padding to bring text closer to icon */\n padding-right: 22px; /* Balance the button */\n justify-content: flex-start; /* Left align text for better alignment with icon */\n}\n\n.improve-button.loading:after {\n content: \"\"; /* Add space between icon and text */\n display: inline-block;\n width: 8px; /* Width of the space */\n}\n\n.improve-button:before {\n content: \"\";\n background-image: url(\"data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='24' height='24' viewBox='0 0 24 24' fill='none' stroke='white' stroke-width='2' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M12 2v4M12 18v4M4.93 4.93l2.83 2.83M16.24 16.24l2.83 2.83M2 12h4M18 12h4M4.93 19.07l2.83-2.83M16.24 7.76l2.83-2.83'/%3E%3C/svg%3E\");\n width: 20px; /* Slightly smaller icon */\n height: 20px;\n background-repeat: no-repeat;\n background-size: contain;\n position: absolute;\n left: 16px; /* Slightly adjusted */\n top: 50%;\n transform: translateY(-50%);\n display: none;\n}\n\n.improve-button.loading:before {\n display: block;\n animation: spin 1.5s linear infinite;\n}\n\n@keyframes spin {\n 0% { transform: translateY(-50%) rotate(0deg); }\n 100% { transform: translateY(-50%) rotate(360deg); }\n}\n\n/* Ping Animation */\n.ping-animation {\n position: absolute;\n display: flex;\n width: 12px;\n height: 12px;\n top: 0;\n right: 0;\n}\n\n.ping-circle {\n position: absolute;\n display: inline-flex;\n width: 100%;\n height: 100%;\n border-radius: 50%;\n background-color: #38BDF8;\n opacity: 0.75;\n animation: ping 1.5s cubic-bezier(0, 0, 0.2, 1) infinite;\n}\n\n.ping-dot {\n position: relative;\n display: inline-flex;\n width: 12px;\n height: 12px;\n border-radius: 50%;\n background-color: #0EA5E9;\n}\n\n@keyframes ping {\n 75%, 
100% {\n transform: scale(2);\n opacity: 0;\n }\n}\n\n/* Instruction hover effects */\n.instruction-item:hover .instruction-delete-btn {\n display: flex !important;\n}\n\n/* Add some subtle animations */\n@keyframes fadeIn {\n from { opacity: 0; transform: translateY(20px); }\n to { opacity: 1; transform: translateY(0); }\n}\n\n/* Better center alignment for the recipe card */\n.recipe-card-container {\n display: flex;\n justify-content: center;\n width: 100%;\n position: relative;\n z-index: 1;\n margin: 0 auto;\n box-sizing: border-box;\n}\n\n/* Add Buttons */\n.add-button {\n background-color: transparent;\n color: #FF5722;\n border: 1px dashed #FF5722;\n border-radius: 8px;\n padding: 10px 16px;\n cursor: pointer;\n font-weight: 500;\n display: inline-block;\n font-size: 14px;\n margin-bottom: 0;\n}\n\n.add-step-button {\n background-color: transparent;\n color: #FF5722;\n border: 1px dashed #FF5722;\n border-radius: 6px;\n padding: 6px 12px;\n cursor: pointer;\n font-weight: 500;\n font-size: 13px;\n}\n\n/* Section Headers */\n.section-header {\n display: flex;\n justify-content: space-between;\n align-items: center;\n margin-bottom: 12px;\n}", "language": "css", "type": "file" }, { "name": "README.mdx", - "content": "# 📝 Predictive State Updates Document Editor\n\n## What This Demo Shows\n\nThis demo showcases CopilotKit's **predictive state updates** for real-time\ndocument collaboration:\n\n1. **Live Document Editing**: Watch as your Copilot makes changes to a document\n in real-time\n2. **Diff Visualization**: See exactly what's being changed as it happens\n3. 
**Streaming Updates**: Changes are displayed character-by-character as the\n Copilot works\n\n## How to Interact\n\nTry these interactions with the collaborative document editor:\n\n- \"Fix the grammar and typos in this document\"\n- \"Make this text more professional\"\n- \"Add a section about [topic]\"\n- \"Summarize this content in bullet points\"\n- \"Change the tone to be more casual\"\n\nWatch as the Copilot processes your request and edits the document in real-time\nright before your eyes.\n\n## ✨ Predictive State Updates in Action\n\n**What's happening technically:**\n\n- The document state is shared between your UI and the Copilot\n- As the Copilot generates content, changes are streamed to the UI\n- Each modification is visualized with additions and deletions\n- The UI renders these changes progressively, without waiting for completion\n- All edits are tracked and displayed in a visually intuitive way\n\n**What you'll see in this demo:**\n\n- Text changes are highlighted in different colors (green for additions, red for\n deletions)\n- The document updates character-by-character, creating a typing-like effect\n- You can see the Copilot's thought process as it refines the content\n- The final document seamlessly incorporates all changes\n- The experience feels collaborative, as if someone is editing alongside you\n\nThis pattern of real-time collaborative editing with diff visualization is\nperfect for document editors, code review tools, content creation platforms, or\nany application where users benefit from seeing exactly how content is being\ntransformed!\n", + "content": "# 🍳 Shared State Recipe Creator\n\n## What This Demo Shows\n\nThis demo showcases CopilotKit's **shared state** functionality - a powerful\nfeature that enables bidirectional data flow between:\n\n1. **Frontend → Agent**: UI controls update the agent's context in real-time\n2. 
**Agent → Frontend**: The Copilot's recipe creations instantly update the UI\n components\n\nIt's like having a cooking buddy who not only listens to what you want but also\nupdates your recipe card as you chat - no refresh needed! ✨\n\n## How to Interact\n\nMix and match any of these parameters (or none at all - it's up to you!):\n\n- **Skill Level**: Beginner to expert 👨‍🍳\n- **Cooking Time**: Quick meals or slow cooking ⏱️\n- **Special Preferences**: Dietary needs, flavor profiles, health goals 🥗\n- **Ingredients**: Items you want to include 🧅🥩🍄\n- **Instructions**: Any specific steps\n\nThen chat with your Copilot chef with prompts like:\n\n- \"I'm a beginner cook. Can you make me a quick dinner?\"\n- \"I need something spicy with chicken that takes under 30 minutes!\"\n\n## ✨ Shared State Magic in Action\n\n**What's happening technically:**\n\n- The UI and Copilot agent share the same state object (**Agent State = UI\n State**)\n- Changes from either side automatically update the other\n- Neither side needs to manually request updates from the other\n\n**What you'll see in this demo:**\n\n- Set cooking time to 20 minutes in the UI and watch the Copilot immediately\n respect your time constraint\n- Add ingredients through the UI and see them appear in your recipe\n- When the Copilot suggests new ingredients, watch them automatically appear in\n the UI ingredients list\n- Change your skill level and see how the Copilot adapts its instructions in\n real-time\n\nThis synchronized state creates a seamless experience where the agent always has\nyour current preferences, and any updates to the recipe are instantly reflected\nin both places.\n\nThis shared state pattern can be applied to any application where you want your\nUI and Copilot to work together in perfect harmony!\n", "language": "markdown", "type": "file" }, { - "name": "predictive_state_updates.py", - "content": "\"\"\"Predictive State feature.\"\"\"\n\nfrom __future__ import annotations\n\nfrom textwrap 
import dedent\n\nfrom pydantic import BaseModel\n\nfrom ag_ui.core import CustomEvent, EventType\nfrom pydantic_ai import Agent, RunContext\nfrom pydantic_ai.ag_ui import StateDeps\n\n\nclass DocumentState(BaseModel):\n \"\"\"State for the document being written.\"\"\"\n\n document: str = ''\n\n\nagent = Agent('openai:gpt-4o-mini', deps_type=StateDeps[DocumentState])\n\n\n# Tools which return AG-UI events will be sent to the client as part of the\n# event stream, single events and iterables of events are supported.\n@agent.tool_plain\nasync def document_predict_state() -> list[CustomEvent]:\n \"\"\"Enable document state prediction.\n\n Returns:\n CustomEvent containing the event to enable state prediction.\n \"\"\"\n return [\n CustomEvent(\n type=EventType.CUSTOM,\n name='PredictState',\n value=[\n {\n 'state_key': 'document',\n 'tool': 'write_document',\n 'tool_argument': 'document',\n },\n ],\n ),\n ]\n\n\n@agent.instructions()\nasync def story_instructions(ctx: RunContext[StateDeps[DocumentState]]) -> str:\n \"\"\"Provide instructions for writing document if present.\n\n Args:\n ctx: The run context containing document state information.\n\n Returns:\n Instructions string for the document writing agent.\n \"\"\"\n return dedent(\n f\"\"\"You are a helpful assistant for writing documents.\n\n Before you start writing, you MUST call the `document_predict_state`\n tool to enable state prediction.\n\n To present the document to the user for review, you MUST use the\n `write_document` tool.\n\n When you have written the document, DO NOT repeat it as a message.\n If accepted briefly summarize the changes you made, 2 sentences\n max, otherwise ask the user to clarify what they want to change.\n\n This is the current document:\n\n {ctx.deps.state.document}\n \"\"\"\n )\n\n\napp = agent.to_ag_ui(deps=StateDeps(DocumentState()))\n", + "name": "shared_state.py", + "content": "\"\"\"Shared State feature.\"\"\"\n\nfrom __future__ import annotations\n\nfrom enum import 
StrEnum\nfrom textwrap import dedent\n\nfrom pydantic import BaseModel, Field\n\nfrom ag_ui.core import EventType, StateSnapshotEvent\nfrom pydantic_ai import Agent, RunContext\nfrom pydantic_ai.ag_ui import StateDeps\n\n\nclass SkillLevel(StrEnum):\n \"\"\"The level of skill required for the recipe.\"\"\"\n\n BEGINNER = 'Beginner'\n INTERMEDIATE = 'Intermediate'\n ADVANCED = 'Advanced'\n\n\nclass SpecialPreferences(StrEnum):\n \"\"\"Special preferences for the recipe.\"\"\"\n\n HIGH_PROTEIN = 'High Protein'\n LOW_CARB = 'Low Carb'\n SPICY = 'Spicy'\n BUDGET_FRIENDLY = 'Budget-Friendly'\n ONE_POT_MEAL = 'One-Pot Meal'\n VEGETARIAN = 'Vegetarian'\n VEGAN = 'Vegan'\n\n\nclass CookingTime(StrEnum):\n \"\"\"The cooking time of the recipe.\"\"\"\n\n FIVE_MIN = '5 min'\n FIFTEEN_MIN = '15 min'\n THIRTY_MIN = '30 min'\n FORTY_FIVE_MIN = '45 min'\n SIXTY_PLUS_MIN = '60+ min'\n\n\nclass Ingredient(BaseModel):\n \"\"\"A class representing an ingredient in a recipe.\"\"\"\n\n icon: str = Field(\n default='ingredient',\n description=\"The icon emoji (not emoji code like '\\x1f35e', but the actual emoji like 🥕) of the ingredient\",\n )\n name: str\n amount: str\n\n\nclass Recipe(BaseModel):\n \"\"\"A class representing a recipe.\"\"\"\n\n skill_level: SkillLevel = Field(\n default=SkillLevel.BEGINNER,\n description='The skill level required for the recipe',\n )\n special_preferences: list[SpecialPreferences] = Field(\n default_factory=list,\n description='Any special preferences for the recipe',\n )\n cooking_time: CookingTime = Field(\n default=CookingTime.FIVE_MIN, description='The cooking time of the recipe'\n )\n ingredients: list[Ingredient] = Field(\n default_factory=list,\n description='Ingredients for the recipe',\n )\n instructions: list[str] = Field(\n default_factory=list, description='Instructions for the recipe'\n )\n\n\nclass RecipeSnapshot(BaseModel):\n \"\"\"A class representing the state of the recipe.\"\"\"\n\n recipe: Recipe = Field(\n default_factory=Recipe, 
description='The current state of the recipe'\n )\n\n\nagent = Agent('openai:gpt-4o-mini', deps_type=StateDeps[RecipeSnapshot])\n\n\n@agent.tool_plain\nasync def display_recipe(recipe: Recipe) -> StateSnapshotEvent:\n \"\"\"Display the recipe to the user.\n\n Args:\n recipe: The recipe to display.\n\n Returns:\n StateSnapshotEvent containing the recipe snapshot.\n \"\"\"\n return StateSnapshotEvent(\n type=EventType.STATE_SNAPSHOT,\n snapshot={'recipe': recipe},\n )\n\n\n@agent.instructions\nasync def recipe_instructions(ctx: RunContext[StateDeps[RecipeSnapshot]]) -> str:\n \"\"\"Instructions for the recipe generation agent.\n\n Args:\n ctx: The run context containing recipe state information.\n\n Returns:\n Instructions string for the recipe generation agent.\n \"\"\"\n return dedent(\n f\"\"\"\n You are a helpful assistant for creating recipes.\n\n IMPORTANT:\n - Create a complete recipe using the existing ingredients\n - Append new ingredients to the existing ones\n - Use the `display_recipe` tool to present the recipe to the user\n - Do NOT repeat the recipe in the message, use the tool instead\n - Do NOT run the `display_recipe` tool multiple times in a row\n\n Once you have created the updated recipe and displayed it to the user,\n summarise the changes in one sentence, don't describe the recipe in\n detail or send it as a message to the user.\n\n The current state of the recipe is:\n\n {ctx.deps.state.recipe.model_dump_json(indent=2)}\n \"\"\",\n )\n\n\napp = agent.to_ag_ui(deps=StateDeps(RecipeSnapshot()))\n", "language": "python", "type": "file" } ], - "pydantic-ai::shared_state": [ + "pydantic-ai::tool_based_generative_ui": [ { "name": "page.tsx", - "content": "\"use client\";\nimport { CopilotKit, useCoAgent, useCopilotChat } from \"@copilotkit/react-core\";\nimport { CopilotChat, CopilotSidebar } from \"@copilotkit/react-ui\";\nimport React, { useState, useEffect, useRef } from \"react\";\nimport { Role, TextMessage } from 
\"@copilotkit/runtime-client-gql\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface SharedStateProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\nexport default function SharedState({ params }: SharedStateProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n\n const chatTitle = 'AI Recipe Assistant'\n const chatDescription = 'Ask me to craft recipes'\n const initialLabel = 'Hi 👋 How can I help with your recipe?'\n\n return (\n \n \n \n {isMobile ? (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n
\n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n \n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n ) : (\n \n )}\n \n \n );\n}\n\nenum SkillLevel {\n BEGINNER = \"Beginner\",\n INTERMEDIATE = \"Intermediate\",\n ADVANCED = \"Advanced\",\n}\n\nenum CookingTime {\n FiveMin = \"5 min\",\n FifteenMin = \"15 min\",\n ThirtyMin = \"30 min\",\n FortyFiveMin = \"45 min\",\n SixtyPlusMin = \"60+ min\",\n}\n\nconst cookingTimeValues = [\n { label: CookingTime.FiveMin, value: 0 },\n { label: CookingTime.FifteenMin, value: 1 },\n { label: CookingTime.ThirtyMin, value: 2 },\n { label: CookingTime.FortyFiveMin, value: 3 },\n { label: CookingTime.SixtyPlusMin, value: 4 },\n];\n\nenum SpecialPreferences {\n HighProtein = \"High Protein\",\n LowCarb = \"Low Carb\",\n Spicy = \"Spicy\",\n BudgetFriendly = \"Budget-Friendly\",\n OnePotMeal = \"One-Pot Meal\",\n Vegetarian = \"Vegetarian\",\n Vegan = \"Vegan\",\n}\n\ninterface Ingredient {\n icon: string;\n name: string;\n amount: string;\n}\n\ninterface Recipe {\n title: string;\n skill_level: SkillLevel;\n cooking_time: CookingTime;\n special_preferences: string[];\n ingredients: Ingredient[];\n instructions: string[];\n}\n\ninterface RecipeAgentState {\n recipe: Recipe;\n}\n\nconst INITIAL_STATE: RecipeAgentState = {\n recipe: {\n title: \"Make Your Recipe\",\n skill_level: SkillLevel.INTERMEDIATE,\n cooking_time: CookingTime.FortyFiveMin,\n special_preferences: [],\n ingredients: [\n { icon: \"🥕\", name: \"Carrots\", amount: \"3 large, grated\" },\n { icon: \"🌾\", name: \"All-Purpose Flour\", amount: \"2 cups\" },\n ],\n instructions: [\"Preheat oven to 350°F (175°C)\"],\n },\n};\n\nfunction Recipe() {\n const { isMobile } = useMobileView();\n const { state: agentState, setState: setAgentState } = useCoAgent({\n name: \"shared_state\",\n initialState: INITIAL_STATE,\n });\n\n const [recipe, setRecipe] = useState(INITIAL_STATE.recipe);\n const { appendMessage, isLoading } = useCopilotChat();\n const [editingInstructionIndex, 
setEditingInstructionIndex] = useState(null);\n const newInstructionRef = useRef(null);\n\n const updateRecipe = (partialRecipe: Partial) => {\n setAgentState({\n ...agentState,\n recipe: {\n ...recipe,\n ...partialRecipe,\n },\n });\n setRecipe({\n ...recipe,\n ...partialRecipe,\n });\n };\n\n const newRecipeState = { ...recipe };\n const newChangedKeys = [];\n const changedKeysRef = useRef([]);\n\n for (const key in recipe) {\n if (\n agentState &&\n agentState.recipe &&\n (agentState.recipe as any)[key] !== undefined &&\n (agentState.recipe as any)[key] !== null\n ) {\n let agentValue = (agentState.recipe as any)[key];\n const recipeValue = (recipe as any)[key];\n\n // Check if agentValue is a string and replace \\n with actual newlines\n if (typeof agentValue === \"string\") {\n agentValue = agentValue.replace(/\\\\n/g, \"\\n\");\n }\n\n if (JSON.stringify(agentValue) !== JSON.stringify(recipeValue)) {\n (newRecipeState as any)[key] = agentValue;\n newChangedKeys.push(key);\n }\n }\n }\n\n if (newChangedKeys.length > 0) {\n changedKeysRef.current = newChangedKeys;\n } else if (!isLoading) {\n changedKeysRef.current = [];\n }\n\n useEffect(() => {\n setRecipe(newRecipeState);\n }, [JSON.stringify(newRecipeState)]);\n\n const handleTitleChange = (event: React.ChangeEvent) => {\n updateRecipe({\n title: event.target.value,\n });\n };\n\n const handleSkillLevelChange = (event: React.ChangeEvent) => {\n updateRecipe({\n skill_level: event.target.value as SkillLevel,\n });\n };\n\n const handleDietaryChange = (preference: string, checked: boolean) => {\n if (checked) {\n updateRecipe({\n special_preferences: [...recipe.special_preferences, preference],\n });\n } else {\n updateRecipe({\n special_preferences: recipe.special_preferences.filter((p) => p !== preference),\n });\n }\n };\n\n const handleCookingTimeChange = (event: React.ChangeEvent) => {\n updateRecipe({\n cooking_time: cookingTimeValues[Number(event.target.value)].label,\n });\n };\n\n const addIngredient 
= () => {\n // Pick a random food emoji from our valid list\n updateRecipe({\n ingredients: [...recipe.ingredients, { icon: \"🍴\", name: \"\", amount: \"\" }],\n });\n };\n\n const updateIngredient = (index: number, field: keyof Ingredient, value: string) => {\n const updatedIngredients = [...recipe.ingredients];\n updatedIngredients[index] = {\n ...updatedIngredients[index],\n [field]: value,\n };\n updateRecipe({ ingredients: updatedIngredients });\n };\n\n const removeIngredient = (index: number) => {\n const updatedIngredients = [...recipe.ingredients];\n updatedIngredients.splice(index, 1);\n updateRecipe({ ingredients: updatedIngredients });\n };\n\n const addInstruction = () => {\n const newIndex = recipe.instructions.length;\n updateRecipe({\n instructions: [...recipe.instructions, \"\"],\n });\n // Set the new instruction as the editing one\n setEditingInstructionIndex(newIndex);\n\n // Focus the new instruction after render\n setTimeout(() => {\n const textareas = document.querySelectorAll(\".instructions-container textarea\");\n const newTextarea = textareas[textareas.length - 1] as HTMLTextAreaElement;\n if (newTextarea) {\n newTextarea.focus();\n }\n }, 50);\n };\n\n const updateInstruction = (index: number, value: string) => {\n const updatedInstructions = [...recipe.instructions];\n updatedInstructions[index] = value;\n updateRecipe({ instructions: updatedInstructions });\n };\n\n const removeInstruction = (index: number) => {\n const updatedInstructions = [...recipe.instructions];\n updatedInstructions.splice(index, 1);\n updateRecipe({ instructions: updatedInstructions });\n };\n\n // Simplified icon handler that defaults to a fork/knife for any problematic icons\n const getProperIcon = (icon: string | undefined): string => {\n // If icon is undefined return the default\n if (!icon) {\n return \"🍴\";\n }\n\n return icon;\n };\n\n return (\n
\n {/* Recipe Title */}\n
\n \n\n
\n
\n 🕒\n t.label === recipe.cooking_time)?.value || 3}\n onChange={handleCookingTimeChange}\n style={{\n backgroundImage:\n \"url(\\\"data:image/svg+xml;charset=UTF-8,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none' stroke='%23555' stroke-width='2' stroke-linecap='round' stroke-linejoin='round'%3e%3cpolyline points='6 9 12 15 18 9'%3e%3c/polyline%3e%3c/svg%3e\\\")\",\n backgroundRepeat: \"no-repeat\",\n backgroundPosition: \"right 0px center\",\n backgroundSize: \"12px\",\n appearance: \"none\",\n WebkitAppearance: \"none\",\n }}\n >\n {cookingTimeValues.map((time) => (\n \n ))}\n \n
\n\n
\n 🏆\n \n {Object.values(SkillLevel).map((level) => (\n \n ))}\n \n
\n
\n
\n\n {/* Dietary Preferences */}\n
\n {changedKeysRef.current.includes(\"special_preferences\") && }\n

Dietary Preferences

\n
\n {Object.values(SpecialPreferences).map((option) => (\n \n ))}\n
\n
\n\n {/* Ingredients */}\n
\n {changedKeysRef.current.includes(\"ingredients\") && }\n
\n

Ingredients

\n \n + Add Ingredient\n \n
\n \n {recipe.ingredients.map((ingredient, index) => (\n
\n
{getProperIcon(ingredient.icon)}
\n
\n updateIngredient(index, \"name\", e.target.value)}\n placeholder=\"Ingredient name\"\n className=\"ingredient-name-input\"\n />\n updateIngredient(index, \"amount\", e.target.value)}\n placeholder=\"Amount\"\n className=\"ingredient-amount-input\"\n />\n
\n removeIngredient(index)}\n aria-label=\"Remove ingredient\"\n >\n ×\n \n
\n ))}\n
\n \n\n {/* Instructions */}\n
\n {changedKeysRef.current.includes(\"instructions\") && }\n
\n

Instructions

\n \n
\n
\n {recipe.instructions.map((instruction, index) => (\n
\n {/* Number Circle */}\n
{index + 1}
\n\n {/* Vertical Line */}\n {index < recipe.instructions.length - 1 &&
}\n\n {/* Instruction Content */}\n setEditingInstructionIndex(index)}\n >\n updateInstruction(index, e.target.value)}\n placeholder={!instruction ? \"Enter cooking instruction...\" : \"\"}\n onFocus={() => setEditingInstructionIndex(index)}\n onBlur={(e) => {\n // Only blur if clicking outside this instruction\n if (!e.relatedTarget || !e.currentTarget.contains(e.relatedTarget as Node)) {\n setEditingInstructionIndex(null);\n }\n }}\n />\n\n {/* Delete Button (only visible on hover) */}\n {\n e.stopPropagation(); // Prevent triggering parent onClick\n removeInstruction(index);\n }}\n aria-label=\"Remove instruction\"\n >\n ×\n \n
\n
\n ))}\n
\n
\n\n {/* Improve with AI Button */}\n
\n {\n if (!isLoading) {\n appendMessage(\n new TextMessage({\n content: \"Improve the recipe\",\n role: Role.User,\n }),\n );\n }\n }}\n disabled={isLoading}\n >\n {isLoading ? \"Please Wait...\" : \"Improve with AI\"}\n \n
\n
\n );\n}\n\nfunction Ping() {\n return (\n \n \n \n \n );\n}\n", + "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku {\n japanese: string[] | [],\n english: string[] | [],\n image_names: string[] | [],\n selectedImage: string | null,\n}\n\ninterface HaikuCardProps {\n generatedHaiku: GenerateHaiku | Partial\n setHaikus: Dispatch>\n haikus: GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && }\n \n \n );\n}\n\nfunction MobileChat({ chatTitle, chatDescription, initialLabel }: { chatTitle: string, chatDescription: string, initialLabel: string }) {\n const defaultChatHeight = 50\n\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n return (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n \n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction getRandomImage(): string {\n return VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n}\n\nconst validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n while (correctedNames.length < 3) {\n const nextImage = getRandomImage();\n if (!usedValidNames.has(nextImage)) {\n correctedNames.push(nextImage);\n usedValidNames.add(nextImage);\n }\n }\n\n return correctedNames.slice(0, 3);\n};\n\nfunction HaikuCard({ generatedHaiku, setHaikus, haikus }: HaikuCardProps) {\n return (\n \n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n \n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n useCopilotAction({\n name: \"generate_haiku\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n description: `Names of 3 relevant images selected from the following: \\n -${VALID_IMAGE_NAMES.join('\\n -')}`,\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [newHaiku, ...prev].filter(h => h.english[0] !== \"A placeholder verse—\"));\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n \n\n {/* Main Display */}\n
\n
\n {haikus.map((haiku, index) => (\n (haikus.length == 1 || index == activeIndex) && (\n\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n setHaikus((prevHaikus) => {\n return prevHaikus.map((h, idx) => {\n if (idx === index) {\n return { ...h, selectedImage: imageName }\n } else {\n return { ...h }\n }\n })\n })}\n />\n ))}\n
\n )}\n
\n )\n ))}\n
\n \n \n );\n}\n\nfunction Thumbnails({ haikus, activeIndex, setActiveIndex, isMobile }: { haikus: Haiku[], activeIndex: number, setActiveIndex: (index: number) => void, isMobile: boolean }) {\n if (haikus.length == 0 || isMobile) { return null }\n return (\n
\n {haikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n \n ))}\n \n )\n\n}", "language": "typescript", "type": "file" }, { "name": "style.css", - "content": ".copilotKitWindow {\n box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);\n}\n\n.copilotKitHeader {\n border-top-left-radius: 5px !important;\n background-color: #fff;\n color: #000;\n border-bottom: 0px;\n}\n\n/* Recipe App Styles */\n.app-container {\n min-height: 100vh;\n width: 100%;\n display: flex;\n align-items: center;\n justify-content: center;\n background-size: cover;\n background-position: center;\n background-repeat: no-repeat;\n background-attachment: fixed;\n position: relative;\n overflow: auto;\n}\n\n.recipe-card {\n background-color: rgba(255, 255, 255, 0.97);\n border-radius: 16px;\n box-shadow: 0 15px 30px rgba(0, 0, 0, 0.25), 0 5px 15px rgba(0, 0, 0, 0.15);\n width: 100%;\n max-width: 750px;\n margin: 20px auto;\n padding: 14px 32px;\n position: relative;\n z-index: 1;\n backdrop-filter: blur(5px);\n border: 1px solid rgba(255, 255, 255, 0.3);\n transition: transform 0.2s ease, box-shadow 0.2s ease;\n animation: fadeIn 0.5s ease-out forwards;\n box-sizing: border-box;\n overflow: hidden;\n}\n\n.recipe-card:hover {\n transform: translateY(-5px);\n box-shadow: 0 20px 40px rgba(0, 0, 0, 0.3), 0 10px 20px rgba(0, 0, 0, 0.2);\n}\n\n/* Recipe Header */\n.recipe-header {\n margin-bottom: 24px;\n}\n\n.recipe-title-input {\n width: 100%;\n font-size: 24px;\n font-weight: bold;\n border: none;\n outline: none;\n padding: 8px 0;\n margin-bottom: 0px;\n}\n\n.recipe-meta {\n display: flex;\n align-items: center;\n gap: 20px;\n margin-top: 5px;\n margin-bottom: 14px;\n}\n\n.meta-item {\n display: flex;\n align-items: center;\n gap: 8px;\n color: #555;\n}\n\n.meta-icon {\n font-size: 20px;\n color: #777;\n}\n\n.meta-text {\n font-size: 15px;\n}\n\n/* Recipe Meta Selects */\n.meta-item select {\n border: none;\n background: transparent;\n font-size: 15px;\n color: #555;\n cursor: pointer;\n outline: none;\n padding-right: 18px;\n transition: color 0.2s, 
transform 0.1s;\n font-weight: 500;\n}\n\n.meta-item select:hover,\n.meta-item select:focus {\n color: #FF5722;\n}\n\n.meta-item select:active {\n transform: scale(0.98);\n}\n\n.meta-item select option {\n color: #333;\n background-color: white;\n font-weight: normal;\n padding: 8px;\n}\n\n/* Section Container */\n.section-container {\n margin-bottom: 20px;\n position: relative;\n width: 100%;\n}\n\n.section-title {\n font-size: 20px;\n font-weight: 700;\n margin-bottom: 20px;\n color: #333;\n position: relative;\n display: inline-block;\n}\n\n.section-title:after {\n content: \"\";\n position: absolute;\n bottom: -8px;\n left: 0;\n width: 40px;\n height: 3px;\n background-color: #ff7043;\n border-radius: 3px;\n}\n\n/* Dietary Preferences */\n.dietary-options {\n display: flex;\n flex-wrap: wrap;\n gap: 10px 16px;\n margin-bottom: 16px;\n width: 100%;\n}\n\n.dietary-option {\n display: flex;\n align-items: center;\n gap: 6px;\n font-size: 14px;\n cursor: pointer;\n margin-bottom: 4px;\n}\n\n.dietary-option input {\n cursor: pointer;\n}\n\n/* Ingredients */\n.ingredients-container {\n display: flex;\n flex-wrap: wrap;\n gap: 10px;\n margin-bottom: 15px;\n width: 100%;\n box-sizing: border-box;\n}\n\n.ingredient-card {\n display: flex;\n align-items: center;\n background-color: rgba(255, 255, 255, 0.9);\n border-radius: 12px;\n padding: 12px;\n margin-bottom: 10px;\n box-shadow: 0 4px 10px rgba(0, 0, 0, 0.08);\n position: relative;\n transition: all 0.2s ease;\n border: 1px solid rgba(240, 240, 240, 0.8);\n width: calc(33.333% - 7px);\n box-sizing: border-box;\n}\n\n.ingredient-card:hover {\n transform: translateY(-2px);\n box-shadow: 0 6px 15px rgba(0, 0, 0, 0.12);\n}\n\n.ingredient-card .remove-button {\n position: absolute;\n right: 10px;\n top: 10px;\n background: none;\n border: none;\n color: #ccc;\n font-size: 16px;\n cursor: pointer;\n display: none;\n padding: 0;\n width: 24px;\n height: 24px;\n line-height: 1;\n}\n\n.ingredient-card:hover .remove-button {\n 
display: block;\n}\n\n.ingredient-icon {\n font-size: 24px;\n margin-right: 12px;\n display: flex;\n align-items: center;\n justify-content: center;\n width: 40px;\n height: 40px;\n background-color: #f7f7f7;\n border-radius: 50%;\n flex-shrink: 0;\n}\n\n.ingredient-content {\n flex: 1;\n display: flex;\n flex-direction: column;\n gap: 3px;\n min-width: 0;\n}\n\n.ingredient-name-input,\n.ingredient-amount-input {\n border: none;\n background: transparent;\n outline: none;\n width: 100%;\n padding: 0;\n text-overflow: ellipsis;\n overflow: hidden;\n white-space: nowrap;\n}\n\n.ingredient-name-input {\n font-weight: 500;\n font-size: 14px;\n}\n\n.ingredient-amount-input {\n font-size: 13px;\n color: #666;\n}\n\n.ingredient-name-input::placeholder,\n.ingredient-amount-input::placeholder {\n color: #aaa;\n}\n\n.remove-button {\n background: none;\n border: none;\n color: #999;\n font-size: 20px;\n cursor: pointer;\n padding: 0;\n width: 28px;\n height: 28px;\n display: flex;\n align-items: center;\n justify-content: center;\n margin-left: 10px;\n}\n\n.remove-button:hover {\n color: #FF5722;\n}\n\n/* Instructions */\n.instructions-container {\n display: flex;\n flex-direction: column;\n gap: 6px;\n position: relative;\n margin-bottom: 12px;\n width: 100%;\n}\n\n.instruction-item {\n position: relative;\n display: flex;\n width: 100%;\n box-sizing: border-box;\n margin-bottom: 8px;\n align-items: flex-start;\n}\n\n.instruction-number {\n display: flex;\n align-items: center;\n justify-content: center;\n min-width: 26px;\n height: 26px;\n background-color: #ff7043;\n color: white;\n border-radius: 50%;\n font-weight: 600;\n flex-shrink: 0;\n box-shadow: 0 2px 4px rgba(255, 112, 67, 0.3);\n z-index: 1;\n font-size: 13px;\n margin-top: 2px;\n}\n\n.instruction-line {\n position: absolute;\n left: 13px; /* Half of the number circle width */\n top: 22px;\n bottom: -18px;\n width: 2px;\n background: linear-gradient(to bottom, #ff7043 60%, rgba(255, 112, 67, 0.4));\n z-index: 
0;\n}\n\n.instruction-content {\n background-color: white;\n border-radius: 10px;\n padding: 10px 14px;\n margin-left: 12px;\n flex-grow: 1;\n transition: all 0.2s ease;\n box-shadow: 0 2px 6px rgba(0, 0, 0, 0.08);\n border: 1px solid rgba(240, 240, 240, 0.8);\n position: relative;\n width: calc(100% - 38px);\n box-sizing: border-box;\n display: flex;\n align-items: center;\n}\n\n.instruction-content-editing {\n background-color: #fff9f6;\n box-shadow: 0 6px 16px rgba(0, 0, 0, 0.12), 0 0 0 2px rgba(255, 112, 67, 0.2);\n}\n\n.instruction-content:hover {\n transform: translateY(-2px);\n box-shadow: 0 6px 16px rgba(0, 0, 0, 0.12);\n}\n\n.instruction-textarea {\n width: 100%;\n background: transparent;\n border: none;\n resize: vertical;\n font-family: inherit;\n font-size: 14px;\n line-height: 1.4;\n min-height: 20px;\n outline: none;\n padding: 0;\n margin: 0;\n}\n\n.instruction-delete-btn {\n position: absolute;\n background: none;\n border: none;\n color: #ccc;\n font-size: 16px;\n cursor: pointer;\n display: none;\n padding: 0;\n width: 20px;\n height: 20px;\n line-height: 1;\n top: 50%;\n transform: translateY(-50%);\n right: 8px;\n}\n\n.instruction-content:hover .instruction-delete-btn {\n display: flex;\n align-items: center;\n justify-content: center;\n}\n\n/* Action Button */\n.action-container {\n display: flex;\n justify-content: center;\n margin-top: 40px;\n padding-bottom: 20px;\n position: relative;\n}\n\n.improve-button {\n background-color: #ff7043;\n border: none;\n color: white;\n border-radius: 30px;\n font-size: 18px;\n font-weight: 600;\n padding: 14px 28px;\n cursor: pointer;\n transition: all 0.3s ease;\n box-shadow: 0 4px 15px rgba(255, 112, 67, 0.4);\n display: flex;\n align-items: center;\n justify-content: center;\n text-align: center;\n position: relative;\n min-width: 180px;\n}\n\n.improve-button:hover {\n background-color: #ff5722;\n transform: translateY(-2px);\n box-shadow: 0 8px 20px rgba(255, 112, 67, 
0.5);\n}\n\n.improve-button.loading {\n background-color: #ff7043;\n opacity: 0.8;\n cursor: not-allowed;\n padding-left: 42px; /* Reduced padding to bring text closer to icon */\n padding-right: 22px; /* Balance the button */\n justify-content: flex-start; /* Left align text for better alignment with icon */\n}\n\n.improve-button.loading:after {\n content: \"\"; /* Add space between icon and text */\n display: inline-block;\n width: 8px; /* Width of the space */\n}\n\n.improve-button:before {\n content: \"\";\n background-image: url(\"data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='24' height='24' viewBox='0 0 24 24' fill='none' stroke='white' stroke-width='2' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M12 2v4M12 18v4M4.93 4.93l2.83 2.83M16.24 16.24l2.83 2.83M2 12h4M18 12h4M4.93 19.07l2.83-2.83M16.24 7.76l2.83-2.83'/%3E%3C/svg%3E\");\n width: 20px; /* Slightly smaller icon */\n height: 20px;\n background-repeat: no-repeat;\n background-size: contain;\n position: absolute;\n left: 16px; /* Slightly adjusted */\n top: 50%;\n transform: translateY(-50%);\n display: none;\n}\n\n.improve-button.loading:before {\n display: block;\n animation: spin 1.5s linear infinite;\n}\n\n@keyframes spin {\n 0% { transform: translateY(-50%) rotate(0deg); }\n 100% { transform: translateY(-50%) rotate(360deg); }\n}\n\n/* Ping Animation */\n.ping-animation {\n position: absolute;\n display: flex;\n width: 12px;\n height: 12px;\n top: 0;\n right: 0;\n}\n\n.ping-circle {\n position: absolute;\n display: inline-flex;\n width: 100%;\n height: 100%;\n border-radius: 50%;\n background-color: #38BDF8;\n opacity: 0.75;\n animation: ping 1.5s cubic-bezier(0, 0, 0.2, 1) infinite;\n}\n\n.ping-dot {\n position: relative;\n display: inline-flex;\n width: 12px;\n height: 12px;\n border-radius: 50%;\n background-color: #0EA5E9;\n}\n\n@keyframes ping {\n 75%, 100% {\n transform: scale(2);\n opacity: 0;\n }\n}\n\n/* Instruction hover effects 
*/\n.instruction-item:hover .instruction-delete-btn {\n display: flex !important;\n}\n\n/* Add some subtle animations */\n@keyframes fadeIn {\n from { opacity: 0; transform: translateY(20px); }\n to { opacity: 1; transform: translateY(0); }\n}\n\n/* Better center alignment for the recipe card */\n.recipe-card-container {\n display: flex;\n justify-content: center;\n width: 100%;\n position: relative;\n z-index: 1;\n margin: 0 auto;\n box-sizing: border-box;\n}\n\n/* Add Buttons */\n.add-button {\n background-color: transparent;\n color: #FF5722;\n border: 1px dashed #FF5722;\n border-radius: 8px;\n padding: 10px 16px;\n cursor: pointer;\n font-weight: 500;\n display: inline-block;\n font-size: 14px;\n margin-bottom: 0;\n}\n\n.add-step-button {\n background-color: transparent;\n color: #FF5722;\n border: 1px dashed #FF5722;\n border-radius: 6px;\n padding: 6px 12px;\n cursor: pointer;\n font-weight: 500;\n font-size: 13px;\n}\n\n/* Section Headers */\n.section-header {\n display: flex;\n justify-content: space-between;\n align-items: center;\n margin-bottom: 12px;\n}", + "content": ".copilotKitWindow {\n box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);\n}\n\n.copilotKitHeader {\n border-top-left-radius: 5px !important;\n}\n\n.page-background {\n /* Darker gradient background */\n background: linear-gradient(170deg, #e9ecef 0%, #ced4da 100%);\n}\n\n@keyframes fade-scale-in {\n from {\n opacity: 0;\n transform: translateY(10px) scale(0.98);\n }\n to {\n opacity: 1;\n transform: translateY(0) scale(1);\n }\n}\n\n/* Updated card entry animation */\n@keyframes pop-in {\n 0% {\n opacity: 0;\n transform: translateY(15px) scale(0.95);\n }\n 70% {\n opacity: 1;\n transform: translateY(-2px) scale(1.02);\n }\n 100% {\n opacity: 1;\n transform: translateY(0) scale(1);\n }\n}\n\n/* Animation for subtle background gradient movement */\n@keyframes animated-gradient {\n 0% {\n background-position: 0% 50%;\n }\n 50% {\n background-position: 100% 50%;\n }\n 100% {\n background-position: 
0% 50%;\n }\n}\n\n/* Animation for flash effect on apply */\n@keyframes flash-border-glow {\n 0% {\n /* Start slightly intensified */\n border-top-color: #ff5b4a !important;\n box-shadow: 0 10px 30px rgba(0, 0, 0, 0.07),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 25px rgba(255, 91, 74, 0.5);\n }\n 50% {\n /* Peak intensity */\n border-top-color: #ff4733 !important;\n box-shadow: 0 10px 30px rgba(0, 0, 0, 0.08),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 35px rgba(255, 71, 51, 0.7);\n }\n 100% {\n /* Return to default state appearance */\n border-top-color: #ff6f61 !important;\n box-shadow: 0 10px 30px rgba(0, 0, 0, 0.07),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 10px rgba(255, 111, 97, 0.15);\n }\n}\n\n/* Existing animation for haiku lines */\n@keyframes fade-slide-in {\n from {\n opacity: 0;\n transform: translateX(-15px);\n }\n to {\n opacity: 1;\n transform: translateX(0);\n }\n}\n\n.animated-fade-in {\n /* Use the new pop-in animation */\n animation: pop-in 0.6s ease-out forwards;\n}\n\n.haiku-card {\n /* Subtle animated gradient background */\n background: linear-gradient(120deg, #ffffff 0%, #fdfdfd 50%, #ffffff 100%);\n background-size: 200% 200%;\n animation: animated-gradient 10s ease infinite;\n\n /* === Explicit Border Override Attempt === */\n /* 1. Set the default grey border for all sides */\n border: 1px solid #dee2e6;\n\n /* 2. 
Explicitly override the top border immediately after */\n border-top: 10px solid #ff6f61 !important; /* Orange top - Added !important */\n /* === End Explicit Border Override Attempt === */\n\n padding: 2.5rem 3rem;\n border-radius: 20px;\n\n /* Default glow intensity */\n box-shadow: 0 10px 30px rgba(0, 0, 0, 0.07),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 15px rgba(255, 111, 97, 0.25);\n text-align: left;\n max-width: 745px;\n margin: 3rem auto;\n min-width: 600px;\n\n /* Transition */\n transition: transform 0.35s ease, box-shadow 0.35s ease, border-top-width 0.35s ease, border-top-color 0.35s ease;\n}\n\n.haiku-card:hover {\n transform: translateY(-8px) scale(1.03);\n /* Enhanced shadow + Glow */\n box-shadow: 0 15px 35px rgba(0, 0, 0, 0.1),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 25px rgba(255, 91, 74, 0.5);\n /* Modify only top border properties */\n border-top-width: 14px !important; /* Added !important */\n border-top-color: #ff5b4a !important; /* Added !important */\n}\n\n.haiku-card .flex {\n margin-bottom: 1.5rem;\n}\n\n.haiku-card .flex.haiku-line { /* Target the lines specifically */\n margin-bottom: 1.5rem;\n opacity: 0; /* Start hidden for animation */\n animation: fade-slide-in 0.5s ease-out forwards;\n /* animation-delay is set inline in page.tsx */\n}\n\n/* Remove previous explicit color overrides - rely on Tailwind */\n/* .haiku-card p.text-4xl {\n color: #212529;\n}\n\n.haiku-card p.text-base {\n color: #495057;\n} */\n\n.haiku-card.applied-flash {\n /* Apply the flash animation once */\n /* Note: animation itself has !important on border-top-color */\n animation: flash-border-glow 0.6s ease-out forwards;\n}\n\n/* Styling for images within the main haiku card */\n.haiku-card-image {\n width: 9.5rem; /* Increased size (approx w-48) */\n height: 9.5rem; /* Increased size (approx h-48) */\n object-fit: cover;\n border-radius: 1.5rem; /* rounded-xl */\n border: 1px solid #e5e7eb;\n /* Enhanced shadow with subtle orange hint */\n box-shadow: 
0 8px 15px rgba(0, 0, 0, 0.1),\n 0 3px 6px rgba(0, 0, 0, 0.08),\n 0 0 10px rgba(255, 111, 97, 0.2);\n /* Inherit animation delay from inline style */\n animation-name: fadeIn;\n animation-duration: 0.5s;\n animation-fill-mode: both;\n}\n\n/* Styling for images within the suggestion card */\n.suggestion-card-image {\n width: 6.5rem; /* Increased slightly (w-20) */\n height: 6.5rem; /* Increased slightly (h-20) */\n object-fit: cover;\n border-radius: 1rem; /* Equivalent to rounded-md */\n border: 1px solid #d1d5db; /* Equivalent to border (using Tailwind gray-300) */\n margin-top: 0.5rem;\n /* Added shadow for suggestion images */\n box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1),\n 0 2px 4px rgba(0, 0, 0, 0.06);\n transition: all 0.2s ease-in-out; /* Added for smooth deselection */\n}\n\n/* Styling for the focused suggestion card image */\n.suggestion-card-image-focus {\n width: 6.5rem;\n height: 6.5rem;\n object-fit: cover;\n border-radius: 1rem;\n margin-top: 0.5rem;\n /* Highlight styles */\n border: 2px solid #ff6f61; /* Thicker, themed border */\n box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1), /* Base shadow for depth */\n 0 0 12px rgba(255, 111, 97, 0.6); /* Orange glow */\n transform: scale(1.05); /* Slightly scale up */\n transition: all 0.2s ease-in-out; /* Smooth transition for focus */\n}\n\n/* Styling for the suggestion card container in the sidebar */\n.suggestion-card {\n border: 1px solid #dee2e6; /* Same default border as haiku-card */\n border-top: 10px solid #ff6f61; /* Same orange top border */\n border-radius: 0.375rem; /* Default rounded-md */\n /* Note: background-color is set by Tailwind bg-gray-100 */\n /* Other styles like padding, margin, flex are handled by Tailwind */\n}\n\n.suggestion-image-container {\n display: flex;\n gap: 1rem;\n justify-content: space-between;\n width: 100%;\n height: 6.5rem;\n}\n\n/* Mobile responsive styles - matches useMobileView hook breakpoint */\n@media (max-width: 767px) {\n .haiku-card {\n padding: 1rem 1.5rem; /* 
Reduced from 2.5rem 3rem */\n min-width: auto; /* Remove min-width constraint */\n max-width: 100%; /* Full width on mobile */\n margin: 1rem auto; /* Reduced margin */\n }\n\n .haiku-card-image {\n width: 5.625rem; /* 90px - smaller on mobile */\n height: 5.625rem; /* 90px - smaller on mobile */\n }\n\n .suggestion-card-image {\n width: 5rem; /* Slightly smaller on mobile */\n height: 5rem; /* Slightly smaller on mobile */\n }\n\n .suggestion-card-image-focus {\n width: 5rem; /* Slightly smaller on mobile */\n height: 5rem; /* Slightly smaller on mobile */\n }\n}\n", "language": "css", "type": "file" }, { "name": "README.mdx", - "content": "# 🍳 Shared State Recipe Creator\n\n## What This Demo Shows\n\nThis demo showcases CopilotKit's **shared state** functionality - a powerful\nfeature that enables bidirectional data flow between:\n\n1. **Frontend → Agent**: UI controls update the agent's context in real-time\n2. **Agent → Frontend**: The Copilot's recipe creations instantly update the UI\n components\n\nIt's like having a cooking buddy who not only listens to what you want but also\nupdates your recipe card as you chat - no refresh needed! ✨\n\n## How to Interact\n\nMix and match any of these parameters (or none at all - it's up to you!):\n\n- **Skill Level**: Beginner to expert 👨‍🍳\n- **Cooking Time**: Quick meals or slow cooking ⏱️\n- **Special Preferences**: Dietary needs, flavor profiles, health goals 🥗\n- **Ingredients**: Items you want to include 🧅🥩🍄\n- **Instructions**: Any specific steps\n\nThen chat with your Copilot chef with prompts like:\n\n- \"I'm a beginner cook. 
Can you make me a quick dinner?\"\n- \"I need something spicy with chicken that takes under 30 minutes!\"\n\n## ✨ Shared State Magic in Action\n\n**What's happening technically:**\n\n- The UI and Copilot agent share the same state object (**Agent State = UI\n State**)\n- Changes from either side automatically update the other\n- Neither side needs to manually request updates from the other\n\n**What you'll see in this demo:**\n\n- Set cooking time to 20 minutes in the UI and watch the Copilot immediately\n respect your time constraint\n- Add ingredients through the UI and see them appear in your recipe\n- When the Copilot suggests new ingredients, watch them automatically appear in\n the UI ingredients list\n- Change your skill level and see how the Copilot adapts its instructions in\n real-time\n\nThis synchronized state creates a seamless experience where the agent always has\nyour current preferences, and any updates to the recipe are instantly reflected\nin both places.\n\nThis shared state pattern can be applied to any application where you want your\nUI and Copilot to work together in perfect harmony!\n", + "content": "# 🪶 Tool-Based Generative UI Haiku Creator\n\n## What This Demo Shows\n\nThis demo showcases CopilotKit's **tool-based generative UI** capabilities:\n\n1. **Frontend Rendering of Tool Calls**: Backend tool calls are automatically\n rendered in the UI\n2. **Dynamic UI Generation**: The UI updates in real-time as the agent generates\n content\n3. 
**Elegant Content Presentation**: Complex structured data (haikus) are\n beautifully displayed\n\n## How to Interact\n\nChat with your Copilot and ask for haikus about different topics:\n\n- \"Create a haiku about nature\"\n- \"Write a haiku about technology\"\n- \"Generate a haiku about the changing seasons\"\n- \"Make a humorous haiku about programming\"\n\nEach request will trigger the agent to generate a haiku and display it in a\nvisually appealing card format in the UI.\n\n## ✨ Tool-Based Generative UI in Action\n\n**What's happening technically:**\n\n- The agent processes your request and determines it should create a haiku\n- It calls a backend tool that returns structured haiku data\n- CopilotKit automatically renders this tool call in the frontend\n- The rendering is handled by the registered tool component in your React app\n- No manual state management is required to display the results\n\n**What you'll see in this demo:**\n\n- As you request a haiku, a beautifully formatted card appears in the UI\n- The haiku follows the traditional 5-7-5 syllable structure\n- Each haiku is presented with consistent styling\n- Multiple haikus can be generated in sequence\n- The UI adapts to display each new piece of content\n\nThis pattern of tool-based generative UI can be extended to create any kind of\ndynamic content - from data visualizations to interactive components, all driven\nby your Copilot's tool calls!\n", "language": "markdown", "type": "file" }, { - "name": "shared_state.py", - "content": "\"\"\"Shared State feature.\"\"\"\n\nfrom __future__ import annotations\n\nfrom enum import StrEnum\nfrom textwrap import dedent\n\nfrom pydantic import BaseModel, Field\n\nfrom ag_ui.core import EventType, StateSnapshotEvent\nfrom pydantic_ai import Agent, RunContext\nfrom pydantic_ai.ag_ui import StateDeps\n\n\nclass SkillLevel(StrEnum):\n \"\"\"The level of skill required for the recipe.\"\"\"\n\n BEGINNER = 'Beginner'\n INTERMEDIATE = 'Intermediate'\n ADVANCED = 
'Advanced'\n\n\nclass SpecialPreferences(StrEnum):\n \"\"\"Special preferences for the recipe.\"\"\"\n\n HIGH_PROTEIN = 'High Protein'\n LOW_CARB = 'Low Carb'\n SPICY = 'Spicy'\n BUDGET_FRIENDLY = 'Budget-Friendly'\n ONE_POT_MEAL = 'One-Pot Meal'\n VEGETARIAN = 'Vegetarian'\n VEGAN = 'Vegan'\n\n\nclass CookingTime(StrEnum):\n \"\"\"The cooking time of the recipe.\"\"\"\n\n FIVE_MIN = '5 min'\n FIFTEEN_MIN = '15 min'\n THIRTY_MIN = '30 min'\n FORTY_FIVE_MIN = '45 min'\n SIXTY_PLUS_MIN = '60+ min'\n\n\nclass Ingredient(BaseModel):\n \"\"\"A class representing an ingredient in a recipe.\"\"\"\n\n icon: str = Field(\n default='ingredient',\n description=\"The icon emoji (not emoji code like '\\x1f35e', but the actual emoji like 🥕) of the ingredient\",\n )\n name: str\n amount: str\n\n\nclass Recipe(BaseModel):\n \"\"\"A class representing a recipe.\"\"\"\n\n skill_level: SkillLevel = Field(\n default=SkillLevel.BEGINNER,\n description='The skill level required for the recipe',\n )\n special_preferences: list[SpecialPreferences] = Field(\n default_factory=list,\n description='Any special preferences for the recipe',\n )\n cooking_time: CookingTime = Field(\n default=CookingTime.FIVE_MIN, description='The cooking time of the recipe'\n )\n ingredients: list[Ingredient] = Field(\n default_factory=list,\n description='Ingredients for the recipe',\n )\n instructions: list[str] = Field(\n default_factory=list, description='Instructions for the recipe'\n )\n\n\nclass RecipeSnapshot(BaseModel):\n \"\"\"A class representing the state of the recipe.\"\"\"\n\n recipe: Recipe = Field(\n default_factory=Recipe, description='The current state of the recipe'\n )\n\n\nagent = Agent('openai:gpt-4o-mini', deps_type=StateDeps[RecipeSnapshot])\n\n\n@agent.tool_plain\nasync def display_recipe(recipe: Recipe) -> StateSnapshotEvent:\n \"\"\"Display the recipe to the user.\n\n Args:\n recipe: The recipe to display.\n\n Returns:\n StateSnapshotEvent containing the recipe snapshot.\n \"\"\"\n 
return StateSnapshotEvent(\n type=EventType.STATE_SNAPSHOT,\n snapshot={'recipe': recipe},\n )\n\n\n@agent.instructions\nasync def recipe_instructions(ctx: RunContext[StateDeps[RecipeSnapshot]]) -> str:\n \"\"\"Instructions for the recipe generation agent.\n\n Args:\n ctx: The run context containing recipe state information.\n\n Returns:\n Instructions string for the recipe generation agent.\n \"\"\"\n return dedent(\n f\"\"\"\n You are a helpful assistant for creating recipes.\n\n IMPORTANT:\n - Create a complete recipe using the existing ingredients\n - Append new ingredients to the existing ones\n - Use the `display_recipe` tool to present the recipe to the user\n - Do NOT repeat the recipe in the message, use the tool instead\n - Do NOT run the `display_recipe` tool multiple times in a row\n\n Once you have created the updated recipe and displayed it to the user,\n summarise the changes in one sentence, don't describe the recipe in\n detail or send it as a message to the user.\n\n The current state of the recipe is:\n\n {ctx.deps.state.recipe.model_dump_json(indent=2)}\n \"\"\",\n )\n\n\napp = agent.to_ag_ui(deps=StateDeps(RecipeSnapshot()))\n", + "name": "tool_based_generative_ui.py", + "content": "\"\"\"Tool Based Generative UI feature.\n\nNo special handling is required for this feature.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom pydantic_ai import Agent\n\nagent = Agent('openai:gpt-4o-mini')\napp = agent.to_ag_ui()\n", "language": "python", "type": "file" } ], - "pydantic-ai::tool_based_generative_ui": [ + "server-starter::agentic_chat": [ { "name": "page.tsx", - "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from 
\"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku{\n japanese : string[] | [],\n english : string[] | [],\n image_names : string[] | [],\n selectedImage : string | null,\n}\n\ninterface HaikuCardProps{\n generatedHaiku : GenerateHaiku | Partial\n setHaikus : Dispatch>\n haikus : GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n
\n
\n
\n \n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )}\n \n \n );\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction HaikuCard({generatedHaiku, setHaikus, haikus} : HaikuCardProps) {\n return (\n
\n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n
\n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n const validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n if (correctedNames.length < 3) {\n const availableFallbacks = VALID_IMAGE_NAMES.filter(name => !usedValidNames.has(name));\n for (let i = availableFallbacks.length - 1; i > 0; i--) {\n const j = Math.floor(Math.random() * (i + 1));\n [availableFallbacks[i], availableFallbacks[j]] = [availableFallbacks[j], availableFallbacks[i]];\n }\n\n while (correctedNames.length < 3 && availableFallbacks.length > 0) {\n const fallbackName = availableFallbacks.pop();\n if (fallbackName) {\n correctedNames.push(fallbackName);\n }\n }\n }\n\n while (correctedNames.length < 3 && VALID_IMAGE_NAMES.length > 0) {\n const fallbackName = VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n correctedNames.push(fallbackName);\n }\n\n return correctedNames.slice(0, 3);\n };\n\n useCopilotAction({\n name: \"generate_haiku\",\n available: \"frontend\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n 
description: \"Names of 3 relevant images\",\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [...prev, newHaiku]);\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const generatedHaikus = useMemo(() => (\n haikus.filter((haiku) => haiku.english[0] !== \"A placeholder verse—\")\n ), [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n {/* Thumbnail List */}\n {Boolean(generatedHaikus.length) && !isMobile && (\n
\n {generatedHaikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n )}\n\n {/* Main Display */}\n
\n
\n {haikus.filter((_haiku: Haiku, index: number) => {\n if (haikus.length == 1) return true;\n else return index == activeIndex + 1;\n }).map((haiku, index) => (\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n \n \n );\n}\n", + "content": "\"use client\";\nimport React, { useState } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport { CopilotKit, useCoAgent, useCopilotAction, useCopilotChat } from \"@copilotkit/react-core\";\nimport { CopilotChat } from \"@copilotkit/react-ui\";\n\ninterface AgenticChatProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\nconst AgenticChat: React.FC = ({ params }) => {\n const { integrationId } = React.use(params);\n\n return (\n \n \n \n );\n};\n\nconst Chat = () => {\n const [background, setBackground] = useState(\"--copilot-kit-background-color\");\n\n useCopilotAction({\n name: \"change_background\",\n description:\n \"Change the background color of the chat. Can be anything that the CSS background attribute accepts. Regular colors, linear of radial gradients etc.\",\n parameters: [\n {\n name: \"background\",\n type: \"string\",\n description: \"The background. Prefer gradients.\",\n },\n ],\n handler: ({ background }) => {\n setBackground(background);\n return {\n status: \"success\",\n message: `Background changed to ${background}`,\n };\n },\n });\n\n return (\n
\n
\n \n
\n
\n );\n};\n\nexport default AgenticChat;\n", "language": "typescript", "type": "file" }, { "name": "style.css", - "content": ".copilotKitWindow {\n box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);\n}\n\n.copilotKitHeader {\n border-top-left-radius: 5px !important;\n}\n\n.page-background {\n /* Darker gradient background */\n background: linear-gradient(170deg, #e9ecef 0%, #ced4da 100%);\n}\n\n@keyframes fade-scale-in {\n from {\n opacity: 0;\n transform: translateY(10px) scale(0.98);\n }\n to {\n opacity: 1;\n transform: translateY(0) scale(1);\n }\n}\n\n/* Updated card entry animation */\n@keyframes pop-in {\n 0% {\n opacity: 0;\n transform: translateY(15px) scale(0.95);\n }\n 70% {\n opacity: 1;\n transform: translateY(-2px) scale(1.02);\n }\n 100% {\n opacity: 1;\n transform: translateY(0) scale(1);\n }\n}\n\n/* Animation for subtle background gradient movement */\n@keyframes animated-gradient {\n 0% {\n background-position: 0% 50%;\n }\n 50% {\n background-position: 100% 50%;\n }\n 100% {\n background-position: 0% 50%;\n }\n}\n\n/* Animation for flash effect on apply */\n@keyframes flash-border-glow {\n 0% {\n /* Start slightly intensified */\n border-top-color: #ff5b4a !important;\n box-shadow: 0 10px 30px rgba(0, 0, 0, 0.07),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 25px rgba(255, 91, 74, 0.5);\n }\n 50% {\n /* Peak intensity */\n border-top-color: #ff4733 !important;\n box-shadow: 0 10px 30px rgba(0, 0, 0, 0.08),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 35px rgba(255, 71, 51, 0.7);\n }\n 100% {\n /* Return to default state appearance */\n border-top-color: #ff6f61 !important;\n box-shadow: 0 10px 30px rgba(0, 0, 0, 0.07),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 10px rgba(255, 111, 97, 0.15);\n }\n}\n\n/* Existing animation for haiku lines */\n@keyframes fade-slide-in {\n from {\n opacity: 0;\n transform: translateX(-15px);\n }\n to {\n opacity: 1;\n transform: translateX(0);\n }\n}\n\n.animated-fade-in {\n /* Use the new pop-in animation */\n 
animation: pop-in 0.6s ease-out forwards;\n}\n\n.haiku-card {\n /* Subtle animated gradient background */\n background: linear-gradient(120deg, #ffffff 0%, #fdfdfd 50%, #ffffff 100%);\n background-size: 200% 200%;\n animation: animated-gradient 10s ease infinite;\n\n /* === Explicit Border Override Attempt === */\n /* 1. Set the default grey border for all sides */\n border: 1px solid #dee2e6;\n\n /* 2. Explicitly override the top border immediately after */\n border-top: 10px solid #ff6f61 !important; /* Orange top - Added !important */\n /* === End Explicit Border Override Attempt === */\n\n padding: 2.5rem 3rem;\n border-radius: 20px;\n\n /* Default glow intensity */\n box-shadow: 0 10px 30px rgba(0, 0, 0, 0.07),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 15px rgba(255, 111, 97, 0.25);\n text-align: left;\n max-width: 745px;\n margin: 3rem auto;\n min-width: 600px;\n\n /* Transition */\n transition: transform 0.35s ease, box-shadow 0.35s ease, border-top-width 0.35s ease, border-top-color 0.35s ease;\n}\n\n.haiku-card:hover {\n transform: translateY(-8px) scale(1.03);\n /* Enhanced shadow + Glow */\n box-shadow: 0 15px 35px rgba(0, 0, 0, 0.1),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 25px rgba(255, 91, 74, 0.5);\n /* Modify only top border properties */\n border-top-width: 14px !important; /* Added !important */\n border-top-color: #ff5b4a !important; /* Added !important */\n}\n\n.haiku-card .flex {\n margin-bottom: 1.5rem;\n}\n\n.haiku-card .flex.haiku-line { /* Target the lines specifically */\n margin-bottom: 1.5rem;\n opacity: 0; /* Start hidden for animation */\n animation: fade-slide-in 0.5s ease-out forwards;\n /* animation-delay is set inline in page.tsx */\n}\n\n/* Remove previous explicit color overrides - rely on Tailwind */\n/* .haiku-card p.text-4xl {\n color: #212529;\n}\n\n.haiku-card p.text-base {\n color: #495057;\n} */\n\n.haiku-card.applied-flash {\n /* Apply the flash animation once */\n /* Note: animation itself has !important on 
border-top-color */\n animation: flash-border-glow 0.6s ease-out forwards;\n}\n\n/* Styling for images within the main haiku card */\n.haiku-card-image {\n width: 9.5rem; /* Increased size (approx w-48) */\n height: 9.5rem; /* Increased size (approx h-48) */\n object-fit: cover;\n border-radius: 1.5rem; /* rounded-xl */\n border: 1px solid #e5e7eb;\n /* Enhanced shadow with subtle orange hint */\n box-shadow: 0 8px 15px rgba(0, 0, 0, 0.1),\n 0 3px 6px rgba(0, 0, 0, 0.08),\n 0 0 10px rgba(255, 111, 97, 0.2);\n /* Inherit animation delay from inline style */\n animation-name: fadeIn;\n animation-duration: 0.5s;\n animation-fill-mode: both;\n}\n\n/* Styling for images within the suggestion card */\n.suggestion-card-image {\n width: 6.5rem; /* Increased slightly (w-20) */\n height: 6.5rem; /* Increased slightly (h-20) */\n object-fit: cover;\n border-radius: 1rem; /* Equivalent to rounded-md */\n border: 1px solid #d1d5db; /* Equivalent to border (using Tailwind gray-300) */\n margin-top: 0.5rem;\n /* Added shadow for suggestion images */\n box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1),\n 0 2px 4px rgba(0, 0, 0, 0.06);\n transition: all 0.2s ease-in-out; /* Added for smooth deselection */\n}\n\n/* Styling for the focused suggestion card image */\n.suggestion-card-image-focus {\n width: 6.5rem;\n height: 6.5rem;\n object-fit: cover;\n border-radius: 1rem;\n margin-top: 0.5rem;\n /* Highlight styles */\n border: 2px solid #ff6f61; /* Thicker, themed border */\n box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1), /* Base shadow for depth */\n 0 0 12px rgba(255, 111, 97, 0.6); /* Orange glow */\n transform: scale(1.05); /* Slightly scale up */\n transition: all 0.2s ease-in-out; /* Smooth transition for focus */\n}\n\n/* Styling for the suggestion card container in the sidebar */\n.suggestion-card {\n border: 1px solid #dee2e6; /* Same default border as haiku-card */\n border-top: 10px solid #ff6f61; /* Same orange top border */\n border-radius: 0.375rem; /* Default rounded-md */\n /* 
Note: background-color is set by Tailwind bg-gray-100 */\n /* Other styles like padding, margin, flex are handled by Tailwind */\n}\n\n.suggestion-image-container {\n display: flex;\n gap: 1rem;\n justify-content: space-between;\n width: 100%;\n height: 6.5rem;\n}\n\n/* Mobile responsive styles - matches useMobileView hook breakpoint */\n@media (max-width: 767px) {\n .haiku-card {\n padding: 1rem 1.5rem; /* Reduced from 2.5rem 3rem */\n min-width: auto; /* Remove min-width constraint */\n max-width: 100%; /* Full width on mobile */\n margin: 1rem auto; /* Reduced margin */\n }\n\n .haiku-card-image {\n width: 5.625rem; /* 90px - smaller on mobile */\n height: 5.625rem; /* 90px - smaller on mobile */\n }\n\n .suggestion-card-image {\n width: 5rem; /* Slightly smaller on mobile */\n height: 5rem; /* Slightly smaller on mobile */\n }\n\n .suggestion-card-image-focus {\n width: 5rem; /* Slightly smaller on mobile */\n height: 5rem; /* Slightly smaller on mobile */\n }\n}\n", + "content": ".copilotKitInput {\n border-bottom-left-radius: 0.75rem;\n border-bottom-right-radius: 0.75rem;\n border-top-left-radius: 0.75rem;\n border-top-right-radius: 0.75rem;\n border: 1px solid var(--copilot-kit-separator-color) !important;\n}\n \n.copilotKitChat {\n background-color: #fff !important;\n}\n ", "language": "css", "type": "file" }, { "name": "README.mdx", - "content": "# 🪶 Tool-Based Generative UI Haiku Creator\n\n## What This Demo Shows\n\nThis demo showcases CopilotKit's **tool-based generative UI** capabilities:\n\n1. **Frontend Rendering of Tool Calls**: Backend tool calls are automatically\n rendered in the UI\n2. **Dynamic UI Generation**: The UI updates in real-time as the agent generates\n content\n3. 
**Elegant Content Presentation**: Complex structured data (haikus) are\n beautifully displayed\n\n## How to Interact\n\nChat with your Copilot and ask for haikus about different topics:\n\n- \"Create a haiku about nature\"\n- \"Write a haiku about technology\"\n- \"Generate a haiku about the changing seasons\"\n- \"Make a humorous haiku about programming\"\n\nEach request will trigger the agent to generate a haiku and display it in a\nvisually appealing card format in the UI.\n\n## ✨ Tool-Based Generative UI in Action\n\n**What's happening technically:**\n\n- The agent processes your request and determines it should create a haiku\n- It calls a backend tool that returns structured haiku data\n- CopilotKit automatically renders this tool call in the frontend\n- The rendering is handled by the registered tool component in your React app\n- No manual state management is required to display the results\n\n**What you'll see in this demo:**\n\n- As you request a haiku, a beautifully formatted card appears in the UI\n- The haiku follows the traditional 5-7-5 syllable structure\n- Each haiku is presented with consistent styling\n- Multiple haikus can be generated in sequence\n- The UI adapts to display each new piece of content\n\nThis pattern of tool-based generative UI can be extended to create any kind of\ndynamic content - from data visualizations to interactive components, all driven\nby your Copilot's tool calls!\n", + "content": "# 🤖 Agentic Chat with Frontend Tools\n\n## What This Demo Shows\n\nThis demo showcases CopilotKit's **agentic chat** capabilities with **frontend\ntool integration**:\n\n1. **Natural Conversation**: Chat with your Copilot in a familiar chat interface\n2. **Frontend Tool Execution**: The Copilot can directly interacts with your UI\n by calling frontend functions\n3. 
**Seamless Integration**: Tools defined in the frontend and automatically\n discovered and made available to the agent\n\n## How to Interact\n\nTry asking your Copilot to:\n\n- \"Can you change the background color to something more vibrant?\"\n- \"Make the background a blue to purple gradient\"\n- \"Set the background to a sunset-themed gradient\"\n- \"Change it back to a simple light color\"\n\nYou can also chat about other topics - the agent will respond conversationally\nwhile having the ability to use your UI tools when appropriate.\n\n## ✨ Frontend Tool Integration in Action\n\n**What's happening technically:**\n\n- The React component defines a frontend function using `useCopilotAction`\n- CopilotKit automatically exposes this function to the agent\n- When you make a request, the agent determines whether to use the tool\n- The agent calls the function with the appropriate parameters\n- The UI immediately updates in response\n\n**What you'll see in this demo:**\n\n- The Copilot understands requests to change the background\n- It generates CSS values for colors and gradients\n- When it calls the tool, the background changes instantly\n- The agent provides a conversational response about the changes it made\n\nThis technique of exposing frontend functions to your Copilot can be extended to\nany UI manipulation you want to enable, from theme changes to data filtering,\nnavigation, or complex UI state management!\n", "language": "markdown", "type": "file" }, { - "name": "tool_based_generative_ui.py", - "content": "\"\"\"Tool Based Generative UI feature.\n\nNo special handling is required for this feature.\n\"\"\"\n\nfrom __future__ import annotations\n\nfrom pydantic_ai import Agent\n\nagent = Agent('openai:gpt-4o-mini')\napp = agent.to_ag_ui()\n", + "name": "__init__.py", + "content": "\"\"\"\nExample server for the AG-UI protocol.\n\"\"\"\n\nimport os\nimport uvicorn\nimport uuid\nfrom fastapi import FastAPI, Request\nfrom fastapi.responses import 
StreamingResponse\nfrom ag_ui.core import (\n RunAgentInput,\n EventType,\n RunStartedEvent,\n RunFinishedEvent,\n TextMessageStartEvent,\n TextMessageContentEvent,\n TextMessageEndEvent,\n)\nfrom ag_ui.encoder import EventEncoder\n\napp = FastAPI(title=\"AG-UI Endpoint\")\n\n@app.post(\"/\")\nasync def agentic_chat_endpoint(input_data: RunAgentInput, request: Request):\n \"\"\"Agentic chat endpoint\"\"\"\n # Get the accept header from the request\n accept_header = request.headers.get(\"accept\")\n\n # Create an event encoder to properly format SSE events\n encoder = EventEncoder(accept=accept_header)\n\n async def event_generator():\n\n # Send run started event\n yield encoder.encode(\n RunStartedEvent(\n type=EventType.RUN_STARTED,\n thread_id=input_data.thread_id,\n run_id=input_data.run_id\n ),\n )\n\n message_id = str(uuid.uuid4())\n\n yield encoder.encode(\n TextMessageStartEvent(\n type=EventType.TEXT_MESSAGE_START,\n message_id=message_id,\n role=\"assistant\"\n )\n )\n\n yield encoder.encode(\n TextMessageContentEvent(\n type=EventType.TEXT_MESSAGE_CONTENT,\n message_id=message_id,\n delta=\"Hello world!\"\n )\n )\n\n yield encoder.encode(\n TextMessageEndEvent(\n type=EventType.TEXT_MESSAGE_END,\n message_id=message_id\n )\n )\n\n # Send run finished event\n yield encoder.encode(\n RunFinishedEvent(\n type=EventType.RUN_FINISHED,\n thread_id=input_data.thread_id,\n run_id=input_data.run_id\n ),\n )\n\n return StreamingResponse(\n event_generator(),\n media_type=encoder.get_content_type()\n )\n\ndef main():\n \"\"\"Run the uvicorn server.\"\"\"\n port = int(os.getenv(\"PORT\", \"8000\"))\n uvicorn.run(\n \"example_server:app\",\n host=\"0.0.0.0\",\n port=port,\n reload=True\n )\n", "language": "python", "type": "file" } ], - "server-starter::agentic_chat": [ + "adk-middleware::agentic_chat": [ { "name": "page.tsx", "content": "\"use client\";\nimport React, { useState } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport 
\"./style.css\";\nimport { CopilotKit, useCoAgent, useCopilotAction, useCopilotChat } from \"@copilotkit/react-core\";\nimport { CopilotChat } from \"@copilotkit/react-ui\";\n\ninterface AgenticChatProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\nconst AgenticChat: React.FC = ({ params }) => {\n const { integrationId } = React.use(params);\n\n return (\n \n \n \n );\n};\n\nconst Chat = () => {\n const [background, setBackground] = useState(\"--copilot-kit-background-color\");\n\n useCopilotAction({\n name: \"change_background\",\n description:\n \"Change the background color of the chat. Can be anything that the CSS background attribute accepts. Regular colors, linear of radial gradients etc.\",\n parameters: [\n {\n name: \"background\",\n type: \"string\",\n description: \"The background. Prefer gradients.\",\n },\n ],\n handler: ({ background }) => {\n setBackground(background);\n return {\n status: \"success\",\n message: `Background changed to ${background}`,\n };\n },\n });\n\n return (\n
\n
\n \n
\n
\n );\n};\n\nexport default AgenticChat;\n", @@ -201,8 +201,86 @@ "type": "file" }, { - "name": "__init__.py", - "content": "\"\"\"\nExample server for the AG-UI protocol.\n\"\"\"\n\nimport os\nimport uvicorn\nimport uuid\nfrom fastapi import FastAPI, Request\nfrom fastapi.responses import StreamingResponse\nfrom ag_ui.core import (\n RunAgentInput,\n EventType,\n RunStartedEvent,\n RunFinishedEvent,\n TextMessageStartEvent,\n TextMessageContentEvent,\n TextMessageEndEvent,\n)\nfrom ag_ui.encoder import EventEncoder\n\napp = FastAPI(title=\"AG-UI Endpoint\")\n\n@app.post(\"/\")\nasync def agentic_chat_endpoint(input_data: RunAgentInput, request: Request):\n \"\"\"Agentic chat endpoint\"\"\"\n # Get the accept header from the request\n accept_header = request.headers.get(\"accept\")\n\n # Create an event encoder to properly format SSE events\n encoder = EventEncoder(accept=accept_header)\n\n async def event_generator():\n\n # Send run started event\n yield encoder.encode(\n RunStartedEvent(\n type=EventType.RUN_STARTED,\n thread_id=input_data.thread_id,\n run_id=input_data.run_id\n ),\n )\n\n message_id = str(uuid.uuid4())\n\n yield encoder.encode(\n TextMessageStartEvent(\n type=EventType.TEXT_MESSAGE_START,\n message_id=message_id,\n role=\"assistant\"\n )\n )\n\n yield encoder.encode(\n TextMessageContentEvent(\n type=EventType.TEXT_MESSAGE_CONTENT,\n message_id=message_id,\n delta=\"Hello world!\"\n )\n )\n\n yield encoder.encode(\n TextMessageEndEvent(\n type=EventType.TEXT_MESSAGE_END,\n message_id=message_id\n )\n )\n\n # Send run finished event\n yield encoder.encode(\n RunFinishedEvent(\n type=EventType.RUN_FINISHED,\n thread_id=input_data.thread_id,\n run_id=input_data.run_id\n ),\n )\n\n return StreamingResponse(\n event_generator(),\n media_type=encoder.get_content_type()\n )\n\ndef main():\n \"\"\"Run the uvicorn server.\"\"\"\n port = int(os.getenv(\"PORT\", \"8000\"))\n uvicorn.run(\n \"example_server:app\",\n host=\"0.0.0.0\",\n port=port,\n 
reload=True\n )\n", + "name": "agentic_chat.py", + "content": "\"\"\"Basic Chat feature.\"\"\"\n\nfrom __future__ import annotations\n\nfrom fastapi import FastAPI\nfrom adk_middleware import ADKAgent, add_adk_fastapi_endpoint\nfrom google.adk.agents import LlmAgent\nfrom google.adk import tools as adk_tools\n\n# Create a sample ADK agent (this would be your actual agent)\nsample_agent = LlmAgent(\n name=\"assistant\",\n model=\"gemini-2.0-flash\",\n instruction=\"\"\"\n You are a helpful assistant. Help users by answering their questions and assisting with their needs.\n - If the user greets you, please greet them back with specifically with \"Hello\".\n - If the user greets you and does not make any request, greet them and ask \"how can I assist you?\"\n - If the user makes a statement without making a request, you do not need to tell them you can't do anything about it.\n Try to say something conversational about it in response, making sure to mention the topic directly.\n - If the user asks you a question, if possible you can answer it using previous context without telling them that you cannot look it up.\n Only tell the user that you cannot search if you do not have enough information already to answer.\n \"\"\",\n tools=[adk_tools.preload_memory_tool.PreloadMemoryTool()]\n)\n\n# Create ADK middleware agent instance\nchat_agent = ADKAgent(\n adk_agent=sample_agent,\n app_name=\"demo_app\",\n user_id=\"demo_user\",\n session_timeout_seconds=3600,\n use_in_memory_services=True\n)\n\n# Create FastAPI app\napp = FastAPI(title=\"ADK Middleware Basic Chat\")\n\n# Add the ADK endpoint\nadd_adk_fastapi_endpoint(app, chat_agent, path=\"/\")\n", + "language": "python", + "type": "file" + } + ], + "adk-middleware::tool_based_generative_ui": [ + { + "name": "page.tsx", + "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from 
\"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku {\n japanese: string[] | [],\n english: string[] | [],\n image_names: string[] | [],\n selectedImage: string | null,\n}\n\ninterface HaikuCardProps {\n generatedHaiku: GenerateHaiku | Partial\n setHaikus: Dispatch>\n haikus: GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && }\n \n \n );\n}\n\nfunction MobileChat({ chatTitle, chatDescription, initialLabel }: { chatTitle: string, chatDescription: string, initialLabel: string }) {\n const defaultChatHeight = 50\n\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n return (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n \n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction getRandomImage(): string {\n return VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n}\n\nconst validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n while (correctedNames.length < 3) {\n const nextImage = getRandomImage();\n if (!usedValidNames.has(nextImage)) {\n correctedNames.push(nextImage);\n usedValidNames.add(nextImage);\n }\n }\n\n return correctedNames.slice(0, 3);\n};\n\nfunction HaikuCard({ generatedHaiku, setHaikus, haikus }: HaikuCardProps) {\n return (\n \n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n \n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n useCopilotAction({\n name: \"generate_haiku\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n description: `Names of 3 relevant images selected from the following: \\n -${VALID_IMAGE_NAMES.join('\\n -')}`,\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [newHaiku, ...prev].filter(h => h.english[0] !== \"A placeholder verse—\"));\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n \n\n {/* Main Display */}\n
\n
\n {haikus.map((haiku, index) => (\n (haikus.length == 1 || index == activeIndex) && (\n\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n setHaikus((prevHaikus) => {\n return prevHaikus.map((h, idx) => {\n if (idx === index) {\n return { ...h, selectedImage: imageName }\n } else {\n return { ...h }\n }\n })\n })}\n />\n ))}\n
\n )}\n
\n )\n ))}\n
\n \n \n );\n}\n\nfunction Thumbnails({ haikus, activeIndex, setActiveIndex, isMobile }: { haikus: Haiku[], activeIndex: number, setActiveIndex: (index: number) => void, isMobile: boolean }) {\n if (haikus.length == 0 || isMobile) { return null }\n return (\n
\n {haikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n \n ))}\n \n )\n\n}", + "language": "typescript", + "type": "file" + }, + { + "name": "style.css", + "content": ".copilotKitWindow {\n box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);\n}\n\n.copilotKitHeader {\n border-top-left-radius: 5px !important;\n}\n\n.page-background {\n /* Darker gradient background */\n background: linear-gradient(170deg, #e9ecef 0%, #ced4da 100%);\n}\n\n@keyframes fade-scale-in {\n from {\n opacity: 0;\n transform: translateY(10px) scale(0.98);\n }\n to {\n opacity: 1;\n transform: translateY(0) scale(1);\n }\n}\n\n/* Updated card entry animation */\n@keyframes pop-in {\n 0% {\n opacity: 0;\n transform: translateY(15px) scale(0.95);\n }\n 70% {\n opacity: 1;\n transform: translateY(-2px) scale(1.02);\n }\n 100% {\n opacity: 1;\n transform: translateY(0) scale(1);\n }\n}\n\n/* Animation for subtle background gradient movement */\n@keyframes animated-gradient {\n 0% {\n background-position: 0% 50%;\n }\n 50% {\n background-position: 100% 50%;\n }\n 100% {\n background-position: 0% 50%;\n }\n}\n\n/* Animation for flash effect on apply */\n@keyframes flash-border-glow {\n 0% {\n /* Start slightly intensified */\n border-top-color: #ff5b4a !important;\n box-shadow: 0 10px 30px rgba(0, 0, 0, 0.07),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 25px rgba(255, 91, 74, 0.5);\n }\n 50% {\n /* Peak intensity */\n border-top-color: #ff4733 !important;\n box-shadow: 0 10px 30px rgba(0, 0, 0, 0.08),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 35px rgba(255, 71, 51, 0.7);\n }\n 100% {\n /* Return to default state appearance */\n border-top-color: #ff6f61 !important;\n box-shadow: 0 10px 30px rgba(0, 0, 0, 0.07),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 10px rgba(255, 111, 97, 0.15);\n }\n}\n\n/* Existing animation for haiku lines */\n@keyframes fade-slide-in {\n from {\n opacity: 0;\n transform: translateX(-15px);\n }\n to {\n opacity: 1;\n transform: translateX(0);\n }\n}\n\n.animated-fade-in {\n /* Use the new pop-in animation */\n animation: 
pop-in 0.6s ease-out forwards;\n}\n\n.haiku-card {\n /* Subtle animated gradient background */\n background: linear-gradient(120deg, #ffffff 0%, #fdfdfd 50%, #ffffff 100%);\n background-size: 200% 200%;\n animation: animated-gradient 10s ease infinite;\n\n /* === Explicit Border Override Attempt === */\n /* 1. Set the default grey border for all sides */\n border: 1px solid #dee2e6;\n\n /* 2. Explicitly override the top border immediately after */\n border-top: 10px solid #ff6f61 !important; /* Orange top - Added !important */\n /* === End Explicit Border Override Attempt === */\n\n padding: 2.5rem 3rem;\n border-radius: 20px;\n\n /* Default glow intensity */\n box-shadow: 0 10px 30px rgba(0, 0, 0, 0.07),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 15px rgba(255, 111, 97, 0.25);\n text-align: left;\n max-width: 745px;\n margin: 3rem auto;\n min-width: 600px;\n\n /* Transition */\n transition: transform 0.35s ease, box-shadow 0.35s ease, border-top-width 0.35s ease, border-top-color 0.35s ease;\n}\n\n.haiku-card:hover {\n transform: translateY(-8px) scale(1.03);\n /* Enhanced shadow + Glow */\n box-shadow: 0 15px 35px rgba(0, 0, 0, 0.1),\n inset 0 1px 2px rgba(0, 0, 0, 0.01),\n 0 0 25px rgba(255, 91, 74, 0.5);\n /* Modify only top border properties */\n border-top-width: 14px !important; /* Added !important */\n border-top-color: #ff5b4a !important; /* Added !important */\n}\n\n.haiku-card .flex {\n margin-bottom: 1.5rem;\n}\n\n.haiku-card .flex.haiku-line { /* Target the lines specifically */\n margin-bottom: 1.5rem;\n opacity: 0; /* Start hidden for animation */\n animation: fade-slide-in 0.5s ease-out forwards;\n /* animation-delay is set inline in page.tsx */\n}\n\n/* Remove previous explicit color overrides - rely on Tailwind */\n/* .haiku-card p.text-4xl {\n color: #212529;\n}\n\n.haiku-card p.text-base {\n color: #495057;\n} */\n\n.haiku-card.applied-flash {\n /* Apply the flash animation once */\n /* Note: animation itself has !important on border-top-color 
*/\n animation: flash-border-glow 0.6s ease-out forwards;\n}\n\n/* Styling for images within the main haiku card */\n.haiku-card-image {\n width: 9.5rem; /* Increased size (approx w-48) */\n height: 9.5rem; /* Increased size (approx h-48) */\n object-fit: cover;\n border-radius: 1.5rem; /* rounded-xl */\n border: 1px solid #e5e7eb;\n /* Enhanced shadow with subtle orange hint */\n box-shadow: 0 8px 15px rgba(0, 0, 0, 0.1),\n 0 3px 6px rgba(0, 0, 0, 0.08),\n 0 0 10px rgba(255, 111, 97, 0.2);\n /* Inherit animation delay from inline style */\n animation-name: fadeIn;\n animation-duration: 0.5s;\n animation-fill-mode: both;\n}\n\n/* Styling for images within the suggestion card */\n.suggestion-card-image {\n width: 6.5rem; /* Increased slightly (w-20) */\n height: 6.5rem; /* Increased slightly (h-20) */\n object-fit: cover;\n border-radius: 1rem; /* Equivalent to rounded-md */\n border: 1px solid #d1d5db; /* Equivalent to border (using Tailwind gray-300) */\n margin-top: 0.5rem;\n /* Added shadow for suggestion images */\n box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1),\n 0 2px 4px rgba(0, 0, 0, 0.06);\n transition: all 0.2s ease-in-out; /* Added for smooth deselection */\n}\n\n/* Styling for the focused suggestion card image */\n.suggestion-card-image-focus {\n width: 6.5rem;\n height: 6.5rem;\n object-fit: cover;\n border-radius: 1rem;\n margin-top: 0.5rem;\n /* Highlight styles */\n border: 2px solid #ff6f61; /* Thicker, themed border */\n box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1), /* Base shadow for depth */\n 0 0 12px rgba(255, 111, 97, 0.6); /* Orange glow */\n transform: scale(1.05); /* Slightly scale up */\n transition: all 0.2s ease-in-out; /* Smooth transition for focus */\n}\n\n/* Styling for the suggestion card container in the sidebar */\n.suggestion-card {\n border: 1px solid #dee2e6; /* Same default border as haiku-card */\n border-top: 10px solid #ff6f61; /* Same orange top border */\n border-radius: 0.375rem; /* Default rounded-md */\n /* Note: 
background-color is set by Tailwind bg-gray-100 */\n /* Other styles like padding, margin, flex are handled by Tailwind */\n}\n\n.suggestion-image-container {\n display: flex;\n gap: 1rem;\n justify-content: space-between;\n width: 100%;\n height: 6.5rem;\n}\n\n/* Mobile responsive styles - matches useMobileView hook breakpoint */\n@media (max-width: 767px) {\n .haiku-card {\n padding: 1rem 1.5rem; /* Reduced from 2.5rem 3rem */\n min-width: auto; /* Remove min-width constraint */\n max-width: 100%; /* Full width on mobile */\n margin: 1rem auto; /* Reduced margin */\n }\n\n .haiku-card-image {\n width: 5.625rem; /* 90px - smaller on mobile */\n height: 5.625rem; /* 90px - smaller on mobile */\n }\n\n .suggestion-card-image {\n width: 5rem; /* Slightly smaller on mobile */\n height: 5rem; /* Slightly smaller on mobile */\n }\n\n .suggestion-card-image-focus {\n width: 5rem; /* Slightly smaller on mobile */\n height: 5rem; /* Slightly smaller on mobile */\n }\n}\n", + "language": "css", + "type": "file" + }, + { + "name": "README.mdx", + "content": "# 🪶 Tool-Based Generative UI Haiku Creator\n\n## What This Demo Shows\n\nThis demo showcases CopilotKit's **tool-based generative UI** capabilities:\n\n1. **Frontend Rendering of Tool Calls**: Backend tool calls are automatically\n rendered in the UI\n2. **Dynamic UI Generation**: The UI updates in real-time as the agent generates\n content\n3. 
**Elegant Content Presentation**: Complex structured data (haikus) are\n beautifully displayed\n\n## How to Interact\n\nChat with your Copilot and ask for haikus about different topics:\n\n- \"Create a haiku about nature\"\n- \"Write a haiku about technology\"\n- \"Generate a haiku about the changing seasons\"\n- \"Make a humorous haiku about programming\"\n\nEach request will trigger the agent to generate a haiku and display it in a\nvisually appealing card format in the UI.\n\n## ✨ Tool-Based Generative UI in Action\n\n**What's happening technically:**\n\n- The agent processes your request and determines it should create a haiku\n- It calls a backend tool that returns structured haiku data\n- CopilotKit automatically renders this tool call in the frontend\n- The rendering is handled by the registered tool component in your React app\n- No manual state management is required to display the results\n\n**What you'll see in this demo:**\n\n- As you request a haiku, a beautifully formatted card appears in the UI\n- The haiku follows the traditional 5-7-5 syllable structure\n- Each haiku is presented with consistent styling\n- Multiple haikus can be generated in sequence\n- The UI adapts to display each new piece of content\n\nThis pattern of tool-based generative UI can be extended to create any kind of\ndynamic content - from data visualizations to interactive components, all driven\nby your Copilot's tool calls!\n", + "language": "markdown", + "type": "file" + }, + { + "name": "tool_based_generative_ui.py", + "content": "\"\"\"Tool Based Generative UI feature.\"\"\"\n\nfrom __future__ import annotations\n\nfrom typing import Any, List\n\nfrom fastapi import FastAPI\nfrom adk_middleware import ADKAgent, add_adk_fastapi_endpoint\nfrom google.adk.agents import Agent\nfrom google.adk.tools import ToolContext\nfrom google.genai import types\n\n# List of available images (modify path if needed)\nIMAGE_LIST = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n 
\"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n]\n\n# Prepare the image list string for the prompt\nimage_list_str = \"\\n\".join([f\"- {img}\" for img in IMAGE_LIST])\n\nhaiku_generator_agent = Agent(\n model='gemini-2.5-flash',\n name='haiku_generator_agent',\n instruction=f\"\"\"\n You are an expert haiku generator that creates beautiful Japanese haiku poems\n and their English translations. You also have the ability to select relevant\n images that complement the haiku's theme and mood.\n\n When generating a haiku:\n 1. Create a traditional 5-7-5 syllable structure haiku in Japanese\n 2. Provide an accurate and poetic English translation\n 3. Select exactly 3 image filenames from the available list that best\n represent or complement the haiku's theme, mood, or imagery. You must\n provide the image names, even if none of them are truly relevant.\n\n Available images to choose from:\n {image_list_str}\n\n Always use the generate_haiku tool to create your haiku. The tool will handle\n the formatting and validation of your response.\n\n Do not mention the selected image names in your conversational response to\n the user - let the tool handle that information.\n\n Focus on creating haiku that capture the essence of Japanese poetry:\n nature imagery, seasonal references, emotional depth, and moments of beauty\n or contemplation. That said, any topic is fair game. 
Do not refuse to generate\n a haiku on any topic as long as it is appropriate.\n \"\"\",\n generate_content_config=types.GenerateContentConfig(\n temperature=0.7, # Slightly higher temperature for creativity\n top_p=0.9,\n top_k=40\n ),\n)\n\n# Create ADK middleware agent instance\nadk_agent_haiku_generator = ADKAgent(\n adk_agent=haiku_generator_agent,\n app_name=\"demo_app\",\n user_id=\"demo_user\",\n session_timeout_seconds=3600,\n use_in_memory_services=True\n)\n\n# Create FastAPI app\napp = FastAPI(title=\"ADK Middleware Tool Based Generative UI\")\n\n# Add the ADK endpoint\nadd_adk_fastapi_endpoint(app, adk_agent_haiku_generator, path=\"/\")\n", + "language": "python", + "type": "file" + } + ], + "adk-middleware::human_in_the_loop": [ + { + "name": "page.tsx", + "content": "\"use client\";\nimport React, { useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport { CopilotKit, useCopilotAction, useLangGraphInterrupt } from \"@copilotkit/react-core\";\nimport { CopilotChat } from \"@copilotkit/react-ui\";\nimport { useTheme } from \"next-themes\";\n\ninterface HumanInTheLoopProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\nconst HumanInTheLoop: React.FC = ({ params }) => {\n const { integrationId } = React.use(params);\n\n return (\n \n \n \n );\n};\n\ninterface Step {\n description: string;\n status: \"disabled\" | \"enabled\" | \"executing\";\n}\n\n// Shared UI Components\nconst StepContainer = ({ theme, children }: { theme?: string; children: React.ReactNode }) => (\n
\n
\n {children}\n
\n
\n);\n\nconst StepHeader = ({ \n theme, \n enabledCount, \n totalCount, \n status, \n showStatus = false \n}: { \n theme?: string; \n enabledCount: number; \n totalCount: number; \n status?: string;\n showStatus?: boolean;\n}) => (\n
\n
\n

\n Select Steps\n

\n
\n
\n {enabledCount}/{totalCount} Selected\n
\n {showStatus && (\n
\n {status === \"executing\" ? \"Ready\" : \"Waiting\"}\n
\n )}\n
\n
\n \n
\n
0 ? (enabledCount / totalCount) * 100 : 0}%` }}\n />\n
\n
\n);\n\nconst StepItem = ({ \n step, \n theme, \n status, \n onToggle, \n disabled = false \n}: { \n step: { description: string; status: string }; \n theme?: string; \n status?: string;\n onToggle: () => void;\n disabled?: boolean;\n}) => (\n
\n \n
\n);\n\nconst ActionButton = ({ \n variant, \n theme, \n disabled, \n onClick, \n children \n}: { \n variant: \"primary\" | \"secondary\" | \"success\" | \"danger\";\n theme?: string;\n disabled?: boolean;\n onClick: () => void;\n children: React.ReactNode;\n}) => {\n const baseClasses = \"px-6 py-3 rounded-lg font-semibold transition-all duration-200\";\n const enabledClasses = \"hover:scale-105 shadow-md hover:shadow-lg\";\n const disabledClasses = \"opacity-50 cursor-not-allowed\";\n \n const variantClasses = {\n primary: \"bg-gradient-to-r from-purple-500 to-purple-700 hover:from-purple-600 hover:to-purple-800 text-white shadow-lg hover:shadow-xl\",\n secondary: theme === \"dark\"\n ? \"bg-slate-700 hover:bg-slate-600 text-white border border-slate-600 hover:border-slate-500\"\n : \"bg-gray-100 hover:bg-gray-200 text-gray-800 border border-gray-300 hover:border-gray-400\",\n success: \"bg-gradient-to-r from-green-500 to-emerald-600 hover:from-green-600 hover:to-emerald-700 text-white shadow-lg hover:shadow-xl\",\n danger: \"bg-gradient-to-r from-red-500 to-red-600 hover:from-red-600 hover:to-red-700 text-white shadow-lg hover:shadow-xl\"\n };\n\n return (\n \n {children}\n \n );\n};\n\nconst DecorativeElements = ({ \n theme, \n variant = \"default\" \n}: { \n theme?: string; \n variant?: \"default\" | \"success\" | \"danger\" \n}) => (\n <>\n
\n
\n \n);\nconst InterruptHumanInTheLoop: React.FC<{\n event: { value: { steps: Step[] } };\n resolve: (value: string) => void;\n}> = ({ event, resolve }) => {\n const { theme } = useTheme();\n \n // Parse and initialize steps data\n let initialSteps: Step[] = [];\n if (event.value && event.value.steps && Array.isArray(event.value.steps)) {\n initialSteps = event.value.steps.map((step: any) => ({\n description: typeof step === \"string\" ? step : step.description || \"\",\n status: typeof step === \"object\" && step.status ? step.status : \"enabled\",\n }));\n }\n\n const [localSteps, setLocalSteps] = useState(initialSteps);\n const enabledCount = localSteps.filter(step => step.status === \"enabled\").length;\n\n const handleStepToggle = (index: number) => {\n setLocalSteps((prevSteps) =>\n prevSteps.map((step, i) =>\n i === index\n ? { ...step, status: step.status === \"enabled\" ? \"disabled\" : \"enabled\" }\n : step,\n ),\n );\n };\n\n const handlePerformSteps = () => {\n const selectedSteps = localSteps\n .filter((step) => step.status === \"enabled\")\n .map((step) => step.description);\n resolve(\"The user selected the following steps: \" + selectedSteps.join(\", \"));\n };\n\n return (\n \n \n \n
\n {localSteps.map((step, index) => (\n handleStepToggle(index)}\n />\n ))}\n
\n\n
\n \n \n Perform Steps\n \n {enabledCount}\n \n \n
\n\n \n
\n );\n};\n\nconst Chat = ({ integrationId }: { integrationId: string }) => {\n // Langgraph uses it's own hook to handle human-in-the-loop interactions via langgraph interrupts,\n // This hook won't do anything for other integrations.\n useLangGraphInterrupt({\n render: ({ event, resolve }) => ,\n });\n useCopilotAction({\n name: \"generate_task_steps\",\n description: \"Generates a list of steps for the user to perform\",\n parameters: [\n {\n name: \"steps\",\n type: \"object[]\",\n attributes: [\n {\n name: \"description\",\n type: \"string\",\n },\n {\n name: \"status\",\n type: \"string\",\n enum: [\"enabled\", \"disabled\", \"executing\"],\n },\n ],\n },\n ],\n // Langgraph uses it's own hook to handle human-in-the-loop interactions via langgraph interrupts,\n // so don't use this action for langgraph integration.\n available: ['langgraph', 'langgraph-fastapi', 'langgraph-typescript'].includes(integrationId) ? 'disabled' : 'enabled',\n renderAndWaitForResponse: ({ args, respond, status }) => {\n return ;\n },\n });\n\n return (\n
\n
\n \n
\n
\n );\n};\n\nconst StepsFeedback = ({ args, respond, status }: { args: any; respond: any; status: any }) => {\n const { theme } = useTheme();\n const [localSteps, setLocalSteps] = useState([]);\n const [accepted, setAccepted] = useState(null);\n\n useEffect(() => {\n if (status === \"executing\" && localSteps.length === 0) {\n setLocalSteps(args.steps);\n }\n }, [status, args.steps, localSteps]);\n\n if (args.steps === undefined || args.steps.length === 0) {\n return <>;\n }\n\n const steps = localSteps.length > 0 ? localSteps : args.steps;\n const enabledCount = steps.filter((step: any) => step.status === \"enabled\").length;\n\n const handleStepToggle = (index: number) => {\n setLocalSteps((prevSteps) =>\n prevSteps.map((step, i) =>\n i === index\n ? { ...step, status: step.status === \"enabled\" ? \"disabled\" : \"enabled\" }\n : step,\n ),\n );\n };\n\n const handleReject = () => {\n if (respond) {\n setAccepted(false);\n respond({ accepted: false });\n }\n };\n\n const handleConfirm = () => {\n if (respond) {\n setAccepted(true);\n respond({ accepted: true, steps: localSteps.filter(step => step.status === \"enabled\")});\n }\n };\n\n return (\n \n \n \n
\n {steps.map((step: any, index: any) => (\n handleStepToggle(index)}\n disabled={status !== \"executing\"}\n />\n ))}\n
\n\n {/* Action Buttons - Different logic from InterruptHumanInTheLoop */}\n {accepted === null && (\n
\n \n \n Reject\n \n \n \n Confirm\n \n {enabledCount}\n \n \n
\n )}\n\n {/* Result State - Unique to StepsFeedback */}\n {accepted !== null && (\n
\n
\n {accepted ? \"✓\" : \"✗\"}\n {accepted ? \"Accepted\" : \"Rejected\"}\n
\n
\n )}\n\n \n
\n );\n};\n\n\nexport default HumanInTheLoop;\n", + "language": "typescript", + "type": "file" + }, + { + "name": "style.css", + "content": ".copilotKitInput {\n border-bottom-left-radius: 0.75rem;\n border-bottom-right-radius: 0.75rem;\n border-top-left-radius: 0.75rem;\n border-top-right-radius: 0.75rem;\n border: 1px solid var(--copilot-kit-separator-color) !important;\n}\n\n.copilotKitChat {\n background-color: #fff !important;\n}\n", + "language": "css", + "type": "file" + }, + { + "name": "README.mdx", + "content": "# 🤝 Human-in-the-Loop Task Planner\n\n## What This Demo Shows\n\nThis demo showcases CopilotKit's **human-in-the-loop** capabilities:\n\n1. **Collaborative Planning**: The Copilot generates task steps and lets you\n decide which ones to perform\n2. **Interactive Decision Making**: Select or deselect steps to customize the\n execution plan\n3. **Adaptive Responses**: The Copilot adapts its execution based on your\n choices, even handling missing steps\n\n## How to Interact\n\nTry these steps to experience the demo:\n\n1. Ask your Copilot to help with a task, such as:\n\n - \"Make me a sandwich\"\n - \"Plan a weekend trip\"\n - \"Organize a birthday party\"\n - \"Start a garden\"\n\n2. Review the suggested steps provided by your Copilot\n\n3. Select or deselect steps using the checkboxes to customize the plan\n\n - Try removing essential steps to see how the Copilot adapts!\n\n4. 
Click \"Execute Plan\" to see the outcome based on your selections\n\n## ✨ Human-in-the-Loop Magic in Action\n\n**What's happening technically:**\n\n- The agent analyzes your request and breaks it down into logical steps\n- These steps are presented to you through a dynamic UI component\n- Your selections are captured as user input\n- The agent considers your choices when executing the plan\n- The agent adapts to missing steps with creative problem-solving\n\n**What you'll see in this demo:**\n\n- The Copilot provides a detailed, step-by-step plan for your task\n- You have complete control over which steps to include\n- If you remove essential steps, the Copilot provides entertaining and creative\n workarounds\n- The final execution reflects your choices, showing how human input shapes the\n outcome\n- Each response is tailored to your specific selections\n\nThis human-in-the-loop pattern creates a powerful collaborative experience where\nboth human judgment and AI capabilities work together to achieve better results\nthan either could alone!\n", + "language": "markdown", + "type": "file" + }, + { + "name": "human_in_the_loop.py", + "content": "\"\"\"Human in the Loop feature.\"\"\"\n\nfrom __future__ import annotations\n\nfrom fastapi import FastAPI\nfrom adk_middleware import ADKAgent, add_adk_fastapi_endpoint\nfrom google.adk.agents import Agent\nfrom google.genai import types\n\nDEFINE_TASK_TOOL = {\n \"type\": \"function\",\n \"function\": {\n \"name\": \"generate_task_steps\",\n \"description\": \"Make up 10 steps (only a couple of words per step) that are required for a task. The step should be in imperative form (i.e. 
Dig hole, Open door, ...)\",\n \"parameters\": {\n \"type\": \"object\",\n \"properties\": {\n \"steps\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"description\": {\n \"type\": \"string\",\n \"description\": \"The text of the step in imperative form\"\n },\n \"status\": {\n \"type\": \"string\",\n \"enum\": [\"enabled\"],\n \"description\": \"The status of the step, always 'enabled'\"\n }\n },\n \"required\": [\"description\", \"status\"]\n },\n \"description\": \"An array of 10 step objects, each containing text and status\"\n }\n },\n \"required\": [\"steps\"]\n }\n }\n}\n\nhuman_in_loop_agent = Agent(\n model='gemini-2.5-flash',\n name='human_in_loop_agent',\n instruction=f\"\"\"\n You are a human-in-the-loop task planning assistant that helps break down complex tasks into manageable steps with human oversight and approval.\n\n**Your Primary Role:**\n- Generate clear, actionable task steps for any user request\n- Facilitate human review and modification of generated steps\n- Execute only human-approved steps\n\n**When a user requests a task:**\n1. ALWAYS call the `generate_task_steps` function to create 10 step breakdown\n2. Each step must be:\n - Written in imperative form (e.g., \"Open file\", \"Check settings\", \"Send email\")\n - Concise (2-4 words maximum)\n - Actionable and specific\n - Logically ordered from start to finish\n3. Initially set all steps to \"enabled\" status\n4. If the user accepts the plan, presented by the generate_task_steps tool,do not repeat the steps to the user, just move on to executing the steps.\n5. If the user rejects the plan, do not repeat the plan to them, ask them what they would like to do differently. 
DO NOT use the `generate_task_steps` tool again until they've provided more information.\n\n\n**When executing steps:**\n- Only execute steps with \"enabled\" status.\n- For each step you are executing, tell the user what you are doing.\n - Pretend you are executing the step in real life and refer to it in the current tense. End each step with an ellipsis.\n - Each step MUST be on a new line. DO NOT combine steps into one line.\n - For example for the following steps:\n - Inhale deeply\n - Exhale forcefully\n - Produce sound\n a good response would be:\n ```\n Inhaling deeply\n Exhaling forcefully\n Producing sound\n ```\n a bad response would be `Inhale deeply, exhale forcefully, produce sound` or `inhale deeply... exhale forcefully... produce sound...`,\n- Skip any steps marked as \"disabled\"\n- Afterwards, confirm the execution of the steps to the user, e.g. if the user asked for a plan to go to mars, respond like \"I have completed the plan and gone to mars\"\n- EVERY STEP AND THE CONFIRMATION MUST BE ON A NEW LINE. DO NOT COMBINE THEM INTO ONE LINE. USE A
TAG TO SEPARATE THEM.\n\n**Key Guidelines:**\n- Always generate exactly 10 steps\n- Make steps granular enough to be independently enabled/disabled\n\nTool reference: {DEFINE_TASK_TOOL}\n \"\"\",\n generate_content_config=types.GenerateContentConfig(\n temperature=0.7, # Slightly higher temperature for creativity\n top_p=0.9,\n top_k=40\n ),\n)\n\n# Create ADK middleware agent instance\nadk_human_in_loop_agent = ADKAgent(\n adk_agent=human_in_loop_agent,\n app_name=\"demo_app\",\n user_id=\"demo_user\",\n session_timeout_seconds=3600,\n use_in_memory_services=True\n)\n\n# Create FastAPI app\napp = FastAPI(title=\"ADK Middleware Human in the Loop\")\n\n# Add the ADK endpoint\nadd_adk_fastapi_endpoint(app, adk_human_in_loop_agent, path=\"/\")\n", + "language": "python", + "type": "file" + } + ], + "adk-middleware::shared_state": [ + { + "name": "page.tsx", + "content": "\"use client\";\nimport { CopilotKit, useCoAgent, useCopilotChat } from \"@copilotkit/react-core\";\nimport { CopilotChat, CopilotSidebar } from \"@copilotkit/react-ui\";\nimport React, { useState, useEffect, useRef } from \"react\";\nimport { Role, TextMessage } from \"@copilotkit/runtime-client-gql\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface SharedStateProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\nexport default function SharedState({ params }: SharedStateProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n\n const chatTitle = 'AI Recipe Assistant'\n const chatDescription = 'Ask me to craft recipes'\n const initialLabel = 'Hi 👋 How can I help with your recipe?'\n\n return (\n \n \n \n {isMobile ? 
(\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n
\n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n
\n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n
\n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n ) : (\n \n )}\n \n \n );\n}\n\nenum SkillLevel {\n BEGINNER = \"Beginner\",\n INTERMEDIATE = \"Intermediate\",\n ADVANCED = \"Advanced\",\n}\n\nenum CookingTime {\n FiveMin = \"5 min\",\n FifteenMin = \"15 min\",\n ThirtyMin = \"30 min\",\n FortyFiveMin = \"45 min\",\n SixtyPlusMin = \"60+ min\",\n}\n\nconst cookingTimeValues = [\n { label: CookingTime.FiveMin, value: 0 },\n { label: CookingTime.FifteenMin, value: 1 },\n { label: CookingTime.ThirtyMin, value: 2 },\n { label: CookingTime.FortyFiveMin, value: 3 },\n { label: CookingTime.SixtyPlusMin, value: 4 },\n];\n\nenum SpecialPreferences {\n HighProtein = \"High Protein\",\n LowCarb = \"Low Carb\",\n Spicy = \"Spicy\",\n BudgetFriendly = \"Budget-Friendly\",\n OnePotMeal = \"One-Pot Meal\",\n Vegetarian = \"Vegetarian\",\n Vegan = \"Vegan\",\n}\n\ninterface Ingredient {\n icon: string;\n name: string;\n amount: string;\n}\n\ninterface Recipe {\n title: string;\n skill_level: SkillLevel;\n cooking_time: CookingTime;\n special_preferences: string[];\n ingredients: Ingredient[];\n instructions: string[];\n}\n\ninterface RecipeAgentState {\n recipe: Recipe;\n}\n\nconst INITIAL_STATE: RecipeAgentState = {\n recipe: {\n title: \"Make Your Recipe\",\n skill_level: SkillLevel.INTERMEDIATE,\n cooking_time: CookingTime.FortyFiveMin,\n special_preferences: [],\n ingredients: [\n { icon: \"🥕\", name: \"Carrots\", amount: \"3 large, grated\" },\n { icon: \"🌾\", name: \"All-Purpose Flour\", amount: \"2 cups\" },\n ],\n instructions: [\"Preheat oven to 350°F (175°C)\"],\n },\n};\n\nfunction Recipe() {\n const { isMobile } = useMobileView();\n const { state: agentState, setState: setAgentState } = useCoAgent({\n name: \"shared_state\",\n initialState: INITIAL_STATE,\n });\n\n const [recipe, setRecipe] = useState(INITIAL_STATE.recipe);\n const { appendMessage, isLoading } = useCopilotChat();\n const [editingInstructionIndex, 
setEditingInstructionIndex] = useState(null);\n const newInstructionRef = useRef(null);\n\n const updateRecipe = (partialRecipe: Partial) => {\n setAgentState({\n ...agentState,\n recipe: {\n ...recipe,\n ...partialRecipe,\n },\n });\n setRecipe({\n ...recipe,\n ...partialRecipe,\n });\n };\n\n const newRecipeState = { ...recipe };\n const newChangedKeys = [];\n const changedKeysRef = useRef([]);\n\n for (const key in recipe) {\n if (\n agentState &&\n agentState.recipe &&\n (agentState.recipe as any)[key] !== undefined &&\n (agentState.recipe as any)[key] !== null\n ) {\n let agentValue = (agentState.recipe as any)[key];\n const recipeValue = (recipe as any)[key];\n\n // Check if agentValue is a string and replace \\n with actual newlines\n if (typeof agentValue === \"string\") {\n agentValue = agentValue.replace(/\\\\n/g, \"\\n\");\n }\n\n if (JSON.stringify(agentValue) !== JSON.stringify(recipeValue)) {\n (newRecipeState as any)[key] = agentValue;\n newChangedKeys.push(key);\n }\n }\n }\n\n if (newChangedKeys.length > 0) {\n changedKeysRef.current = newChangedKeys;\n } else if (!isLoading) {\n changedKeysRef.current = [];\n }\n\n useEffect(() => {\n setRecipe(newRecipeState);\n }, [JSON.stringify(newRecipeState)]);\n\n const handleTitleChange = (event: React.ChangeEvent) => {\n updateRecipe({\n title: event.target.value,\n });\n };\n\n const handleSkillLevelChange = (event: React.ChangeEvent) => {\n updateRecipe({\n skill_level: event.target.value as SkillLevel,\n });\n };\n\n const handleDietaryChange = (preference: string, checked: boolean) => {\n if (checked) {\n updateRecipe({\n special_preferences: [...recipe.special_preferences, preference],\n });\n } else {\n updateRecipe({\n special_preferences: recipe.special_preferences.filter((p) => p !== preference),\n });\n }\n };\n\n const handleCookingTimeChange = (event: React.ChangeEvent) => {\n updateRecipe({\n cooking_time: cookingTimeValues[Number(event.target.value)].label,\n });\n };\n\n const addIngredient 
= () => {\n // Pick a random food emoji from our valid list\n updateRecipe({\n ingredients: [...recipe.ingredients, { icon: \"🍴\", name: \"\", amount: \"\" }],\n });\n };\n\n const updateIngredient = (index: number, field: keyof Ingredient, value: string) => {\n const updatedIngredients = [...recipe.ingredients];\n updatedIngredients[index] = {\n ...updatedIngredients[index],\n [field]: value,\n };\n updateRecipe({ ingredients: updatedIngredients });\n };\n\n const removeIngredient = (index: number) => {\n const updatedIngredients = [...recipe.ingredients];\n updatedIngredients.splice(index, 1);\n updateRecipe({ ingredients: updatedIngredients });\n };\n\n const addInstruction = () => {\n const newIndex = recipe.instructions.length;\n updateRecipe({\n instructions: [...recipe.instructions, \"\"],\n });\n // Set the new instruction as the editing one\n setEditingInstructionIndex(newIndex);\n\n // Focus the new instruction after render\n setTimeout(() => {\n const textareas = document.querySelectorAll(\".instructions-container textarea\");\n const newTextarea = textareas[textareas.length - 1] as HTMLTextAreaElement;\n if (newTextarea) {\n newTextarea.focus();\n }\n }, 50);\n };\n\n const updateInstruction = (index: number, value: string) => {\n const updatedInstructions = [...recipe.instructions];\n updatedInstructions[index] = value;\n updateRecipe({ instructions: updatedInstructions });\n };\n\n const removeInstruction = (index: number) => {\n const updatedInstructions = [...recipe.instructions];\n updatedInstructions.splice(index, 1);\n updateRecipe({ instructions: updatedInstructions });\n };\n\n // Simplified icon handler that defaults to a fork/knife for any problematic icons\n const getProperIcon = (icon: string | undefined): string => {\n // If icon is undefined return the default\n if (!icon) {\n return \"🍴\";\n }\n\n return icon;\n };\n\n return (\n
\n {/* Recipe Title */}\n
\n \n\n
\n
\n 🕒\n t.label === recipe.cooking_time)?.value || 3}\n onChange={handleCookingTimeChange}\n style={{\n backgroundImage:\n \"url(\\\"data:image/svg+xml;charset=UTF-8,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' fill='none' stroke='%23555' stroke-width='2' stroke-linecap='round' stroke-linejoin='round'%3e%3cpolyline points='6 9 12 15 18 9'%3e%3c/polyline%3e%3c/svg%3e\\\")\",\n backgroundRepeat: \"no-repeat\",\n backgroundPosition: \"right 0px center\",\n backgroundSize: \"12px\",\n appearance: \"none\",\n WebkitAppearance: \"none\",\n }}\n >\n {cookingTimeValues.map((time) => (\n \n ))}\n \n
\n\n
\n 🏆\n \n {Object.values(SkillLevel).map((level) => (\n \n ))}\n \n
\n
\n
\n\n {/* Dietary Preferences */}\n
\n {changedKeysRef.current.includes(\"special_preferences\") && }\n

Dietary Preferences

\n
\n {Object.values(SpecialPreferences).map((option) => (\n \n ))}\n
\n
\n\n {/* Ingredients */}\n
\n {changedKeysRef.current.includes(\"ingredients\") && }\n
\n

Ingredients

\n \n + Add Ingredient\n \n
\n \n {recipe.ingredients.map((ingredient, index) => (\n
\n
{getProperIcon(ingredient.icon)}
\n
\n updateIngredient(index, \"name\", e.target.value)}\n placeholder=\"Ingredient name\"\n className=\"ingredient-name-input\"\n />\n updateIngredient(index, \"amount\", e.target.value)}\n placeholder=\"Amount\"\n className=\"ingredient-amount-input\"\n />\n
\n removeIngredient(index)}\n aria-label=\"Remove ingredient\"\n >\n ×\n \n
\n ))}\n
\n \n\n {/* Instructions */}\n
\n {changedKeysRef.current.includes(\"instructions\") && }\n
\n

Instructions

\n \n
\n
\n {recipe.instructions.map((instruction, index) => (\n
\n {/* Number Circle */}\n
{index + 1}
\n\n {/* Vertical Line */}\n {index < recipe.instructions.length - 1 &&
}\n\n {/* Instruction Content */}\n setEditingInstructionIndex(index)}\n >\n updateInstruction(index, e.target.value)}\n placeholder={!instruction ? \"Enter cooking instruction...\" : \"\"}\n onFocus={() => setEditingInstructionIndex(index)}\n onBlur={(e) => {\n // Only blur if clicking outside this instruction\n if (!e.relatedTarget || !e.currentTarget.contains(e.relatedTarget as Node)) {\n setEditingInstructionIndex(null);\n }\n }}\n />\n\n {/* Delete Button (only visible on hover) */}\n {\n e.stopPropagation(); // Prevent triggering parent onClick\n removeInstruction(index);\n }}\n aria-label=\"Remove instruction\"\n >\n ×\n \n
\n
\n ))}\n
\n
\n\n {/* Improve with AI Button */}\n
\n {\n if (!isLoading) {\n appendMessage(\n new TextMessage({\n content: \"Improve the recipe\",\n role: Role.User,\n }),\n );\n }\n }}\n disabled={isLoading}\n >\n {isLoading ? \"Please Wait...\" : \"Improve with AI\"}\n \n
\n
\n );\n}\n\nfunction Ping() {\n return (\n \n \n \n \n );\n}\n", + "language": "typescript", + "type": "file" + }, + { + "name": "style.css", + "content": ".copilotKitWindow {\n box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);\n}\n\n.copilotKitHeader {\n border-top-left-radius: 5px !important;\n background-color: #fff;\n color: #000;\n border-bottom: 0px;\n}\n\n/* Recipe App Styles */\n.app-container {\n min-height: 100vh;\n width: 100%;\n display: flex;\n align-items: center;\n justify-content: center;\n background-size: cover;\n background-position: center;\n background-repeat: no-repeat;\n background-attachment: fixed;\n position: relative;\n overflow: auto;\n}\n\n.recipe-card {\n background-color: rgba(255, 255, 255, 0.97);\n border-radius: 16px;\n box-shadow: 0 15px 30px rgba(0, 0, 0, 0.25), 0 5px 15px rgba(0, 0, 0, 0.15);\n width: 100%;\n max-width: 750px;\n margin: 20px auto;\n padding: 14px 32px;\n position: relative;\n z-index: 1;\n backdrop-filter: blur(5px);\n border: 1px solid rgba(255, 255, 255, 0.3);\n transition: transform 0.2s ease, box-shadow 0.2s ease;\n animation: fadeIn 0.5s ease-out forwards;\n box-sizing: border-box;\n overflow: hidden;\n}\n\n.recipe-card:hover {\n transform: translateY(-5px);\n box-shadow: 0 20px 40px rgba(0, 0, 0, 0.3), 0 10px 20px rgba(0, 0, 0, 0.2);\n}\n\n/* Recipe Header */\n.recipe-header {\n margin-bottom: 24px;\n}\n\n.recipe-title-input {\n width: 100%;\n font-size: 24px;\n font-weight: bold;\n border: none;\n outline: none;\n padding: 8px 0;\n margin-bottom: 0px;\n}\n\n.recipe-meta {\n display: flex;\n align-items: center;\n gap: 20px;\n margin-top: 5px;\n margin-bottom: 14px;\n}\n\n.meta-item {\n display: flex;\n align-items: center;\n gap: 8px;\n color: #555;\n}\n\n.meta-icon {\n font-size: 20px;\n color: #777;\n}\n\n.meta-text {\n font-size: 15px;\n}\n\n/* Recipe Meta Selects */\n.meta-item select {\n border: none;\n background: transparent;\n font-size: 15px;\n color: #555;\n cursor: pointer;\n outline: none;\n 
padding-right: 18px;\n transition: color 0.2s, transform 0.1s;\n font-weight: 500;\n}\n\n.meta-item select:hover,\n.meta-item select:focus {\n color: #FF5722;\n}\n\n.meta-item select:active {\n transform: scale(0.98);\n}\n\n.meta-item select option {\n color: #333;\n background-color: white;\n font-weight: normal;\n padding: 8px;\n}\n\n/* Section Container */\n.section-container {\n margin-bottom: 20px;\n position: relative;\n width: 100%;\n}\n\n.section-title {\n font-size: 20px;\n font-weight: 700;\n margin-bottom: 20px;\n color: #333;\n position: relative;\n display: inline-block;\n}\n\n.section-title:after {\n content: \"\";\n position: absolute;\n bottom: -8px;\n left: 0;\n width: 40px;\n height: 3px;\n background-color: #ff7043;\n border-radius: 3px;\n}\n\n/* Dietary Preferences */\n.dietary-options {\n display: flex;\n flex-wrap: wrap;\n gap: 10px 16px;\n margin-bottom: 16px;\n width: 100%;\n}\n\n.dietary-option {\n display: flex;\n align-items: center;\n gap: 6px;\n font-size: 14px;\n cursor: pointer;\n margin-bottom: 4px;\n}\n\n.dietary-option input {\n cursor: pointer;\n}\n\n/* Ingredients */\n.ingredients-container {\n display: flex;\n flex-wrap: wrap;\n gap: 10px;\n margin-bottom: 15px;\n width: 100%;\n box-sizing: border-box;\n}\n\n.ingredient-card {\n display: flex;\n align-items: center;\n background-color: rgba(255, 255, 255, 0.9);\n border-radius: 12px;\n padding: 12px;\n margin-bottom: 10px;\n box-shadow: 0 4px 10px rgba(0, 0, 0, 0.08);\n position: relative;\n transition: all 0.2s ease;\n border: 1px solid rgba(240, 240, 240, 0.8);\n width: calc(33.333% - 7px);\n box-sizing: border-box;\n}\n\n.ingredient-card:hover {\n transform: translateY(-2px);\n box-shadow: 0 6px 15px rgba(0, 0, 0, 0.12);\n}\n\n.ingredient-card .remove-button {\n position: absolute;\n right: 10px;\n top: 10px;\n background: none;\n border: none;\n color: #ccc;\n font-size: 16px;\n cursor: pointer;\n display: none;\n padding: 0;\n width: 24px;\n height: 24px;\n line-height: 
1;\n}\n\n.ingredient-card:hover .remove-button {\n display: block;\n}\n\n.ingredient-icon {\n font-size: 24px;\n margin-right: 12px;\n display: flex;\n align-items: center;\n justify-content: center;\n width: 40px;\n height: 40px;\n background-color: #f7f7f7;\n border-radius: 50%;\n flex-shrink: 0;\n}\n\n.ingredient-content {\n flex: 1;\n display: flex;\n flex-direction: column;\n gap: 3px;\n min-width: 0;\n}\n\n.ingredient-name-input,\n.ingredient-amount-input {\n border: none;\n background: transparent;\n outline: none;\n width: 100%;\n padding: 0;\n text-overflow: ellipsis;\n overflow: hidden;\n white-space: nowrap;\n}\n\n.ingredient-name-input {\n font-weight: 500;\n font-size: 14px;\n}\n\n.ingredient-amount-input {\n font-size: 13px;\n color: #666;\n}\n\n.ingredient-name-input::placeholder,\n.ingredient-amount-input::placeholder {\n color: #aaa;\n}\n\n.remove-button {\n background: none;\n border: none;\n color: #999;\n font-size: 20px;\n cursor: pointer;\n padding: 0;\n width: 28px;\n height: 28px;\n display: flex;\n align-items: center;\n justify-content: center;\n margin-left: 10px;\n}\n\n.remove-button:hover {\n color: #FF5722;\n}\n\n/* Instructions */\n.instructions-container {\n display: flex;\n flex-direction: column;\n gap: 6px;\n position: relative;\n margin-bottom: 12px;\n width: 100%;\n}\n\n.instruction-item {\n position: relative;\n display: flex;\n width: 100%;\n box-sizing: border-box;\n margin-bottom: 8px;\n align-items: flex-start;\n}\n\n.instruction-number {\n display: flex;\n align-items: center;\n justify-content: center;\n min-width: 26px;\n height: 26px;\n background-color: #ff7043;\n color: white;\n border-radius: 50%;\n font-weight: 600;\n flex-shrink: 0;\n box-shadow: 0 2px 4px rgba(255, 112, 67, 0.3);\n z-index: 1;\n font-size: 13px;\n margin-top: 2px;\n}\n\n.instruction-line {\n position: absolute;\n left: 13px; /* Half of the number circle width */\n top: 22px;\n bottom: -18px;\n width: 2px;\n background: linear-gradient(to bottom, 
#ff7043 60%, rgba(255, 112, 67, 0.4));\n z-index: 0;\n}\n\n.instruction-content {\n background-color: white;\n border-radius: 10px;\n padding: 10px 14px;\n margin-left: 12px;\n flex-grow: 1;\n transition: all 0.2s ease;\n box-shadow: 0 2px 6px rgba(0, 0, 0, 0.08);\n border: 1px solid rgba(240, 240, 240, 0.8);\n position: relative;\n width: calc(100% - 38px);\n box-sizing: border-box;\n display: flex;\n align-items: center;\n}\n\n.instruction-content-editing {\n background-color: #fff9f6;\n box-shadow: 0 6px 16px rgba(0, 0, 0, 0.12), 0 0 0 2px rgba(255, 112, 67, 0.2);\n}\n\n.instruction-content:hover {\n transform: translateY(-2px);\n box-shadow: 0 6px 16px rgba(0, 0, 0, 0.12);\n}\n\n.instruction-textarea {\n width: 100%;\n background: transparent;\n border: none;\n resize: vertical;\n font-family: inherit;\n font-size: 14px;\n line-height: 1.4;\n min-height: 20px;\n outline: none;\n padding: 0;\n margin: 0;\n}\n\n.instruction-delete-btn {\n position: absolute;\n background: none;\n border: none;\n color: #ccc;\n font-size: 16px;\n cursor: pointer;\n display: none;\n padding: 0;\n width: 20px;\n height: 20px;\n line-height: 1;\n top: 50%;\n transform: translateY(-50%);\n right: 8px;\n}\n\n.instruction-content:hover .instruction-delete-btn {\n display: flex;\n align-items: center;\n justify-content: center;\n}\n\n/* Action Button */\n.action-container {\n display: flex;\n justify-content: center;\n margin-top: 40px;\n padding-bottom: 20px;\n position: relative;\n}\n\n.improve-button {\n background-color: #ff7043;\n border: none;\n color: white;\n border-radius: 30px;\n font-size: 18px;\n font-weight: 600;\n padding: 14px 28px;\n cursor: pointer;\n transition: all 0.3s ease;\n box-shadow: 0 4px 15px rgba(255, 112, 67, 0.4);\n display: flex;\n align-items: center;\n justify-content: center;\n text-align: center;\n position: relative;\n min-width: 180px;\n}\n\n.improve-button:hover {\n background-color: #ff5722;\n transform: translateY(-2px);\n box-shadow: 0 8px 20px 
rgba(255, 112, 67, 0.5);\n}\n\n.improve-button.loading {\n background-color: #ff7043;\n opacity: 0.8;\n cursor: not-allowed;\n padding-left: 42px; /* Reduced padding to bring text closer to icon */\n padding-right: 22px; /* Balance the button */\n justify-content: flex-start; /* Left align text for better alignment with icon */\n}\n\n.improve-button.loading:after {\n content: \"\"; /* Add space between icon and text */\n display: inline-block;\n width: 8px; /* Width of the space */\n}\n\n.improve-button:before {\n content: \"\";\n background-image: url(\"data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='24' height='24' viewBox='0 0 24 24' fill='none' stroke='white' stroke-width='2' stroke-linecap='round' stroke-linejoin='round'%3E%3Cpath d='M12 2v4M12 18v4M4.93 4.93l2.83 2.83M16.24 16.24l2.83 2.83M2 12h4M18 12h4M4.93 19.07l2.83-2.83M16.24 7.76l2.83-2.83'/%3E%3C/svg%3E\");\n width: 20px; /* Slightly smaller icon */\n height: 20px;\n background-repeat: no-repeat;\n background-size: contain;\n position: absolute;\n left: 16px; /* Slightly adjusted */\n top: 50%;\n transform: translateY(-50%);\n display: none;\n}\n\n.improve-button.loading:before {\n display: block;\n animation: spin 1.5s linear infinite;\n}\n\n@keyframes spin {\n 0% { transform: translateY(-50%) rotate(0deg); }\n 100% { transform: translateY(-50%) rotate(360deg); }\n}\n\n/* Ping Animation */\n.ping-animation {\n position: absolute;\n display: flex;\n width: 12px;\n height: 12px;\n top: 0;\n right: 0;\n}\n\n.ping-circle {\n position: absolute;\n display: inline-flex;\n width: 100%;\n height: 100%;\n border-radius: 50%;\n background-color: #38BDF8;\n opacity: 0.75;\n animation: ping 1.5s cubic-bezier(0, 0, 0.2, 1) infinite;\n}\n\n.ping-dot {\n position: relative;\n display: inline-flex;\n width: 12px;\n height: 12px;\n border-radius: 50%;\n background-color: #0EA5E9;\n}\n\n@keyframes ping {\n 75%, 100% {\n transform: scale(2);\n opacity: 0;\n }\n}\n\n/* Instruction hover effects 
*/\n.instruction-item:hover .instruction-delete-btn {\n display: flex !important;\n}\n\n/* Add some subtle animations */\n@keyframes fadeIn {\n from { opacity: 0; transform: translateY(20px); }\n to { opacity: 1; transform: translateY(0); }\n}\n\n/* Better center alignment for the recipe card */\n.recipe-card-container {\n display: flex;\n justify-content: center;\n width: 100%;\n position: relative;\n z-index: 1;\n margin: 0 auto;\n box-sizing: border-box;\n}\n\n/* Add Buttons */\n.add-button {\n background-color: transparent;\n color: #FF5722;\n border: 1px dashed #FF5722;\n border-radius: 8px;\n padding: 10px 16px;\n cursor: pointer;\n font-weight: 500;\n display: inline-block;\n font-size: 14px;\n margin-bottom: 0;\n}\n\n.add-step-button {\n background-color: transparent;\n color: #FF5722;\n border: 1px dashed #FF5722;\n border-radius: 6px;\n padding: 6px 12px;\n cursor: pointer;\n font-weight: 500;\n font-size: 13px;\n}\n\n/* Section Headers */\n.section-header {\n display: flex;\n justify-content: space-between;\n align-items: center;\n margin-bottom: 12px;\n}", + "language": "css", + "type": "file" + }, + { + "name": "README.mdx", + "content": "# 🍳 Shared State Recipe Creator\n\n## What This Demo Shows\n\nThis demo showcases CopilotKit's **shared state** functionality - a powerful\nfeature that enables bidirectional data flow between:\n\n1. **Frontend → Agent**: UI controls update the agent's context in real-time\n2. **Agent → Frontend**: The Copilot's recipe creations instantly update the UI\n components\n\nIt's like having a cooking buddy who not only listens to what you want but also\nupdates your recipe card as you chat - no refresh needed! 
✨\n\n## How to Interact\n\nMix and match any of these parameters (or none at all - it's up to you!):\n\n- **Skill Level**: Beginner to expert 👨‍🍳\n- **Cooking Time**: Quick meals or slow cooking ⏱️\n- **Special Preferences**: Dietary needs, flavor profiles, health goals 🥗\n- **Ingredients**: Items you want to include 🧅🥩🍄\n- **Instructions**: Any specific steps\n\nThen chat with your Copilot chef with prompts like:\n\n- \"I'm a beginner cook. Can you make me a quick dinner?\"\n- \"I need something spicy with chicken that takes under 30 minutes!\"\n\n## ✨ Shared State Magic in Action\n\n**What's happening technically:**\n\n- The UI and Copilot agent share the same state object (**Agent State = UI\n State**)\n- Changes from either side automatically update the other\n- Neither side needs to manually request updates from the other\n\n**What you'll see in this demo:**\n\n- Set cooking time to 20 minutes in the UI and watch the Copilot immediately\n respect your time constraint\n- Add ingredients through the UI and see them appear in your recipe\n- When the Copilot suggests new ingredients, watch them automatically appear in\n the UI ingredients list\n- Change your skill level and see how the Copilot adapts its instructions in\n real-time\n\nThis synchronized state creates a seamless experience where the agent always has\nyour current preferences, and any updates to the recipe are instantly reflected\nin both places.\n\nThis shared state pattern can be applied to any application where you want your\nUI and Copilot to work together in perfect harmony!\n", + "language": "markdown", + "type": "file" + }, + { + "name": "shared_state.py", + "content": "\"\"\"Shared State feature.\"\"\"\n\nfrom __future__ import annotations\n\nfrom dotenv import load_dotenv\nload_dotenv()\nimport json\nfrom enum import Enum\nfrom typing import Dict, List, Any, Optional\nfrom fastapi import FastAPI\nfrom adk_middleware import ADKAgent, add_adk_fastapi_endpoint\n\n# ADK imports\nfrom 
google.adk.agents import LlmAgent\nfrom google.adk.agents.callback_context import CallbackContext\nfrom google.adk.sessions import InMemorySessionService, Session\nfrom google.adk.runners import Runner\nfrom google.adk.events import Event, EventActions\nfrom google.adk.tools import FunctionTool, ToolContext\nfrom google.genai.types import Content, Part , FunctionDeclaration\nfrom google.adk.models import LlmResponse, LlmRequest\nfrom google.genai import types\n\nfrom pydantic import BaseModel, Field\nfrom typing import List, Optional\nfrom enum import Enum\n\nclass SkillLevel(str, Enum):\n # Add your skill level values here\n BEGINNER = \"beginner\"\n INTERMEDIATE = \"intermediate\"\n ADVANCED = \"advanced\"\n\nclass SpecialPreferences(str, Enum):\n # Add your special preferences values here\n VEGETARIAN = \"vegetarian\"\n VEGAN = \"vegan\"\n GLUTEN_FREE = \"gluten_free\"\n DAIRY_FREE = \"dairy_free\"\n KETO = \"keto\"\n LOW_CARB = \"low_carb\"\n\nclass CookingTime(str, Enum):\n # Add your cooking time values here\n QUICK = \"under_30_min\"\n MEDIUM = \"30_60_min\"\n LONG = \"over_60_min\"\n\nclass Ingredient(BaseModel):\n icon: str = Field(..., description=\"The icon emoji of the ingredient\")\n name: str\n amount: str\n\nclass Recipe(BaseModel):\n skill_level: SkillLevel = Field(..., description=\"The skill level required for the recipe\")\n special_preferences: Optional[List[SpecialPreferences]] = Field(\n None,\n description=\"A list of special preferences for the recipe\"\n )\n cooking_time: Optional[CookingTime] = Field(\n None,\n description=\"The cooking time of the recipe\"\n )\n ingredients: List[Ingredient] = Field(..., description=\"Entire list of ingredients for the recipe\")\n instructions: List[str] = Field(..., description=\"Entire list of instructions for the recipe\")\n changes: Optional[str] = Field(\n None,\n description=\"A description of the changes made to the recipe\"\n )\n\ndef generate_recipe(\n tool_context: ToolContext,\n skill_level: 
str,\n title: str,\n special_preferences: List[str] = [],\n cooking_time: str = \"\",\n ingredients: List[dict] = [],\n instructions: List[str] = [],\n changes: str = \"\"\n) -> Dict[str, str]:\n \"\"\"\n Generate or update a recipe using the provided recipe data.\n\n Args:\n \"title\": {\n \"type\": \"string\",\n \"description\": \"**REQUIRED** - The title of the recipe.\"\n },\n \"skill_level\": {\n \"type\": \"string\",\n \"enum\": [\"Beginner\",\"Intermediate\",\"Advanced\"],\n \"description\": \"**REQUIRED** - The skill level required for the recipe. Must be one of the predefined skill levels (Beginner, Intermediate, Advanced).\"\n },\n \"special_preferences\": {\n \"type\": \"array\",\n \"items\": {\"type\": \"string\"},\n \"enum\": [\"High Protein\",\"Low Carb\",\"Spicy\",\"Budget-Friendly\",\"One-Pot Meal\",\"Vegetarian\",\"Vegan\"],\n \"description\": \"**OPTIONAL** - Special dietary preferences for the recipe as comma-separated values. Example: 'High Protein, Low Carb, Gluten Free'. Leave empty array if no special preferences.\"\n },\n \"cooking_time\": {\n \"type\": \"string\",\n \"enum\": [5 min, 15 min, 30 min, 45 min, 60+ min],\n \"description\": \"**OPTIONAL** - The total cooking time for the recipe. Must be one of the predefined time slots (5 min, 15 min, 30 min, 45 min, 60+ min). 
Omit if time is not specified.\"\n },\n \"ingredients\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"icon\": {\"type\": \"string\", \"description\": \"The icon emoji (not emoji code like '\\x1f35e', but the actual emoji like 🥕) of the ingredient\"},\n \"name\": {\"type\": \"string\"},\n \"amount\": {\"type\": \"string\"}\n }\n },\n \"description\": \"Entire list of ingredients for the recipe, including the new ingredients and the ones that are already in the recipe\"\n },\n \"instructions\": {\n \"type\": \"array\",\n \"items\": {\"type\": \"string\"},\n \"description\": \"Entire list of instructions for the recipe, including the new instructions and the ones that are already there\"\n },\n \"changes\": {\n \"type\": \"string\",\n \"description\": \"**OPTIONAL** - A brief description of what changes were made to the recipe compared to the previous version. Example: 'Added more spices for flavor', 'Reduced cooking time', 'Substituted ingredient X for Y'. 
Omit if this is a new recipe.\"\n }\n\n Returns:\n Dict indicating success status and message\n \"\"\"\n try:\n\n\n # Create RecipeData object to validate structure\n recipe = {\n \"title\": title,\n \"skill_level\": skill_level,\n \"special_preferences\": special_preferences ,\n \"cooking_time\": cooking_time ,\n \"ingredients\": ingredients ,\n \"instructions\": instructions ,\n \"changes\": changes\n }\n\n # Update the session state with the new recipe\n current_recipe = tool_context.state.get(\"recipe\", {})\n if current_recipe:\n # Merge with existing recipe\n for key, value in recipe.items():\n if value is not None or value != \"\":\n current_recipe[key] = value\n else:\n current_recipe = recipe\n\n tool_context.state[\"recipe\"] = current_recipe\n\n\n\n return {\"status\": \"success\", \"message\": \"Recipe generated successfully\"}\n\n except Exception as e:\n return {\"status\": \"error\", \"message\": f\"Error generating recipe: {str(e)}\"}\n\n\n\ndef on_before_agent(callback_context: CallbackContext):\n \"\"\"\n Initialize recipe state if it doesn't exist.\n \"\"\"\n\n if \"recipe\" not in callback_context.state:\n # Initialize with default recipe\n default_recipe = {\n \"title\": \"Make Your Recipe\",\n \"skill_level\": \"Beginner\",\n \"special_preferences\": [],\n \"cooking_time\": '15 min',\n \"ingredients\": [{\"icon\": \"🍴\", \"name\": \"Sample Ingredient\", \"amount\": \"1 unit\"}],\n \"instructions\": [\"First step instruction\"]\n }\n callback_context.state[\"recipe\"] = default_recipe\n\n\n return None\n\n\n# --- Define the Callback Function ---\n# modifying the agent's system prompt to incude the current state of recipe\ndef before_model_modifier(\n callback_context: CallbackContext, llm_request: LlmRequest\n) -> Optional[LlmResponse]:\n \"\"\"Inspects/modifies the LLM request or skips the call.\"\"\"\n agent_name = callback_context.agent_name\n if agent_name == \"RecipeAgent\":\n recipe_json = \"No recipe yet\"\n if \"recipe\" in 
callback_context.state and callback_context.state[\"recipe\"] is not None:\n try:\n recipe_json = json.dumps(callback_context.state[\"recipe\"], indent=2)\n except Exception as e:\n recipe_json = f\"Error serializing recipe: {str(e)}\"\n # --- Modification Example ---\n # Add a prefix to the system instruction\n original_instruction = llm_request.config.system_instruction or types.Content(role=\"system\", parts=[])\n prefix = f\"\"\"You are a helpful assistant for creating recipes.\n This is the current state of the recipe: {recipe_json}\n You can improve the recipe by calling the generate_recipe tool.\"\"\"\n # Ensure system_instruction is Content and parts list exists\n if not isinstance(original_instruction, types.Content):\n # Handle case where it might be a string (though config expects Content)\n original_instruction = types.Content(role=\"system\", parts=[types.Part(text=str(original_instruction))])\n if not original_instruction.parts:\n original_instruction.parts.append(types.Part(text=\"\")) # Add an empty part if none exist\n\n # Modify the text of the first part\n modified_text = prefix + (original_instruction.parts[0].text or \"\")\n original_instruction.parts[0].text = modified_text\n llm_request.config.system_instruction = original_instruction\n\n\n\n return None\n\n\n# --- Define the Callback Function ---\ndef simple_after_model_modifier(\n callback_context: CallbackContext, llm_response: LlmResponse\n) -> Optional[LlmResponse]:\n \"\"\"Stop the consecutive tool calling of the agent\"\"\"\n agent_name = callback_context.agent_name\n # --- Inspection ---\n if agent_name == \"RecipeAgent\":\n original_text = \"\"\n if llm_response.content and llm_response.content.parts:\n # Assuming simple text response for this example\n if llm_response.content.role=='model' and llm_response.content.parts[0].text:\n original_text = llm_response.content.parts[0].text\n callback_context._invocation_context.end_invocation = True\n\n elif llm_response.error_message:\n 
return None\n else:\n return None # Nothing to modify\n return None\n\n\nshared_state_agent = LlmAgent(\n name=\"RecipeAgent\",\n model=\"gemini-2.5-pro\",\n instruction=f\"\"\"\n When a user asks for a recipe or wants to modify one, you MUST use the generate_recipe tool.\n\n IMPORTANT RULES:\n 1. Always use the generate_recipe tool for any recipe-related requests\n 2. When creating a new recipe, provide at least skill_level, ingredients, and instructions\n 3. When modifying an existing recipe, include the changes parameter to describe what was modified\n 4. Be creative and helpful in generating complete, practical recipes\n 5. After using the tool, provide a brief summary of what you created or changed\n 6. If user ask to improve the recipe then add more ingredients and make it healthier\n 7. When you see the 'Recipe generated successfully' confirmation message, wish the user well with their cooking by telling them to enjoy their dish.\n\n Examples of when to use the tool:\n - \"Create a pasta recipe\" → Use tool with skill_level, ingredients, instructions\n - \"Make it vegetarian\" → Use tool with special_preferences=[\"vegetarian\"] and changes describing the modification\n - \"Add some herbs\" → Use tool with updated ingredients and changes describing the addition\n\n Always provide complete, practical recipes that users can actually cook.\n \"\"\",\n tools=[generate_recipe],\n before_agent_callback=on_before_agent,\n before_model_callback=before_model_modifier,\n after_model_callback = simple_after_model_modifier\n )\n\n# Create ADK middleware agent instance\nadk_shared_state_agent = ADKAgent(\n adk_agent=shared_state_agent,\n app_name=\"demo_app\",\n user_id=\"demo_user\",\n session_timeout_seconds=3600,\n use_in_memory_services=True\n)\n\n# Create FastAPI app\napp = FastAPI(title=\"ADK Middleware Shared State\")\n\n# Add the ADK endpoint\nadd_adk_fastapi_endpoint(app, adk_shared_state_agent, path=\"/\")\n", "language": "python", "type": "file" } @@ -288,7 
+366,7 @@ "server-starter-all-features::tool_based_generative_ui": [ { "name": "page.tsx", - "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku{\n japanese : string[] | [],\n english : string[] | [],\n image_names : string[] | [],\n selectedImage : string | null,\n}\n\ninterface HaikuCardProps{\n generatedHaiku : GenerateHaiku | Partial\n setHaikus : Dispatch>\n haikus : GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n
\n
\n
\n \n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )}\n \n \n );\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction HaikuCard({generatedHaiku, setHaikus, haikus} : HaikuCardProps) {\n return (\n
\n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n
\n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n const validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n if (correctedNames.length < 3) {\n const availableFallbacks = VALID_IMAGE_NAMES.filter(name => !usedValidNames.has(name));\n for (let i = availableFallbacks.length - 1; i > 0; i--) {\n const j = Math.floor(Math.random() * (i + 1));\n [availableFallbacks[i], availableFallbacks[j]] = [availableFallbacks[j], availableFallbacks[i]];\n }\n\n while (correctedNames.length < 3 && availableFallbacks.length > 0) {\n const fallbackName = availableFallbacks.pop();\n if (fallbackName) {\n correctedNames.push(fallbackName);\n }\n }\n }\n\n while (correctedNames.length < 3 && VALID_IMAGE_NAMES.length > 0) {\n const fallbackName = VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n correctedNames.push(fallbackName);\n }\n\n return correctedNames.slice(0, 3);\n };\n\n useCopilotAction({\n name: \"generate_haiku\",\n available: \"frontend\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n 
description: \"Names of 3 relevant images\",\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [...prev, newHaiku]);\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const generatedHaikus = useMemo(() => (\n haikus.filter((haiku) => haiku.english[0] !== \"A placeholder verse—\")\n ), [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n {/* Thumbnail List */}\n {Boolean(generatedHaikus.length) && !isMobile && (\n
\n {generatedHaikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n )}\n\n {/* Main Display */}\n
\n
\n {haikus.filter((_haiku: Haiku, index: number) => {\n if (haikus.length == 1) return true;\n else return index == activeIndex + 1;\n }).map((haiku, index) => (\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n \n \n );\n}\n", + "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku {\n japanese: string[] | [],\n english: string[] | [],\n image_names: string[] | [],\n selectedImage: string | null,\n}\n\ninterface HaikuCardProps {\n generatedHaiku: GenerateHaiku | Partial\n setHaikus: Dispatch>\n haikus: GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && }\n \n \n );\n}\n\nfunction MobileChat({ chatTitle, chatDescription, initialLabel }: { chatTitle: string, chatDescription: string, initialLabel: string }) {\n const defaultChatHeight = 50\n\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n return (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n \n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction getRandomImage(): string {\n return VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n}\n\nconst validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n while (correctedNames.length < 3) {\n const nextImage = getRandomImage();\n if (!usedValidNames.has(nextImage)) {\n correctedNames.push(nextImage);\n usedValidNames.add(nextImage);\n }\n }\n\n return correctedNames.slice(0, 3);\n};\n\nfunction HaikuCard({ generatedHaiku, setHaikus, haikus }: HaikuCardProps) {\n return (\n \n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n \n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n useCopilotAction({\n name: \"generate_haiku\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n description: `Names of 3 relevant images selected from the following: \\n -${VALID_IMAGE_NAMES.join('\\n -')}`,\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [newHaiku, ...prev].filter(h => h.english[0] !== \"A placeholder verse—\"));\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n \n\n {/* Main Display */}\n
\n
\n {haikus.map((haiku, index) => (\n (haikus.length == 1 || index == activeIndex) && (\n\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n setHaikus((prevHaikus) => {\n return prevHaikus.map((h, idx) => {\n if (idx === index) {\n return { ...h, selectedImage: imageName }\n } else {\n return { ...h }\n }\n })\n })}\n />\n ))}\n
\n )}\n
\n )\n ))}\n
\n \n \n );\n}\n\nfunction Thumbnails({ haikus, activeIndex, setActiveIndex, isMobile }: { haikus: Haiku[], activeIndex: number, setActiveIndex: (index: number) => void, isMobile: boolean }) {\n if (haikus.length == 0 || isMobile) { return null }\n return (\n
\n {haikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n \n ))}\n \n )\n\n}", "language": "typescript", "type": "file" }, @@ -552,7 +630,7 @@ "langgraph::tool_based_generative_ui": [ { "name": "page.tsx", - "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku{\n japanese : string[] | [],\n english : string[] | [],\n image_names : string[] | [],\n selectedImage : string | null,\n}\n\ninterface HaikuCardProps{\n generatedHaiku : GenerateHaiku | Partial\n setHaikus : Dispatch>\n haikus : GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n
\n
\n
\n \n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )}\n \n \n );\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction HaikuCard({generatedHaiku, setHaikus, haikus} : HaikuCardProps) {\n return (\n
\n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n
\n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n const validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n if (correctedNames.length < 3) {\n const availableFallbacks = VALID_IMAGE_NAMES.filter(name => !usedValidNames.has(name));\n for (let i = availableFallbacks.length - 1; i > 0; i--) {\n const j = Math.floor(Math.random() * (i + 1));\n [availableFallbacks[i], availableFallbacks[j]] = [availableFallbacks[j], availableFallbacks[i]];\n }\n\n while (correctedNames.length < 3 && availableFallbacks.length > 0) {\n const fallbackName = availableFallbacks.pop();\n if (fallbackName) {\n correctedNames.push(fallbackName);\n }\n }\n }\n\n while (correctedNames.length < 3 && VALID_IMAGE_NAMES.length > 0) {\n const fallbackName = VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n correctedNames.push(fallbackName);\n }\n\n return correctedNames.slice(0, 3);\n };\n\n useCopilotAction({\n name: \"generate_haiku\",\n available: \"frontend\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n 
description: \"Names of 3 relevant images\",\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [...prev, newHaiku]);\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const generatedHaikus = useMemo(() => (\n haikus.filter((haiku) => haiku.english[0] !== \"A placeholder verse—\")\n ), [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n {/* Thumbnail List */}\n {Boolean(generatedHaikus.length) && !isMobile && (\n
\n {generatedHaikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n )}\n\n {/* Main Display */}\n
\n
\n {haikus.filter((_haiku: Haiku, index: number) => {\n if (haikus.length == 1) return true;\n else return index == activeIndex + 1;\n }).map((haiku, index) => (\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n \n \n );\n}\n", + "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku {\n japanese: string[] | [],\n english: string[] | [],\n image_names: string[] | [],\n selectedImage: string | null,\n}\n\ninterface HaikuCardProps {\n generatedHaiku: GenerateHaiku | Partial\n setHaikus: Dispatch>\n haikus: GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && }\n \n \n );\n}\n\nfunction MobileChat({ chatTitle, chatDescription, initialLabel }: { chatTitle: string, chatDescription: string, initialLabel: string }) {\n const defaultChatHeight = 50\n\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n return (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n \n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction getRandomImage(): string {\n return VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n}\n\nconst validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n while (correctedNames.length < 3) {\n const nextImage = getRandomImage();\n if (!usedValidNames.has(nextImage)) {\n correctedNames.push(nextImage);\n usedValidNames.add(nextImage);\n }\n }\n\n return correctedNames.slice(0, 3);\n};\n\nfunction HaikuCard({ generatedHaiku, setHaikus, haikus }: HaikuCardProps) {\n return (\n \n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n \n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n useCopilotAction({\n name: \"generate_haiku\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n description: `Names of 3 relevant images selected from the following: \\n -${VALID_IMAGE_NAMES.join('\\n -')}`,\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [newHaiku, ...prev].filter(h => h.english[0] !== \"A placeholder verse—\"));\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n \n\n {/* Main Display */}\n
\n
\n {haikus.map((haiku, index) => (\n (haikus.length == 1 || index == activeIndex) && (\n\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n setHaikus((prevHaikus) => {\n return prevHaikus.map((h, idx) => {\n if (idx === index) {\n return { ...h, selectedImage: imageName }\n } else {\n return { ...h }\n }\n })\n })}\n />\n ))}\n
\n )}\n
\n )\n ))}\n
\n \n \n );\n}\n\nfunction Thumbnails({ haikus, activeIndex, setActiveIndex, isMobile }: { haikus: Haiku[], activeIndex: number, setActiveIndex: (index: number) => void, isMobile: boolean }) {\n if (haikus.length == 0 || isMobile) { return null }\n return (\n
\n {haikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n \n ))}\n \n )\n\n}", "language": "typescript", "type": "file" }, @@ -570,13 +648,13 @@ }, { "name": "agent.py", - "content": "\"\"\"\nAn example demonstrating tool-based generative UI using LangGraph.\n\"\"\"\n\nfrom typing import List, Any, Optional, Annotated\nimport os\n\n# LangGraph imports\nfrom langchain_openai import ChatOpenAI\nfrom langchain_core.runnables import RunnableConfig\nfrom langchain_core.messages import SystemMessage\nfrom langchain_core.tools import tool\nfrom langgraph.graph import StateGraph, END, START\nfrom langgraph.types import Command\nfrom langgraph.graph import MessagesState\nfrom langgraph.prebuilt import ToolNode\n\n@tool\ndef generate_haiku(\n japanese: Annotated[ # pylint: disable=unused-argument\n List[str],\n \"An array of three lines of the haiku in Japanese\"\n ],\n english: Annotated[ # pylint: disable=unused-argument\n List[str],\n \"An array of three lines of the haiku in English\"\n ]\n):\n \"\"\"\n Generate a haiku in Japanese and its English translation.\n Also select exactly 3 relevant images from the provided list based on the haiku's theme.\n \"\"\"\n\nclass AgentState(MessagesState):\n \"\"\"\n State of the agent.\n \"\"\"\n tools: List[Any]\n\nasync def chat_node(state: AgentState, config: Optional[RunnableConfig] = None):\n \"\"\"\n The main function handling chat and tool calls.\n \"\"\"\n\n system_prompt = \"\"\"\n You assist the user in generating a haiku.\n When generating a haiku using the 'generate_haiku' tool.\n \"\"\"\n\n # Define the model\n model = ChatOpenAI(model=\"gpt-4o\")\n\n # Define config for the model\n if config is None:\n config = RunnableConfig(recursion_limit=25)\n\n # Bind the tools to the model\n model_with_tools = model.bind_tools(\n [generate_haiku],\n # Disable parallel tool calls to avoid race conditions\n parallel_tool_calls=False,\n )\n\n # Run the model to generate a response\n response = await model_with_tools.ainvoke([\n SystemMessage(content=system_prompt),\n 
*state[\"messages\"],\n ], config)\n\n if response.tool_calls:\n return Command(\n goto=\"tool_node\",\n update={\n \"messages\": state[\"messages\"] + [response]\n }\n )\n # Return Command to end with updated messages\n return Command(\n goto=END,\n update={\n \"messages\": state[\"messages\"] + [response]\n }\n )\n\n# Define the graph\nworkflow = StateGraph(AgentState)\n\n# Add nodes\nworkflow.add_node(\"chat_node\", chat_node)\nworkflow.add_node(\"tool_node\", ToolNode([generate_haiku]))\n\n# Add edges\nworkflow.set_entry_point(\"chat_node\")\nworkflow.add_edge(START, \"chat_node\")\nworkflow.add_edge(\"chat_node\", END)\nworkflow.add_edge(\"tool_node\", END)\n\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n memory = MemorySaver()\n graph = workflow.compile(checkpointer=memory)\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = workflow.compile()\n\n", + "content": "\"\"\"\nAn example demonstrating tool-based generative UI using LangGraph.\n\"\"\"\n\nimport os\nfrom typing import Any, List\nfrom typing_extensions import Literal\nfrom langchain_openai import ChatOpenAI\nfrom langchain_core.messages import SystemMessage\nfrom langchain_core.runnables import RunnableConfig\nfrom langgraph.graph import StateGraph, END\nfrom langgraph.types import Command\nfrom langgraph.graph import MessagesState\nfrom langgraph.prebuilt import ToolNode\n\n\nclass AgentState(MessagesState):\n \"\"\"\n State of the agent.\n \"\"\"\n tools: List[Any]\n\nasync def chat_node(state: AgentState, config: RunnableConfig) -> Command[Literal[\"tool_node\", \"__end__\"]]:\n \"\"\"\n Standard chat node based on the ReAct design 
pattern. It handles:\n - The model to use (and binds in CopilotKit actions and the tools defined above)\n - The system prompt\n - Getting a response from the model\n - Handling tool calls\n\n For more about the ReAct design pattern, see:\n https://www.perplexity.ai/search/react-agents-NcXLQhreS0WDzpVaS4m9Cg\n \"\"\"\n\n model = ChatOpenAI(model=\"gpt-4o\")\n\n model_with_tools = model.bind_tools(\n [\n *state.get(\"tools\", []), # bind tools defined by ag-ui\n ],\n parallel_tool_calls=False,\n )\n\n system_message = SystemMessage(\n content=f\"Help the user with writing Haikus. If the user asks for a haiku, use the generate_haiku tool to display the haiku to the user.\"\n )\n\n response = await model_with_tools.ainvoke([\n system_message,\n *state[\"messages\"],\n ], config)\n\n return Command(\n goto=END,\n update={\n \"messages\": [response],\n }\n )\n\nworkflow = StateGraph(AgentState)\nworkflow.add_node(\"chat_node\", chat_node)\n# This is required even though we don't have any backend tools to pass in.\nworkflow.add_node(\"tool_node\", ToolNode(tools=[]))\nworkflow.set_entry_point(\"chat_node\")\nworkflow.add_edge(\"chat_node\", END)\n\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n memory = MemorySaver()\n graph = workflow.compile(checkpointer=memory)\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = workflow.compile()\n", "language": "python", "type": "file" }, { "name": "agent.ts", - "content": "/**\n * An example demonstrating tool-based generative UI using LangGraph.\n */\n\nimport { ChatOpenAI } from \"@langchain/openai\";\nimport { SystemMessage } from \"@langchain/core/messages\";\nimport { 
RunnableConfig } from \"@langchain/core/runnables\";\nimport { Command, Annotation, MessagesAnnotation, StateGraph, END, START } from \"@langchain/langgraph\";\n\n// List of available images (modify path if needed)\nconst IMAGE_LIST = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\n// This tool generates a haiku on the server.\n// The tool call will be streamed to the frontend as it is being generated.\nconst GENERATE_HAIKU_TOOL = {\n type: \"function\",\n function: {\n name: \"generate_haiku\",\n description: \"Generate a haiku in Japanese and its English translation. 
Also select exactly 3 relevant images from the provided list based on the haiku's theme.\",\n parameters: {\n type: \"object\",\n properties: {\n japanese: {\n type: \"array\",\n items: {\n type: \"string\"\n },\n description: \"An array of three lines of the haiku in Japanese\"\n },\n english: {\n type: \"array\",\n items: {\n type: \"string\"\n },\n description: \"An array of three lines of the haiku in English\"\n },\n image_names: {\n type: \"array\",\n items: {\n type: \"string\"\n },\n description: \"An array of EXACTLY THREE image filenames from the provided list that are most relevant to the haiku.\"\n }\n },\n required: [\"japanese\", \"english\", \"image_names\"]\n }\n }\n};\n\nexport const AgentStateAnnotation = Annotation.Root({\n tools: Annotation(),\n ...MessagesAnnotation.spec,\n});\nexport type AgentState = typeof AgentStateAnnotation.State;\n\nasync function chatNode(state: AgentState, config?: RunnableConfig): Promise {\n /**\n * The main function handling chat and tool calls.\n */\n // Prepare the image list string for the prompt\n const imageListStr = IMAGE_LIST.map(img => `- ${img}`).join(\"\\n\");\n\n const systemPrompt = `\n You assist the user in generating a haiku.\n When generating a haiku using the 'generate_haiku' tool, you MUST also select exactly 3 image filenames from the following list that are most relevant to the haiku's content or theme. 
Return the filenames in the 'image_names' parameter.\n \n Available images:\n ${imageListStr}\n \n Don't provide the relevant image names in your final response to the user.\n `;\n\n // Define the model\n const model = new ChatOpenAI({ model: \"gpt-4o\" });\n \n // Define config for the model\n if (!config) {\n config = { recursionLimit: 25 };\n }\n\n // Bind the tools to the model\n const modelWithTools = model.bindTools(\n [GENERATE_HAIKU_TOOL],\n {\n // Disable parallel tool calls to avoid race conditions\n parallel_tool_calls: false,\n }\n );\n\n // Run the model to generate a response\n const response = await modelWithTools.invoke([\n new SystemMessage({ content: systemPrompt }),\n ...state.messages,\n ], config);\n\n // Return Command to end with updated messages\n return new Command({\n goto: END,\n update: {\n messages: [...state.messages, response]\n }\n });\n}\n\n// Define the graph\nconst workflow = new StateGraph(AgentStateAnnotation);\n\n// Add nodes\nworkflow.addNode(\"chat_node\", chatNode);\n\n// Add edges\nworkflow.setEntryPoint(\"chat_node\");\nworkflow.addEdge(START, \"chat_node\");\nworkflow.addEdge(\"chat_node\", END);\n\n// Compile the graph\nexport const toolBasedGenerativeUiGraph = workflow.compile();", + "content": "/**\n * An example demonstrating tool-based generative UI using LangGraph.\n */\n\nimport { ChatOpenAI } from \"@langchain/openai\";\nimport { SystemMessage } from \"@langchain/core/messages\";\nimport { RunnableConfig } from \"@langchain/core/runnables\";\nimport { Command, Annotation, MessagesAnnotation, StateGraph, END, START } from \"@langchain/langgraph\";\n\n\nexport const AgentStateAnnotation = Annotation.Root({\n tools: Annotation(),\n ...MessagesAnnotation.spec,\n});\nexport type AgentState = typeof AgentStateAnnotation.State;\n\nasync function chatNode(state: AgentState, config?: RunnableConfig): Promise {\n const model = new ChatOpenAI({ model: \"gpt-4o\" });\n\n const modelWithTools = model.bindTools(\n [\n 
...state.tools || []\n ],\n { parallel_tool_calls: false }\n );\n\n const systemMessage = new SystemMessage({\n content: 'Help the user with writing Haikus. If the user asks for a haiku, use the generate_haiku tool to display the haiku to the user.'\n });\n\n const response = await modelWithTools.invoke([\n systemMessage,\n ...state.messages,\n ], config);\n\n return new Command({\n goto: END,\n update: {\n messages: [response]\n }\n });\n}\n\nconst workflow = new StateGraph(AgentStateAnnotation);\nworkflow.addNode(\"chat_node\", chatNode);\n\nworkflow.addEdge(START, \"chat_node\");\n\nexport const toolBasedGenerativeUiGraph = workflow.compile();", "language": "ts", "type": "file" } @@ -608,6 +686,38 @@ }, {} ], + "langgraph::subgraphs": [ + { + "name": "page.tsx", + "content": "\"use client\";\nimport React, { useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport { CopilotKit, useCoAgent, useLangGraphInterrupt } from \"@copilotkit/react-core\";\nimport { CopilotSidebar } from \"@copilotkit/react-ui\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface SubgraphsProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\n// Travel planning data types\ninterface Flight {\n airline: string;\n arrival: string;\n departure: string;\n duration: string;\n price: string;\n}\n\ninterface Hotel {\n location: string;\n name: string;\n price_per_night: string;\n rating: string;\n}\n\ninterface Experience {\n name: string;\n description: string;\n location: string;\n type: string;\n}\n\ninterface Itinerary {\n hotel?: Hotel;\n flight?: Flight;\n experiences?: Experience[];\n}\n\ntype AvailableAgents = 'flights' | 'hotels' | 'experiences' | 'supervisor'\n\ninterface TravelAgentState {\n experiences: Experience[],\n flights: Flight[],\n hotels: Hotel[],\n itinerary: Itinerary\n planning_step: string\n active_agent: 
AvailableAgents\n}\n\nconst INITIAL_STATE: TravelAgentState = {\n itinerary: {},\n experiences: [],\n flights: [],\n hotels: [],\n planning_step: \"start\",\n active_agent: 'supervisor'\n};\n\ninterface InterruptEvent {\n message: string;\n options: TAgent extends 'flights' ? Flight[] : TAgent extends 'hotels' ? Hotel[] : never,\n recommendation: TAgent extends 'flights' ? Flight : TAgent extends 'hotels' ? Hotel : never,\n agent: TAgent\n}\n\nfunction InterruptHumanInTheLoop({\n event,\n resolve,\n}: {\n event: { value: InterruptEvent };\n resolve: (value: string) => void;\n}) {\n const { message, options, agent, recommendation } = event.value;\n\n // Format agent name with emoji\n const formatAgentName = (agent: string) => {\n switch (agent) {\n case 'flights': return 'Flights Agent';\n case 'hotels': return 'Hotels Agent';\n case 'experiences': return 'Experiences Agent';\n default: return `${agent} Agent`;\n }\n };\n\n const handleOptionSelect = (option: any) => {\n resolve(JSON.stringify(option));\n };\n\n return (\n
\n

{formatAgentName(agent)}: {message}

\n\n
\n {options.map((opt, idx) => {\n if ('airline' in opt) {\n const isRecommended = (recommendation as Flight).airline === opt.airline;\n // Flight options\n return (\n handleOptionSelect(opt)}\n >\n {isRecommended && ⭐ Recommended}\n
\n {opt.airline}\n {opt.price}\n
\n
\n {opt.departure} → {opt.arrival}\n
\n
\n {opt.duration}\n
\n \n );\n }\n const isRecommended = (recommendation as Hotel).name === opt.name;\n\n // Hotel options\n return (\n handleOptionSelect(opt)}\n >\n {isRecommended && ⭐ Recommended}\n
\n {opt.name}\n {opt.rating}\n
\n
\n 📍 {opt.location}\n
\n
\n {opt.price_per_night}\n
\n \n );\n })}\n
\n
\n )\n}\n\nexport default function Subgraphs({ params }: SubgraphsProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50;\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight);\n\n const chatTitle = 'Travel Planning Assistant';\n const chatDescription = 'Plan your perfect trip with AI specialists';\n const initialLabel = 'Hi! ✈️ Ready to plan an amazing trip? Try saying \"Plan a trip to Paris\" or \"Find me flights to Tokyo\"';\n\n return (\n \n
\n \n {isMobile ? (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight);\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n
\n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n \n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n ) : (\n \n )}\n \n \n );\n}\n\nfunction TravelPlanner() {\n const { isMobile } = useMobileView();\n const { state: agentState, nodeName } = useCoAgent({\n name: \"subgraphs\",\n initialState: INITIAL_STATE,\n config: {\n streamSubgraphs: true,\n }\n });\n\n useLangGraphInterrupt({\n render: ({ event, resolve }) => ,\n });\n\n // Current itinerary strip\n const ItineraryStrip = () => {\n const selectedFlight = agentState?.itinerary?.flight;\n const selectedHotel = agentState?.itinerary?.hotel;\n const hasExperiences = agentState?.experiences?.length > 0;\n\n return (\n
\n
Current Itinerary:
\n
\n
\n 📍\n Amsterdam → San Francisco\n
\n {selectedFlight && (\n
\n ✈️\n {selectedFlight.airline} - {selectedFlight.price}\n
\n )}\n {selectedHotel && (\n
\n 🏨\n {selectedHotel.name}\n
\n )}\n {hasExperiences && (\n
\n 🎯\n {agentState.experiences.length} experiences planned\n
\n )}\n
\n
\n );\n };\n\n // Compact agent status\n const AgentStatus = () => {\n let activeAgent = 'supervisor';\n if (nodeName?.includes('flights_agent')) {\n activeAgent = 'flights';\n }\n if (nodeName?.includes('hotels_agent')) {\n activeAgent = 'hotels';\n }\n if (nodeName?.includes('experiences_agent')) {\n activeAgent = 'experiences';\n }\n return (\n
\n
Active Agent:
\n
\n
\n 👨‍💼\n Supervisor\n
\n
\n ✈️\n Flights\n
\n
\n 🏨\n Hotels\n
\n
\n 🎯\n Experiences\n
\n
\n
\n )\n };\n\n // Travel details component\n const TravelDetails = () => (\n
\n
\n

✈️ Flight Options

\n
\n {agentState?.flights?.length > 0 ? (\n agentState.flights.map((flight, index) => (\n
\n {flight.airline}:\n {flight.departure} → {flight.arrival} ({flight.duration}) - {flight.price}\n
\n ))\n ) : (\n

No flights found yet

\n )}\n {agentState?.itinerary?.flight && (\n
\n Selected: {agentState.itinerary.flight.airline} - {agentState.itinerary.flight.price}\n
\n )}\n
\n
\n\n
\n

🏨 Hotel Options

\n
\n {agentState?.hotels?.length > 0 ? (\n agentState.hotels.map((hotel, index) => (\n
\n {hotel.name}:\n {hotel.location} - {hotel.price_per_night} ({hotel.rating})\n
\n ))\n ) : (\n

No hotels found yet

\n )}\n {agentState?.itinerary?.hotel && (\n
\n Selected: {agentState.itinerary.hotel.name} - {agentState.itinerary.hotel.price_per_night}\n
\n )}\n
\n
\n\n
\n

🎯 Experiences

\n
\n {agentState?.experiences?.length > 0 ? (\n agentState.experiences.map((experience, index) => (\n
\n
{experience.name}
\n
{experience.type}
\n
{experience.description}
\n
Location: {experience.location}
\n
\n ))\n ) : (\n

No experiences planned yet

\n )}\n
\n
\n
\n );\n\n return (\n
\n \n \n \n
\n );\n}", + "language": "typescript", + "type": "file" + }, + { + "name": "style.css", + "content": "/* Travel Planning Subgraphs Demo Styles */\n/* Essential styles that cannot be achieved with Tailwind classes */\n\n/* Main container with CopilotSidebar layout */\n.travel-planner-container {\n min-height: 100vh;\n padding: 2rem;\n background: linear-gradient(135deg, #f0f9ff 0%, #e0f2fe 100%);\n}\n\n/* Travel content area styles */\n.travel-content {\n max-width: 1200px;\n margin: 0 auto;\n padding: 0 1rem;\n display: flex;\n flex-direction: column;\n gap: 1rem;\n}\n\n/* Itinerary strip */\n.itinerary-strip {\n background: white;\n border-radius: 0.5rem;\n padding: 1rem;\n border: 1px solid #e5e7eb;\n box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);\n}\n\n.itinerary-label {\n font-size: 0.875rem;\n font-weight: 600;\n color: #6b7280;\n margin-bottom: 0.5rem;\n}\n\n.itinerary-items {\n display: flex;\n flex-wrap: wrap;\n gap: 1rem;\n}\n\n.itinerary-item {\n display: flex;\n align-items: center;\n gap: 0.5rem;\n padding: 0.5rem 0.75rem;\n background: #f9fafb;\n border-radius: 0.375rem;\n font-size: 0.875rem;\n}\n\n.item-icon {\n font-size: 1rem;\n}\n\n/* Agent status */\n.agent-status {\n background: white;\n border-radius: 0.5rem;\n padding: 1rem;\n border: 1px solid #e5e7eb;\n box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);\n}\n\n.status-label {\n font-size: 0.875rem;\n font-weight: 600;\n color: #6b7280;\n margin-bottom: 0.5rem;\n}\n\n.agent-indicators {\n display: flex;\n gap: 0.75rem;\n}\n\n.agent-indicator {\n display: flex;\n align-items: center;\n gap: 0.5rem;\n padding: 0.5rem 0.75rem;\n border-radius: 0.375rem;\n font-size: 0.875rem;\n background: #f9fafb;\n border: 1px solid #e5e7eb;\n transition: all 0.2s ease;\n}\n\n.agent-indicator.active {\n background: #dbeafe;\n border-color: #3b82f6;\n color: #1d4ed8;\n box-shadow: 0 0 0 2px rgba(59, 130, 246, 0.1);\n}\n\n/* Travel details sections */\n.travel-details {\n background: white;\n border-radius: 0.5rem;\n padding: 
1rem;\n border: 1px solid #e5e7eb;\n box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);\n display: grid;\n gap: 1rem;\n}\n\n.details-section h4 {\n font-size: 1rem;\n font-weight: 600;\n color: #1f2937;\n margin-bottom: 0.5rem;\n display: flex;\n align-items: center;\n gap: 0.5rem;\n}\n\n.detail-items {\n display: flex;\n flex-direction: column;\n gap: 0.5rem;\n}\n\n.detail-item {\n padding: 0.5rem;\n background: #f9fafb;\n border-radius: 0.25rem;\n font-size: 0.875rem;\n display: flex;\n justify-content: space-between;\n}\n\n.detail-item strong {\n color: #6b7280;\n font-weight: 500;\n}\n\n.detail-tips {\n padding: 0.5rem;\n background: #eff6ff;\n border-radius: 0.25rem;\n font-size: 0.75rem;\n color: #1d4ed8;\n}\n\n.activity-item {\n padding: 0.75rem;\n background: #f0f9ff;\n border-radius: 0.25rem;\n border-left: 2px solid #0ea5e9;\n}\n\n.activity-name {\n font-weight: 600;\n color: #1f2937;\n font-size: 0.875rem;\n margin-bottom: 0.25rem;\n}\n\n.activity-category {\n font-size: 0.75rem;\n color: #0ea5e9;\n margin-bottom: 0.25rem;\n}\n\n.activity-description {\n color: #4b5563;\n font-size: 0.75rem;\n margin-bottom: 0.25rem;\n}\n\n.activity-meta {\n font-size: 0.75rem;\n color: #6b7280;\n}\n\n.no-activities {\n text-align: center;\n color: #9ca3af;\n font-style: italic;\n padding: 1rem;\n font-size: 0.875rem;\n}\n\n/* Interrupt UI for Chat Sidebar (Generative UI) */\n.interrupt-container {\n display: flex;\n flex-direction: column;\n gap: 1rem;\n max-width: 100%;\n padding-top: 34px;\n}\n\n.interrupt-header {\n margin-bottom: 0.5rem;\n}\n\n.agent-name {\n font-size: 0.875rem;\n font-weight: 600;\n color: #1f2937;\n margin: 0 0 0.25rem 0;\n}\n\n.agent-message {\n font-size: 0.75rem;\n color: #6b7280;\n margin: 0;\n line-height: 1.4;\n}\n\n.interrupt-options {\n padding: 0.75rem;\n display: flex;\n flex-direction: column;\n gap: 0.5rem;\n max-height: 300px;\n overflow-y: auto;\n}\n\n.option-card {\n display: flex;\n flex-direction: column;\n gap: 0.25rem;\n padding: 
0.75rem;\n background: #f9fafb;\n border: 1px solid #e5e7eb;\n border-radius: 0.5rem;\n cursor: pointer;\n transition: all 0.2s ease;\n text-align: left;\n position: relative;\n min-height: auto;\n}\n\n.option-card:hover {\n background: #f3f4f6;\n border-color: #d1d5db;\n}\n\n.option-card:active {\n background: #e5e7eb;\n}\n\n.option-card.recommended {\n background: #eff6ff;\n border-color: #3b82f6;\n box-shadow: 0 0 0 1px rgba(59, 130, 246, 0.1);\n}\n\n.option-card.recommended:hover {\n background: #dbeafe;\n}\n\n.recommendation-badge {\n position: absolute;\n top: -2px;\n right: -2px;\n background: #3b82f6;\n color: white;\n font-size: 0.625rem;\n padding: 0.125rem 0.375rem;\n border-radius: 0.75rem;\n font-weight: 500;\n}\n\n.option-header {\n display: flex;\n justify-content: space-between;\n align-items: center;\n margin-bottom: 0.125rem;\n}\n\n.airline-name, .hotel-name {\n font-weight: 600;\n font-size: 0.8rem;\n color: #1f2937;\n}\n\n.price, .rating {\n font-weight: 600;\n font-size: 0.75rem;\n color: #059669;\n}\n\n.route-info, .location-info {\n font-size: 0.7rem;\n color: #6b7280;\n margin-bottom: 0.125rem;\n}\n\n.duration-info, .price-info {\n font-size: 0.7rem;\n color: #9ca3af;\n}\n\n/* Mobile responsive adjustments */\n@media (max-width: 768px) {\n .travel-planner-container {\n padding: 0.5rem;\n padding-bottom: 120px; /* Space for mobile chat */\n }\n \n .travel-content {\n padding: 0;\n gap: 0.75rem;\n }\n \n .itinerary-items {\n flex-direction: column;\n gap: 0.5rem;\n }\n \n .agent-indicators {\n flex-direction: column;\n gap: 0.5rem;\n }\n \n .agent-indicator {\n padding: 0.75rem;\n }\n \n .travel-details {\n padding: 0.75rem;\n }\n\n .interrupt-container {\n padding: 0.5rem;\n }\n\n .option-card {\n padding: 0.625rem;\n }\n\n .interrupt-options {\n max-height: 250px;\n }\n}", + "language": "css", + "type": "file" + }, + { + "name": "README.mdx", + "content": "# LangGraph Subgraphs Demo: Travel Planning Assistant ✈️\n\nThis demo showcases 
**LangGraph subgraphs** through an interactive travel planning assistant. Watch as specialized AI agents collaborate to plan your perfect trip!\n\n## What are LangGraph Subgraphs? 🤖\n\n**Subgraphs** are the key to building modular, scalable AI systems in LangGraph. A subgraph is essentially \"a graph that is used as a node in another graph\" - enabling powerful encapsulation and reusability.\nFor more info, check out the [LangGraph docs](https://langchain-ai.github.io/langgraph/concepts/subgraphs/).\n\n### Key Concepts\n\n- **Encapsulation**: Each subgraph handles a specific domain with its own expertise\n- **Modularity**: Subgraphs can be developed, tested, and maintained independently \n- **Reusability**: The same subgraph can be used across multiple parent graphs\n- **State Communication**: Subgraphs can share state or use different schemas with transformations\n\n## Demo Architecture 🗺️\n\nThis travel planner demonstrates **supervisor-coordinated subgraphs** with **human-in-the-loop** decision making:\n\n### Parent Graph: Travel Supervisor\n- **Role**: Coordinates the travel planning process and routes to specialized agents\n- **State Management**: Maintains a shared itinerary object across all subgraphs\n- **Intelligence**: Determines what's needed and when each agent should be called\n\n### Subgraph 1: ✈️ Flights Agent\n- **Specialization**: Finding and booking flight options\n- **Process**: Presents flight options from Amsterdam to San Francisco with recommendations\n- **Interaction**: Uses interrupts to let users choose their preferred flight\n- **Data**: Static flight options (KLM, United) with pricing and duration\n\n### Subgraph 2: 🏨 Hotels Agent \n- **Specialization**: Finding and booking accommodation\n- **Process**: Shows hotel options in San Francisco with different price points\n- **Interaction**: Uses interrupts for user to select their preferred hotel\n- **Data**: Static hotel options (Hotel Zephyr, Ritz-Carlton, Hotel Zoe)\n\n### Subgraph 3: 🎯 
Experiences Agent\n- **Specialization**: Curating restaurants and activities\n- **Process**: AI-powered recommendations based on selected flights and hotels\n- **Features**: Combines 2 restaurants and 2 activities with location-aware suggestions\n- **Data**: Static experiences (Pier 39, Golden Gate Bridge, Swan Oyster Depot, Tartine Bakery)\n\n## How It Works 🔄\n\n1. **User Request**: \"Help me plan a trip to San Francisco\"\n2. **Supervisor Analysis**: Determines what travel components are needed\n3. **Sequential Routing**: Routes to each agent in logical order:\n - First: Flights Agent (get transportation sorted)\n - Then: Hotels Agent (book accommodation) \n - Finally: Experiences Agent (plan activities)\n4. **Human Decisions**: Each agent presents options and waits for user choice via interrupts\n5. **State Building**: Selected choices are stored in the shared itinerary object\n6. **Completion**: All agents report back to supervisor for final coordination\n\n## State Communication Patterns 📊\n\n### Shared State Schema\nAll subgraph agents share and contribute to a common state object. When any agent updates the shared state, these changes are immediately reflected in the frontend through real-time syncing. This ensures that:\n\n- **Flight selections** from the Flights Agent are visible to subsequent agents\n- **Hotel choices** influence the Experiences Agent's recommendations \n- **All updates** are synchronized with the frontend UI in real-time\n- **State persistence** maintains the travel itinerary throughout the workflow\n\n### Human-in-the-Loop Pattern\nTwo of the specialist agents use **interrupts** to pause execution and gather user preferences:\n\n- **Flights Agent**: Presents options → interrupt → waits for selection → continues\n- **Hotels Agent**: Shows hotels → interrupt → waits for choice → continues\n\n## Try These Examples! 
💡\n\n### Getting Started\n- \"Help me plan a trip to San Francisco\"\n- \"I want to visit San Francisco from Amsterdam\"\n- \"Plan my travel itinerary\"\n\n### During the Process\nWhen the Flights Agent presents options:\n- Choose between KLM ($650, 11h 30m) or United ($720, 12h 15m)\n\nWhen the Hotels Agent shows accommodations:\n- Select from Hotel Zephyr, The Ritz-Carlton, or Hotel Zoe\n\nThe Experiences Agent will then provide tailored recommendations based on your choices!\n\n## Frontend Capabilities 👁️\n\n- **Human-in-the-loop with interrupts** from subgraphs for user decision making\n- **Subgraphs detection and streaming** to show which agent is currently active\n- **Real-time state updates** as the shared itinerary is built across agents\n", + "language": "markdown", + "type": "file" + }, + { + "name": "agent.py", + "content": "\"\"\"\nA travel agent supervisor demo showcasing multi-agent architecture with subgraphs.\nThe supervisor coordinates specialized agents: flights finder, hotels finder, and experiences finder.\n\"\"\"\n\nfrom typing import Dict, List, Any, Optional, Annotated, Union\nfrom dataclasses import dataclass\nimport json\nimport os\nfrom pydantic import BaseModel, Field\n\n# LangGraph imports\nfrom langchain_core.runnables import RunnableConfig\nfrom langgraph.graph import StateGraph, END, START\nfrom langgraph.types import Command, interrupt\nfrom langgraph.graph import MessagesState\n\n# OpenAI imports\nfrom langchain_openai import ChatOpenAI\nfrom langchain_core.messages import SystemMessage, AIMessage\n\ndef create_interrupt(message: str, options: List[Any], recommendation: Any, agent: str):\n return interrupt({\n \"message\": message,\n \"options\": options,\n \"recommendation\": recommendation,\n \"agent\": agent,\n })\n\n# State schema for travel planning\n@dataclass\nclass Flight:\n airline: str\n departure: str\n arrival: str\n price: str\n duration: str\n\n@dataclass\nclass Hotel:\n name: str\n location: str\n price_per_night: 
str\n rating: str\n\n@dataclass\nclass Experience:\n name: str\n type: str # \"restaurant\" or \"activity\"\n description: str\n location: str\n\ndef merge_itinerary(left: Union[dict, None] = None, right: Union[dict, None] = None) -> dict:\n \"\"\"Custom reducer to merge shopping cart updates.\"\"\"\n if not left:\n left = {}\n if not right:\n right = {}\n\n return {**left, **right}\n\nclass TravelAgentState(MessagesState):\n \"\"\"Shared state for the travel agent system\"\"\"\n # Travel request details\n origin: str = \"\"\n destination: str = \"\"\n\n # Results from each agent\n flights: List[Flight] = None\n hotels: List[Hotel] = None\n experiences: List[Experience] = None\n\n itinerary: Annotated[dict, merge_itinerary] = None\n\n # Tools available to all agents\n tools: List[Any] = None\n\n # Supervisor routing\n next_agent: Optional[str] = None\n\n# Static data for demonstration\nSTATIC_FLIGHTS = [\n Flight(\"KLM\", \"Amsterdam (AMS)\", \"San Francisco (SFO)\", \"$650\", \"11h 30m\"),\n Flight(\"United\", \"Amsterdam (AMS)\", \"San Francisco (SFO)\", \"$720\", \"12h 15m\")\n]\n\nSTATIC_HOTELS = [\n Hotel(\"Hotel Zephyr\", \"Fisherman's Wharf\", \"$280/night\", \"4.2 stars\"),\n Hotel(\"The Ritz-Carlton\", \"Nob Hill\", \"$550/night\", \"4.8 stars\"),\n Hotel(\"Hotel Zoe\", \"Union Square\", \"$320/night\", \"4.4 stars\")\n]\n\nSTATIC_EXPERIENCES = [\n Experience(\"Pier 39\", \"activity\", \"Iconic waterfront destination with shops and sea lions\", \"Fisherman's Wharf\"),\n Experience(\"Golden Gate Bridge\", \"activity\", \"World-famous suspension bridge with stunning views\", \"Golden Gate\"),\n Experience(\"Swan Oyster Depot\", \"restaurant\", \"Historic seafood counter serving fresh oysters\", \"Polk Street\"),\n Experience(\"Tartine Bakery\", \"restaurant\", \"Artisanal bakery famous for bread and pastries\", \"Mission District\")\n]\n\n# Flights finder subgraph\nasync def flights_finder(state: TravelAgentState, config: RunnableConfig):\n \"\"\"Subgraph 
that finds flight options\"\"\"\n\n # Simulate flight search with static data\n flights = STATIC_FLIGHTS\n\n selected_flight = state.get('itinerary', {}).get('flight', None)\n if not selected_flight:\n selected_flight = create_interrupt(\n message=f\"\"\"\n Found {len(flights)} flight options from {state.get('origin', 'Amsterdam')} to {state.get('destination', 'San Francisco')}.\n I recommend choosing the flight by {flights[0].airline} since it's known to be on time and cheaper.\n \"\"\",\n options=flights,\n recommendation=flights[0],\n agent=\"flights\"\n )\n\n if isinstance(selected_flight, str):\n selected_flight = json.loads(selected_flight)\n return Command(\n goto=END,\n update={\n \"flights\": flights,\n \"itinerary\": {\n \"flight\": selected_flight\n },\n \"messages\": state[\"messages\"] + [{\n \"role\": \"assistant\",\n \"content\": f\"Flights Agent: Great. I'll book you the {selected_flight[\"airline\"]} flight from {selected_flight[\"departure\"]} to {selected_flight[\"arrival\"]}.\"\n }]\n }\n )\n\n# Hotels finder subgraph\nasync def hotels_finder(state: TravelAgentState, config: RunnableConfig):\n \"\"\"Subgraph that finds hotel options\"\"\"\n\n # Simulate hotel search with static data\n hotels = STATIC_HOTELS\n selected_hotel = state.get('itinerary', {}).get('hotel', None)\n if not selected_hotel:\n selected_hotel = create_interrupt(\n message=f\"\"\"\n Found {len(hotels)} accommodation options in {state.get('destination', 'San Francisco')}.\n I recommend choosing the {hotels[2].name} since it strikes the balance between rating, price, and location.\n \"\"\",\n options=hotels,\n recommendation=hotels[2],\n agent=\"hotels\"\n )\n\n if isinstance(selected_hotel, str):\n selected_hotel = json.loads(selected_hotel)\n return Command(\n goto=END,\n update={\n \"hotels\": hotels,\n \"itinerary\": {\n \"hotel\": selected_hotel\n },\n \"messages\": state[\"messages\"] + [{\n \"role\": \"assistant\",\n \"content\": f\"Hotels Agent: Excellent choice! 
You'll like {selected_hotel[\"name\"]}.\"\n }]\n }\n )\n\n# Experiences finder subgraph\nasync def experiences_finder(state: TravelAgentState, config: RunnableConfig):\n \"\"\"Subgraph that finds restaurant and activity recommendations\"\"\"\n\n # Filter experiences (2 restaurants, 2 activities)\n restaurants = [exp for exp in STATIC_EXPERIENCES if exp.type == \"restaurant\"][:2]\n activities = [exp for exp in STATIC_EXPERIENCES if exp.type == \"activity\"][:2]\n experiences = restaurants + activities\n\n model = ChatOpenAI(model=\"gpt-4o\")\n\n if config is None:\n config = RunnableConfig(recursion_limit=25)\n\n itinerary = state.get(\"itinerary\", {})\n\n system_prompt = f\"\"\"\n You are the experiences agent. Your job is to find restaurants and activities for the user.\n You already went ahead and found a bunch of experiences. All you have to do now, is to let the user know of your findings.\n \n Current status:\n - Origin: {state.get('origin', 'Amsterdam')}\n - Destination: {state.get('destination', 'San Francisco')}\n - Flight chosen: {itinerary.get(\"hotel\", None)}\n - Hotel chosen: {itinerary.get(\"hotel\", None)}\n - activities found: {activities}\n - restaurants found: {restaurants}\n \"\"\"\n\n # Get supervisor decision\n response = await model.ainvoke([\n SystemMessage(content=system_prompt),\n *state[\"messages\"],\n ], config)\n\n return Command(\n goto=END,\n update={\n \"experiences\": experiences,\n \"messages\": state[\"messages\"] + [response]\n }\n )\n\nclass SupervisorResponseFormatter(BaseModel):\n \"\"\"Always use this tool to structure your response to the user.\"\"\"\n answer: str = Field(description=\"The answer to the user\")\n next_agent: str | None = Field(description=\"The agent to go to. 
Not required if you do not want to route to another agent.\")\n\n# Supervisor agent\nasync def supervisor_agent(state: TravelAgentState, config: RunnableConfig):\n \"\"\"Main supervisor that coordinates all subgraphs\"\"\"\n\n itinerary = state.get(\"itinerary\", {})\n\n # Check what's already completed\n has_flights = itinerary.get(\"flight\", None) is not None\n has_hotels = itinerary.get(\"hotel\", None) is not None\n has_experiences = state.get(\"experiences\", None) is not None\n\n system_prompt = f\"\"\"\n You are a travel planning supervisor. Your job is to coordinate specialized agents to help plan a trip.\n \n Current status:\n - Origin: {state.get('origin', 'Amsterdam')}\n - Destination: {state.get('destination', 'San Francisco')}\n - Flights found: {has_flights}\n - Hotels found: {has_hotels}\n - Experiences found: {has_experiences}\n - Itinerary (Things that the user has already confirmed selection on): {json.dumps(itinerary, indent=2)}\n \n Available agents:\n - flights_agent: Finds flight options\n - hotels_agent: Finds hotel options \n - experiences_agent: Finds restaurant and activity recommendations\n - {END}: Mark task as complete when all information is gathered\n \n You must route to the appropriate agent based on what's missing. 
Once all agents have completed their tasks, route to 'complete'.\n \"\"\"\n\n # Define the model\n model = ChatOpenAI(model=\"gpt-4o\")\n\n if config is None:\n config = RunnableConfig(recursion_limit=25)\n\n # Bind the routing tool\n model_with_tools = model.bind_tools(\n [SupervisorResponseFormatter],\n parallel_tool_calls=False,\n )\n\n # Get supervisor decision\n response = await model_with_tools.ainvoke([\n SystemMessage(content=system_prompt),\n *state[\"messages\"],\n ], config)\n\n messages = state[\"messages\"] + [response]\n\n # Handle tool calls for routing\n if hasattr(response, \"tool_calls\") and response.tool_calls:\n tool_call = response.tool_calls[0]\n\n if isinstance(tool_call, dict):\n tool_call_args = tool_call[\"args\"]\n else:\n tool_call_args = tool_call.args\n\n next_agent = tool_call_args[\"next_agent\"]\n\n # Add tool response\n tool_response = {\n \"role\": \"tool\",\n \"content\": f\"Routing to {next_agent} and providing the answer\",\n \"tool_call_id\": tool_call.id if hasattr(tool_call, 'id') else tool_call[\"id\"]\n }\n\n messages = messages + [tool_response, AIMessage(content=tool_call_args[\"answer\"])]\n\n if next_agent is not None:\n return Command(goto=next_agent)\n\n # Fallback if no tool call\n return Command(\n goto=END,\n update={\"messages\": messages}\n )\n\n# Create subgraphs\nflights_graph = StateGraph(TravelAgentState)\nflights_graph.add_node(\"flights_agent_chat_node\", flights_finder)\nflights_graph.set_entry_point(\"flights_agent_chat_node\")\nflights_graph.add_edge(START, \"flights_agent_chat_node\")\nflights_graph.add_edge(\"flights_agent_chat_node\", END)\nflights_subgraph = flights_graph.compile()\n\nhotels_graph = StateGraph(TravelAgentState)\nhotels_graph.add_node(\"hotels_agent_chat_node\", hotels_finder)\nhotels_graph.set_entry_point(\"hotels_agent_chat_node\")\nhotels_graph.add_edge(START, \"hotels_agent_chat_node\")\nhotels_graph.add_edge(\"hotels_agent_chat_node\", END)\nhotels_subgraph = 
hotels_graph.compile()\n\nexperiences_graph = StateGraph(TravelAgentState)\nexperiences_graph.add_node(\"experiences_agent_chat_node\", experiences_finder)\nexperiences_graph.set_entry_point(\"experiences_agent_chat_node\")\nexperiences_graph.add_edge(START, \"experiences_agent_chat_node\")\nexperiences_graph.add_edge(\"experiences_agent_chat_node\", END)\nexperiences_subgraph = experiences_graph.compile()\n\n# Main supervisor workflow\nworkflow = StateGraph(TravelAgentState)\n\n# Add supervisor and subgraphs as nodes\nworkflow.add_node(\"supervisor\", supervisor_agent)\nworkflow.add_node(\"flights_agent\", flights_subgraph)\nworkflow.add_node(\"hotels_agent\", hotels_subgraph)\nworkflow.add_node(\"experiences_agent\", experiences_subgraph)\n\n# Set entry point\nworkflow.set_entry_point(\"supervisor\")\nworkflow.add_edge(START, \"supervisor\")\n\n# Add edges back to supervisor after each subgraph\nworkflow.add_edge(\"flights_agent\", \"supervisor\")\nworkflow.add_edge(\"hotels_agent\", \"supervisor\")\nworkflow.add_edge(\"experiences_agent\", \"supervisor\")\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n memory = MemorySaver()\n graph = workflow.compile(checkpointer=memory)\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = workflow.compile()\n", + "language": "python", + "type": "file" + }, + { + "name": "agent.ts", + "content": "/**\n * A travel agent supervisor demo showcasing multi-agent architecture with subgraphs.\n * The supervisor coordinates specialized agents: flights finder, hotels finder, and experiences finder.\n */\n\nimport { ChatOpenAI } from \"@langchain/openai\";\nimport { SystemMessage, 
AIMessage, ToolMessage } from \"@langchain/core/messages\";\nimport { RunnableConfig } from \"@langchain/core/runnables\";\nimport { \n Annotation, \n MessagesAnnotation, \n StateGraph, \n Command, \n START, \n END, \n interrupt \n} from \"@langchain/langgraph\";\n\n// Travel data interfaces\ninterface Flight {\n airline: string;\n departure: string;\n arrival: string;\n price: string;\n duration: string;\n}\n\ninterface Hotel {\n name: string;\n location: string;\n price_per_night: string;\n rating: string;\n}\n\ninterface Experience {\n name: string;\n type: \"restaurant\" | \"activity\";\n description: string;\n location: string;\n}\n\ninterface Itinerary {\n flight?: Flight;\n hotel?: Hotel;\n}\n\n// Custom reducer to merge itinerary updates\nfunction mergeItinerary(left: Itinerary | null, right?: Itinerary | null): Itinerary {\n if (!left) left = {};\n if (!right) right = {};\n return { ...left, ...right };\n}\n\n// State annotation for travel agent system\nexport const TravelAgentStateAnnotation = Annotation.Root({\n origin: Annotation(),\n destination: Annotation(),\n flights: Annotation(),\n hotels: Annotation(),\n experiences: Annotation(),\n\n // Itinerary with custom merger\n itinerary: Annotation({\n reducer: mergeItinerary,\n default: () => null\n }),\n\n // Tools available to all agents\n tools: Annotation({\n reducer: (x, y) => y ?? 
x,\n default: () => []\n }),\n\n // Supervisor routing\n next_agent: Annotation(),\n ...MessagesAnnotation.spec,\n});\n\nexport type TravelAgentState = typeof TravelAgentStateAnnotation.State;\n\n// Static data for demonstration\nconst STATIC_FLIGHTS: Flight[] = [\n { airline: \"KLM\", departure: \"Amsterdam (AMS)\", arrival: \"San Francisco (SFO)\", price: \"$650\", duration: \"11h 30m\" },\n { airline: \"United\", departure: \"Amsterdam (AMS)\", arrival: \"San Francisco (SFO)\", price: \"$720\", duration: \"12h 15m\" }\n];\n\nconst STATIC_HOTELS: Hotel[] = [\n { name: \"Hotel Zephyr\", location: \"Fisherman's Wharf\", price_per_night: \"$280/night\", rating: \"4.2 stars\" },\n { name: \"The Ritz-Carlton\", location: \"Nob Hill\", price_per_night: \"$550/night\", rating: \"4.8 stars\" },\n { name: \"Hotel Zoe\", location: \"Union Square\", price_per_night: \"$320/night\", rating: \"4.4 stars\" }\n];\n\nconst STATIC_EXPERIENCES: Experience[] = [\n { name: \"Pier 39\", type: \"activity\", description: \"Iconic waterfront destination with shops and sea lions\", location: \"Fisherman's Wharf\" },\n { name: \"Golden Gate Bridge\", type: \"activity\", description: \"World-famous suspension bridge with stunning views\", location: \"Golden Gate\" },\n { name: \"Swan Oyster Depot\", type: \"restaurant\", description: \"Historic seafood counter serving fresh oysters\", location: \"Polk Street\" },\n { name: \"Tartine Bakery\", type: \"restaurant\", description: \"Artisanal bakery famous for bread and pastries\", location: \"Mission District\" }\n];\n\nfunction createInterrupt(message: string, options: any[], recommendation: any, agent: string) {\n return interrupt({\n message,\n options,\n recommendation,\n agent,\n });\n}\n\n// Flights finder subgraph\nasync function flightsFinder(state: TravelAgentState, config?: RunnableConfig): Promise {\n // Simulate flight search with static data\n const flights = STATIC_FLIGHTS;\n\n const selectedFlight = state.itinerary?.flight;\n 
\n let flightChoice: Flight;\n const message = `Found ${flights.length} flight options from ${state.origin || 'Amsterdam'} to ${state.destination || 'San Francisco'}.\\n` +\n `I recommend choosing the flight by ${flights[0].airline} since it's known to be on time and cheaper.`\n if (!selectedFlight) {\n const interruptResult = createInterrupt(\n message,\n flights,\n flights[0],\n \"flights\"\n );\n \n // Parse the interrupt result if it's a string\n flightChoice = typeof interruptResult === 'string' ? JSON.parse(interruptResult) : interruptResult;\n } else {\n flightChoice = selectedFlight;\n }\n\n return new Command({\n goto: END,\n update: {\n flights: flights,\n itinerary: {\n flight: flightChoice\n },\n // Return all \"messages\" that the agent was sending\n messages: [\n ...state.messages,\n new AIMessage({\n content: message,\n }),\n new AIMessage({\n content: `Flights Agent: Great. I'll book you the ${flightChoice.airline} flight from ${flightChoice.departure} to ${flightChoice.arrival}.`,\n }),\n ]\n }\n });\n}\n\n// Hotels finder subgraph\nasync function hotelsFinder(state: TravelAgentState, config?: RunnableConfig): Promise {\n // Simulate hotel search with static data\n const hotels = STATIC_HOTELS;\n const selectedHotel = state.itinerary?.hotel;\n \n let hotelChoice: Hotel;\n const message = `Found ${hotels.length} accommodation options in ${state.destination || 'San Francisco'}.\\n\n I recommend choosing the ${hotels[2].name} since it strikes the balance between rating, price, and location.`\n if (!selectedHotel) {\n const interruptResult = createInterrupt(\n message,\n hotels,\n hotels[2],\n \"hotels\"\n );\n \n // Parse the interrupt result if it's a string\n hotelChoice = typeof interruptResult === 'string' ? 
JSON.parse(interruptResult) : interruptResult;\n } else {\n hotelChoice = selectedHotel;\n }\n\n return new Command({\n goto: END,\n update: {\n hotels: hotels,\n itinerary: {\n hotel: hotelChoice\n },\n // Return all \"messages\" that the agent was sending\n messages: [\n ...state.messages,\n new AIMessage({\n content: message,\n }),\n new AIMessage({\n content: `Hotels Agent: Excellent choice! You'll like ${hotelChoice.name}.`\n }),\n ]\n }\n });\n}\n\n// Experiences finder subgraph\nasync function experiencesFinder(state: TravelAgentState, config?: RunnableConfig): Promise {\n // Filter experiences (2 restaurants, 2 activities)\n const restaurants = STATIC_EXPERIENCES.filter(exp => exp.type === \"restaurant\").slice(0, 2);\n const activities = STATIC_EXPERIENCES.filter(exp => exp.type === \"activity\").slice(0, 2);\n const experiences = [...restaurants, ...activities];\n\n const model = new ChatOpenAI({ model: \"gpt-4o\" });\n\n if (!config) {\n config = { recursionLimit: 25 };\n }\n\n const itinerary = state.itinerary || {};\n\n const systemPrompt = `\n You are the experiences agent. Your job is to find restaurants and activities for the user.\n You already went ahead and found a bunch of experiences. 
All you have to do now, is to let the user know of your findings.\n \n Current status:\n - Origin: ${state.origin || 'Amsterdam'}\n - Destination: ${state.destination || 'San Francisco'}\n - Flight chosen: ${JSON.stringify(itinerary.flight) || 'None'}\n - Hotel chosen: ${JSON.stringify(itinerary.hotel) || 'None'}\n - Activities found: ${JSON.stringify(activities)}\n - Restaurants found: ${JSON.stringify(restaurants)}\n `;\n\n // Get experiences response\n const response = await model.invoke([\n new SystemMessage({ content: systemPrompt }),\n ...state.messages,\n ], config);\n\n return new Command({\n goto: END,\n update: {\n experiences: experiences,\n messages: [...state.messages, response]\n }\n });\n}\n\n// Supervisor response tool\nconst SUPERVISOR_RESPONSE_TOOL = {\n type: \"function\" as const,\n function: {\n name: \"supervisor_response\",\n description: \"Always use this tool to structure your response to the user.\",\n parameters: {\n type: \"object\",\n properties: {\n answer: {\n type: \"string\",\n description: \"The answer to the user\"\n },\n next_agent: {\n type: \"string\",\n enum: [\"flights_agent\", \"hotels_agent\", \"experiences_agent\", \"complete\"],\n description: \"The agent to go to. Not required if you do not want to route to another agent.\"\n }\n },\n required: [\"answer\"]\n }\n }\n};\n\n// Supervisor agent\nasync function supervisorAgent(state: TravelAgentState, config?: RunnableConfig): Promise {\n const itinerary = state.itinerary || {};\n\n // Check what's already completed\n const hasFlights = itinerary.flight !== undefined;\n const hasHotels = itinerary.hotel !== undefined;\n const hasExperiences = state.experiences !== null;\n\n const systemPrompt = `\n You are a travel planning supervisor. 
Your job is to coordinate specialized agents to help plan a trip.\n \n Current status:\n - Origin: ${state.origin || 'Amsterdam'}\n - Destination: ${state.destination || 'San Francisco'}\n - Flights found: ${hasFlights}\n - Hotels found: ${hasHotels}\n - Experiences found: ${hasExperiences}\n - Itinerary (Things that the user has already confirmed selection on): ${JSON.stringify(itinerary, null, 2)}\n \n Available agents:\n - flights_agent: Finds flight options\n - hotels_agent: Finds hotel options \n - experiences_agent: Finds restaurant and activity recommendations\n - complete: Mark task as complete when all information is gathered\n \n You must route to the appropriate agent based on what's missing. Once all agents have completed their tasks, route to 'complete'.\n `;\n\n // Define the model\n const model = new ChatOpenAI({ model: \"gpt-4o\" });\n\n if (!config) {\n config = { recursionLimit: 25 };\n }\n\n // Bind the routing tool\n const modelWithTools = model.bindTools(\n [SUPERVISOR_RESPONSE_TOOL],\n {\n parallel_tool_calls: false,\n }\n );\n\n // Get supervisor decision\n const response = await modelWithTools.invoke([\n new SystemMessage({ content: systemPrompt }),\n ...state.messages,\n ], config);\n\n let messages = [...state.messages, response];\n\n // Handle tool calls for routing\n if (response.tool_calls && response.tool_calls.length > 0) {\n const toolCall = response.tool_calls[0];\n const toolCallArgs = toolCall.args;\n const nextAgent = toolCallArgs.next_agent;\n\n const toolResponse = new ToolMessage({\n tool_call_id: toolCall.id!,\n content: `Routing to ${nextAgent} and providing the answer`,\n });\n\n messages = [\n ...messages, \n toolResponse, \n new AIMessage({ content: toolCallArgs.answer })\n ];\n\n if (nextAgent && nextAgent !== \"complete\") {\n return new Command({ goto: nextAgent });\n }\n }\n\n // Fallback if no tool call or complete\n return new Command({\n goto: END,\n update: { messages }\n });\n}\n\n// Create subgraphs\nconst 
flightsGraph = new StateGraph(TravelAgentStateAnnotation);\nflightsGraph.addNode(\"flights_agent_chat_node\", flightsFinder);\nflightsGraph.setEntryPoint(\"flights_agent_chat_node\");\nflightsGraph.addEdge(START, \"flights_agent_chat_node\");\nflightsGraph.addEdge(\"flights_agent_chat_node\", END);\nconst flightsSubgraph = flightsGraph.compile();\n\nconst hotelsGraph = new StateGraph(TravelAgentStateAnnotation);\nhotelsGraph.addNode(\"hotels_agent_chat_node\", hotelsFinder);\nhotelsGraph.setEntryPoint(\"hotels_agent_chat_node\");\nhotelsGraph.addEdge(START, \"hotels_agent_chat_node\");\nhotelsGraph.addEdge(\"hotels_agent_chat_node\", END);\nconst hotelsSubgraph = hotelsGraph.compile();\n\nconst experiencesGraph = new StateGraph(TravelAgentStateAnnotation);\nexperiencesGraph.addNode(\"experiences_agent_chat_node\", experiencesFinder);\nexperiencesGraph.setEntryPoint(\"experiences_agent_chat_node\");\nexperiencesGraph.addEdge(START, \"experiences_agent_chat_node\");\nexperiencesGraph.addEdge(\"experiences_agent_chat_node\", END);\nconst experiencesSubgraph = experiencesGraph.compile();\n\n// Main supervisor workflow\nconst workflow = new StateGraph(TravelAgentStateAnnotation);\n\n// Add supervisor and subgraphs as nodes\nworkflow.addNode(\"supervisor\", supervisorAgent, { ends: ['flights_agent', 'hotels_agent', 'experiences_agent', END] });\nworkflow.addNode(\"flights_agent\", flightsSubgraph);\nworkflow.addNode(\"hotels_agent\", hotelsSubgraph);\nworkflow.addNode(\"experiences_agent\", experiencesSubgraph);\n\n// Set entry point\nworkflow.setEntryPoint(\"supervisor\");\nworkflow.addEdge(START, \"supervisor\");\n\n// Add edges back to supervisor after each subgraph\nworkflow.addEdge(\"flights_agent\", \"supervisor\");\nworkflow.addEdge(\"hotels_agent\", \"supervisor\");\nworkflow.addEdge(\"experiences_agent\", \"supervisor\");\n\n// Compile the graph\nexport const subGraphsAgentGraph = workflow.compile();\n", + "language": "ts", + "type": "file" + } + ], 
"langgraph-fastapi::agentic_chat": [ { "name": "page.tsx", @@ -741,7 +851,7 @@ "langgraph-fastapi::tool_based_generative_ui": [ { "name": "page.tsx", - "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku{\n japanese : string[] | [],\n english : string[] | [],\n image_names : string[] | [],\n selectedImage : string | null,\n}\n\ninterface HaikuCardProps{\n generatedHaiku : GenerateHaiku | Partial\n setHaikus : Dispatch>\n haikus : GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n
\n
\n
\n \n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )}\n \n \n );\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction HaikuCard({generatedHaiku, setHaikus, haikus} : HaikuCardProps) {\n return (\n
\n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n
\n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n const validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n if (correctedNames.length < 3) {\n const availableFallbacks = VALID_IMAGE_NAMES.filter(name => !usedValidNames.has(name));\n for (let i = availableFallbacks.length - 1; i > 0; i--) {\n const j = Math.floor(Math.random() * (i + 1));\n [availableFallbacks[i], availableFallbacks[j]] = [availableFallbacks[j], availableFallbacks[i]];\n }\n\n while (correctedNames.length < 3 && availableFallbacks.length > 0) {\n const fallbackName = availableFallbacks.pop();\n if (fallbackName) {\n correctedNames.push(fallbackName);\n }\n }\n }\n\n while (correctedNames.length < 3 && VALID_IMAGE_NAMES.length > 0) {\n const fallbackName = VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n correctedNames.push(fallbackName);\n }\n\n return correctedNames.slice(0, 3);\n };\n\n useCopilotAction({\n name: \"generate_haiku\",\n available: \"frontend\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n 
description: \"Names of 3 relevant images\",\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [...prev, newHaiku]);\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const generatedHaikus = useMemo(() => (\n haikus.filter((haiku) => haiku.english[0] !== \"A placeholder verse—\")\n ), [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n {/* Thumbnail List */}\n {Boolean(generatedHaikus.length) && !isMobile && (\n
\n {generatedHaikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n )}\n\n {/* Main Display */}\n
\n
\n {haikus.filter((_haiku: Haiku, index: number) => {\n if (haikus.length == 1) return true;\n else return index == activeIndex + 1;\n }).map((haiku, index) => (\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n \n \n );\n}\n", + "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku {\n japanese: string[] | [],\n english: string[] | [],\n image_names: string[] | [],\n selectedImage: string | null,\n}\n\ninterface HaikuCardProps {\n generatedHaiku: GenerateHaiku | Partial\n setHaikus: Dispatch>\n haikus: GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && }\n \n \n );\n}\n\nfunction MobileChat({ chatTitle, chatDescription, initialLabel }: { chatTitle: string, chatDescription: string, initialLabel: string }) {\n const defaultChatHeight = 50\n\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n return (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n \n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction getRandomImage(): string {\n return VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n}\n\nconst validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n while (correctedNames.length < 3) {\n const nextImage = getRandomImage();\n if (!usedValidNames.has(nextImage)) {\n correctedNames.push(nextImage);\n usedValidNames.add(nextImage);\n }\n }\n\n return correctedNames.slice(0, 3);\n};\n\nfunction HaikuCard({ generatedHaiku, setHaikus, haikus }: HaikuCardProps) {\n return (\n \n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n \n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n useCopilotAction({\n name: \"generate_haiku\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n description: `Names of 3 relevant images selected from the following: \\n -${VALID_IMAGE_NAMES.join('\\n -')}`,\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [newHaiku, ...prev].filter(h => h.english[0] !== \"A placeholder verse—\"));\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n \n\n {/* Main Display */}\n
\n
\n {haikus.map((haiku, index) => (\n (haikus.length == 1 || index == activeIndex) && (\n\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n setHaikus((prevHaikus) => {\n return prevHaikus.map((h, idx) => {\n if (idx === index) {\n return { ...h, selectedImage: imageName }\n } else {\n return { ...h }\n }\n })\n })}\n />\n ))}\n
\n )}\n
\n )\n ))}\n
\n \n \n );\n}\n\nfunction Thumbnails({ haikus, activeIndex, setActiveIndex, isMobile }: { haikus: Haiku[], activeIndex: number, setActiveIndex: (index: number) => void, isMobile: boolean }) {\n if (haikus.length == 0 || isMobile) { return null }\n return (\n
\n {haikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n \n ))}\n \n )\n\n}", "language": "typescript", "type": "file" }, @@ -759,7 +869,7 @@ }, { "name": "agent.py", - "content": "\"\"\"\nAn example demonstrating tool-based generative UI using LangGraph.\n\"\"\"\n\nfrom typing import List, Any, Optional, Annotated\nimport os\n\n# LangGraph imports\nfrom langchain_openai import ChatOpenAI\nfrom langchain_core.runnables import RunnableConfig\nfrom langchain_core.messages import SystemMessage\nfrom langchain_core.tools import tool\nfrom langgraph.graph import StateGraph, END, START\nfrom langgraph.types import Command\nfrom langgraph.graph import MessagesState\nfrom langgraph.prebuilt import ToolNode\n\n@tool\ndef generate_haiku(\n japanese: Annotated[ # pylint: disable=unused-argument\n List[str],\n \"An array of three lines of the haiku in Japanese\"\n ],\n english: Annotated[ # pylint: disable=unused-argument\n List[str],\n \"An array of three lines of the haiku in English\"\n ]\n):\n \"\"\"\n Generate a haiku in Japanese and its English translation.\n Also select exactly 3 relevant images from the provided list based on the haiku's theme.\n \"\"\"\n\nclass AgentState(MessagesState):\n \"\"\"\n State of the agent.\n \"\"\"\n tools: List[Any]\n\nasync def chat_node(state: AgentState, config: Optional[RunnableConfig] = None):\n \"\"\"\n The main function handling chat and tool calls.\n \"\"\"\n\n system_prompt = \"\"\"\n You assist the user in generating a haiku.\n When generating a haiku using the 'generate_haiku' tool.\n \"\"\"\n\n # Define the model\n model = ChatOpenAI(model=\"gpt-4o\")\n\n # Define config for the model\n if config is None:\n config = RunnableConfig(recursion_limit=25)\n\n # Bind the tools to the model\n model_with_tools = model.bind_tools(\n [generate_haiku],\n # Disable parallel tool calls to avoid race conditions\n parallel_tool_calls=False,\n )\n\n # Run the model to generate a response\n response = await model_with_tools.ainvoke([\n SystemMessage(content=system_prompt),\n 
*state[\"messages\"],\n ], config)\n\n if response.tool_calls:\n return Command(\n goto=\"tool_node\",\n update={\n \"messages\": state[\"messages\"] + [response]\n }\n )\n # Return Command to end with updated messages\n return Command(\n goto=END,\n update={\n \"messages\": state[\"messages\"] + [response]\n }\n )\n\n# Define the graph\nworkflow = StateGraph(AgentState)\n\n# Add nodes\nworkflow.add_node(\"chat_node\", chat_node)\nworkflow.add_node(\"tool_node\", ToolNode([generate_haiku]))\n\n# Add edges\nworkflow.set_entry_point(\"chat_node\")\nworkflow.add_edge(START, \"chat_node\")\nworkflow.add_edge(\"chat_node\", END)\nworkflow.add_edge(\"tool_node\", END)\n\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n memory = MemorySaver()\n graph = workflow.compile(checkpointer=memory)\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = workflow.compile()\n\n", + "content": "\"\"\"\nAn example demonstrating tool-based generative UI using LangGraph.\n\"\"\"\n\nimport os\nfrom typing import Any, List\nfrom typing_extensions import Literal\nfrom langchain_openai import ChatOpenAI\nfrom langchain_core.messages import SystemMessage\nfrom langchain_core.runnables import RunnableConfig\nfrom langgraph.graph import StateGraph, END\nfrom langgraph.types import Command\nfrom langgraph.graph import MessagesState\nfrom langgraph.prebuilt import ToolNode\n\n\nclass AgentState(MessagesState):\n \"\"\"\n State of the agent.\n \"\"\"\n tools: List[Any]\n\nasync def chat_node(state: AgentState, config: RunnableConfig) -> Command[Literal[\"tool_node\", \"__end__\"]]:\n \"\"\"\n Standard chat node based on the ReAct design 
pattern. It handles:\n - The model to use (and binds in CopilotKit actions and the tools defined above)\n - The system prompt\n - Getting a response from the model\n - Handling tool calls\n\n For more about the ReAct design pattern, see:\n https://www.perplexity.ai/search/react-agents-NcXLQhreS0WDzpVaS4m9Cg\n \"\"\"\n\n model = ChatOpenAI(model=\"gpt-4o\")\n\n model_with_tools = model.bind_tools(\n [\n *state.get(\"tools\", []), # bind tools defined by ag-ui\n ],\n parallel_tool_calls=False,\n )\n\n system_message = SystemMessage(\n content=f\"Help the user with writing Haikus. If the user asks for a haiku, use the generate_haiku tool to display the haiku to the user.\"\n )\n\n response = await model_with_tools.ainvoke([\n system_message,\n *state[\"messages\"],\n ], config)\n\n return Command(\n goto=END,\n update={\n \"messages\": [response],\n }\n )\n\nworkflow = StateGraph(AgentState)\nworkflow.add_node(\"chat_node\", chat_node)\n# This is required even though we don't have any backend tools to pass in.\nworkflow.add_node(\"tool_node\", ToolNode(tools=[]))\nworkflow.set_entry_point(\"chat_node\")\nworkflow.add_edge(\"chat_node\", END)\n\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n memory = MemorySaver()\n graph = workflow.compile(checkpointer=memory)\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = workflow.compile()\n", "language": "python", "type": "file" } @@ -790,6 +900,32 @@ "type": "file" } ], + "langgraph-fastapi::subgraphs": [ + { + "name": "page.tsx", + "content": "\"use client\";\nimport React, { useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport 
\"./style.css\";\nimport { CopilotKit, useCoAgent, useLangGraphInterrupt } from \"@copilotkit/react-core\";\nimport { CopilotSidebar } from \"@copilotkit/react-ui\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface SubgraphsProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\n// Travel planning data types\ninterface Flight {\n airline: string;\n arrival: string;\n departure: string;\n duration: string;\n price: string;\n}\n\ninterface Hotel {\n location: string;\n name: string;\n price_per_night: string;\n rating: string;\n}\n\ninterface Experience {\n name: string;\n description: string;\n location: string;\n type: string;\n}\n\ninterface Itinerary {\n hotel?: Hotel;\n flight?: Flight;\n experiences?: Experience[];\n}\n\ntype AvailableAgents = 'flights' | 'hotels' | 'experiences' | 'supervisor'\n\ninterface TravelAgentState {\n experiences: Experience[],\n flights: Flight[],\n hotels: Hotel[],\n itinerary: Itinerary\n planning_step: string\n active_agent: AvailableAgents\n}\n\nconst INITIAL_STATE: TravelAgentState = {\n itinerary: {},\n experiences: [],\n flights: [],\n hotels: [],\n planning_step: \"start\",\n active_agent: 'supervisor'\n};\n\ninterface InterruptEvent {\n message: string;\n options: TAgent extends 'flights' ? Flight[] : TAgent extends 'hotels' ? Hotel[] : never,\n recommendation: TAgent extends 'flights' ? Flight : TAgent extends 'hotels' ? 
Hotel : never,\n agent: TAgent\n}\n\nfunction InterruptHumanInTheLoop({\n event,\n resolve,\n}: {\n event: { value: InterruptEvent };\n resolve: (value: string) => void;\n}) {\n const { message, options, agent, recommendation } = event.value;\n\n // Format agent name with emoji\n const formatAgentName = (agent: string) => {\n switch (agent) {\n case 'flights': return 'Flights Agent';\n case 'hotels': return 'Hotels Agent';\n case 'experiences': return 'Experiences Agent';\n default: return `${agent} Agent`;\n }\n };\n\n const handleOptionSelect = (option: any) => {\n resolve(JSON.stringify(option));\n };\n\n return (\n
\n

{formatAgentName(agent)}: {message}

\n\n
\n {options.map((opt, idx) => {\n if ('airline' in opt) {\n const isRecommended = (recommendation as Flight).airline === opt.airline;\n // Flight options\n return (\n handleOptionSelect(opt)}\n >\n {isRecommended && ⭐ Recommended}\n
\n {opt.airline}\n {opt.price}\n
\n
\n {opt.departure} → {opt.arrival}\n
\n
\n {opt.duration}\n
\n \n );\n }\n const isRecommended = (recommendation as Hotel).name === opt.name;\n\n // Hotel options\n return (\n handleOptionSelect(opt)}\n >\n {isRecommended && ⭐ Recommended}\n
\n {opt.name}\n {opt.rating}\n
\n
\n 📍 {opt.location}\n
\n
\n {opt.price_per_night}\n
\n \n );\n })}\n
\n
\n )\n}\n\nexport default function Subgraphs({ params }: SubgraphsProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50;\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight);\n\n const chatTitle = 'Travel Planning Assistant';\n const chatDescription = 'Plan your perfect trip with AI specialists';\n const initialLabel = 'Hi! ✈️ Ready to plan an amazing trip? Try saying \"Plan a trip to Paris\" or \"Find me flights to Tokyo\"';\n\n return (\n \n
\n \n {isMobile ? (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight);\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n
\n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n \n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n ) : (\n \n )}\n \n \n );\n}\n\nfunction TravelPlanner() {\n const { isMobile } = useMobileView();\n const { state: agentState, nodeName } = useCoAgent({\n name: \"subgraphs\",\n initialState: INITIAL_STATE,\n config: {\n streamSubgraphs: true,\n }\n });\n\n useLangGraphInterrupt({\n render: ({ event, resolve }) => ,\n });\n\n // Current itinerary strip\n const ItineraryStrip = () => {\n const selectedFlight = agentState?.itinerary?.flight;\n const selectedHotel = agentState?.itinerary?.hotel;\n const hasExperiences = agentState?.experiences?.length > 0;\n\n return (\n
\n
Current Itinerary:
\n
\n
\n 📍\n Amsterdam → San Francisco\n
\n {selectedFlight && (\n
\n ✈️\n {selectedFlight.airline} - {selectedFlight.price}\n
\n )}\n {selectedHotel && (\n
\n 🏨\n {selectedHotel.name}\n
\n )}\n {hasExperiences && (\n
\n 🎯\n {agentState.experiences.length} experiences planned\n
\n )}\n
\n
\n );\n };\n\n // Compact agent status\n const AgentStatus = () => {\n let activeAgent = 'supervisor';\n if (nodeName?.includes('flights_agent')) {\n activeAgent = 'flights';\n }\n if (nodeName?.includes('hotels_agent')) {\n activeAgent = 'hotels';\n }\n if (nodeName?.includes('experiences_agent')) {\n activeAgent = 'experiences';\n }\n return (\n
\n
Active Agent:
\n
\n
\n 👨‍💼\n Supervisor\n
\n
\n ✈️\n Flights\n
\n
\n 🏨\n Hotels\n
\n
\n 🎯\n Experiences\n
\n
\n
\n )\n };\n\n // Travel details component\n const TravelDetails = () => (\n
\n
\n

✈️ Flight Options

\n
\n {agentState?.flights?.length > 0 ? (\n agentState.flights.map((flight, index) => (\n
\n {flight.airline}:\n {flight.departure} → {flight.arrival} ({flight.duration}) - {flight.price}\n
\n ))\n ) : (\n

No flights found yet

\n )}\n {agentState?.itinerary?.flight && (\n
\n Selected: {agentState.itinerary.flight.airline} - {agentState.itinerary.flight.price}\n
\n )}\n
\n
\n\n
\n

🏨 Hotel Options

\n
\n {agentState?.hotels?.length > 0 ? (\n agentState.hotels.map((hotel, index) => (\n
\n {hotel.name}:\n {hotel.location} - {hotel.price_per_night} ({hotel.rating})\n
\n ))\n ) : (\n

No hotels found yet

\n )}\n {agentState?.itinerary?.hotel && (\n
\n Selected: {agentState.itinerary.hotel.name} - {agentState.itinerary.hotel.price_per_night}\n
\n )}\n
\n
\n\n
\n

🎯 Experiences

\n
\n {agentState?.experiences?.length > 0 ? (\n agentState.experiences.map((experience, index) => (\n
\n
{experience.name}
\n
{experience.type}
\n
{experience.description}
\n
Location: {experience.location}
\n
\n ))\n ) : (\n

No experiences planned yet

\n )}\n
\n
\n
\n );\n\n return (\n
\n \n \n \n
\n );\n}", + "language": "typescript", + "type": "file" + }, + { + "name": "style.css", + "content": "/* Travel Planning Subgraphs Demo Styles */\n/* Essential styles that cannot be achieved with Tailwind classes */\n\n/* Main container with CopilotSidebar layout */\n.travel-planner-container {\n min-height: 100vh;\n padding: 2rem;\n background: linear-gradient(135deg, #f0f9ff 0%, #e0f2fe 100%);\n}\n\n/* Travel content area styles */\n.travel-content {\n max-width: 1200px;\n margin: 0 auto;\n padding: 0 1rem;\n display: flex;\n flex-direction: column;\n gap: 1rem;\n}\n\n/* Itinerary strip */\n.itinerary-strip {\n background: white;\n border-radius: 0.5rem;\n padding: 1rem;\n border: 1px solid #e5e7eb;\n box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);\n}\n\n.itinerary-label {\n font-size: 0.875rem;\n font-weight: 600;\n color: #6b7280;\n margin-bottom: 0.5rem;\n}\n\n.itinerary-items {\n display: flex;\n flex-wrap: wrap;\n gap: 1rem;\n}\n\n.itinerary-item {\n display: flex;\n align-items: center;\n gap: 0.5rem;\n padding: 0.5rem 0.75rem;\n background: #f9fafb;\n border-radius: 0.375rem;\n font-size: 0.875rem;\n}\n\n.item-icon {\n font-size: 1rem;\n}\n\n/* Agent status */\n.agent-status {\n background: white;\n border-radius: 0.5rem;\n padding: 1rem;\n border: 1px solid #e5e7eb;\n box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);\n}\n\n.status-label {\n font-size: 0.875rem;\n font-weight: 600;\n color: #6b7280;\n margin-bottom: 0.5rem;\n}\n\n.agent-indicators {\n display: flex;\n gap: 0.75rem;\n}\n\n.agent-indicator {\n display: flex;\n align-items: center;\n gap: 0.5rem;\n padding: 0.5rem 0.75rem;\n border-radius: 0.375rem;\n font-size: 0.875rem;\n background: #f9fafb;\n border: 1px solid #e5e7eb;\n transition: all 0.2s ease;\n}\n\n.agent-indicator.active {\n background: #dbeafe;\n border-color: #3b82f6;\n color: #1d4ed8;\n box-shadow: 0 0 0 2px rgba(59, 130, 246, 0.1);\n}\n\n/* Travel details sections */\n.travel-details {\n background: white;\n border-radius: 0.5rem;\n padding: 
1rem;\n border: 1px solid #e5e7eb;\n box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);\n display: grid;\n gap: 1rem;\n}\n\n.details-section h4 {\n font-size: 1rem;\n font-weight: 600;\n color: #1f2937;\n margin-bottom: 0.5rem;\n display: flex;\n align-items: center;\n gap: 0.5rem;\n}\n\n.detail-items {\n display: flex;\n flex-direction: column;\n gap: 0.5rem;\n}\n\n.detail-item {\n padding: 0.5rem;\n background: #f9fafb;\n border-radius: 0.25rem;\n font-size: 0.875rem;\n display: flex;\n justify-content: space-between;\n}\n\n.detail-item strong {\n color: #6b7280;\n font-weight: 500;\n}\n\n.detail-tips {\n padding: 0.5rem;\n background: #eff6ff;\n border-radius: 0.25rem;\n font-size: 0.75rem;\n color: #1d4ed8;\n}\n\n.activity-item {\n padding: 0.75rem;\n background: #f0f9ff;\n border-radius: 0.25rem;\n border-left: 2px solid #0ea5e9;\n}\n\n.activity-name {\n font-weight: 600;\n color: #1f2937;\n font-size: 0.875rem;\n margin-bottom: 0.25rem;\n}\n\n.activity-category {\n font-size: 0.75rem;\n color: #0ea5e9;\n margin-bottom: 0.25rem;\n}\n\n.activity-description {\n color: #4b5563;\n font-size: 0.75rem;\n margin-bottom: 0.25rem;\n}\n\n.activity-meta {\n font-size: 0.75rem;\n color: #6b7280;\n}\n\n.no-activities {\n text-align: center;\n color: #9ca3af;\n font-style: italic;\n padding: 1rem;\n font-size: 0.875rem;\n}\n\n/* Interrupt UI for Chat Sidebar (Generative UI) */\n.interrupt-container {\n display: flex;\n flex-direction: column;\n gap: 1rem;\n max-width: 100%;\n padding-top: 34px;\n}\n\n.interrupt-header {\n margin-bottom: 0.5rem;\n}\n\n.agent-name {\n font-size: 0.875rem;\n font-weight: 600;\n color: #1f2937;\n margin: 0 0 0.25rem 0;\n}\n\n.agent-message {\n font-size: 0.75rem;\n color: #6b7280;\n margin: 0;\n line-height: 1.4;\n}\n\n.interrupt-options {\n padding: 0.75rem;\n display: flex;\n flex-direction: column;\n gap: 0.5rem;\n max-height: 300px;\n overflow-y: auto;\n}\n\n.option-card {\n display: flex;\n flex-direction: column;\n gap: 0.25rem;\n padding: 
0.75rem;\n background: #f9fafb;\n border: 1px solid #e5e7eb;\n border-radius: 0.5rem;\n cursor: pointer;\n transition: all 0.2s ease;\n text-align: left;\n position: relative;\n min-height: auto;\n}\n\n.option-card:hover {\n background: #f3f4f6;\n border-color: #d1d5db;\n}\n\n.option-card:active {\n background: #e5e7eb;\n}\n\n.option-card.recommended {\n background: #eff6ff;\n border-color: #3b82f6;\n box-shadow: 0 0 0 1px rgba(59, 130, 246, 0.1);\n}\n\n.option-card.recommended:hover {\n background: #dbeafe;\n}\n\n.recommendation-badge {\n position: absolute;\n top: -2px;\n right: -2px;\n background: #3b82f6;\n color: white;\n font-size: 0.625rem;\n padding: 0.125rem 0.375rem;\n border-radius: 0.75rem;\n font-weight: 500;\n}\n\n.option-header {\n display: flex;\n justify-content: space-between;\n align-items: center;\n margin-bottom: 0.125rem;\n}\n\n.airline-name, .hotel-name {\n font-weight: 600;\n font-size: 0.8rem;\n color: #1f2937;\n}\n\n.price, .rating {\n font-weight: 600;\n font-size: 0.75rem;\n color: #059669;\n}\n\n.route-info, .location-info {\n font-size: 0.7rem;\n color: #6b7280;\n margin-bottom: 0.125rem;\n}\n\n.duration-info, .price-info {\n font-size: 0.7rem;\n color: #9ca3af;\n}\n\n/* Mobile responsive adjustments */\n@media (max-width: 768px) {\n .travel-planner-container {\n padding: 0.5rem;\n padding-bottom: 120px; /* Space for mobile chat */\n }\n \n .travel-content {\n padding: 0;\n gap: 0.75rem;\n }\n \n .itinerary-items {\n flex-direction: column;\n gap: 0.5rem;\n }\n \n .agent-indicators {\n flex-direction: column;\n gap: 0.5rem;\n }\n \n .agent-indicator {\n padding: 0.75rem;\n }\n \n .travel-details {\n padding: 0.75rem;\n }\n\n .interrupt-container {\n padding: 0.5rem;\n }\n\n .option-card {\n padding: 0.625rem;\n }\n\n .interrupt-options {\n max-height: 250px;\n }\n}", + "language": "css", + "type": "file" + }, + { + "name": "README.mdx", + "content": "# LangGraph Subgraphs Demo: Travel Planning Assistant ✈️\n\nThis demo showcases 
**LangGraph subgraphs** through an interactive travel planning assistant. Watch as specialized AI agents collaborate to plan your perfect trip!\n\n## What are LangGraph Subgraphs? 🤖\n\n**Subgraphs** are the key to building modular, scalable AI systems in LangGraph. A subgraph is essentially \"a graph that is used as a node in another graph\" - enabling powerful encapsulation and reusability.\nFor more info, check out the [LangGraph docs](https://langchain-ai.github.io/langgraph/concepts/subgraphs/).\n\n### Key Concepts\n\n- **Encapsulation**: Each subgraph handles a specific domain with its own expertise\n- **Modularity**: Subgraphs can be developed, tested, and maintained independently \n- **Reusability**: The same subgraph can be used across multiple parent graphs\n- **State Communication**: Subgraphs can share state or use different schemas with transformations\n\n## Demo Architecture 🗺️\n\nThis travel planner demonstrates **supervisor-coordinated subgraphs** with **human-in-the-loop** decision making:\n\n### Parent Graph: Travel Supervisor\n- **Role**: Coordinates the travel planning process and routes to specialized agents\n- **State Management**: Maintains a shared itinerary object across all subgraphs\n- **Intelligence**: Determines what's needed and when each agent should be called\n\n### Subgraph 1: ✈️ Flights Agent\n- **Specialization**: Finding and booking flight options\n- **Process**: Presents flight options from Amsterdam to San Francisco with recommendations\n- **Interaction**: Uses interrupts to let users choose their preferred flight\n- **Data**: Static flight options (KLM, United) with pricing and duration\n\n### Subgraph 2: 🏨 Hotels Agent \n- **Specialization**: Finding and booking accommodation\n- **Process**: Shows hotel options in San Francisco with different price points\n- **Interaction**: Uses interrupts for user to select their preferred hotel\n- **Data**: Static hotel options (Hotel Zephyr, Ritz-Carlton, Hotel Zoe)\n\n### Subgraph 3: 🎯 
Experiences Agent\n- **Specialization**: Curating restaurants and activities\n- **Process**: AI-powered recommendations based on selected flights and hotels\n- **Features**: Combines 2 restaurants and 2 activities with location-aware suggestions\n- **Data**: Static experiences (Pier 39, Golden Gate Bridge, Swan Oyster Depot, Tartine Bakery)\n\n## How It Works 🔄\n\n1. **User Request**: \"Help me plan a trip to San Francisco\"\n2. **Supervisor Analysis**: Determines what travel components are needed\n3. **Sequential Routing**: Routes to each agent in logical order:\n - First: Flights Agent (get transportation sorted)\n - Then: Hotels Agent (book accommodation) \n - Finally: Experiences Agent (plan activities)\n4. **Human Decisions**: Each agent presents options and waits for user choice via interrupts\n5. **State Building**: Selected choices are stored in the shared itinerary object\n6. **Completion**: All agents report back to supervisor for final coordination\n\n## State Communication Patterns 📊\n\n### Shared State Schema\nAll subgraph agents share and contribute to a common state object. When any agent updates the shared state, these changes are immediately reflected in the frontend through real-time syncing. This ensures that:\n\n- **Flight selections** from the Flights Agent are visible to subsequent agents\n- **Hotel choices** influence the Experiences Agent's recommendations \n- **All updates** are synchronized with the frontend UI in real-time\n- **State persistence** maintains the travel itinerary throughout the workflow\n\n### Human-in-the-Loop Pattern\nTwo of the specialist agents use **interrupts** to pause execution and gather user preferences:\n\n- **Flights Agent**: Presents options → interrupt → waits for selection → continues\n- **Hotels Agent**: Shows hotels → interrupt → waits for choice → continues\n\n## Try These Examples! 
💡\n\n### Getting Started\n- \"Help me plan a trip to San Francisco\"\n- \"I want to visit San Francisco from Amsterdam\"\n- \"Plan my travel itinerary\"\n\n### During the Process\nWhen the Flights Agent presents options:\n- Choose between KLM ($650, 11h 30m) or United ($720, 12h 15m)\n\nWhen the Hotels Agent shows accommodations:\n- Select from Hotel Zephyr, The Ritz-Carlton, or Hotel Zoe\n\nThe Experiences Agent will then provide tailored recommendations based on your choices!\n\n## Frontend Capabilities 👁️\n\n- **Human-in-the-loop with interrupts** from subgraphs for user decision making\n- **Subgraphs detection and streaming** to show which agent is currently active\n- **Real-time state updates** as the shared itinerary is built across agents\n", + "language": "markdown", + "type": "file" + }, + { + "name": "agent.py", + "content": "\"\"\"\nA travel agent supervisor demo showcasing multi-agent architecture with subgraphs.\nThe supervisor coordinates specialized agents: flights finder, hotels finder, and experiences finder.\n\"\"\"\n\nfrom typing import Dict, List, Any, Optional, Annotated, Union\nfrom dataclasses import dataclass\nimport json\nimport os\nfrom pydantic import BaseModel, Field\n\n# LangGraph imports\nfrom langchain_core.runnables import RunnableConfig\nfrom langgraph.graph import StateGraph, END, START\nfrom langgraph.types import Command, interrupt\nfrom langgraph.graph import MessagesState\n\n# OpenAI imports\nfrom langchain_openai import ChatOpenAI\nfrom langchain_core.messages import SystemMessage, AIMessage\n\ndef create_interrupt(message: str, options: List[Any], recommendation: Any, agent: str):\n return interrupt({\n \"message\": message,\n \"options\": options,\n \"recommendation\": recommendation,\n \"agent\": agent,\n })\n\n# State schema for travel planning\n@dataclass\nclass Flight:\n airline: str\n departure: str\n arrival: str\n price: str\n duration: str\n\n@dataclass\nclass Hotel:\n name: str\n location: str\n price_per_night: 
str\n rating: str\n\n@dataclass\nclass Experience:\n name: str\n type: str # \"restaurant\" or \"activity\"\n description: str\n location: str\n\ndef merge_itinerary(left: Union[dict, None] = None, right: Union[dict, None] = None) -> dict:\n \"\"\"Custom reducer to merge shopping cart updates.\"\"\"\n if not left:\n left = {}\n if not right:\n right = {}\n\n return {**left, **right}\n\nclass TravelAgentState(MessagesState):\n \"\"\"Shared state for the travel agent system\"\"\"\n # Travel request details\n origin: str = \"\"\n destination: str = \"\"\n\n # Results from each agent\n flights: List[Flight] = None\n hotels: List[Hotel] = None\n experiences: List[Experience] = None\n\n itinerary: Annotated[dict, merge_itinerary] = None\n\n # Tools available to all agents\n tools: List[Any] = None\n\n # Supervisor routing\n next_agent: Optional[str] = None\n\n# Static data for demonstration\nSTATIC_FLIGHTS = [\n Flight(\"KLM\", \"Amsterdam (AMS)\", \"San Francisco (SFO)\", \"$650\", \"11h 30m\"),\n Flight(\"United\", \"Amsterdam (AMS)\", \"San Francisco (SFO)\", \"$720\", \"12h 15m\")\n]\n\nSTATIC_HOTELS = [\n Hotel(\"Hotel Zephyr\", \"Fisherman's Wharf\", \"$280/night\", \"4.2 stars\"),\n Hotel(\"The Ritz-Carlton\", \"Nob Hill\", \"$550/night\", \"4.8 stars\"),\n Hotel(\"Hotel Zoe\", \"Union Square\", \"$320/night\", \"4.4 stars\")\n]\n\nSTATIC_EXPERIENCES = [\n Experience(\"Pier 39\", \"activity\", \"Iconic waterfront destination with shops and sea lions\", \"Fisherman's Wharf\"),\n Experience(\"Golden Gate Bridge\", \"activity\", \"World-famous suspension bridge with stunning views\", \"Golden Gate\"),\n Experience(\"Swan Oyster Depot\", \"restaurant\", \"Historic seafood counter serving fresh oysters\", \"Polk Street\"),\n Experience(\"Tartine Bakery\", \"restaurant\", \"Artisanal bakery famous for bread and pastries\", \"Mission District\")\n]\n\n# Flights finder subgraph\nasync def flights_finder(state: TravelAgentState, config: RunnableConfig):\n \"\"\"Subgraph 
that finds flight options\"\"\"\n\n # Simulate flight search with static data\n flights = STATIC_FLIGHTS\n\n selected_flight = state.get('itinerary', {}).get('flight', None)\n if not selected_flight:\n selected_flight = create_interrupt(\n message=f\"\"\"\n Found {len(flights)} flight options from {state.get('origin', 'Amsterdam')} to {state.get('destination', 'San Francisco')}.\n I recommend choosing the flight by {flights[0].airline} since it's known to be on time and cheaper.\n \"\"\",\n options=flights,\n recommendation=flights[0],\n agent=\"flights\"\n )\n\n if isinstance(selected_flight, str):\n selected_flight = json.loads(selected_flight)\n return Command(\n goto=END,\n update={\n \"flights\": flights,\n \"itinerary\": {\n \"flight\": selected_flight\n },\n \"messages\": state[\"messages\"] + [{\n \"role\": \"assistant\",\n \"content\": f\"Flights Agent: Great. I'll book you the {selected_flight[\"airline\"]} flight from {selected_flight[\"departure\"]} to {selected_flight[\"arrival\"]}.\"\n }]\n }\n )\n\n# Hotels finder subgraph\nasync def hotels_finder(state: TravelAgentState, config: RunnableConfig):\n \"\"\"Subgraph that finds hotel options\"\"\"\n\n # Simulate hotel search with static data\n hotels = STATIC_HOTELS\n selected_hotel = state.get('itinerary', {}).get('hotel', None)\n if not selected_hotel:\n selected_hotel = create_interrupt(\n message=f\"\"\"\n Found {len(hotels)} accommodation options in {state.get('destination', 'San Francisco')}.\n I recommend choosing the {hotels[2].name} since it strikes the balance between rating, price, and location.\n \"\"\",\n options=hotels,\n recommendation=hotels[2],\n agent=\"hotels\"\n )\n\n if isinstance(selected_hotel, str):\n selected_hotel = json.loads(selected_hotel)\n return Command(\n goto=END,\n update={\n \"hotels\": hotels,\n \"itinerary\": {\n \"hotel\": selected_hotel\n },\n \"messages\": state[\"messages\"] + [{\n \"role\": \"assistant\",\n \"content\": f\"Hotels Agent: Excellent choice! 
You'll like {selected_hotel[\"name\"]}.\"\n }]\n }\n )\n\n# Experiences finder subgraph\nasync def experiences_finder(state: TravelAgentState, config: RunnableConfig):\n \"\"\"Subgraph that finds restaurant and activity recommendations\"\"\"\n\n # Filter experiences (2 restaurants, 2 activities)\n restaurants = [exp for exp in STATIC_EXPERIENCES if exp.type == \"restaurant\"][:2]\n activities = [exp for exp in STATIC_EXPERIENCES if exp.type == \"activity\"][:2]\n experiences = restaurants + activities\n\n model = ChatOpenAI(model=\"gpt-4o\")\n\n if config is None:\n config = RunnableConfig(recursion_limit=25)\n\n itinerary = state.get(\"itinerary\", {})\n\n system_prompt = f\"\"\"\n You are the experiences agent. Your job is to find restaurants and activities for the user.\n You already went ahead and found a bunch of experiences. All you have to do now, is to let the user know of your findings.\n \n Current status:\n - Origin: {state.get('origin', 'Amsterdam')}\n - Destination: {state.get('destination', 'San Francisco')}\n - Flight chosen: {itinerary.get(\"flight\", None)}\n - Hotel chosen: {itinerary.get(\"hotel\", None)}\n - activities found: {activities}\n - restaurants found: {restaurants}\n \"\"\"\n\n # Get supervisor decision\n response = await model.ainvoke([\n SystemMessage(content=system_prompt),\n *state[\"messages\"],\n ], config)\n\n return Command(\n goto=END,\n update={\n \"experiences\": experiences,\n \"messages\": state[\"messages\"] + [response]\n }\n )\n\nclass SupervisorResponseFormatter(BaseModel):\n \"\"\"Always use this tool to structure your response to the user.\"\"\"\n answer: str = Field(description=\"The answer to the user\")\n next_agent: str | None = Field(description=\"The agent to go to. 
Not required if you do not want to route to another agent.\")\n\n# Supervisor agent\nasync def supervisor_agent(state: TravelAgentState, config: RunnableConfig):\n \"\"\"Main supervisor that coordinates all subgraphs\"\"\"\n\n itinerary = state.get(\"itinerary\", {})\n\n # Check what's already completed\n has_flights = itinerary.get(\"flight\", None) is not None\n has_hotels = itinerary.get(\"hotel\", None) is not None\n has_experiences = state.get(\"experiences\", None) is not None\n\n system_prompt = f\"\"\"\n You are a travel planning supervisor. Your job is to coordinate specialized agents to help plan a trip.\n \n Current status:\n - Origin: {state.get('origin', 'Amsterdam')}\n - Destination: {state.get('destination', 'San Francisco')}\n - Flights found: {has_flights}\n - Hotels found: {has_hotels}\n - Experiences found: {has_experiences}\n - Itinerary (Things that the user has already confirmed selection on): {json.dumps(itinerary, indent=2)}\n \n Available agents:\n - flights_agent: Finds flight options\n - hotels_agent: Finds hotel options \n - experiences_agent: Finds restaurant and activity recommendations\n - {END}: Mark task as complete when all information is gathered\n \n You must route to the appropriate agent based on what's missing. 
Once all agents have completed their tasks, route to '{END}'.\n \"\"\"\n\n # Define the model\n model = ChatOpenAI(model=\"gpt-4o\")\n\n if config is None:\n config = RunnableConfig(recursion_limit=25)\n\n # Bind the routing tool\n model_with_tools = model.bind_tools(\n [SupervisorResponseFormatter],\n parallel_tool_calls=False,\n )\n\n # Get supervisor decision\n response = await model_with_tools.ainvoke([\n SystemMessage(content=system_prompt),\n *state[\"messages\"],\n ], config)\n\n messages = state[\"messages\"] + [response]\n\n # Handle tool calls for routing\n if hasattr(response, \"tool_calls\") and response.tool_calls:\n tool_call = response.tool_calls[0]\n\n if isinstance(tool_call, dict):\n tool_call_args = tool_call[\"args\"]\n else:\n tool_call_args = tool_call.args\n\n next_agent = tool_call_args[\"next_agent\"]\n\n # Add tool response\n tool_response = {\n \"role\": \"tool\",\n \"content\": f\"Routing to {next_agent} and providing the answer\",\n \"tool_call_id\": tool_call.id if hasattr(tool_call, 'id') else tool_call[\"id\"]\n }\n\n messages = messages + [tool_response, AIMessage(content=tool_call_args[\"answer\"])]\n\n if next_agent is not None:\n return Command(goto=next_agent)\n\n # Fallback if no tool call\n return Command(\n goto=END,\n update={\"messages\": messages}\n )\n\n# Create subgraphs\nflights_graph = StateGraph(TravelAgentState)\nflights_graph.add_node(\"flights_agent_chat_node\", flights_finder)\nflights_graph.set_entry_point(\"flights_agent_chat_node\")\nflights_graph.add_edge(START, \"flights_agent_chat_node\")\nflights_graph.add_edge(\"flights_agent_chat_node\", END)\nflights_subgraph = flights_graph.compile()\n\nhotels_graph = StateGraph(TravelAgentState)\nhotels_graph.add_node(\"hotels_agent_chat_node\", hotels_finder)\nhotels_graph.set_entry_point(\"hotels_agent_chat_node\")\nhotels_graph.add_edge(START, \"hotels_agent_chat_node\")\nhotels_graph.add_edge(\"hotels_agent_chat_node\", END)\nhotels_subgraph = 
hotels_graph.compile()\n\nexperiences_graph = StateGraph(TravelAgentState)\nexperiences_graph.add_node(\"experiences_agent_chat_node\", experiences_finder)\nexperiences_graph.set_entry_point(\"experiences_agent_chat_node\")\nexperiences_graph.add_edge(START, \"experiences_agent_chat_node\")\nexperiences_graph.add_edge(\"experiences_agent_chat_node\", END)\nexperiences_subgraph = experiences_graph.compile()\n\n# Main supervisor workflow\nworkflow = StateGraph(TravelAgentState)\n\n# Add supervisor and subgraphs as nodes\nworkflow.add_node(\"supervisor\", supervisor_agent)\nworkflow.add_node(\"flights_agent\", flights_subgraph)\nworkflow.add_node(\"hotels_agent\", hotels_subgraph)\nworkflow.add_node(\"experiences_agent\", experiences_subgraph)\n\n# Set entry point\nworkflow.set_entry_point(\"supervisor\")\nworkflow.add_edge(START, \"supervisor\")\n\n# Add edges back to supervisor after each subgraph\nworkflow.add_edge(\"flights_agent\", \"supervisor\")\nworkflow.add_edge(\"hotels_agent\", \"supervisor\")\nworkflow.add_edge(\"experiences_agent\", \"supervisor\")\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n memory = MemorySaver()\n graph = workflow.compile(checkpointer=memory)\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = workflow.compile()\n", + "language": "python", + "type": "file" + } + ], "langgraph-typescript::agentic_chat": [ { "name": "page.tsx", @@ -953,7 +1089,7 @@ "langgraph-typescript::tool_based_generative_ui": [ { "name": "page.tsx", - "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, 
CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku{\n japanese : string[] | [],\n english : string[] | [],\n image_names : string[] | [],\n selectedImage : string | null,\n}\n\ninterface HaikuCardProps{\n generatedHaiku : GenerateHaiku | Partial\n setHaikus : Dispatch>\n haikus : GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n
\n
\n
\n \n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )}\n \n \n );\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction HaikuCard({generatedHaiku, setHaikus, haikus} : HaikuCardProps) {\n return (\n
\n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n
\n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n const validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n if (correctedNames.length < 3) {\n const availableFallbacks = VALID_IMAGE_NAMES.filter(name => !usedValidNames.has(name));\n for (let i = availableFallbacks.length - 1; i > 0; i--) {\n const j = Math.floor(Math.random() * (i + 1));\n [availableFallbacks[i], availableFallbacks[j]] = [availableFallbacks[j], availableFallbacks[i]];\n }\n\n while (correctedNames.length < 3 && availableFallbacks.length > 0) {\n const fallbackName = availableFallbacks.pop();\n if (fallbackName) {\n correctedNames.push(fallbackName);\n }\n }\n }\n\n while (correctedNames.length < 3 && VALID_IMAGE_NAMES.length > 0) {\n const fallbackName = VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n correctedNames.push(fallbackName);\n }\n\n return correctedNames.slice(0, 3);\n };\n\n useCopilotAction({\n name: \"generate_haiku\",\n available: \"frontend\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n 
description: \"Names of 3 relevant images\",\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [...prev, newHaiku]);\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const generatedHaikus = useMemo(() => (\n haikus.filter((haiku) => haiku.english[0] !== \"A placeholder verse—\")\n ), [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n {/* Thumbnail List */}\n {Boolean(generatedHaikus.length) && !isMobile && (\n
\n {generatedHaikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n )}\n\n {/* Main Display */}\n
\n
\n {haikus.filter((_haiku: Haiku, index: number) => {\n if (haikus.length == 1) return true;\n else return index == activeIndex + 1;\n }).map((haiku, index) => (\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n \n \n );\n}\n", + "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku {\n japanese: string[] | [],\n english: string[] | [],\n image_names: string[] | [],\n selectedImage: string | null,\n}\n\ninterface HaikuCardProps {\n generatedHaiku: GenerateHaiku | Partial\n setHaikus: Dispatch>\n haikus: GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && }\n \n \n );\n}\n\nfunction MobileChat({ chatTitle, chatDescription, initialLabel }: { chatTitle: string, chatDescription: string, initialLabel: string }) {\n const defaultChatHeight = 50\n\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n return (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n \n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction getRandomImage(): string {\n return VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n}\n\nconst validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n while (correctedNames.length < 3) {\n const nextImage = getRandomImage();\n if (!usedValidNames.has(nextImage)) {\n correctedNames.push(nextImage);\n usedValidNames.add(nextImage);\n }\n }\n\n return correctedNames.slice(0, 3);\n};\n\nfunction HaikuCard({ generatedHaiku, setHaikus, haikus }: HaikuCardProps) {\n return (\n \n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n \n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n useCopilotAction({\n name: \"generate_haiku\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n description: `Names of 3 relevant images selected from the following: \\n -${VALID_IMAGE_NAMES.join('\\n -')}`,\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [newHaiku, ...prev].filter(h => h.english[0] !== \"A placeholder verse—\"));\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n \n\n {/* Main Display */}\n
\n
\n {haikus.map((haiku, index) => (\n (haikus.length == 1 || index == activeIndex) && (\n\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n setHaikus((prevHaikus) => {\n return prevHaikus.map((h, idx) => {\n if (idx === index) {\n return { ...h, selectedImage: imageName }\n } else {\n return { ...h }\n }\n })\n })}\n />\n ))}\n
\n )}\n
\n )\n ))}\n
\n \n \n );\n}\n\nfunction Thumbnails({ haikus, activeIndex, setActiveIndex, isMobile }: { haikus: Haiku[], activeIndex: number, setActiveIndex: (index: number) => void, isMobile: boolean }) {\n if (haikus.length == 0 || isMobile) { return null }\n return (\n
\n {haikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n \n ))}\n \n )\n\n}", "language": "typescript", "type": "file" }, @@ -971,13 +1107,45 @@ }, { "name": "agent.py", - "content": "\"\"\"\nAn example demonstrating tool-based generative UI using LangGraph.\n\"\"\"\n\nfrom typing import List, Any, Optional, Annotated\nimport os\n\n# LangGraph imports\nfrom langchain_openai import ChatOpenAI\nfrom langchain_core.runnables import RunnableConfig\nfrom langchain_core.messages import SystemMessage\nfrom langchain_core.tools import tool\nfrom langgraph.graph import StateGraph, END, START\nfrom langgraph.types import Command\nfrom langgraph.graph import MessagesState\nfrom langgraph.prebuilt import ToolNode\n\n@tool\ndef generate_haiku(\n japanese: Annotated[ # pylint: disable=unused-argument\n List[str],\n \"An array of three lines of the haiku in Japanese\"\n ],\n english: Annotated[ # pylint: disable=unused-argument\n List[str],\n \"An array of three lines of the haiku in English\"\n ]\n):\n \"\"\"\n Generate a haiku in Japanese and its English translation.\n Also select exactly 3 relevant images from the provided list based on the haiku's theme.\n \"\"\"\n\nclass AgentState(MessagesState):\n \"\"\"\n State of the agent.\n \"\"\"\n tools: List[Any]\n\nasync def chat_node(state: AgentState, config: Optional[RunnableConfig] = None):\n \"\"\"\n The main function handling chat and tool calls.\n \"\"\"\n\n system_prompt = \"\"\"\n You assist the user in generating a haiku.\n When generating a haiku using the 'generate_haiku' tool.\n \"\"\"\n\n # Define the model\n model = ChatOpenAI(model=\"gpt-4o\")\n\n # Define config for the model\n if config is None:\n config = RunnableConfig(recursion_limit=25)\n\n # Bind the tools to the model\n model_with_tools = model.bind_tools(\n [generate_haiku],\n # Disable parallel tool calls to avoid race conditions\n parallel_tool_calls=False,\n )\n\n # Run the model to generate a response\n response = await model_with_tools.ainvoke([\n SystemMessage(content=system_prompt),\n 
*state[\"messages\"],\n ], config)\n\n if response.tool_calls:\n return Command(\n goto=\"tool_node\",\n update={\n \"messages\": state[\"messages\"] + [response]\n }\n )\n # Return Command to end with updated messages\n return Command(\n goto=END,\n update={\n \"messages\": state[\"messages\"] + [response]\n }\n )\n\n# Define the graph\nworkflow = StateGraph(AgentState)\n\n# Add nodes\nworkflow.add_node(\"chat_node\", chat_node)\nworkflow.add_node(\"tool_node\", ToolNode([generate_haiku]))\n\n# Add edges\nworkflow.set_entry_point(\"chat_node\")\nworkflow.add_edge(START, \"chat_node\")\nworkflow.add_edge(\"chat_node\", END)\nworkflow.add_edge(\"tool_node\", END)\n\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n memory = MemorySaver()\n graph = workflow.compile(checkpointer=memory)\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = workflow.compile()\n\n", + "content": "\"\"\"\nAn example demonstrating tool-based generative UI using LangGraph.\n\"\"\"\n\nimport os\nfrom typing import Any, List\nfrom typing_extensions import Literal\nfrom langchain_openai import ChatOpenAI\nfrom langchain_core.messages import SystemMessage\nfrom langchain_core.runnables import RunnableConfig\nfrom langgraph.graph import StateGraph, END\nfrom langgraph.types import Command\nfrom langgraph.graph import MessagesState\nfrom langgraph.prebuilt import ToolNode\n\n\nclass AgentState(MessagesState):\n \"\"\"\n State of the agent.\n \"\"\"\n tools: List[Any]\n\nasync def chat_node(state: AgentState, config: RunnableConfig) -> Command[Literal[\"tool_node\", \"__end__\"]]:\n \"\"\"\n Standard chat node based on the ReAct design 
pattern. It handles:\n - The model to use (and binds in CopilotKit actions and the tools defined above)\n - The system prompt\n - Getting a response from the model\n - Handling tool calls\n\n For more about the ReAct design pattern, see:\n https://www.perplexity.ai/search/react-agents-NcXLQhreS0WDzpVaS4m9Cg\n \"\"\"\n\n model = ChatOpenAI(model=\"gpt-4o\")\n\n model_with_tools = model.bind_tools(\n [\n *state.get(\"tools\", []), # bind tools defined by ag-ui\n ],\n parallel_tool_calls=False,\n )\n\n system_message = SystemMessage(\n content=f\"Help the user with writing Haikus. If the user asks for a haiku, use the generate_haiku tool to display the haiku to the user.\"\n )\n\n response = await model_with_tools.ainvoke([\n system_message,\n *state[\"messages\"],\n ], config)\n\n return Command(\n goto=END,\n update={\n \"messages\": [response],\n }\n )\n\nworkflow = StateGraph(AgentState)\nworkflow.add_node(\"chat_node\", chat_node)\n# This is required even though we don't have any backend tools to pass in.\nworkflow.add_node(\"tool_node\", ToolNode(tools=[]))\nworkflow.set_entry_point(\"chat_node\")\nworkflow.add_edge(\"chat_node\", END)\n\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n memory = MemorySaver()\n graph = workflow.compile(checkpointer=memory)\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = workflow.compile()\n", + "language": "python", + "type": "file" + }, + { + "name": "agent.ts", + "content": "/**\n * An example demonstrating tool-based generative UI using LangGraph.\n */\n\nimport { ChatOpenAI } from \"@langchain/openai\";\nimport { SystemMessage } from 
\"@langchain/core/messages\";\nimport { RunnableConfig } from \"@langchain/core/runnables\";\nimport { Command, Annotation, MessagesAnnotation, StateGraph, END, START } from \"@langchain/langgraph\";\n\n\nexport const AgentStateAnnotation = Annotation.Root({\n tools: Annotation(),\n ...MessagesAnnotation.spec,\n});\nexport type AgentState = typeof AgentStateAnnotation.State;\n\nasync function chatNode(state: AgentState, config?: RunnableConfig): Promise {\n const model = new ChatOpenAI({ model: \"gpt-4o\" });\n\n const modelWithTools = model.bindTools(\n [\n ...state.tools || []\n ],\n { parallel_tool_calls: false }\n );\n\n const systemMessage = new SystemMessage({\n content: 'Help the user with writing Haikus. If the user asks for a haiku, use the generate_haiku tool to display the haiku to the user.'\n });\n\n const response = await modelWithTools.invoke([\n systemMessage,\n ...state.messages,\n ], config);\n\n return new Command({\n goto: END,\n update: {\n messages: [response]\n }\n });\n}\n\nconst workflow = new StateGraph(AgentStateAnnotation);\nworkflow.addNode(\"chat_node\", chatNode);\n\nworkflow.addEdge(START, \"chat_node\");\n\nexport const toolBasedGenerativeUiGraph = workflow.compile();", + "language": "ts", + "type": "file" + } + ], + "langgraph-typescript::subgraphs": [ + { + "name": "page.tsx", + "content": "\"use client\";\nimport React, { useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport { CopilotKit, useCoAgent, useLangGraphInterrupt } from \"@copilotkit/react-core\";\nimport { CopilotSidebar } from \"@copilotkit/react-ui\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface SubgraphsProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\n// Travel planning data types\ninterface Flight {\n airline: string;\n arrival: string;\n departure: string;\n duration: string;\n price: 
string;\n}\n\ninterface Hotel {\n location: string;\n name: string;\n price_per_night: string;\n rating: string;\n}\n\ninterface Experience {\n name: string;\n description: string;\n location: string;\n type: string;\n}\n\ninterface Itinerary {\n hotel?: Hotel;\n flight?: Flight;\n experiences?: Experience[];\n}\n\ntype AvailableAgents = 'flights' | 'hotels' | 'experiences' | 'supervisor'\n\ninterface TravelAgentState {\n experiences: Experience[],\n flights: Flight[],\n hotels: Hotel[],\n itinerary: Itinerary\n planning_step: string\n active_agent: AvailableAgents\n}\n\nconst INITIAL_STATE: TravelAgentState = {\n itinerary: {},\n experiences: [],\n flights: [],\n hotels: [],\n planning_step: \"start\",\n active_agent: 'supervisor'\n};\n\ninterface InterruptEvent {\n message: string;\n options: TAgent extends 'flights' ? Flight[] : TAgent extends 'hotels' ? Hotel[] : never,\n recommendation: TAgent extends 'flights' ? Flight : TAgent extends 'hotels' ? Hotel : never,\n agent: TAgent\n}\n\nfunction InterruptHumanInTheLoop({\n event,\n resolve,\n}: {\n event: { value: InterruptEvent };\n resolve: (value: string) => void;\n}) {\n const { message, options, agent, recommendation } = event.value;\n\n // Format agent name with emoji\n const formatAgentName = (agent: string) => {\n switch (agent) {\n case 'flights': return 'Flights Agent';\n case 'hotels': return 'Hotels Agent';\n case 'experiences': return 'Experiences Agent';\n default: return `${agent} Agent`;\n }\n };\n\n const handleOptionSelect = (option: any) => {\n resolve(JSON.stringify(option));\n };\n\n return (\n
\n

{formatAgentName(agent)}: {message}

\n\n
\n {options.map((opt, idx) => {\n if ('airline' in opt) {\n const isRecommended = (recommendation as Flight).airline === opt.airline;\n // Flight options\n return (\n handleOptionSelect(opt)}\n >\n {isRecommended && ⭐ Recommended}\n
\n {opt.airline}\n {opt.price}\n
\n
\n {opt.departure} → {opt.arrival}\n
\n
\n {opt.duration}\n
\n \n );\n }\n const isRecommended = (recommendation as Hotel).name === opt.name;\n\n // Hotel options\n return (\n handleOptionSelect(opt)}\n >\n {isRecommended && ⭐ Recommended}\n
\n {opt.name}\n {opt.rating}\n
\n
\n 📍 {opt.location}\n
\n
\n {opt.price_per_night}\n
\n \n );\n })}\n
\n
\n )\n}\n\nexport default function Subgraphs({ params }: SubgraphsProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50;\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight);\n\n const chatTitle = 'Travel Planning Assistant';\n const chatDescription = 'Plan your perfect trip with AI specialists';\n const initialLabel = 'Hi! ✈️ Ready to plan an amazing trip? Try saying \"Plan a trip to Paris\" or \"Find me flights to Tokyo\"';\n\n return (\n \n
\n \n {isMobile ? (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight);\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n
\n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n \n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n ) : (\n \n )}\n \n \n );\n}\n\nfunction TravelPlanner() {\n const { isMobile } = useMobileView();\n const { state: agentState, nodeName } = useCoAgent({\n name: \"subgraphs\",\n initialState: INITIAL_STATE,\n config: {\n streamSubgraphs: true,\n }\n });\n\n useLangGraphInterrupt({\n render: ({ event, resolve }) => ,\n });\n\n // Current itinerary strip\n const ItineraryStrip = () => {\n const selectedFlight = agentState?.itinerary?.flight;\n const selectedHotel = agentState?.itinerary?.hotel;\n const hasExperiences = agentState?.experiences?.length > 0;\n\n return (\n
\n
Current Itinerary:
\n
\n
\n 📍\n Amsterdam → San Francisco\n
\n {selectedFlight && (\n
\n ✈️\n {selectedFlight.airline} - {selectedFlight.price}\n
\n )}\n {selectedHotel && (\n
\n 🏨\n {selectedHotel.name}\n
\n )}\n {hasExperiences && (\n
\n 🎯\n {agentState.experiences.length} experiences planned\n
\n )}\n
\n
\n );\n };\n\n // Compact agent status\n const AgentStatus = () => {\n let activeAgent = 'supervisor';\n if (nodeName?.includes('flights_agent')) {\n activeAgent = 'flights';\n }\n if (nodeName?.includes('hotels_agent')) {\n activeAgent = 'hotels';\n }\n if (nodeName?.includes('experiences_agent')) {\n activeAgent = 'experiences';\n }\n return (\n
\n
Active Agent:
\n
\n
\n 👨‍💼\n Supervisor\n
\n
\n ✈️\n Flights\n
\n
\n 🏨\n Hotels\n
\n
\n 🎯\n Experiences\n
\n
\n
\n )\n };\n\n // Travel details component\n const TravelDetails = () => (\n
\n
\n

✈️ Flight Options

\n
\n {agentState?.flights?.length > 0 ? (\n agentState.flights.map((flight, index) => (\n
\n {flight.airline}:\n {flight.departure} → {flight.arrival} ({flight.duration}) - {flight.price}\n
\n ))\n ) : (\n

No flights found yet

\n )}\n {agentState?.itinerary?.flight && (\n
\n Selected: {agentState.itinerary.flight.airline} - {agentState.itinerary.flight.price}\n
\n )}\n
\n
\n\n
\n

🏨 Hotel Options

\n
\n {agentState?.hotels?.length > 0 ? (\n agentState.hotels.map((hotel, index) => (\n
\n {hotel.name}:\n {hotel.location} - {hotel.price_per_night} ({hotel.rating})\n
\n ))\n ) : (\n

No hotels found yet

\n )}\n {agentState?.itinerary?.hotel && (\n
\n Selected: {agentState.itinerary.hotel.name} - {agentState.itinerary.hotel.price_per_night}\n
\n )}\n
\n
\n\n
\n

🎯 Experiences

\n
\n {agentState?.experiences?.length > 0 ? (\n agentState.experiences.map((experience, index) => (\n
\n
{experience.name}
\n
{experience.type}
\n
{experience.description}
\n
Location: {experience.location}
\n
\n ))\n ) : (\n

No experiences planned yet

\n )}\n
\n
\n
\n );\n\n return (\n
\n \n \n \n
\n );\n}", + "language": "typescript", + "type": "file" + }, + { + "name": "style.css", + "content": "/* Travel Planning Subgraphs Demo Styles */\n/* Essential styles that cannot be achieved with Tailwind classes */\n\n/* Main container with CopilotSidebar layout */\n.travel-planner-container {\n min-height: 100vh;\n padding: 2rem;\n background: linear-gradient(135deg, #f0f9ff 0%, #e0f2fe 100%);\n}\n\n/* Travel content area styles */\n.travel-content {\n max-width: 1200px;\n margin: 0 auto;\n padding: 0 1rem;\n display: flex;\n flex-direction: column;\n gap: 1rem;\n}\n\n/* Itinerary strip */\n.itinerary-strip {\n background: white;\n border-radius: 0.5rem;\n padding: 1rem;\n border: 1px solid #e5e7eb;\n box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);\n}\n\n.itinerary-label {\n font-size: 0.875rem;\n font-weight: 600;\n color: #6b7280;\n margin-bottom: 0.5rem;\n}\n\n.itinerary-items {\n display: flex;\n flex-wrap: wrap;\n gap: 1rem;\n}\n\n.itinerary-item {\n display: flex;\n align-items: center;\n gap: 0.5rem;\n padding: 0.5rem 0.75rem;\n background: #f9fafb;\n border-radius: 0.375rem;\n font-size: 0.875rem;\n}\n\n.item-icon {\n font-size: 1rem;\n}\n\n/* Agent status */\n.agent-status {\n background: white;\n border-radius: 0.5rem;\n padding: 1rem;\n border: 1px solid #e5e7eb;\n box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);\n}\n\n.status-label {\n font-size: 0.875rem;\n font-weight: 600;\n color: #6b7280;\n margin-bottom: 0.5rem;\n}\n\n.agent-indicators {\n display: flex;\n gap: 0.75rem;\n}\n\n.agent-indicator {\n display: flex;\n align-items: center;\n gap: 0.5rem;\n padding: 0.5rem 0.75rem;\n border-radius: 0.375rem;\n font-size: 0.875rem;\n background: #f9fafb;\n border: 1px solid #e5e7eb;\n transition: all 0.2s ease;\n}\n\n.agent-indicator.active {\n background: #dbeafe;\n border-color: #3b82f6;\n color: #1d4ed8;\n box-shadow: 0 0 0 2px rgba(59, 130, 246, 0.1);\n}\n\n/* Travel details sections */\n.travel-details {\n background: white;\n border-radius: 0.5rem;\n padding: 
1rem;\n border: 1px solid #e5e7eb;\n box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);\n display: grid;\n gap: 1rem;\n}\n\n.details-section h4 {\n font-size: 1rem;\n font-weight: 600;\n color: #1f2937;\n margin-bottom: 0.5rem;\n display: flex;\n align-items: center;\n gap: 0.5rem;\n}\n\n.detail-items {\n display: flex;\n flex-direction: column;\n gap: 0.5rem;\n}\n\n.detail-item {\n padding: 0.5rem;\n background: #f9fafb;\n border-radius: 0.25rem;\n font-size: 0.875rem;\n display: flex;\n justify-content: space-between;\n}\n\n.detail-item strong {\n color: #6b7280;\n font-weight: 500;\n}\n\n.detail-tips {\n padding: 0.5rem;\n background: #eff6ff;\n border-radius: 0.25rem;\n font-size: 0.75rem;\n color: #1d4ed8;\n}\n\n.activity-item {\n padding: 0.75rem;\n background: #f0f9ff;\n border-radius: 0.25rem;\n border-left: 2px solid #0ea5e9;\n}\n\n.activity-name {\n font-weight: 600;\n color: #1f2937;\n font-size: 0.875rem;\n margin-bottom: 0.25rem;\n}\n\n.activity-category {\n font-size: 0.75rem;\n color: #0ea5e9;\n margin-bottom: 0.25rem;\n}\n\n.activity-description {\n color: #4b5563;\n font-size: 0.75rem;\n margin-bottom: 0.25rem;\n}\n\n.activity-meta {\n font-size: 0.75rem;\n color: #6b7280;\n}\n\n.no-activities {\n text-align: center;\n color: #9ca3af;\n font-style: italic;\n padding: 1rem;\n font-size: 0.875rem;\n}\n\n/* Interrupt UI for Chat Sidebar (Generative UI) */\n.interrupt-container {\n display: flex;\n flex-direction: column;\n gap: 1rem;\n max-width: 100%;\n padding-top: 34px;\n}\n\n.interrupt-header {\n margin-bottom: 0.5rem;\n}\n\n.agent-name {\n font-size: 0.875rem;\n font-weight: 600;\n color: #1f2937;\n margin: 0 0 0.25rem 0;\n}\n\n.agent-message {\n font-size: 0.75rem;\n color: #6b7280;\n margin: 0;\n line-height: 1.4;\n}\n\n.interrupt-options {\n padding: 0.75rem;\n display: flex;\n flex-direction: column;\n gap: 0.5rem;\n max-height: 300px;\n overflow-y: auto;\n}\n\n.option-card {\n display: flex;\n flex-direction: column;\n gap: 0.25rem;\n padding: 
0.75rem;\n background: #f9fafb;\n border: 1px solid #e5e7eb;\n border-radius: 0.5rem;\n cursor: pointer;\n transition: all 0.2s ease;\n text-align: left;\n position: relative;\n min-height: auto;\n}\n\n.option-card:hover {\n background: #f3f4f6;\n border-color: #d1d5db;\n}\n\n.option-card:active {\n background: #e5e7eb;\n}\n\n.option-card.recommended {\n background: #eff6ff;\n border-color: #3b82f6;\n box-shadow: 0 0 0 1px rgba(59, 130, 246, 0.1);\n}\n\n.option-card.recommended:hover {\n background: #dbeafe;\n}\n\n.recommendation-badge {\n position: absolute;\n top: -2px;\n right: -2px;\n background: #3b82f6;\n color: white;\n font-size: 0.625rem;\n padding: 0.125rem 0.375rem;\n border-radius: 0.75rem;\n font-weight: 500;\n}\n\n.option-header {\n display: flex;\n justify-content: space-between;\n align-items: center;\n margin-bottom: 0.125rem;\n}\n\n.airline-name, .hotel-name {\n font-weight: 600;\n font-size: 0.8rem;\n color: #1f2937;\n}\n\n.price, .rating {\n font-weight: 600;\n font-size: 0.75rem;\n color: #059669;\n}\n\n.route-info, .location-info {\n font-size: 0.7rem;\n color: #6b7280;\n margin-bottom: 0.125rem;\n}\n\n.duration-info, .price-info {\n font-size: 0.7rem;\n color: #9ca3af;\n}\n\n/* Mobile responsive adjustments */\n@media (max-width: 768px) {\n .travel-planner-container {\n padding: 0.5rem;\n padding-bottom: 120px; /* Space for mobile chat */\n }\n \n .travel-content {\n padding: 0;\n gap: 0.75rem;\n }\n \n .itinerary-items {\n flex-direction: column;\n gap: 0.5rem;\n }\n \n .agent-indicators {\n flex-direction: column;\n gap: 0.5rem;\n }\n \n .agent-indicator {\n padding: 0.75rem;\n }\n \n .travel-details {\n padding: 0.75rem;\n }\n\n .interrupt-container {\n padding: 0.5rem;\n }\n\n .option-card {\n padding: 0.625rem;\n }\n\n .interrupt-options {\n max-height: 250px;\n }\n}", + "language": "css", + "type": "file" + }, + { + "name": "README.mdx", + "content": "# LangGraph Subgraphs Demo: Travel Planning Assistant ✈️\n\nThis demo showcases 
**LangGraph subgraphs** through an interactive travel planning assistant. Watch as specialized AI agents collaborate to plan your perfect trip!\n\n## What are LangGraph Subgraphs? 🤖\n\n**Subgraphs** are the key to building modular, scalable AI systems in LangGraph. A subgraph is essentially \"a graph that is used as a node in another graph\" - enabling powerful encapsulation and reusability.\nFor more info, check out the [LangGraph docs](https://langchain-ai.github.io/langgraph/concepts/subgraphs/).\n\n### Key Concepts\n\n- **Encapsulation**: Each subgraph handles a specific domain with its own expertise\n- **Modularity**: Subgraphs can be developed, tested, and maintained independently \n- **Reusability**: The same subgraph can be used across multiple parent graphs\n- **State Communication**: Subgraphs can share state or use different schemas with transformations\n\n## Demo Architecture 🗺️\n\nThis travel planner demonstrates **supervisor-coordinated subgraphs** with **human-in-the-loop** decision making:\n\n### Parent Graph: Travel Supervisor\n- **Role**: Coordinates the travel planning process and routes to specialized agents\n- **State Management**: Maintains a shared itinerary object across all subgraphs\n- **Intelligence**: Determines what's needed and when each agent should be called\n\n### Subgraph 1: ✈️ Flights Agent\n- **Specialization**: Finding and booking flight options\n- **Process**: Presents flight options from Amsterdam to San Francisco with recommendations\n- **Interaction**: Uses interrupts to let users choose their preferred flight\n- **Data**: Static flight options (KLM, United) with pricing and duration\n\n### Subgraph 2: 🏨 Hotels Agent \n- **Specialization**: Finding and booking accommodation\n- **Process**: Shows hotel options in San Francisco with different price points\n- **Interaction**: Uses interrupts for user to select their preferred hotel\n- **Data**: Static hotel options (Hotel Zephyr, Ritz-Carlton, Hotel Zoe)\n\n### Subgraph 3: 🎯 
Experiences Agent\n- **Specialization**: Curating restaurants and activities\n- **Process**: AI-powered recommendations based on selected flights and hotels\n- **Features**: Combines 2 restaurants and 2 activities with location-aware suggestions\n- **Data**: Static experiences (Pier 39, Golden Gate Bridge, Swan Oyster Depot, Tartine Bakery)\n\n## How It Works 🔄\n\n1. **User Request**: \"Help me plan a trip to San Francisco\"\n2. **Supervisor Analysis**: Determines what travel components are needed\n3. **Sequential Routing**: Routes to each agent in logical order:\n - First: Flights Agent (get transportation sorted)\n - Then: Hotels Agent (book accommodation) \n - Finally: Experiences Agent (plan activities)\n4. **Human Decisions**: Each agent presents options and waits for user choice via interrupts\n5. **State Building**: Selected choices are stored in the shared itinerary object\n6. **Completion**: All agents report back to supervisor for final coordination\n\n## State Communication Patterns 📊\n\n### Shared State Schema\nAll subgraph agents share and contribute to a common state object. When any agent updates the shared state, these changes are immediately reflected in the frontend through real-time syncing. This ensures that:\n\n- **Flight selections** from the Flights Agent are visible to subsequent agents\n- **Hotel choices** influence the Experiences Agent's recommendations \n- **All updates** are synchronized with the frontend UI in real-time\n- **State persistence** maintains the travel itinerary throughout the workflow\n\n### Human-in-the-Loop Pattern\nTwo of the specialist agents use **interrupts** to pause execution and gather user preferences:\n\n- **Flights Agent**: Presents options → interrupt → waits for selection → continues\n- **Hotels Agent**: Shows hotels → interrupt → waits for choice → continues\n\n## Try These Examples! 
💡\n\n### Getting Started\n- \"Help me plan a trip to San Francisco\"\n- \"I want to visit San Francisco from Amsterdam\"\n- \"Plan my travel itinerary\"\n\n### During the Process\nWhen the Flights Agent presents options:\n- Choose between KLM ($650, 11h 30m) or United ($720, 12h 15m)\n\nWhen the Hotels Agent shows accommodations:\n- Select from Hotel Zephyr, The Ritz-Carlton, or Hotel Zoe\n\nThe Experiences Agent will then provide tailored recommendations based on your choices!\n\n## Frontend Capabilities 👁️\n\n- **Human-in-the-loop with interrupts** from subgraphs for user decision making\n- **Subgraphs detection and streaming** to show which agent is currently active\n- **Real-time state updates** as the shared itinerary is built across agents\n", + "language": "markdown", + "type": "file" + }, + { + "name": "agent.py", + "content": "\"\"\"\nA travel agent supervisor demo showcasing multi-agent architecture with subgraphs.\nThe supervisor coordinates specialized agents: flights finder, hotels finder, and experiences finder.\n\"\"\"\n\nfrom typing import Dict, List, Any, Optional, Annotated, Union\nfrom dataclasses import dataclass\nimport json\nimport os\nfrom pydantic import BaseModel, Field\n\n# LangGraph imports\nfrom langchain_core.runnables import RunnableConfig\nfrom langgraph.graph import StateGraph, END, START\nfrom langgraph.types import Command, interrupt\nfrom langgraph.graph import MessagesState\n\n# OpenAI imports\nfrom langchain_openai import ChatOpenAI\nfrom langchain_core.messages import SystemMessage, AIMessage\n\ndef create_interrupt(message: str, options: List[Any], recommendation: Any, agent: str):\n return interrupt({\n \"message\": message,\n \"options\": options,\n \"recommendation\": recommendation,\n \"agent\": agent,\n })\n\n# State schema for travel planning\n@dataclass\nclass Flight:\n airline: str\n departure: str\n arrival: str\n price: str\n duration: str\n\n@dataclass\nclass Hotel:\n name: str\n location: str\n price_per_night: 
str\n rating: str\n\n@dataclass\nclass Experience:\n name: str\n type: str # \"restaurant\" or \"activity\"\n description: str\n location: str\n\ndef merge_itinerary(left: Union[dict, None] = None, right: Union[dict, None] = None) -> dict:\n \"\"\"Custom reducer to merge shopping cart updates.\"\"\"\n if not left:\n left = {}\n if not right:\n right = {}\n\n return {**left, **right}\n\nclass TravelAgentState(MessagesState):\n \"\"\"Shared state for the travel agent system\"\"\"\n # Travel request details\n origin: str = \"\"\n destination: str = \"\"\n\n # Results from each agent\n flights: List[Flight] = None\n hotels: List[Hotel] = None\n experiences: List[Experience] = None\n\n itinerary: Annotated[dict, merge_itinerary] = None\n\n # Tools available to all agents\n tools: List[Any] = None\n\n # Supervisor routing\n next_agent: Optional[str] = None\n\n# Static data for demonstration\nSTATIC_FLIGHTS = [\n Flight(\"KLM\", \"Amsterdam (AMS)\", \"San Francisco (SFO)\", \"$650\", \"11h 30m\"),\n Flight(\"United\", \"Amsterdam (AMS)\", \"San Francisco (SFO)\", \"$720\", \"12h 15m\")\n]\n\nSTATIC_HOTELS = [\n Hotel(\"Hotel Zephyr\", \"Fisherman's Wharf\", \"$280/night\", \"4.2 stars\"),\n Hotel(\"The Ritz-Carlton\", \"Nob Hill\", \"$550/night\", \"4.8 stars\"),\n Hotel(\"Hotel Zoe\", \"Union Square\", \"$320/night\", \"4.4 stars\")\n]\n\nSTATIC_EXPERIENCES = [\n Experience(\"Pier 39\", \"activity\", \"Iconic waterfront destination with shops and sea lions\", \"Fisherman's Wharf\"),\n Experience(\"Golden Gate Bridge\", \"activity\", \"World-famous suspension bridge with stunning views\", \"Golden Gate\"),\n Experience(\"Swan Oyster Depot\", \"restaurant\", \"Historic seafood counter serving fresh oysters\", \"Polk Street\"),\n Experience(\"Tartine Bakery\", \"restaurant\", \"Artisanal bakery famous for bread and pastries\", \"Mission District\")\n]\n\n# Flights finder subgraph\nasync def flights_finder(state: TravelAgentState, config: RunnableConfig):\n \"\"\"Subgraph 
that finds flight options\"\"\"\n\n # Simulate flight search with static data\n flights = STATIC_FLIGHTS\n\n selected_flight = state.get('itinerary', {}).get('flight', None)\n if not selected_flight:\n selected_flight = create_interrupt(\n message=f\"\"\"\n Found {len(flights)} flight options from {state.get('origin', 'Amsterdam')} to {state.get('destination', 'San Francisco')}.\n I recommend choosing the flight by {flights[0].airline} since it's known to be on time and cheaper.\n \"\"\",\n options=flights,\n recommendation=flights[0],\n agent=\"flights\"\n )\n\n if isinstance(selected_flight, str):\n selected_flight = json.loads(selected_flight)\n return Command(\n goto=END,\n update={\n \"flights\": flights,\n \"itinerary\": {\n \"flight\": selected_flight\n },\n \"messages\": state[\"messages\"] + [{\n \"role\": \"assistant\",\n \"content\": f\"Flights Agent: Great. I'll book you the {selected_flight[\"airline\"]} flight from {selected_flight[\"departure\"]} to {selected_flight[\"arrival\"]}.\"\n }]\n }\n )\n\n# Hotels finder subgraph\nasync def hotels_finder(state: TravelAgentState, config: RunnableConfig):\n \"\"\"Subgraph that finds hotel options\"\"\"\n\n # Simulate hotel search with static data\n hotels = STATIC_HOTELS\n selected_hotel = state.get('itinerary', {}).get('hotel', None)\n if not selected_hotel:\n selected_hotel = create_interrupt(\n message=f\"\"\"\n Found {len(hotels)} accommodation options in {state.get('destination', 'San Francisco')}.\n I recommend choosing the {hotels[2].name} since it strikes the balance between rating, price, and location.\n \"\"\",\n options=hotels,\n recommendation=hotels[2],\n agent=\"hotels\"\n )\n\n if isinstance(selected_hotel, str):\n selected_hotel = json.loads(selected_hotel)\n return Command(\n goto=END,\n update={\n \"hotels\": hotels,\n \"itinerary\": {\n \"hotel\": selected_hotel\n },\n \"messages\": state[\"messages\"] + [{\n \"role\": \"assistant\",\n \"content\": f\"Hotels Agent: Excellent choice! 
You'll like {selected_hotel[\"name\"]}.\"\n }]\n }\n )\n\n# Experiences finder subgraph\nasync def experiences_finder(state: TravelAgentState, config: RunnableConfig):\n \"\"\"Subgraph that finds restaurant and activity recommendations\"\"\"\n\n # Filter experiences (2 restaurants, 2 activities)\n restaurants = [exp for exp in STATIC_EXPERIENCES if exp.type == \"restaurant\"][:2]\n activities = [exp for exp in STATIC_EXPERIENCES if exp.type == \"activity\"][:2]\n experiences = restaurants + activities\n\n model = ChatOpenAI(model=\"gpt-4o\")\n\n if config is None:\n config = RunnableConfig(recursion_limit=25)\n\n itinerary = state.get(\"itinerary\", {})\n\n system_prompt = f\"\"\"\n You are the experiences agent. Your job is to find restaurants and activities for the user.\n You already went ahead and found a bunch of experiences. All you have to do now, is to let the user know of your findings.\n \n Current status:\n - Origin: {state.get('origin', 'Amsterdam')}\n - Destination: {state.get('destination', 'San Francisco')}\n - Flight chosen: {itinerary.get(\"hotel\", None)}\n - Hotel chosen: {itinerary.get(\"hotel\", None)}\n - activities found: {activities}\n - restaurants found: {restaurants}\n \"\"\"\n\n # Get supervisor decision\n response = await model.ainvoke([\n SystemMessage(content=system_prompt),\n *state[\"messages\"],\n ], config)\n\n return Command(\n goto=END,\n update={\n \"experiences\": experiences,\n \"messages\": state[\"messages\"] + [response]\n }\n )\n\nclass SupervisorResponseFormatter(BaseModel):\n \"\"\"Always use this tool to structure your response to the user.\"\"\"\n answer: str = Field(description=\"The answer to the user\")\n next_agent: str | None = Field(description=\"The agent to go to. 
Not required if you do not want to route to another agent.\")\n\n# Supervisor agent\nasync def supervisor_agent(state: TravelAgentState, config: RunnableConfig):\n \"\"\"Main supervisor that coordinates all subgraphs\"\"\"\n\n itinerary = state.get(\"itinerary\", {})\n\n # Check what's already completed\n has_flights = itinerary.get(\"flight\", None) is not None\n has_hotels = itinerary.get(\"hotel\", None) is not None\n has_experiences = state.get(\"experiences\", None) is not None\n\n system_prompt = f\"\"\"\n You are a travel planning supervisor. Your job is to coordinate specialized agents to help plan a trip.\n \n Current status:\n - Origin: {state.get('origin', 'Amsterdam')}\n - Destination: {state.get('destination', 'San Francisco')}\n - Flights found: {has_flights}\n - Hotels found: {has_hotels}\n - Experiences found: {has_experiences}\n - Itinerary (Things that the user has already confirmed selection on): {json.dumps(itinerary, indent=2)}\n \n Available agents:\n - flights_agent: Finds flight options\n - hotels_agent: Finds hotel options \n - experiences_agent: Finds restaurant and activity recommendations\n - {END}: Mark task as complete when all information is gathered\n \n You must route to the appropriate agent based on what's missing. 
Once all agents have completed their tasks, route to 'complete'.\n \"\"\"\n\n # Define the model\n model = ChatOpenAI(model=\"gpt-4o\")\n\n if config is None:\n config = RunnableConfig(recursion_limit=25)\n\n # Bind the routing tool\n model_with_tools = model.bind_tools(\n [SupervisorResponseFormatter],\n parallel_tool_calls=False,\n )\n\n # Get supervisor decision\n response = await model_with_tools.ainvoke([\n SystemMessage(content=system_prompt),\n *state[\"messages\"],\n ], config)\n\n messages = state[\"messages\"] + [response]\n\n # Handle tool calls for routing\n if hasattr(response, \"tool_calls\") and response.tool_calls:\n tool_call = response.tool_calls[0]\n\n if isinstance(tool_call, dict):\n tool_call_args = tool_call[\"args\"]\n else:\n tool_call_args = tool_call.args\n\n next_agent = tool_call_args[\"next_agent\"]\n\n # Add tool response\n tool_response = {\n \"role\": \"tool\",\n \"content\": f\"Routing to {next_agent} and providing the answer\",\n \"tool_call_id\": tool_call.id if hasattr(tool_call, 'id') else tool_call[\"id\"]\n }\n\n messages = messages + [tool_response, AIMessage(content=tool_call_args[\"answer\"])]\n\n if next_agent is not None:\n return Command(goto=next_agent)\n\n # Fallback if no tool call\n return Command(\n goto=END,\n update={\"messages\": messages}\n )\n\n# Create subgraphs\nflights_graph = StateGraph(TravelAgentState)\nflights_graph.add_node(\"flights_agent_chat_node\", flights_finder)\nflights_graph.set_entry_point(\"flights_agent_chat_node\")\nflights_graph.add_edge(START, \"flights_agent_chat_node\")\nflights_graph.add_edge(\"flights_agent_chat_node\", END)\nflights_subgraph = flights_graph.compile()\n\nhotels_graph = StateGraph(TravelAgentState)\nhotels_graph.add_node(\"hotels_agent_chat_node\", hotels_finder)\nhotels_graph.set_entry_point(\"hotels_agent_chat_node\")\nhotels_graph.add_edge(START, \"hotels_agent_chat_node\")\nhotels_graph.add_edge(\"hotels_agent_chat_node\", END)\nhotels_subgraph = 
hotels_graph.compile()\n\nexperiences_graph = StateGraph(TravelAgentState)\nexperiences_graph.add_node(\"experiences_agent_chat_node\", experiences_finder)\nexperiences_graph.set_entry_point(\"experiences_agent_chat_node\")\nexperiences_graph.add_edge(START, \"experiences_agent_chat_node\")\nexperiences_graph.add_edge(\"experiences_agent_chat_node\", END)\nexperiences_subgraph = experiences_graph.compile()\n\n# Main supervisor workflow\nworkflow = StateGraph(TravelAgentState)\n\n# Add supervisor and subgraphs as nodes\nworkflow.add_node(\"supervisor\", supervisor_agent)\nworkflow.add_node(\"flights_agent\", flights_subgraph)\nworkflow.add_node(\"hotels_agent\", hotels_subgraph)\nworkflow.add_node(\"experiences_agent\", experiences_subgraph)\n\n# Set entry point\nworkflow.set_entry_point(\"supervisor\")\nworkflow.add_edge(START, \"supervisor\")\n\n# Add edges back to supervisor after each subgraph\nworkflow.add_edge(\"flights_agent\", \"supervisor\")\nworkflow.add_edge(\"hotels_agent\", \"supervisor\")\nworkflow.add_edge(\"experiences_agent\", \"supervisor\")\n\n# Conditionally use a checkpointer based on the environment\n# Check for multiple indicators that we're running in LangGraph dev/API mode\nis_fast_api = os.environ.get(\"LANGGRAPH_FAST_API\", \"false\").lower() == \"true\"\n\n# Compile the graph\nif is_fast_api:\n # For CopilotKit and other contexts, use MemorySaver\n from langgraph.checkpoint.memory import MemorySaver\n memory = MemorySaver()\n graph = workflow.compile(checkpointer=memory)\nelse:\n # When running in LangGraph API/dev, don't use a custom checkpointer\n graph = workflow.compile()\n", "language": "python", "type": "file" }, { "name": "agent.ts", - "content": "/**\n * An example demonstrating tool-based generative UI using LangGraph.\n */\n\nimport { ChatOpenAI } from \"@langchain/openai\";\nimport { SystemMessage } from \"@langchain/core/messages\";\nimport { RunnableConfig } from \"@langchain/core/runnables\";\nimport { Command, Annotation, 
MessagesAnnotation, StateGraph, END, START } from \"@langchain/langgraph\";\n\n// List of available images (modify path if needed)\nconst IMAGE_LIST = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\n// This tool generates a haiku on the server.\n// The tool call will be streamed to the frontend as it is being generated.\nconst GENERATE_HAIKU_TOOL = {\n type: \"function\",\n function: {\n name: \"generate_haiku\",\n description: \"Generate a haiku in Japanese and its English translation. 
Also select exactly 3 relevant images from the provided list based on the haiku's theme.\",\n parameters: {\n type: \"object\",\n properties: {\n japanese: {\n type: \"array\",\n items: {\n type: \"string\"\n },\n description: \"An array of three lines of the haiku in Japanese\"\n },\n english: {\n type: \"array\",\n items: {\n type: \"string\"\n },\n description: \"An array of three lines of the haiku in English\"\n },\n image_names: {\n type: \"array\",\n items: {\n type: \"string\"\n },\n description: \"An array of EXACTLY THREE image filenames from the provided list that are most relevant to the haiku.\"\n }\n },\n required: [\"japanese\", \"english\", \"image_names\"]\n }\n }\n};\n\nexport const AgentStateAnnotation = Annotation.Root({\n tools: Annotation(),\n ...MessagesAnnotation.spec,\n});\nexport type AgentState = typeof AgentStateAnnotation.State;\n\nasync function chatNode(state: AgentState, config?: RunnableConfig): Promise {\n /**\n * The main function handling chat and tool calls.\n */\n // Prepare the image list string for the prompt\n const imageListStr = IMAGE_LIST.map(img => `- ${img}`).join(\"\\n\");\n\n const systemPrompt = `\n You assist the user in generating a haiku.\n When generating a haiku using the 'generate_haiku' tool, you MUST also select exactly 3 image filenames from the following list that are most relevant to the haiku's content or theme. 
Return the filenames in the 'image_names' parameter.\n \n Available images:\n ${imageListStr}\n \n Don't provide the relevant image names in your final response to the user.\n `;\n\n // Define the model\n const model = new ChatOpenAI({ model: \"gpt-4o\" });\n \n // Define config for the model\n if (!config) {\n config = { recursionLimit: 25 };\n }\n\n // Bind the tools to the model\n const modelWithTools = model.bindTools(\n [GENERATE_HAIKU_TOOL],\n {\n // Disable parallel tool calls to avoid race conditions\n parallel_tool_calls: false,\n }\n );\n\n // Run the model to generate a response\n const response = await modelWithTools.invoke([\n new SystemMessage({ content: systemPrompt }),\n ...state.messages,\n ], config);\n\n // Return Command to end with updated messages\n return new Command({\n goto: END,\n update: {\n messages: [...state.messages, response]\n }\n });\n}\n\n// Define the graph\nconst workflow = new StateGraph(AgentStateAnnotation);\n\n// Add nodes\nworkflow.addNode(\"chat_node\", chatNode);\n\n// Add edges\nworkflow.setEntryPoint(\"chat_node\");\nworkflow.addEdge(START, \"chat_node\");\nworkflow.addEdge(\"chat_node\", END);\n\n// Compile the graph\nexport const toolBasedGenerativeUiGraph = workflow.compile();", + "content": "/**\n * A travel agent supervisor demo showcasing multi-agent architecture with subgraphs.\n * The supervisor coordinates specialized agents: flights finder, hotels finder, and experiences finder.\n */\n\nimport { ChatOpenAI } from \"@langchain/openai\";\nimport { SystemMessage, AIMessage, ToolMessage } from \"@langchain/core/messages\";\nimport { RunnableConfig } from \"@langchain/core/runnables\";\nimport { \n Annotation, \n MessagesAnnotation, \n StateGraph, \n Command, \n START, \n END, \n interrupt \n} from \"@langchain/langgraph\";\n\n// Travel data interfaces\ninterface Flight {\n airline: string;\n departure: string;\n arrival: string;\n price: string;\n duration: string;\n}\n\ninterface Hotel {\n name: string;\n 
location: string;\n price_per_night: string;\n rating: string;\n}\n\ninterface Experience {\n name: string;\n type: \"restaurant\" | \"activity\";\n description: string;\n location: string;\n}\n\ninterface Itinerary {\n flight?: Flight;\n hotel?: Hotel;\n}\n\n// Custom reducer to merge itinerary updates\nfunction mergeItinerary(left: Itinerary | null, right?: Itinerary | null): Itinerary {\n if (!left) left = {};\n if (!right) right = {};\n return { ...left, ...right };\n}\n\n// State annotation for travel agent system\nexport const TravelAgentStateAnnotation = Annotation.Root({\n origin: Annotation(),\n destination: Annotation(),\n flights: Annotation(),\n hotels: Annotation(),\n experiences: Annotation(),\n\n // Itinerary with custom merger\n itinerary: Annotation({\n reducer: mergeItinerary,\n default: () => null\n }),\n\n // Tools available to all agents\n tools: Annotation({\n reducer: (x, y) => y ?? x,\n default: () => []\n }),\n\n // Supervisor routing\n next_agent: Annotation(),\n ...MessagesAnnotation.spec,\n});\n\nexport type TravelAgentState = typeof TravelAgentStateAnnotation.State;\n\n// Static data for demonstration\nconst STATIC_FLIGHTS: Flight[] = [\n { airline: \"KLM\", departure: \"Amsterdam (AMS)\", arrival: \"San Francisco (SFO)\", price: \"$650\", duration: \"11h 30m\" },\n { airline: \"United\", departure: \"Amsterdam (AMS)\", arrival: \"San Francisco (SFO)\", price: \"$720\", duration: \"12h 15m\" }\n];\n\nconst STATIC_HOTELS: Hotel[] = [\n { name: \"Hotel Zephyr\", location: \"Fisherman's Wharf\", price_per_night: \"$280/night\", rating: \"4.2 stars\" },\n { name: \"The Ritz-Carlton\", location: \"Nob Hill\", price_per_night: \"$550/night\", rating: \"4.8 stars\" },\n { name: \"Hotel Zoe\", location: \"Union Square\", price_per_night: \"$320/night\", rating: \"4.4 stars\" }\n];\n\nconst STATIC_EXPERIENCES: Experience[] = [\n { name: \"Pier 39\", type: \"activity\", description: \"Iconic waterfront destination with shops and sea lions\", 
location: \"Fisherman's Wharf\" },\n { name: \"Golden Gate Bridge\", type: \"activity\", description: \"World-famous suspension bridge with stunning views\", location: \"Golden Gate\" },\n { name: \"Swan Oyster Depot\", type: \"restaurant\", description: \"Historic seafood counter serving fresh oysters\", location: \"Polk Street\" },\n { name: \"Tartine Bakery\", type: \"restaurant\", description: \"Artisanal bakery famous for bread and pastries\", location: \"Mission District\" }\n];\n\nfunction createInterrupt(message: string, options: any[], recommendation: any, agent: string) {\n return interrupt({\n message,\n options,\n recommendation,\n agent,\n });\n}\n\n// Flights finder subgraph\nasync function flightsFinder(state: TravelAgentState, config?: RunnableConfig): Promise {\n // Simulate flight search with static data\n const flights = STATIC_FLIGHTS;\n\n const selectedFlight = state.itinerary?.flight;\n \n let flightChoice: Flight;\n const message = `Found ${flights.length} flight options from ${state.origin || 'Amsterdam'} to ${state.destination || 'San Francisco'}.\\n` +\n `I recommend choosing the flight by ${flights[0].airline} since it's known to be on time and cheaper.`\n if (!selectedFlight) {\n const interruptResult = createInterrupt(\n message,\n flights,\n flights[0],\n \"flights\"\n );\n \n // Parse the interrupt result if it's a string\n flightChoice = typeof interruptResult === 'string' ? JSON.parse(interruptResult) : interruptResult;\n } else {\n flightChoice = selectedFlight;\n }\n\n return new Command({\n goto: END,\n update: {\n flights: flights,\n itinerary: {\n flight: flightChoice\n },\n // Return all \"messages\" that the agent was sending\n messages: [\n ...state.messages,\n new AIMessage({\n content: message,\n }),\n new AIMessage({\n content: `Flights Agent: Great. 
I'll book you the ${flightChoice.airline} flight from ${flightChoice.departure} to ${flightChoice.arrival}.`,\n }),\n ]\n }\n });\n}\n\n// Hotels finder subgraph\nasync function hotelsFinder(state: TravelAgentState, config?: RunnableConfig): Promise {\n // Simulate hotel search with static data\n const hotels = STATIC_HOTELS;\n const selectedHotel = state.itinerary?.hotel;\n \n let hotelChoice: Hotel;\n const message = `Found ${hotels.length} accommodation options in ${state.destination || 'San Francisco'}.\\n\n I recommend choosing the ${hotels[2].name} since it strikes the balance between rating, price, and location.`\n if (!selectedHotel) {\n const interruptResult = createInterrupt(\n message,\n hotels,\n hotels[2],\n \"hotels\"\n );\n \n // Parse the interrupt result if it's a string\n hotelChoice = typeof interruptResult === 'string' ? JSON.parse(interruptResult) : interruptResult;\n } else {\n hotelChoice = selectedHotel;\n }\n\n return new Command({\n goto: END,\n update: {\n hotels: hotels,\n itinerary: {\n hotel: hotelChoice\n },\n // Return all \"messages\" that the agent was sending\n messages: [\n ...state.messages,\n new AIMessage({\n content: message,\n }),\n new AIMessage({\n content: `Hotels Agent: Excellent choice! You'll like ${hotelChoice.name}.`\n }),\n ]\n }\n });\n}\n\n// Experiences finder subgraph\nasync function experiencesFinder(state: TravelAgentState, config?: RunnableConfig): Promise {\n // Filter experiences (2 restaurants, 2 activities)\n const restaurants = STATIC_EXPERIENCES.filter(exp => exp.type === \"restaurant\").slice(0, 2);\n const activities = STATIC_EXPERIENCES.filter(exp => exp.type === \"activity\").slice(0, 2);\n const experiences = [...restaurants, ...activities];\n\n const model = new ChatOpenAI({ model: \"gpt-4o\" });\n\n if (!config) {\n config = { recursionLimit: 25 };\n }\n\n const itinerary = state.itinerary || {};\n\n const systemPrompt = `\n You are the experiences agent. 
Your job is to find restaurants and activities for the user.\n You already went ahead and found a bunch of experiences. All you have to do now, is to let the user know of your findings.\n \n Current status:\n - Origin: ${state.origin || 'Amsterdam'}\n - Destination: ${state.destination || 'San Francisco'}\n - Flight chosen: ${JSON.stringify(itinerary.flight) || 'None'}\n - Hotel chosen: ${JSON.stringify(itinerary.hotel) || 'None'}\n - Activities found: ${JSON.stringify(activities)}\n - Restaurants found: ${JSON.stringify(restaurants)}\n `;\n\n // Get experiences response\n const response = await model.invoke([\n new SystemMessage({ content: systemPrompt }),\n ...state.messages,\n ], config);\n\n return new Command({\n goto: END,\n update: {\n experiences: experiences,\n messages: [...state.messages, response]\n }\n });\n}\n\n// Supervisor response tool\nconst SUPERVISOR_RESPONSE_TOOL = {\n type: \"function\" as const,\n function: {\n name: \"supervisor_response\",\n description: \"Always use this tool to structure your response to the user.\",\n parameters: {\n type: \"object\",\n properties: {\n answer: {\n type: \"string\",\n description: \"The answer to the user\"\n },\n next_agent: {\n type: \"string\",\n enum: [\"flights_agent\", \"hotels_agent\", \"experiences_agent\", \"complete\"],\n description: \"The agent to go to. Not required if you do not want to route to another agent.\"\n }\n },\n required: [\"answer\"]\n }\n }\n};\n\n// Supervisor agent\nasync function supervisorAgent(state: TravelAgentState, config?: RunnableConfig): Promise {\n const itinerary = state.itinerary || {};\n\n // Check what's already completed\n const hasFlights = itinerary.flight !== undefined;\n const hasHotels = itinerary.hotel !== undefined;\n const hasExperiences = state.experiences !== null;\n\n const systemPrompt = `\n You are a travel planning supervisor. 
Your job is to coordinate specialized agents to help plan a trip.\n \n Current status:\n - Origin: ${state.origin || 'Amsterdam'}\n - Destination: ${state.destination || 'San Francisco'}\n - Flights found: ${hasFlights}\n - Hotels found: ${hasHotels}\n - Experiences found: ${hasExperiences}\n - Itinerary (Things that the user has already confirmed selection on): ${JSON.stringify(itinerary, null, 2)}\n \n Available agents:\n - flights_agent: Finds flight options\n - hotels_agent: Finds hotel options \n - experiences_agent: Finds restaurant and activity recommendations\n - complete: Mark task as complete when all information is gathered\n \n You must route to the appropriate agent based on what's missing. Once all agents have completed their tasks, route to 'complete'.\n `;\n\n // Define the model\n const model = new ChatOpenAI({ model: \"gpt-4o\" });\n\n if (!config) {\n config = { recursionLimit: 25 };\n }\n\n // Bind the routing tool\n const modelWithTools = model.bindTools(\n [SUPERVISOR_RESPONSE_TOOL],\n {\n parallel_tool_calls: false,\n }\n );\n\n // Get supervisor decision\n const response = await modelWithTools.invoke([\n new SystemMessage({ content: systemPrompt }),\n ...state.messages,\n ], config);\n\n let messages = [...state.messages, response];\n\n // Handle tool calls for routing\n if (response.tool_calls && response.tool_calls.length > 0) {\n const toolCall = response.tool_calls[0];\n const toolCallArgs = toolCall.args;\n const nextAgent = toolCallArgs.next_agent;\n\n const toolResponse = new ToolMessage({\n tool_call_id: toolCall.id!,\n content: `Routing to ${nextAgent} and providing the answer`,\n });\n\n messages = [\n ...messages, \n toolResponse, \n new AIMessage({ content: toolCallArgs.answer })\n ];\n\n if (nextAgent && nextAgent !== \"complete\") {\n return new Command({ goto: nextAgent });\n }\n }\n\n // Fallback if no tool call or complete\n return new Command({\n goto: END,\n update: { messages }\n });\n}\n\n// Create subgraphs\nconst 
flightsGraph = new StateGraph(TravelAgentStateAnnotation);\nflightsGraph.addNode(\"flights_agent_chat_node\", flightsFinder);\nflightsGraph.setEntryPoint(\"flights_agent_chat_node\");\nflightsGraph.addEdge(START, \"flights_agent_chat_node\");\nflightsGraph.addEdge(\"flights_agent_chat_node\", END);\nconst flightsSubgraph = flightsGraph.compile();\n\nconst hotelsGraph = new StateGraph(TravelAgentStateAnnotation);\nhotelsGraph.addNode(\"hotels_agent_chat_node\", hotelsFinder);\nhotelsGraph.setEntryPoint(\"hotels_agent_chat_node\");\nhotelsGraph.addEdge(START, \"hotels_agent_chat_node\");\nhotelsGraph.addEdge(\"hotels_agent_chat_node\", END);\nconst hotelsSubgraph = hotelsGraph.compile();\n\nconst experiencesGraph = new StateGraph(TravelAgentStateAnnotation);\nexperiencesGraph.addNode(\"experiences_agent_chat_node\", experiencesFinder);\nexperiencesGraph.setEntryPoint(\"experiences_agent_chat_node\");\nexperiencesGraph.addEdge(START, \"experiences_agent_chat_node\");\nexperiencesGraph.addEdge(\"experiences_agent_chat_node\", END);\nconst experiencesSubgraph = experiencesGraph.compile();\n\n// Main supervisor workflow\nconst workflow = new StateGraph(TravelAgentStateAnnotation);\n\n// Add supervisor and subgraphs as nodes\nworkflow.addNode(\"supervisor\", supervisorAgent, { ends: ['flights_agent', 'hotels_agent', 'experiences_agent', END] });\nworkflow.addNode(\"flights_agent\", flightsSubgraph);\nworkflow.addNode(\"hotels_agent\", hotelsSubgraph);\nworkflow.addNode(\"experiences_agent\", experiencesSubgraph);\n\n// Set entry point\nworkflow.setEntryPoint(\"supervisor\");\nworkflow.addEdge(START, \"supervisor\");\n\n// Add edges back to supervisor after each subgraph\nworkflow.addEdge(\"flights_agent\", \"supervisor\");\nworkflow.addEdge(\"hotels_agent\", \"supervisor\");\nworkflow.addEdge(\"experiences_agent\", \"supervisor\");\n\n// Compile the graph\nexport const subGraphsAgentGraph = workflow.compile();\n", "language": "ts", "type": "file" } @@ -1005,7 +1173,7 @@ 
"agno::tool_based_generative_ui": [ { "name": "page.tsx", - "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku{\n japanese : string[] | [],\n english : string[] | [],\n image_names : string[] | [],\n selectedImage : string | null,\n}\n\ninterface HaikuCardProps{\n generatedHaiku : GenerateHaiku | Partial\n setHaikus : Dispatch>\n haikus : GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n
\n
\n
\n \n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )}\n \n \n );\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction HaikuCard({generatedHaiku, setHaikus, haikus} : HaikuCardProps) {\n return (\n
\n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n
\n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n const validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n if (correctedNames.length < 3) {\n const availableFallbacks = VALID_IMAGE_NAMES.filter(name => !usedValidNames.has(name));\n for (let i = availableFallbacks.length - 1; i > 0; i--) {\n const j = Math.floor(Math.random() * (i + 1));\n [availableFallbacks[i], availableFallbacks[j]] = [availableFallbacks[j], availableFallbacks[i]];\n }\n\n while (correctedNames.length < 3 && availableFallbacks.length > 0) {\n const fallbackName = availableFallbacks.pop();\n if (fallbackName) {\n correctedNames.push(fallbackName);\n }\n }\n }\n\n while (correctedNames.length < 3 && VALID_IMAGE_NAMES.length > 0) {\n const fallbackName = VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n correctedNames.push(fallbackName);\n }\n\n return correctedNames.slice(0, 3);\n };\n\n useCopilotAction({\n name: \"generate_haiku\",\n available: \"frontend\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n 
description: \"Names of 3 relevant images\",\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [...prev, newHaiku]);\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const generatedHaikus = useMemo(() => (\n haikus.filter((haiku) => haiku.english[0] !== \"A placeholder verse—\")\n ), [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n {/* Thumbnail List */}\n {Boolean(generatedHaikus.length) && !isMobile && (\n
\n {generatedHaikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n )}\n\n {/* Main Display */}\n
\n
\n {haikus.filter((_haiku: Haiku, index: number) => {\n if (haikus.length == 1) return true;\n else return index == activeIndex + 1;\n }).map((haiku, index) => (\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n \n \n );\n}\n", + "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku {\n japanese: string[] | [],\n english: string[] | [],\n image_names: string[] | [],\n selectedImage: string | null,\n}\n\ninterface HaikuCardProps {\n generatedHaiku: GenerateHaiku | Partial\n setHaikus: Dispatch>\n haikus: GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && }\n \n \n );\n}\n\nfunction MobileChat({ chatTitle, chatDescription, initialLabel }: { chatTitle: string, chatDescription: string, initialLabel: string }) {\n const defaultChatHeight = 50\n\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n return (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n \n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction getRandomImage(): string {\n return VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n}\n\nconst validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n while (correctedNames.length < 3) {\n const nextImage = getRandomImage();\n if (!usedValidNames.has(nextImage)) {\n correctedNames.push(nextImage);\n usedValidNames.add(nextImage);\n }\n }\n\n return correctedNames.slice(0, 3);\n};\n\nfunction HaikuCard({ generatedHaiku, setHaikus, haikus }: HaikuCardProps) {\n return (\n \n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n \n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n useCopilotAction({\n name: \"generate_haiku\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n description: `Names of 3 relevant images selected from the following: \\n -${VALID_IMAGE_NAMES.join('\\n -')}`,\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [newHaiku, ...prev].filter(h => h.english[0] !== \"A placeholder verse—\"));\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n \n\n {/* Main Display */}\n
\n
\n {haikus.map((haiku, index) => (\n (haikus.length == 1 || index == activeIndex) && (\n\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n setHaikus((prevHaikus) => {\n return prevHaikus.map((h, idx) => {\n if (idx === index) {\n return { ...h, selectedImage: imageName }\n } else {\n return { ...h }\n }\n })\n })}\n />\n ))}\n
\n )}\n
\n )\n ))}\n
\n \n \n );\n}\n\nfunction Thumbnails({ haikus, activeIndex, setActiveIndex, isMobile }: { haikus: Haiku[], activeIndex: number, setActiveIndex: (index: number) => void, isMobile: boolean }) {\n if (haikus.length == 0 || isMobile) { return null }\n return (\n
\n {haikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n \n ))}\n \n )\n\n}", "language": "typescript", "type": "file" }, @@ -1181,7 +1349,7 @@ "crewai::tool_based_generative_ui": [ { "name": "page.tsx", - "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku{\n japanese : string[] | [],\n english : string[] | [],\n image_names : string[] | [],\n selectedImage : string | null,\n}\n\ninterface HaikuCardProps{\n generatedHaiku : GenerateHaiku | Partial\n setHaikus : Dispatch>\n haikus : GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n const defaultChatHeight = 50\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n
\n
\n
\n \n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )}\n \n \n );\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction HaikuCard({generatedHaiku, setHaikus, haikus} : HaikuCardProps) {\n return (\n
\n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n
\n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n const validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n if (correctedNames.length < 3) {\n const availableFallbacks = VALID_IMAGE_NAMES.filter(name => !usedValidNames.has(name));\n for (let i = availableFallbacks.length - 1; i > 0; i--) {\n const j = Math.floor(Math.random() * (i + 1));\n [availableFallbacks[i], availableFallbacks[j]] = [availableFallbacks[j], availableFallbacks[i]];\n }\n\n while (correctedNames.length < 3 && availableFallbacks.length > 0) {\n const fallbackName = availableFallbacks.pop();\n if (fallbackName) {\n correctedNames.push(fallbackName);\n }\n }\n }\n\n while (correctedNames.length < 3 && VALID_IMAGE_NAMES.length > 0) {\n const fallbackName = VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n correctedNames.push(fallbackName);\n }\n\n return correctedNames.slice(0, 3);\n };\n\n useCopilotAction({\n name: \"generate_haiku\",\n available: \"frontend\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n 
description: \"Names of 3 relevant images\",\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [...prev, newHaiku]);\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const generatedHaikus = useMemo(() => (\n haikus.filter((haiku) => haiku.english[0] !== \"A placeholder verse—\")\n ), [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n {/* Thumbnail List */}\n {Boolean(generatedHaikus.length) && !isMobile && (\n
\n {generatedHaikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n )}\n\n {/* Main Display */}\n
\n
\n {haikus.filter((_haiku: Haiku, index: number) => {\n if (haikus.length == 1) return true;\n else return index == activeIndex + 1;\n }).map((haiku, index) => (\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n
\n ))}\n \n \n \n );\n}\n", + "content": "\"use client\";\nimport { CopilotKit, useCopilotAction } from \"@copilotkit/react-core\";\nimport { CopilotKitCSSProperties, CopilotSidebar, CopilotChat } from \"@copilotkit/react-ui\";\nimport { Dispatch, SetStateAction, useState, useEffect } from \"react\";\nimport \"@copilotkit/react-ui/styles.css\";\nimport \"./style.css\";\nimport React, { useMemo } from \"react\";\nimport { useMobileView } from \"@/utils/use-mobile-view\";\nimport { useMobileChat } from \"@/utils/use-mobile-chat\";\n\ninterface ToolBasedGenerativeUIProps {\n params: Promise<{\n integrationId: string;\n }>;\n}\n\ninterface GenerateHaiku {\n japanese: string[] | [],\n english: string[] | [],\n image_names: string[] | [],\n selectedImage: string | null,\n}\n\ninterface HaikuCardProps {\n generatedHaiku: GenerateHaiku | Partial\n setHaikus: Dispatch>\n haikus: GenerateHaiku[]\n}\n\nexport default function ToolBasedGenerativeUI({ params }: ToolBasedGenerativeUIProps) {\n const { integrationId } = React.use(params);\n const { isMobile } = useMobileView();\n\n\n const chatTitle = 'Haiku Generator'\n const chatDescription = 'Ask me to create haikus'\n const initialLabel = 'I\\'m a haiku generator 👋. How can I help you?'\n\n return (\n \n \n \n\n {/* Desktop Sidebar */}\n {!isMobile && (\n \n )}\n\n {/* Mobile Pull-Up Chat */}\n {isMobile && }\n \n \n );\n}\n\nfunction MobileChat({ chatTitle, chatDescription, initialLabel }: { chatTitle: string, chatDescription: string, initialLabel: string }) {\n const defaultChatHeight = 50\n\n const {\n isChatOpen,\n setChatHeight,\n setIsChatOpen,\n isDragging,\n chatHeight,\n handleDragStart\n } = useMobileChat(defaultChatHeight)\n return (\n <>\n {/* Chat Toggle Button */}\n
\n
\n {\n if (!isChatOpen) {\n setChatHeight(defaultChatHeight); // Reset to good default when opening\n }\n setIsChatOpen(!isChatOpen);\n }}\n >\n
\n
\n
{chatTitle}
\n
{chatDescription}
\n
\n
\n
\n \n \n \n
\n
\n \n\n {/* Pull-Up Chat Container */}\n \n {/* Drag Handle Bar */}\n \n
\n \n\n {/* Chat Header */}\n
\n
\n
\n

{chatTitle}

\n
\n setIsChatOpen(false)}\n className=\"p-2 hover:bg-gray-100 rounded-full transition-colors\"\n >\n \n \n \n \n
\n
\n\n {/* Chat Content - Flexible container for messages and input */}\n
\n \n
\n \n\n {/* Backdrop */}\n {isChatOpen && (\n setIsChatOpen(false)}\n />\n )}\n \n )\n}\n\nconst VALID_IMAGE_NAMES = [\n \"Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg\",\n \"Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg\",\n \"Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg\",\n \"Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg\",\n \"Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg\",\n \"Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg\",\n \"Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg\",\n \"Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg\",\n \"Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg\",\n \"Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg\"\n];\n\nfunction getRandomImage(): string {\n return VALID_IMAGE_NAMES[Math.floor(Math.random() * VALID_IMAGE_NAMES.length)];\n}\n\nconst validateAndCorrectImageNames = (rawNames: string[] | undefined): string[] | null => {\n if (!rawNames || rawNames.length !== 3) {\n return null;\n }\n\n const correctedNames: string[] = [];\n const usedValidNames = new Set();\n\n for (const name of rawNames) {\n if (VALID_IMAGE_NAMES.includes(name) && !usedValidNames.has(name)) {\n correctedNames.push(name);\n usedValidNames.add(name);\n if (correctedNames.length === 3) break;\n }\n }\n\n while (correctedNames.length < 3) {\n const nextImage = getRandomImage();\n if (!usedValidNames.has(nextImage)) {\n correctedNames.push(nextImage);\n usedValidNames.add(nextImage);\n }\n }\n\n return correctedNames.slice(0, 3);\n};\n\nfunction HaikuCard({ generatedHaiku, setHaikus, haikus }: HaikuCardProps) {\n return (\n \n
\n {generatedHaiku?.japanese?.map((line, index) => (\n
\n

{line}

\n

\n {generatedHaiku.english?.[index]}\n

\n
\n ))}\n {generatedHaiku?.japanese && generatedHaiku.japanese.length >= 2 && (\n
\n {(() => {\n const firstLine = generatedHaiku?.japanese?.[0];\n if (!firstLine) return null;\n const haikuIndex = haikus.findIndex((h: any) => h.japanese[0] === firstLine);\n const haiku = haikus[haikuIndex];\n if (!haiku?.image_names) return null;\n\n return haiku.image_names.map((imageName, imgIndex) => (\n {\n setHaikus(prevHaikus => {\n const newHaikus = prevHaikus.map((h, idx) => {\n if (idx === haikuIndex) {\n return {\n ...h,\n selectedImage: imageName\n };\n }\n return h;\n });\n return newHaikus;\n });\n }}\n />\n ));\n })()}\n
\n )}\n
\n \n );\n}\n\ninterface Haiku {\n japanese: string[];\n english: string[];\n image_names: string[];\n selectedImage: string | null;\n}\n\nfunction Haiku() {\n const [haikus, setHaikus] = useState([{\n japanese: [\"仮の句よ\", \"まっさらながら\", \"花を呼ぶ\"],\n english: [\n \"A placeholder verse—\",\n \"even in a blank canvas,\",\n \"it beckons flowers.\",\n ],\n image_names: [],\n selectedImage: null,\n }])\n const [activeIndex, setActiveIndex] = useState(0);\n const [isJustApplied, setIsJustApplied] = useState(false);\n\n useCopilotAction({\n name: \"generate_haiku\",\n parameters: [\n {\n name: \"japanese\",\n type: \"string[]\",\n },\n {\n name: \"english\",\n type: \"string[]\",\n },\n {\n name: \"image_names\",\n type: \"string[]\",\n description: `Names of 3 relevant images selected from the following: \\n -${VALID_IMAGE_NAMES.join('\\n -')}`,\n },\n ],\n followUp: false,\n handler: async ({ japanese, english, image_names }: { japanese: string[], english: string[], image_names: string[] }) => {\n const finalCorrectedImages = validateAndCorrectImageNames(image_names);\n const newHaiku = {\n japanese: japanese || [],\n english: english || [],\n image_names: finalCorrectedImages || [],\n selectedImage: finalCorrectedImages?.[0] || null,\n };\n setHaikus(prev => [newHaiku, ...prev].filter(h => h.english[0] !== \"A placeholder verse—\"));\n setActiveIndex(haikus.length - 1);\n setIsJustApplied(true);\n setTimeout(() => setIsJustApplied(false), 600);\n return \"Haiku generated.\";\n },\n render: ({ args: generatedHaiku }: { args: Partial }) => {\n return (\n \n );\n },\n }, [haikus]);\n\n const { isMobile } = useMobileView();\n\n return (\n
\n \n\n {/* Main Display */}\n
\n
\n {haikus.map((haiku, index) => (\n (haikus.length == 1 || index == activeIndex) && (\n\n \n {haiku.japanese.map((line, lineIndex) => (\n \n

\n {line}\n

\n

\n {haiku.english?.[lineIndex]}\n

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n setHaikus((prevHaikus) => {\n return prevHaikus.map((h, idx) => {\n if (idx === index) {\n return { ...h, selectedImage: imageName }\n } else {\n return { ...h }\n }\n })\n })}\n />\n ))}\n
\n )}\n
\n )\n ))}\n
\n \n \n );\n}\n\nfunction Thumbnails({ haikus, activeIndex, setActiveIndex, isMobile }: { haikus: Haiku[], activeIndex: number, setActiveIndex: (index: number) => void, isMobile: boolean }) {\n if (haikus.length == 0 || isMobile) { return null }\n return (\n
\n {haikus.map((haiku, index) => (\n setActiveIndex(index)}\n >\n {haiku.japanese.map((line, lineIndex) => (\n \n

{line}

\n

{haiku.english?.[lineIndex]}

\n
\n ))}\n {haiku.image_names && haiku.image_names.length === 3 && (\n
\n {haiku.image_names.map((imageName, imgIndex) => (\n \n ))}\n
\n )}\n \n ))}\n \n )\n\n}", "language": "typescript", "type": "file" }, diff --git a/typescript-sdk/apps/dojo/src/mastra/index.ts b/typescript-sdk/apps/dojo/src/mastra/index.ts index 27db6463d..abc55418a 100644 --- a/typescript-sdk/apps/dojo/src/mastra/index.ts +++ b/typescript-sdk/apps/dojo/src/mastra/index.ts @@ -9,38 +9,41 @@ import { createTool } from "@mastra/core"; import { z } from "zod"; -let storage: LibSQLStore | DynamoDBStore -if (process.env.DYNAMODB_TABLE_NAME) { - storage = new DynamoDBStore({ - name: "dynamodb", - config: { - tableName: process.env.DYNAMODB_TABLE_NAME - }, -}); -} else { - storage = new LibSQLStore({ url: "file::memory:" }); +function getStorage(): LibSQLStore | DynamoDBStore { + if (process.env.DYNAMODB_TABLE_NAME) { + return new DynamoDBStore({ + name: "dynamodb", + config: { + tableName: process.env.DYNAMODB_TABLE_NAME + }, + }); + } else { + return new LibSQLStore({ url: "file::memory:" }); + } } + + export const mastra = new Mastra({ agents: { agentic_chat: new Agent({ name: "agentic_chat", instructions: ` You are a helpful weather assistant that provides accurate weather information. - + Your primary function is to help users get weather details for specific locations. When responding: - Always ask for a location if none is provided - If the location name isn’t in English, please translate it - If giving a location with multiple parts (e.g. "New York, NY"), use the most relevant part (e.g. "New York") - Include relevant details like humidity, wind conditions, and precipitation - Keep responses concise but informative - + Use the weatherTool to fetch current weather data. `, model: openai("gpt-4o"), memory: new Memory({ - storage: storage, + storage: getStorage(), options: { workingMemory: { enabled: true, @@ -54,7 +57,7 @@ export const mastra = new Mastra({ shared_state: new Agent({ name: "shared_state", instructions: ` - You are a helpful assistant for creating recipes. + You are a helpful assistant for creating recipes. 
IMPORTANT: 1. Create a recipe using the existing ingredients and instructions. Make sure the recipe is complete. @@ -63,11 +66,11 @@ export const mastra = new Mastra({ 4. 'ingredients' is always an array of objects with 'icon', 'name', and 'amount' fields 5. 'instructions' is always an array of strings - If you have just created or modified the recipe, just answer in one sentence what you did. dont describe the recipe, just say what you did. + If you have just created or modified the recipe, just answer in one sentence what you did. dont describe the recipe, just say what you did. Do not mention "working memory", "memory", or "state" in your answer. `, model: openai("gpt-4o"), memory: new Memory({ - storage: storage, + storage: getStorage(), options: { workingMemory: { enabled: true, diff --git a/typescript-sdk/apps/dojo/src/menu.ts b/typescript-sdk/apps/dojo/src/menu.ts index a931f8541..885cd6ca1 100644 --- a/typescript-sdk/apps/dojo/src/menu.ts +++ b/typescript-sdk/apps/dojo/src/menu.ts @@ -7,48 +7,50 @@ export const menuIntegrations: MenuIntegrationConfig[] = [ features: ["agentic_chat"], }, { - id: "pydantic-ai", - name: "Pydantic AI", + id: "server-starter", + name: "Server Starter", + features: ["agentic_chat"], + }, + { + id: "adk-middleware", + name: "ADK Middleware", features: [ "agentic_chat", "human_in_the_loop", - "agentic_generative_ui", - "tool_based_generative_ui", "shared_state", - "predictive_state_updates", + "tool_based_generative_ui", + // "predictive_state_updates" ], }, - { - id: "server-starter", - name: "Server Starter", - features: ["agentic_chat"], - }, { id: "server-starter-all-features", name: "Server Starter (All Features)", features: [ "agentic_chat", "human_in_the_loop", + "agentic_chat_reasoning", "agentic_generative_ui", - "tool_based_generative_ui", - "shared_state", "predictive_state_updates", + "shared_state", + "tool_based_generative_ui", ], }, { - id: "mastra", - name: "Mastra", + id: "agno", + name: "Agno", features: 
["agentic_chat", "tool_based_generative_ui"], }, { - id: "mastra-agent-local", - name: "Mastra Agent (Local)", - features: ["agentic_chat", "shared_state", "tool_based_generative_ui"], - }, - { - id: "vercel-ai-sdk", - name: "Vercel AI SDK", - features: ["agentic_chat"], + id: "crewai", + name: "CrewAI", + features: [ + "agentic_chat", + "human_in_the_loop", + "agentic_generative_ui", + "predictive_state_updates", + "shared_state", + "tool_based_generative_ui", + ], }, { id: "langgraph", @@ -57,9 +59,10 @@ export const menuIntegrations: MenuIntegrationConfig[] = [ "agentic_chat", "human_in_the_loop", "agentic_generative_ui", - "tool_based_generative_ui", "predictive_state_updates", "shared_state", + "tool_based_generative_ui", + "subgraphs", ], }, { @@ -68,14 +71,14 @@ export const menuIntegrations: MenuIntegrationConfig[] = [ features: [ "agentic_chat", "human_in_the_loop", + "agentic_chat_reasoning", "agentic_generative_ui", - "tool_based_generative_ui", "predictive_state_updates", "shared_state", - "agentic_chat_reasoning", + "tool_based_generative_ui", + "subgraphs", ], }, - { id: "langgraph-typescript", name: "LangGraph (Typescript)", @@ -83,31 +86,45 @@ export const menuIntegrations: MenuIntegrationConfig[] = [ "agentic_chat", "human_in_the_loop", "agentic_generative_ui", - "tool_based_generative_ui", "predictive_state_updates", - "shared_state" + "shared_state", + "tool_based_generative_ui", + "subgraphs", ], }, - { - id: "agno", - name: "Agno", - features: ["agentic_chat", "tool_based_generative_ui"], - }, { id: "llama-index", name: "LlamaIndex", features: ["agentic_chat", "human_in_the_loop", "agentic_generative_ui", "shared_state"], + }, { - id: "crewai", - name: "CrewAI", + id: "mastra", + name: "Mastra", + features: ["agentic_chat", "tool_based_generative_ui"], + }, + { + id: "mastra-agent-local", + name: "Mastra Agent (Local)", + features: ["agentic_chat", "shared_state", "tool_based_generative_ui"], + }, + { + id: "pydantic-ai", + name: "Pydantic AI", 
features: [ "agentic_chat", "human_in_the_loop", - "tool_based_generative_ui", "agentic_generative_ui", + // Disabled until we can figure out why production builds break + // "predictive_state_updates", "shared_state", - "predictive_state_updates", + "tool_based_generative_ui", ], }, + { + id: "vercel-ai-sdk", + name: "Vercel AI SDK", + features: ["agentic_chat"], + }, ]; + diff --git a/typescript-sdk/apps/dojo/src/types/integration.ts b/typescript-sdk/apps/dojo/src/types/integration.ts index 705e0f8dd..956a16a22 100644 --- a/typescript-sdk/apps/dojo/src/types/integration.ts +++ b/typescript-sdk/apps/dojo/src/types/integration.ts @@ -7,7 +7,8 @@ export type Feature = | "predictive_state_updates" | "shared_state" | "tool_based_generative_ui" - | "agentic_chat_reasoning"; + | "agentic_chat_reasoning" + | "subgraphs"; export interface MenuIntegrationConfig { id: string; diff --git a/typescript-sdk/apps/dojo/tsconfig.json b/typescript-sdk/apps/dojo/tsconfig.json index 15e5e0688..af157911a 100644 --- a/typescript-sdk/apps/dojo/tsconfig.json +++ b/typescript-sdk/apps/dojo/tsconfig.json @@ -25,5 +25,5 @@ } }, "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], - "exclude": ["node_modules", "e2e", "e2e2"] + "exclude": ["node_modules", "e2e"] } diff --git a/typescript-sdk/integrations/agno/examples/pyproject.toml b/typescript-sdk/integrations/agno/examples/pyproject.toml index 8b6fe19b7..a7e9d282b 100644 --- a/typescript-sdk/integrations/agno/examples/pyproject.toml +++ b/typescript-sdk/integrations/agno/examples/pyproject.toml @@ -7,7 +7,7 @@ description = "Example usage of the AG-UI adapter for Agno" license = "MIT" readme = "README.md" -requires-python = ">=3.12" +requires-python = ">=3.12,<4.0" dependencies = [ "agno>=1.7.7", "openai>=1.99.1", @@ -15,6 +15,7 @@ dependencies = [ "fastapi>=0.116.1", "uvicorn>=0.35.0", "ag-ui-protocol>=0.1.8", + "dotenv (>=0.9.9,<0.10.0)", ] authors = [ {name = "AG-UI Team"} diff --git 
a/typescript-sdk/integrations/agno/examples/server/__init__.py b/typescript-sdk/integrations/agno/examples/server/__init__.py index 4ca5d2029..592c22fd0 100644 --- a/typescript-sdk/integrations/agno/examples/server/__init__.py +++ b/typescript-sdk/integrations/agno/examples/server/__init__.py @@ -5,12 +5,13 @@ AG-UI dojo features: - Agentic Chat (Investment Analyst with Finance tools) """ - from __future__ import annotations from fastapi import FastAPI import uvicorn import os +from dotenv import load_dotenv +load_dotenv() from .api import ( agentic_chat_app, diff --git a/typescript-sdk/integrations/agno/examples/server/api/agentic_chat.py b/typescript-sdk/integrations/agno/examples/server/api/agentic_chat.py index 934fc2882..0638a4e6e 100644 --- a/typescript-sdk/integrations/agno/examples/server/api/agentic_chat.py +++ b/typescript-sdk/integrations/agno/examples/server/api/agentic_chat.py @@ -6,13 +6,25 @@ from agno.app.agui.app import AGUIApp from agno.models.openai import OpenAIChat from agno.tools.yfinance import YFinanceTools +from agno.tools import tool + + +@tool(external_execution=True) +def change_background(background: str) -> str: # pylint: disable=unused-argument + """ + Change the background color of the chat. Can be anything that the CSS background attribute accepts. Regular colors, linear of radial gradients etc. + + Args: + background: str: The background color to change to. Can be anything that the CSS background attribute accepts. Regular colors, linear of radial gradients etc. 
+ """ # pylint: disable=line-too-long agent = Agent( model=OpenAIChat(id="gpt-4o"), tools=[ YFinanceTools( stock_price=True, analyst_recommendations=True, stock_fundamentals=True - ) + ), + change_background, ], description="You are an investment analyst that researches stock prices, analyst recommendations, and stock fundamentals.", instructions="Format your response using markdown and use tables to display data where possible.", diff --git a/typescript-sdk/integrations/agno/examples/server/api/tool_based_generative_ui.py b/typescript-sdk/integrations/agno/examples/server/api/tool_based_generative_ui.py index 203aff12f..6e52f7501 100644 --- a/typescript-sdk/integrations/agno/examples/server/api/tool_based_generative_ui.py +++ b/typescript-sdk/integrations/agno/examples/server/api/tool_based_generative_ui.py @@ -11,35 +11,40 @@ from agno.tools import tool -@tool() -def generate_haiku(english: List[str], japanese: List[str]) -> str: # pylint: disable=unused-argument +@tool(external_execution=True) +def generate_haiku(english: List[str], japanese: List[str], image_names: List[str]) -> str: # pylint: disable=unused-argument """ + Generate a haiku in Japanese and its English translation. - YOU MUST PROVIDE THE ENGLISH HAIKU AND THE JAPANESE HAIKU. + YOU MUST PROVIDE THE ENGLISH HAIKU AND THE JAPANESE HAIKU AND THE IMAGE NAMES. 
+ When picking image names, pick them from the following list: + - "Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg", + - "Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg", + - "Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg", + - "Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg", + - "Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg", + - "Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg", + - "Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg", + - "Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg", + - "Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg", + - "Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg" Args: english: List[str]: An array of three lines of the haiku in English. YOU MUST PROVIDE THE ENGLISH HAIKU. japanese: List[str]: An array of three lines of the haiku in Japanese. YOU MUST PROVIDE THE JAPANESE HAIKU. + image_names: List[str]: An array of three image names. YOU MUST PROVIDE THE IMAGE NAMES. + Returns: str: A confirmation message. """ # pylint: disable=line-too-long return "Haiku generated" - -@tool(external_execution=True) -def change_background(background: str) -> str: # pylint: disable=unused-argument - """ - Change the background color of the chat. Can be anything that the CSS background attribute accepts. Regular colors, linear of radial gradients etc. - - Args: - background: str: The background color to change to. Can be anything that the CSS background attribute accepts. Regular colors, linear of radial gradients etc. - """ # pylint: disable=line-too-long - agent = Agent( model=OpenAIChat(id="gpt-4o"), - tools=[generate_haiku, change_background], - description="You are a helpful assistant that can help with tasks and answer questions.", + tools=[generate_haiku], + description="Help the user with writing Haikus. 
If the user asks for a haiku, use the generate_haiku tool to display the haiku to the user.", + debug_mode=True, ) agui_app = AGUIApp( diff --git a/typescript-sdk/integrations/agno/package.json b/typescript-sdk/integrations/agno/package.json index d4c270887..05b9cd0dc 100644 --- a/typescript-sdk/integrations/agno/package.json +++ b/typescript-sdk/integrations/agno/package.json @@ -23,13 +23,14 @@ "link:global": "pnpm link --global", "unlink:global": "pnpm unlink --global" }, - "dependencies": { - "@ag-ui/client": "workspace:*" - }, "peerDependencies": { + "@ag-ui/core": ">=0.0.37", + "@ag-ui/client": ">=0.0.37", "rxjs": "7.8.1" }, "devDependencies": { + "@ag-ui/core": "workspace:*", + "@ag-ui/client": "workspace:*", "@types/jest": "^29.5.14", "@types/node": "^20.11.19", "jest": "^29.7.0", diff --git a/typescript-sdk/integrations/crewai/package.json b/typescript-sdk/integrations/crewai/package.json index 51bb441c6..b5c334022 100644 --- a/typescript-sdk/integrations/crewai/package.json +++ b/typescript-sdk/integrations/crewai/package.json @@ -23,13 +23,14 @@ "link:global": "pnpm link --global", "unlink:global": "pnpm unlink --global" }, - "dependencies": { - "@ag-ui/client": "workspace:*" - }, "peerDependencies": { + "@ag-ui/core": ">=0.0.37", + "@ag-ui/client": ">=0.0.37", "rxjs": "7.8.1" }, "devDependencies": { + "@ag-ui/core": "workspace:*", + "@ag-ui/client": "workspace:*", "@types/jest": "^29.5.14", "@types/node": "^20.11.19", "jest": "^29.7.0", diff --git a/typescript-sdk/integrations/langgraph/examples/python/agents/dojo.py b/typescript-sdk/integrations/langgraph/examples/python/agents/dojo.py index c9371f71f..10af2cd58 100644 --- a/typescript-sdk/integrations/langgraph/examples/python/agents/dojo.py +++ b/typescript-sdk/integrations/langgraph/examples/python/agents/dojo.py @@ -15,6 +15,7 @@ from .agentic_chat.agent import graph as agentic_chat_graph from .agentic_generative_ui.agent import graph as agentic_generative_ui_graph from 
.agentic_chat_reasoning.agent import graph as agentic_chat_reasoning_graph +from .subgraphs.agent import graph as subgraphs_graph app = FastAPI(title="LangGraph Dojo Example Server") @@ -55,6 +56,11 @@ description="An example for a reasoning chat.", graph=agentic_chat_reasoning_graph, ), + "subgraphs": LangGraphAgent( + name="subgraphs", + description="A demo of LangGraph subgraphs using a Game Character Creator.", + graph=subgraphs_graph, + ), } add_langgraph_fastapi_endpoint( @@ -99,6 +105,12 @@ path="/agent/agentic_chat_reasoning" ) +add_langgraph_fastapi_endpoint( + app=app, + agent=agents["subgraphs"], + path="/agent/subgraphs" +) + def main(): """Run the uvicorn server.""" port = int(os.getenv("PORT", "8000")) diff --git a/typescript-sdk/integrations/langgraph/examples/python/agents/tool_based_generative_ui/agent.py b/typescript-sdk/integrations/langgraph/examples/python/agents/tool_based_generative_ui/agent.py index acba8b2e6..013a9c06f 100644 --- a/typescript-sdk/integrations/langgraph/examples/python/agents/tool_based_generative_ui/agent.py +++ b/typescript-sdk/integrations/langgraph/examples/python/agents/tool_based_generative_ui/agent.py @@ -2,34 +2,17 @@ An example demonstrating tool-based generative UI using LangGraph. 
""" -from typing import List, Any, Optional, Annotated import os - -# LangGraph imports +from typing import Any, List +from typing_extensions import Literal from langchain_openai import ChatOpenAI -from langchain_core.runnables import RunnableConfig from langchain_core.messages import SystemMessage -from langchain_core.tools import tool -from langgraph.graph import StateGraph, END, START +from langchain_core.runnables import RunnableConfig +from langgraph.graph import StateGraph, END from langgraph.types import Command from langgraph.graph import MessagesState from langgraph.prebuilt import ToolNode -@tool -def generate_haiku( - japanese: Annotated[ # pylint: disable=unused-argument - List[str], - "An array of three lines of the haiku in Japanese" - ], - english: Annotated[ # pylint: disable=unused-argument - List[str], - "An array of three lines of the haiku in English" - ] -): - """ - Generate a haiku in Japanese and its English translation. - Also select exactly 3 relevant images from the provided list based on the haiku's theme. - """ class AgentState(MessagesState): """ @@ -37,63 +20,49 @@ class AgentState(MessagesState): """ tools: List[Any] -async def chat_node(state: AgentState, config: Optional[RunnableConfig] = None): - """ - The main function handling chat and tool calls. +async def chat_node(state: AgentState, config: RunnableConfig) -> Command[Literal["tool_node", "__end__"]]: """ - - system_prompt = """ - You assist the user in generating a haiku. - When generating a haiku using the 'generate_haiku' tool. + Standard chat node based on the ReAct design pattern. 
It handles: + - The model to use (and binds in CopilotKit actions and the tools defined above) + - The system prompt + - Getting a response from the model + - Handling tool calls + + For more about the ReAct design pattern, see: + https://www.perplexity.ai/search/react-agents-NcXLQhreS0WDzpVaS4m9Cg """ - # Define the model model = ChatOpenAI(model="gpt-4o") - # Define config for the model - if config is None: - config = RunnableConfig(recursion_limit=25) - - # Bind the tools to the model model_with_tools = model.bind_tools( - [generate_haiku], - # Disable parallel tool calls to avoid race conditions + [ + *state.get("tools", []), # bind tools defined by ag-ui + ], parallel_tool_calls=False, ) - # Run the model to generate a response + system_message = SystemMessage( + content=f"Help the user with writing Haikus. If the user asks for a haiku, use the generate_haiku tool to display the haiku to the user." + ) + response = await model_with_tools.ainvoke([ - SystemMessage(content=system_prompt), + system_message, *state["messages"], ], config) - if response.tool_calls: - return Command( - goto="tool_node", - update={ - "messages": state["messages"] + [response] - } - ) - # Return Command to end with updated messages return Command( goto=END, update={ - "messages": state["messages"] + [response] + "messages": [response], } ) -# Define the graph workflow = StateGraph(AgentState) - -# Add nodes workflow.add_node("chat_node", chat_node) -workflow.add_node("tool_node", ToolNode([generate_haiku])) - -# Add edges +# This is required even though we don't have any backend tools to pass in. 
+workflow.add_node("tool_node", ToolNode(tools=[])) workflow.set_entry_point("chat_node") -workflow.add_edge(START, "chat_node") workflow.add_edge("chat_node", END) -workflow.add_edge("tool_node", END) # Conditionally use a checkpointer based on the environment @@ -109,4 +78,3 @@ async def chat_node(state: AgentState, config: Optional[RunnableConfig] = None): else: # When running in LangGraph API/dev, don't use a custom checkpointer graph = workflow.compile() - diff --git a/typescript-sdk/integrations/langgraph/examples/python/langgraph.json b/typescript-sdk/integrations/langgraph/examples/python/langgraph.json index af4c9878c..bea65ef3f 100644 --- a/typescript-sdk/integrations/langgraph/examples/python/langgraph.json +++ b/typescript-sdk/integrations/langgraph/examples/python/langgraph.json @@ -9,7 +9,8 @@ "predictive_state_updates": "./agents/predictive_state_updates/agent.py:graph", "shared_state": "./agents/shared_state/agent.py:graph", "tool_based_generative_ui": "./agents/tool_based_generative_ui/agent.py:graph", - "agentic_chat_reasoning": "./agents/agentic_chat_reasoning/agent.py:graph" + "agentic_chat_reasoning": "./agents/agentic_chat_reasoning/agent.py:graph", + "subgraphs": "./agents/subgraphs/agent.py:graph" }, "env": ".env" } diff --git a/typescript-sdk/integrations/langgraph/examples/python/poetry.lock b/typescript-sdk/integrations/langgraph/examples/python/poetry.lock index b3c6e426e..e9ec71b71 100644 --- a/typescript-sdk/integrations/langgraph/examples/python/poetry.lock +++ b/typescript-sdk/integrations/langgraph/examples/python/poetry.lock @@ -2,14 +2,14 @@ [[package]] name = "ag-ui-langgraph" -version = "0.0.5" +version = "0.0.12a1" description = "Implementation of the AG-UI protocol for LangGraph." 
optional = false python-versions = "<3.14,>=3.10" groups = ["main"] files = [ - {file = "ag_ui_langgraph-0.0.5-py3-none-any.whl", hash = "sha256:77ef1a3121818a95907f797972acd7290ec730ada57efeee7bdb6bc6ab24baed"}, - {file = "ag_ui_langgraph-0.0.5.tar.gz", hash = "sha256:63bd4934bab95042d9095940321d3ee1930b662141e704617d942006943d2a07"}, + {file = "ag_ui_langgraph-0.0.12a1-py3-none-any.whl", hash = "sha256:3c5e6a2b1cea7c91c33f6fa352dacaf23f905b13baa959276f3c22cb5dcbaa59"}, + {file = "ag_ui_langgraph-0.0.12a1.tar.gz", hash = "sha256:13c6034aaa33ec053788cd7dba3a088d7763bf03b19830b4bba6d559546b30b2"}, ] [package.dependencies] @@ -2970,4 +2970,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.1" python-versions = ">=3.12,<3.14" -content-hash = "d8bfe3601f3fc50ba1f596c05f61c98c24455bb6751c5d491a2b923bbf617a26" +content-hash = "42262620b6201375360ae7e3b3a781edd9bba4bb64e2341175fd28921f268a65" diff --git a/typescript-sdk/integrations/langgraph/examples/python/pyproject.toml b/typescript-sdk/integrations/langgraph/examples/python/pyproject.toml index 36ebdeb6d..bfd45b38c 100644 --- a/typescript-sdk/integrations/langgraph/examples/python/pyproject.toml +++ b/typescript-sdk/integrations/langgraph/examples/python/pyproject.toml @@ -21,7 +21,7 @@ langchain-experimental = ">=0.0.11" langchain-google-genai = ">=2.1.9" langchain-openai = ">=0.0.1" langgraph = "^0.6.1" -ag-ui-langgraph = { version = "0.0.5", extras = ["fastapi"] } +ag-ui-langgraph = { version = "0.0.12a1", extras = ["fastapi"] } python-dotenv = "^1.0.0" fastapi = "^0.115.12" diff --git a/typescript-sdk/integrations/langgraph/examples/typescript/langgraph.json b/typescript-sdk/integrations/langgraph/examples/typescript/langgraph.json index 42f829990..d225021ec 100644 --- a/typescript-sdk/integrations/langgraph/examples/typescript/langgraph.json +++ b/typescript-sdk/integrations/langgraph/examples/typescript/langgraph.json @@ -6,7 +6,8 @@ "human_in_the_loop": 
"./src/agents/human_in_the_loop/agent.ts:humanInTheLoopGraph", "predictive_state_updates": "./src/agents/predictive_state_updates/agent.ts:predictiveStateUpdatesGraph", "shared_state": "./src/agents/shared_state/agent.ts:sharedStateGraph", - "tool_based_generative_ui": "./src/agents/tool_based_generative_ui/agent.ts:toolBasedGenerativeUiGraph" + "tool_based_generative_ui": "./src/agents/tool_based_generative_ui/agent.ts:toolBasedGenerativeUiGraph", + "subgraphs": "./src/agents/subgraphs/agent.ts:subGraphsAgentGraph" }, "env": ".env" } diff --git a/typescript-sdk/integrations/langgraph/examples/typescript/src/agents/tool_based_generative_ui/agent.ts b/typescript-sdk/integrations/langgraph/examples/typescript/src/agents/tool_based_generative_ui/agent.ts index 522b514a4..cfe8a66d4 100644 --- a/typescript-sdk/integrations/langgraph/examples/typescript/src/agents/tool_based_generative_ui/agent.ts +++ b/typescript-sdk/integrations/langgraph/examples/typescript/src/agents/tool_based_generative_ui/agent.ts @@ -7,56 +7,6 @@ import { SystemMessage } from "@langchain/core/messages"; import { RunnableConfig } from "@langchain/core/runnables"; import { Command, Annotation, MessagesAnnotation, StateGraph, END, START } from "@langchain/langgraph"; -// List of available images (modify path if needed) -const IMAGE_LIST = [ - "Osaka_Castle_Turret_Stone_Wall_Pine_Trees_Daytime.jpg", - "Tokyo_Skyline_Night_Tokyo_Tower_Mount_Fuji_View.jpg", - "Itsukushima_Shrine_Miyajima_Floating_Torii_Gate_Sunset_Long_Exposure.jpg", - "Takachiho_Gorge_Waterfall_River_Lush_Greenery_Japan.jpg", - "Bonsai_Tree_Potted_Japanese_Art_Green_Foliage.jpeg", - "Shirakawa-go_Gassho-zukuri_Thatched_Roof_Village_Aerial_View.jpg", - "Ginkaku-ji_Silver_Pavilion_Kyoto_Japanese_Garden_Pond_Reflection.jpg", - "Senso-ji_Temple_Asakusa_Cherry_Blossoms_Kimono_Umbrella.jpg", - "Cherry_Blossoms_Sakura_Night_View_City_Lights_Japan.jpg", - "Mount_Fuji_Lake_Reflection_Cherry_Blossoms_Sakura_Spring.jpg" -]; - -// This tool 
generates a haiku on the server. -// The tool call will be streamed to the frontend as it is being generated. -const GENERATE_HAIKU_TOOL = { - type: "function", - function: { - name: "generate_haiku", - description: "Generate a haiku in Japanese and its English translation. Also select exactly 3 relevant images from the provided list based on the haiku's theme.", - parameters: { - type: "object", - properties: { - japanese: { - type: "array", - items: { - type: "string" - }, - description: "An array of three lines of the haiku in Japanese" - }, - english: { - type: "array", - items: { - type: "string" - }, - description: "An array of three lines of the haiku in English" - }, - image_names: { - type: "array", - items: { - type: "string" - }, - description: "An array of EXACTLY THREE image filenames from the provided list that are most relevant to the haiku." - } - }, - required: ["japanese", "english", "image_names"] - } - } -}; export const AgentStateAnnotation = Annotation.Root({ tools: Annotation(), @@ -65,64 +15,35 @@ export const AgentStateAnnotation = Annotation.Root({ export type AgentState = typeof AgentStateAnnotation.State; async function chatNode(state: AgentState, config?: RunnableConfig): Promise { - /** - * The main function handling chat and tool calls. - */ - // Prepare the image list string for the prompt - const imageListStr = IMAGE_LIST.map(img => `- ${img}`).join("\n"); - - const systemPrompt = ` - You assist the user in generating a haiku. - When generating a haiku using the 'generate_haiku' tool, you MUST also select exactly 3 image filenames from the following list that are most relevant to the haiku's content or theme. Return the filenames in the 'image_names' parameter. - - Available images: - ${imageListStr} - - Don't provide the relevant image names in your final response to the user. 
- `; - - // Define the model const model = new ChatOpenAI({ model: "gpt-4o" }); - - // Define config for the model - if (!config) { - config = { recursionLimit: 25 }; - } - // Bind the tools to the model const modelWithTools = model.bindTools( - [GENERATE_HAIKU_TOOL], - { - // Disable parallel tool calls to avoid race conditions - parallel_tool_calls: false, - } + [ + ...state.tools || [] + ], + { parallel_tool_calls: false } ); - // Run the model to generate a response + const systemMessage = new SystemMessage({ + content: 'Help the user with writing Haikus. If the user asks for a haiku, use the generate_haiku tool to display the haiku to the user.' + }); + const response = await modelWithTools.invoke([ - new SystemMessage({ content: systemPrompt }), + systemMessage, ...state.messages, ], config); - // Return Command to end with updated messages return new Command({ goto: END, update: { - messages: [...state.messages, response] + messages: [response] } }); } -// Define the graph const workflow = new StateGraph(AgentStateAnnotation); - -// Add nodes workflow.addNode("chat_node", chatNode); -// Add edges -workflow.setEntryPoint("chat_node"); workflow.addEdge(START, "chat_node"); -workflow.addEdge("chat_node", END); -// Compile the graph export const toolBasedGenerativeUiGraph = workflow.compile(); \ No newline at end of file diff --git a/typescript-sdk/integrations/langgraph/package.json b/typescript-sdk/integrations/langgraph/package.json index 7b25d5134..6417c1143 100644 --- a/typescript-sdk/integrations/langgraph/package.json +++ b/typescript-sdk/integrations/langgraph/package.json @@ -1,6 +1,6 @@ { "name": "@ag-ui/langgraph", - "version": "0.0.9", + "version": "0.0.13", "main": "./dist/index.js", "module": "./dist/index.mjs", "types": "./dist/index.d.ts", @@ -23,13 +23,18 @@ "unlink:global": "pnpm unlink --global" }, "dependencies": { - "@ag-ui/client": "workspace:*", "@langchain/core": "^0.3.66", - "@langchain/langgraph-sdk": "^0.0.105", + 
"@langchain/langgraph-sdk": "^0.1.2", "partial-json": "^0.1.7", "rxjs": "7.8.1" }, + "peerDependencies": { + "@ag-ui/core": ">=0.0.37", + "@ag-ui/client": ">=0.0.37" + }, "devDependencies": { + "@ag-ui/core": "workspace:*", + "@ag-ui/client": "workspace:*", "@types/jest": "^29.5.14", "@types/node": "^20.11.19", "jest": "^29.7.0", diff --git a/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/agent.py b/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/agent.py index 493fbbf4b..92c5b0700 100644 --- a/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/agent.py +++ b/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/agent.py @@ -1,6 +1,7 @@ import uuid import json -from typing import Optional, List, Any, Union, AsyncGenerator, Generator +from typing import Optional, List, Any, Union, AsyncGenerator, Generator, Literal, Dict +import inspect from langgraph.graph.state import CompiledStateGraph from langchain.schema import BaseMessage, SystemMessage @@ -27,7 +28,9 @@ langchain_messages_to_agui, resolve_reasoning_content, resolve_message_content, - camel_to_snake + camel_to_snake, + json_safe_stringify, + make_json_safe ) from ag_ui.core import ( @@ -85,9 +88,15 @@ def __init__(self, *, name: str, graph: CompiledStateGraph, description: Optiona self.messages_in_process: MessagesInProgressRecord = {} self.active_run: Optional[RunMetadata] = None self.constant_schema_keys = ['messages', 'tools'] + self.active_step = None def _dispatch_event(self, event: ProcessedEvents) -> str: - return event # Fallback if no encoder + if event.type == EventType.RAW: + event.event = make_json_safe(event.event) + elif event.raw_event: + event.raw_event = make_json_safe(event.raw_event) + + return event async def run(self, input: RunAgentInput) -> AsyncGenerator[str, None]: forwarded_props = {} @@ -100,11 +109,13 @@ async def run(self, input: RunAgentInput) -> AsyncGenerator[str, None]: async def _handle_stream_events(self, input: RunAgentInput) -> 
AsyncGenerator[str, None]: thread_id = input.thread_id or str(uuid.uuid4()) - self.active_run = { + INITIAL_ACTIVE_RUN = { "id": input.run_id, "thread_id": thread_id, "thinking_process": None, + "node_name": None, } + self.active_run = INITIAL_ACTIVE_RUN forwarded_props = input.forwarded_props node_name_input = forwarded_props.get('node_name', None) if forwarded_props else None @@ -133,9 +144,8 @@ async def _handle_stream_events(self, input: RunAgentInput) -> AsyncGenerator[st # In case of resume (interrupt), re-start resumed step if resume_input and self.active_run.get("node_name"): - yield self._dispatch_event( - StepStartedEvent(type=EventType.STEP_STARTED, step_name=self.active_run.get("node_name")) - ) + for ev in self.start_step(self.active_run.get("node_name")): + yield ev state = prepared_stream_response["state"] stream = prepared_stream_response["stream"] @@ -149,7 +159,13 @@ async def _handle_stream_events(self, input: RunAgentInput) -> AsyncGenerator[st should_exit = False current_graph_state = state + async for event in stream: + subgraphs_stream_enabled = input.forwarded_props.get('stream_subgraphs') if input.forwarded_props else False + is_subgraph_stream = (subgraphs_stream_enabled and ( + event.get("event", "").startswith("events") or + event.get("event", "").startswith("values") + )) if event["event"] == "error": yield self._dispatch_event( RunErrorEvent(type=EventType.RUN_ERROR, message=event["data"]["message"], raw_event=event) @@ -173,16 +189,8 @@ async def _handle_stream_events(self, input: RunAgentInput) -> AsyncGenerator[st ) if current_node_name and current_node_name != self.active_run.get("node_name"): - if self.active_run["node_name"] and self.active_run["node_name"] != node_name_input: - yield self._dispatch_event( - StepFinishedEvent(type=EventType.STEP_FINISHED, step_name=self.active_run["node_name"]) - ) - self.active_run["node_name"] = None - - yield self._dispatch_event( - StepStartedEvent(type=EventType.STEP_STARTED, 
step_name=current_node_name) - ) - self.active_run["node_name"] = current_node_name + for ev in self.start_step(current_node_name): + yield ev updated_state = self.active_run.get("manually_emitted_state") or current_graph_state has_state_diff = updated_state != state @@ -222,19 +230,14 @@ async def _handle_stream_events(self, input: RunAgentInput) -> AsyncGenerator[st CustomEvent( type=EventType.CUSTOM, name=LangGraphEventTypes.OnInterrupt.value, - value=json.dumps(interrupt.value) if not isinstance(interrupt.value, str) else interrupt.value, + value=json.dumps(interrupt.value, default=json_safe_stringify) if not isinstance(interrupt.value, str) else interrupt.value, raw_event=interrupt, ) ) if self.active_run.get("node_name") != node_name: - yield self._dispatch_event( - StepFinishedEvent(type=EventType.STEP_FINISHED, step_name=self.active_run["node_name"]) - ) - self.active_run["node_name"] = node_name - yield self._dispatch_event( - StepStartedEvent(type=EventType.STEP_STARTED, step_name=self.active_run["node_name"]) - ) + for ev in self.start_step(node_name): + yield ev state_values = state.values if state.values else state yield self._dispatch_event( @@ -248,28 +251,25 @@ async def _handle_stream_events(self, input: RunAgentInput) -> AsyncGenerator[st ) ) - yield self._dispatch_event( - StepFinishedEvent(type=EventType.STEP_FINISHED, step_name=self.active_run["node_name"]) - ) - self.active_run["node_name"] = None + yield self.end_step() yield self._dispatch_event( RunFinishedEvent(type=EventType.RUN_FINISHED, thread_id=thread_id, run_id=self.active_run["id"]) ) - self.active_run = None + # Reset active run to how it was before the stream started + self.active_run = INITIAL_ACTIVE_RUN async def prepare_stream(self, input: RunAgentInput, agent_state: State, config: RunnableConfig): state_input = input.state or {} messages = input.messages or [] - tools = input.tools or [] forwarded_props = input.forwarded_props or {} thread_id = input.thread_id 
state_input["messages"] = agent_state.values.get("messages", []) self.active_run["current_graph_state"] = agent_state.values.copy() langchain_messages = agui_messages_to_langchain(messages) - state = self.langgraph_default_merge_state(state_input, langchain_messages, tools) + state = self.langgraph_default_merge_state(state_input, langchain_messages, input) self.active_run["current_graph_state"].update(state) config["configurable"]["thread_id"] = thread_id interrupts = agent_state.tasks[0].interrupts if agent_state.tasks and len(agent_state.tasks) > 0 else [] @@ -333,8 +333,20 @@ async def prepare_stream(self, input: RunAgentInput, agent_state: State, config: ) stream_input = {**forwarded_props, **payload_input} if payload_input else None + + subgraphs_stream_enabled = input.forwarded_props.get('stream_subgraphs') if input.forwarded_props else False + + kwargs = self.get_stream_kwargs( + input=stream_input, + config=config, + subgraphs=bool(subgraphs_stream_enabled), + version="v2", + ) + + stream = self.graph.astream_events(**kwargs) + return { - "stream": self.graph.astream_events(stream_input, config, version="v2"), + "stream": stream, "state": state, "config": config } @@ -358,8 +370,16 @@ async def prepare_regenerate_stream( # pylint: disable=too-many-arguments as_node=time_travel_checkpoint.next[0] if time_travel_checkpoint.next else "__start__" ) - stream_input = self.langgraph_default_merge_state(time_travel_checkpoint.values, [message_checkpoint], tools) - stream = self.graph.astream_events(stream_input, fork, version="v2") + stream_input = self.langgraph_default_merge_state(time_travel_checkpoint.values, [message_checkpoint], input) + subgraphs_stream_enabled = input.forwarded_props.get('stream_subgraphs') if input.forwarded_props else False + + kwargs = self.get_stream_kwargs( + input=stream_input, + fork=fork, + subgraphs=bool(subgraphs_stream_enabled), + version="v2", + ) + stream = self.graph.astream_events(**kwargs) return { "stream": stream, @@ 
-386,20 +406,28 @@ def get_schema_keys(self, config) -> SchemaKeys: input_schema_keys = list(input_schema["properties"].keys()) if "properties" in input_schema else [] output_schema_keys = list(output_schema["properties"].keys()) if "properties" in output_schema else [] config_schema_keys = list(config_schema["properties"].keys()) if "properties" in config_schema else [] + context_schema_keys = [] + + if hasattr(self.graph, "context_schema") and self.graph.context_schema is not None: + context_schema = self.graph.context_schema().schema() + context_schema_keys = list(context_schema["properties"].keys()) if "properties" in context_schema else [] + return { "input": [*input_schema_keys, *self.constant_schema_keys], "output": [*output_schema_keys, *self.constant_schema_keys], "config": config_schema_keys, + "context": context_schema_keys, } except Exception: return { "input": self.constant_schema_keys, "output": self.constant_schema_keys, "config": [], + "context": [], } - def langgraph_default_merge_state(self, state: State, messages: List[BaseMessage], tools: Any) -> State: + def langgraph_default_merge_state(self, state: State, messages: List[BaseMessage], input: RunAgentInput) -> State: if messages and isinstance(messages[0], SystemMessage): messages = messages[1:] @@ -408,6 +436,7 @@ def langgraph_default_merge_state(self, state: State, messages: List[BaseMessage new_messages = [msg for msg in messages if msg.id not in existing_message_ids] + tools = input.tools or [] tools_as_dicts = [] if tools: for tool in tools: @@ -422,6 +451,10 @@ def langgraph_default_merge_state(self, state: State, messages: List[BaseMessage **state, "messages": new_messages, "tools": [*state.get("tools", []), *tools_as_dicts], + "ag-ui": { + "tools": [*state.get("tools", []), *tools_as_dicts], + "context": input.context or [] + } } def get_state_snapshot(self, state: State) -> State: @@ -697,3 +730,65 @@ async def get_checkpoint_before_message(self, message_id: str, thread_id: str): 
raise ValueError("Message ID not found in history") + def start_step(self, step_name: str): + if self.active_step: + yield self.end_step() + + yield self._dispatch_event( + StepStartedEvent( + type=EventType.STEP_STARTED, + step_name=step_name + ) + ) + self.active_run["node_name"] = step_name + self.active_step = step_name + + def end_step(self): + if self.active_step is None: + raise ValueError("No active step to end") + + dispatch = self._dispatch_event( + StepFinishedEvent( + type=EventType.STEP_FINISHED, + step_name=self.active_run["node_name"] or self.active_step + ) + ) + + self.active_run["node_name"] = None + self.active_step = None + return dispatch + + # Check if some kwargs are enabled per LG version, to "catch all versions" and backwards compatibility + def get_stream_kwargs( + self, + input: Any, + subgraphs: bool = False, + version: Literal["v1", "v2"] = "v2", + config: Optional[RunnableConfig] = None, + context: Optional[Dict[str, Any]] = None, + fork: Optional[Any] = None, + ): + kwargs = dict( + input=input, + subgraphs=subgraphs, + version=version, + ) + + # Only add context if supported + sig = inspect.signature(self.graph.astream_events) + if 'context' in sig.parameters: + base_context = {} + if isinstance(config, dict) and 'configurable' in config and isinstance(config['configurable'], dict): + base_context.update(config['configurable']) + if context: # context might be None or {} + base_context.update(context) + if base_context: # only add if there's something to pass + kwargs['context'] = base_context + + if config: + kwargs['config'] = config + + if fork: + kwargs.update(fork) + + return kwargs diff --git a/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/endpoint.py b/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/endpoint.py index 89c6e06be..e2b5e355c 100644 --- a/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/endpoint.py +++ b/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/endpoint.py 
@@ -24,4 +24,14 @@ async def event_generator(): return StreamingResponse( event_generator(), media_type=encoder.get_content_type() - ) \ No newline at end of file + ) + + @app.get(f"{path}/health") + def health(): + """Health check.""" + return { + "status": "ok", + "agent": { + "name": agent.name, + } + } \ No newline at end of file diff --git a/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/types.py b/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/types.py index 7d08d6bf3..046c61478 100644 --- a/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/types.py +++ b/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/types.py @@ -25,7 +25,8 @@ class CustomEventNames(str, Enum): SchemaKeys = TypedDict("SchemaKeys", { "input": NotRequired[Optional[List[str]]], "output": NotRequired[Optional[List[str]]], - "config": NotRequired[Optional[List[str]]] + "config": NotRequired[Optional[List[str]]], + "context": NotRequired[Optional[List[str]]] }) ThinkingProcess = TypedDict("ThinkingProcess", { diff --git a/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/utils.py b/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/utils.py index c1c50466f..ed5060887 100644 --- a/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/utils.py +++ b/typescript-sdk/integrations/langgraph/python/ag_ui_langgraph/utils.py @@ -1,6 +1,8 @@ import json import re from typing import List, Any, Dict, Union +from dataclasses import is_dataclass, asdict +from datetime import date, datetime from langchain_core.messages import BaseMessage, HumanMessage, AIMessage, SystemMessage, ToolMessage from ag_ui.core import ( @@ -177,3 +179,60 @@ def resolve_message_content(content: Any) -> str | None: def camel_to_snake(name): return re.sub(r'(? Any: + """ + Recursively convert a value into a JSON-serializable structure. + + - Handles Pydantic models via `model_dump`. + - Handles LangChain messages via `to_dict`. 
+ - Recursively walks dicts, lists, and tuples. + - For arbitrary objects, falls back to `__dict__` if available, else `repr()`. + """ + # Pydantic models + if hasattr(value, "model_dump"): + try: + return make_json_safe(value.model_dump(by_alias=True, exclude_none=True)) + except Exception: + pass + + # LangChain-style objects + if hasattr(value, "to_dict"): + try: + return make_json_safe(value.to_dict()) + except Exception: + pass + + # Dict + if isinstance(value, dict): + return {key: make_json_safe(sub_value) for key, sub_value in value.items()} + + # List / tuple + if isinstance(value, (list, tuple)): + return [make_json_safe(sub_value) for sub_value in value] + + # Already JSON safe + if isinstance(value, (str, int, float, bool)) or value is None: + return value + + # Arbitrary object: try __dict__ first, fallback to repr + if hasattr(value, "__dict__"): + return { + "__type__": type(value).__name__, + **make_json_safe(value.__dict__), + } + + return repr(value) \ No newline at end of file diff --git a/typescript-sdk/integrations/langgraph/python/pyproject.toml b/typescript-sdk/integrations/langgraph/python/pyproject.toml index 9b7a8d93a..cf9e27e73 100644 --- a/typescript-sdk/integrations/langgraph/python/pyproject.toml +++ b/typescript-sdk/integrations/langgraph/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "ag-ui-langgraph" -version = "0.0.6" +version = "0.0.12-alpha.1" description = "Implementation of the AG-UI protocol for LangGraph." 
authors = ["Ran Shem Tov "] readme = "README.md" diff --git a/typescript-sdk/integrations/langgraph/src/agent.ts b/typescript-sdk/integrations/langgraph/src/agent.ts index 07c37efcf..62b584696 100644 --- a/typescript-sdk/integrations/langgraph/src/agent.ts +++ b/typescript-sdk/integrations/langgraph/src/agent.ts @@ -9,7 +9,7 @@ import { Message as LangGraphMessage, Config, Interrupt, - Thread + Thread, } from "@langchain/langgraph-sdk"; import { randomUUID } from "node:crypto"; import { @@ -23,7 +23,9 @@ import { MessageInProgress, RunMetadata, PredictStateTool, - LangGraphReasoning + LangGraphReasoning, + StateEnrichment, + LangGraphTool, } from "./types"; import { AbstractAgent, @@ -61,7 +63,7 @@ import { getStreamPayloadInput, langchainMessagesToAgui, resolveMessageContent, - resolveReasoningContent + resolveReasoningContent, } from "@/utils"; export type ProcessedEvents = @@ -123,14 +125,17 @@ export class LangGraphAgent extends AbstractAgent { // @ts-expect-error no need to initialize subscriber right now subscriber: Subscriber; constantSchemaKeys: string[] = DEFAULT_SCHEMA_KEYS; + activeStep?: string; + config: LangGraphAgentConfig; constructor(config: LangGraphAgentConfig) { super(config); + this.config = config; this.messagesInProcess = {}; this.agentName = config.agentName; this.graphId = config.graphId; this.assistantConfig = config.assistantConfig; - this.thinkingProcess = null + this.thinkingProcess = null; this.client = config?.client ?? new LangGraphClient({ @@ -140,6 +145,10 @@ export class LangGraphAgent extends AbstractAgent { }); } + public clone() { + return new LangGraphAgent(this.config); + } + dispatchEvent(event: ProcessedEvents) { this.subscriber.next(event); return true; @@ -162,58 +171,72 @@ export class LangGraphAgent extends AbstractAgent { this.assistant = await this.getAssistant(); } const threadId = input.threadId ?? randomUUID(); - const streamMode = input.forwardedProps?.streamMode ?? 
(["events", "values", "updates"] satisfies StreamMode[]) - const preparedStream = await this.prepareStream({ ...input, threadId }, streamMode) + const streamMode = + input.forwardedProps?.streamMode ?? (["events", "values", "updates"] satisfies StreamMode[]); + const preparedStream = await this.prepareStream({ ...input, threadId }, streamMode); if (!preparedStream) { - return subscriber.error('No stream to regenerate'); + return subscriber.error("No stream to regenerate"); } - await this.handleStreamEvents(preparedStream, threadId, subscriber, input, streamMode) + await this.handleStreamEvents(preparedStream, threadId, subscriber, input, streamMode); } async prepareRegenerateStream(input: RegenerateInput, streamMode: StreamMode | StreamMode[]) { - const { threadId, messageCheckpoint, tools } = input + const { threadId, messageCheckpoint } = input; - const timeTravelCheckpoint = await this.getCheckpointByMessage(messageCheckpoint!.id!, threadId); + const timeTravelCheckpoint = await this.getCheckpointByMessage( + messageCheckpoint!.id!, + threadId, + ); if (!this.assistant) { this.assistant = await this.getAssistant(); } if (!timeTravelCheckpoint) { - return this.subscriber.error('No checkpoint found for message'); + return this.subscriber.error("No checkpoint found for message"); } - const fork = await this.client.threads.updateState( - threadId, - { - values: this.langGraphDefaultMergeState(timeTravelCheckpoint.values, [], tools), - checkpointId: timeTravelCheckpoint.checkpoint.checkpoint_id!, - asNode: timeTravelCheckpoint.next?.[0] ?? "__start__" - } - ); + const fork = await this.client.threads.updateState(threadId, { + values: this.langGraphDefaultMergeState(timeTravelCheckpoint.values, [], input), + checkpointId: timeTravelCheckpoint.checkpoint.checkpoint_id!, + asNode: timeTravelCheckpoint.next?.[0] ?? "__start__", + }); + const payload = { + ...(input.forwardedProps ?? 
{}), + input: this.langGraphDefaultMergeState( + timeTravelCheckpoint.values, + [messageCheckpoint], + input, + ), + // @ts-ignore + checkpointId: fork.checkpoint.checkpoint_id!, + streamMode, + }; return { - streamResponse: this.client.runs.stream(threadId, this.assistant.assistant_id, { - input: this.langGraphDefaultMergeState(timeTravelCheckpoint.values, [messageCheckpoint], tools), - // @ts-ignore - checkpointId: fork.checkpoint.checkpoint_id!, - streamMode, - }), - state: timeTravelCheckpoint, + streamResponse: this.client.runs.stream(threadId, this.assistant.assistant_id, payload), + state: timeTravelCheckpoint as ThreadState, streamMode, }; } async prepareStream(input: RunAgentExtendedInput, streamMode: StreamMode | StreamMode[]) { - let { threadId: inputThreadId, state: inputState, messages, tools, context, forwardedProps } = input; + let { + threadId: inputThreadId, + state: inputState, + messages, + tools, + context, + forwardedProps, + } = input; // If a manual emittance happens, it is the ultimate source of truth of state, unless a node has exited. // Therefore, this value should either hold null, or the only edition of state that should be used. this.activeRun!.manuallyEmittedState = null; const nodeNameInput = forwardedProps?.nodeName; this.activeRun!.nodeName = nodeNameInput; - if (this.activeRun!.nodeName === '__end__') { + if (this.activeRun!.nodeName === "__end__") { this.activeRun!.nodeName = undefined; } @@ -226,19 +249,30 @@ export class LangGraphAgent extends AbstractAgent { const thread = await this.getOrCreateThread(threadId, forwardedProps?.threadMetadata); this.activeRun!.threadId = thread.thread_id; - const agentState: ThreadState = await this.client.threads.getState(thread.thread_id) ?? { values: {} } as ThreadState - const agentStateMessages = agentState.values.messages ?? 
[] - const inputMessagesToLangchain = aguiMessagesToLangChain(messages) - const stateValuesDiff = this.langGraphDefaultMergeState({ ...inputState, messages: agentStateMessages }, inputMessagesToLangchain, tools); + const agentState: ThreadState = + (await this.client.threads.getState(thread.thread_id)) ?? + ({ values: {} } as ThreadState); + const agentStateMessages = agentState.values.messages ?? []; + const inputMessagesToLangchain = aguiMessagesToLangChain(messages); + const stateValuesDiff = this.langGraphDefaultMergeState( + { ...inputState, messages: agentStateMessages }, + inputMessagesToLangchain, + input, + ); // Messages are a combination of existing messages in state + everything that was newly sent let threadState = { ...agentState, - values: { ...stateValuesDiff, messages: [...agentStateMessages, ...stateValuesDiff.messages] }, - } + values: { + ...stateValuesDiff, + messages: [...agentStateMessages, ...(stateValuesDiff.messages ?? [])], + }, + }; let stateValues = threadState.values; this.activeRun!.schemaKeys = await this.getSchemaKeys(); - if ((agentState.values.messages ?? []).length > messages.filter((m) => m.role !== "system").length) { + if ( + (agentState.values.messages ?? 
[]).length > messages.filter((m) => m.role !== "system").length + ) { let lastUserMessage: LangGraphMessage | null = null; // Find the first user message by working backwards from the last message for (let i = messages.length - 1; i >= 0; i--) { @@ -249,20 +283,28 @@ export class LangGraphAgent extends AbstractAgent { } if (!lastUserMessage) { - return this.subscriber.error('No user message found in messages to regenerate'); + return this.subscriber.error("No user message found in messages to regenerate"); } - return this.prepareRegenerateStream({ ...input, messageCheckpoint: lastUserMessage }, streamMode) + return this.prepareRegenerateStream( + { ...input, messageCheckpoint: lastUserMessage }, + streamMode, + ); } this.activeRun!.graphInfo = await this.client.assistants.getGraph(this.assistant.assistant_id); const mode = - !forwardedProps?.command?.resume && threadId && this.activeRun!.nodeName != "__end__" && this.activeRun!.nodeName + !forwardedProps?.command?.resume && + threadId && + this.activeRun!.nodeName != "__end__" && + this.activeRun!.nodeName ? "continue" : "start"; if (mode === "continue") { - const nodeBefore = this.activeRun!.graphInfo.edges.find(e => e.target === this.activeRun!.nodeName); + const nodeBefore = this.activeRun!.graphInfo.edges.find( + (e) => e.target === this.activeRun!.nodeName, + ); await this.client.threads.updateState(threadId, { values: inputState, asNode: nodeBefore?.source, @@ -291,6 +333,10 @@ export class LangGraphAgent extends AbstractAgent { streamMode, input: payloadInput, config: payloadConfig, + context: { + ...context, + ...(payloadConfig?.configurable ?? 
{}), + } }; // If there are still outstanding unresolved interrupts, we must force resolution of them before moving forward @@ -324,17 +370,19 @@ export class LangGraphAgent extends AbstractAgent { // @ts-ignore streamResponse: this.client.runs.stream(threadId, this.assistant.assistant_id, payload), state: threadState as ThreadState, - } + }; } async handleStreamEvents( - stream: Awaited | ReturnType>, + stream: Awaited< + ReturnType | ReturnType + >, threadId: string, subscriber: Subscriber, input: RunAgentExtendedInput, - streamMode: StreamMode | StreamMode[] + streamMode: StreamMode | StreamMode[], ) { - const { forwardedProps } = input + const { forwardedProps } = input; const nodeNameInput = forwardedProps?.nodeName; this.subscriber = subscriber; let shouldExit = false; @@ -355,12 +403,18 @@ export class LangGraphAgent extends AbstractAgent { // In case of resume (interrupt), re-start resumed step if (forwardedProps?.command?.resume && this.activeRun!.nodeName) { - this.startStep(this.activeRun!.nodeName) + this.startStep(this.activeRun!.nodeName); } for await (let streamResponseChunk of streamResponse) { + const subgraphsStreamEnabled = input.forwardedProps?.streamSubgraphs; + const isSubgraphStream = + subgraphsStreamEnabled && + (streamResponseChunk.event.startsWith("events") || + streamResponseChunk.event.startsWith("values")); + // @ts-ignore - if (!streamMode.includes(streamResponseChunk.event as StreamMode)) { + if (!streamMode.includes(streamResponseChunk.event as StreamMode) && !isSubgraphStream) { continue; } @@ -383,11 +437,19 @@ export class LangGraphAgent extends AbstractAgent { break; } - if (streamResponseChunk.event === "updates") continue; + if (streamResponseChunk.event === "updates") { + continue; + } if (streamResponseChunk.event === "values") { latestStateValues = chunk.data; continue; + } else if (subgraphsStreamEnabled && chunk.event.startsWith("values|")) { + latestStateValues = { + ...latestStateValues, + ...chunk.data, + }; + 
continue; } const chunkData = chunk.data; @@ -399,10 +461,10 @@ export class LangGraphAgent extends AbstractAgent { if (currentNodeName && currentNodeName !== this.activeRun!.nodeName) { if (this.activeRun!.nodeName && this.activeRun!.nodeName !== nodeNameInput) { - this.endStep() + this.endStep(); } - this.startStep(currentNodeName) + this.startStep(currentNodeName); } shouldExit = @@ -420,7 +482,7 @@ export class LangGraphAgent extends AbstractAgent { // we only want to update the node name under certain conditions // since we don't need any internal node names to be sent to the frontend if (this.activeRun!.graphInfo?.["nodes"].some((node) => node.id === currentNodeName)) { - this.activeRun!.nodeName = currentNodeName + this.activeRun!.nodeName = currentNodeName; } updatedState.values = this.activeRun!.manuallyEmittedState ?? latestStateValues; @@ -456,18 +518,17 @@ export class LangGraphAgent extends AbstractAgent { } state = await this.client.threads.getState(threadId); - const tasks = state.tasks + const tasks = state.tasks; const interrupts = (tasks?.[0]?.interrupts ?? []) as Interrupt[]; - const isEndNode = state.next.length === 0 - const writes = state.metadata?.writes ?? {} + const isEndNode = state.next.length === 0; + const writes = state.metadata?.writes ?? {}; - let newNodeName = this.activeRun!.nodeName! + let newNodeName = this.activeRun!.nodeName!; if (!interrupts?.length) { - newNodeName = isEndNode ? '__end__' : (state.next[0] ?? Object.keys(writes)[0]); + newNodeName = isEndNode ? "__end__" : (state.next[0] ?? 
Object.keys(writes)[0]); } - interrupts.forEach((interrupt) => { this.dispatchEvent({ type: EventType.CUSTOM, @@ -479,14 +540,14 @@ export class LangGraphAgent extends AbstractAgent { }); if (this.activeRun!.nodeName != newNodeName) { - this.endStep() - this.startStep(newNodeName) + this.endStep(); + this.startStep(newNodeName); } - this.endStep() + this.endStep(); this.dispatchEvent({ type: EventType.STATE_SNAPSHOT, - snapshot: this.getStateSnapshot(state.values), + snapshot: this.getStateSnapshot(state), }); this.dispatchEvent({ type: EventType.MESSAGES_SNAPSHOT, @@ -532,17 +593,17 @@ export class LangGraphAgent extends AbstractAgent { hasCurrentStream && !currentStream?.toolCallId && !isMessageContentEvent; if (reasoningData) { - this.handleThinkingEvent(reasoningData) + this.handleThinkingEvent(reasoningData); break; } if (!reasoningData && this.thinkingProcess) { this.dispatchEvent({ type: EventType.THINKING_TEXT_MESSAGE_END, - }) + }); this.dispatchEvent({ type: EventType.THINKING_END, - }) + }); this.thinkingProcess = null; } @@ -707,7 +768,9 @@ export class LangGraphAgent extends AbstractAgent { this.activeRun!.manuallyEmittedState = event.data; this.dispatchEvent({ type: EventType.STATE_SNAPSHOT, - snapshot: this.getStateSnapshot(this.activeRun!.manuallyEmittedState!), + snapshot: this.getStateSnapshot({ + values: this.activeRun!.manuallyEmittedState!, + } as ThreadState), rawEvent: event, }); } @@ -733,11 +796,11 @@ export class LangGraphAgent extends AbstractAgent { if (this.thinkingProcess.type) { this.dispatchEvent({ type: EventType.THINKING_TEXT_MESSAGE_END, - }) + }); } this.dispatchEvent({ type: EventType.THINKING_END, - }) + }); this.thinkingProcess = null; } @@ -745,29 +808,29 @@ export class LangGraphAgent extends AbstractAgent { // No thinking step yet. 
Start a new one this.dispatchEvent({ type: EventType.THINKING_START, - }) + }); this.thinkingProcess = { index: thinkingStepIndex, }; } - if (this.thinkingProcess.type !== reasoningData.type) { this.dispatchEvent({ type: EventType.THINKING_TEXT_MESSAGE_START, - }) - this.thinkingProcess.type = reasoningData.type + }); + this.thinkingProcess.type = reasoningData.type; } if (this.thinkingProcess.type) { this.dispatchEvent({ type: EventType.THINKING_TEXT_MESSAGE_CONTENT, - delta: reasoningData.text - }) + delta: reasoningData.text, + }); } } - getStateSnapshot(state: State) { + getStateSnapshot(threadState: ThreadState) { + let state = threadState.values; const schemaKeys = this.activeRun!.schemaKeys!; // Do not emit state keys that are not part of the output schema if (schemaKeys?.output) { @@ -785,7 +848,7 @@ export class LangGraphAgent extends AbstractAgent { } catch (error) { thread = await this.createThread({ threadId, - metadata: threadMetadata + metadata: threadMetadata, }); } } catch (error: unknown) { @@ -804,10 +867,10 @@ export class LangGraphAgent extends AbstractAgent { } async mergeConfigs({ - configs, - assistant, - schemaKeys, - }: { + configs, + assistant, + schemaKeys, + }: { configs: Config[]; assistant: Assistant; schemaKeys: SchemaKeys; @@ -818,9 +881,9 @@ export class LangGraphAgent extends AbstractAgent { if (cfg.configurable) { filteredConfigurable = schemaKeys?.config ? filterObjectBySchemaKeys(cfg?.configurable, [ - ...this.constantSchemaKeys, - ...(schemaKeys?.config ?? []), - ]) + ...this.constantSchemaKeys, + ...(schemaKeys?.config ?? 
[]), + ]) : cfg?.configurable; } @@ -840,7 +903,7 @@ export class LangGraphAgent extends AbstractAgent { const isOnlyRecursionLimitDifferent = isRecursionLimitSetToDefault && JSON.stringify({ ...newConfig, recursion_limit: null }) === - JSON.stringify({ ...acc, recursion_limit: null }); + JSON.stringify({ ...acc, recursion_limit: null }); if (configsAreDifferent && !isOnlyRecursionLimitDifferent) { return { @@ -870,8 +933,7 @@ export class LangGraphAgent extends AbstractAgent { async getAssistant(): Promise { const assistants = await this.client.assistants.search(); const retrievedAssistant = assistants.find( - (searchResult) => - searchResult.graph_id === this.graphId, + (searchResult) => searchResult.graph_id === this.graphId, ); if (!retrievedAssistant) { console.error(` @@ -889,26 +951,35 @@ export class LangGraphAgent extends AbstractAgent { try { const graphSchema = await this.client.assistants.getSchemas(this.assistant!.assistant_id); let configSchema = null; + let contextSchema: string[] = [] + if ('context_schema' in graphSchema && graphSchema.context_schema?.properties) { + contextSchema = Object.keys(graphSchema.context_schema.properties); + } if (graphSchema.config_schema?.properties) { configSchema = Object.keys(graphSchema.config_schema.properties); } if (!graphSchema.input_schema?.properties || !graphSchema.output_schema?.properties) { - return { config: [], input: null, output: null }; + return { config: [], input: null, output: null, context: contextSchema }; } const inputSchema = Object.keys(graphSchema.input_schema.properties); const outputSchema = Object.keys(graphSchema.output_schema.properties); return { - input: inputSchema && inputSchema.length ? [...inputSchema, ...this.constantSchemaKeys] : null, - output: outputSchema && outputSchema.length ? [...outputSchema, ...this.constantSchemaKeys] : null, + input: + inputSchema && inputSchema.length ? 
[...inputSchema, ...this.constantSchemaKeys] : null, + output: + outputSchema && outputSchema.length + ? [...outputSchema, ...this.constantSchemaKeys] + : null, + context: contextSchema, config: configSchema, }; } catch (e) { - return { config: [], input: this.constantSchemaKeys, output: this.constantSchemaKeys }; + return { config: [], input: this.constantSchemaKeys, output: this.constantSchemaKeys, context: [] }; } } - langGraphDefaultMergeState(state: State, messages: LangGraphMessage[], tools: any): State { + langGraphDefaultMergeState(state: State, messages: LangGraphMessage[], input: RunAgentExtendedInput): State { if (messages.length > 0 && "role" in messages[0] && messages[0].role === "system") { // remove system message messages = messages.slice(1); @@ -920,9 +991,9 @@ export class LangGraphAgent extends AbstractAgent { const newMessages = messages.filter((message) => !existingMessageIds.has(message.id)); - const langGraphTools = [...(state.tools ?? []), ...(tools ?? [])].map(tool => { + const langGraphTools: LangGraphTool[] = [...(state.tools ?? []), ...(input.tools ?? [])].map((tool) => { if (tool.type) { - return tool + return tool; } return { @@ -931,34 +1002,43 @@ export class LangGraphAgent extends AbstractAgent { name: tool.name, description: tool.description, parameters: tool.parameters, - } - } - }) + }, + }; + }); return { ...state, messages: newMessages, tools: langGraphTools, + 'ag-ui': { + tools: langGraphTools, + context: input.context, + } }; } startStep(nodeName: string) { + if (this.activeStep) { + this.endStep(); + } this.dispatchEvent({ type: EventType.STEP_STARTED, stepName: nodeName, }); this.activeRun!.nodeName = nodeName; + this.activeStep = nodeName; } endStep() { - if (!this.activeRun!.nodeName) { + if (!this.activeStep) { throw new Error("No active step to end"); } this.dispatchEvent({ type: EventType.STEP_FINISHED, - stepName: this.activeRun!.nodeName!, + stepName: this.activeRun!.nodeName! ?? 
this.activeStep, }); this.activeRun!.nodeName = undefined; + this.activeStep = undefined; } async getCheckpointByMessage( @@ -967,11 +1047,13 @@ export class LangGraphAgent extends AbstractAgent { checkpoint?: null | { checkpoint_id?: null | string; checkpoint_ns: string; - } + }, ): Promise { - const options = checkpoint?.checkpoint_id ? { - checkpoint: { checkpoint_id: checkpoint.checkpoint_id } - } : undefined + const options = checkpoint?.checkpoint_id + ? { + checkpoint: { checkpoint_id: checkpoint.checkpoint_id }, + } + : undefined; const history = await this.client.threads.getHistory(threadId, options); const reversed = [...history].reverse(); // oldest → newest @@ -987,14 +1069,17 @@ export class LangGraphAgent extends AbstractAgent { ); const messagesAfter = targetStateMessages.slice(messageIndex + 1); if (messagesAfter.length) { - return this.getCheckpointByMessage(messageId, threadId, targetState.parent_checkpoint) + return this.getCheckpointByMessage(messageId, threadId, targetState.parent_checkpoint); } const targetStateIndex = reversed.indexOf(targetState); - const { messages, ...targetStateValuesWithoutMessages } = targetState.values as State - const selectedCheckpoint = reversed[targetStateIndex - 1] ?? { ...targetState, values: {} } - return { ...selectedCheckpoint, values: { ...selectedCheckpoint.values, ...targetStateValuesWithoutMessages } }; + const { messages, ...targetStateValuesWithoutMessages } = targetState.values as State; + const selectedCheckpoint = reversed[targetStateIndex - 1] ?? 
{ ...targetState, values: {} }; + return { + ...selectedCheckpoint, + values: { ...selectedCheckpoint.values, ...targetStateValuesWithoutMessages }, + }; } } diff --git a/typescript-sdk/integrations/langgraph/src/types.ts b/typescript-sdk/integrations/langgraph/src/types.ts index 0a94756ee..a35a3c48c 100644 --- a/typescript-sdk/integrations/langgraph/src/types.ts +++ b/typescript-sdk/integrations/langgraph/src/types.ts @@ -1,5 +1,6 @@ -import { AssistantGraph, Message } from "@langchain/langgraph-sdk"; +import { AssistantGraph, Message as LangGraphMessage, } from "@langchain/langgraph-sdk"; import { MessageType } from "@langchain/core/messages"; +import { RunAgentInput } from "@ag-ui/core"; export enum LangGraphEventTypes { OnChainStart = "on_chain_start", @@ -14,11 +15,31 @@ export enum LangGraphEventTypes { OnInterrupt = "on_interrupt", } -export type State = Record; +export type LangGraphTool = { + type: "function"; + function: { + name: string; + description: string; + parameters: any; + }, +} + +export type State> = { + [k in keyof TDefinedState]: TDefinedState[k] | null; +} & Record; +export interface StateEnrichment { + messages: LangGraphMessage[]; + tools: LangGraphTool[]; + 'ag-ui': { + tools: LangGraphTool[]; + context: RunAgentInput['context'] + } +} export type SchemaKeys = { input: string[] | null; output: string[] | null; + context: string[] | null; config: string[] | null; } | null; @@ -54,7 +75,7 @@ export interface ToolCall { } type BaseLangGraphPlatformMessage = Omit< - Message, + LangGraphMessage, | "isResultMessage" | "isTextMessage" | "isImageMessage" diff --git a/typescript-sdk/integrations/llamaindex/package.json b/typescript-sdk/integrations/llamaindex/package.json index d2ba05733..5ae6ca3cb 100644 --- a/typescript-sdk/integrations/llamaindex/package.json +++ b/typescript-sdk/integrations/llamaindex/package.json @@ -23,13 +23,14 @@ "link:global": "pnpm link --global", "unlink:global": "pnpm unlink --global" }, - "dependencies": { - 
"@ag-ui/client": "workspace:*" - }, "peerDependencies": { + "@ag-ui/core": ">=0.0.37", + "@ag-ui/client": ">=0.0.37", "rxjs": "7.8.1" }, "devDependencies": { + "@ag-ui/core": "workspace:*", + "@ag-ui/client": "workspace:*", "@types/jest": "^29.5.14", "@types/node": "^20.11.19", "jest": "^29.7.0", diff --git a/typescript-sdk/integrations/mastra/example/src/mastra/agents/tool-based-generative-ui.ts b/typescript-sdk/integrations/mastra/example/src/mastra/agents/tool-based-generative-ui.ts index 45d54c392..20c231078 100644 --- a/typescript-sdk/integrations/mastra/example/src/mastra/agents/tool-based-generative-ui.ts +++ b/typescript-sdk/integrations/mastra/example/src/mastra/agents/tool-based-generative-ui.ts @@ -11,21 +11,6 @@ export const toolBasedGenerativeUIAgent = new Agent({ You are a helpful haiku assistant that provides the user with a haiku. `, model: openai("gpt-4o-mini"), - tools: { - generate_haiku: createTool({ - id: "generate_haiku", - description: - "Generate a haiku in Japanese and its English translation. 
Also select exactly 3 relevant images from the provided list based on the haiku's theme.", - inputSchema: z.object({ - japanese: z.array(z.string()).describe("An array of three lines of the haiku in Japanese"), - english: z.array(z.string()).describe("An array of three lines of the haiku in English"), - }), - outputSchema: z.string(), - execute: async ({ context }) => { - return "Haiku generated."; - }, - }), - }, memory: new Memory({ storage: new LibSQLStore({ url: "file:../mastra.db", // path is relative to the .mastra/output directory diff --git a/typescript-sdk/integrations/mastra/package.json b/typescript-sdk/integrations/mastra/package.json index 3afd63d91..38bb63908 100644 --- a/typescript-sdk/integrations/mastra/package.json +++ b/typescript-sdk/integrations/mastra/package.json @@ -1,6 +1,6 @@ { "name": "@ag-ui/mastra", - "version": "0.0.8", + "version": "0.0.11", "license": "Apache-2.0", "main": "./dist/index.js", "module": "./dist/index.mjs", @@ -14,6 +14,25 @@ "dist/**", "README.md" ], + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.mjs", + "require": "./dist/index.js" + }, + "./copilotkit": { + "types": "./dist/copilotkit.d.ts", + "import": "./dist/copilotkit.mjs", + "require": "./dist/copilotkit.js" + } + }, + "typesVersions": { + "*": { + "copilotkit": [ + "dist/copilotkit.d.ts" + ] + } + }, "scripts": { "build": "tsup", "dev": "tsup --watch", @@ -23,18 +42,35 @@ "link:global": "pnpm link --global", "unlink:global": "pnpm unlink --global" }, + "tsup": { + "entry": { + "index": "src/index.ts", + "copilotkit": "src/copilotkit.ts" + }, + "dts": true, + "format": [ + "cjs", + "esm" + ], + "splitting": false, + "sourcemap": true, + "clean": true + }, "dependencies": { - "@ag-ui/client": "workspace:*", "@ai-sdk/ui-utils": "^1.1.19", "@mastra/client-js": "^0.10.18", "rxjs": "7.8.1" }, "peerDependencies": { + "@ag-ui/core": ">=0.0.37", + "@ag-ui/client": ">=0.0.37", "@copilotkit/runtime": "^1.9.3", - "@mastra/core": "^0.11.1 
|| ^0.12.1 || ^0.13.0", + "@mastra/core": ">=0.11.1", "zod": "^3.25.67" }, "devDependencies": { + "@ag-ui/core": "workspace:*", + "@ag-ui/client": "workspace:*", "@mastra/core": "^0.13.0", "@types/jest": "^29.5.14", "@types/node": "^20.11.19", diff --git a/typescript-sdk/integrations/mastra/src/mastra.ts b/typescript-sdk/integrations/mastra/src/mastra.ts index 8c0c2babe..f8310767b 100644 --- a/typescript-sdk/integrations/mastra/src/mastra.ts +++ b/typescript-sdk/integrations/mastra/src/mastra.ts @@ -56,7 +56,7 @@ export class MastraAgent extends AbstractAgent { super(rest); this.agent = agent; this.resourceId = resourceId; - this.runtimeContext = runtimeContext; + this.runtimeContext = runtimeContext ?? new RuntimeContext(); } protected run(input: RunAgentInput): Observable { @@ -227,7 +227,7 @@ export class MastraAgent extends AbstractAgent { * @returns The stream of the mastra agent. */ private async streamMastraAgent( - { threadId, runId, messages, tools }: RunAgentInput, + { threadId, runId, messages, tools, context: inputContext }: RunAgentInput, { onTextPart, onFinishMessagePart, @@ -250,6 +250,7 @@ export class MastraAgent extends AbstractAgent { ); const resourceId = this.resourceId ?? 
threadId; const convertedMessages = convertAGUIMessagesToMastra(messages); + this.runtimeContext?.set('ag-ui', { context: inputContext }); const runtimeContext = this.runtimeContext; if (this.isLocalMastraAgent(this.agent)) { diff --git a/typescript-sdk/integrations/mastra/src/utils.ts b/typescript-sdk/integrations/mastra/src/utils.ts index 598e27d88..17385c21d 100644 --- a/typescript-sdk/integrations/mastra/src/utils.ts +++ b/typescript-sdk/integrations/mastra/src/utils.ts @@ -1,17 +1,9 @@ import type { Message } from "@ag-ui/client"; import { AbstractAgent } from "@ag-ui/client"; -import { - CopilotRuntime, - copilotRuntimeNodeHttpEndpoint, - CopilotServiceAdapter, - ExperimentalEmptyAdapter, -} from "@copilotkit/runtime"; -import type { CoreMessage } from "@mastra/core"; -import { registerApiRoute } from "@mastra/core/server"; -import type { Mastra } from "@mastra/core"; +import { MastraClient } from "@mastra/client-js"; +import type { CoreMessage, Mastra } from "@mastra/core"; import { Agent as LocalMastraAgent } from "@mastra/core/agent"; import { RuntimeContext } from "@mastra/core/runtime-context"; -import { MastraClient } from "@mastra/client-js"; import { MastraAgent } from "./mastra"; export function convertAGUIMessagesToMastra(messages: Message[]): CoreMessage[] { @@ -66,53 +58,6 @@ export function convertAGUIMessagesToMastra(messages: Message[]): CoreMessage[] return result; } -export function registerCopilotKit | unknown = unknown>({ - path, - resourceId, - serviceAdapter = new ExperimentalEmptyAdapter(), - agents, - setContext, -}: { - path: string; - resourceId: string; - serviceAdapter?: CopilotServiceAdapter; - agents?: Record; - setContext?: (c: any, runtimeContext: RuntimeContext) => void | Promise; -}) { - return registerApiRoute(path, { - method: `ALL`, - handler: async (c) => { - const mastra = c.get("mastra"); - - const runtimeContext = new RuntimeContext(); - - if (setContext) { - await setContext(c, runtimeContext); - } - - const aguiAgents 
= - agents || - MastraAgent.getLocalAgents({ - resourceId, - mastra, - runtimeContext, - }); - - const runtime = new CopilotRuntime({ - agents: aguiAgents, - }); - - const handler = copilotRuntimeNodeHttpEndpoint({ - endpoint: path, - runtime, - serviceAdapter, - }); - - return handler.handle(c.req.raw, {}); - }, - }); -} - export interface GetRemoteAgentsOptions { mastraClient: MastraClient; resourceId?: string; diff --git a/typescript-sdk/integrations/pydantic-ai/examples/server/api/agentic_generative_ui.py b/typescript-sdk/integrations/pydantic-ai/examples/server/api/agentic_generative_ui.py index e6043a554..34f52fa86 100644 --- a/typescript-sdk/integrations/pydantic-ai/examples/server/api/agentic_generative_ui.py +++ b/typescript-sdk/integrations/pydantic-ai/examples/server/api/agentic_generative_ui.py @@ -58,6 +58,7 @@ class JSONPatchOp(BaseModel): - Do NOT repeat the plan or summarise it in a message - Do NOT confirm the creation or updates in a message - Do NOT ask the user for additional information or next steps + - Do NOT leave a plan hanging, always complete the plan via `update_plan_step` if one is ongoing. Only one plan can be active at a time, so do not call the `create_plan` tool again until all the steps in current plan are completed. diff --git a/typescript-sdk/integrations/pydantic-ai/examples/server/api/human_in_the_loop.py b/typescript-sdk/integrations/pydantic-ai/examples/server/api/human_in_the_loop.py index 3f4846297..f3caf0a36 100644 --- a/typescript-sdk/integrations/pydantic-ai/examples/server/api/human_in_the_loop.py +++ b/typescript-sdk/integrations/pydantic-ai/examples/server/api/human_in_the_loop.py @@ -16,6 +16,7 @@ When planning tasks use tools only, without any other messages. IMPORTANT: - Use the `generate_task_steps` tool to display the suggested steps to the user + - Do not call the `generate_task_steps` twice in a row, ever. 
- Never repeat the plan, or send a message detailing steps - If accepted, confirm the creation of the plan and the number of selected (enabled) steps only - If not accepted, ask the user for more information, DO NOT use the `generate_task_steps` tool again diff --git a/typescript-sdk/integrations/pydantic-ai/package.json b/typescript-sdk/integrations/pydantic-ai/package.json index a5be4e338..e53c3a2ae 100644 --- a/typescript-sdk/integrations/pydantic-ai/package.json +++ b/typescript-sdk/integrations/pydantic-ai/package.json @@ -18,13 +18,14 @@ "link:global": "pnpm link --global", "unlink:global": "pnpm unlink --global" }, - "dependencies": { - "@ag-ui/client": "workspace:*" - }, "peerDependencies": { + "@ag-ui/core": ">=0.0.37", + "@ag-ui/client": ">=0.0.37", "rxjs": "7.8.1" }, "devDependencies": { + "@ag-ui/core": "workspace:*", + "@ag-ui/client": "workspace:*", "@types/jest": "^29.5.14", "@types/node": "^20.11.19", "jest": "^29.7.0", diff --git a/typescript-sdk/integrations/vercel-ai-sdk/package.json b/typescript-sdk/integrations/vercel-ai-sdk/package.json index 9ae90bcc3..4979f4148 100644 --- a/typescript-sdk/integrations/vercel-ai-sdk/package.json +++ b/typescript-sdk/integrations/vercel-ai-sdk/package.json @@ -19,9 +19,13 @@ "unlink:global": "pnpm unlink --global" }, "peerDependencies": { + "@ag-ui/core": ">=0.0.37", + "@ag-ui/client": ">=0.0.37", "rxjs": "7.8.1" }, "devDependencies": { + "@ag-ui/core": "workspace:*", + "@ag-ui/client": "workspace:*", "@types/jest": "^29.5.14", "@types/node": "^20.11.19", "jest": "^29.7.0", @@ -30,7 +34,6 @@ "typescript": "^5.3.3" }, "dependencies": { - "@ag-ui/client": "workspace:*", "ai": "^4.3.16", "zod": "^3.22.4" } diff --git a/typescript-sdk/package.json b/typescript-sdk/package.json index b58c82c34..ccfd88464 100644 --- a/typescript-sdk/package.json +++ b/typescript-sdk/package.json @@ -7,6 +7,7 @@ "clean": "rm -rf dist .turbo node_modules && pnpm -r clean", "build:clean": "rm -rf dist .turbo node_modules && pnpm -r 
clean && pnpm install && turbo run build", "dev": "turbo run dev", + "start": "turbo run start", "lint": "turbo run lint", "format": "prettier --write \"**/*.{ts,tsx,md,mdx}\"", "check-types": "turbo run check-types", @@ -14,6 +15,7 @@ "bump": "pnpm --filter './packages/*' exec -- pnpm version", "bump:alpha": "pnpm --filter './packages/*' exec -- pnpm version --preid alpha", "publish": "pnpm -r clean && pnpm install && turbo run build && pnpm publish -r --filter='./packages/*'", + "publish:integrations": "pnpm -r clean && pnpm install && turbo run build && pnpm publish -r --filter='./integrations/*'", "publish:alpha": "pnpm -r clean && pnpm install && turbo run build && pnpm publish -r --no-git-checks --filter='./packages/*' --tag alpha" }, "devDependencies": { diff --git a/typescript-sdk/packages/cli/package.json b/typescript-sdk/packages/cli/package.json index adb123a3e..b8e37cb32 100644 --- a/typescript-sdk/packages/cli/package.json +++ b/typescript-sdk/packages/cli/package.json @@ -1,7 +1,7 @@ { "name": "create-ag-ui-app", "author": "Markus Ecker ", - "version": "0.0.38", + "version": "0.0.39", "private": false, "publishConfig": { "access": "public" diff --git a/typescript-sdk/packages/client/package.json b/typescript-sdk/packages/client/package.json index 772b9ae1b..c503b0268 100644 --- a/typescript-sdk/packages/client/package.json +++ b/typescript-sdk/packages/client/package.json @@ -1,7 +1,7 @@ { "name": "@ag-ui/client", "author": "Markus Ecker ", - "version": "0.0.36", + "version": "0.0.37", "private": false, "publishConfig": { "access": "public" diff --git a/typescript-sdk/packages/client/src/agent/__tests__/legacy-bridged.test.ts b/typescript-sdk/packages/client/src/agent/__tests__/legacy-bridged.test.ts index 484910e56..26d1273be 100644 --- a/typescript-sdk/packages/client/src/agent/__tests__/legacy-bridged.test.ts +++ b/typescript-sdk/packages/client/src/agent/__tests__/legacy-bridged.test.ts @@ -160,7 +160,7 @@ 
describe("AbstractAgent.legacy_to_be_removed_runAgentBridged", () => { const legacyEvents = await lastValueFrom(legacy$.pipe(toArray())); // Verify events are in correct legacy format - expect(legacyEvents).toHaveLength(4); // Start, Content, End, AgentStateMessage + expect(legacyEvents).toHaveLength(3); // Start, Content, End // TextMessageStart expect(legacyEvents[0]).toMatchObject({ @@ -182,12 +182,12 @@ describe("AbstractAgent.legacy_to_be_removed_runAgentBridged", () => { }); // Final AgentStateMessage - expect(legacyEvents[3]).toMatchObject({ - type: "AgentStateMessage", - threadId: "test-thread-id", - agentName: "test-agent-id", - active: false, - }); + // expect(legacyEvents[3]).toMatchObject({ + // type: "AgentStateMessage", + // threadId: "test-thread-id", + // agentName: "test-agent-id", + // active: false, + // }); }); it("should pass configuration to the underlying run method", async () => { @@ -304,7 +304,7 @@ describe("AbstractAgent.legacy_to_be_removed_runAgentBridged", () => { const legacyEvents = await lastValueFrom(legacy$.pipe(toArray())); // Verify events are in correct legacy format - expect(legacyEvents).toHaveLength(4); // Start, Content, End, AgentStateMessage + expect(legacyEvents).toHaveLength(3); // Start, Content, End // TextMessageStart expect(legacyEvents[0]).toMatchObject({ @@ -326,12 +326,12 @@ describe("AbstractAgent.legacy_to_be_removed_runAgentBridged", () => { }); // Final AgentStateMessage - expect(legacyEvents[3]).toMatchObject({ - type: "AgentStateMessage", - threadId: "test-thread-id", - agentName: "test-agent-id", - active: false, - }); + // expect(legacyEvents[3]).toMatchObject({ + // type: "AgentStateMessage", + // threadId: "test-thread-id", + // agentName: "test-agent-id", + // active: false, + // }); }); it("should transform tool call events with results into legacy events with correct tool name", async () => { @@ -348,7 +348,7 @@ describe("AbstractAgent.legacy_to_be_removed_runAgentBridged", () => { const legacyEvents 
= await lastValueFrom(legacy$.pipe(toArray())); // Verify events are in correct legacy format - expect(legacyEvents).toHaveLength(5); // ActionExecutionStart, ActionExecutionArgs, ActionExecutionEnd, ActionExecutionResult, AgentStateMessage + expect(legacyEvents).toHaveLength(4); // ActionExecutionStart, ActionExecutionArgs, ActionExecutionEnd, ActionExecutionResult // ActionExecutionStart expect(legacyEvents[0]).toMatchObject({ @@ -379,11 +379,11 @@ describe("AbstractAgent.legacy_to_be_removed_runAgentBridged", () => { }); // Final AgentStateMessage - expect(legacyEvents[4]).toMatchObject({ - type: "AgentStateMessage", - threadId: "test-thread-id", - agentName: "test-agent-id", - active: false, - }); + // expect(legacyEvents[4]).toMatchObject({ + // type: "AgentStateMessage", + // threadId: "test-thread-id", + // agentName: "test-agent-id", + // active: false, + // }); }); }); diff --git a/typescript-sdk/packages/client/src/agent/index.ts b/typescript-sdk/packages/client/src/agent/index.ts index 09541bc42..945724cd0 100644 --- a/typescript-sdk/packages/client/src/agent/index.ts +++ b/typescript-sdk/packages/client/src/agent/index.ts @@ -1,3 +1,4 @@ export { AbstractAgent } from "./agent"; +export type { RunAgentResult } from "./agent"; export { HttpAgent } from "./http"; -export type { AgentConfig } from "./types"; +export type { AgentConfig, HttpAgentConfig, RunAgentParameters } from "./types"; diff --git a/typescript-sdk/packages/client/src/apply/default.ts b/typescript-sdk/packages/client/src/apply/default.ts index 0e9deb04c..8f720c7a0 100644 --- a/typescript-sdk/packages/client/src/apply/default.ts +++ b/typescript-sdk/packages/client/src/apply/default.ts @@ -3,6 +3,10 @@ import { TextMessageStartEvent, TextMessageContentEvent, Message, + DeveloperMessage, + SystemMessage, + AssistantMessage, + UserMessage, ToolCallStartEvent, ToolCallArgsEvent, StateSnapshotEvent, @@ -10,7 +14,6 @@ import { MessagesSnapshotEvent, CustomEvent, BaseEvent, - AssistantMessage, 
ToolCallResultEvent, ToolMessage, RunAgentInput, @@ -99,9 +102,10 @@ export const defaultApplyEvents = ( applyMutation(mutation); if (mutation.stopPropagation !== true) { - const { messageId, role } = event as TextMessageStartEvent; + const { messageId, role = "assistant" } = event as TextMessageStartEvent; // Create a new message using properties from the event + // Text messages can be developer, system, assistant, or user (not tool) const newMessage: Message = { id: messageId, role: role, @@ -116,6 +120,15 @@ export const defaultApplyEvents = ( } case EventType.TEXT_MESSAGE_CONTENT: { + const { messageId, delta } = event as TextMessageContentEvent; + + // Find the target message by ID + const targetMessage = messages.find((m) => m.id === messageId); + if (!targetMessage) { + console.warn(`TEXT_MESSAGE_CONTENT: No message found with ID '${messageId}'`); + return emitUpdates(); + } + const mutation = await runSubscribersWithMutation( subscribers, messages, @@ -127,17 +140,14 @@ export const defaultApplyEvents = ( state, agent, input, - textMessageBuffer: messages[messages.length - 1].content ?? "", + textMessageBuffer: targetMessage.content ?? "", }), ); applyMutation(mutation); if (mutation.stopPropagation !== true) { - const { delta } = event as TextMessageContentEvent; - - // Get the last message and append the content - const lastMessage = messages[messages.length - 1]; - lastMessage.content = lastMessage.content! 
+ delta; + // Append content to the correct message by ID + targetMessage.content = (targetMessage.content || "") + delta; applyMutation({ messages }); } @@ -145,6 +155,15 @@ export const defaultApplyEvents = ( } case EventType.TEXT_MESSAGE_END: { + const { messageId } = event as TextMessageEndEvent; + + // Find the target message by ID + const targetMessage = messages.find((m) => m.id === messageId); + if (!targetMessage) { + console.warn(`TEXT_MESSAGE_END: No message found with ID '${messageId}'`); + return emitUpdates(); + } + const mutation = await runSubscribersWithMutation( subscribers, messages, @@ -156,7 +175,7 @@ export const defaultApplyEvents = ( state, agent, input, - textMessageBuffer: messages[messages.length - 1].content ?? "", + textMessageBuffer: targetMessage.content ?? "", }), ); applyMutation(mutation); @@ -164,7 +183,7 @@ export const defaultApplyEvents = ( await Promise.all( subscribers.map((subscriber) => { subscriber.onNewMessage?.({ - message: messages[messages.length - 1], + message: targetMessage, messages, state, agent, @@ -233,17 +252,34 @@ export const defaultApplyEvents = ( } case EventType.TOOL_CALL_ARGS: { + const { toolCallId, delta } = event as ToolCallArgsEvent; + + // Find the message containing this tool call + const targetMessage = messages.find((m) => + (m as AssistantMessage).toolCalls?.some((tc) => tc.id === toolCallId), + ) as AssistantMessage; + + if (!targetMessage) { + console.warn( + `TOOL_CALL_ARGS: No message found containing tool call with ID '${toolCallId}'`, + ); + return emitUpdates(); + } + + // Find the specific tool call + const targetToolCall = targetMessage.toolCalls!.find((tc) => tc.id === toolCallId); + if (!targetToolCall) { + console.warn(`TOOL_CALL_ARGS: No tool call found with ID '${toolCallId}'`); + return emitUpdates(); + } + const mutation = await runSubscribersWithMutation( subscribers, messages, state, (subscriber, messages, state) => { - const toolCalls = - (messages[messages.length - 1] as 
AssistantMessage)?.toolCalls ?? []; - const toolCallBuffer = - toolCalls.length > 0 ? toolCalls[toolCalls.length - 1].function.arguments : ""; - const toolCallName = - toolCalls.length > 0 ? toolCalls[toolCalls.length - 1].function.name : ""; + const toolCallBuffer = targetToolCall.function.arguments; + const toolCallName = targetToolCall.function.name; let partialToolCallArgs = {}; try { // Parse from toolCallBuffer only (before current delta is applied) @@ -265,17 +301,8 @@ export const defaultApplyEvents = ( applyMutation(mutation); if (mutation.stopPropagation !== true) { - const { delta } = event as ToolCallArgsEvent; - - // Get the last message - const lastMessage = messages[messages.length - 1] as AssistantMessage; - - // Get the last tool call - const lastToolCall = lastMessage.toolCalls![lastMessage.toolCalls!.length - 1]; - - // Append the arguments - lastToolCall.function.arguments += delta; - + // Append the arguments to the correct tool call by ID + targetToolCall.function.arguments += delta; applyMutation({ messages }); } @@ -283,17 +310,34 @@ export const defaultApplyEvents = ( } case EventType.TOOL_CALL_END: { + const { toolCallId } = event as ToolCallEndEvent; + + // Find the message containing this tool call + const targetMessage = messages.find((m) => + (m as AssistantMessage).toolCalls?.some((tc) => tc.id === toolCallId), + ) as AssistantMessage; + + if (!targetMessage) { + console.warn( + `TOOL_CALL_END: No message found containing tool call with ID '${toolCallId}'`, + ); + return emitUpdates(); + } + + // Find the specific tool call + const targetToolCall = targetMessage.toolCalls!.find((tc) => tc.id === toolCallId); + if (!targetToolCall) { + console.warn(`TOOL_CALL_END: No tool call found with ID '${toolCallId}'`); + return emitUpdates(); + } + const mutation = await runSubscribersWithMutation( subscribers, messages, state, (subscriber, messages, state) => { - const toolCalls = - (messages[messages.length - 1] as AssistantMessage)?.toolCalls 
?? []; - const toolCallArgsString = - toolCalls.length > 0 ? toolCalls[toolCalls.length - 1].function.arguments : ""; - const toolCallName = - toolCalls.length > 0 ? toolCalls[toolCalls.length - 1].function.name : ""; + const toolCallArgsString = targetToolCall.function.arguments; + const toolCallName = targetToolCall.function.name; let toolCallArgs = {}; try { toolCallArgs = JSON.parse(toolCallArgsString); @@ -314,9 +358,7 @@ export const defaultApplyEvents = ( await Promise.all( subscribers.map((subscriber) => { subscriber.onNewToolCall?.({ - toolCall: (messages[messages.length - 1] as AssistantMessage).toolCalls![ - (messages[messages.length - 1] as AssistantMessage).toolCalls!.length - 1 - ], + toolCall: targetToolCall, messages, state, agent, diff --git a/typescript-sdk/packages/client/src/chunks/transform.ts b/typescript-sdk/packages/client/src/chunks/transform.ts index b275b6524..bb0ec6ba4 100644 --- a/typescript-sdk/packages/client/src/chunks/transform.ts +++ b/typescript-sdk/packages/client/src/chunks/transform.ts @@ -130,7 +130,7 @@ export const transformChunks = const textMessageStartEvent = { type: EventType.TEXT_MESSAGE_START, messageId: messageChunkEvent.messageId, - role: "assistant", + role: messageChunkEvent.role || "assistant", } as TextMessageStartEvent; textMessageResult.push(textMessageStartEvent); diff --git a/typescript-sdk/packages/client/src/legacy/convert.ts b/typescript-sdk/packages/client/src/legacy/convert.ts index b79335563..aa3ac5e6f 100644 --- a/typescript-sdk/packages/client/src/legacy/convert.ts +++ b/typescript-sdk/packages/client/src/legacy/convert.ts @@ -78,6 +78,7 @@ export const convertToLegacyEvents = { type: LegacyRuntimeEventTypes.enum.TextMessageStart, messageId: startEvent.messageId, + role: startEvent.role, } as LegacyTextMessageStart, ]; } @@ -127,7 +128,13 @@ export const convertToLegacyEvents = case EventType.TOOL_CALL_ARGS: { const argsEvent = event as ToolCallArgsEvent; - const currentToolCall = 
currentToolCalls[currentToolCalls.length - 1]; + // Find the tool call by ID instead of using the last one + const currentToolCall = currentToolCalls.find((tc) => tc.id === argsEvent.toolCallId); + if (!currentToolCall) { + console.warn(`TOOL_CALL_ARGS: No tool call found with ID '${argsEvent.toolCallId}'`); + return []; + } + currentToolCall.function.arguments += argsEvent.delta; let didUpdateState = false; @@ -297,6 +304,11 @@ export const convertToLegacyEvents = currentState.messages = syncedMessages; } + // Only do an update if state is not empty + if (Object.keys(currentState).length === 0) { + return []; + } + return [ { type: LegacyRuntimeEventTypes.enum.AgentStateMessage, diff --git a/typescript-sdk/packages/client/src/legacy/types.ts b/typescript-sdk/packages/client/src/legacy/types.ts index 7a5490cdd..1c45012dd 100644 --- a/typescript-sdk/packages/client/src/legacy/types.ts +++ b/typescript-sdk/packages/client/src/legacy/types.ts @@ -28,6 +28,7 @@ export const LegacyTextMessageStart = z.object({ type: z.literal(LegacyRuntimeEventTypes.enum.TextMessageStart), messageId: z.string(), parentMessageId: z.string().optional(), + role: z.string().optional(), }); export const LegacyTextMessageContent = z.object({ diff --git a/typescript-sdk/packages/client/src/verify/__tests__/verify.events.test.ts b/typescript-sdk/packages/client/src/verify/__tests__/verify.events.test.ts index 09307bf83..2303d9e09 100644 --- a/typescript-sdk/packages/client/src/verify/__tests__/verify.events.test.ts +++ b/typescript-sdk/packages/client/src/verify/__tests__/verify.events.test.ts @@ -36,7 +36,7 @@ describe("verifyEvents general validation", () => { error: (err) => { expect(err).toBeInstanceOf(AGUIError); expect(err.message).toContain( - `Cannot send 'TEXT_MESSAGE_CONTENT' event: Message ID mismatch. The ID 'different-id' doesn't match the active message ID 'msg1'.`, + `Cannot send 'TEXT_MESSAGE_CONTENT' event: No active text message found with ID 'different-id'. 
Start a text message with 'TEXT_MESSAGE_START' first.`, ); subscription.unsubscribe(); }, @@ -80,7 +80,7 @@ describe("verifyEvents general validation", () => { error: (err) => { expect(err).toBeInstanceOf(AGUIError); expect(err.message).toContain( - `Cannot send 'TEXT_MESSAGE_END' event: No active text message found. A 'TEXT_MESSAGE_START' event must be sent first.`, + `Cannot send 'TEXT_MESSAGE_END' event: No active text message found with ID 'msg1'. A 'TEXT_MESSAGE_START' event must be sent first.`, ); subscription.unsubscribe(); }, @@ -119,7 +119,7 @@ describe("verifyEvents general validation", () => { error: (err) => { expect(err).toBeInstanceOf(AGUIError); expect(err.message).toContain( - `Cannot send 'TOOL_CALL_ARGS' event: Tool call ID mismatch. The ID 'different-id' doesn't match the active tool call ID 't1'.`, + `Cannot send 'TOOL_CALL_ARGS' event: No active tool call found with ID 'different-id'. Start a tool call with 'TOOL_CALL_START' first.`, ); subscription.unsubscribe(); }, @@ -164,7 +164,7 @@ describe("verifyEvents general validation", () => { error: (err) => { expect(err).toBeInstanceOf(AGUIError); expect(err.message).toContain( - `Cannot send 'TOOL_CALL_END' event: No active tool call found. A 'TOOL_CALL_START' event must be sent first.`, + `Cannot send 'TOOL_CALL_END' event: No active tool call found with ID 't1'. A 'TOOL_CALL_START' event must be sent first.`, ); subscription.unsubscribe(); }, @@ -399,7 +399,7 @@ describe("verifyEvents events", () => { error: (err) => { expect(err).toBeInstanceOf(AGUIError); expect(err.message).toContain( - `Cannot send 'TEXT_MESSAGE_CONTENT' event: Message ID mismatch. The ID 'different-id' doesn't match the active message ID 'msg1'.`, + `Cannot send 'TEXT_MESSAGE_CONTENT' event: No active text message found with ID 'different-id'. 
Start a text message with 'TEXT_MESSAGE_START' first.`, ); subscription.unsubscribe(); }, @@ -443,7 +443,7 @@ describe("verifyEvents events", () => { error: (err) => { expect(err).toBeInstanceOf(AGUIError); expect(err.message).toContain( - `Cannot send 'TEXT_MESSAGE_END' event: No active text message found. A 'TEXT_MESSAGE_START' event must be sent first.`, + `Cannot send 'TEXT_MESSAGE_END' event: No active text message found with ID 'msg1'. A 'TEXT_MESSAGE_START' event must be sent first.`, ); subscription.unsubscribe(); }, @@ -482,7 +482,7 @@ describe("verifyEvents events", () => { error: (err) => { expect(err).toBeInstanceOf(AGUIError); expect(err.message).toContain( - `Cannot send 'TOOL_CALL_ARGS' event: Tool call ID mismatch. The ID 'different-id' doesn't match the active tool call ID 't1'.`, + `Cannot send 'TOOL_CALL_ARGS' event: No active tool call found with ID 'different-id'. Start a tool call with 'TOOL_CALL_START' first.`, ); subscription.unsubscribe(); }, @@ -527,7 +527,7 @@ describe("verifyEvents events", () => { error: (err) => { expect(err).toBeInstanceOf(AGUIError); expect(err.message).toContain( - `Cannot send 'TOOL_CALL_END' event: No active tool call found. A 'TOOL_CALL_START' event must be sent first.`, + `Cannot send 'TOOL_CALL_END' event: No active tool call found with ID 't1'. 
A 'TOOL_CALL_START' event must be sent first.`, ); subscription.unsubscribe(); }, diff --git a/typescript-sdk/packages/client/src/verify/__tests__/verify.lifecycle.test.ts b/typescript-sdk/packages/client/src/verify/__tests__/verify.lifecycle.test.ts index b9651abb2..849cec6b4 100644 --- a/typescript-sdk/packages/client/src/verify/__tests__/verify.lifecycle.test.ts +++ b/typescript-sdk/packages/client/src/verify/__tests__/verify.lifecycle.test.ts @@ -58,7 +58,7 @@ describe("verifyEvents lifecycle", () => { next: (event) => events.push(event), error: (err) => { expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain("Cannot send multiple 'RUN_STARTED' events"); + expect(err.message).toContain("Cannot send 'RUN_STARTED' while a run is still active"); subscription.unsubscribe(); }, }); diff --git a/typescript-sdk/packages/client/src/verify/__tests__/verify.text-messages.test.ts b/typescript-sdk/packages/client/src/verify/__tests__/verify.text-messages.test.ts index c73e880c7..e0eeab844 100644 --- a/typescript-sdk/packages/client/src/verify/__tests__/verify.text-messages.test.ts +++ b/typescript-sdk/packages/client/src/verify/__tests__/verify.text-messages.test.ts @@ -25,8 +25,8 @@ import { } from "@ag-ui/core"; describe("verifyEvents text messages", () => { - // Test: Cannot send lifecycle events inside a text message - it("should not allow lifecycle events inside a text message", async () => { + // Test: Cannot send TEXT_MESSAGE_CONTENT before TEXT_MESSAGE_START + it("should not allow TEXT_MESSAGE_CONTENT before TEXT_MESSAGE_START", async () => { const source$ = new Subject(); const events: BaseEvent[] = []; @@ -36,40 +36,37 @@ describe("verifyEvents text messages", () => { error: (err) => { expect(err).toBeInstanceOf(AGUIError); expect(err.message).toContain( - `Cannot send event type 'STEP_STARTED' after 'TEXT_MESSAGE_START'`, + `Cannot send 'TEXT_MESSAGE_CONTENT' event: No active text message found with ID '1'`, ); subscription.unsubscribe(); }, }); 
- // Start a valid run and open a text message + // Start a valid run source$.next({ type: EventType.RUN_STARTED, threadId: "test-thread-id", runId: "test-run-id", } as RunStartedEvent); - source$.next({ - type: EventType.TEXT_MESSAGE_START, - messageId: "1", - } as TextMessageStartEvent); - // Try to send a lifecycle event inside the text message + // Try to send content without starting a text message source$.next({ - type: EventType.STEP_STARTED, - stepName: "step1", - } as StepStartedEvent); + type: EventType.TEXT_MESSAGE_CONTENT, + messageId: "1", + delta: "content 1", + } as TextMessageContentEvent); // Complete the source and wait a bit for processing source$.complete(); await new Promise((resolve) => setTimeout(resolve, 100)); // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TEXT_MESSAGE_START); + expect(events.length).toBe(1); + expect(events[0].type).toBe(EventType.RUN_STARTED); }); - // Test: Cannot send tool-related events inside a text message - it("should not allow tool-related events inside a text message", async () => { + // Test: Cannot send TEXT_MESSAGE_END before TEXT_MESSAGE_START + it("should not allow TEXT_MESSAGE_END before TEXT_MESSAGE_START", async () => { const source$ = new Subject(); const events: BaseEvent[] = []; @@ -79,37 +76,32 @@ describe("verifyEvents text messages", () => { error: (err) => { expect(err).toBeInstanceOf(AGUIError); expect(err.message).toContain( - `Cannot send event type 'TOOL_CALL_START' after 'TEXT_MESSAGE_START'`, + `Cannot send 'TEXT_MESSAGE_END' event: No active text message found with ID '1'`, ); subscription.unsubscribe(); }, }); - // Start a valid run and open a text message + // Start a valid run source$.next({ type: EventType.RUN_STARTED, threadId: "test-thread-id", runId: "test-run-id", } as RunStartedEvent); - source$.next({ - type: EventType.TEXT_MESSAGE_START, - messageId: "1", - } as TextMessageStartEvent); - // Try to send 
a tool-related event inside the text message + // Try to end a text message without starting it source$.next({ - type: EventType.TOOL_CALL_START, - toolCallId: "t1", - toolCallName: "test-tool", - } as ToolCallStartEvent); + type: EventType.TEXT_MESSAGE_END, + messageId: "1", + } as TextMessageEndEvent); // Complete the source and wait a bit for processing source$.complete(); await new Promise((resolve) => setTimeout(resolve, 100)); // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TEXT_MESSAGE_START); + expect(events.length).toBe(1); + expect(events[0].type).toBe(EventType.RUN_STARTED); }); // Test: Should allow TEXT_MESSAGE_CONTENT inside a text message @@ -219,22 +211,21 @@ describe("verifyEvents text messages", () => { expect(result[3].type).toBe(EventType.RAW); }); - // Test: Should not allow CUSTOM inside a text message - it("should not allow CUSTOM inside a text message", async () => { + // Test: Should allow CUSTOM inside a text message + it("should allow CUSTOM inside a text message", async () => { const source$ = new Subject(); - const events: BaseEvent[] = []; - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain(`Cannot send event type 'CUSTOM' after 'TEXT_MESSAGE_START'`); - subscription.unsubscribe(); - }, - }); + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); - // Start a valid run and open a text message + // Send a valid sequence with a custom event inside a text message source$.next({ type: EventType.RUN_STARTED, threadId: "test-thread-id", @@ -244,84 +235,48 @@ describe("verifyEvents text messages", () => { type: 
EventType.TEXT_MESSAGE_START, messageId: "1", } as TextMessageStartEvent); - - // Try to send a meta event inside the text message + source$.next({ + type: EventType.TEXT_MESSAGE_CONTENT, + messageId: "1", + delta: "test content", + } as TextMessageContentEvent); source$.next({ type: EventType.CUSTOM, - name: "PredictState", - value: [{ state_key: "test", tool: "test-tool" }], + name: "test_event", + value: "test_value", } as CustomEvent); - - // Complete the source and wait a bit for processing - source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - - // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TEXT_MESSAGE_START); - }); - - // Test: Should not allow STATE_SNAPSHOT inside a text message - it("should not allow STATE_SNAPSHOT inside a text message", async () => { - const source$ = new Subject(); - const events: BaseEvent[] = []; - - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - `Cannot send event type 'STATE_SNAPSHOT' after 'TEXT_MESSAGE_START'`, - ); - subscription.unsubscribe(); - }, - }); - - // Start a valid run and open a text message source$.next({ - type: EventType.RUN_STARTED, - threadId: "test-thread-id", - runId: "test-run-id", - } as RunStartedEvent); - source$.next({ - type: EventType.TEXT_MESSAGE_START, + type: EventType.TEXT_MESSAGE_END, messageId: "1", - } as TextMessageStartEvent); - - // Try to send a state snapshot inside the text message - source$.next({ - type: EventType.STATE_SNAPSHOT, - snapshot: { test: true }, - } as StateSnapshotEvent); + } as TextMessageEndEvent); + source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); - // Complete the source and wait a bit for processing + // Complete the source 
source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TEXT_MESSAGE_START); + // Await the promise and expect no errors + const result = await promise; + + // Verify all events were processed + expect(result.length).toBe(6); + expect(result[3].type).toBe(EventType.CUSTOM); }); - // Test: Should not allow STATE_DELTA inside a text message - it("should not allow STATE_DELTA inside a text message", async () => { + // Test: Should allow STATE_SNAPSHOT inside a text message + it("should allow STATE_SNAPSHOT inside a text message", async () => { const source$ = new Subject(); - const events: BaseEvent[] = []; - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - `Cannot send event type 'STATE_DELTA' after 'TEXT_MESSAGE_START'`, - ); - subscription.unsubscribe(); - }, - }); + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); - // Start a valid run and open a text message + // Send a valid sequence with a state snapshot inside a text message source$.next({ type: EventType.RUN_STARTED, threadId: "test-thread-id", @@ -331,123 +286,50 @@ describe("verifyEvents text messages", () => { type: EventType.TEXT_MESSAGE_START, messageId: "1", } as TextMessageStartEvent); - - // Try to send a state delta inside the text message - source$.next({ - type: EventType.STATE_DELTA, - delta: [{ op: "add", path: "/test", value: true }], - } as StateDeltaEvent); - - // Complete the source and wait a bit for processing - source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - - // 
Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TEXT_MESSAGE_START); - }); - - // Test: Should not allow MESSAGES_SNAPSHOT inside a text message - it("should not allow MESSAGES_SNAPSHOT inside a text message", async () => { - const source$ = new Subject(); - const events: BaseEvent[] = []; - - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - `Cannot send event type 'MESSAGES_SNAPSHOT' after 'TEXT_MESSAGE_START'`, - ); - subscription.unsubscribe(); - }, - }); - - // Start a valid run and open a text message - source$.next({ - type: EventType.RUN_STARTED, - threadId: "test-thread-id", - runId: "test-run-id", - } as RunStartedEvent); source$.next({ - type: EventType.TEXT_MESSAGE_START, + type: EventType.TEXT_MESSAGE_CONTENT, messageId: "1", - } as TextMessageStartEvent); - - // Try to send a messages snapshot inside the text message + delta: "test content", + } as TextMessageContentEvent); source$.next({ - type: EventType.MESSAGES_SNAPSHOT, - messages: [{ role: "user", content: "test" }], - } as MessagesSnapshotEvent); - - // Complete the source and wait a bit for processing - source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - - // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TEXT_MESSAGE_START); - }); - - // Test: Cannot send RUN_FINISHED inside a text message - it("should not allow RUN_FINISHED inside a text message", async () => { - const source$ = new Subject(); - const events: BaseEvent[] = []; - - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: 
(err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - `Cannot send event type 'RUN_FINISHED' after 'TEXT_MESSAGE_START'`, - ); - subscription.unsubscribe(); + type: EventType.STATE_SNAPSHOT, + snapshot: { + state: "test_state", + data: { foo: "bar" }, }, - }); - - // Start a valid run and open a text message - source$.next({ - type: EventType.RUN_STARTED, - threadId: "test-thread-id", - runId: "test-run-id", - } as RunStartedEvent); + } as StateSnapshotEvent); source$.next({ - type: EventType.TEXT_MESSAGE_START, + type: EventType.TEXT_MESSAGE_END, messageId: "1", - } as TextMessageStartEvent); - - // Try to send RUN_FINISHED inside the text message + } as TextMessageEndEvent); source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); - // Complete the source and wait a bit for processing + // Complete the source source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TEXT_MESSAGE_START); + // Await the promise and expect no errors + const result = await promise; + + // Verify all events were processed + expect(result.length).toBe(6); + expect(result[3].type).toBe(EventType.STATE_SNAPSHOT); }); - // NEW TEST: Missing TEXT_MESSAGE_END - it("should not allow RUN_FINISHED when a text message hasn't been closed", async () => { + // Test: Should allow STATE_DELTA inside a text message + it("should allow STATE_DELTA inside a text message", async () => { const source$ = new Subject(); - const events: BaseEvent[] = []; - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - "Cannot send event type 'RUN_FINISHED' after 'TEXT_MESSAGE_START': Send 'TEXT_MESSAGE_END' first.", - ); 
- subscription.unsubscribe(); - }, - }); + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); - // Start a valid run and open a text message + // Send a valid sequence with a state delta inside a text message source$.next({ type: EventType.RUN_STARTED, threadId: "test-thread-id", @@ -460,82 +342,44 @@ describe("verifyEvents text messages", () => { source$.next({ type: EventType.TEXT_MESSAGE_CONTENT, messageId: "1", - delta: "content 1", + delta: "test content", } as TextMessageContentEvent); - - // Try to end the run without closing the text message - source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); - - // Complete the source and wait a bit for processing - source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - - // Verify only events before the error were processed - expect(events.length).toBe(3); - expect(events[2].type).toBe(EventType.TEXT_MESSAGE_CONTENT); - }); - - // NEW TEST: Nesting text messages - it("should not allow nested text messages", async () => { - const source$ = new Subject(); - const events: BaseEvent[] = []; - - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - "Cannot send event type 'TEXT_MESSAGE_START' after 'TEXT_MESSAGE_START': Send 'TEXT_MESSAGE_END' first.", - ); - subscription.unsubscribe(); - }, - }); - - // Start a valid run and open a text message source$.next({ - type: EventType.RUN_STARTED, - threadId: "test-thread-id", - runId: "test-run-id", - } as RunStartedEvent); + type: EventType.STATE_DELTA, + delta: [{ op: "add", path: "/result", value: "success" }], + } as StateDeltaEvent); source$.next({ - type: EventType.TEXT_MESSAGE_START, + type: 
EventType.TEXT_MESSAGE_END, messageId: "1", - } as TextMessageStartEvent); - - // Try to start a nested text message - source$.next({ - type: EventType.TEXT_MESSAGE_START, - messageId: "2", - } as TextMessageStartEvent); + } as TextMessageEndEvent); + source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); - // Complete the source and wait a bit for processing + // Complete the source source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TEXT_MESSAGE_START); + // Await the promise and expect no errors + const result = await promise; + + // Verify all events were processed + expect(result.length).toBe(6); + expect(result[3].type).toBe(EventType.STATE_DELTA); }); - // NEW TEST: Mismatched message IDs - it("should not allow text message content with mismatched IDs", async () => { + // Test: Should allow MESSAGES_SNAPSHOT inside a text message + it("should allow MESSAGES_SNAPSHOT inside a text message", async () => { const source$ = new Subject(); - const events: BaseEvent[] = []; - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - "Cannot send 'TEXT_MESSAGE_CONTENT' event: Message ID mismatch. 
The ID '2' doesn't match the active message ID '1'.", - ); - subscription.unsubscribe(); - }, - }); + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); - // Start a valid run and open a text message with ID "1" + // Send a valid sequence with a messages snapshot inside a text message source$.next({ type: EventType.RUN_STARTED, threadId: "test-thread-id", @@ -545,147 +389,47 @@ describe("verifyEvents text messages", () => { type: EventType.TEXT_MESSAGE_START, messageId: "1", } as TextMessageStartEvent); - - // Try to send content with a different ID - source$.next({ - type: EventType.TEXT_MESSAGE_CONTENT, - messageId: "2", - delta: "content 2", - } as TextMessageContentEvent); - - // Complete the source and wait a bit for processing - source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - - // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TEXT_MESSAGE_START); - }); - - // NEW TEST: TEXT_MESSAGE_CONTENT before START - it("should not allow text message content without a prior start event", async () => { - const source$ = new Subject(); - const events: BaseEvent[] = []; - - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - "Cannot send 'TEXT_MESSAGE_CONTENT' event: No active text message found. 
Start a text message with 'TEXT_MESSAGE_START' first.", - ); - subscription.unsubscribe(); - }, - }); - - // Start a valid run but skip starting a text message - source$.next({ - type: EventType.RUN_STARTED, - threadId: "test-thread-id", - runId: "test-run-id", - } as RunStartedEvent); - - // Try to send content without starting a message source$.next({ type: EventType.TEXT_MESSAGE_CONTENT, messageId: "1", - delta: "content 1", + delta: "test content", } as TextMessageContentEvent); - - // Complete the source and wait a bit for processing - source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - - // Verify only events before the error were processed - expect(events.length).toBe(1); - expect(events[0].type).toBe(EventType.RUN_STARTED); - }); - - // NEW TEST: TEXT_MESSAGE_END before START - it("should not allow ending a text message that was never started", async () => { - const source$ = new Subject(); - const events: BaseEvent[] = []; - - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - "Cannot send 'TEXT_MESSAGE_END' event: No active text message found. 
A 'TEXT_MESSAGE_START' event must be sent first.", - ); - subscription.unsubscribe(); - }, - }); - - // Start a valid run but skip starting a text message source$.next({ - type: EventType.RUN_STARTED, - threadId: "test-thread-id", - runId: "test-run-id", - } as RunStartedEvent); - - // Try to end a message that was never started + type: EventType.MESSAGES_SNAPSHOT, + messages: [{ role: "user", content: "test", id: "test-id" }], + } as MessagesSnapshotEvent); source$.next({ type: EventType.TEXT_MESSAGE_END, messageId: "1", } as TextMessageEndEvent); + source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); - // Complete the source and wait a bit for processing + // Complete the source source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - // Verify only events before the error were processed - expect(events.length).toBe(1); - expect(events[0].type).toBe(EventType.RUN_STARTED); - }); - - // NEW TEST: Starting text message outside of a run - it("should not allow starting a text message before RUN_STARTED", async () => { - const source$ = new Subject(); - const events: BaseEvent[] = []; - - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain("First event must be 'RUN_STARTED'"); - subscription.unsubscribe(); - }, - }); - - // Try to start a text message before RUN_STARTED - source$.next({ - type: EventType.TEXT_MESSAGE_START, - messageId: "1", - } as TextMessageStartEvent); - - // Complete the source and wait a bit for processing - source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); + // Await the promise and expect no errors + const result = await promise; - // Verify no events were processed - expect(events.length).toBe(0); + // Verify all events were processed + expect(result.length).toBe(6); + 
expect(result[3].type).toBe(EventType.MESSAGES_SNAPSHOT); }); - // NEW TEST: Mismatched IDs for TEXT_MESSAGE_END - it("should not allow text message end with mismatched ID", async () => { + // Test: Should allow lifecycle events (STEP_STARTED/STEP_FINISHED) during text messages + it("should allow lifecycle events during text messages", async () => { const source$ = new Subject(); - const events: BaseEvent[] = []; - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain("Cannot send 'TEXT_MESSAGE_END' event: Message ID mismatch"); - subscription.unsubscribe(); - }, - }); + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); - // Start a valid run and open a text message with ID "1" + // Send a valid sequence with lifecycle events inside a text message source$.next({ type: EventType.RUN_STARTED, threadId: "test-thread-id", @@ -695,29 +439,39 @@ describe("verifyEvents text messages", () => { type: EventType.TEXT_MESSAGE_START, messageId: "1", } as TextMessageStartEvent); + source$.next({ + type: EventType.STEP_STARTED, + stepName: "test-step", + } as StepStartedEvent); source$.next({ type: EventType.TEXT_MESSAGE_CONTENT, messageId: "1", - delta: "content 1", + delta: "test content", } as TextMessageContentEvent); - - // Try to end with a different ID + source$.next({ + type: EventType.STEP_FINISHED, + stepName: "test-step", + } as StepFinishedEvent); source$.next({ type: EventType.TEXT_MESSAGE_END, - messageId: "2", + messageId: "1", } as TextMessageEndEvent); + source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); - // Complete the source and wait a bit for processing + // Complete the source source$.complete(); - await new 
Promise((resolve) => setTimeout(resolve, 100)); - // Verify only events before the error were processed - expect(events.length).toBe(3); - expect(events[2].type).toBe(EventType.TEXT_MESSAGE_CONTENT); + // Await the promise and expect no errors + const result = await promise; + + // Verify all events were processed + expect(result.length).toBe(7); + expect(result[2].type).toBe(EventType.STEP_STARTED); + expect(result[4].type).toBe(EventType.STEP_FINISHED); }); - // NEW TEST: Empty text messages (no content) - it("should allow empty text messages with no content", async () => { + // Test: Should allow tool calls to start during text messages + it("should allow tool calls to start during text messages", async () => { const source$ = new Subject(); // Set up subscription and collect events @@ -730,7 +484,7 @@ describe("verifyEvents text messages", () => { ), ); - // Send a valid sequence with an empty text message + // Send a valid sequence with tool calls inside a text message source$.next({ type: EventType.RUN_STARTED, threadId: "test-thread-id", @@ -740,6 +494,30 @@ describe("verifyEvents text messages", () => { type: EventType.TEXT_MESSAGE_START, messageId: "1", } as TextMessageStartEvent); + source$.next({ + type: EventType.TEXT_MESSAGE_CONTENT, + messageId: "1", + delta: "Starting search...", + } as TextMessageContentEvent); + source$.next({ + type: EventType.TOOL_CALL_START, + toolCallId: "tool1", + toolCallName: "search", + } as ToolCallStartEvent); + source$.next({ + type: EventType.TOOL_CALL_ARGS, + toolCallId: "tool1", + delta: '{"query":"test"}', + } as ToolCallArgsEvent); + source$.next({ + type: EventType.TOOL_CALL_END, + toolCallId: "tool1", + } as ToolCallEndEvent); + source$.next({ + type: EventType.TEXT_MESSAGE_CONTENT, + messageId: "1", + delta: "Search completed.", + } as TextMessageContentEvent); source$.next({ type: EventType.TEXT_MESSAGE_END, messageId: "1", @@ -753,51 +531,13 @@ describe("verifyEvents text messages", () => { const result = await 
promise; // Verify all events were processed - expect(result.length).toBe(4); - expect(result[1].type).toBe(EventType.TEXT_MESSAGE_START); - expect(result[2].type).toBe(EventType.TEXT_MESSAGE_END); - }); - - // NEW TEST: Missing/undefined IDs for TEXT_MESSAGE_START - it("should not allow text messages with undefined or null IDs", async () => { - const source$ = new Subject(); - const events: BaseEvent[] = []; - - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain("requires a valid message ID"); - subscription.unsubscribe(); - }, - }); - - // Start a valid run - source$.next({ - type: EventType.RUN_STARTED, - threadId: "test-thread-id", - runId: "test-run-id", - } as RunStartedEvent); - - // Try to start a text message with undefined ID - source$.next({ - type: EventType.TEXT_MESSAGE_START, - messageId: "undefined-id", - role: "assistant", - } as TextMessageStartEvent); - - // Complete the source and wait a bit for processing - source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - - // Verify events processed before the error - expect(events.length).toBe(2); - expect(events[0].type).toBe(EventType.RUN_STARTED); - expect(events[1].type).toBe(EventType.TEXT_MESSAGE_START); + expect(result.length).toBe(9); + expect(result[3].type).toBe(EventType.TOOL_CALL_START); + expect(result[4].type).toBe(EventType.TOOL_CALL_ARGS); + expect(result[5].type).toBe(EventType.TOOL_CALL_END); }); - // NEW TEST: Sequential text messages + // Test: Sequential text messages it("should allow multiple sequential text messages", async () => { const source$ = new Subject(); @@ -866,7 +606,7 @@ describe("verifyEvents text messages", () => { expect(result[6].type).toBe(EventType.TEXT_MESSAGE_END); }); - // NEW TEST: Text message at run boundaries + // Test: Text 
message at run boundaries it("should allow text messages immediately after RUN_STARTED and before RUN_FINISHED", async () => { const source$ = new Subject(); @@ -914,4 +654,33 @@ describe("verifyEvents text messages", () => { expect(result[3].type).toBe(EventType.TEXT_MESSAGE_END); expect(result[4].type).toBe(EventType.RUN_FINISHED); }); + + // Test: Starting text message before RUN_STARTED + it("should not allow starting a text message before RUN_STARTED", async () => { + const source$ = new Subject(); + const events: BaseEvent[] = []; + + // Create a subscription that will complete only after an error + const subscription = verifyEvents(false)(source$).subscribe({ + next: (event) => events.push(event), + error: (err) => { + expect(err).toBeInstanceOf(AGUIError); + expect(err.message).toContain("First event must be 'RUN_STARTED'"); + subscription.unsubscribe(); + }, + }); + + // Try to start a text message before RUN_STARTED + source$.next({ + type: EventType.TEXT_MESSAGE_START, + messageId: "1", + } as TextMessageStartEvent); + + // Complete the source and wait a bit for processing + source$.complete(); + await new Promise((resolve) => setTimeout(resolve, 100)); + + // Verify no events were processed + expect(events.length).toBe(0); + }); }); diff --git a/typescript-sdk/packages/client/src/verify/__tests__/verify.tool-calls.test.ts b/typescript-sdk/packages/client/src/verify/__tests__/verify.tool-calls.test.ts index 61e2a2277..5e68a3431 100644 --- a/typescript-sdk/packages/client/src/verify/__tests__/verify.tool-calls.test.ts +++ b/typescript-sdk/packages/client/src/verify/__tests__/verify.tool-calls.test.ts @@ -25,8 +25,8 @@ import { } from "@ag-ui/core"; describe("verifyEvents tool calls", () => { - // Test: Cannot send lifecycle events inside a tool call - it("should not allow lifecycle events inside a tool call", async () => { + // Test: Cannot send TOOL_CALL_ARGS before TOOL_CALL_START + it("should not allow TOOL_CALL_ARGS before TOOL_CALL_START", async () 
=> { const source$ = new Subject(); const events: BaseEvent[] = []; @@ -36,85 +36,37 @@ describe("verifyEvents tool calls", () => { error: (err) => { expect(err).toBeInstanceOf(AGUIError); expect(err.message).toContain( - `Cannot send event type 'STEP_STARTED' after 'TOOL_CALL_START'`, + `Cannot send 'TOOL_CALL_ARGS' event: No active tool call found with ID 't1'`, ); subscription.unsubscribe(); }, }); - // Start a valid run and open a tool call + // Start a valid run source$.next({ type: EventType.RUN_STARTED, threadId: "test-thread-id", runId: "test-run-id", } as RunStartedEvent); - source$.next({ - type: EventType.TOOL_CALL_START, - toolCallId: "t1", - toolCallName: "test-tool", - } as ToolCallStartEvent); - - // Try to send a lifecycle event inside the tool call - source$.next({ - type: EventType.STEP_STARTED, - stepName: "step1", - } as StepStartedEvent); - - // Complete the source and wait a bit for processing - source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - - // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TOOL_CALL_START); - }); - - // Test: Cannot send text message events inside a tool call - it("should not allow text message events inside a tool call", async () => { - const source$ = new Subject(); - const events: BaseEvent[] = []; - - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - `Cannot send event type 'TEXT_MESSAGE_START' after 'TOOL_CALL_START'`, - ); - subscription.unsubscribe(); - }, - }); - // Start a valid run and open a tool call - source$.next({ - type: EventType.RUN_STARTED, - threadId: "test-thread-id", - runId: "test-run-id", - } as RunStartedEvent); + // Try to send args without starting a tool call source$.next({ - 
type: EventType.TOOL_CALL_START, + type: EventType.TOOL_CALL_ARGS, toolCallId: "t1", - toolCallName: "test-tool", - } as ToolCallStartEvent); - - // Try to send a text message event inside the tool call - source$.next({ - type: EventType.TEXT_MESSAGE_START, - messageId: "1", - } as TextMessageStartEvent); + delta: "test args", + } as ToolCallArgsEvent); // Complete the source and wait a bit for processing source$.complete(); await new Promise((resolve) => setTimeout(resolve, 100)); // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TOOL_CALL_START); + expect(events.length).toBe(1); + expect(events[0].type).toBe(EventType.RUN_STARTED); }); - // Test: Cannot start a nested tool call - it("should not allow nested tool calls", async () => { + // Test: Cannot send TOOL_CALL_END before TOOL_CALL_START + it("should not allow TOOL_CALL_END before TOOL_CALL_START", async () => { const source$ = new Subject(); const events: BaseEvent[] = []; @@ -124,38 +76,32 @@ describe("verifyEvents tool calls", () => { error: (err) => { expect(err).toBeInstanceOf(AGUIError); expect(err.message).toContain( - `Cannot send 'TOOL_CALL_START' event: A tool call is already in progress`, + `Cannot send 'TOOL_CALL_END' event: No active tool call found with ID 't1'`, ); subscription.unsubscribe(); }, }); - // Start a valid run and open a tool call + // Start a valid run source$.next({ type: EventType.RUN_STARTED, threadId: "test-thread-id", runId: "test-run-id", } as RunStartedEvent); - source$.next({ - type: EventType.TOOL_CALL_START, - toolCallId: "t1", - toolCallName: "test-tool", - } as ToolCallStartEvent); - // Try to start another tool call inside the first one + // Try to end a tool call without starting it source$.next({ - type: EventType.TOOL_CALL_START, - toolCallId: "t2", - toolCallName: "test-tool-2", - } as ToolCallStartEvent); + type: EventType.TOOL_CALL_END, + toolCallId: "t1", + } as ToolCallEndEvent); // 
Complete the source and wait a bit for processing source$.complete(); await new Promise((resolve) => setTimeout(resolve, 100)); // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TOOL_CALL_START); + expect(events.length).toBe(1); + expect(events[0].type).toBe(EventType.RUN_STARTED); }); // Test: Should allow TOOL_CALL_ARGS and TOOL_CALL_END inside a tool call @@ -267,22 +213,21 @@ describe("verifyEvents tool calls", () => { expect(result[3].type).toBe(EventType.RAW); }); - // Test: Should not allow CUSTOM inside a tool call - it("should not allow CUSTOM inside a tool call", async () => { + // Test: Should allow CUSTOM inside a tool call + it("should allow CUSTOM inside a tool call", async () => { const source$ = new Subject(); - const events: BaseEvent[] = []; - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain(`Cannot send event type 'CUSTOM' after 'TOOL_CALL_START'`); - subscription.unsubscribe(); - }, - }); + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); - // Start a valid run and open a tool call + // Send a valid sequence with a custom event inside a tool call source$.next({ type: EventType.RUN_STARTED, threadId: "test-thread-id", @@ -293,41 +238,48 @@ describe("verifyEvents tool calls", () => { toolCallId: "t1", toolCallName: "test-tool", } as ToolCallStartEvent); - - // Try to send a meta event inside the tool call + source$.next({ + type: EventType.TOOL_CALL_ARGS, + toolCallId: "t1", + delta: "test args", + } as ToolCallArgsEvent); source$.next({ type: EventType.CUSTOM, - name: "PredictState", - value: [{ state_key: "test", tool: "test-tool" }], 
+ name: "test_event", + value: "test_value", } as CustomEvent); + source$.next({ + type: EventType.TOOL_CALL_END, + toolCallId: "t1", + } as ToolCallEndEvent); + source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); - // Complete the source and wait a bit for processing + // Complete the source source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TOOL_CALL_START); + // Await the promise and expect no errors + const result = await promise; + + // Verify all events were processed + expect(result.length).toBe(6); + expect(result[3].type).toBe(EventType.CUSTOM); }); - // Test: Should not allow STATE_SNAPSHOT inside a tool call - it("should not allow STATE_SNAPSHOT inside a tool call", async () => { + // Test: Should allow STATE_SNAPSHOT inside a tool call + it("should allow STATE_SNAPSHOT inside a tool call", async () => { const source$ = new Subject(); - const events: BaseEvent[] = []; - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - `Cannot send event type 'STATE_SNAPSHOT' after 'TOOL_CALL_START'`, - ); - subscription.unsubscribe(); - }, - }); + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); - // Start a valid run and open a tool call + // Send a valid sequence with a state snapshot inside a tool call source$.next({ type: EventType.RUN_STARTED, threadId: "test-thread-id", @@ -338,40 +290,50 @@ describe("verifyEvents tool calls", () => { toolCallId: "t1", toolCallName: "test-tool", } as ToolCallStartEvent); - - // Try to send a state snapshot inside the tool 
call + source$.next({ + type: EventType.TOOL_CALL_ARGS, + toolCallId: "t1", + delta: "test args", + } as ToolCallArgsEvent); source$.next({ type: EventType.STATE_SNAPSHOT, - snapshot: { test: true }, + snapshot: { + state: "test_state", + data: { foo: "bar" }, + }, } as StateSnapshotEvent); + source$.next({ + type: EventType.TOOL_CALL_END, + toolCallId: "t1", + } as ToolCallEndEvent); + source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); - // Complete the source and wait a bit for processing + // Complete the source source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TOOL_CALL_START); + // Await the promise and expect no errors + const result = await promise; + + // Verify all events were processed + expect(result.length).toBe(6); + expect(result[3].type).toBe(EventType.STATE_SNAPSHOT); }); - // Test: Should not allow STATE_DELTA inside a tool call - it("should not allow STATE_DELTA inside a tool call", async () => { + // Test: Should allow STATE_DELTA inside a tool call + it("should allow STATE_DELTA inside a tool call", async () => { const source$ = new Subject(); - const events: BaseEvent[] = []; - // Create a subscription that will complete only after an error - const subscription = verifyEvents(false)(source$).subscribe({ - next: (event) => events.push(event), - error: (err) => { - expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - `Cannot send event type 'STATE_DELTA' after 'TOOL_CALL_START'`, - ); - subscription.unsubscribe(); - }, - }); + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); - // Start a valid run and open a tool call + // Send a valid sequence with a state delta inside a tool call source$.next({ type: EventType.RUN_STARTED, 
threadId: "test-thread-id", @@ -382,24 +344,329 @@ describe("verifyEvents tool calls", () => { toolCallId: "t1", toolCallName: "test-tool", } as ToolCallStartEvent); - - // Try to send a state delta inside the tool call + source$.next({ + type: EventType.TOOL_CALL_ARGS, + toolCallId: "t1", + delta: "test args", + } as ToolCallArgsEvent); source$.next({ type: EventType.STATE_DELTA, - delta: [{ op: "add", path: "/test", value: true }], + delta: [{ op: "add", path: "/result", value: "success" }], } as StateDeltaEvent); + source$.next({ + type: EventType.TOOL_CALL_END, + toolCallId: "t1", + } as ToolCallEndEvent); + source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); - // Complete the source and wait a bit for processing + // Complete the source source$.complete(); - await new Promise((resolve) => setTimeout(resolve, 100)); - // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TOOL_CALL_START); + // Await the promise and expect no errors + const result = await promise; + + // Verify all events were processed + expect(result.length).toBe(6); + expect(result[3].type).toBe(EventType.STATE_DELTA); }); - // Test: Should not allow MESSAGES_SNAPSHOT inside a tool call - it("should not allow MESSAGES_SNAPSHOT inside a tool call", async () => { + // Test: Should allow MESSAGES_SNAPSHOT inside a tool call + it("should allow MESSAGES_SNAPSHOT inside a tool call", async () => { + const source$ = new Subject(); + + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); + + // Send a valid sequence with a messages snapshot inside a tool call + source$.next({ + type: EventType.RUN_STARTED, + threadId: "test-thread-id", + runId: "test-run-id", + } as RunStartedEvent); + source$.next({ + type: EventType.TOOL_CALL_START, + toolCallId: "t1", + toolCallName: "test-tool", + } as 
ToolCallStartEvent); + source$.next({ + type: EventType.TOOL_CALL_ARGS, + toolCallId: "t1", + delta: "test args", + } as ToolCallArgsEvent); + source$.next({ + type: EventType.MESSAGES_SNAPSHOT, + messages: [{ role: "user", content: "test", id: "test-id" }], + } as MessagesSnapshotEvent); + source$.next({ + type: EventType.TOOL_CALL_END, + toolCallId: "t1", + } as ToolCallEndEvent); + source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); + + // Complete the source + source$.complete(); + + // Await the promise and expect no errors + const result = await promise; + + // Verify all events were processed + expect(result.length).toBe(6); + expect(result[3].type).toBe(EventType.MESSAGES_SNAPSHOT); + }); + + // Test: Should allow lifecycle events (STEP_STARTED/STEP_FINISHED) during tool calls + it("should allow lifecycle events during tool calls", async () => { + const source$ = new Subject(); + + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); + + // Send a valid sequence with lifecycle events inside a tool call + source$.next({ + type: EventType.RUN_STARTED, + threadId: "test-thread-id", + runId: "test-run-id", + } as RunStartedEvent); + source$.next({ + type: EventType.TOOL_CALL_START, + toolCallId: "t1", + toolCallName: "test-tool", + } as ToolCallStartEvent); + source$.next({ + type: EventType.STEP_STARTED, + stepName: "test-step", + } as StepStartedEvent); + source$.next({ + type: EventType.TOOL_CALL_ARGS, + toolCallId: "t1", + delta: "test args", + } as ToolCallArgsEvent); + source$.next({ + type: EventType.STEP_FINISHED, + stepName: "test-step", + } as StepFinishedEvent); + source$.next({ + type: EventType.TOOL_CALL_END, + toolCallId: "t1", + } as ToolCallEndEvent); + source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); + + // Complete the source + source$.complete(); + + // Await the promise and expect no 
errors + const result = await promise; + + // Verify all events were processed + expect(result.length).toBe(7); + expect(result[2].type).toBe(EventType.STEP_STARTED); + expect(result[4].type).toBe(EventType.STEP_FINISHED); + }); + + // Test: Should allow text messages to start during tool calls + it("should allow text messages to start during tool calls", async () => { + const source$ = new Subject(); + + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); + + // Send a valid sequence with text messages inside a tool call + source$.next({ + type: EventType.RUN_STARTED, + threadId: "test-thread-id", + runId: "test-run-id", + } as RunStartedEvent); + source$.next({ + type: EventType.TOOL_CALL_START, + toolCallId: "t1", + toolCallName: "test-tool", + } as ToolCallStartEvent); + source$.next({ + type: EventType.TOOL_CALL_ARGS, + toolCallId: "t1", + delta: "Preparing...", + } as ToolCallArgsEvent); + source$.next({ + type: EventType.TEXT_MESSAGE_START, + messageId: "msg1", + } as TextMessageStartEvent); + source$.next({ + type: EventType.TEXT_MESSAGE_CONTENT, + messageId: "msg1", + delta: "Tool is processing...", + } as TextMessageContentEvent); + source$.next({ + type: EventType.TEXT_MESSAGE_END, + messageId: "msg1", + } as TextMessageEndEvent); + source$.next({ + type: EventType.TOOL_CALL_ARGS, + toolCallId: "t1", + delta: "Completed.", + } as ToolCallArgsEvent); + source$.next({ + type: EventType.TOOL_CALL_END, + toolCallId: "t1", + } as ToolCallEndEvent); + source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); + + // Complete the source + source$.complete(); + + // Await the promise and expect no errors + const result = await promise; + + // Verify all events were processed + expect(result.length).toBe(9); + expect(result[3].type).toBe(EventType.TEXT_MESSAGE_START); + expect(result[4].type).toBe(EventType.TEXT_MESSAGE_CONTENT); 
+ expect(result[5].type).toBe(EventType.TEXT_MESSAGE_END); + }); + + // Test: Sequential tool calls + it("should allow multiple sequential tool calls", async () => { + const source$ = new Subject(); + + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); + + // Send a valid sequence with multiple tool calls + source$.next({ + type: EventType.RUN_STARTED, + threadId: "test-thread-id", + runId: "test-run-id", + } as RunStartedEvent); + + // First tool call + source$.next({ + type: EventType.TOOL_CALL_START, + toolCallId: "t1", + toolCallName: "search", + } as ToolCallStartEvent); + source$.next({ + type: EventType.TOOL_CALL_ARGS, + toolCallId: "t1", + delta: '{"query":"test"}', + } as ToolCallArgsEvent); + source$.next({ + type: EventType.TOOL_CALL_END, + toolCallId: "t1", + } as ToolCallEndEvent); + + // Second tool call + source$.next({ + type: EventType.TOOL_CALL_START, + toolCallId: "t2", + toolCallName: "calculate", + } as ToolCallStartEvent); + source$.next({ + type: EventType.TOOL_CALL_ARGS, + toolCallId: "t2", + delta: '{"expression":"1+1"}', + } as ToolCallArgsEvent); + source$.next({ + type: EventType.TOOL_CALL_END, + toolCallId: "t2", + } as ToolCallEndEvent); + + source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); + + // Complete the source + source$.complete(); + + // Await the promise and expect no errors + const result = await promise; + + // Verify all events were processed + expect(result.length).toBe(8); + expect(result[1].type).toBe(EventType.TOOL_CALL_START); + expect(result[2].type).toBe(EventType.TOOL_CALL_ARGS); + expect(result[3].type).toBe(EventType.TOOL_CALL_END); + expect(result[4].type).toBe(EventType.TOOL_CALL_START); + expect(result[5].type).toBe(EventType.TOOL_CALL_ARGS); + expect(result[6].type).toBe(EventType.TOOL_CALL_END); + }); + + // Test: Tool call at run boundaries + it("should allow tool 
calls immediately after RUN_STARTED and before RUN_FINISHED", async () => { + const source$ = new Subject(); + + // Set up subscription and collect events + const promise = firstValueFrom( + verifyEvents(false)(source$).pipe( + toArray(), + catchError((err) => { + throw err; + }), + ), + ); + + // Send tool call immediately after run start and before run end + source$.next({ + type: EventType.RUN_STARTED, + threadId: "test-thread-id", + runId: "test-run-id", + } as RunStartedEvent); + source$.next({ + type: EventType.TOOL_CALL_START, + toolCallId: "t1", + toolCallName: "test-tool", + } as ToolCallStartEvent); + source$.next({ + type: EventType.TOOL_CALL_ARGS, + toolCallId: "t1", + delta: "test args", + } as ToolCallArgsEvent); + source$.next({ + type: EventType.TOOL_CALL_END, + toolCallId: "t1", + } as ToolCallEndEvent); + source$.next({ type: EventType.RUN_FINISHED } as RunFinishedEvent); + + // Complete the source + source$.complete(); + + // Await the promise and expect no errors + const result = await promise; + + // Verify all events were processed + expect(result.length).toBe(5); + expect(result[0].type).toBe(EventType.RUN_STARTED); + expect(result[1].type).toBe(EventType.TOOL_CALL_START); + expect(result[3].type).toBe(EventType.TOOL_CALL_END); + expect(result[4].type).toBe(EventType.RUN_FINISHED); + }); + + // Test: Starting tool call before RUN_STARTED + it("should not allow starting a tool call before RUN_STARTED", async () => { const source$ = new Subject(); const events: BaseEvent[] = []; @@ -408,37 +675,23 @@ describe("verifyEvents tool calls", () => { next: (event) => events.push(event), error: (err) => { expect(err).toBeInstanceOf(AGUIError); - expect(err.message).toContain( - `Cannot send event type 'MESSAGES_SNAPSHOT' after 'TOOL_CALL_START'`, - ); + expect(err.message).toContain("First event must be 'RUN_STARTED'"); subscription.unsubscribe(); }, }); - // Start a valid run and open a tool call - source$.next({ - type: EventType.RUN_STARTED, - 
threadId: "test-thread-id", - runId: "test-run-id", - } as RunStartedEvent); + // Try to start a tool call before RUN_STARTED source$.next({ type: EventType.TOOL_CALL_START, toolCallId: "t1", toolCallName: "test-tool", } as ToolCallStartEvent); - // Try to send a messages snapshot inside the tool call - source$.next({ - type: EventType.MESSAGES_SNAPSHOT, - messages: [{ role: "user", content: "test" }], - } as MessagesSnapshotEvent); - // Complete the source and wait a bit for processing source$.complete(); await new Promise((resolve) => setTimeout(resolve, 100)); - // Verify only events before the error were processed - expect(events.length).toBe(2); - expect(events[1].type).toBe(EventType.TOOL_CALL_START); + // Verify no events were processed + expect(events.length).toBe(0); }); }); diff --git a/typescript-sdk/packages/client/src/verify/verify.ts b/typescript-sdk/packages/client/src/verify/verify.ts index 9196ecc6c..217d6e423 100644 --- a/typescript-sdk/packages/client/src/verify/verify.ts +++ b/typescript-sdk/packages/client/src/verify/verify.ts @@ -6,8 +6,8 @@ export const verifyEvents = (debug: boolean) => (source$: Observable): Observable => { // Declare variables in closure to maintain state across events - let activeMessageId: string | undefined; - let activeToolCallId: string | undefined; + let activeMessages = new Map(); // Map of message ID -> active status + let activeToolCalls = new Map(); // Map of tool call ID -> active status let runFinished = false; let runError = false; // New flag to track if RUN_ERROR has been sent // New flags to track first/last event requirements @@ -16,6 +16,19 @@ export const verifyEvents = let activeSteps = new Map(); // Map of step name -> active status let activeThinkingStep = false; let activeThinkingStepMessage = false; + let runStarted = false; // Track if a run has started + + // Function to reset state for a new run + const resetRunState = () => { + activeMessages.clear(); + activeToolCalls.clear(); + 
activeSteps.clear(); + activeThinkingStep = false; + activeThinkingStepMessage = false; + runFinished = false; + runError = false; + runStarted = true; + }; return source$.pipe( // Process each event through our state machine @@ -36,8 +49,8 @@ export const verifyEvents = ); } - // Check if run has already finished - if (runFinished && eventType !== EventType.RUN_ERROR) { + // Check if run has already finished (but allow new RUN_STARTED to start a new run) + if (runFinished && eventType !== EventType.RUN_ERROR && eventType !== EventType.RUN_STARTED) { return throwError( () => new AGUIError( @@ -46,106 +59,58 @@ export const verifyEvents = ); } - // Forbid lifecycle events and tool events inside a text message - if (activeMessageId !== undefined) { - // Define allowed event types inside a text message - const allowedEventTypes = [ - EventType.TEXT_MESSAGE_CONTENT, - EventType.TEXT_MESSAGE_END, - EventType.RAW, - ]; - - // If the event type is not in the allowed list, throw an error - if (!allowedEventTypes.includes(eventType)) { - return throwError( - () => - new AGUIError( - `Cannot send event type '${eventType}' after 'TEXT_MESSAGE_START': Send 'TEXT_MESSAGE_END' first.`, - ), - ); + // Handle first event requirement and sequential RUN_STARTED + if (!firstEventReceived) { + firstEventReceived = true; + if (eventType !== EventType.RUN_STARTED && eventType !== EventType.RUN_ERROR) { + return throwError(() => new AGUIError(`First event must be 'RUN_STARTED'`)); } - } - - // Forbid lifecycle events and text message events inside a tool call - if (activeToolCallId !== undefined) { - // Define allowed event types inside a tool call - const allowedEventTypes = [ - EventType.TOOL_CALL_ARGS, - EventType.TOOL_CALL_END, - EventType.RAW, - ]; - - // If the event type is not in the allowed list, throw an error - if (!allowedEventTypes.includes(eventType)) { - // Special handling for nested tool calls for better error message - if (eventType === EventType.TOOL_CALL_START) { - 
return throwError( - () => - new AGUIError( - `Cannot send 'TOOL_CALL_START' event: A tool call is already in progress. Complete it with 'TOOL_CALL_END' first.`, - ), - ); - } - + } else if (eventType === EventType.RUN_STARTED) { + // Allow RUN_STARTED after RUN_FINISHED (new run), but not during an active run + if (runStarted && !runFinished) { return throwError( () => new AGUIError( - `Cannot send event type '${eventType}' after 'TOOL_CALL_START': Send 'TOOL_CALL_END' first.`, + `Cannot send 'RUN_STARTED' while a run is still active. The previous run must be finished with 'RUN_FINISHED' before starting a new run.`, ), ); } - } - - // Handle first event requirement and prevent multiple RUN_STARTED - if (!firstEventReceived) { - firstEventReceived = true; - if (eventType !== EventType.RUN_STARTED && eventType !== EventType.RUN_ERROR) { - return throwError(() => new AGUIError(`First event must be 'RUN_STARTED'`)); + // If we're here, it's either the first RUN_STARTED or a new run after RUN_FINISHED + if (runFinished) { + // This is a new run after the previous one finished, reset state + resetRunState(); } - } else if (eventType === EventType.RUN_STARTED) { - // Prevent multiple RUN_STARTED events - return throwError( - () => - new AGUIError( - `Cannot send multiple 'RUN_STARTED' events: A 'RUN_STARTED' event was already sent. Each run must have exactly one 'RUN_STARTED' event at the beginning.`, - ), - ); } // Validate event based on type and current state switch (eventType) { // Text message flow case EventType.TEXT_MESSAGE_START: { - // Can't start a message if one is already in progress - if (activeMessageId !== undefined) { + const messageId = (event as any).messageId; + + // Check if this message is already in progress + if (activeMessages.has(messageId)) { return throwError( () => new AGUIError( - `Cannot send 'TEXT_MESSAGE_START' event: A text message is already in progress. 
Complete it with 'TEXT_MESSAGE_END' first.`, + `Cannot send 'TEXT_MESSAGE_START' event: A text message with ID '${messageId}' is already in progress. Complete it with 'TEXT_MESSAGE_END' first.`, ), ); } - activeMessageId = (event as any).messageId; + activeMessages.set(messageId, true); return of(event); } case EventType.TEXT_MESSAGE_CONTENT: { - // Must be in a message and IDs must match - if (activeMessageId === undefined) { - return throwError( - () => - new AGUIError( - `Cannot send 'TEXT_MESSAGE_CONTENT' event: No active text message found. Start a text message with 'TEXT_MESSAGE_START' first.`, - ), - ); - } + const messageId = (event as any).messageId; - if ((event as any).messageId !== activeMessageId) { + // Must be in a message with this ID + if (!activeMessages.has(messageId)) { return throwError( () => new AGUIError( - `Cannot send 'TEXT_MESSAGE_CONTENT' event: Message ID mismatch. The ID '${(event as any).messageId}' doesn't match the active message ID '${activeMessageId}'.`, + `Cannot send 'TEXT_MESSAGE_CONTENT' event: No active text message found with ID '${messageId}'. Start a text message with 'TEXT_MESSAGE_START' first.`, ), ); } @@ -154,62 +119,50 @@ export const verifyEvents = } case EventType.TEXT_MESSAGE_END: { - // Must be in a message and IDs must match - if (activeMessageId === undefined) { - return throwError( - () => - new AGUIError( - `Cannot send 'TEXT_MESSAGE_END' event: No active text message found. A 'TEXT_MESSAGE_START' event must be sent first.`, - ), - ); - } + const messageId = (event as any).messageId; - if ((event as any).messageId !== activeMessageId) { + // Must be in a message with this ID + if (!activeMessages.has(messageId)) { return throwError( () => new AGUIError( - `Cannot send 'TEXT_MESSAGE_END' event: Message ID mismatch. The ID '${(event as any).messageId}' doesn't match the active message ID '${activeMessageId}'.`, + `Cannot send 'TEXT_MESSAGE_END' event: No active text message found with ID '${messageId}'. 
A 'TEXT_MESSAGE_START' event must be sent first.`, ), ); } - // Reset message state - activeMessageId = undefined; + // Remove message from active set + activeMessages.delete(messageId); return of(event); } // Tool call flow case EventType.TOOL_CALL_START: { - // Can't start a tool call if one is already in progress - if (activeToolCallId !== undefined) { + const toolCallId = (event as any).toolCallId; + + // Check if this tool call is already in progress + if (activeToolCalls.has(toolCallId)) { return throwError( () => new AGUIError( - `Cannot send 'TOOL_CALL_START' event: A tool call is already in progress. Complete it with 'TOOL_CALL_END' first.`, + `Cannot send 'TOOL_CALL_START' event: A tool call with ID '${toolCallId}' is already in progress. Complete it with 'TOOL_CALL_END' first.`, ), ); } - activeToolCallId = (event as any).toolCallId; + activeToolCalls.set(toolCallId, true); return of(event); } case EventType.TOOL_CALL_ARGS: { - // Must be in a tool call and IDs must match - if (activeToolCallId === undefined) { - return throwError( - () => - new AGUIError( - `Cannot send 'TOOL_CALL_ARGS' event: No active tool call found. Start a tool call with 'TOOL_CALL_START' first.`, - ), - ); - } + const toolCallId = (event as any).toolCallId; - if ((event as any).toolCallId !== activeToolCallId) { + // Must be in a tool call with this ID + if (!activeToolCalls.has(toolCallId)) { return throwError( () => new AGUIError( - `Cannot send 'TOOL_CALL_ARGS' event: Tool call ID mismatch. The ID '${(event as any).toolCallId}' doesn't match the active tool call ID '${activeToolCallId}'.`, + `Cannot send 'TOOL_CALL_ARGS' event: No active tool call found with ID '${toolCallId}'. 
Start a tool call with 'TOOL_CALL_START' first.`, ), ); } @@ -218,27 +171,20 @@ export const verifyEvents = } case EventType.TOOL_CALL_END: { - // Must be in a tool call and IDs must match - if (activeToolCallId === undefined) { - return throwError( - () => - new AGUIError( - `Cannot send 'TOOL_CALL_END' event: No active tool call found. A 'TOOL_CALL_START' event must be sent first.`, - ), - ); - } + const toolCallId = (event as any).toolCallId; - if ((event as any).toolCallId !== activeToolCallId) { + // Must be in a tool call with this ID + if (!activeToolCalls.has(toolCallId)) { return throwError( () => new AGUIError( - `Cannot send 'TOOL_CALL_END' event: Tool call ID mismatch. The ID '${(event as any).toolCallId}' doesn't match the active tool call ID '${activeToolCallId}'.`, + `Cannot send 'TOOL_CALL_END' event: No active tool call found with ID '${toolCallId}'. A 'TOOL_CALL_START' event must be sent first.`, ), ); } - // Reset tool call state - activeToolCallId = undefined; + // Remove tool call from active set + activeToolCalls.delete(toolCallId); return of(event); } @@ -271,6 +217,7 @@ export const verifyEvents = // Run flow case EventType.RUN_STARTED: { // We've already validated this above + runStarted = true; return of(event); } @@ -289,6 +236,28 @@ export const verifyEvents = ); } + // Check that all messages are finished before run ends + if (activeMessages.size > 0) { + const unfinishedMessages = Array.from(activeMessages.keys()).join(", "); + return throwError( + () => + new AGUIError( + `Cannot send 'RUN_FINISHED' while text messages are still active: ${unfinishedMessages}`, + ), + ); + } + + // Check that all tool calls are finished before run ends + if (activeToolCalls.size > 0) { + const unfinishedToolCalls = Array.from(activeToolCalls.keys()).join(", "); + return throwError( + () => + new AGUIError( + `Cannot send 'RUN_FINISHED' while tool calls are still active: ${unfinishedToolCalls}`, + ), + ); + } + runFinished = true; return of(event); } 
diff --git a/typescript-sdk/packages/core/package.json b/typescript-sdk/packages/core/package.json index defd9b704..67bca0bbc 100644 --- a/typescript-sdk/packages/core/package.json +++ b/typescript-sdk/packages/core/package.json @@ -1,7 +1,7 @@ { "name": "@ag-ui/core", "author": "Markus Ecker ", - "version": "0.0.36", + "version": "0.0.37", "private": false, "publishConfig": { "access": "public" diff --git a/typescript-sdk/packages/core/src/events.ts b/typescript-sdk/packages/core/src/events.ts index e64c62518..a95fc8e15 100644 --- a/typescript-sdk/packages/core/src/events.ts +++ b/typescript-sdk/packages/core/src/events.ts @@ -1,6 +1,14 @@ import { z } from "zod"; import { MessageSchema, StateSchema } from "./types"; +// Text messages can have any role except "tool" +const TextMessageRoleSchema = z.union([ + z.literal("developer"), + z.literal("system"), + z.literal("assistant"), + z.literal("user"), +]); + export enum EventType { TEXT_MESSAGE_START = "TEXT_MESSAGE_START", TEXT_MESSAGE_CONTENT = "TEXT_MESSAGE_CONTENT", @@ -28,7 +36,7 @@ export enum EventType { STEP_FINISHED = "STEP_FINISHED", } -const BaseEventSchema = z.object({ +export const BaseEventSchema = z.object({ type: z.nativeEnum(EventType), timestamp: z.number().optional(), rawEvent: z.any().optional(), @@ -37,7 +45,7 @@ const BaseEventSchema = z.object({ export const TextMessageStartEventSchema = BaseEventSchema.extend({ type: z.literal(EventType.TEXT_MESSAGE_START), messageId: z.string(), - role: z.literal("assistant"), + role: TextMessageRoleSchema.default("assistant"), }); export const TextMessageContentEventSchema = BaseEventSchema.extend({ @@ -54,7 +62,7 @@ export const TextMessageEndEventSchema = BaseEventSchema.extend({ export const TextMessageChunkEventSchema = BaseEventSchema.extend({ type: z.literal(EventType.TEXT_MESSAGE_CHUNK), messageId: z.string().optional(), - role: z.literal("assistant").optional(), + role: TextMessageRoleSchema.optional(), delta: z.string().optional(), }); @@ -177,6 
+185,8 @@ export const EventSchemas = z.discriminatedUnion("type", [ TextMessageContentEventSchema, TextMessageEndEventSchema, TextMessageChunkEventSchema, + ThinkingStartEventSchema, + ThinkingEndEventSchema, ThinkingTextMessageStartEventSchema, ThinkingTextMessageContentEventSchema, ThinkingTextMessageEndEventSchema, diff --git a/typescript-sdk/packages/encoder/package.json b/typescript-sdk/packages/encoder/package.json index aead15110..221b7afe4 100644 --- a/typescript-sdk/packages/encoder/package.json +++ b/typescript-sdk/packages/encoder/package.json @@ -1,7 +1,7 @@ { "name": "@ag-ui/encoder", "author": "Markus Ecker ", - "version": "0.0.36", + "version": "0.0.37", "private": false, "publishConfig": { "access": "public" diff --git a/typescript-sdk/packages/proto/package.json b/typescript-sdk/packages/proto/package.json index f49339f09..787edfabe 100644 --- a/typescript-sdk/packages/proto/package.json +++ b/typescript-sdk/packages/proto/package.json @@ -1,7 +1,7 @@ { "name": "@ag-ui/proto", "author": "Markus Ecker ", - "version": "0.0.36", + "version": "0.0.37", "private": false, "publishConfig": { "access": "public" diff --git a/typescript-sdk/pnpm-lock.yaml b/typescript-sdk/pnpm-lock.yaml index 61be2106f..8ae5a06e0 100644 --- a/typescript-sdk/pnpm-lock.yaml +++ b/typescript-sdk/pnpm-lock.yaml @@ -298,13 +298,16 @@ importers: integrations/agno: dependencies: - '@ag-ui/client': - specifier: workspace:* - version: link:../../packages/client rxjs: specifier: 7.8.1 version: 7.8.1 devDependencies: + '@ag-ui/client': + specifier: workspace:* + version: link:../../packages/client + '@ag-ui/core': + specifier: workspace:* + version: link:../../packages/core '@types/jest': specifier: ^29.5.14 version: 29.5.14 @@ -326,13 +329,16 @@ importers: integrations/crewai: dependencies: - '@ag-ui/client': - specifier: workspace:* - version: link:../../packages/client rxjs: specifier: 7.8.1 version: 7.8.1 devDependencies: + '@ag-ui/client': + specifier: workspace:* + version: 
link:../../packages/client + '@ag-ui/core': + specifier: workspace:* + version: link:../../packages/core '@types/jest': specifier: ^29.5.14 version: 29.5.14 @@ -354,15 +360,12 @@ importers: integrations/langgraph: dependencies: - '@ag-ui/client': - specifier: workspace:* - version: link:../../packages/client '@langchain/core': specifier: ^0.3.66 - version: 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) + version: 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(zod@3.25.71)) '@langchain/langgraph-sdk': - specifier: ^0.0.105 - version: 0.0.105(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(react-dom@19.1.0(react@19.1.0))(react@19.1.0) + specifier: ^0.1.2 + version: 0.1.2(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(zod@3.25.71)))(react-dom@19.1.0(react@19.1.0))(react@19.1.0) partial-json: specifier: ^0.1.7 version: 0.1.7 @@ -370,6 +373,12 @@ importers: specifier: 7.8.1 version: 7.8.1 devDependencies: + '@ag-ui/client': + specifier: workspace:* + version: link:../../packages/client + '@ag-ui/core': + specifier: workspace:* + version: link:../../packages/core '@types/jest': specifier: ^29.5.14 version: 29.5.14 @@ -391,13 +400,16 @@ importers: integrations/llamaindex: dependencies: - '@ag-ui/client': - specifier: workspace:* - version: link:../../packages/client rxjs: specifier: 7.8.1 version: 7.8.1 devDependencies: + '@ag-ui/client': + 
specifier: workspace:* + version: link:../../packages/client + '@ag-ui/core': + specifier: workspace:* + version: link:../../packages/core '@types/jest': specifier: ^29.5.14 version: 29.5.14 @@ -419,15 +431,12 @@ importers: integrations/mastra: dependencies: - '@ag-ui/client': - specifier: workspace:* - version: link:../../packages/client '@ai-sdk/ui-utils': specifier: ^1.1.19 version: 1.2.11(zod@3.25.67) '@copilotkit/runtime': specifier: ^1.9.3 - version: 1.9.3(@ag-ui/client@packages+client)(@ag-ui/core@0.0.35)(@ag-ui/encoder@0.0.35)(@ag-ui/proto@0.0.35)(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.844.0)(@aws-sdk/client-bedrock-runtime@3.844.0)(@aws-sdk/client-dynamodb@3.859.0)(@aws-sdk/client-kendra@3.844.0)(@aws-sdk/credential-provider-node@3.859.0)(@browserbasehq/sdk@2.6.0)(@browserbasehq/stagehand@2.4.0(deepmerge@4.3.1)(dotenv@17.0.1)(react@19.1.0)(zod@3.25.67))(@ibm-cloud/watsonx-ai@1.6.8)(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(@smithy/util-utf8@2.3.0)(@upstash/redis@1.35.1)(axios@1.10.0)(cohere-ai@7.17.1)(fast-xml-parser@5.2.5)(google-auth-library@10.1.0)(ibm-cloud-sdk-core@5.4.0)(ignore@5.3.2)(jsonwebtoken@9.0.2)(lodash@4.17.21)(pg@8.16.3)(playwright@1.53.2)(react@19.1.0)(redis@5.6.1)(ws@8.18.3) + version: 
1.9.3(@ag-ui/client@packages+client)(@ag-ui/core@packages+core)(@ag-ui/encoder@0.0.37)(@ag-ui/proto@0.0.37)(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.844.0)(@aws-sdk/client-bedrock-runtime@3.844.0)(@aws-sdk/client-dynamodb@3.859.0)(@aws-sdk/client-kendra@3.844.0)(@aws-sdk/credential-provider-node@3.859.0)(@browserbasehq/sdk@2.6.0)(@browserbasehq/stagehand@2.4.0(deepmerge@4.3.1)(dotenv@17.0.1)(react@19.1.0)(zod@3.25.67))(@ibm-cloud/watsonx-ai@1.6.8)(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(@smithy/util-utf8@2.3.0)(@upstash/redis@1.35.1)(axios@1.10.0)(cohere-ai@7.17.1)(fast-xml-parser@5.2.5)(google-auth-library@10.1.0)(ibm-cloud-sdk-core@5.4.0)(ignore@5.3.2)(jsonwebtoken@9.0.2)(lodash@4.17.21)(pg@8.16.3)(playwright@1.53.2)(react@19.1.0)(redis@5.6.1)(ws@8.18.3) '@mastra/client-js': specifier: ^0.10.18 version: 0.10.18(@sinclair/typebox@0.34.37)(openapi-types@12.1.3)(react@19.1.0)(zod@3.25.67) @@ -438,6 +447,12 @@ importers: specifier: ^3.25.67 version: 3.25.67 devDependencies: + '@ag-ui/client': + specifier: workspace:* + version: link:../../packages/client + '@ag-ui/core': + specifier: workspace:* + version: link:../../packages/core '@mastra/core': specifier: ^0.13.0 version: 0.13.1(@sinclair/typebox@0.34.37)(openapi-types@12.1.3)(react@19.1.0)(zod@3.25.67) @@ -490,13 +505,16 @@ importers: integrations/pydantic-ai: dependencies: - '@ag-ui/client': - specifier: workspace:* - version: link:../../packages/client rxjs: specifier: 7.8.1 version: 7.8.1 devDependencies: + '@ag-ui/client': + specifier: workspace:* + version: link:../../packages/client + '@ag-ui/core': + specifier: workspace:* + version: link:../../packages/core '@types/jest': specifier: ^29.5.14 version: 29.5.14 @@ -574,9 +592,6 @@ importers: integrations/vercel-ai-sdk: dependencies: - '@ag-ui/client': - specifier: workspace:* - version: 
link:../../packages/client ai: specifier: ^4.3.16 version: 4.3.16(react@19.1.0)(zod@3.25.17) @@ -587,6 +602,12 @@ importers: specifier: ^3.22.4 version: 3.25.17 devDependencies: + '@ag-ui/client': + specifier: workspace:* + version: link:../../packages/client + '@ag-ui/core': + specifier: workspace:* + version: link:../../packages/core '@types/jest': specifier: ^29.5.14 version: 29.5.14 @@ -792,9 +813,15 @@ packages: '@ag-ui/core@0.0.35': resolution: {integrity: sha512-YAqrln3S3fdo+Hs5FFQPODXiBttyilv/E3xSSHCuxqC0Y/Fp3+VqyDx97BorO3NVp2VKZ9cG2nsO3cbmcTwkQw==} + '@ag-ui/core@0.0.37': + resolution: {integrity: sha512-7bmjPn1Ol0Zo00F+MrPr0eOwH4AFZbhmq/ZMhCsrMILtVYBiBLcLU9QFBpBL3Zm9MCHha8b79N7JE2FzwcMaVA==} + '@ag-ui/encoder@0.0.35': resolution: {integrity: sha512-Ym0h0ZKIiD1Ld3+e3v/WQSogY62xs72ysoEBW1kt+dDs79QazBsW5ZlcBBj2DelEs9NrczQLxTVEvrkcvhrHqA==} + '@ag-ui/encoder@0.0.37': + resolution: {integrity: sha512-KD5t0ll3n1pn1ZX1xwQ1YxYZrtJjIttLEsUpj8mQgfh8+ZQ1ZSvlPSciKOQkHf7+Sw9eS6kHVDd5nOOLV1N1xw==} + '@ag-ui/langgraph@0.0.7': resolution: {integrity: sha512-KARfd7xJ9iDTMF0IOhRLgeVb+Jur9sjXI4rilDTSblkGT9/L56YFTkqrkGt+E7QF+qvbReN1SQuu2JxmUFkO9Q==} @@ -804,6 +831,9 @@ packages: '@ag-ui/proto@0.0.35': resolution: {integrity: sha512-+rz3LAYHcR3D2xVgRKa7QE5mp+cwmZs6j+1XxG5dT7HNdg51uKea12L57EVY2bxE3JzpAvCIgOjFEmQCNH82pw==} + '@ag-ui/proto@0.0.37': + resolution: {integrity: sha512-x6Mzrp2//19PW+BHSHajjrGvL8qv3BDwabvW/eCDa7SaDB4L5o1ZbklSa8JX+jt6DHdb5jnvUVq9KTEDGBtMcw==} + '@ai-sdk/anthropic@1.2.12': resolution: {integrity: sha512-YSzjlko7JvuiyQFmI9RN1tNZdEiZxc+6xld/0tq/VkJaHpEzGAb1yiNxxvmYVcjvfu/PcvCxAAYXmTYQQ63IHQ==} engines: {node: '>=18'} @@ -2729,6 +2759,20 @@ packages: react: optional: true + '@langchain/langgraph-sdk@0.1.2': + resolution: {integrity: sha512-y3daUURAlDdUhmxWWUqwsph5W5YbBq44b5VPpGErZRZqaHWH7RhavfYtfxJqUGCn4dRMMxrNNhQU+ir4jILJJQ==} + peerDependencies: + '@langchain/core': '>=0.2.31 <0.4.0' + react: ^18 || ^19 + react-dom: ^18 || ^19 + peerDependenciesMeta: + 
'@langchain/core': + optional: true + react: + optional: true + react-dom: + optional: true + '@langchain/openai@0.4.9': resolution: {integrity: sha512-NAsaionRHNdqaMjVLPkFCyjUDze+OqRHghA1Cn4fPoAafz+FXcl9c7LlEl9Xo0FH6/8yiCl7Rw2t780C/SBVxQ==} engines: {node: '>=18'} @@ -7137,7 +7181,6 @@ packages: libsql@0.5.17: resolution: {integrity: sha512-RRlj5XQI9+Wq+/5UY8EnugSWfRmHEw4hn3DKlPrkUgZONsge1PwTtHcpStP6MSNi8ohcbsRgEHJaymA33a8cBw==} - cpu: [x64, arm64, wasm32, arm] os: [darwin, linux, win32] lightningcss-darwin-arm64@1.30.1: @@ -9512,11 +9555,21 @@ snapshots: rxjs: 7.8.1 zod: 3.25.71 + '@ag-ui/core@0.0.37': + dependencies: + rxjs: 7.8.1 + zod: 3.25.71 + '@ag-ui/encoder@0.0.35': dependencies: '@ag-ui/core': 0.0.35 '@ag-ui/proto': 0.0.35 + '@ag-ui/encoder@0.0.37': + dependencies: + '@ag-ui/core': 0.0.37 + '@ag-ui/proto': 0.0.37 + '@ag-ui/langgraph@0.0.7(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71))(react@19.1.0)': dependencies: '@ag-ui/client': 0.0.35 @@ -9551,6 +9604,12 @@ snapshots: '@ag-ui/core': 0.0.35 '@bufbuild/protobuf': 2.6.0 + '@ag-ui/proto@0.0.37': + dependencies: + '@ag-ui/core': 0.0.37 + '@bufbuild/protobuf': 2.6.0 + '@protobuf-ts/protoc': 2.11.1 + '@ai-sdk/anthropic@1.2.12(zod@3.25.67)': dependencies: '@ai-sdk/provider': 1.1.3 @@ -11271,8 +11330,8 @@ snapshots: '@copilotkit/shared': 1.10.1 '@graphql-yoga/plugin-defer-stream': 3.13.4(graphql-yoga@5.13.4(graphql@16.11.0))(graphql@16.11.0) '@langchain/aws': 0.1.11(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71))) - '@langchain/community': 0.3.43(6700d873fa55f615078a7d0bda5c02a5) - '@langchain/core': 
0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.100.0(ws@8.18.3)(zod@3.25.67)) + '@langchain/community': 0.3.43(7diojckpk4p4gsegiv67ljolbe) + '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) '@langchain/google-gauth': 0.1.8(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(zod@3.25.71) '@langchain/langgraph-sdk': 0.0.70(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(react@19.1.0) '@langchain/openai': 0.4.9(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(ws@8.18.3) @@ -11444,18 +11503,18 @@ snapshots: - ws - youtubei.js - 
'@copilotkit/runtime@1.9.3(@ag-ui/client@packages+client)(@ag-ui/core@0.0.35)(@ag-ui/encoder@0.0.35)(@ag-ui/proto@0.0.35)(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.844.0)(@aws-sdk/client-bedrock-runtime@3.844.0)(@aws-sdk/client-dynamodb@3.859.0)(@aws-sdk/client-kendra@3.844.0)(@aws-sdk/credential-provider-node@3.859.0)(@browserbasehq/sdk@2.6.0)(@browserbasehq/stagehand@2.4.0(deepmerge@4.3.1)(dotenv@17.0.1)(react@19.1.0)(zod@3.25.67))(@ibm-cloud/watsonx-ai@1.6.8)(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(@smithy/util-utf8@2.3.0)(@upstash/redis@1.35.1)(axios@1.10.0)(cohere-ai@7.17.1)(fast-xml-parser@5.2.5)(google-auth-library@10.1.0)(ibm-cloud-sdk-core@5.4.0)(ignore@5.3.2)(jsonwebtoken@9.0.2)(lodash@4.17.21)(pg@8.16.3)(playwright@1.53.2)(react@19.1.0)(redis@5.6.1)(ws@8.18.3)': + '@copilotkit/runtime@1.9.3(@ag-ui/client@packages+client)(@ag-ui/core@packages+core)(@ag-ui/encoder@0.0.37)(@ag-ui/proto@0.0.37)(@aws-crypto/sha256-js@5.2.0)(@aws-sdk/client-bedrock-agent-runtime@3.844.0)(@aws-sdk/client-bedrock-runtime@3.844.0)(@aws-sdk/client-dynamodb@3.859.0)(@aws-sdk/client-kendra@3.844.0)(@aws-sdk/credential-provider-node@3.859.0)(@browserbasehq/sdk@2.6.0)(@browserbasehq/stagehand@2.4.0(deepmerge@4.3.1)(dotenv@17.0.1)(react@19.1.0)(zod@3.25.67))(@ibm-cloud/watsonx-ai@1.6.8)(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(@smithy/util-utf8@2.3.0)(@upstash/redis@1.35.1)(axios@1.10.0)(cohere-ai@7.17.1)(fast-xml-parser@5.2.5)(google-auth-library@10.1.0)(ibm-cloud-sdk-core@5.4.0)(ignore@5.3.2)(jsonwebtoken@9.0.2)(lodash@4.17.21)(pg@8.16.3)(playwright@1.53.2)(react@19.1.0)(redis@5.6.1)(ws@8.18.3)': dependencies: '@ag-ui/client': link:packages/client - '@ag-ui/core': 0.0.35 - '@ag-ui/encoder': 0.0.35 + 
'@ag-ui/core': link:packages/core + '@ag-ui/encoder': 0.0.37 '@ag-ui/langgraph': 0.0.7(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71))(react@19.1.0) - '@ag-ui/proto': 0.0.35 + '@ag-ui/proto': 0.0.37 '@anthropic-ai/sdk': 0.27.3 '@copilotkit/shared': 1.9.3 '@graphql-yoga/plugin-defer-stream': 3.13.4(graphql-yoga@5.13.4(graphql@16.11.0))(graphql@16.11.0) '@langchain/aws': 0.1.11(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71))) - '@langchain/community': 0.3.43(6700d873fa55f615078a7d0bda5c02a5) + '@langchain/community': 0.3.43(7diojckpk4p4gsegiv67ljolbe) '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) '@langchain/google-gauth': 0.1.8(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(zod@3.25.71) '@langchain/langgraph-sdk': 0.0.70(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(react@19.1.0) @@ -12441,15 +12500,15 @@ snapshots: '@aws-sdk/client-bedrock-runtime': 3.844.0 '@aws-sdk/client-kendra': 3.844.0 '@aws-sdk/credential-provider-node': 3.859.0 - '@langchain/core': 
0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.100.0(ws@8.18.3)(zod@3.25.67)) + '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) transitivePeerDependencies: - aws-crt - '@langchain/community@0.3.43(6700d873fa55f615078a7d0bda5c02a5)': + '@langchain/community@0.3.43(7diojckpk4p4gsegiv67ljolbe)': dependencies: '@browserbasehq/stagehand': 2.4.0(deepmerge@4.3.1)(dotenv@17.0.1)(react@19.1.0)(zod@3.25.67) '@ibm-cloud/watsonx-ai': 1.6.8 - '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.100.0(ws@8.18.3)(zod@3.25.67)) + '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) '@langchain/openai': 0.4.9(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(ws@8.18.3) binary-extensions: 2.3.0 expr-eval: 2.0.2 @@ -12457,7 +12516,7 @@ snapshots: ibm-cloud-sdk-core: 5.4.0 js-yaml: 4.1.0 langchain: 
0.3.26(@langchain/aws@0.1.11(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71))))(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(axios@1.10.0)(openai@4.104.0(ws@8.18.3)(zod@3.25.71))(ws@8.18.3) - langsmith: 0.3.49(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) + langsmith: 0.3.49(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(zod@3.25.71)) openai: 4.104.0(ws@8.18.3)(zod@3.25.71) uuid: 10.0.0 zod: 3.25.71 @@ -12503,7 +12562,7 @@ snapshots: - handlebars - peggy - '@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.100.0(ws@8.18.3)(zod@3.25.67))': + '@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71))': dependencies: '@cfworker/json-schema': 4.1.1 ansi-styles: 5.2.0 @@ -12523,14 +12582,14 @@ snapshots: - '@opentelemetry/sdk-trace-base' - openai - '@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71))': + 
'@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(zod@3.25.71))': dependencies: '@cfworker/json-schema': 4.1.1 ansi-styles: 5.2.0 camelcase: 6.3.0 decamelize: 1.2.0 js-tiktoken: 1.0.20 - langsmith: 0.3.49(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) + langsmith: 0.3.49(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(zod@3.25.71)) mustache: 4.2.0 p-queue: 6.6.2 p-retry: 4.6.2 @@ -12545,7 +12604,7 @@ snapshots: '@langchain/google-common@0.1.8(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(zod@3.25.71)': dependencies: - '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.100.0(ws@8.18.3)(zod@3.25.67)) + '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) uuid: 10.0.0 zod-to-json-schema: 3.24.6(zod@3.25.71) transitivePeerDependencies: @@ -12553,7 +12612,7 @@ snapshots: 
'@langchain/google-gauth@0.1.8(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(zod@3.25.71)': dependencies: - '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.100.0(ws@8.18.3)(zod@3.25.67)) + '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) '@langchain/google-common': 0.1.8(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(zod@3.25.71) google-auth-library: 8.9.0 transitivePeerDependencies: @@ -12579,7 +12638,7 @@ snapshots: p-retry: 4.6.2 uuid: 9.0.1 optionalDependencies: - '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.100.0(ws@8.18.3)(zod@3.25.67)) + '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) react: 19.1.0 '@langchain/langgraph-sdk@0.0.78(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(react@19.1.0)': @@ -12592,9 +12651,20 @@ snapshots: '@langchain/core': 
0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) react: 19.1.0 + '@langchain/langgraph-sdk@0.1.2(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(zod@3.25.71)))(react-dom@19.1.0(react@19.1.0))(react@19.1.0)': + dependencies: + '@types/json-schema': 7.0.15 + p-queue: 6.6.2 + p-retry: 4.6.2 + uuid: 9.0.1 + optionalDependencies: + '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(zod@3.25.71)) + react: 19.1.0 + react-dom: 19.1.0(react@19.1.0) + '@langchain/openai@0.4.9(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(ws@8.18.3)': dependencies: - '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.100.0(ws@8.18.3)(zod@3.25.67)) + '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) js-tiktoken: 1.0.20 openai: 4.104.0(ws@8.18.3)(zod@3.25.71) zod: 3.25.71 @@ -16469,7 +16539,7 @@ snapshots: transitivePeerDependencies: - supports-color - 
eslint-module-utils@2.12.0(@typescript-eslint/parser@8.32.1(eslint@9.27.0(jiti@2.4.2))(typescript@5.8.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.27.0(jiti@2.4.2)): + eslint-module-utils@2.12.0(@typescript-eslint/parser@8.32.1(eslint@9.27.0(jiti@2.4.2))(typescript@5.8.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.31.0)(eslint@9.27.0(jiti@2.4.2)))(eslint@9.27.0(jiti@2.4.2)): dependencies: debug: 3.2.7 optionalDependencies: @@ -16491,7 +16561,7 @@ snapshots: doctrine: 2.1.0 eslint: 9.27.0(jiti@2.4.2) eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.12.0(@typescript-eslint/parser@8.32.1(eslint@9.27.0(jiti@2.4.2))(typescript@5.8.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1)(eslint@9.27.0(jiti@2.4.2)) + eslint-module-utils: 2.12.0(@typescript-eslint/parser@8.32.1(eslint@9.27.0(jiti@2.4.2))(typescript@5.8.2))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.10.1(eslint-plugin-import@2.31.0)(eslint@9.27.0(jiti@2.4.2)))(eslint@9.27.0(jiti@2.4.2)) hasown: 2.0.2 is-core-module: 2.16.1 is-glob: 4.0.3 @@ -17436,7 +17506,7 @@ snapshots: isstream: 0.1.2 jsonwebtoken: 9.0.2 mime-types: 2.1.35 - retry-axios: 2.6.0(axios@1.10.0) + retry-axios: 2.6.0(axios@1.10.0(debug@4.4.1)) tough-cookie: 4.1.4 transitivePeerDependencies: - supports-color @@ -18209,7 +18279,7 @@ snapshots: 
langchain@0.3.26(@langchain/aws@0.1.11(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71))))(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(axios@1.10.0)(openai@4.104.0(ws@8.18.3)(zod@3.25.71))(ws@8.18.3): dependencies: - '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.100.0(ws@8.18.3)(zod@3.25.67)) + '@langchain/core': 0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)) '@langchain/openai': 0.4.9(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)))(ws@8.18.3) '@langchain/textsplitters': 0.1.0(@langchain/core@0.3.66(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71))) js-tiktoken: 1.0.20 @@ -18257,7 +18327,7 @@ snapshots: '@opentelemetry/sdk-trace-base': 2.0.1(@opentelemetry/api@1.9.0) openai: 4.100.0(ws@8.18.3)(zod@3.25.67) - langsmith@0.3.49(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(ws@8.18.3)(zod@3.25.71)): + 
langsmith@0.3.49(@opentelemetry/api@1.9.0)(@opentelemetry/exporter-trace-otlp-proto@0.203.0(@opentelemetry/api@1.9.0))(@opentelemetry/sdk-trace-base@2.0.1(@opentelemetry/api@1.9.0))(openai@4.104.0(zod@3.25.71)): dependencies: '@types/uuid': 10.0.0 chalk: 4.1.2 @@ -20172,7 +20242,7 @@ snapshots: onetime: 5.1.2 signal-exit: 3.0.7 - retry-axios@2.6.0(axios@1.10.0): + retry-axios@2.6.0(axios@1.10.0(debug@4.4.1)): dependencies: axios: 1.10.0(debug@4.4.1) diff --git a/typescript-sdk/turbo.json b/typescript-sdk/turbo.json index 0ed99e4ab..7e323977f 100644 --- a/typescript-sdk/turbo.json +++ b/typescript-sdk/turbo.json @@ -42,6 +42,11 @@ }, "unlink:global": { "cache": false + }, + "start": { + "dependsOn": ["^build"], + "cache": false, + "persistent": true } } } From e3148a813949a0b4c62dc65fe754a100328ef959 Mon Sep 17 00:00:00 2001 From: Wouter Doppenberg Date: Thu, 18 Sep 2025 08:34:54 +0200 Subject: [PATCH 8/8] Reintroduced JSONValue; Fixed tests --- python-sdk/ag_ui/core/events.py | 18 +++++++++--------- python-sdk/pyproject.toml | 3 +++ python-sdk/tests/test_text_roles.py | 1 - 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/python-sdk/ag_ui/core/events.py b/python-sdk/ag_ui/core/events.py index 2a54a9c8e..74fb23c4f 100644 --- a/python-sdk/ag_ui/core/events.py +++ b/python-sdk/ag_ui/core/events.py @@ -3,11 +3,11 @@ """ from enum import Enum -from typing import Annotated, Any, List, Literal, Optional, Union +from typing import Annotated, List, Literal, Optional, Union, Generic from pydantic import Field -from .types import ConfiguredBaseModel, Message, State, Role +from .types import ConfiguredBaseModel, Message, StateT, JSONValue # Text messages can have any role except "tool" TextMessageRole = Literal["developer", "system", "assistant", "user"] @@ -49,7 +49,7 @@ class BaseEvent(ConfiguredBaseModel): """ type: EventType timestamp: Optional[int] = None - raw_event: Optional[Any] = None + raw_event: Optional[JSONValue] = None class 
TextMessageStartEvent(BaseEvent): @@ -164,12 +164,12 @@ class ThinkingEndEvent(BaseEvent): """ type: Literal[EventType.THINKING_END] = EventType.THINKING_END # pyright: ignore[reportIncompatibleVariableOverride] -class StateSnapshotEvent(BaseEvent): +class StateSnapshotEvent(BaseEvent, Generic[StateT]): """ Event containing a snapshot of the state. """ type: Literal[EventType.STATE_SNAPSHOT] = EventType.STATE_SNAPSHOT # pyright: ignore[reportIncompatibleVariableOverride] - snapshot: State + snapshot: StateT class StateDeltaEvent(BaseEvent): @@ -177,7 +177,7 @@ class StateDeltaEvent(BaseEvent): Event containing a delta of the state. """ type: Literal[EventType.STATE_DELTA] = EventType.STATE_DELTA # pyright: ignore[reportIncompatibleVariableOverride] - delta: List[Any] # JSON Patch (RFC 6902) + delta: List[JSONValue] # JSON Patch (RFC 6902) class MessagesSnapshotEvent(BaseEvent): @@ -193,7 +193,7 @@ class RawEvent(BaseEvent): Event containing a raw event. """ type: Literal[EventType.RAW] = EventType.RAW # pyright: ignore[reportIncompatibleVariableOverride] - event: Any + event: JSONValue source: Optional[str] = None @@ -203,7 +203,7 @@ class CustomEvent(BaseEvent): """ type: Literal[EventType.CUSTOM] = EventType.CUSTOM # pyright: ignore[reportIncompatibleVariableOverride] name: str - value: Any + value: JSONValue class RunStartedEvent(BaseEvent): @@ -222,7 +222,7 @@ class RunFinishedEvent(BaseEvent): type: Literal[EventType.RUN_FINISHED] = EventType.RUN_FINISHED # pyright: ignore[reportIncompatibleVariableOverride] thread_id: str run_id: str - result: Optional[Any] = None + result: Optional[JSONValue] = None class RunErrorEvent(BaseEvent): diff --git a/python-sdk/pyproject.toml b/python-sdk/pyproject.toml index dbc7fe56c..75f24243c 100644 --- a/python-sdk/pyproject.toml +++ b/python-sdk/pyproject.toml @@ -10,6 +10,9 @@ requires-python = ">=3.9,<4.0" dependencies = [ "pydantic>=2.11.2,<3.0.0", ] +packages = [ + "ag_ui" +] [build-system] requires = ["hatchling"] diff 
--git a/python-sdk/tests/test_text_roles.py b/python-sdk/tests/test_text_roles.py index d042108bb..0ed02b51b 100644 --- a/python-sdk/tests/test_text_roles.py +++ b/python-sdk/tests/test_text_roles.py @@ -8,7 +8,6 @@ TextMessageContentEvent, TextMessageEndEvent, TextMessageChunkEvent, - Role, ) # Test all available roles for text messages (excluding "tool")