Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
!/agents/
!/apps/
!/docs/
!/examples/
!/helm/
!/mise.toml
!/mise.lock
Expand Down
4 changes: 4 additions & 0 deletions agentstack.code-workspace
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,10 @@
"name": "agent-canvas",
"path": "agents/canvas",
},
{
"name": "examples",
"path": "examples",
},
{
"name": "helm",
"path": "helm",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

from __future__ import annotations

from typing import Self, TypeVar, cast
from typing import Any, Self, TypeVar, cast

from pydantic import BaseModel, TypeAdapter
from typing_extensions import TypedDict
Expand Down Expand Up @@ -51,4 +51,8 @@ def parse_initial_form(self, *, model: type[T] = FormResponse) -> T | None:
return TypeAdapter(model).validate_python(dict(initial_form))


class FormServiceExtensionClient(BaseExtensionClient[FormServiceExtensionSpec, FormRender]): ...
class FormServiceExtensionClient(BaseExtensionClient[FormServiceExtensionSpec, FormRender]):
    """Client-side helper for the form service extension."""

    def fulfillment_metadata(self, *, form_fulfillments: dict[str, FormResponse]) -> dict[str, Any]:
        """Build message metadata mapping the extension URI to the serialized form fulfillments."""
        payload = FormServiceExtensionMetadata(form_fulfillments=form_fulfillments)
        return {self.spec.URI: payload.model_dump(mode="json")}
2 changes: 2 additions & 0 deletions apps/agentstack-sdk-py/src/agentstack_sdk/server/server.py
Original file line number Diff line number Diff line change
Expand Up @@ -158,6 +158,8 @@ async def serve(

from agentstack_sdk.server.app import create_app

self_registration = False if self._production_mode else self_registration

@asynccontextmanager
async def _lifespan_fn(app: FastAPI) -> AsyncGenerator[None, None]:
async with self._self_registration_client or nullcontext():
Expand Down
3 changes: 2 additions & 1 deletion apps/agentstack-server/.vscode/settings.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
{
"python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python",
"ruff.interpreter": ["${workspaceFolder}/.venv/bin/python"],
"python.analysis.typeCheckingMode": "basic",
"python.analysis.diagnosticMode": "openFilesOnly",
"python.analysis.indexing": true,
Expand All @@ -12,5 +13,5 @@
"python.testing.pytestEnabled": true,
"cursorpyright.analysis.autoImportCompletions": true,
"cursorpyright.analysis.diagnosticMode": "openFilesOnly",
"cursorpyright.analysis.typeCheckingMode": "basic",
"cursorpyright.analysis.typeCheckingMode": "basic"
}
4 changes: 3 additions & 1 deletion apps/agentstack-server/template.env
Original file line number Diff line number Diff line change
Expand Up @@ -29,4 +29,6 @@ OCI_REGISTRY__AGENTSTACK-REGISTRY-SVC.DEFAULT:5001__INSECURE=true
# Change the following variables when using 'mise agentstack-server:dev:start'

# -- optional auth: enable for testing (this runs in pipeline)
# AUTH__DISABLE_AUTH=false
# AUTH__DISABLE_AUTH=false
# AUTH__OIDC__INSECURE_TRANSPORT=true
# TRUST_PROXY_HEADERS=true
3 changes: 2 additions & 1 deletion apps/agentstack-server/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,8 @@ class Configuration(BaseSettings):
model_config = SettingsConfigDict(extra="ignore")
kubeconfig: Path = Path.home() / ".agentstack/lima/agentstack-local-dev/copied-from-guest/kubeconfig.yaml"
llm_api_base: Secret[str] = Secret("http://localhost:11434/v1")
llm_model: str = "other:llama3.1:8b"
# llm_model: str = "other:llama3.1:8b"
llm_model: str = "other:granite4:latest"
llm_api_key: Secret[str] = Secret("dummy")
test_agent_image: str = "agentstack-registry-svc.default:5001/chat-test:latest"
test_agent_build_repo: str = "https://github.com/i-am-bee/agentstack-starter"
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
# Copyright 2025 © BeeAI a Series of LF Projects, LLC
# SPDX-License-Identifier: Apache-2.0

import pytest
from a2a.client.helpers import create_text_message_object
from a2a.types import TaskState
from agentstack_sdk.a2a.extensions import FormResponse, FormServiceExtensionMetadata, FormServiceExtensionSpec
from agentstack_sdk.a2a.extensions.common.form import TextFieldValue

from tests.e2e.examples.conftest import run_example

pytestmark = pytest.mark.e2e


@pytest.mark.usefixtures("clean_up", "setup_platform_client")
async def test_initial_form_rendering_example(subtests, get_final_task_from_stream, a2a_client_factory):
    """End-to-end check that the initial-form-rendering example greets using submitted form values."""
    path = "agent-integration/forms/initial-form-rendering"

    async with run_example(path, a2a_client_factory) as example:
        form_spec = FormServiceExtensionSpec.from_agent_card(example.provider.agent_card)

        with subtests.test("agent responds with greeting using form data"):
            # Fulfill the agent's initial form instead of sending text content.
            fulfillment = FormResponse(
                values={"first_name": TextFieldValue(value="Alice"), "last_name": TextFieldValue(value="Smith")}
            )
            form_metadata = FormServiceExtensionMetadata(
                form_fulfillments={"initial_form": fulfillment}
            ).model_dump(mode="json")

            message = create_text_message_object()
            message.metadata = {form_spec.URI: form_metadata}
            message.context_id = example.context.id

            task = await get_final_task_from_stream(example.client.send_message(message))

            # Verify response
            assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}"
            assert "Hello Alice Smith! Nice to meet you." in task.history[-1].parts[0].root.text
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
# Copyright 2025 © BeeAI a Series of LF Projects, LLC
# SPDX-License-Identifier: Apache-2.0

import pytest
from a2a.client.helpers import create_text_message_object
from a2a.types import TaskState
from agentstack_sdk.a2a.extensions import LLMFulfillment, LLMServiceExtensionClient, LLMServiceExtensionSpec
from agentstack_sdk.platform import ModelProvider

from tests.e2e.examples.conftest import run_example

pytestmark = pytest.mark.e2e


@pytest.mark.usefixtures("clean_up", "setup_real_llm", "setup_platform_client")
async def test_advanced_history_example(subtests, get_final_task_from_stream, a2a_client_factory):
    """End-to-end check that the advanced-history example keeps conversational memory across turns."""
    path = "agent-integration/multi-turn/advanced-history"

    async with run_example(path, a2a_client_factory) as example:
        llm_spec = LLMServiceExtensionSpec.from_agent_card(example.provider.agent_card)
        matched_providers = await ModelProvider.match()
        fulfillment = LLMFulfillment(
            api_key=example.context_token.token.get_secret_value(),
            api_model=matched_providers[0].model_id,
            api_base="{platform_url}/api/v1/openai/",
        )
        metadata = LLMServiceExtensionClient(llm_spec).fulfillment_metadata(llm_fulfillments={"default": fulfillment})

        async def send(text: str):
            # Send one user turn in the shared context and return the final task.
            message = create_text_message_object(content=text)
            message.metadata = metadata
            message.context_id = example.context.id
            return await get_final_task_from_stream(example.client.send_message(message))

        with subtests.test("agent responds with a greeting"):
            task = await send("Hi, my name is John. How are you?")

            # Verify response
            assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}"
            assert any(sub in task.history[-1].parts[0].root.text.lower() for sub in ["hello", "hi"])

        with subtests.test("agent remembers user name from history"):
            task = await send("Can you remind me my name?")

            # Verify response
            assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}"
            assert "john" in task.history[-1].parts[0].root.text.lower()
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# Copyright 2025 © BeeAI a Series of LF Projects, LLC
# SPDX-License-Identifier: Apache-2.0

import pytest
from a2a.client.helpers import create_text_message_object
from a2a.types import TaskState

from tests.e2e.examples.conftest import run_example

pytestmark = pytest.mark.e2e


@pytest.mark.usefixtures("clean_up", "setup_platform_client")
async def test_basic_history_example(subtests, get_final_task_from_stream, a2a_client_factory):
    """End-to-end check that the basic-history example reports the growing message count."""
    path = "agent-integration/multi-turn/basic-history"

    async with run_example(path, a2a_client_factory) as example:
        async def send(text: str):
            # Send one user turn in the shared context and return the final task.
            message = create_text_message_object(content=text)
            message.context_id = example.context.id
            return await get_final_task_from_stream(example.client.send_message(message))

        with subtests.test("agent reports 1 message in history"):
            task = await send("My 1st message")

            # Verify response
            assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}"
            assert "I can see we have 1 messages in our conversation." in task.history[-1].parts[0].root.text

        with subtests.test("agent reports 3 messages after second exchange"):
            task = await send("My 2nd message")

            # Verify response
            assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}"
            assert "I can see we have 3 messages in our conversation." in task.history[-1].parts[0].root.text
93 changes: 93 additions & 0 deletions apps/agentstack-server/tests/e2e/examples/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
# Copyright 2025 © BeeAI a Series of LF Projects, LLC
# SPDX-License-Identifier: Apache-2.0

import os
import signal
import subprocess
from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable
from contextlib import asynccontextmanager
from typing import Any, NamedTuple

import httpx
import pytest
from a2a.client import Client, ClientEvent
from a2a.types import AgentCard, Message, Task
from a2a.utils.constants import AGENT_CARD_WELL_KNOWN_PATH
from agentstack_sdk.platform import Provider
from agentstack_sdk.platform.context import Context, ContextPermissions, ContextToken, Permissions
from tenacity import retry, stop_after_delay, wait_fixed

# Port the example server is told to bind to (via the PORT env var) when launched by run_process.
DEFAULT_PORT = 8000


class RunningExample(NamedTuple):
    """Everything a test needs to talk to a launched example agent."""

    # A2A client connected to the running example server.
    client: Client
    # Platform context the conversation runs in.
    context: Context
    # Token scoped to this context/provider, used to authenticate the client.
    context_token: ContextToken
    # Provider record registered for the example server.
    provider: Provider


def run_process(example_dir_path: str, port: int) -> subprocess.Popen:
    """Launch an example's `uv run server` in its own session (process group).

    Args:
        example_dir_path: Path of the example relative to the repository's `examples/` root.
        port: TCP port the server is told to bind to via the PORT env var.

    Returns:
        The Popen handle. Tear it down with `kill_process` so the whole
        process group (uv plus the server it spawns) dies together.
    """
    cwd = f"../../examples/{example_dir_path}"  # relative to the test run's working directory
    print(f"Running example in {cwd}")
    return subprocess.Popen(
        ["uv", "run", "server"],
        cwd=cwd,
        env={**os.environ, "PORT": str(port), "PRODUCTION_MODE": "true"},
        # Equivalent to preexec_fn=os.setsid but thread-safe: the subprocess docs
        # warn that preexec_fn is unsafe in multithreaded programs (pytest plugins
        # and asyncio commonly spawn threads).
        start_new_session=True,
    )


def kill_process(process: subprocess.Popen) -> None:
    """Terminate the example server's whole process group and reap it.

    Sends SIGTERM to the group so children spawned by `uv` die too. Safe to
    call even if the process already exited; escalates to SIGKILL if the
    group ignores SIGTERM for 30 seconds, so tests cannot hang forever.
    """
    try:
        os.killpg(os.getpgid(process.pid), signal.SIGTERM)
    except ProcessLookupError:
        # Already gone (or already reaped); just fall through to wait().
        pass
    try:
        process.wait(timeout=30)
    except subprocess.TimeoutExpired:
        # Graceful shutdown stalled — force-kill the group and reap.
        try:
            os.killpg(os.getpgid(process.pid), signal.SIGKILL)
        except ProcessLookupError:
            pass
        process.wait()


@retry(stop=stop_after_delay(30), wait=wait_fixed(0.5))
async def _get_agent_card(agent_url: str):
    """Poll the example server until its A2A agent card is served, then return it parsed."""
    url = f"{agent_url}{AGENT_CARD_WELL_KNOWN_PATH}"
    async with httpx.AsyncClient(timeout=None) as client:
        response = await client.get(url)
        response.raise_for_status()
        return AgentCard.model_validate(response.json())


@pytest.fixture
def get_final_task_from_stream() -> Callable[[AsyncIterator[ClientEvent | Message]], Awaitable[Task]]:
async def fn(stream: AsyncIterator[ClientEvent | Message]) -> Task:
"""Helper to extract the final task from a client.send_message stream."""
final_task = None
async for event in stream:
match event:
case (task, None):
final_task = task
case (task, _):
final_task = task
return final_task

return fn


@asynccontextmanager
async def run_example(
    example_dir_path: str,
    a2a_client_factory: Callable[[AgentCard | dict[str, Any], ContextToken], AsyncIterator[Client]],
    port: int = DEFAULT_PORT,
) -> AsyncGenerator[RunningExample]:
    """Start an example server, register it as a provider, and yield a ready-to-use A2A client.

    The server subprocess is always torn down on exit, even if startup or the
    wrapped test body fails.
    """
    process = run_process(example_dir_path, port)
    try:
        example_url = f"http://localhost:{port}"
        # Poll until the server is up and serving its agent card (retries for up to 30s).
        agent_card = await _get_agent_card(example_url)
        provider = await Provider.create(location=example_url, agent_card=agent_card)

        context = await Context.create()
        # Token scoped to this provider: global LLM access plus full context-data permissions.
        context_token = await context.generate_token(
            providers={provider.id},
            grant_global_permissions=Permissions(llm={"*"}),
            grant_context_permissions=ContextPermissions(context_data={"*"}),
        )

        async with a2a_client_factory(provider.agent_card, context_token) as a2a_client:
            yield RunningExample(a2a_client, context, context_token, provider)
    finally:
        kill_process(process)
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# Copyright 2025 © BeeAI a Series of LF Projects, LLC
# SPDX-License-Identifier: Apache-2.0

import pytest
from a2a.client.helpers import create_text_message_object
from a2a.types import TaskState

from tests.e2e.examples.conftest import run_example

pytestmark = pytest.mark.e2e


@pytest.mark.usefixtures("clean_up", "setup_platform_client")
async def test_implement_your_agent_logic_example(subtests, get_final_task_from_stream, a2a_client_factory):
    """End-to-end check that the building-agents example greets the given name in Italian."""
    path = "deploy-agents/building-agents/implement-your-agent-logic"

    async with run_example(path, a2a_client_factory) as example:
        with subtests.test("greets user by name with Italian greeting"):
            message = create_text_message_object(content="Pedro")
            message.context_id = example.context.id

            task = await get_final_task_from_stream(example.client.send_message(message))

            # Verify response
            assert task.status.state == TaskState.completed, f"Fail: {task.status.message.parts[0].root.text}"
            assert "Ciao Pedro!" in task.history[-1].parts[0].root.text
1 change: 1 addition & 0 deletions docs/.embedmeignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
*/**/deploy-your-agents.mdx
18 changes: 12 additions & 6 deletions docs/development/agent-integration/forms.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -34,28 +34,31 @@ Call `form.parse_initial_form(model=YourModel)` to extract the submitted form da
</Step>
</Steps>

{/* <!-- embedme examples/agent-integration/forms/initial-form-rendering/src/initial_form_rendering/agent.py --> */}
```python
# Copyright 2025 © BeeAI a Series of LF Projects, LLC
# SPDX-License-Identifier: Apache-2.0

import os
from typing import Annotated

from a2a.types import Message
from pydantic import BaseModel
from agentstack_sdk.server import Server
from agentstack_sdk.a2a.extensions.common.form import FormRender, TextField
from agentstack_sdk.a2a.extensions.services.form import (
FormServiceExtensionServer,
FormServiceExtensionSpec,
)
from agentstack_sdk.server import Server
from pydantic import BaseModel

server = Server()


class UserInfo(BaseModel):
first_name: str | None
last_name: str | None


@server.agent()
async def initial_form_agent(
async def initial_form_rendering_example(
_message: Message,
form: Annotated[
FormServiceExtensionServer,
Expand All @@ -82,8 +85,11 @@ async def initial_form_agent(
yield f"Hello {user_info.first_name} {user_info.last_name}! Nice to meet you."


def run():
server.run(host=os.getenv("HOST", "127.0.0.1"), port=int(os.getenv("PORT", 8000)))

if __name__ == "__main__":
server.run()
run()
```

## Dynamic Form Requests
Expand Down
Loading