Merged
19 changes: 11 additions & 8 deletions apps/beeai-cli/src/beeai_cli/async_typer.py
@@ -80,16 +80,19 @@ def wrapped_f(*args, **kwargs):
else:
return f(*args, **kwargs)
except* Exception as ex:
is_connect_error = False
for exc_type, message in extract_messages(ex):
err_console.print(format_error(exc_type, message))
if exc_type in ["ConnectionError", "ConnectError"]:
err_console.hint(
"Start the BeeAI platform using: [green]beeai platform start[/green]. If that does not help, run [green]beeai platform delete[/green] to clean up, then [green]beeai platform start[/green] again."
)
else:
err_console.hint(
"Are you having consistent problems? If so, try these troubleshooting steps: [green]beeai platform delete[/green] to remove the platform, and [green]beeai platform start[/green] to recreate it."
)
is_connect_error = is_connect_error or exc_type in ["ConnectionError", "ConnectError"]
err_console.print()
if is_connect_error:
err_console.hint(
"Start the BeeAI platform using: [green]beeai platform start[/green]. If that does not help, run [green]beeai platform delete[/green] to clean up, then [green]beeai platform start[/green] again."
)
else:
err_console.hint(
"Are you having consistent problems? If so, try these troubleshooting steps: [green]beeai platform delete[/green] to remove the platform, and [green]beeai platform start[/green] to recreate it."
)
if DEBUG:
raise

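A minimal sketch (Python 3.11+, not part of the change) of the behaviour this hunk addresses: under except*, ex is an exception group that may wrap several sub-exceptions, so the connection hint is now decided once per group instead of being printed for every sub-exception. extract_messages is the helper from beeai_cli.utils shown later in this diff.

from beeai_cli.utils import extract_messages

try:
    raise BaseExceptionGroup("boom", [ConnectionError("refused"), ValueError("bad input")])
except* Exception as ex:
    messages = extract_messages(ex)  # [("ConnectionError", "refused"), ("ValueError", "bad input")]
    is_connect_error = any(t in ("ConnectionError", "ConnectError") for t, _ in messages)
    print(is_connect_error)  # True -> a single "beeai platform start" hint would be printed
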
25 changes: 4 additions & 21 deletions apps/beeai-cli/src/beeai_cli/commands/agent.py
@@ -92,11 +92,12 @@
from rich.markdown import Markdown
from rich.table import Column

from beeai_cli.api import a2a_client, api_stream
from beeai_cli.api import a2a_client
from beeai_cli.async_typer import AsyncTyper, console, create_table, err_console
from beeai_cli.utils import (
generate_schema_example,
parse_env_var,
print_log,
prompt_user,
remove_nullable,
run_command,
@@ -145,24 +146,6 @@ def short_location(provider: Provider) -> str:
configuration = Configuration()


def _print_log(line, ansi_mode=False):
if "error" in line:

class CustomError(Exception): ...

CustomError.__name__ = line["error"]["type"]

raise CustomError(line["error"]["detail"])

def decode(text: str):
return Text.from_ansi(text) if ansi_mode else text

if line["stream"] == "stderr":
err_console.print(decode(line["message"]))
elif line["stream"] == "stdout":
console.print(decode(line["message"]))


@app.command("add")
async def add_agent(
location: typing.Annotated[
@@ -243,8 +226,8 @@ async def stream_logs(
"""Stream agent provider logs"""
async with configuration.use_platform_client():
provider = select_provider(search_path, await Provider.list()).id
async for message in api_stream("get", f"providers/{provider}/logs"):
_print_log(message)
async for message in Provider.stream_logs(provider):
print_log(message, ansi_mode=True)


async def _ask_form_questions(form_render: FormRender) -> FormResponse:
25 changes: 24 additions & 1 deletion apps/beeai-cli/src/beeai_cli/commands/build.py
@@ -16,12 +16,13 @@
import typer
from a2a.utils import AGENT_CARD_WELL_KNOWN_PATH
from anyio import open_process
from beeai_sdk.platform.provider_build import BuildState, ProviderBuild
from httpx import AsyncClient, HTTPError
from tenacity import AsyncRetrying, retry_if_exception_type, stop_after_delay, wait_fixed

from beeai_cli.async_typer import AsyncTyper
from beeai_cli.console import console
from beeai_cli.utils import capture_output, extract_messages, run_command, status, verbosity
from beeai_cli.utils import capture_output, extract_messages, print_log, run_command, status, verbosity


async def find_free_port():
@@ -128,3 +129,25 @@ async def build(
await driver.import_image(tag)

return tag, agent_card


@app.command("server-side-build")
async def server_side_build_experimental(
github_url: typing.Annotated[
str, typer.Argument(..., help="Github repository URL (public or private if supported by the platform instance)")
],
):
"""EXPERIMENTAL: Build agent from github repository in the platform."""
from beeai_cli.configuration import Configuration

async with Configuration().use_platform_client():
build = await ProviderBuild.create(location=github_url)
async for message in build.stream_logs():
print_log(message, ansi_mode=True)
build = await build.get()
if build.status == BuildState.COMPLETED:
console.success(
f"Agent built successfully, add it to the platform using: [bold]beeai add {build.destination}[/bold]"
)
else:
console.error("Agent build failed, see logs above for details.")
12 changes: 11 additions & 1 deletion apps/beeai-cli/src/beeai_cli/commands/platform/__init__.py
@@ -5,6 +5,7 @@
import functools
import importlib.resources
import os
import pathlib
import platform
import shutil
import sys
@@ -67,17 +68,26 @@ async def start(
"--import", help="Import an image from a local Docker CLI into BeeAI platform", default_factory=list
),
],
values_file: typing.Annotated[
pathlib.Path | None, typer.Option("-f", help="Set Helm chart values using yaml values file")
] = None,
vm_name: typing.Annotated[str, typer.Option(hidden=True)] = "beeai-platform",
verbose: typing.Annotated[bool, typer.Option("-v", help="Show verbose output")] = False,
):
"""Start BeeAI platform."""
import beeai_cli.commands.server

values_file_path = None
if values_file:
values_file_path = pathlib.Path(values_file)
if not values_file_path.is_file():
raise FileNotFoundError(f"Values file {values_file} not found.")

with verbosity(verbose):
driver = get_driver(vm_name=vm_name)
await driver.create_vm()
await driver.install_tools()
await driver.deploy(set_values_list=set_values_list, import_images=import_images)
await driver.deploy(set_values_list=set_values_list, values_file=values_file_path, import_images=import_images)

with console.status("Waiting for BeeAI platform to be ready...", spinner="dots"):
timeout = datetime.timedelta(minutes=20)
65 changes: 50 additions & 15 deletions apps/beeai-cli/src/beeai_cli/commands/platform/base_driver.py
@@ -3,9 +3,11 @@

import abc
import importlib.resources
import pathlib
import shlex
import typing
from subprocess import CompletedProcess
from textwrap import dedent

import anyio
import pydantic
@@ -50,6 +52,33 @@ async def import_image(self, tag: str) -> None: ...
async def exec(self, command: list[str]) -> None: ...

async def install_tools(self) -> None:
# Configure k3s registry for local registry access
registry_config = dedent(
"""\
mirrors:
"beeai-platform-registry-svc.default:5001":
endpoint:
- "http://localhost:30501"
configs:
"beeai-platform-registry-svc.default:5001":
tls:
insecure_skip_verify: true
"""
)

await self.run_in_vm(
[
"sh",
"-c",
(
f"sudo mkdir -p /etc/rancher/k3s /registry-data && "
f"echo '{registry_config}' | "
"sudo tee /etc/rancher/k3s/registries.yaml > /dev/null"
),
],
"Configuring k3s registry",
)

await self.run_in_vm(
[
"sh",
@@ -67,29 +96,35 @@ async def install_tools(self) -> None:
"Installing Helm",
)

async def deploy(self, set_values_list: list[str], import_images: list[str] | None = None) -> None:
async def deploy(
self,
set_values_list: list[str],
values_file: pathlib.Path | None = None,
import_images: list[str] | None = None,
) -> None:
await self.run_in_vm(
["sh", "-c", "mkdir -p /tmp/beeai && cat >/tmp/beeai/chart.tgz"],
"Preparing Helm chart",
input=(importlib.resources.files("beeai_cli") / "data" / "helm-chart.tgz").read_bytes(),
)
values = {
**{svc: {"service": {"type": "LoadBalancer"}} for svc in ["collector", "docling", "ui", "phoenix"]},
"hostNetwork": True,
"externalRegistries": {"public_github": str(Configuration().agent_registry)},
"encryptionKey": "Ovx8qImylfooq4-HNwOzKKDcXLZCB3c_m0JlB9eJBxc=",
"features": {
"uiNavigation": True,
"selfRegistration": True,
"generateConversationTitle": False, # TODO: enable when UI implementation is ready
},
"auth": {"enabled": False},
}
if values_file:
values.update(yaml.safe_load(values_file.read_text()))
await self.run_in_vm(
["sh", "-c", "cat >/tmp/beeai/values.yaml"],
"Preparing Helm values",
input=yaml.dump(
{
**{svc: {"service": {"type": "LoadBalancer"}} for svc in ["collector", "docling", "ui", "phoenix"]},
"hostNetwork": True,
"externalRegistries": {"public_github": str(Configuration().agent_registry)},
"encryptionKey": "Ovx8qImylfooq4-HNwOzKKDcXLZCB3c_m0JlB9eJBxc=",
"features": {
"uiNavigation": True,
"selfRegistration": True,
"generateConversationTitle": False, # TODO: enable when UI implementation is ready
},
"auth": {"enabled": False},
}
).encode("utf-8"),
input=yaml.dump(values).encode("utf-8"),
)

images_str = (
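The new values_file is merged into the defaults above with dict.update, i.e. a shallow merge: a top-level key in the user's file (passed via beeai platform start -f values.yaml) replaces the whole default for that key rather than being merged into it. A small illustrative sketch, with made-up values:

import yaml

defaults = {
    "auth": {"enabled": False},
    "features": {"uiNavigation": True, "selfRegistration": True},
}
user_values = yaml.safe_load("features:\n  uiNavigation: false\n")
defaults.update(user_values)  # shallow merge, as in deploy() above
print(defaults["features"])   # {'uiNavigation': False} -- 'selfRegistration' is gone
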
9 changes: 7 additions & 2 deletions apps/beeai-cli/src/beeai_cli/commands/platform/wsl_driver.py
@@ -140,7 +140,12 @@ async def create_vm(self):
await self.run_in_vm(["dbus-launch", "true"], "Ensuring persistence of BeeAI VM")

@typing.override
async def deploy(self, set_values_list: list[str], import_images: list[str] | None = None) -> None:
async def deploy(
self,
set_values_list: list[str],
values_file: pathlib.Path | None = None,
import_images: list[str] | None = None,
) -> None:
await self.run_in_vm(
["k3s", "kubectl", "apply", "-f", "-"],
"Setting up internal networking",
@@ -155,7 +160,7 @@ async def deploy(self, set_values_list: list[str], import_images: list[str] | No
}
).encode(),
)
await super().deploy(set_values_list=set_values_list, import_images=import_images)
await super().deploy(set_values_list=set_values_list, values_file=values_file, import_images=import_images)
await self.run_in_vm(
["sh", "-c", "cat >/etc/systemd/system/[email protected]"],
"Installing systemd unit for port-forwarding",
27 changes: 26 additions & 1 deletion apps/beeai-cli/src/beeai_cli/utils.py
@@ -19,6 +19,7 @@

import anyio
import anyio.abc
import httpx
import typer
import yaml
from anyio import create_task_group
@@ -53,7 +54,13 @@ def extract_messages(exc):
if isinstance(exc, BaseExceptionGroup):
return [(exc_type, msg) for e in exc.exceptions for exc_type, msg in extract_messages(e)]
else:
return [(type(exc).__name__, str(exc))]
message = str(exc)
if isinstance(exc, httpx.HTTPStatusError):
with contextlib.suppress(Exception):
message = str(exc).split(" for url", maxsplit=1)[0]
message = f"{message}: {exc.response.json()['detail']}"

return [(type(exc).__name__, message)]


def parse_env_var(env_var: str) -> tuple[str, str]:
@@ -280,3 +287,21 @@ async def get_verify_option(server_url: str):
raise RuntimeError(f"No certificate received from {server_url}")
ca_cert_file.write_text(ssl.DER_cert_to_PEM_cert(der_cert))
return str(ca_cert_file)


def print_log(line, ansi_mode=False):
if "error" in line:

class CustomError(Exception): ...

CustomError.__name__ = line["error"]["type"]

raise CustomError(line["error"]["detail"])

def decode(text: str):
return Text.from_ansi(text) if ansi_mode else text

if line["stream"] == "stderr":
err_console.print(decode(line["message"]))
elif line["stream"] == "stdout":
console.print(decode(line["message"]))
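For reference, the line shapes print_log consumes (moved here from agent.py so the log and build commands can share it); the values below are illustrative only:

from beeai_cli.utils import print_log

print_log({"stream": "stdout", "message": "agent started"})                         # plain console output
print_log({"stream": "stderr", "message": "\x1b[31moops\x1b[0m"}, ansi_mode=True)   # ANSI rendered via Text.from_ansi
# A line carrying an "error" key raises an exception named after the reported type:
# print_log({"error": {"type": "ProviderError", "detail": "build failed"}})         # hypothetical payload
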
2 changes: 1 addition & 1 deletion apps/beeai-cli/uv.lock


1 change: 1 addition & 0 deletions apps/beeai-sdk/src/beeai_sdk/platform/__init__.py
@@ -6,4 +6,5 @@
from .file import *
from .model_provider import *
from .provider import *
from .provider_build import *
from .vector_store import *
17 changes: 16 additions & 1 deletion apps/beeai-sdk/src/beeai_sdk/platform/common.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# Copyright 2025 © BeeAI a Series of LF Projects, LLC
# SPDX-License-Identifier: Apache-2.0

from enum import StrEnum
from typing import Generic, TypeVar

from pydantic import BaseModel
@@ -13,3 +13,18 @@ class PaginatedResult(BaseModel, Generic[T]):
total_count: int
has_more: bool = False
next_page_token: str | None = None


class GithubVersionType(StrEnum):
HEAD = "head"
TAG = "tag"


class ResolvedGithubUrl(BaseModel):
host: str = "github.com"
org: str
repo: str
version: str
version_type: GithubVersionType
commit_hash: str
path: str | None = None
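An illustrative, entirely hypothetical instance showing how the new models pin a GitHub location to a commit:

from beeai_sdk.platform.common import GithubVersionType, ResolvedGithubUrl

resolved = ResolvedGithubUrl(
    org="example-org",
    repo="example-agent",
    version="v1.2.0",
    version_type=GithubVersionType.TAG,
    commit_hash="3f2c1a9e8b7d6c5a4b3e2d1f0a9b8c7d6e5f4a3b",  # made-up hash
    path="agents/chat",  # optional subdirectory within the repository
)
print(resolved.host)  # defaults to "github.com"
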
17 changes: 17 additions & 0 deletions apps/beeai-sdk/src/beeai_sdk/platform/provider.py
@@ -11,6 +11,7 @@
from a2a.types import AgentCard

from beeai_sdk.platform.client import PlatformClient, get_platform_client
from beeai_sdk.util.utils import parse_stream


class ProviderErrorMessage(pydantic.BaseModel):
@@ -118,6 +119,22 @@ async def update_variables(
await client.put(f"/api/v1/providers/{provider_id}/variables", json={"variables": variables})
).raise_for_status()

async def stream_logs(
self: "Provider| str", *, client: PlatformClient | None = None
) -> typing.AsyncIterator[dict[str, typing.Any]]:
# `self` has a weird type so that you can call both `instance.stream_logs()` or `ProviderBuild.stream_logs("123")`
provider_id = self if isinstance(self, str) else self.id
async with (
client or get_platform_client() as client,
client.stream(
"GET",
url=f"/api/v1/providers/{provider_id}/logs",
timeout=timedelta(hours=1).total_seconds(),
) as response,
):
async for line in parse_stream(response):
yield line

async def list_variables(self: "Provider | str", *, client: PlatformClient | None = None) -> dict[str, str]:
# `self` has a weird type so that you can call both `instance.delete()` or `Provider.delete("123")`
provider_id = self if isinstance(self, str) else self.id
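A minimal usage sketch of the new Provider.stream_logs (assumes a running platform, an existing provider id, and the CLI helpers shown above; not part of this change):

import asyncio

from beeai_cli.configuration import Configuration
from beeai_cli.utils import print_log
from beeai_sdk.platform.provider import Provider


async def tail_logs(provider_id: str) -> None:
    async with Configuration().use_platform_client():
        # Callable on an instance (provider.stream_logs()) or on a bare id, like list_variables.
        async for line in Provider.stream_logs(provider_id):
            print_log(line, ansi_mode=True)


asyncio.run(tail_logs("123"))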