Skip to content

Commit d862f72

Browse files
authored
fix(agents): update agents to beeai sdk changes (#1067)
Signed-off-by: Radek Ježek <radek.jezek@ibm.com>
1 parent 05dbc5b commit d862f72

File tree

16 files changed

+1156
-882
lines changed

16 files changed

+1156
-882
lines changed

agents/community/aider/Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
FROM python:3.11-slim-bookworm
1+
FROM python:3.12-slim-bookworm
22
ARG RELEASE_VERSION="main"
33
COPY --from=ghcr.io/astral-sh/uv:0.8.9 /uv /bin/
44
WORKDIR /agents/community/aider

agents/community/aider/pyproject.toml

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,9 @@ authors = [
66
{ name = "IBM Corp." },
77
]
88
readme = "README.md"
9-
requires-python = ">=3.11,<3.12"
9+
requires-python = ">=3.12,<3.13"
1010
dependencies = [
11-
"aider-chat>=0.18.1",
11+
"aider-chat>=0.86.1",
1212
"pydantic-settings>=2.7.1",
1313
"pydantic>=2.10.6",
1414
"google-generativeai>=0.8.4",
@@ -34,3 +34,7 @@ line-length = 120
3434

3535
[tool.uv.sources]
3636
beeai-sdk = { path = "../../../apps/beeai-sdk", editable = true }
37+
38+
[tool.pyright]
39+
venvPath = "."
40+
venv = ".venv"

agents/community/aider/uv.lock

Lines changed: 419 additions & 403 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.
Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,7 @@
11
# Copyright 2025 © BeeAI a Series of LF Projects, LLC
# SPDX-License-Identifier: Apache-2.0
from gpt_researcher_agent.env_patch import patch_os_environ

# GPT Researcher was never intended to be used in a server, so we apply some
# ugly workarounds: os.environ / os.getenv are replaced with context-aware
# versions. This must be done first, before anything else is imported.
patch_os_environ()

agents/community/gpt-researcher/gpt_researcher_agent/agent.py

Lines changed: 59 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,17 @@
88
from a2a.types import AgentSkill, Message
99
from gpt_researcher import GPTResearcher
1010

11+
from gpt_researcher_agent.env_patch import with_local_env
1112

12-
from beeai_sdk.a2a.extensions import TrajectoryExtensionServer, TrajectoryExtensionSpec, AgentDetail
13+
from beeai_sdk.a2a.extensions import (
14+
TrajectoryExtensionServer,
15+
TrajectoryExtensionSpec,
16+
AgentDetail,
17+
LLMServiceExtensionServer,
18+
LLMServiceExtensionSpec,
19+
EmbeddingServiceExtensionServer,
20+
EmbeddingServiceExtensionSpec,
21+
)
1322
from beeai_sdk.a2a.types import RunYield
1423
from beeai_sdk.server import Server
1524
from beeai_sdk.server.context import RunContext
@@ -18,21 +27,12 @@
1827

1928

2029
@server.agent(
21-
name="GPT Researcher",
30+
name="GPT Researcher 2",
2231
documentation_url=(
2332
f"https://github.com/i-am-bee/beeai-platform/blob/{os.getenv('RELEASE_VERSION', 'main')}"
2433
"/agents/community/gpt-researcher"
2534
),
26-
detail=AgentDetail(
27-
interaction_mode="single-turn",
28-
user_greeting="What topic do you want to research?",
29-
use_cases=[
30-
"**Comprehensive Research** – Generates detailed reports using information from multiple sources.",
31-
"**Bias Reduction** – Cross-references data from various platforms to minimize misinformation and bias.",
32-
"**High Performance** – Utilizes parallelized processes for efficient and swift report generation.",
33-
"**Customizable** – Offers customization options to tailor research for specific domains or tasks.",
34-
],
35-
),
35+
detail=AgentDetail(interaction_mode="single-turn", user_greeting="What topic do you want to research?"),
3636
skills=[
3737
AgentSkill(
3838
id="deep_research",
@@ -59,37 +59,58 @@
5959
],
6060
)
6161
async def gpt_researcher(
62-
message: Message, context: RunContext, trajectory: Annotated[TrajectoryExtensionServer, TrajectoryExtensionSpec()]
62+
message: Message,
63+
context: RunContext,
64+
trajectory: Annotated[TrajectoryExtensionServer, TrajectoryExtensionSpec()],
65+
llm_ext: Annotated[LLMServiceExtensionServer, LLMServiceExtensionSpec.single_demand()],
66+
embedding_ext: Annotated[EmbeddingServiceExtensionServer, EmbeddingServiceExtensionSpec.single_demand()],
6367
) -> AsyncGenerator[RunYield, None]:
6468
"""
6569
The agent conducts in-depth local and web research using a language model to generate comprehensive reports with
6670
citations, aimed at delivering factual, unbiased information.
6771
"""
68-
os.environ["RETRIEVER"] = "duckduckgo"
69-
os.environ["OPENAI_BASE_URL"] = os.getenv("LLM_API_BASE", "http://localhost:11434/v1")
70-
os.environ["OPENAI_API_KEY"] = os.getenv("LLM_API_KEY", "dummy")
71-
model = os.getenv("LLM_MODEL", "llama3.1")
72-
os.environ["LLM_MODEL"] = model
73-
74-
class CustomLogsHandler:
75-
async def send_json(self, data: dict[str, Any]) -> None:
76-
if "output" not in data:
77-
return
78-
match data.get("type"):
79-
case "logs":
80-
await context.yield_async(
81-
trajectory.trajectory_metadata(title="log", content=f"{data['output']}\n")
82-
)
83-
case "report":
84-
await context.yield_async(data["output"])
85-
86-
if not message.parts or not (query := message.parts[-1].root.text):
87-
yield "Please enter a topic or query."
88-
return
89-
90-
researcher = GPTResearcher(query=query, report_type="research_report", websocket=CustomLogsHandler())
91-
await researcher.conduct_research()
92-
await researcher.write_report()
72+
# Set up local environment for this request
73+
74+
llm_conf, embedding_conf = None, None
75+
if llm_ext and llm_ext.data:
76+
[llm_conf] = llm_ext.data.llm_fulfillments.values()
77+
78+
if embedding_ext and embedding_ext.data:
79+
[embedding_conf] = embedding_ext.data.embedding_fulfillments.values()
80+
81+
model = llm_conf.api_model if llm_conf else os.getenv("LLM_MODEL", "dummy")
82+
embedding_model = embedding_conf.api_model if embedding_conf else os.getenv("EMBEDDING_MODEL", "dummy")
83+
84+
env = {
85+
"RETRIEVER": "duckduckgo",
86+
"OPENAI_BASE_URL": llm_conf.api_base if llm_conf else os.getenv("LLM_API_BASE", "http://localhost:11434/v1"),
87+
"OPENAI_API_KEY": llm_conf.api_key if llm_conf else os.getenv("LLM_API_KEY", "dummy"),
88+
"LLM_MODEL": model,
89+
"EMBEDDING": f"openai:{embedding_model}",
90+
"FAST_LLM": f"openai:{model}",
91+
"SMART_LLM": f"openai:{model}",
92+
}
93+
with with_local_env(env):
94+
95+
class CustomLogsHandler:
96+
async def send_json(self, data: dict[str, Any]) -> None:
97+
if "output" not in data:
98+
return
99+
match data.get("type"):
100+
case "logs":
101+
await context.yield_async(
102+
trajectory.trajectory_metadata(title="log", content=f"{data['output']}\n")
103+
)
104+
case "report":
105+
await context.yield_async(data["output"])
106+
107+
if not message.parts or not (query := message.parts[-1].root.text):
108+
yield "Please enter a topic or query."
109+
return
110+
111+
researcher = GPTResearcher(query=query, report_type="research_report", websocket=CustomLogsHandler())
112+
await researcher.conduct_research()
113+
await researcher.write_report()
93114

94115

95116
def run():
Lines changed: 92 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,92 @@
1+
# Copyright 2025 © BeeAI a Series of LF Projects, LLC
2+
# SPDX-License-Identifier: Apache-2.0
3+
4+
import os
5+
import contextvars
6+
from collections import UserDict
7+
from contextlib import contextmanager
8+
from typing import Dict
9+
10+
11+
# Capture the real process environment and the real os.getenv before they are
# monkey-patched, so patched lookups can still fall through to actual values.
_original_environ = os.environ
_original_getenv = os.getenv

# Per-context environment overrides (one mapping per asyncio task / thread
# context). NOTE(review): the shared mutable `{}` default is safe only because
# every writer copies before mutating — confirm this invariant is kept.
_env_context: contextvars.ContextVar[Dict[str, str]] = contextvars.ContextVar("env_context", default={})
17+
18+
19+
def context_aware_getenv(key: str, default=None):
    """Drop-in replacement for os.getenv that honours per-context overrides.

    The override mapping stored in ``_env_context`` wins; any key not
    overridden falls back to the process environment captured at import time.
    """
    overrides = _env_context.get({})
    try:
        return overrides[key]
    except KeyError:
        return _original_getenv(key, default)
25+
26+
27+
class ContextAwareEnviron(UserDict):
    """Mapping that overlays per-context overrides on the real os.environ.

    Reads consult the context-local override mapping first. Writes and
    deletions of overridden keys only touch the context-local mapping, so
    concurrent requests do not observe each other's changes.
    """

    def __init__(self, original_environ):
        # Deliberately do NOT call super().__init__(): UserDict.__init__ would
        # assign to self.data, which is a read-only property on this class.
        self._original = original_environ

    @property
    def data(self):
        """Merged environment view (context overrides shadow the original)."""
        result = self._original.copy()
        result.update(_env_context.get({}))
        return result

    def __getitem__(self, key):
        context_env = _env_context.get({})
        if key in context_env:
            return context_env[key]
        return self._original[key]

    def __setitem__(self, key, value):
        # Copy-on-write: never mutate the mapping stored in the ContextVar.
        context_env = _env_context.get({}).copy()
        context_env[key] = value
        _env_context.set(context_env)

    def __delitem__(self, key):
        context_env = _env_context.get({})
        if key in context_env:
            context_env = context_env.copy()
            del context_env[key]
            _env_context.set(context_env)
        elif key in self._original:
            # NOTE(review): this mutates the real process environment and
            # therefore leaks across contexts — confirm this is intended.
            del self._original[key]
        else:
            # Match dict/os.environ semantics instead of silently ignoring a
            # delete of a missing key (the previous behavior hid bugs).
            raise KeyError(key)

    def __contains__(self, key):
        context_env = _env_context.get({})
        return key in context_env or key in self._original
64+
65+
66+
def set_context_env(env_vars: Dict[str, str]):
    """Merge *env_vars* into the override mapping of the current context."""
    merged = {**_env_context.get({}), **env_vars}
    _env_context.set(merged)
71+
72+
73+
@contextmanager
def with_local_env(env_vars: Dict[str, str]):
    """Context manager applying *env_vars* as environment overrides.

    The overrides are merged on top of any overrides already active in the
    current context, and are reliably removed again on exit even if the body
    raises.
    """
    # Use the ContextVar token protocol: reset(token) restores exactly the
    # prior state, which is more robust than re-setting a saved copy (it also
    # correctly restores the "never set in this context" state).
    token = _env_context.set({**_env_context.get({}), **env_vars})
    try:
        yield
    finally:
        _env_context.reset(token)
87+
88+
89+
def patch_os_environ():
    """Apply the context-aware patches to os.environ and os.getenv"""
    # Monkey-patches the os module globally. Safe to call more than once:
    # _original_environ / _original_getenv were captured at import time, so a
    # repeated call simply re-wraps the same originals.
    os.environ = ContextAwareEnviron(_original_environ)
    os.getenv = context_aware_getenv

agents/community/gpt-researcher/pyproject.toml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,3 +29,7 @@ line-length = 120
2929

3030
[tool.uv.sources]
3131
beeai-sdk = { path = "../../../apps/beeai-sdk", editable = true }
32+
33+
[tool.pyright]
34+
venvPath = "."
35+
venv = ".venv"

0 commit comments

Comments
 (0)