Skip to content

Commit 0ccf090

Browse files
committed
test: beeai compiler test
Signed-off-by: Nick Mitchell <[email protected]>
1 parent 4f7ad39 commit 0ccf090

File tree

6 files changed

+91
-7
lines changed

6 files changed

+91
-7
lines changed

.github/workflows/tauri-cli.yml

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,17 @@ jobs:
3636
- name: Setup xvfb for screen 0
3737
run: Xvfb :1 -screen 0 1600x1200x24 &
3838

39-
- name: Run production build
39+
- name: Test beeai compiler
40+
env:
41+
DISPLAY: :1
42+
run: |
43+
PATH=./src-tauri/target/release/:$PATH
44+
45+
for i in ./demos/beeai/*.py
46+
do pdl compile beeai $i -g -o /tmp/z.json && jq .description /tmp/z.json
47+
done
48+
49+
- name: Test pdl run against production build
4050
env:
4151
DISPLAY: :1
4252
run: |
@@ -64,4 +74,4 @@ jobs:
6474
done
6575
6676
- name: Tear down xvfb
67-
run: killall Xvfb
77+
run: killall Xvfb || true
Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
import asyncio
2+
3+
from beeai_framework.backend.chat import ChatModel
4+
from beeai_framework.tools.search.wikipedia import WikipediaTool
5+
from beeai_framework.tools.weather.openmeteo import OpenMeteoTool
6+
from beeai_framework.workflows.agent import AgentWorkflow, AgentWorkflowInput
7+
8+
9+
async def main() -> None:
    """Demonstrate a three-agent BeeAI workflow.

    Builds an ``AgentWorkflow`` with two specialist agents (a Wikipedia-backed
    researcher and an Open-Meteo-backed weather forecaster) plus a synthesizer
    agent, runs three chained prompts about a fixed location, and prints each
    step's outcome followed by the final answer.

    NOTE(review): requires a local Ollama server with the ``granite3.2:2b``
    model available — TODO confirm this is provisioned wherever this demo runs.
    """
    # All three agents share a single chat model instance served by Ollama.
    llm = ChatModel.from_name("ollama:granite3.2:2b")
    workflow = AgentWorkflow(name="Smart assistant")

    # Agent 1: topical research via the Wikipedia tool.
    workflow.add_agent(
        name="Researcher",
        role="A diligent researcher.",
        instructions="You look up and provide information about a specific topic.",
        tools=[WikipediaTool()],
        llm=llm,
    )

    # Agent 2: weather lookups via the Open-Meteo tool.
    workflow.add_agent(
        name="WeatherForecaster",
        role="A weather reporter.",
        instructions="You provide detailed weather reports.",
        tools=[OpenMeteoTool()],
        llm=llm,
    )

    # Agent 3: no tools — combines the other agents' outputs into a summary.
    workflow.add_agent(
        name="DataSynthesizer",
        role="A meticulous and creative data synthesizer",
        instructions="You can combine disparate information into a final coherent summary.",
        llm=llm,
    )

    location = "Saint-Tropez"

    # The three inputs run as sequential workflow steps; the last one
    # synthesizes the history and weather answers produced by the first two.
    response = await workflow.run(
        inputs=[
            AgentWorkflowInput(
                prompt=f"Provide a short history of {location}.",
            ),
            AgentWorkflowInput(
                prompt=f"Provide a comprehensive weather summary for {location} today.",
                expected_output="Essential weather details such as chance of rain, temperature and wind. Only report information that is available.",
            ),
            AgentWorkflowInput(
                prompt=f"Summarize the historical and weather data for {location}.",
                expected_output=f"A paragraph that describes the history of {location}, followed by the current weather conditions.",
            ),
        ]
    ).on(
        # Per-step progress callback: print each completed step's final answer.
        "success",
        lambda data, event: print(
            f"\n-> Step '{data.step}' has been completed with the following outcome.\n\n{data.state.final_answer}"
        ),
    )

    print("==== Final Answer ====")
    print(response.result.final_answer)
61+
62+
63+
if __name__ == "__main__":
    # Entry point: run the async demo on asyncio's default event loop.
    asyncio.run(main())

pdl-live-react/package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,9 +14,9 @@
1414
"tauri": "tauri",
1515
"test:quality": "concurrently -n 'lint,types,formatting' 'npm run lint' 'tsc --build --noEmit' \"prettier --check 'tests/**/*.ts' 'src/**/*.{ts,tsx,css}'\"",
1616
"test:ui": "playwright install --with-deps && playwright test",
17-
"test:bee": "./src-tauri/target/debug/pdl compile beeai demos/beeai.py -g --output - | jq",
17+
"test:bee": "until [ -f ./src-tauri/target/debug/pdl ]; do sleep 1; done; for i in ./demos/beeai/*.py; do ./src-tauri/target/debug/pdl compile beeai $i -g --output - | jq; done",
1818
"types": "(cd .. && python -m src.pdl.pdl --schema > src/pdl/pdl-schema.json) && json2ts ../src/pdl/pdl-schema.json src/pdl_ast.d.ts --unreachableDefinitions && npm run format",
19-
"test": "concurrently -n 'quality,playwright,bee' 'npm run test:quality' 'npm run test:ui' 'npm run test:bee'",
19+
"test": "concurrently -n 'quality,playwright' 'npm run test:quality' 'npm run test:ui'",
2020
"pdl": "./src-tauri/target/debug/pdl",
2121
"view": "npm run pdl view",
2222
"start": "npm run tauri dev"

pdl-live-react/src-tauri/src/interpreter/pip.rs

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,9 +25,13 @@ pub async fn pip_install_if_needed(
2525
} else {
2626
"python3"
2727
};
28-
cmd!(python, "-mvenv", &venv_path).run()?;
28+
cmd!(python, "-mvenv", &venv_path)
29+
.stdout_to_stderr()
30+
.run()?;
2931

30-
cmd!(bin_path.join("pip"), "install", "-r", &requirements_path,).run()?;
32+
cmd!(bin_path.join("pip"), "install", "-r", &requirements_path,)
33+
.stdout_to_stderr()
34+
.run()?;
3135

3236
let cached_requirements_path = venv_path.join("requirements.txt");
3337
copy(requirements_path, cached_requirements_path)?;

pdl-live-react/src-tauri/src/interpreter/pull.rs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,10 @@ fn ollama_exists(model: &str) -> bool {
4242
/// The Ollama implementation of a single model pull
4343
fn ollama_pull_if_needed(model: &str) -> Result<(), LoadError> {
4444
if !ollama_exists(model) {
45-
cmd!("ollama", "pull", model).run().map_err(LoadError::IO)?;
45+
cmd!("ollama", "pull", model)
46+
.stdout_to_stderr()
47+
.run()
48+
.map_err(LoadError::IO)?;
4649
}
4750
Ok(())
4851
}

pyproject.toml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -78,3 +78,6 @@ where = ["src"]
7878

7979
[tool.setuptools.package-data]
8080
pdl = ["pdl-schema.json"]
81+
82+
[tool.pyright]
83+
include = ["src", "tests", "examples", "docs"]

0 commit comments

Comments
 (0)