Commit 371442d

Merge pull request #455 from DefangLabs/linda-agentic-strands
2 parents 867ec52 + be0c2b3

File tree

13 files changed: +531 -0 lines changed
Lines changed: 2 additions & 0 deletions

```dockerfile
FROM mcr.microsoft.com/devcontainers/python:3.11-bookworm
```

Lines changed: 11 additions & 0 deletions

```json
{
  "build": {
    "dockerfile": "Dockerfile",
    "context": ".."
  },
  "features": {
    "ghcr.io/defanglabs/devcontainer-feature/defang-cli:1.0.4": {},
    "ghcr.io/devcontainers/features/docker-in-docker:2": {},
    "ghcr.io/devcontainers/features/aws-cli:1": {}
  }
}
```
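
The devcontainer bakes in the Defang CLI (via the `defang-cli` feature), Docker-in-Docker, and the AWS CLI, so deploys can run from inside the container. A minimal sketch of a session, assuming the CLI's standard login flow:

```bash
# Inside the devcontainer: the defang-cli feature puts `defang` on PATH.
defang login        # authenticate the CLI
defang compose up   # deploy the sample from within the container
```
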
Lines changed: 21 additions & 0 deletions

```yaml
name: Deploy

on:
  push:
    branches:
      - main

jobs:
  deploy:
    environment: playground
    runs-on: ubuntu-latest
    permissions:
      contents: read
      id-token: write

    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4

      - name: Deploy
        uses: DefangLabs/[email protected]
```
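
This workflow deploys on every push to `main`; `id-token: write` lets the Defang action authenticate to the `playground` environment via OIDC rather than a long-lived secret. If you also wanted manual deploys, the standard GitHub Actions trigger is `workflow_dispatch`. A hypothetical variant of the `on:` block, not part of this sample:

```yaml
# Hypothetical: add a manual trigger alongside the push trigger.
on:
  push:
    branches:
      - main
  workflow_dispatch: {}
```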

samples/agentic-strands/README.md

Lines changed: 53 additions & 0 deletions

# Agentic Strands

[![1-click-deploy](https://raw.githubusercontent.com/DefangLabs/defang-assets/main/Logos/Buttons/SVG/deploy-with-defang.svg)](https://portal.defang.dev/redirect?url=https%3A%2F%2Fgithub.com%2Fnew%3Ftemplate_name%3Dsample-agentic-strands-template%26template_owner%3DDefangSamples)

This sample demonstrates a Strands Agent application deployed with Defang. The [Strands](https://strandsagents.com/latest/) agent can use tools and is compatible with the [Defang OpenAI Access Gateway](https://github.com/DefangLabs/openai-access-gateway/).

## Prerequisites

1. Download the [Defang CLI](https://github.com/DefangLabs/defang)
2. (Optional) If you are using [Defang BYOC](https://docs.defang.io/docs/concepts/defang-byoc), authenticate with your cloud provider account
3. (Optional, for local development) [Docker CLI](https://docs.docker.com/engine/install/)

## Development

To run the application locally, use the following command:

```bash
docker compose -f compose.dev.yaml up --build
```

## Configuration

This sample does not require any [configuration](https://docs.defang.io/docs/concepts/configuration). However, if you ever need to set a sensitive value, this is how it's done with Defang:

```bash
defang config set API_KEY
```

## Deployment

> [!NOTE]
> Download the [Defang CLI](https://github.com/DefangLabs/defang)

### Defang Playground

Deploy your application to the Defang Playground by opening up your terminal and typing:

```bash
defang compose up
```

### BYOC

If you want to deploy to your own cloud account, you can [use Defang BYOC](https://docs.defang.io/docs/tutorials/deploy-to-your-cloud).

---

Title: Agentic Strands

Short Description: A Strands Agent application, deployed with Defang.

Tags: Python, Flask, Strands, AI, Agent

Languages: Python
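
The compose files the README refers to are not part of this extract, but `agent.py` (below) reads `LLM_URL` and `LLM_MODEL` and listens on port 5001, so a `compose.yaml` for this sample would look roughly like the sketch here. The service names, gateway image, and model id are all assumptions, not the sample's actual file:

```yaml
# Hypothetical sketch of compose.yaml; names, image, and model id are assumptions.
services:
  app:
    build:
      context: ./app
    ports:
      - target: 5001
        published: 5001
    environment:
      LLM_URL: http://llm/api/v1/   # assumed: points at the gateway service below
      LLM_MODEL: anthropic.claude-3-haiku-20240307-v1:0   # assumed model id

  llm:
    image: defangio/openai-access-gateway   # assumed image for the OpenAI Access Gateway
    x-defang-llm: true   # assumed: tells Defang to wire this service to a managed LLM backend
    ports:
      - target: 80
```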
Lines changed: 27 additions & 0 deletions

```
# Default .dockerignore file for Defang
**/__pycache__
**/.direnv
**/.DS_Store
**/.envrc
**/.git
**/.github
**/.idea
**/.next
**/.vscode
**/compose.*.yaml
**/compose.*.yml
**/compose.yaml
**/compose.yml
**/docker-compose.*.yaml
**/docker-compose.*.yml
**/docker-compose.yaml
**/docker-compose.yml
**/node_modules
**/Thumbs.db
Dockerfile
*.Dockerfile
# Ignore our own binary, but only in the root to avoid ignoring subfolders
defang
defang.exe
# Ignore our project-level state
.defang
```

Lines changed: 2 additions & 0 deletions

```
.env
__pycache__/
```

Lines changed: 9 additions & 0 deletions

```dockerfile
FROM python:3.11-slim

WORKDIR /app

COPY . /app

RUN pip install --no-cache-dir -r requirements.txt

CMD ["python", "agent.py"]
```
Lines changed: 1 addition & 0 deletions

```python
from . import agent
```

Lines changed: 168 additions & 0 deletions

```python
from strands import Agent, tool
from strands.models.openai import OpenAIModel
from flask import Flask, request, jsonify, send_from_directory
import requests

import os
import dotenv

dotenv.load_dotenv()

message = """
You are a helpful library assistant.
Your goal is to help users discover books available through the library's book API, based on the user's preferences.
When a user makes a request, you should search the API and suggest books that match their query.

When interacting, ask the user clear questions to guide the search.
Make sure to explicitly state the question you are asking,
and provide simple sample answers so the user knows what to type.
Keep it to a maximum of 3 simple questions.
"""

app = Flask(__name__)
latest_response = {"message": "Hello! I'm your library assistant. How can I help you with your reading today?"}

model = OpenAIModel(
    client_args={
        "base_url": os.getenv("LLM_URL"),
        # "api_key": os.getenv("OPENAI_API_KEY")
    },
    model_id=os.getenv("LLM_MODEL"),
    params={
        "max_tokens": 1000,
        "temperature": 0.7,
    }
)


def parse_assistant_response(**kwargs):
    # Extract the assistant's text from the message payload
    assistant_text = kwargs["message"]["content"][0]["text"]

    print("Assistant Text: ", assistant_text)
    return assistant_text


def message_buffer_handler(**kwargs):
    # When a new message arrives from the assistant, capture its content
    global latest_response
    try:
        if "message" in kwargs and kwargs["message"].get("role") == "assistant":
            # Parse the assistant's response from the message payload
            assistant_text = parse_assistant_response(**kwargs)

            # Send the assistant's message content back to the UI
            latest_response = {"message": assistant_text}

            # Do not call exit() or any termination logic here; the agent
            # must keep running between turns. Any cleanup or state reset
            # belongs here, but never terminate the process.

    except Exception as e:
        print(f"Error in message_buffer_handler: {str(e)}")


@tool
def search_for_books(query) -> str:
    """
    Search for detailed information about books using the Open Library API.

    Args:
        query: The search term to look up books.

    Returns:
        A string summarizing the list of matching books, or a message if none are found.
    """

    url = "https://openlibrary.org/search.json"
    headers = {}
    # requests URL-encodes query parameters, so the raw query can be passed as-is
    params = {
        "q": query,
        "page": 1,
        "limit": 10
    }

    try:
        response = requests.get(url, headers=headers, params=params, timeout=10)
        if response.ok:
            book_list = response.json()
            if book_list.get("num_found", 0) == 0:
                return "No books found matching your query."

            summary = "Here are the books I found:"
            for book in book_list.get("docs", []):
                title = book.get("title")
                author = book.get("author_name", ["Unknown"])[0]
                year = book.get("first_publish_year")
                summary += f"\n- Title: {title}, Author: {author}, Year: {year}"
            print(summary)
            return summary
        else:
            return f"Error: API request failed: {response.status_code}"
    except Exception as e:
        return f"Error: {str(e)}"

TOOL_SPEC = {
    "name": "search_for_books",
    "description": "Get detailed information about books from Open Library, based on a search query.",
    "inputSchema": {
        "type": "object",
        "properties": {
            "query": {
                "type": "string",
                "description": "Search query for books",
            }
        },
        "required": ["query"],
    },
}

agent = Agent(
    tools=[search_for_books],
    model=model,
    callback_handler=message_buffer_handler,
    system_prompt=message
)

print("Agent model:", agent.model.config)

# Flask routes
@app.route('/')
def index():
    # This assumes index.html is in the same directory as this script
    return send_from_directory('.', 'index.html')

@app.route('/chat', methods=['POST'])
def chat():
    try:
        global latest_response
        data = request.json
        if not data:
            return jsonify({"error": "No JSON data received"}), 400

        user_message = data.get('message')
        if not user_message:
            return jsonify({"error": "No message provided"}), 400
        print(f"Received message: {user_message}")

        agent(f"Continue the conversation with the user. The user says: {user_message}")

        response_content = latest_response.get("message", "I'm thinking about your question...")

        return jsonify({
            "response": response_content
        })

    except Exception as e:
        print(f"Error in /chat endpoint: {str(e)}")
        return jsonify({"error": str(e), "response": str(e)}), 500

# Start Flask server when this script is run directly
if __name__ == '__main__':

    print("Environment variables:")
    print(f"- LLM_URL: {os.getenv('LLM_URL')}")

    print("Starting Flask server on port 5001")
    app.run(host='0.0.0.0', port=5001, debug=False)
```
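
With the server running (for example via the `docker compose -f compose.dev.yaml up --build` command from the README), the `/chat` route can be exercised directly. A quick check with curl, matching the JSON shape the Flask route expects; the reply text will vary:

```bash
# POST a user message to the agent; the route expects JSON {"message": ...}
# and returns JSON {"response": ...} as implemented in chat() above.
curl -s -X POST http://localhost:5001/chat \
  -H "Content-Type: application/json" \
  -d '{"message": "Recommend me a mystery novel"}'
```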
