Skip to content

Commit 1b558cf

Browse files
committed
add strands sample
1 parent 867ec52 commit 1b558cf

File tree

13 files changed

+563
-0
lines changed

13 files changed

+563
-0
lines changed
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
2+
# Dev container base image: Python 3.11 on Debian bookworm (Microsoft devcontainers image).
FROM mcr.microsoft.com/devcontainers/python:3.11-bookworm
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
{
2+
"build": {
3+
"dockerfile": "Dockerfile",
4+
"context": ".."
5+
},
6+
"features": {
7+
"ghcr.io/defanglabs/devcontainer-feature/defang-cli:1.0.4": {},
8+
"ghcr.io/devcontainers/features/docker-in-docker:2": {},
9+
"ghcr.io/devcontainers/features/aws-cli:1": {}
10+
}
11+
}
Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
# Deploys this sample with Defang on every push to main.
name: Deploy

on:
  push:
    branches:
      - main

jobs:
  deploy:
    environment: playground
    runs-on: ubuntu-latest
    permissions:
      contents: read   # checkout only needs read access to the repo
      id-token: write  # required for OIDC token exchange used by the Defang action

    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4

      - name: Deploy
        uses: DefangLabs/[email protected]

samples/agentic-strands/README.md

Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
# Agentic Strands
2+
3+
[![1-click-deploy](https://raw.githubusercontent.com/DefangLabs/defang-assets/main/Logos/Buttons/SVG/deploy-with-defang.svg)](https://portal.defang.dev/redirect?url=https%3A%2F%2Fgithub.com%2Fnew%3Ftemplate_name%3Dsample-agentic-strands-template%26template_owner%3DDefangSamples)
4+
5+
This sample demonstrates a Strands Agent application, deployed with Defang. This [Strands](https://strandsagents.com/latest/) Agent can use tools, and is compatible with the [Defang OpenAI Access Gateway](https://github.com/DefangLabs/openai-access-gateway/).
6+
7+
## Prerequisites
8+
9+
1. Download [Defang CLI](https://github.com/DefangLabs/defang)
10+
2. (Optional) If you are using [Defang BYOC](https://docs.defang.io/docs/concepts/defang-byoc) authenticate with your cloud provider account
11+
3. (Optional for local development) [Docker CLI](https://docs.docker.com/engine/install/)
12+
13+
## Development
14+
15+
To run the application locally, you can use the following command:
16+
17+
```bash
18+
docker compose -f compose.dev.yaml up --build
19+
```
20+
21+
## Configuration
22+
23+
For this sample, you will not need to provide any [configuration](https://docs.defang.io/docs/concepts/configuration). However, if you ever need to, below is an example of how to do so in Defang:
24+
25+
```bash
26+
defang config set API_KEY
27+
```
28+
29+
## Deployment
30+
31+
> [!NOTE]
32+
> Download [Defang CLI](https://github.com/DefangLabs/defang)
33+
34+
### Defang Playground
35+
36+
Deploy your application to the Defang Playground by opening up your terminal and typing:
37+
```bash
38+
defang compose up
39+
```
40+
41+
### BYOC
42+
43+
If you want to deploy to your own cloud account, you can [use Defang BYOC](https://docs.defang.io/docs/tutorials/deploy-to-your-cloud).
44+
45+
---
46+
47+
Title: Agentic Strands
48+
49+
Short Description: A Strands Agent application, deployed with Defang.
50+
51+
Tags: Python, Flask, Strands, AI, Agent
52+
53+
Languages: Python
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
# Default .dockerignore file for Defang
2+
**/__pycache__
3+
**/.direnv
4+
**/.DS_Store
5+
**/.envrc
6+
**/.git
7+
**/.github
8+
**/.idea
9+
**/.next
10+
**/.vscode
11+
**/compose.*.yaml
12+
**/compose.*.yml
13+
**/compose.yaml
14+
**/compose.yml
15+
**/docker-compose.*.yaml
16+
**/docker-compose.*.yml
17+
**/docker-compose.yaml
18+
**/docker-compose.yml
19+
**/node_modules
20+
**/Thumbs.db
21+
Dockerfile
22+
*.Dockerfile
23+
# Ignore our own binary, but only in the root to avoid ignoring subfolders
24+
defang
25+
defang.exe
26+
# Ignore our project-level state
27+
.defang
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
.env
2+
__pycache__/
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
FROM python:3.11-slim

WORKDIR /app

# Install dependencies first so this (slow) layer is cached and only rebuilt
# when requirements.txt changes — copying all sources before `pip install`
# would invalidate the cache on every code edit.
COPY requirements.txt /app/
RUN pip install --no-cache-dir -r requirements.txt

# Copy the application code after the dependency layer.
COPY . /app

CMD ["python", "agent.py"]
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
from . import agent
Lines changed: 202 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,202 @@
1+
from strands import Agent, tool
from strands.models.openai import OpenAIModel
from flask import Flask, request, jsonify, send_from_directory
import requests

import json
import os
import time
import dotenv
from threading import Lock

# Load environment variables (LLM_URL, LLM_MODEL, ...) from a local .env file.
dotenv.load_dotenv()

# System prompt: frames the agent as a fashion stylist and constrains how it
# questions the user.
message = """
You are an expert fashion stylist. Your goal is to help users find their personal style.
Your task is to provide fashion advice and offer products based on the user's preferences.
You can use the tools available to you to assist with this.
Note that for any prompts you ask the user, make sure you actually explicitly state the question you are asking, and possible some sample answers so they know what to type.
Keep the questions as simple as possible so the user doesn't have to type much. And don't ask more than 3 questions.
"""

app = Flask(__name__)
# Most recent assistant reply; written by the agent callback handler and read
# back by the /chat route after each agent invocation.
latest_response = {"message": "Hello! I'm your fashion stylist assistant. How can I help you with your style today?"}

# OpenAI-compatible model client. base_url comes from LLM_URL — presumably the
# Defang OpenAI Access Gateway endpoint (no api_key needed there; see the
# commented-out line) — TODO confirm against deployment config.
model = OpenAIModel(
    client_args={
        "base_url": os.getenv("LLM_URL"),
        # "api_key": os.getenv("OPENAI_API_KEY")
    },
    model_id=os.getenv("LLM_MODEL"),
    params={
        "max_tokens": 1000,
        "temperature": 0.7,
    }
)
36+
37+
def parse_assistant_response(**kwargs):
    """Extract the assistant's text from a Strands message callback.

    Expects kwargs["message"] to be a dict whose "content" is a list of
    content blocks, where text blocks carry a "text" key.

    The original implementation read content[0]["text"] directly, which
    raises KeyError/IndexError when the message starts with a non-text
    block (e.g. a toolUse block) or has no content at all.  This version
    joins every text block found instead.

    Returns:
        The assistant's text ("" if the message contains no text blocks).
    """
    content = kwargs["message"].get("content") or []
    # Keep only text blocks; other block types (toolUse, etc.) are skipped.
    assistant_text = "\n".join(
        block["text"]
        for block in content
        if isinstance(block, dict) and "text" in block
    )

    print("Assistant Text: ", assistant_text)
    return assistant_text
45+
46+
47+
def message_buffer_handler(**kwargs):
    """Agent callback: buffer each assistant message for the web UI.

    The /chat route invokes the agent and then reads latest_response, so
    this handler only records state — it must never terminate the process.
    """
    global latest_response
    try:
        from_assistant = (
            "message" in kwargs and kwargs["message"].get("role") == "assistant"
        )
        if from_assistant:
            # Pull the assistant's text out of the message payload and make it
            # available to the next /chat response.
            latest_response = {"message": parse_assistant_response(**kwargs)}
            # Deliberately no exit()/termination logic here: the agent process
            # stays alive between requests; only the buffered state changes.
    except Exception as e:
        print(f"Error in message_buffer_handler: {str(e)}")
64+
65+
@tool
def search_for_fashion_books(query, filters=None) -> str:
    """
    Get detailed information about fashion books from Open Library.

    Args:
        query: The search query for fashion books.
        filters: Optional filters to apply to the search results, including title, author, or year.

    Returns:
        A string containing the list of books found from the search.
    """
    url = "https://openlibrary.org/search.json"
    # requests URL-encodes query params itself; the original pre-replaced
    # spaces with '+', which requests then double-encoded to '%2B'.  Pass the
    # raw query and let requests do the encoding.
    params = {
        "q": query,
        "subject": "fashion",
        "page": 1,
        "limit": 10,
    }

    # Copy over any recognized optional filters.
    if filters:
        for key in ("title", "author", "year"):
            if key in filters:
                params[key] = filters[key]

    try:
        # Timeout so a slow/unreachable Open Library can't hang the agent turn.
        response = requests.get(url, params=params, timeout=10)
        if response.ok:
            book_list = response.json()
            if book_list.get("num_found", 0) == 0:
                return "No fashion books found"

            message = "Here are the fashion books I found:"
            for book in book_list.get("docs", []):
                title = book.get("title")
                author = book.get("author_name", ["Unknown"])[0]
                year = book.get("first_publish_year")
                message += f"\n- Title: {title}, Author: {author}, Year: {year}"
            print(message)
            return message
        else:
            return f"Error: API request failed: {response.status_code}"
    except Exception as e:
        return f"Error: {str(e)}"
119+
120+
# Manual tool specification for search_for_fashion_books (JSON Schema input).
# Fixed relative to the original: "filters" now declares "type": "object" and
# nests its fields under "properties", as JSON Schema requires — previously
# title/author/year sat directly inside the "filters" entry, which is not a
# valid property schema.
TOOL_SPEC = {
    "name": "search_for_fashion_books",
    "description": "Get detailed information about fashion books from Open Library, based on a search query.",
    "inputSchema": {
        "type": "object",
        "properties": {
            "query": {
                "type": "string",
                "description": "Search query for fashion books",
            },
            "filters": {
                "type": "object",
                "description": "Optional filters to apply to the search results",
                "properties": {
                    "title": {
                        "type": "string",
                        "description": "Filter by book title"
                    },
                    "author": {
                        "type": "string",
                        "description": "Filter by author name"
                    },
                    "year": {
                        "type": "integer",
                        "description": "Filter by publication year"
                    }
                }
            }
        },
        "required": ["query"],
    },
}
148+
149+
# Build the agent: one Open Library search tool, the OpenAI-compatible model,
# and the callback that buffers every assistant reply for the Flask routes.
agent = Agent(
    tools=[search_for_fashion_books],
    model=model,
    callback_handler=message_buffer_handler,
    system_prompt=message
)

# Log the resolved model configuration at startup as a sanity check.
print("Agent model:", agent.model.config)
157+
158+
159+
# Flask routes
@app.route('/')
def index():
    """Serve the chat UI (index.html, expected next to this script)."""
    ui_directory = '.'
    return send_from_directory(ui_directory, 'index.html')
164+
165+
@app.route('/chat', methods=['POST'])
def chat():
    """Handle one chat turn: forward the user's message to the agent and
    return the buffered assistant reply.

    Request body:  {"message": "<user text>"}
    Responses:     200 {"response": ...}; 400 for a missing/invalid body or
                   missing "message"; 500 with the error text on agent failure.
    """
    try:
        global latest_response
        # silent=True: return None for a missing/invalid JSON body instead of
        # raising, so malformed requests hit the intended 400 below rather
        # than the generic 500 handler (request.json would raise here).
        data = request.get_json(silent=True)
        if not data:
            return jsonify({"error": "No JSON data received"}), 400

        user_message = data.get('message')

        if not user_message:
            return jsonify({"error": "No message provided"}), 400

        print(f"Received message: {user_message}")

        # Invoking the agent triggers message_buffer_handler, which stores the
        # assistant's reply in latest_response for us to read back.
        agent(f"Continue the conversation with the user. The user says: {user_message}")

        response_content = latest_response.get("message", "I'm thinking about your question...")

        return jsonify({
            "response": response_content
        })

    except Exception as e:
        import traceback
        traceback.print_exc()
        print(f"Error in /chat endpoint: {str(e)}")
        return jsonify({"error": str(e), "response": str(e)}), 500
194+
195+
# Start Flask server when this script is run directly
if __name__ == '__main__':

    # Log the model endpoint so misconfiguration is visible at startup.
    print("Environment variables:")
    print(f"- LLM_URL: {os.getenv('LLM_URL')}")

    # Bind to all interfaces so the containerized app is reachable from outside.
    print("Starting Flask server on port 5001")
    app.run(host='0.0.0.0', port=5001, debug=False)

0 commit comments

Comments
 (0)