|
| 1 | +from strands import Agent, tool |
| 2 | +from strands.models.openai import OpenAIModel |
| 3 | +from flask import Flask, request, jsonify, send_from_directory |
| 4 | +import requests |
| 5 | + |
| 6 | +import json |
| 7 | +import os |
| 8 | +import time |
| 9 | +import dotenv |
| 10 | +from threading import Lock |
| 11 | + |
| 12 | +dotenv.load_dotenv() |
| 13 | + |
# System prompt: defines the stylist persona and constrains how the agent
# interacts with the user (explicit questions, sample answers, max 3 questions).
message = """
You are an expert fashion stylist. Your goal is to help users find their personal style.
Your task is to provide fashion advice and offer products based on the user's preferences.
You can use the tools available to you to assist with this.
Note that for any prompts you ask the user, make sure you actually explicitly state the question you are asking, and possible some sample answers so they know what to type.
Keep the questions as simple as possible so the user doesn't have to type much. And don't ask more than 3 questions.
"""

app = Flask(__name__)
# Most recent assistant reply, shared between the agent callback (writer) and
# the /chat route (reader). Seeded with a greeting so the UI has something to
# show before the first agent turn.
# NOTE(review): mutated without the imported Lock — fine on a single-threaded
# dev server; confirm before serving with multiple threads/workers.
latest_response = {"message": "Hello! I'm your fashion stylist assistant. How can I help you with your style today?"}
| 24 | + |
# LLM backend configuration, read from the environment (.env via dotenv):
#   LLM_URL   - base URL of an OpenAI-compatible endpoint
#   LLM_MODEL - model identifier to request from that endpoint
# The api_key line is deliberately commented out — presumably the target
# endpoint is unauthenticated; verify before pointing at a real OpenAI URL.
model = OpenAIModel(
    client_args={
        "base_url": os.getenv("LLM_URL"),
        # "api_key": os.getenv("OPENAI_API_KEY")
    },
    model_id=os.getenv("LLM_MODEL"),
    params={
        "max_tokens": 1000,   # cap on generated tokens per response
        "temperature": 0.7,
    }
)
| 36 | + |
def parse_assistant_response(**kwargs):
    """Extract the assistant's text from a Strands callback payload.

    Args:
        kwargs: Callback keyword arguments; expects ``kwargs["message"]`` to be
            a dict whose "content" value is a list of content blocks.

    Returns:
        The text of all text content blocks joined with newlines. Empty string
        when the message carries no text (e.g. a tool-use-only message).
    """
    content = kwargs["message"].get("content", [])
    # A content block may be a tool-use entry with no "text" key; the previous
    # version indexed content[0]["text"] and raised KeyError on those. Collect
    # every text block instead so any mixed message still yields its text.
    assistant_text = "\n".join(
        block["text"]
        for block in content
        if isinstance(block, dict) and "text" in block
    )

    print("Assistant Text: ", assistant_text)
    return assistant_text
| 45 | + |
| 46 | + |
def message_buffer_handler(**kwargs):
    """Strands callback: buffer each completed assistant message for the UI.

    Stores the assistant's text in the module-level ``latest_response`` dict,
    which the /chat route reads after the agent call returns. Never raises and
    never terminates the process, so the agent loop keeps running.
    """
    global latest_response
    try:
        # Only react to fully-formed assistant messages; ignore everything else.
        if "message" not in kwargs:
            return
        if kwargs["message"].get("role") != "assistant":
            return

        # Pull the plain text out of the structured payload and hand it to the UI.
        latest_response = {"message": parse_assistant_response(**kwargs)}

    except Exception as exc:
        # Deliberately swallow errors: a bad callback must not kill the agent.
        print(f"Error in message_buffer_handler: {str(exc)}")
| 64 | + |
@tool
def search_for_fashion_books(query, filters=None) -> str:
    """
    Get detailed information about fashion books from Open Library.

    Args:
        query: The search query for fashion books.
        filters: Optional dict of filters to apply to the search results;
            recognized keys are "title", "author", and "year".

    Returns:
        A human-readable list of the books found, or an error message string.
    """
    url = "https://openlibrary.org/search.json"
    # Pass the raw query: requests URL-encodes parameter values itself, so the
    # old space->'+' substitution was double-encoded ('+' became '%2B') and
    # corrupted the search terms.
    params = {
        "q": query,
        "subject": "fashion",
        "page": 1,
        "limit": 10,
    }

    if filters:
        # Copy only the recognized filter keys into the request parameters.
        for key in ("title", "author", "year"):
            if key in filters:
                params[key] = filters[key]

    try:
        # timeout so a slow or unreachable API cannot hang the agent forever.
        response = requests.get(url, params=params, timeout=10)
        if not response.ok:
            return f"Error: API request failed: {response.status_code}"

        book_list = response.json()
        if book_list.get("num_found", 0) == 0:
            return "No fashion books found"

        # Local name deliberately not "message" to avoid shadowing the
        # module-level system prompt of the same name.
        result = "Here are the fashion books I found:"
        for book in book_list.get("docs", []):
            title = book.get("title")
            # author_name can be absent OR present-but-empty; guard both to
            # avoid an IndexError on [0].
            authors = book.get("author_name") or ["Unknown"]
            year = book.get("first_publish_year")
            result += f"\n- Title: {title}, Author: {authors[0]}, Year: {year}"
        print(result)
        return result
    except Exception as e:
        # Return the error as text so the agent can relay it to the user.
        return f"Error: {str(e)}"
| 119 | + |
# Declarative schema for the search tool. The previous version nested the
# filter fields directly under "filters" with no "type"/"properties" wrapper,
# which is not valid JSON Schema and would confuse schema-aware models.
TOOL_SPEC = {
    "name": "search_for_fashion_books",
    "description": "Get detailed information about fashion books from Open Library, based on a search query.",
    "inputSchema": {
        "type": "object",
        "properties": {
            "query": {
                "type": "string",
                "description": "Search query for fashion books",
            },
            "filters": {
                "type": "object",
                "description": "Optional filters to narrow the search results",
                "properties": {
                    "title": {
                        "type": "string",
                        "description": "Filter by book title"
                    },
                    "author": {
                        "type": "string",
                        "description": "Filter by author name"
                    },
                    "year": {
                        "type": "integer",
                        "description": "Filter by publication year"
                    }
                }
            }
        },
        "required": ["query"],
    },
}
| 148 | + |
# Wire up the agent: the search tool, the configured model, the system prompt,
# and the callback that buffers assistant replies for the web UI.
agent = Agent(
    tools=[search_for_fashion_books],
    model=model,
    callback_handler=message_buffer_handler,
    system_prompt=message
)

# Log the resolved model configuration at startup for debugging.
print("Agent model:", agent.model.config)
| 157 | + |
| 158 | + |
# Flask routes
@app.route('/')
def index():
    """Serve the chat UI.

    Assumes index.html sits in the same directory as this script.
    """
    return send_from_directory(".", "index.html")
| 164 | + |
@app.route('/chat', methods=['POST'])
def chat():
    """Handle one chat turn: forward the user's message to the agent and
    return the assistant's reply.

    Returns:
        200 with {"response": ...} on success,
        400 with {"error": ...} when the body has no JSON or no "message",
        500 with {"error": ..., "response": ...} if the agent call fails.
    """
    try:
        global latest_response
        # silent=True: a missing/wrong Content-Type body yields None instead
        # of Flask raising its own 415/400, so our friendly JSON error below
        # is always the one the client sees.
        data = request.get_json(silent=True)
        if not data:
            return jsonify({"error": "No JSON data received"}), 400

        user_message = data.get('message')

        if not user_message:
            return jsonify({"error": "No message provided"}), 400

        print(f"Received message: {user_message}")

        # The callback handler stores the assistant's reply in latest_response
        # as a side effect of this call.
        agent(f"Continue the conversation with the user. The user says: {user_message}")

        response_content = latest_response.get("message", "I'm thinking about your question...")

        return jsonify({
            "response": response_content
        })

    except Exception as e:
        import traceback
        traceback.print_exc()
        print(f"Error in /chat endpoint: {str(e)}")
        return jsonify({"error": str(e), "response": str(e)}), 500
| 194 | + |
# Start Flask server when this script is run directly
if __name__ == '__main__':

    # Echo key configuration so a misconfigured .env is obvious at startup.
    print("Environment variables:")
    print(f"- LLM_URL: {os.getenv('LLM_URL')}")

    # 0.0.0.0 exposes the dev server on all interfaces; debug disabled.
    print("Starting Flask server on port 5001")
    app.run(host='0.0.0.0', port=5001, debug=False)