diff --git a/6-deployed-agents/finance/company-overview-agent/README.md b/6-deployed-agents/finance/company-overview-agent/README.md index 1a14c4a..5c605f8 100644 --- a/6-deployed-agents/finance/company-overview-agent/README.md +++ b/6-deployed-agents/finance/company-overview-agent/README.md @@ -85,7 +85,9 @@ from uagents import Agent, Context, Model class CompanyOverviewRequest(Model): - ticker: str + ticker: str = Field( + description="The stock ticker symbol (e.g., AAPL for Apple Inc.) used to identify the company on financial markets.", + ) class CompanyOverviewResponse(Model): @@ -109,7 +111,7 @@ async def send_message(ctx: Context): @agent.on_message(CompanyOverviewResponse) async def handle_response(ctx: Context, sender: str, msg: CompanyOverviewResponse): ctx.logger.info(f"Received response from {sender}:") - ctx.logger.info(msg.overview) + ctx.logger.info(str(msg.overview)) if __name__ == "__main__": diff --git a/6-deployed-agents/finance/company-overview-agent/agent.py b/6-deployed-agents/finance/company-overview-agent/agent.py index 487fe09..2c2cdee 100644 --- a/6-deployed-agents/finance/company-overview-agent/agent.py +++ b/6-deployed-agents/finance/company-overview-agent/agent.py @@ -1,29 +1,17 @@ import os import time from enum import Enum -from typing import Dict -import requests +from chat_proto import chat_proto, struct_output_client_proto from uagents import Agent, Context, Model from uagents.experimental.quota import QuotaProtocol, RateLimit from uagents.models import ErrorMessage +from functions import CompanyOverviewResponse, CompanyOverviewRequest, fetch_overview_json + AGENT_SEED = os.getenv("AGENT_SEED", "company-overview") AGENT_NAME = os.getenv("AGENT_NAME", "Company Overview Agent") -ALPHAVANTAGE_API_KEY = os.getenv("ALPHAVANTAGE_API_KEY") - -if ALPHAVANTAGE_API_KEY is None: - raise ValueError("You need to provide an API key for Alpha Vantage.") - - -class CompanyOverviewRequest(Model): - ticker: str - - -class CompanyOverviewResponse(Model): - overview: Dict[str, str] - PORT = 8000 agent = Agent( @@ -41,24 +29,6 @@ class CompanyOverviewResponse(Model): ) -def fetch_overview_json(ticker: str) -> dict: - url = f"https://www.alphavantage.co/query?function=OVERVIEW&symbol={ticker}&apikey={ALPHAVANTAGE_API_KEY}" - - try: - response = requests.get(url, timeout=10) - except requests.exceptions.Timeout: - return {"error": "The request timed out. 
Please try again."}
-    except requests.exceptions.RequestException as e:
-        return {"error": f"An error occurred: {e}"}
-
-    data = response.json()
-
-    if not data or "Symbol" not in data:
-        return {"error": "No valid data found in the response."}
-
-    return data
-
-
 @proto.on_message(
     CompanyOverviewRequest, replies={CompanyOverviewResponse, ErrorMessage}
 )
@@ -91,6 +61,8 @@ async def handle_request(ctx: Context, sender: str, msg: CompanyOverviewRequest)
 
 
 agent.include(proto, publish_manifest=True)
+agent.include(chat_proto, publish_manifest=True)
+agent.include(struct_output_client_proto, publish_manifest=True)
 
 
 # Health check related code
diff --git a/6-deployed-agents/finance/company-overview-agent/chat_proto.py b/6-deployed-agents/finance/company-overview-agent/chat_proto.py
new file mode 100644
index 0000000..cd64e7b
--- /dev/null
+++ b/6-deployed-agents/finance/company-overview-agent/chat_proto.py
@@ -0,0 +1,242 @@
+import os
+import time
+from typing import Any, Literal, TypedDict
+from datetime import datetime
+from pydantic.v1 import UUID4
+from uagents import Model, Protocol, Context
+from uuid import uuid4
+
+from functions import CompanyOverviewRequest, fetch_overview_json
+
+
+AI_AGENT_ADDRESS = os.getenv("AI_AGENT_ADDRESS")
+
+
+class Metadata(TypedDict):
+
+    # primarily used with the `Resource` model. This field specifies the mime_type of the
+    # resource that is being referenced. A full list can be found at `docs/mime_types.md`
+    mime_type: str
+
+    # the role of the resource
+    role: str
+
+
+class TextContent(Model):
+    type: Literal["text"]
+
+    # The text of the content. The format of this field is UTF-8 encoded strings. Additionally,
+    # markdown based formatting can be used and will be supported by most clients
+    text: str
+
+
+class Resource(Model):
+
+    # the uri of the resource
+    uri: str
+
+    # the set of metadata for this resource, for a more detailed description of the set of
+    # fields see `docs/metadata.md`
+    metadata: dict[str, str]
+
+
+class ResourceContent(Model):
+    type: Literal["resource"]
+
+    # The resource id
+    resource_id: UUID4
+
+    # The resource or list of resources for this content. Typically only a single
+    # resource will be sent; however, if there are accompanying resources like
+    # thumbnails and audio tracks these can be additionally referenced
+    #
+    # In the case of a list of resources, the first element of the list is always
+    # considered the primary resource
+    resource: Resource | list[Resource]
+
+
+class MetadataContent(Model):
+    type: Literal["metadata"]
+
+    # the set of metadata for this content, for a more detailed description of the set of
+    # fields see `docs/metadata.md`
+    metadata: dict[str, str]
+
+
+class StartSessionContent(Model):
+    type: Literal["start-session"]
+
+
+class EndSessionContent(Model):
+    type: Literal["end-session"]
+
+
+class StartStreamContent(Model):
+    type: Literal["start-stream"]
+
+    stream_id: UUID4
+
+
+class EndStreamContent(Model):
+    type: Literal["end-stream"]
+
+    stream_id: UUID4
+
+
+# The combined agent content types
+AgentContent = (
+    TextContent
+    | ResourceContent
+    | MetadataContent
+    | StartSessionContent
+    | EndSessionContent
+    | StartStreamContent
+    | EndStreamContent
+)
+
+
+class ChatMessage(Model):
+
+    # the timestamp for the message, should be in UTC
+    timestamp: datetime
+
+    # a unique message id that is generated from the message instigator
+    msg_id: UUID4
+
+    # the list of content elements in the chat
+    content: list[AgentContent]
+
+
+class ChatAcknowledgement(Model):
+
+    # the timestamp for the message, should be in UTC
+    timestamp: datetime
+
+    # the msg id that is being acknowledged
+    acknowledged_msg_id: UUID4
+
+    # optional acknowledgement metadata
+    metadata: dict[str, str] | None = None
+
+
+def create_text_chat(text: str) -> ChatMessage:
+    return ChatMessage(
+        timestamp=datetime.utcnow(),
+        msg_id=uuid4(),
+        content=[TextContent(type="text", text=text)],
+    )
+
+
+def create_end_session_chat() -> ChatMessage:
+    return ChatMessage(
+        timestamp=datetime.utcnow(),
+        msg_id=uuid4(),
+        content=[EndSessionContent(type="end-session")],
+    )
+
+
+chat_proto = Protocol(name="AgentChatProtcol", version="0.2.1")
+
+struct_output_client_proto = Protocol(
+    name="StructuredOutputClientProtocol", version="0.1.0"
+)
+
+
+class StructuredOutputPrompt(Model):
+    prompt: str
+    output_schema: dict[str, Any]
+
+
+class StructuredOutputResponse(Model):
+    output: dict[str, Any]
+
+
+@chat_proto.on_message(ChatMessage)
+async def handle_message(ctx: Context, sender: str, msg: ChatMessage):
+    await ctx.send(
+        sender,
+        ChatAcknowledgement(
+            timestamp=datetime.utcnow(), acknowledged_msg_id=msg.msg_id
+        ),
+    )
+    for item in msg.content:
+        if isinstance(item, StartSessionContent):
+            ctx.logger.info(f"Got a start session message from {sender}")
+            continue
+        elif isinstance(item, TextContent):
+            ctx.logger.info(f"Got a message from {sender}: {item.text}")
+            ctx.storage.set(str(ctx.session), sender)
+            await ctx.send(
+                AI_AGENT_ADDRESS,
+                StructuredOutputPrompt(
+                    prompt=item.text, output_schema=CompanyOverviewRequest.schema()
+                ),
+            )
+        else:
+            ctx.logger.info(f"Got unexpected content from {sender}")
+
+
+@chat_proto.on_message(ChatAcknowledgement)
+async def handle_ack(ctx: Context, sender: str, msg: ChatAcknowledgement):
+    ctx.logger.info(
+        f"Got an acknowledgement from {sender} for {msg.acknowledged_msg_id}"
+    )
+
+
+@struct_output_client_proto.on_message(StructuredOutputResponse)
+async def handle_structured_output_response(
+    ctx: Context, sender: str, msg: StructuredOutputResponse
+):
+    prompt = CompanyOverviewRequest.parse_obj(msg.output)
+    session_sender = ctx.storage.get(str(ctx.session))
+    if session_sender is
None: + ctx.logger.error( + "Discarding message because no session sender found in storage" + ) + return + + cache = ctx.storage.get(prompt.ticker) or None + if cache: + if int(time.time()) - cache["timestamp"] < 86400: + cache.pop("timestamp") + chat_message = create_text_chat( + f"Company: {cache['Name']} ({cache['Symbol']})\n" + f"Exchange: {cache['Exchange']} | Currency: {cache['Currency']}\n" + f"Industry: {cache['Industry']} | Sector: {cache['Sector']}\n" + f"Market Cap: {cache['Currency']} {cache['MarketCapitalization']}\n" + f"PE Ratio: {cache['PERatio']} | EPS: {cache['EPS']}\n" + f"Website: {cache['OfficialSite']}\n\n" + f"Description: {cache['Description']}" + ) + await ctx.send(session_sender, chat_message) + return + + try: + output_json = fetch_overview_json(prompt.ticker) + except Exception as err: + ctx.logger.error(err) + await ctx.send( + session_sender, + create_text_chat( + "Sorry, I couldn't process your request. Please try again later." + ), + ) + return + + if "error" in output_json: + await ctx.send(session_sender, create_text_chat(str(output_json["error"]))) + return + + chat_message = create_text_chat( + f"Company: {output_json['Name']} ({output_json['Symbol']})\n" + f"Exchange: {output_json['Exchange']} | Currency: {output_json['Currency']}\n" + f"Industry: {output_json['Industry']} | Sector: {output_json['Sector']}\n" + f"Market Cap: {output_json['Currency']} {output_json['MarketCapitalization']}\n" + f"PE Ratio: {output_json['PERatio']} | EPS: {output_json['EPS']}\n" + f"Website: {output_json['OfficialSite']}\n\n" + f"Description: {output_json['Description']}" + ) + + output_json["timestamp"] = int(time.time()) + ctx.storage.set(prompt.ticker, output_json) + await ctx.send(session_sender, chat_message) + await ctx.send(session_sender, create_end_session_chat()) \ No newline at end of file diff --git a/6-deployed-agents/finance/company-overview-agent/functions.py b/6-deployed-agents/finance/company-overview-agent/functions.py new file mode 100644 index 0000000..df8a6a3 --- /dev/null +++ b/6-deployed-agents/finance/company-overview-agent/functions.py @@ -0,0 +1,40 @@ +import os +import requests +from typing import Dict +from uagents import Model +from uagents.models import Field + +ALPHAVANTAGE_API_KEY = os.getenv("ALPHAVANTAGE_API_KEY") + +if ALPHAVANTAGE_API_KEY is None: + raise ValueError("You need to provide an API key for Alpha Vantage.") + + +class CompanyOverviewRequest(Model): + ticker: str = Field( + description="The stock ticker symbol (e.g., AAPL for Apple Inc.) used to identify the company on financial markets.", + ) + + +class CompanyOverviewResponse(Model): + overview: Dict[str, str] + + +def fetch_overview_json(ticker: str) -> dict: + url = f"https://www.alphavantage.co/query?function=OVERVIEW&symbol={ticker}&apikey={ALPHAVANTAGE_API_KEY}" + + try: + response = requests.get(url, timeout=10) + except requests.exceptions.Timeout: + return {"error": "The request timed out. 
Please try again."}
+    except requests.exceptions.RequestException as e:
+        return {"error": f"An error occurred: {e}"}
+
+    data = response.json()
+
+    if not data or "Symbol" not in data:
+        return {"error": "No valid data found in the response."}
+
+    return data
+
+
+if __name__ == "__main__":
+    print(fetch_overview_json("AMZN"))
\ No newline at end of file
diff --git a/6-deployed-agents/finance/finbert-financial-sentiment-agent/agent.py b/6-deployed-agents/finance/finbert-financial-sentiment-agent/agent.py
index 4570e77..a7e82b4 100644
--- a/6-deployed-agents/finance/finbert-financial-sentiment-agent/agent.py
+++ b/6-deployed-agents/finance/finbert-financial-sentiment-agent/agent.py
@@ -1,18 +1,16 @@
 import os
 from enum import Enum
 
-import requests
+from chat_proto import chat_proto
 from uagents import Agent, Context, Model
 from uagents.experimental.quota import QuotaProtocol, RateLimit
 from uagents.models import ErrorMessage
 
+from finbert import FinancialSentimentResponse, FinancialSentimentRequest, get_finbert_sentiment
+
 AGENT_SEED = os.getenv("AGENT_SEED", "")
 AGENT_NAME = os.getenv("AGENT_NAME", "Finbert Financial Sentiment Agent")
 
-HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY")
-if not HUGGINGFACE_API_KEY:
-    raise ValueError("You need to provide a Hugging Face API token.")
-
 PORT = 8000
 agent = Agent(
     name=AGENT_NAME,
@@ -22,16 +20,6 @@
 )
 
 
-class FinancialSentimentRequest(Model):
-    text: str
-
-
-class FinancialSentimentResponse(Model):
-    positive: float
-    neutral: float
-    negative: float
-
-
 proto = QuotaProtocol(
     storage_reference=agent.storage,
     name="Financial-Sentiment",
@@ -40,33 +28,6 @@ class FinancialSentimentResponse(Model):
 )
 
 
-async def get_finbert_sentiment(text) -> FinancialSentimentResponse:
-    API_URL = "https://api-inference.huggingface.co/models/ProsusAI/finbert"
-    headers = {"Authorization": f"Bearer {HUGGINGFACE_API_KEY}"}
-
-    payload = {
-        "inputs": text,
-    }
-
-    response = requests.post(API_URL, headers=headers, json=payload, timeout=30)
-    data = response.json()
-    if "error" in data:
-        raise ValueError(data["error"])
-
-    positive, neutral, negative = 0.0, 0.0, 0.0
-    for entry in data[0]:
-        if entry["label"] == "positive":
-            positive = entry["score"]
-        elif entry["label"] == "neutral":
-            neutral = entry["score"]
-        elif entry["label"] == "negative":
-            negative = entry["score"]
-
-    return FinancialSentimentResponse(
-        positive=positive, neutral=neutral, negative=negative
-    )
-
-
 @proto.on_message(
     FinancialSentimentRequest, replies={FinancialSentimentResponse, ErrorMessage}
 )
@@ -88,6 +49,7 @@ async def handle_request(ctx: Context, sender: str, msg: FinancialSentimentReque
 
 
 agent.include(proto, publish_manifest=True)
+agent.include(chat_proto, publish_manifest=True)
 
 
 ### Health check related code
diff --git a/6-deployed-agents/finance/finbert-financial-sentiment-agent/chat_proto.py b/6-deployed-agents/finance/finbert-financial-sentiment-agent/chat_proto.py
new file mode 100644
index 0000000..a80eac1
--- /dev/null
+++ b/6-deployed-agents/finance/finbert-financial-sentiment-agent/chat_proto.py
@@ -0,0 +1,166 @@
+from typing import Any, Literal, TypedDict
+from datetime import datetime
+from pydantic.v1 import UUID4
+from uagents import Model, Protocol, Context
+from uuid import uuid4
+
+from finbert import get_finbert_sentiment
+
+
+class Metadata(TypedDict):
+
+    # primarily used with the `Resource` model. This field specifies the mime_type of the
+    # resource that is being referenced. A full list can be found at `docs/mime_types.md`
+    mime_type: str
+
+    # the role of the resource
+    role: str
+
+
+class TextContent(Model):
+    type: Literal["text"]
+
+    # The text of the content. The format of this field is UTF-8 encoded strings. Additionally,
+    # markdown based formatting can be used and will be supported by most clients
+    text: str
+
+
+class Resource(Model):
+
+    # the uri of the resource
+    uri: str
+
+    # the set of metadata for this resource, for a more detailed description of the set of
+    # fields see `docs/metadata.md`
+    metadata: dict[str, str]
+
+
+class ResourceContent(Model):
+    type: Literal["resource"]
+
+    # The resource id
+    resource_id: UUID4
+
+    # The resource or list of resources for this content. Typically only a single
+    # resource will be sent; however, if there are accompanying resources like
+    # thumbnails and audio tracks these can be additionally referenced
+    #
+    # In the case of a list of resources, the first element of the list is always
+    # considered the primary resource
+    resource: Resource | list[Resource]
+
+
+class MetadataContent(Model):
+    type: Literal["metadata"]
+
+    # the set of metadata for this content, for a more detailed description of the set of
+    # fields see `docs/metadata.md`
+    metadata: dict[str, str]
+
+
+class StartSessionContent(Model):
+    type: Literal["start-session"]
+
+
+class EndSessionContent(Model):
+    type: Literal["end-session"]
+
+
+class StartStreamContent(Model):
+    type: Literal["start-stream"]
+
+    stream_id: UUID4
+
+
+class EndStreamContent(Model):
+    type: Literal["end-stream"]
+
+    stream_id: UUID4
+
+
+# The combined agent content types
+AgentContent = (
+    TextContent
+    | ResourceContent
+    | MetadataContent
+    | StartSessionContent
+    | EndSessionContent
+    | StartStreamContent
+    | EndStreamContent
+)
+
+
+class ChatMessage(Model):
+
+    # the timestamp for the message, should be in UTC
+    timestamp: datetime
+
+    # a unique message id that is generated from the message instigator
+    msg_id: UUID4
+
+    # the list of content elements in the chat
+    content: list[AgentContent]
+
+
+class ChatAcknowledgement(Model):
+
+    # the timestamp for the message, should be in UTC
+    timestamp: datetime
+
+    # the msg id that is being acknowledged
+    acknowledged_msg_id: UUID4
+
+    # optional acknowledgement metadata
+    metadata: dict[str, str] | None = None
+
+
+def create_text_chat(text: str) -> ChatMessage:
+    return ChatMessage(
+        timestamp=datetime.utcnow(),
+        msg_id=uuid4(),
+        content=[TextContent(type="text", text=text)],
+    )
+
+
+chat_proto = Protocol(name="AgentChatProtcol", version="0.2.1")
+
+struct_output_client_proto = Protocol(
+    name="StructuredOutputClientProtocol", version="0.1.0"
+)
+
+
+@chat_proto.on_message(ChatMessage)
+async def handle_message(ctx: Context, sender: str, msg: ChatMessage):
+    await ctx.send(
+        sender,
+        ChatAcknowledgement(
+            timestamp=datetime.utcnow(), acknowledged_msg_id=msg.msg_id
+        ),
+    )
+    for item in msg.content:
+        if isinstance(item, StartSessionContent):
+            ctx.logger.info(f"Got a start session message from {sender}")
+            continue
+        elif isinstance(item, TextContent):
+            ctx.logger.info(f"Got a message from {sender}: {item.text}")
+            ctx.storage.set(str(ctx.session), sender)
+
+            response = await get_finbert_sentiment(item.text)
+
+            result = (
+                f"Sentiment analysis:\n"
+                f"- Positive: {response.positive:.2f}\n"
+                f"- Neutral: {response.neutral:.2f}\n"
+                f"- Negative: {response.negative:.2f}"
+            )
+
+            await ctx.send(sender, create_text_chat(result))
+        else:
+            ctx.logger.info(f"Got unexpected content from
{sender}") + + +@chat_proto.on_message(ChatAcknowledgement) +async def handle_ack(ctx: Context, sender: str, msg: ChatAcknowledgement): + ctx.logger.info( + f"Got an acknowledgement from {sender} for {msg.acknowledged_msg_id}" + ) diff --git a/6-deployed-agents/finance/finbert-financial-sentiment-agent/finbert.py b/6-deployed-agents/finance/finbert-financial-sentiment-agent/finbert.py new file mode 100644 index 0000000..06f3775 --- /dev/null +++ b/6-deployed-agents/finance/finbert-financial-sentiment-agent/finbert.py @@ -0,0 +1,44 @@ +import os +import requests +from uagents import Model + +class FinancialSentimentRequest(Model): + text: str + + +class FinancialSentimentResponse(Model): + positive: float + neutral: float + negative: float + +HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY") + +if not HUGGINGFACE_API_KEY: + raise ValueError("You need to provide a Hugging Face API token.") + + +async def get_finbert_sentiment(text) -> FinancialSentimentResponse: + API_URL = "https://api-inference.huggingface.co/models/ProsusAI/finbert" + headers = {"Authorization": f"Bearer {HUGGINGFACE_API_KEY}"} + + payload = { + "inputs": text, + } + + response = requests.post(API_URL, headers=headers, json=payload, timeout=30) + data = response.json() + if "error" in data: + raise ValueError(data["error"]) + + positive, neutral, negative = 0.0, 0.0, 0.0 + for entry in data[0]: + if entry["label"] == "positive": + positive = entry["score"] + elif entry["label"] == "neutral": + neutral = entry["score"] + elif entry["label"] == "negative": + negative = entry["score"] + + return FinancialSentimentResponse( + positive=positive, neutral=neutral, negative=negative + ) \ No newline at end of file diff --git a/6-deployed-agents/utility/weather-agent/README.md b/6-deployed-agents/utility/weather-agent/README.md index 7826525..b4d56c3 100644 --- a/6-deployed-agents/utility/weather-agent/README.md +++ b/6-deployed-agents/utility/weather-agent/README.md @@ -18,7 +18,7 @@ WeatherForecastRequest( ```python WeatherForecastResponse( - location="19.3°C", + location="London", temp=" 19.3°C", condition="Partly cloudy", humidity="46%", @@ -35,7 +35,9 @@ from uagents import Agent, Context, Model class WeatherForecastRequest(Model): - location: str + location: str = Field( + description="Location", + ) class WeatherForecastResponse(Model): diff --git a/6-deployed-agents/utility/weather-agent/agent.py b/6-deployed-agents/utility/weather-agent/agent.py new file mode 100644 index 0000000..0339b9b --- /dev/null +++ b/6-deployed-agents/utility/weather-agent/agent.py @@ -0,0 +1,99 @@ +import os +from enum import Enum + +from uagents import Agent, Context, Model +from uagents.experimental.quota import QuotaProtocol, RateLimit +from uagents.models import ErrorMessage + +from chat_proto import chat_proto, struct_output_client_proto +from weather import get_weather, WeatherForecastRequest, WeatherForecastResponse + +AGENT_SEED = os.getenv("AGENT_SEED", "weather-agent") +AGENT_NAME = os.getenv("AGENT_NAME", "Weather Agent") + + +PORT = 8000 +agent = Agent( + name=AGENT_NAME, + seed=AGENT_SEED, + port=PORT, + endpoint=f"http://localhost:{PORT}/submit", +) + +proto = QuotaProtocol( + storage_reference=agent.storage, + name="Weather-Agent-Protocol", + version="0.1.0", + default_rate_limit=RateLimit(window_size_minutes=60, max_requests=6), +) + +@proto.on_message( + WeatherForecastRequest, replies={WeatherForecastResponse, ErrorMessage} +) +async def handle_request(ctx: Context, sender: str, msg: WeatherForecastRequest): + 
ctx.logger.info(f"Received Address: {msg.location}") + try: + weather_forecast = await get_weather(msg.location) + except Exception as err: + ctx.logger.error(err) + await ctx.send(sender, ErrorMessage(error=str(err))) + + if "error" in weather_forecast: + await ctx.send(sender, ErrorMessage(error=weather_forecast["error"])) + return + await ctx.send(sender, WeatherForecastResponse(**weather_forecast)) + + +agent.include(proto, publish_manifest=True) + + +### Health check related code +def agent_is_healthy() -> bool: + """ + Implement the actual health check logic here. + + For example, check if the agent can connect to a third party API, + check if the agent has enough resources, etc. + """ + condition = True # TODO: logic here + return bool(condition) + + +class HealthCheck(Model): + pass + + +class HealthStatus(str, Enum): + HEALTHY = "healthy" + UNHEALTHY = "unhealthy" + + +class AgentHealth(Model): + agent_name: str + status: HealthStatus + + +health_protocol = QuotaProtocol( + storage_reference=agent.storage, name="HealthProtocol", version="0.1.0" +) + + +@health_protocol.on_message(HealthCheck, replies={AgentHealth}) +async def handle_health_check(ctx: Context, sender: str, msg: HealthCheck): + status = HealthStatus.UNHEALTHY + try: + if agent_is_healthy(): + status = HealthStatus.HEALTHY + except Exception as err: + ctx.logger.error(err) + finally: + await ctx.send(sender, AgentHealth(agent_name=AGENT_NAME, status=status)) + + +agent.include(health_protocol, publish_manifest=True) +agent.include(chat_proto, publish_manifest=True) +agent.include(struct_output_client_proto, publish_manifest=True) + + +if __name__ == "__main__": + agent.run() diff --git a/6-deployed-agents/utility/weather-agent/chat_proto.py b/6-deployed-agents/utility/weather-agent/chat_proto.py new file mode 100644 index 0000000..a055c37 --- /dev/null +++ b/6-deployed-agents/utility/weather-agent/chat_proto.py @@ -0,0 +1,236 @@ +import os +from typing import Any, Literal, TypedDict +from datetime import datetime +from pydantic.v1 import UUID4 +from uagents import Model, Protocol, Context +from uuid import uuid4 + +from weather import get_weather, WeatherForecastRequest + + +AI_AGENT_ADDRESS = os.getenv("claude-ai-agent") + +if not AI_AGENT_ADDRESS: + raise ValueError("AI_AGENT_ADDRESS not set") + + +class Metadata(TypedDict): + + # primarily used with hte `Resource` model. This field specifies the mime_type of + # resource that is being referenced. A full list can be found at `docs/mime_types.md` + mime_type: str + + # the role of the resource + role: str + + +class TextContent(Model): + type: Literal["text"] + + # The text of the content. The format of this field is UTF-8 encoded strings. Additionally, + # markdown based formatting can be used and will be supported by most clients + text: str + + +class Resource(Model): + + # the uri of the resource + uri: str + + # the set of metadata for this resource, for more detailed description of the set of + # fields see `docs/metadata.md` + metadata: dict[str, str] + + +class ResourceContent(Model): + type: Literal["resource"] + + # The resource id + resource_id: UUID4 + + # The resource or list of resource for this content. 
typically only a single + # resource will be sent, however, if there are accompanying resources like + # thumbnails and audo tracks these can be additionally referenced + # + # In the case of the a list of resources, the first element of the list is always + # considered the primary resource + resource: Resource | list[Resource] + + +class MetadataContent(Model): + type: Literal["metadata"] + + # the set of metadata for this content, for more detailed description of the set of + # fields see `docs/metadata.md` + metadata: dict[str, str] + + +class StartSessionContent(Model): + type: Literal["start-session"] + + +class EndSessionContent(Model): + type: Literal["end-session"] + + +class StartStreamContent(Model): + type: Literal["start-stream"] + + stream_id: UUID4 + + +class EndStreamContent(Model): + type: Literal["start-stream"] + + stream_id: UUID4 + + +# The combined agent content types +AgentContent = ( + TextContent + | ResourceContent + | MetadataContent + | StartSessionContent + | EndSessionContent + | StartStreamContent + | EndStreamContent +) + + +class ChatMessage(Model): + + # the timestamp for the message, should be in UTC + timestamp: datetime + + # a unique message id that is generated from the message instigator + msg_id: UUID4 + + # the list of content elements in the chat + content: list[AgentContent] + + +class ChatAcknowledgement(Model): + + # the timestamp for the message, should be in UTC + timestamp: datetime + + # the msg id that is being acknowledged + acknowledged_msg_id: UUID4 + + # optional acknowledgement metadata + metadata: dict[str, str] | None = None + + +def create_text_chat(text: str) -> ChatMessage: + return ChatMessage( + timestamp=datetime.utcnow(), + msg_id=uuid4(), + content=[TextContent(type="text", text=text)], + ) + +def create_end_session_chat() -> ChatMessage: + return ChatMessage( + timestamp=datetime.utcnow(), + msg_id=uuid4(), + content=[EndSessionContent(type="end-session")], + ) + + + +chat_proto = Protocol(name="AgentChatProtcol", version="0.2.1") + +struct_output_client_proto = Protocol( + name="StructuredOutputClientProtocol", version="0.1.0" +) + + +class StructuredOutputPrompt(Model): + prompt: str + output_schema: dict[str, Any] + + +class StructuredOutputResponse(Model): + output: dict[str, Any] + + +@chat_proto.on_message(ChatMessage) +async def handle_message(ctx: Context, sender: str, msg: ChatMessage): + ctx.logger.info(f"Got a message from {sender}: {msg.content[0].text}") + ctx.storage.set(str(ctx.session), sender) + await ctx.send( + sender, + ChatAcknowledgement(timestamp=datetime.utcnow(), acknowledged_msg_id=msg.msg_id), + ) + + for item in msg.content: + if isinstance(item, StartSessionContent): + ctx.logger.info(f"Got a start session message from {sender}") + continue + elif isinstance(item, TextContent): + ctx.logger.info(f"Got a message from {sender}: {item.text}") + ctx.storage.set(str(ctx.session), sender) + await ctx.send( + AI_AGENT_ADDRESS, + StructuredOutputPrompt( + prompt=item.text, output_schema=WeatherForecastRequest.schema() + ), + ) + else: + ctx.logger.info(f"Got unexpected content from {sender}") + + +@chat_proto.on_message(ChatAcknowledgement) +async def handle_ack(ctx: Context, sender: str, msg: ChatAcknowledgement): + ctx.logger.info( + f"Got an acknowledgement from {sender} for {msg.acknowledged_msg_id}" + ) + + +@struct_output_client_proto.on_message(StructuredOutputResponse) +async def handle_structured_output_response( + ctx: Context, sender: str, msg: StructuredOutputResponse +): + session_sender = 
ctx.storage.get(str(ctx.session)) + if session_sender is None: + ctx.logger.error( + "Discarding message because no session sender found in storage" + ) + return + + if "" in str(msg.output): + await ctx.send( + session_sender, + create_text_chat( + "Sorry, I couldn't process your location request. Please try again later." + ), + ) + return + + prompt = WeatherForecastRequest.parse_obj(msg.output) + + try: + weather_forecast = await get_weather(prompt) + except Exception as err: + ctx.logger.error(err) + await ctx.send( + session_sender, + create_text_chat( + "Sorry, I couldn't process your request. Please try again later." + ), + ) + return + + if "error" in weather_forecast: + await ctx.send(session_sender, create_text_chat(str(weather_forecast["error"]))) + return + + chat_message = create_text_chat( + f"Location: {weather_forecast['location']}\n" + f"Temperature: {weather_forecast['temp']}\n" + f"Condition: {weather_forecast['condition']}\n" + f"Humidity: {weather_forecast['humidity']}\n" + f"Windspeed: {weather_forecast['wind_speed']}\n" + ) + + await ctx.send(session_sender, chat_message) + await ctx.send(session_sender, create_end_session_chat()) diff --git a/6-deployed-agents/utility/weather-agent/pyproject.toml b/6-deployed-agents/utility/weather-agent/pyproject.toml new file mode 100644 index 0000000..3e948f1 --- /dev/null +++ b/6-deployed-agents/utility/weather-agent/pyproject.toml @@ -0,0 +1,15 @@ +[tool.poetry] +name = "weather-agent" +version = "0.1.0" +description = "Weather Agent" +authors = ["Kshipra Dhame "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.10,<3.13" +uagents = "^0.15.2" +requests = "^2.32.3" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/6-deployed-agents/utility/weather-agent/weather.py b/6-deployed-agents/utility/weather-agent/weather.py new file mode 100644 index 0000000..6f146f7 --- /dev/null +++ b/6-deployed-agents/utility/weather-agent/weather.py @@ -0,0 +1,43 @@ +import os +import requests + +from uagents import Model +from uagents.models import Field + +API_KEY = os.getenv("WEATHERAPI_KEY") + +class WeatherForecastRequest(Model): + location: str = Field( + description="Location", + ) + +class WeatherForecastResponse(Model): + location: str + temp: float + condition: str + humidity: float + wind_speed: float + + +async def get_weather(location) -> dict: + url = f"http://api.weatherapi.com/v1/current.json?key={API_KEY}&q={location}&aqi=no" + + try: + response = requests.get(url, timeout=10) + except requests.exceptions.Timeout: + return {"error": "The request timed out. Please try again."} + except requests.exceptions.RequestException as e: + return {"error": f"An error occurred: {e}"} + + weather_data = response.json() + + if "error" in weather_data: + return {"error": weather_data["error"]["message"]} + + return { + "location": location, + "temp": weather_data["current"]["temp_c"], + "condition": weather_data["current"]["condition"]["text"], + "humidity": weather_data["current"]["humidity"], + "wind_speed": weather_data["current"]["wind_kph"], + } \ No newline at end of file