|
17 | 17 | import os |
18 | 18 | from typing import Optional, Union |
19 | 19 |
|
| 20 | +# If the user didn't set LITELLM_LOCAL_MODEL_COST_MAP, set it to "True" |
| 21 | +# to enable the local model cost map. |
| 22 | +# The value is `false` by default, which brings a heavy performance burden: |
| 23 | +# for instance, importing `litellm` takes about 10 s. |
| 24 | +if not os.getenv("LITELLM_LOCAL_MODEL_COST_MAP"): |
| 25 | + os.environ["LITELLM_LOCAL_MODEL_COST_MAP"] = "True" |
| 26 | + |
20 | 27 | from google.adk.agents import LlmAgent, RunConfig |
21 | 28 | from google.adk.agents.base_agent import BaseAgent |
22 | 29 | from google.adk.agents.llm_agent import InstructionProvider, ToolUnion |
@@ -71,62 +78,6 @@ class Agent(LlmAgent): |
71 | 78 | short_term_memory (Optional[ShortTermMemory]): Session-based memory for temporary context. |
72 | 79 | long_term_memory (Optional[LongTermMemory]): Cross-session memory for persistent user context. |
73 | 80 | tracers (list[BaseTracer]): List of tracers used for telemetry and monitoring. |
74 | | -
|
75 | | - Notes: |
76 | | - Before creating your agent, you should get the API Key for your model. |
77 | | -
|
78 | | - Examples: |
79 | | - ### Simple agent |
80 | | -
|
81 | | - Create a simplest agent without any extra settings. All agent attributes are come from environment variables and default values. Like: |
82 | | -
|
83 | | - ```python |
84 | | - import asyncio |
85 | | -
|
86 | | - from veadk import Agent, Runner |
87 | | -
|
88 | | - root_agent = Agent() |
89 | | -
|
90 | | - runner = Runner(agent=root_agent) |
91 | | -
|
92 | | - response = asyncio.run(runner.run("hello")) |
93 | | - print(response) |
94 | | - ``` |
95 | | -
|
96 | | - You can set some agent metadata attributes by the following code: |
97 | | -
|
98 | | - ```python |
99 | | - from veadk import Agent |
100 | | -
|
101 | | - from veadk import Agent, Runner |
102 | | -
|
103 | | - root_agent = Agent( |
104 | | - name="meeting_assistant", |
105 | | - description="An assistant that helps user to make meetings.", |
106 | | - # system prompt |
107 | | - instruction="First learn about user's meeting time, location, and other key informations, and give out a meeting plan.", |
108 | | - ) |
109 | | - ``` |
110 | | -
|
111 | | - Or, once you want to use your local-serving model or models from other provider, you can specify some model-related configurations in initiation arguments: |
112 | | -
|
113 | | - ```python |
114 | | - agent = Agent(model_name="", model_api_key="", model_api_base="") |
115 | | - ``` |
116 | | -
|
117 | | - Besides, you can specify some extra options by ARK requirements, such as: |
118 | | -
|
119 | | - ```python |
120 | | - # disable thinking |
121 | | - model_extra_config = {} |
122 | | - ``` |
123 | | -
|
124 | | - In some systems, mulitple-agent based design is necessary, you can implement a multiple-agent system by `sub_agent` argument: |
125 | | -
|
126 | | - ```python |
127 | | - from veadk import Agent |
128 | | - ``` |
129 | | -
|
130 | 81 | """ |
131 | 82 |
|
132 | 83 | model_config = ConfigDict(arbitrary_types_allowed=True, extra="allow") |
@@ -227,9 +178,10 @@ def model_post_init(self, __context: Any) -> None: |
227 | 178 | if self.long_term_memory is not None: |
228 | 179 | from google.adk.tools import load_memory |
229 | 180 |
|
230 | | - if not load_memory.custom_metadata: |
231 | | - load_memory.custom_metadata = {} |
232 | | - load_memory.custom_metadata["backend"] = self.long_term_memory.backend |
| 181 | + if hasattr(load_memory, "custom_metadata"): |
| 182 | + if not load_memory.custom_metadata: |
| 183 | + load_memory.custom_metadata = {} |
| 184 | + load_memory.custom_metadata["backend"] = self.long_term_memory.backend |
233 | 185 | self.tools.append(load_memory) |
234 | 186 |
|
235 | 187 | logger.info(f"VeADK version: {VERSION}") |
|
0 commit comments