|
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| 15 | +from __future__ import annotations |
| 16 | + |
| 17 | +import inspect |
| 18 | +from typing import Optional |
| 19 | +from typing import TYPE_CHECKING |
| 20 | +from typing import Union |
| 21 | + |
| 22 | +from google.adk.agents.callback_context import CallbackContext |
| 23 | +from google.adk.agents.readonly_context import ReadonlyContext |
| 24 | +from google.adk.models.llm_request import LlmRequest |
| 25 | +from google.adk.models.llm_response import LlmResponse |
| 26 | +from google.adk.plugins.base_plugin import BasePlugin |
| 27 | +from google.adk.utils import instructions_utils |
| 28 | + |
| 29 | +if TYPE_CHECKING: |
| 30 | + from google.adk.agents.llm_agent import InstructionProvider |
| 31 | + from google.adk.agents.llm_agent import LlmAgent |
| 32 | + |
| 33 | + |
class GlobalInstructionPlugin(BasePlugin):
  """Applies an application-wide global instruction to every LLM request.

  Successor to the deprecated ``global_instruction`` field on ``LlmAgent``:
  instead of configuring one agent, the plugin injects the instruction into
  every model call made by any agent in the app, giving a single place to
  define app-wide identity, personality, or policy text.

  Injection happens in ``before_model_callback``, which prepends the resolved
  instruction to the request's system instruction just before the request is
  sent to the model.
  """

  def __init__(
      self,
      global_instruction: Union[str, InstructionProvider] = "",
      name: str = "global_instruction",
  ) -> None:
    """Initialize the GlobalInstructionPlugin.

    Args:
      global_instruction: Instruction applied to every model request. Either a
        plain string (session-state placeholders are injected) or an
        InstructionProvider — a sync or async callable taking a
        ReadonlyContext and returning a string.
      name: Plugin name; defaults to "global_instruction".
    """
    super().__init__(name=name)
    self.global_instruction = global_instruction

  async def before_model_callback(
      self, *, callback_context: CallbackContext, llm_request: LlmRequest
  ) -> Optional[LlmResponse]:
    """Prepend the resolved global instruction to the request.

    Runs before every model call. The resolved instruction becomes the
    leading system instruction; any pre-existing system instruction is kept
    after it (joined with a blank line for strings, or prepended as the first
    element for iterable instructions).

    Args:
      callback_context: Context of the current agent call.
      llm_request: The prepared request about to be sent to the model.

    Returns:
      Always None, so the request proceeds to the model unmodified otherwise.
    """
    # Nothing configured (empty string / falsy) -> no-op.
    if not self.global_instruction:
      return None

    ctx = ReadonlyContext(callback_context.invocation_context)
    resolved = await self._resolve_global_instruction(ctx)
    # Provider may legitimately produce an empty instruction -> no-op.
    if not resolved:
      return None

    current = llm_request.config.system_instruction
    if not current:
      llm_request.config.system_instruction = resolved
    elif isinstance(current, str):
      # Keep the global instruction first, existing text after a blank line.
      llm_request.config.system_instruction = f"{resolved}\n\n{current}"
    else:
      # Iterable system instruction: prepend as the first element.
      llm_request.config.system_instruction = [resolved, *current]
    return None

  async def _resolve_global_instruction(
      self, readonly_context: ReadonlyContext
  ) -> str:
    """Resolve ``self.global_instruction`` into a final instruction string.

    Args:
      readonly_context: Read-only context used for state injection or passed
        to an InstructionProvider.

    Returns:
      The fully resolved global instruction string, ready to use.
    """
    if not isinstance(self.global_instruction, str):
      # InstructionProvider callable; it handles any state itself and may be
      # sync or async.
      produced = self.global_instruction(readonly_context)
      return await produced if inspect.isawaitable(produced) else produced
    # Plain string: substitute {state} placeholders from the session.
    return await instructions_utils.inject_session_state(
        self.global_instruction, readonly_context
    )