Skip to content

Commit 8783ac5

Browse files
Python: Introducing Foundry Local Chat Clients (#2915)
* redo foundry local chat client * fix mypy and spelling * better docstring, updated sample * fixed tests and added tests * small sample update
1 parent e15eab7 commit 8783ac5

File tree

11 files changed

+658
-5
lines changed

11 files changed

+658
-5
lines changed
Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
MIT License
2+
3+
Copyright (c) Microsoft Corporation.
4+
5+
Permission is hereby granted, free of charge, to any person obtaining a copy
6+
of this software and associated documentation files (the "Software"), to deal
7+
in the Software without restriction, including without limitation the rights
8+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9+
copies of the Software, and to permit persons to whom the Software is
10+
furnished to do so, subject to the following conditions:
11+
12+
The above copyright notice and this permission notice shall be included in all
13+
copies or substantial portions of the Software.
14+
15+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21+
SOFTWARE.
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
# Get Started with Microsoft Agent Framework Foundry Local
2+
3+
Please install this package as the extra for `agent-framework`:
4+
5+
```bash
6+
pip install agent-framework-foundry-local --pre
7+
```
8+
9+
and see the [README](https://github.com/microsoft/agent-framework/tree/main/python/README.md) for more information.
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
# Copyright (c) Microsoft. All rights reserved.

from importlib.metadata import PackageNotFoundError, version

from ._foundry_local_client import FoundryLocalClient

try:
    # Report the version recorded in the installed package metadata.
    __version__ = version(__name__)
except PackageNotFoundError:
    # Package is not installed (e.g. running from a source checkout).
    __version__ = "0.0.0"

__all__ = [
    "FoundryLocalClient",
    "__version__",
]
Lines changed: 160 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,160 @@
1+
# Copyright (c) Microsoft. All rights reserved.
2+
3+
from typing import Any, ClassVar
4+
5+
from agent_framework import use_chat_middleware, use_function_invocation
6+
from agent_framework._pydantic import AFBaseSettings
7+
from agent_framework.exceptions import ServiceInitializationError
8+
from agent_framework.observability import use_instrumentation
9+
from agent_framework.openai._chat_client import OpenAIBaseChatClient
10+
from foundry_local import FoundryLocalManager
11+
from foundry_local.models import DeviceType
12+
from openai import AsyncOpenAI
13+
14+
__all__ = [
15+
"FoundryLocalClient",
16+
]
17+
18+
19+
class FoundryLocalSettings(AFBaseSettings):
20+
"""Foundry local model settings.
21+
22+
The settings are first loaded from environment variables with the prefix 'FOUNDRY_LOCAL_'.
23+
If the environment variables are not found, the settings can be loaded from a .env file
24+
with the encoding 'utf-8'. If the settings are not found in the .env file, the settings
25+
are ignored; however, validation will fail alerting that the settings are missing.
26+
27+
Attributes:
28+
model_id: The name of the model deployment to use.
29+
(Env var FOUNDRY_LOCAL_MODEL_ID)
30+
Parameters:
31+
env_file_path: If provided, the .env settings are read from this file path location.
32+
env_file_encoding: The encoding of the .env file, defaults to 'utf-8'.
33+
"""
34+
35+
env_prefix: ClassVar[str] = "FOUNDRY_LOCAL_"
36+
37+
model_id: str
38+
39+
40+
@use_function_invocation
41+
@use_instrumentation
42+
@use_chat_middleware
43+
class FoundryLocalClient(OpenAIBaseChatClient):
44+
"""Foundry Local Chat completion class."""
45+
46+
def __init__(
47+
self,
48+
model_id: str | None = None,
49+
*,
50+
bootstrap: bool = True,
51+
timeout: float | None = None,
52+
prepare_model: bool = True,
53+
device: DeviceType | None = None,
54+
env_file_path: str | None = None,
55+
env_file_encoding: str = "utf-8",
56+
**kwargs: Any,
57+
) -> None:
58+
"""Initialize a FoundryLocalClient.
59+
60+
Keyword Args:
61+
model_id: The Foundry Local model ID or alias to use. If not provided,
62+
it will be loaded from the FoundryLocalSettings.
63+
bootstrap: Whether to start the Foundry Local service if not already running.
64+
Default is True.
65+
timeout: Optional timeout for requests to Foundry Local.
66+
This timeout is applied to any call to the Foundry Local service.
67+
prepare_model: Whether to download the model into the cache, and load the model into
68+
the inferencing service upon initialization. Default is True.
69+
If false, the first call to generate a completion will load the model,
70+
and might take a long time.
71+
device: The device type to use for model inference.
72+
The device is used to select the appropriate model variant.
73+
If not provided, the default device for your system will be used.
74+
The values are in the foundry_local.models.DeviceType enum.
75+
env_file_path: If provided, the .env settings are read from this file path location.
76+
env_file_encoding: The encoding of the .env file, defaults to 'utf-8'.
77+
kwargs: Additional keyword arguments, are passed to the OpenAIBaseChatClient.
78+
This can include middleware and additional properties.
79+
80+
Examples:
81+
82+
.. code-block:: python
83+
84+
# Create a FoundryLocalClient with a specific model ID:
85+
from agent_framework_foundry_local import FoundryLocalClient
86+
87+
client = FoundryLocalClient(model_id="phi-4-mini")
88+
89+
agent = client.create_agent(
90+
name="LocalAgent",
91+
instructions="You are a helpful agent.",
92+
tools=get_weather,
93+
)
94+
response = await agent.run("What's the weather like in Seattle?")
95+
96+
# Or you can set the model id in the environment:
97+
os.environ["FOUNDRY_LOCAL_MODEL_ID"] = "phi-4-mini"
98+
client = FoundryLocalClient()
99+
100+
# A FoundryLocalManager is created and if set, the service is started.
101+
# The FoundryLocalManager is available via the `manager` property.
102+
# For instance to find out which models are available:
103+
for model in client.manager.list_catalog_models():
104+
print(f"- {model.alias} for {model.task} - id={model.id}")
105+
106+
# Other options include specifying the device type:
107+
from foundry_local.models import DeviceType
108+
109+
client = FoundryLocalClient(
110+
model_id="phi-4-mini",
111+
device=DeviceType.GPU,
112+
)
113+
# and choosing if the model should be prepared on initialization:
114+
client = FoundryLocalClient(
115+
model_id="phi-4-mini",
116+
prepare_model=False,
117+
)
118+
# Beware, in this case the first request to generate a completion
119+
# will take a long time as the model is loaded then.
120+
# Alternatively, you could call the `download_model` and `load_model` methods
121+
# on the `manager` property manually.
122+
client.manager.download_model(alias_or_model_id="phi-4-mini", device=DeviceType.CPU)
123+
client.manager.load_model(alias_or_model_id="phi-4-mini", device=DeviceType.CPU)
124+
125+
# You can also use the CLI:
126+
`foundry model load phi-4-mini --device Auto`
127+
128+
Raises:
129+
ServiceInitializationError: If the specified model ID or alias is not found.
130+
Sometimes a model might be available but if you have specified a device
131+
type that is not supported by the model, it will not be found.
132+
133+
"""
134+
settings = FoundryLocalSettings(
135+
model_id=model_id, # type: ignore
136+
env_file_path=env_file_path,
137+
env_file_encoding=env_file_encoding,
138+
)
139+
manager = FoundryLocalManager(bootstrap=bootstrap, timeout=timeout)
140+
model_info = manager.get_model_info(
141+
alias_or_model_id=settings.model_id,
142+
device=device,
143+
)
144+
if model_info is None:
145+
message = (
146+
f"Model with ID or alias '{settings.model_id}:{device.value}' not found in Foundry Local."
147+
if device
148+
else f"Model with ID or alias '{settings.model_id}' for your current device not found in Foundry Local."
149+
)
150+
raise ServiceInitializationError(message)
151+
if prepare_model:
152+
manager.download_model(alias_or_model_id=model_info.id, device=device)
153+
manager.load_model(alias_or_model_id=model_info.id, device=device)
154+
155+
super().__init__(
156+
model_id=model_info.id,
157+
client=AsyncOpenAI(base_url=manager.endpoint, api_key=manager.api_key),
158+
**kwargs,
159+
)
160+
self.manager = manager

python/packages/foundry_local/agent_framework_foundry_local/py.typed

Whitespace-only changes.
Lines changed: 87 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,87 @@
1+
[project]
2+
name = "agent-framework-foundry-local"
3+
description = "Foundry Local integration for Microsoft Agent Framework."
4+
authors = [{ name = "Microsoft", email = "[email protected]"}]
5+
readme = "README.md"
6+
requires-python = ">=3.10"
7+
version = "1.0.0b251218"
8+
license-files = ["LICENSE"]
9+
urls.homepage = "https://aka.ms/agent-framework"
10+
urls.source = "https://github.com/microsoft/agent-framework/tree/main/python"
11+
urls.release_notes = "https://github.com/microsoft/agent-framework/releases?q=tag%3Apython-1&expanded=true"
12+
urls.issues = "https://github.com/microsoft/agent-framework/issues"
13+
classifiers = [
14+
"License :: OSI Approved :: MIT License",
15+
"Development Status :: 4 - Beta",
16+
"Intended Audience :: Developers",
17+
"Programming Language :: Python :: 3",
18+
"Programming Language :: Python :: 3.10",
19+
"Programming Language :: Python :: 3.11",
20+
"Programming Language :: Python :: 3.12",
21+
"Programming Language :: Python :: 3.13",
22+
"Programming Language :: Python :: 3.14",
23+
"Typing :: Typed",
24+
]
25+
dependencies = [
26+
"agent-framework-core",
27+
"foundry-local-sdk>=0.5.1,<1",
28+
]
29+
30+
[tool.uv]
31+
prerelease = "if-necessary-or-explicit"
32+
environments = [
33+
"sys_platform == 'darwin'",
34+
"sys_platform == 'linux'",
35+
"sys_platform == 'win32'"
36+
]
37+
38+
[tool.uv-dynamic-versioning]
39+
fallback-version = "0.0.0"
40+
[tool.pytest.ini_options]
41+
testpaths = 'tests'
42+
addopts = "-ra -q -r fEX"
43+
asyncio_mode = "auto"
44+
asyncio_default_fixture_loop_scope = "function"
45+
filterwarnings = []
46+
timeout = 120
47+
48+
[tool.ruff]
49+
extend = "../../pyproject.toml"
50+
51+
[tool.coverage.run]
52+
omit = [
53+
"**/__init__.py"
54+
]
55+
56+
[tool.pyright]
57+
extends = "../../pyproject.toml"
58+
exclude = ['tests']
59+
60+
[tool.mypy]
61+
plugins = ['pydantic.mypy']
62+
strict = true
63+
python_version = "3.10"
64+
ignore_missing_imports = true
65+
disallow_untyped_defs = true
66+
no_implicit_optional = true
67+
check_untyped_defs = true
68+
warn_return_any = true
69+
show_error_codes = true
70+
warn_unused_ignores = false
71+
disallow_incomplete_defs = true
72+
disallow_untyped_decorators = true
73+
74+
[tool.bandit]
75+
targets = ["agent_framework_foundry_local"]
76+
exclude_dirs = ["tests"]
77+
78+
[tool.poe]
79+
executor.type = "uv"
80+
include = "../../shared_tasks.toml"
81+
[tool.poe.tasks]
82+
mypy = "mypy --config-file $POE_ROOT/pyproject.toml agent_framework_foundry_local"
83+
test = "pytest --cov=agent_framework_foundry_local --cov-report=term-missing:skip-covered tests"
84+
85+
[build-system]
86+
requires = ["flit-core >= 3.11,<4.0"]
87+
build-backend = "flit_core.buildapi"
Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
# Copyright (c) Microsoft. All rights reserved.
2+
# ruff: noqa
3+
4+
import asyncio
5+
from random import randint
6+
from typing import TYPE_CHECKING, Annotated
7+
8+
from agent_framework_foundry_local import FoundryLocalClient
9+
10+
if TYPE_CHECKING:
11+
from agent_framework import ChatAgent
12+
13+
"""
14+
This sample demonstrates basic usage of the FoundryLocalClient.
15+
Shows both streaming and non-streaming responses with function tools.
16+
17+
Running this sample the first time will be slow, as the model needs to be
18+
downloaded and initialized.
19+
20+
Also, not every model supports function calling, so be sure to check the
21+
model capabilities in the Foundry catalog, or pick one from the list printed
22+
when running this sample.
23+
"""
24+
25+
26+
def get_weather(
27+
location: Annotated[str, "The location to get the weather for."],
28+
) -> str:
29+
"""Get the weather for a given location."""
30+
conditions = ["sunny", "cloudy", "rainy", "stormy"]
31+
return f"The weather in {location} is {conditions[randint(0, 3)]} with a high of {randint(10, 30)}°C."
32+
33+
34+
async def non_streaming_example(agent: "ChatAgent") -> None:
35+
"""Example of non-streaming response (get the complete result at once)."""
36+
print("=== Non-streaming Response Example ===")
37+
38+
query = "What's the weather like in Seattle?"
39+
print(f"User: {query}")
40+
result = await agent.run(query)
41+
print(f"Agent: {result}\n")
42+
43+
44+
async def streaming_example(agent: "ChatAgent") -> None:
45+
"""Example of streaming response (get results as they are generated)."""
46+
print("=== Streaming Response Example ===")
47+
48+
query = "What's the weather like in Amsterdam?"
49+
print(f"User: {query}")
50+
print("Agent: ", end="", flush=True)
51+
async for chunk in agent.run_stream(query):
52+
if chunk.text:
53+
print(chunk.text, end="", flush=True)
54+
print("\n")
55+
56+
57+
async def main() -> None:
58+
print("=== Basic Foundry Local Client Agent Example ===")
59+
60+
client = FoundryLocalClient(model_id="phi-4-mini")
61+
print(f"Client Model ID: {client.model_id}\n")
62+
print("Other available models (tool calling supported only):")
63+
for model in client.manager.list_catalog_models():
64+
if model.supports_tool_calling:
65+
print(
66+
f"- {model.alias} for {model.task} - id={model.id} - {(model.file_size_mb / 1000):.2f} GB - {model.license}"
67+
)
68+
agent = client.create_agent(
69+
name="LocalAgent",
70+
instructions="You are a helpful agent.",
71+
tools=get_weather,
72+
)
73+
await non_streaming_example(agent)
74+
await streaming_example(agent)
75+
76+
77+
if __name__ == "__main__":
78+
asyncio.run(main())

0 commit comments

Comments
 (0)