
Commit 986e07f

Merge pull request #155 from ks6088ts-labs/feature/issue-154_foundry-local

support foundry local

2 parents: 6571fc4 + 6baec27

File tree: 5 files changed (+84, −7 lines)

.env.template — 3 additions, 0 deletions

@@ -30,6 +30,9 @@ AZURE_AI_FOUNDRY_INFERENCE_MODEL_CHAT="gpt-5"
 ## Ollama Settings
 OLLAMA_MODEL_CHAT="gemma3:latest"
 
+## Foundry Local Settings
+FOUNDRY_LOCAL_MODEL_CHAT="phi-3-mini-4k"
+
 # ---------
 # Tools
 # ---------
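
For reference, the Streamlit playground changed below reads this variable with a plain getenv call. A minimal sketch, assuming python-dotenv is installed, mirroring the lookup added in playground_chat.py:

# Minimal sketch: resolve FOUNDRY_LOCAL_MODEL_CHAT the way the playground
# page does (load .env first, then fall back to the "phi-3-mini-4k" default).
from os import getenv

from dotenv import load_dotenv

load_dotenv(override=True)
model_alias = getenv("FOUNDRY_LOCAL_MODEL_CHAT", "phi-3-mini-4k")
print(model_alias)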

pyproject.toml — 1 addition, 0 deletions

@@ -14,6 +14,7 @@ dependencies = [
     "dspy>=3.0.3",
     "elasticsearch>=9.1.0",
     "fastapi[standard]>=0.116.1",
+    "foundry-local-sdk>=0.4.0",
     "httpx>=0.28.1",
     "jinja2>=3.1.2",
     "langchain-azure-ai>=0.1.4",
template_langgraph/llms/foundry_locals.py — 38 additions, 0 deletions (new file)

@@ -0,0 +1,38 @@
+from functools import lru_cache
+
+from foundry_local import FoundryLocalManager
+from langchain_openai import ChatOpenAI
+from pydantic_settings import BaseSettings, SettingsConfigDict
+
+
+class Settings(BaseSettings):
+    foundry_local_model_chat: str = "phi-3-mini-4k"
+
+    model_config = SettingsConfigDict(
+        env_file=".env",
+        env_ignore_empty=True,
+        extra="ignore",
+    )
+
+
+@lru_cache
+def get_foundry_local_settings() -> Settings:
+    return Settings()
+
+
+class FoundryLocalWrapper:
+    def __init__(self, settings: Settings = None):
+        if settings is None:
+            settings = get_foundry_local_settings()
+
+        self.foundry_local_manager = FoundryLocalManager(
+            alias_or_model_id=settings.foundry_local_model_chat,
+        )
+
+        self.chat_model = ChatOpenAI(
+            model=self.foundry_local_manager.get_model_info(settings.foundry_local_model_chat).id,
+            base_url=self.foundry_local_manager.endpoint,
+            api_key=self.foundry_local_manager.api_key,
+            temperature=0.0,
+            streaming=True,
+        )
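
A minimal usage sketch of the wrapper above. It assumes the Foundry Local runtime is installed on the machine (FoundryLocalManager may download and start the aliased model, so the first call can be slow); HumanMessage comes from langchain-core, a dependency of langchain-openai:

# Sketch: construct the wrapper and run one chat turn through the
# LangChain model it exposes.
from langchain_core.messages import HumanMessage

from template_langgraph.llms.foundry_locals import FoundryLocalWrapper, Settings

wrapper = FoundryLocalWrapper(settings=Settings(foundry_local_model_chat="phi-3-mini-4k"))
reply = wrapper.chat_model.invoke([HumanMessage(content="Hello!")])
print(reply.content)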

template_langgraph/services/streamlits/pages/playground_chat.py — 27 additions, 7 deletions

@@ -8,6 +8,8 @@
 from langchain_openai import AzureChatOpenAI
 from openai import APIConnectionError, APIStatusError, APITimeoutError
 
+from template_langgraph.llms.foundry_locals import FoundryLocalWrapper
+from template_langgraph.llms.foundry_locals import Settings as FoundryLocalSettings
 from template_langgraph.loggers import get_logger
 
 load_dotenv(override=True)
@@ -24,7 +26,7 @@ def image_to_base64(image_bytes: bytes) -> str:
     "# Common Settings"
     stream_mode = st.checkbox(
         label="ストリーム出力を有効にする",
-        value=True,
+        value=False,
         key="STREAM_MODE",
     )
     "# Model"
@@ -33,6 +35,7 @@ def image_to_base64(image_bytes: bytes) -> str:
         options=[
             "azure",
             "ollama",
+            "foundry_local",
         ],
         index=0,
         key="model_choice",
@@ -66,7 +69,6 @@ def image_to_base64(image_bytes: bytes) -> str:
         "### Documents"
         "[Azure Portal](https://portal.azure.com/)"
         "[Azure OpenAI Studio](https://oai.azure.com/resource/overview)"
-        "[View the source code](https://github.com/ks6088ts-labs/template-streamlit)"
     elif model_choice == "ollama":
         ollama_model_chat = st.text_input(
             label="OLLAMA_MODEL_CHAT",
@@ -76,10 +78,18 @@ def image_to_base64(image_bytes: bytes) -> str:
         )
         "### Documents"
         "[Ollama Docs](https://github.com/ollama/ollama)"
-        "[View the source code](https://github.com/ks6088ts-labs/template-streamlit)"
+    elif model_choice == "foundry_local":
+        foundry_local_model_chat = st.text_input(
+            label="FOUNDRY_LOCAL_MODEL_CHAT",
+            value=getenv("FOUNDRY_LOCAL_MODEL_CHAT", "phi-3-mini-4k"),
+            key="FOUNDRY_LOCAL_MODEL_CHAT",
+            type="default",
+        )
+        "### Documents"
+        "[Get started with Foundry Local](https://learn.microsoft.com/en-us/azure/ai-foundry/foundry-local/get-started)"
     else:
-        st.error("Invalid model choice. Please select either 'azure' or 'ollama'.")
-        raise ValueError("Invalid model choice. Please select either 'azure' or 'ollama'.")
+        st.error("Invalid model choice. Please select either 'azure', 'ollama', or 'foundry_local'.")
+        raise ValueError("Invalid model choice. Please select either 'azure', 'ollama', or 'foundry_local'.")
 
 
 def is_azure_configured():
@@ -96,8 +106,12 @@ def is_ollama_configured():
     return st.session_state.get("OLLAMA_MODEL_CHAT") and st.session_state.get("model_choice") == "ollama"
 
 
+def is_foundry_local_configured():
+    return st.session_state.get("FOUNDRY_LOCAL_MODEL_CHAT") and st.session_state.get("model_choice") == "foundry_local"
+
+
 def is_configured():
-    return is_azure_configured() or is_ollama_configured()
+    return is_azure_configured() or is_ollama_configured() or is_foundry_local_configured()
 
 
 def get_model():
@@ -112,7 +126,13 @@ def get_model():
         return ChatOllama(
             model=st.session_state.get("OLLAMA_MODEL_CHAT", ""),
         )
-    raise ValueError("No model is configured. Please set up the Azure or Ollama model in the sidebar.")
+    elif is_foundry_local_configured():
+        return FoundryLocalWrapper(
+            settings=FoundryLocalSettings(
+                foundry_local_model_chat=st.session_state.get("FOUNDRY_LOCAL_MODEL_CHAT", "phi-3-mini-4k"),
+            )
+        ).chat_model
+    raise ValueError("No model is configured. Please set up the Azure, Ollama, or Foundry Local model in the sidebar.")
 
 
 st.title("Chat Playground")
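
For context on the stream_mode checkbox whose default flips to False above, a hedged sketch of the two consumption paths for the model returned by get_model(); the chat loop itself sits outside this diff, and the print calls here are stand-ins for the page's Streamlit widgets:

# Sketch: the model from get_model() supports both LangChain call styles.
# `messages` stands in for the page's chat history.
model = get_model()
messages = [("user", "Hello!")]

if stream_mode:
    # Streaming path (checkbox on): consume incremental chunks.
    for chunk in model.stream(messages):
        print(chunk.content, end="", flush=True)
else:
    # Non-streaming path (the new default): single blocking call.
    reply = model.invoke(messages)
    print(reply.content)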

uv.lock — 15 additions, 0 deletions (generated file; diff not rendered)
