 from langchain_openai import AzureChatOpenAI
 from openai import APIConnectionError, APIStatusError, APITimeoutError
 
+from template_langgraph.llms.foundry_locals import FoundryLocalWrapper
+from template_langgraph.llms.foundry_locals import Settings as FoundryLocalSettings
 from template_langgraph.loggers import get_logger
 
 load_dotenv(override=True)
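The two new imports pull in a wrapper module that is not part of this diff. For orientation, here is a minimal sketch of what `template_langgraph/llms/foundry_locals.py` plausibly contains, assuming it follows the Foundry Local SDK's documented pattern of managing a local model and pointing an OpenAI-compatible client at its endpoint. Only the names `FoundryLocalWrapper`, `Settings`, `foundry_local_model_chat`, and the `.chat_model` attribute are confirmed by the diff; everything else is an assumption:

```python
# Hypothetical sketch of template_langgraph/llms/foundry_locals.py -- the real
# module is not shown in this diff. Assumes the foundry-local-sdk and
# langchain-openai packages are installed.
from foundry_local import FoundryLocalManager
from langchain_openai import ChatOpenAI
from pydantic import BaseModel


class Settings(BaseModel):
    # Field name confirmed by the diff; the default alias matches the
    # sidebar default ("phi-3-mini-4k").
    foundry_local_model_chat: str = "phi-3-mini-4k"


class FoundryLocalWrapper:
    def __init__(self, settings: Settings) -> None:
        # FoundryLocalManager starts the local service if needed and
        # downloads/loads the model for the given alias.
        manager = FoundryLocalManager(settings.foundry_local_model_chat)
        model_info = manager.get_model_info(settings.foundry_local_model_chat)
        # Foundry Local exposes an OpenAI-compatible endpoint, so a stock
        # LangChain ChatOpenAI client works against it.
        self.chat_model = ChatOpenAI(
            model=model_info.id,
            base_url=manager.endpoint,
            api_key=manager.api_key,  # no real key needed for local usage
        )
```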
@@ -24,7 +26,7 @@ def image_to_base64(image_bytes: bytes) -> str:
2426 "# Common Settings"
2527 stream_mode = st .checkbox (
2628 label = "ストリーム出力を有効にする" ,
27- value = True ,
29+ value = False ,
2830 key = "STREAM_MODE" ,
2931 )
3032 "# Model"
@@ -33,6 +35,7 @@ def image_to_base64(image_bytes: bytes) -> str:
         options=[
             "azure",
             "ollama",
+            "foundry_local",
         ],
         index=0,
         key="model_choice",
@@ -66,7 +69,6 @@ def image_to_base64(image_bytes: bytes) -> str:
6669 "### Documents"
6770 "[Azure Portal](https://portal.azure.com/)"
6871 "[Azure OpenAI Studio](https://oai.azure.com/resource/overview)"
69- "[View the source code](https://github.com/ks6088ts-labs/template-streamlit)"
7072 elif model_choice == "ollama" :
7173 ollama_model_chat = st .text_input (
7274 label = "OLLAMA_MODEL_CHAT" ,
@@ -76,10 +78,18 @@ def image_to_base64(image_bytes: bytes) -> str:
         )
         "### Documents"
         "[Ollama Docs](https://github.com/ollama/ollama)"
-        "[View the source code](https://github.com/ks6088ts-labs/template-streamlit)"
+    elif model_choice == "foundry_local":
+        foundry_local_model_chat = st.text_input(
+            label="FOUNDRY_LOCAL_MODEL_CHAT",
+            value=getenv("FOUNDRY_LOCAL_MODEL_CHAT", "phi-3-mini-4k"),
+            key="FOUNDRY_LOCAL_MODEL_CHAT",
+            type="default",
+        )
+        "### Documents"
+        "[Get started with Foundry Local](https://learn.microsoft.com/en-us/azure/ai-foundry/foundry-local/get-started)"
     else:
-        st.error("Invalid model choice. Please select either 'azure' or 'ollama'.")
-        raise ValueError("Invalid model choice. Please select either 'azure' or 'ollama'.")
+        st.error("Invalid model choice. Please select either 'azure', 'ollama', or 'foundry_local'.")
+        raise ValueError("Invalid model choice. Please select either 'azure', 'ollama', or 'foundry_local'.")
 
 
 def is_azure_configured():
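A side note on the bare string literals throughout the sidebar (`"# Common Settings"`, the `"### Documents"` headings, and the markdown links): these rely on Streamlit's "magic" feature, where any bare expression on its own line is rendered into the app as if passed to `st.write`. A minimal standalone illustration:

```python
import streamlit as st

with st.sidebar:
    # Streamlit "magic": a bare string on its own line renders as Markdown,
    # equivalent to st.markdown("# Common Settings").
    "# Common Settings"
    "[Ollama Docs](https://github.com/ollama/ollama)"
```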
@@ -96,8 +106,12 @@ def is_ollama_configured():
     return st.session_state.get("OLLAMA_MODEL_CHAT") and st.session_state.get("model_choice") == "ollama"
 
 
+def is_foundry_local_configured():
+    return st.session_state.get("FOUNDRY_LOCAL_MODEL_CHAT") and st.session_state.get("model_choice") == "foundry_local"
+
+
 def is_configured():
-    return is_azure_configured() or is_ollama_configured()
+    return is_azure_configured() or is_ollama_configured() or is_foundry_local_configured()
 
 
 def get_model():
@@ -112,7 +126,13 @@ def get_model():
         return ChatOllama(
             model=st.session_state.get("OLLAMA_MODEL_CHAT", ""),
         )
-    raise ValueError("No model is configured. Please set up the Azure or Ollama model in the sidebar.")
+    elif is_foundry_local_configured():
+        return FoundryLocalWrapper(
+            settings=FoundryLocalSettings(
+                foundry_local_model_chat=st.session_state.get("FOUNDRY_LOCAL_MODEL_CHAT", "phi-3-mini-4k"),
+            )
+        ).chat_model
+    raise ValueError("No model is configured. Please set up the Azure, Ollama, or Foundry Local model in the sidebar.")
 
 
 st.title("Chat Playground")
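The chat loop itself sits below this hunk and is unchanged, so it is not shown. For context, a hedged sketch of how a page like this typically consumes `get_model()` together with the `STREAM_MODE` checkbox (whose `key=` mirrors its value into `st.session_state`); the prompt handling here is illustrative, not the file's actual code:

```python
# Illustrative only -- the actual chat loop is outside this diff.
if is_configured() and (prompt := st.chat_input("Send a message")):
    model = get_model()
    with st.chat_message("assistant"):
        if st.session_state.get("STREAM_MODE", False):
            # Stream tokens as they arrive; extract .content so
            # st.write_stream receives plain strings.
            st.write_stream(chunk.content for chunk in model.stream(prompt))
        else:
            # Single-shot completion.
            st.write(model.invoke(prompt).content)
```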