File tree Expand file tree Collapse file tree 1 file changed +2
-2
lines changed
articles/ai-foundry/foundry-local/includes/use-langchain Expand file tree Collapse file tree 1 file changed +2
-2
lines changed Original file line number Diff line number Diff line change @@ -36,15 +36,15 @@ import { ChatPromptTemplate } from "@langchain/core/prompts";
3636// to your end-user's device.
3737// TIP: You can find a list of available models by running the
3838// following command in your terminal: `foundry model list`.
39- const modelAlias = "phi-3-mini-4k";
39+ const alias = "phi-3-mini-4k";
4040
4141// Create a FoundryLocalManager instance. This will start the Foundry
4242// Local service if it is not already running.
4343const foundryLocalManager = new FoundryLocalManager()
4444
4545// Initialize the manager with a model. This will download the model
4646// if it is not already present on the user's device.
47- const modelInfo = await foundryLocalManager.init(modelAlias)
47+ const modelInfo = await foundryLocalManager.init(alias)
4848console.log("Model Info:", modelInfo)
4949
5050// Configure ChatOpenAI to use your locally-running model
You can’t perform that action at this time.
0 commit comments