Skip to content

Commit d7a6e8f

Browse files
authored
Local apps: add MLX LM (huggingface#1443)
Still hesitating between this approach (adding it as a local app) and updating the library snippets directly — what do you think, @pcuenca? (Maybe let's add it as a local app and change the library snippets to Python?)
1 parent da193f1 commit d7a6e8f

File tree

1 file changed

+42
-0
lines changed

1 file changed

+42
-0
lines changed

packages/tasks/src/local-apps.ts

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -262,6 +262,41 @@ const snippetTgi = (model: ModelData): LocalAppSnippet[] => {
262262
];
263263
};
264264

265+
const snippetMlxLm = (model: ModelData): LocalAppSnippet[] => {
266+
const openaiCurl = [
267+
"# Calling the OpenAI-compatible server with curl",
268+
`curl -X POST "http://localhost:8000/v1/chat/completions" \\`,
269+
` -H "Content-Type: application/json" \\`,
270+
` --data '{`,
271+
` "model": "${model.id}",`,
272+
` "messages": [`,
273+
` {"role": "user", "content": "Hello"}`,
274+
` ]`,
275+
` }'`,
276+
];
277+
278+
return [
279+
{
280+
title: "Generate or start a chat session",
281+
setup: ["# Install MLX LM", "uv tool install mlx-lm"].join("\n"),
282+
content: [
283+
...(model.tags.includes("conversational")
284+
? ["# Interactive chat REPL", `mlx_lm.chat --model "${model.id}"`]
285+
: ["# Generate some text", `mlx_lm.generate --model "${model.id}" --prompt "Once upon a time"`]),
286+
].join("\n"),
287+
},
288+
...(model.tags.includes("conversational")
289+
? [
290+
{
291+
title: "Run an OpenAI-compatible server",
292+
setup: ["# Install MLX LM", "uv tool install mlx-lm"].join("\n"),
293+
content: ["# Start the server", `mlx_lm.server --model "${model.id}"`, ...openaiCurl].join("\n"),
294+
},
295+
]
296+
: []),
297+
];
298+
};
299+
265300
/**
266301
* Add your new local app here.
267302
*
@@ -302,6 +337,13 @@ export const LOCAL_APPS = {
302337
(model.pipeline_tag === "text-generation" || model.pipeline_tag === "image-text-to-text"),
303338
snippet: snippetVllm,
304339
},
340+
"mlx-lm": {
341+
prettyLabel: "MLX LM",
342+
docsUrl: "https://github.com/ml-explore/mlx-lm",
343+
mainTask: "text-generation",
344+
displayOnModelPage: (model) => model.pipeline_tag === "text-generation" && isMlxModel(model),
345+
snippet: snippetMlxLm,
346+
},
305347
tgi: {
306348
prettyLabel: "TGI",
307349
docsUrl: "https://huggingface.co/docs/text-generation-inference/",

0 commit comments

Comments
 (0)