@@ -1355,15 +1355,14 @@ model = SwarmFormerModel.from_pretrained("${model.id}")
 
 const mlx_unknown = (model: ModelData): string[] => [
 	`# Download the model from the Hub
-pip install huggingface_hub hf_transfer
+pip install huggingface_hub[hf_xet]
 
-export HF_HUB_ENABLE_HF_TRANSFER=1
 huggingface-cli download --local-dir ${nameWithoutNamespace(model.id)} ${model.id}`,
 ];
 
 const mlxlm = (model: ModelData): string[] => [
 	`# Make sure mlx-lm is installed
-pip install --upgrade mlx-lm
+# pip install --upgrade mlx-lm
 
 # Generate text with mlx-lm
 from mlx_lm import load, generate
@@ -1376,7 +1375,7 @@ text = generate(model, tokenizer, prompt=prompt, verbose=True)`,
 
 const mlxchat = (model: ModelData): string[] => [
 	`# Make sure mlx-lm is installed
-pip install --upgrade mlx-lm
+# pip install --upgrade mlx-lm
 
 # Generate text with mlx-lm
 from mlx_lm import load, generate
@@ -1393,7 +1392,9 @@ text = generate(model, tokenizer, prompt=prompt, verbose=True)`,
 ];
 
 const mlxvlm = (model: ModelData): string[] => [
-	`Make sure mlx-vlm is installed
+	`# Make sure mlx-vlm is installed
+# pip install --upgrade mlx-vlm
+
 from mlx_vlm import load, generate
 from mlx_vlm.prompt_utils import apply_chat_template
 from mlx_vlm.utils import load_config
0 commit comments