Skip to content

Commit b5b7b1c

Browse files
authored
Merge branch 'main' into node-llama-cpp-option
2 parents e88b85d + 9cce322 commit b5b7b1c

File tree

8 files changed

+93
-41
lines changed

8 files changed

+93
-41
lines changed

.github/pull_request_template/new_library.md

Lines changed: 0 additions & 34 deletions
This file was deleted.

.github/workflows/lint.yml

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,12 +18,15 @@ jobs:
1818
- name: "Extracting the merge base into 'SINCE'"
1919
id: since
2020
run: |
21-
if [ -z "${{ github.event.pull_request.head.ref }}" ]
21+
if [ -z $PR_REF ]
2222
then
23-
echo "SINCE=${{ github.sha }}^1" >> $GITHUB_OUTPUT
23+
echo "SINCE=$SHA^1" >> $GITHUB_OUTPUT
2424
else
25-
echo "SINCE=$(git merge-base origin/${{ github.event.pull_request.base.ref }} ${{ github.sha }})" >> $GITHUB_OUTPUT
25+
echo "SINCE=$(git merge-base origin/$PR_REF $SHA)" >> $GITHUB_OUTPUT
2626
fi
27+
env:
28+
PR_REF: ${{ github.event.pull_request.head.ref }}
29+
SHA: ${{ github.sha }}
2730

2831
- run: corepack enable
2932

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@ You can run our packages with vanilla JS, without any bundler, by using a CDN or
9393
```html
9494
<script type="module">
9595
import { HfInference } from 'https://cdn.jsdelivr.net/npm/@huggingface/[email protected]/+esm';
96-
import { createRepo, commit, deleteRepo, listFiles } from "https://cdn.jsdelivr.net/npm/@huggingface/hub@0.18.1/+esm";
96+
import { createRepo, commit, deleteRepo, listFiles } from "https://cdn.jsdelivr.net/npm/@huggingface/hub@0.18.2/+esm";
9797
</script>
9898
```
9999

packages/hub/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
{
22
"name": "@huggingface/hub",
33
"packageManager": "[email protected]",
4-
"version": "0.18.1",
4+
"version": "0.18.2",
55
"description": "Utilities to interact with the Hugging Face hub",
66
"repository": "https://github.com/huggingface/huggingface.js.git",
77
"publishConfig": {

packages/tasks/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
{
22
"name": "@huggingface/tasks",
33
"packageManager": "[email protected]",
4-
"version": "0.12.20",
4+
"version": "0.12.21",
55
"description": "List of ML tasks for huggingface.co/tasks",
66
"repository": "https://github.com/huggingface/huggingface.js.git",
77
"publishConfig": {

packages/tasks/src/local-apps.ts

Lines changed: 43 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,11 +77,18 @@ function isMarlinModel(model: ModelData): boolean {
7777
function isTransformersModel(model: ModelData): boolean {
7878
return model.tags.includes("transformers");
7979
}
80+
function isTgiModel(model: ModelData): boolean {
81+
return model.tags.includes("text-generation-inference");
82+
}
8083

8184
function isLlamaCppGgufModel(model: ModelData) {
8285
return !!model.gguf?.context_length;
8386
}
8487

88+
function isMlxModel(model: ModelData) {
89+
return model.tags.includes("mlx");
90+
}
91+
8592
const snippetLlamacpp = (model: ModelData, filepath?: string): LocalAppSnippet[] => {
8693
const command = (binary: string) =>
8794
[
@@ -197,6 +204,34 @@ const snippetVllm = (model: ModelData): LocalAppSnippet[] => {
197204
},
198205
];
199206
};
207+
const snippetTgi = (model: ModelData): LocalAppSnippet[] => {
208+
const runCommand = [
209+
"# Call the server using curl:",
210+
`curl -X POST "http://localhost:8000/v1/chat/completions" \\`,
211+
` -H "Content-Type: application/json" \\`,
212+
` --data '{`,
213+
` "model": "${model.id}",`,
214+
` "messages": [`,
215+
` {"role": "user", "content": "What is the capital of France?"}`,
216+
` ]`,
217+
` }'`,
218+
];
219+
return [
220+
{
221+
title: "Use Docker images",
222+
setup: [
223+
"# Deploy with docker on Linux:",
224+
`docker run --gpus all \\`,
225+
` -v ~/.cache/huggingface:/root/.cache/huggingface \\`,
226+
` -e HF_TOKEN="<secret>" \\`,
227+
` -p 8000:80 \\`,
228+
` ghcr.io/huggingface/text-generation-inference:latest \\`,
229+
` --model-id ${model.id}`,
230+
].join("\n"),
231+
content: [runCommand.join("\n")],
232+
},
233+
];
234+
};
200235

201236
/**
202237
* Add your new local app here.
@@ -238,11 +273,18 @@ export const LOCAL_APPS = {
238273
(model.pipeline_tag === "text-generation" || model.pipeline_tag === "image-text-to-text"),
239274
snippet: snippetVllm,
240275
},
276+
tgi: {
277+
prettyLabel: "TGI",
278+
docsUrl: "https://huggingface.co/docs/text-generation-inference/",
279+
mainTask: "text-generation",
280+
displayOnModelPage: isTgiModel,
281+
snippet: snippetTgi,
282+
},
241283
lmstudio: {
242284
prettyLabel: "LM Studio",
243285
docsUrl: "https://lmstudio.ai",
244286
mainTask: "text-generation",
245-
displayOnModelPage: isLlamaCppGgufModel,
287+
displayOnModelPage: (model) => isLlamaCppGgufModel(model) || isMlxModel(model),
246288
deeplink: (model, filepath) =>
247289
new URL(`lmstudio://open_from_hf?model=${model.id}${filepath ? `&file=${filepath}` : ""}`),
248290
},

packages/tasks/src/model-libraries-snippets.ts

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -139,6 +139,32 @@ depth = model.infer_image(raw_img) # HxW raw depth map in numpy
139139
];
140140
};
141141

142+
export const depth_pro = (model: ModelData): string[] => {
143+
const installSnippet = `# Download checkpoint
144+
pip install huggingface-hub
145+
huggingface-cli download --local-dir checkpoints ${model.id}`;
146+
147+
const inferenceSnippet = `import depth_pro
148+
149+
# Load model and preprocessing transform
150+
model, transform = depth_pro.create_model_and_transforms()
151+
model.eval()
152+
153+
# Load and preprocess an image.
154+
image, _, f_px = depth_pro.load_rgb("example.png")
155+
image = transform(image)
156+
157+
# Run inference.
158+
prediction = model.infer(image, f_px=f_px)
159+
160+
# Results: 1. Depth in meters
161+
depth = prediction["depth"]
162+
# Results: 2. Focal length in pixels
163+
focallength_px = prediction["focallength_px"]`;
164+
165+
return [installSnippet, inferenceSnippet];
166+
};
167+
142168
const diffusersDefaultPrompt = "Astronaut in a jungle, cold color palette, muted colors, detailed, 8k";
143169

144170
const diffusers_default = (model: ModelData) => [

packages/tasks/src/model-libraries.ts

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -165,6 +165,14 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
165165
filter: false,
166166
countDownloads: `path_extension:"pth"`,
167167
},
168+
"depth-pro": {
169+
prettyLabel: "Depth Pro",
170+
repoName: "Depth Pro",
171+
repoUrl: "https://github.com/apple/ml-depth-pro",
172+
countDownloads: `path_extension:"pt"`,
173+
snippets: snippets.depth_pro,
174+
filter: false,
175+
},
168176
diffree: {
169177
prettyLabel: "Diffree",
170178
repoName: "Diffree",
@@ -658,6 +666,13 @@ export const MODEL_LIBRARIES_UI_ELEMENTS = {
658666
filter: true,
659667
countDownloads: `path:"models/default.zip"`,
660668
},
669+
"f5-tts": {
670+
prettyLabel: "F5-TTS",
671+
repoName: "F5-TTS",
672+
repoUrl: "https://github.com/SWivid/F5-TTS",
673+
filter: false,
674+
countDownloads: `path_extension:"safetensors" OR path_extension:"pt"`,
675+
},
661676
tensorflowtts: {
662677
prettyLabel: "TensorFlowTTS",
663678
repoName: "TensorFlowTTS",

0 commit comments

Comments (0)