Commit fd649bd

Fix huggingface_hub snippets (huggingface#1448)
Fixes huggingface#1440. Thank you @tomaarsen for flagging this! This PR addresses all the incorrect snippets mentioned in the issue. I've added example snippets for `feature-extraction`, `question-answering`, `table-question-answering`, and `text-classification` to ensure everything is now correct.
1 parent d7a6e8f commit fd649bd

14 files changed (+144, -5 lines)

packages/inference/src/snippets/getInferenceSnippets.ts

Lines changed: 12 additions & 2 deletions
@@ -285,6 +285,16 @@ const prepareConversationalInput = (
 	};
 };
 
+const prepareQuestionAnsweringInput = (model: ModelDataMinimal): object => {
+	const data = JSON.parse(getModelInputSnippet(model) as string);
+	return { question: data.question, context: data.context };
+};
+
+const prepareTableQuestionAnsweringInput = (model: ModelDataMinimal): object => {
+	const data = JSON.parse(getModelInputSnippet(model) as string);
+	return { query: data.query, table: JSON.stringify(data.table) };
+};
+
 const snippets: Partial<
 	Record<
 		PipelineType,
@@ -309,12 +319,12 @@ const snippets: Partial<
 	"image-to-image": snippetGenerator("imageToImage", prepareImageToImageInput),
 	"image-to-text": snippetGenerator("basicImage"),
 	"object-detection": snippetGenerator("basicImage"),
-	"question-answering": snippetGenerator("basic"),
+	"question-answering": snippetGenerator("questionAnswering", prepareQuestionAnsweringInput),
 	"sentence-similarity": snippetGenerator("basic"),
 	summarization: snippetGenerator("basic"),
 	"tabular-classification": snippetGenerator("tabular"),
 	"tabular-regression": snippetGenerator("tabular"),
-	"table-question-answering": snippetGenerator("basic"),
+	"table-question-answering": snippetGenerator("tableQuestionAnswering", prepareTableQuestionAnsweringInput),
 	"text-classification": snippetGenerator("basic"),
 	"text-generation": snippetGenerator("basic"),
 	"text-to-audio": snippetGenerator("textToAudio"),
Lines changed: 1 addition & 1 deletion (huggingface_hub `basic` snippet template)
@@ -1,4 +1,4 @@
 result = client.{{ methodName }}(
-    inputs={{ inputs.asObj.inputs }},
+    {{ inputs.asObj.inputs }},
     model="{{ model.id }}",
 )
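The one-line change above drops the inputs= keyword so the rendered snippet passes the text positionally, which matches the huggingface_hub InferenceClient task methods: their first parameter is the input itself, not an inputs keyword. A minimal sketch of the corrected call shape for text classification, one of the tasks that uses this basic template (model id, text, and token are illustrative):

from huggingface_hub import InferenceClient

client = InferenceClient(
    provider="hf-inference",
    api_key="api_token",  # placeholder token
)

# The text is passed positionally; these task methods do not define an
# `inputs=` keyword, which is what the old template emitted.
result = client.text_classification(
    "I like you. I love you.",
    model="distilbert/distilbert-base-uncased-finetuned-sst-2-english",
)
print(result)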
Lines changed: 5 additions & 0 deletions (new huggingface_hub `questionAnswering` snippet template)
@@ -0,0 +1,5 @@
+answer = client.question_answering(
+    question="{{ inputs.asObj.question }}",
+    context="{{ inputs.asObj.context }}",
+    model="{{ model.id }}",
+)
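Rendered for a concrete model, the new questionAnswering template above produces a call along these lines (question, context, and token are illustrative; the model id is the one used in the fixture test case added below):

from huggingface_hub import InferenceClient

client = InferenceClient(
    provider="hf-inference",
    api_key="api_token",  # placeholder token
)

# question and context are filled in from the model's example input snippet.
answer = client.question_answering(
    question="What is my name?",
    context="My name is Clara and I live in Berkeley.",
    model="google-bert/bert-large-uncased-whole-word-masking-finetuned-squad",
)
print(answer)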
Lines changed: 5 additions & 0 deletions (new huggingface_hub `tableQuestionAnswering` snippet template)
@@ -0,0 +1,5 @@
+answer = client.question_answering(
+    query="{{ inputs.asObj.query }}",
+    table={{ inputs.asObj.table }},
+    model="{{ model.id }}",
+)
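For reference, huggingface_hub also exposes a dedicated table_question_answering method for this task; the sketch below calls it directly and is not a rendering of the template above (table contents, query, model id, and token are all illustrative):

from huggingface_hub import InferenceClient

client = InferenceClient(
    provider="hf-inference",
    api_key="api_token",  # placeholder token
)

# The table is a mapping of column names to equally long lists of cell values.
answer = client.table_question_answering(
    table={
        "Repository": ["Transformers", "Datasets", "Tokenizers"],
        "Stars": ["36542", "4512", "3934"],
    },
    query="How many stars does the transformers repository have?",
    model="google/tapas-base-finetuned-wtq",
)
print(answer)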

packages/tasks-gen/scripts/generate-snippets-fixtures.ts

Lines changed: 33 additions & 0 deletions
@@ -250,6 +250,39 @@ const TEST_CASES: {
 		},
 		providers: ["fal-ai"],
 	},
+	{
+		testName: "feature-extraction",
+		task: "feature-extraction",
+		model: {
+			id: "intfloat/multilingual-e5-large-instruct",
+			pipeline_tag: "feature-extraction",
+			tags: [],
+			inference: "",
+		},
+		providers: ["hf-inference"],
+	},
+	{
+		testName: "question-answering",
+		task: "question-answering",
+		model: {
+			id: "google-bert/bert-large-uncased-whole-word-masking-finetuned-squad",
+			pipeline_tag: "question-answering",
+			tags: [],
+			inference: "",
+		},
+		providers: ["hf-inference"],
+	},
+	{
+		testName: "table-question-answering",
+		task: "table-question-answering",
+		model: {
+			id: "google-bert/bert-large-uncased-whole-word-masking-finetuned-squad",
+			pipeline_tag: "table-question-answering",
+			tags: [],
+			inference: "",
+		},
+		providers: ["hf-inference"],
+	},
 ] as const;
 
 const rootDirFinder = (): string => {

packages/tasks-gen/snippets-fixtures/basic-snippet--token-classification/python/huggingface_hub/0.hf-inference.py

Lines changed: 1 addition & 1 deletion
@@ -6,6 +6,6 @@
 )
 
 result = client.token_classification(
-    inputs="My name is Sarah Jessica Parker but you can call me Jessica",
+    "My name is Sarah Jessica Parker but you can call me Jessica",
     model="FacebookAI/xlm-roberta-large-finetuned-conll03-english",
 )
Lines changed: 19 additions & 0 deletions (new feature-extraction fixture: JavaScript fetch snippet, hf-inference)
@@ -0,0 +1,19 @@
+async function query(data) {
+	const response = await fetch(
+		"https://router.huggingface.co/hf-inference/models/intfloat/multilingual-e5-large-instruct/pipeline/feature-extraction",
+		{
+			headers: {
+				Authorization: "Bearer api_token",
+				"Content-Type": "application/json",
+			},
+			method: "POST",
+			body: JSON.stringify(data),
+		}
+	);
+	const result = await response.json();
+	return result;
+}
+
+query({ inputs: "Today is a sunny day and I will get some ice cream." }).then((response) => {
+	console.log(JSON.stringify(response));
+});
Lines changed: 11 additions & 0 deletions (new feature-extraction fixture: @huggingface/inference client snippet)
@@ -0,0 +1,11 @@
+import { InferenceClient } from "@huggingface/inference";
+
+const client = new InferenceClient("api_token");
+
+const output = await client.featureExtraction({
+	model: "intfloat/multilingual-e5-large-instruct",
+	inputs: "Today is a sunny day and I will get some ice cream.",
+	provider: "hf-inference",
+});
+
+console.log(output);
Lines changed: 11 additions & 0 deletions (new feature-extraction fixture: Python huggingface_hub snippet)
@@ -0,0 +1,11 @@
+from huggingface_hub import InferenceClient
+
+client = InferenceClient(
+    provider="hf-inference",
+    api_key="api_token",
+)
+
+result = client.feature_extraction(
+    "Today is a sunny day and I will get some ice cream.",
+    model="intfloat/multilingual-e5-large-instruct",
+)
Lines changed: 14 additions & 0 deletions (new feature-extraction fixture: Python requests snippet)
@@ -0,0 +1,14 @@
+import requests
+
+API_URL = "https://router.huggingface.co/hf-inference/models/intfloat/multilingual-e5-large-instruct/pipeline/feature-extraction"
+headers = {
+    "Authorization": "Bearer api_token",
+}
+
+def query(payload):
+    response = requests.post(API_URL, headers=headers, json=payload)
+    return response.json()
+
+output = query({
+    "inputs": "Today is a sunny day and I will get some ice cream.",
+})
