Skip to content

Commit 81c5a4f

Browse files
committed
js snippets tweaks
1 parent c4f93ae commit 81c5a4f

File tree

1 file changed

+26
-26
lines changed
  • packages/tasks/src/snippets

1 file changed

+26
-26
lines changed

packages/tasks/src/snippets/js.ts

Lines changed: 26 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -27,21 +27,23 @@ export const snippetBasic = (
2727
return [
2828
...(model.pipeline_tag && model.pipeline_tag in HFJS_METHODS
2929
? [
30-
{
31-
client: "huggingface.js",
32-
content: `\
30+
{
31+
client: "huggingface.js",
32+
content: `\
3333
import { HfInference } from "@huggingface/inference";
3434
3535
const client = new HfInference("${accessToken || `{API_TOKEN}`}");
3636
37-
const image = await client.${HFJS_METHODS[model.pipeline_tag]}({
37+
const output = await client.${HFJS_METHODS[model.pipeline_tag]}({
3838
model: "${model.id}",
3939
inputs: ${getModelInputSnippet(model)},
4040
provider: "${provider}",
4141
});
42+
43+
console.log(output)
4244
`,
43-
},
44-
]
45+
},
46+
]
4547
: []),
4648
{
4749
client: "fetch",
@@ -212,8 +214,8 @@ export const snippetZeroShotClassification = (model: ModelDataMinimal, accessTok
212214
}
213215
214216
query({"inputs": ${getModelInputSnippet(
215-
model
216-
)}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}}).then((response) => {
217+
model
218+
)}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}}).then((response) => {
217219
console.log(JSON.stringify(response));
218220
});`,
219221
},
@@ -240,14 +242,15 @@ const image = await client.textToImage({
240242
parameters: { num_inference_steps: 5 },
241243
provider: "${provider}",
242244
});
245+
243246
/// Use the generated image (it's a Blob)
244247
`,
245248
},
246249
...(provider === "hf-inference"
247250
? [
248-
{
249-
client: "fetch",
250-
content: `async function query(data) {
251+
{
252+
client: "fetch",
253+
content: `async function query(data) {
251254
const response = await fetch(
252255
"https://api-inference.huggingface.co/models/${model.id}",
253256
{
@@ -265,8 +268,8 @@ const image = await client.textToImage({
265268
query({"inputs": ${getModelInputSnippet(model)}}).then((response) => {
266269
// Use image
267270
});`,
268-
},
269-
]
271+
},
272+
]
270273
: []),
271274
];
272275
};
@@ -335,22 +338,19 @@ export const snippetAutomaticSpeechRecognition = (
335338
client: "huggingface.js",
336339
setup: `npm install @huggingface/inference`,
337340
content: `\
338-
import { automaticSpeechRecognition } from "@huggingface/inference";
341+
import { HfInference } from "@huggingface/inference";
339342
340-
async function infer(filename, parameters) {
341-
const data = fs.readFileSync(filename);
342-
return await automaticSpeechRecognition({
343-
data,
344-
parameters,
345-
model: "${model.id}",
346-
provider: "${provider}",
347-
accessToken: "${accessToken}",
348-
});
349-
}
343+
const client = new HfInference("${accessToken || `{API_TOKEN}`}");
350344
351-
infer(${getModelInputSnippet(model)}).then((output) => {
352-
console.log("Transcription: ", output.text);
345+
const data = fs.readFileSync(${getModelInputSnippet(model)});
346+
347+
const output = await client.automaticSpeechRecognition({
348+
data,
349+
model: "${model.id}",
350+
provider: "${provider}",
353351
});
352+
353+
console.log(output);
354354
`,
355355
},
356356
...(provider === "hf-inference" ? snippetFile(model, accessToken, provider) : []),

0 commit comments

Comments (0)