Skip to content

Commit 7cfb641

Browse files
committed
separate snippets
1 parent 91fffb0 commit 7cfb641

File tree

3 files changed

+16
-6
lines changed

3 files changed

+16
-6
lines changed

docs/api-inference/tasks/chat-completion.md

Lines changed: 8 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -79,6 +79,7 @@ curl 'https://api-inference.huggingface.co/models/google/gemma-2-2b-it/v1/chat/c
7979
</curl>
8080

8181
<python>
82+
With huggingface_hub client:
8283
```py
8384
from huggingface_hub import InferenceClient
8485

@@ -102,6 +103,7 @@ for chunk in stream:
102103
print(chunk.choices[0].delta.content, end="")
103104
```
104105

106+
With openai client:
105107
```py
106108
from openai import OpenAI
107109

@@ -132,6 +134,7 @@ To use the Python client, see `huggingface_hub`'s [package reference](https://hu
132134
</python>
133135

134136
<js>
137+
With huggingface_hub client:
135138
```js
136139
import { HfInference } from "@huggingface/inference"
137140

@@ -159,6 +162,7 @@ for await (const chunk of stream) {
159162
}
160163
```
161164

165+
With openai client:
162166
```js
163167
import { OpenAI } from "openai"
164168

@@ -233,6 +237,7 @@ curl 'https://api-inference.huggingface.co/models/meta-llama/Llama-3.2-11B-Visio
233237
</curl>
234238

235239
<python>
240+
With huggingface_hub client:
236241
```py
237242
from huggingface_hub import InferenceClient
238243

@@ -267,6 +272,7 @@ for chunk in stream:
267272
print(chunk.choices[0].delta.content, end="")
268273
```
269274

275+
With openai client:
270276
```py
271277
from openai import OpenAI
272278

@@ -308,6 +314,7 @@ To use the Python client, see `huggingface_hub`'s [package reference](https://hu
308314
</python>
309315

310316
<js>
317+
With huggingface_hub client:
311318
```js
312319
import { HfInference } from "@huggingface/inference"
313320

@@ -346,6 +353,7 @@ for await (const chunk of stream) {
346353
}
347354
```
348355

356+
With openai client:
349357
```js
350358
import { OpenAI } from "openai"
351359

docs/api-inference/tasks/image-text-to-text.md

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -45,6 +45,7 @@ curl https://api-inference.huggingface.co/models/meta-llama/Llama-3.2-11B-Vision
4545
</curl>
4646

4747
<python>
48+
With huggingface_hub client:
4849
```py
4950
import requests
5051

@@ -68,6 +69,7 @@ for chunk in stream:
6869
print(chunk.choices[0].delta.content, end="")
6970
```
7071

72+
With openai client:
7173
```py
7274
import requests
7375

scripts/api-inference/scripts/generate.ts

Lines changed: 6 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -100,22 +100,22 @@ const TASKS_DATA = (await response.json()) as any;
100100
///////////////////////
101101

102102
const formatSnippets = (result: snippets.types.InferenceSnippet | snippets.types.InferenceSnippet[], defaultClient: string, language: string): string => {
103-
// For single snippet, return just the content
103+
// For single snippet, just wrap with code block
104104
if (!Array.isArray(result) || result.length === 1) {
105105
const snippet = Array.isArray(result) ? result[0] : result;
106106
return `\`\`\`${language}\n${snippet.content}\n\`\`\``;
107107
}
108108

109-
// For multiple snippets, wrap each one in its own code block
109+
// For multiple snippets, add description and wrap each one
110110
return result
111-
.map(snippet =>
112-
`\`\`\`${language}\n${snippet.content}\n\`\`\``
113-
)
111+
.map(snippet => {
112+
const client = snippet.client || defaultClient;
113+
return `With ${client} client:\n\`\`\`${language}\n${snippet.content}\n\`\`\``;
114+
})
114115
.join('\n\n');
115116
};
116117

117118

118-
119119
const GET_SNIPPET_FN = {
120120
curl: (modelData: any, token: string) => {
121121
const result = snippets.curl.getCurlInferenceSnippet(modelData, token);

0 commit comments

Comments (0)