Commit 111eca8

update
1 parent 8331942 commit 111eca8

4 files changed: +121 −16 lines

docs/api-inference/tasks/feature-extraction.md

Lines changed: 5 additions & 5 deletions
````diff
@@ -50,7 +50,7 @@ client = InferenceClient(
 
 result = client.feature_extraction(
     inputs="Today is a sunny day and I will get some ice cream.",
-    model="mixedbread-ai/mxbai-embed-large-v1",
+    model="intfloat/multilingual-e5-large-instruct",
 )
 ```
 
@@ -63,7 +63,7 @@ To use the Python `InferenceClient`, see the [package reference](https://hugging
 ```python
 import requests
 
-API_URL = "https://router.huggingface.co/hf-inference/pipeline/feature-extraction/mixedbread-ai/mxbai-embed-large-v1"
+API_URL = "https://router.huggingface.co/hf-inference/pipeline/feature-extraction/intfloat/multilingual-e5-large-instruct"
 headers = {"Authorization": "Bearer hf_***"}
 
 def query(payload):
@@ -83,7 +83,7 @@ output = query({
 ```js
 async function query(data) {
     const response = await fetch(
-        "https://router.huggingface.co/hf-inference/pipeline/feature-extraction/mixedbread-ai/mxbai-embed-large-v1",
+        "https://router.huggingface.co/hf-inference/pipeline/feature-extraction/intfloat/multilingual-e5-large-instruct",
         {
             headers: {
                 Authorization: "Bearer hf_***",
@@ -113,7 +113,7 @@ import { InferenceClient } from "@huggingface/inference";
 const client = new InferenceClient("hf_***");
 
 const output = await client.featureExtraction({
-    model: "mixedbread-ai/mxbai-embed-large-v1",
+    model: "intfloat/multilingual-e5-large-instruct",
     inputs: "Today is a sunny day and I will get some ice cream.",
     provider: "hf-inference",
 });
@@ -128,7 +128,7 @@ To use the JavaScript `InferenceClient`, see `huggingface.js`'s [package referen
 <snippet provider="hf-inference" language="sh" client="curl">
 
 ```sh
-curl https://router.huggingface.co/hf-inference/pipeline/feature-extraction/mixedbread-ai/mxbai-embed-large-v1 \
+curl https://router.huggingface.co/hf-inference/pipeline/feature-extraction/intfloat/multilingual-e5-large-instruct \
     -X POST \
     -H 'Authorization: Bearer hf_***' \
     -H 'Content-Type: application/json' \
````
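The swap above changes only the default checkpoint; the call pattern is unchanged. As a side note (not part of the commit), here is a minimal sketch of putting the returned embeddings to use, comparing two sentences by cosine similarity. It assumes `huggingface_hub` and `numpy` are installed, a real token in place of `hf_***`, and the second sentence is invented for illustration.

```python
# Sketch only (not from the commit): compare two sentences with the new
# default embedding model. Replace hf_*** with a real token.
import numpy as np
from huggingface_hub import InferenceClient

client = InferenceClient(provider="hf-inference", api_key="hf_***")

def embed(text: str) -> np.ndarray:
    # feature_extraction returns a numpy array; ravel() guards against a
    # nested [1 x dim] shape being returned for a single input
    return np.asarray(
        client.feature_extraction(
            text, model="intfloat/multilingual-e5-large-instruct"
        )
    ).ravel()

a = embed("Today is a sunny day and I will get some ice cream.")
b = embed("I plan to buy ice cream because the weather is sunny.")  # invented example

# cosine similarity: close to 1.0 for semantically similar sentences
print(float(a @ b / (np.linalg.norm(a) * np.linalg.norm(b))))
```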

docs/api-inference/tasks/fill-mask.md

Lines changed: 106 additions & 1 deletion
````diff
@@ -30,7 +30,112 @@ Explore all available models and find the one that suits you best [here](https:/
 ### Using the API
 
 
-No snippet available for this task.
+<inferencesnippet>
+
+
+<snippet provider="hf-inference" language="python" client="huggingface_hub">
+
+```python
+from huggingface_hub import InferenceClient
+
+client = InferenceClient(
+    provider="hf-inference",
+    api_key="hf_***",
+)
+
+result = client.fill_mask(
+    inputs="The answer to the universe is [MASK].",
+    model="google-bert/bert-base-multilingual-cased",
+)
+```
+
+</snippet>
+
+To use the Python `InferenceClient`, see the [package reference](https://huggingface.co/docs/huggingface_hub/package_reference/inference_client#huggingface_hub.InferenceClient.).
+
+<snippet provider="hf-inference" language="python" client="requests">
+
+```python
+import requests
+
+API_URL = "https://router.huggingface.co/hf-inference/models/google-bert/bert-base-multilingual-cased"
+headers = {"Authorization": "Bearer hf_***"}
+
+def query(payload):
+    response = requests.post(API_URL, headers=headers, json=payload)
+    return response.json()
+
+output = query({
+    "inputs": "The answer to the universe is [MASK].",
+})
+```
+
+</snippet>
+
+
+<snippet provider="hf-inference" language="js" client="fetch">
+
+```js
+async function query(data) {
+    const response = await fetch(
+        "https://router.huggingface.co/hf-inference/models/google-bert/bert-base-multilingual-cased",
+        {
+            headers: {
+                Authorization: "Bearer hf_***",
+                "Content-Type": "application/json",
+            },
+            method: "POST",
+            body: JSON.stringify(data),
+        }
+    );
+    const result = await response.json();
+    return result;
+}
+
+query({ inputs: "The answer to the universe is [MASK]." }).then((response) => {
+    console.log(JSON.stringify(response));
+});
+```
+
+</snippet>
+
+
+<snippet provider="hf-inference" language="js" client="huggingface.js">
+
+```js
+import { InferenceClient } from "@huggingface/inference";
+
+const client = new InferenceClient("hf_***");
+
+const output = await client.fillMask({
+    model: "google-bert/bert-base-multilingual-cased",
+    inputs: "The answer to the universe is [MASK].",
+    provider: "hf-inference",
+});
+
+console.log(output);
+```
+
+</snippet>
+
+To use the JavaScript `InferenceClient`, see `huggingface.js`'s [package reference](https://huggingface.co/docs/huggingface.js/inference/classes/InferenceClient#).
+
+<snippet provider="hf-inference" language="sh" client="curl">
+
+```sh
+curl https://router.huggingface.co/hf-inference/models/google-bert/bert-base-multilingual-cased \
+    -X POST \
+    -H 'Authorization: Bearer hf_***' \
+    -H 'Content-Type: application/json' \
+    -d '{
+        "inputs": "\"The answer to the universe is [MASK].\""
+    }'
+```
+
+</snippet>
+
+
+</inferencesnippet>
 
 
 
````
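For context (again, not part of the diff): `fill_mask` returns a ranked list of candidates, each carrying a `score`, the filled-in `token_str`, and the completed `sequence`. A minimal sketch of inspecting them, under the same setup as the snippet above:

```python
# Sketch only (not from the commit): print ranked fill-mask candidates.
# Each element has sequence, score, token and token_str fields.
from huggingface_hub import InferenceClient

client = InferenceClient(provider="hf-inference", api_key="hf_***")

results = client.fill_mask(
    "The answer to the universe is [MASK].",
    model="google-bert/bert-base-multilingual-cased",
)

for candidate in results:
    # score is the model's probability for this completion
    print(f"{candidate.token_str!r} -> {candidate.sequence!r} (score={candidate.score:.3f})")
```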

docs/api-inference/tasks/token-classification.md

Lines changed: 5 additions & 5 deletions
````diff
@@ -45,7 +45,7 @@ client = InferenceClient(
 
 result = client.token_classification(
     inputs="My name is Sarah Jessica Parker but you can call me Jessica",
-    model="FacebookAI/xlm-roberta-large-finetuned-conll03-english",
+    model="dslim/bert-base-NER",
 )
 ```
 
@@ -58,7 +58,7 @@ To use the Python `InferenceClient`, see the [package reference](https://hugging
 ```python
 import requests
 
-API_URL = "https://router.huggingface.co/hf-inference/models/FacebookAI/xlm-roberta-large-finetuned-conll03-english"
+API_URL = "https://router.huggingface.co/hf-inference/models/dslim/bert-base-NER"
 headers = {"Authorization": "Bearer hf_***"}
 
 def query(payload):
@@ -78,7 +78,7 @@ output = query({
 ```js
 async function query(data) {
     const response = await fetch(
-        "https://router.huggingface.co/hf-inference/models/FacebookAI/xlm-roberta-large-finetuned-conll03-english",
+        "https://router.huggingface.co/hf-inference/models/dslim/bert-base-NER",
         {
             headers: {
                 Authorization: "Bearer hf_***",
@@ -108,7 +108,7 @@ import { InferenceClient } from "@huggingface/inference";
 const client = new InferenceClient("hf_***");
 
 const output = await client.tokenClassification({
-    model: "FacebookAI/xlm-roberta-large-finetuned-conll03-english",
+    model: "dslim/bert-base-NER",
     inputs: "My name is Sarah Jessica Parker but you can call me Jessica",
     provider: "hf-inference",
 });
@@ -123,7 +123,7 @@ To use the JavaScript `InferenceClient`, see `huggingface.js`'s [package referen
 <snippet provider="hf-inference" language="sh" client="curl">
 
 ```sh
-curl https://router.huggingface.co/hf-inference/models/FacebookAI/xlm-roberta-large-finetuned-conll03-english \
+curl https://router.huggingface.co/hf-inference/models/dslim/bert-base-NER \
     -X POST \
     -H 'Authorization: Bearer hf_***' \
     -H 'Content-Type: application/json' \
````
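As with the other files, only the default checkpoint changes here. A hedged sketch (not from the commit) of reading the output: each returned element carries `entity_group`, `word`, a `score`, and character offsets, with `entity_group` populated because the provider aggregates sub-tokens by default.

```python
# Sketch only (not from the commit): list the entities the new default
# NER model finds in the docs' example sentence.
from huggingface_hub import InferenceClient

client = InferenceClient(provider="hf-inference", api_key="hf_***")

entities = client.token_classification(
    "My name is Sarah Jessica Parker but you can call me Jessica",
    model="dslim/bert-base-NER",
)

for entity in entities:
    # expected to include PER spans such as "Sarah Jessica Parker"
    print(entity.entity_group, repr(entity.word), f"{entity.score:.3f}")
```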

docs/api-inference/tasks/translation.md

Lines changed: 5 additions & 5 deletions
````diff
@@ -45,7 +45,7 @@ client = InferenceClient(
 
 result = client.translation(
     inputs="Меня зовут Вольфганг и я живу в Берлине",
-    model="facebook/mbart-large-50-many-to-many-mmt",
+    model="facebook/nllb-200-distilled-600M",
 )
 ```
 
@@ -58,7 +58,7 @@ To use the Python `InferenceClient`, see the [package reference](https://hugging
 ```python
 import requests
 
-API_URL = "https://router.huggingface.co/hf-inference/models/facebook/mbart-large-50-many-to-many-mmt"
+API_URL = "https://router.huggingface.co/hf-inference/models/facebook/nllb-200-distilled-600M"
 headers = {"Authorization": "Bearer hf_***"}
 
 def query(payload):
@@ -78,7 +78,7 @@ output = query({
 ```js
 async function query(data) {
     const response = await fetch(
-        "https://router.huggingface.co/hf-inference/models/facebook/mbart-large-50-many-to-many-mmt",
+        "https://router.huggingface.co/hf-inference/models/facebook/nllb-200-distilled-600M",
         {
             headers: {
                 Authorization: "Bearer hf_***",
@@ -108,7 +108,7 @@ import { InferenceClient } from "@huggingface/inference";
 const client = new InferenceClient("hf_***");
 
 const output = await client.translation({
-    model: "facebook/mbart-large-50-many-to-many-mmt",
+    model: "facebook/nllb-200-distilled-600M",
    inputs: "Меня зовут Вольфганг и я живу в Берлине",
     provider: "hf-inference",
 });
@@ -123,7 +123,7 @@ To use the JavaScript `InferenceClient`, see `huggingface.js`'s [package referen
 <snippet provider="hf-inference" language="sh" client="curl">
 
 ```sh
-curl https://router.huggingface.co/hf-inference/models/facebook/mbart-large-50-many-to-many-mmt \
+curl https://router.huggingface.co/hf-inference/models/facebook/nllb-200-distilled-600M \
     -X POST \
     -H 'Authorization: Bearer hf_***' \
     -H 'Content-Type: application/json' \
````
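One practical note on this swap (not part of the diff): unlike mBART, NLLB checkpoints identify languages by FLORES-200 codes, and the client's optional `src_lang`/`tgt_lang` parameters accept them. A minimal sketch, assuming `rus_Cyrl` and `eng_Latn` are the right codes for this Russian-to-English example:

```python
# Sketch only (not from the commit): translate Russian to English with the
# new default model. NLLB uses FLORES-200 language codes, and src_lang and
# tgt_lang must be passed together. Replace hf_*** with a real token.
from huggingface_hub import InferenceClient

client = InferenceClient(provider="hf-inference", api_key="hf_***")

result = client.translation(
    "Меня зовут Вольфганг и я живу в Берлине",  # "My name is Wolfgang and I live in Berlin"
    model="facebook/nllb-200-distilled-600M",
    src_lang="rus_Cyrl",
    tgt_lang="eng_Latn",
)

# the client returns a TranslationOutput with a translation_text field
print(result.translation_text)
```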
