Skip to content

Commit f1bbdd5

Browse files
author
machineuser
committed
🔖 @huggingface/inference v1.7.0
1 parent b033ad1 commit f1bbdd5

28 files changed

+517
-90
lines changed

‎README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ You can run our packages with vanilla JS, without any bundler, by using a CDN or
4848
```html
4949

5050
<script type="module">
51-
import { HfInference } from 'https://cdn.jsdelivr.net/npm/@huggingface/inference@1.6.3/+esm';
51+
import { HfInference } from 'https://cdn.jsdelivr.net/npm/@huggingface/inference@1.7.0/+esm';
5252
import { createRepo, commit, deleteRepo, listFiles } from "https://cdn.jsdelivr.net/npm/@huggingface/[email protected]/+esm";
5353
</script>
5454
```

‎docs/_toctree.yml

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -62,6 +62,10 @@
6262
sections:
6363
- title: HfInference
6464
local: inference/classes/HfInference
65+
- title: Enums
66+
sections:
67+
- title: TextGenerationStreamFinishReason
68+
local: inference/enums/TextGenerationStreamFinishReason
6569
- title: Interfaces
6670
sections:
6771
- title: Args
@@ -88,6 +92,16 @@
8892
local: inference/interfaces/TableQuestionAnswerReturn
8993
- title: TextGenerationReturn
9094
local: inference/interfaces/TextGenerationReturn
95+
- title: TextGenerationStreamBestOfSequence
96+
local: inference/interfaces/TextGenerationStreamBestOfSequence
97+
- title: TextGenerationStreamDetails
98+
local: inference/interfaces/TextGenerationStreamDetails
99+
- title: TextGenerationStreamPrefillToken
100+
local: inference/interfaces/TextGenerationStreamPrefillToken
101+
- title: TextGenerationStreamReturn
102+
local: inference/interfaces/TextGenerationStreamReturn
103+
- title: TextGenerationStreamToken
104+
local: inference/interfaces/TextGenerationStreamToken
91105
- title: TokenClassificationReturnValue
92106
local: inference/interfaces/TokenClassificationReturnValue
93107
- title: TranslationReturn

‎docs/index.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ You can run our packages with vanilla JS, without any bundler, by using a CDN or
4848
```html
4949

5050
<script type="module">
51-
import { HfInference } from 'https://cdn.jsdelivr.net/npm/@huggingface/inference@1.6.3/+esm';
51+
import { HfInference } from 'https://cdn.jsdelivr.net/npm/@huggingface/inference@1.7.0/+esm';
5252
import { createRepo, commit, deleteRepo, listFiles } from "https://cdn.jsdelivr.net/npm/@huggingface/[email protected]/+esm";
5353
</script>
5454
```

‎docs/inference/README.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,13 @@ await hf.textGeneration({
7272
inputs: 'The answer to the universe is'
7373
})
7474

75+
for await (const output of hf.textGenerationStream({
76+
model: "google/flan-t5-xxl",
77+
inputs: 'repeat "one two three four"'
78+
})) {
79+
console.log(output.token.text, output.generated_text);
80+
}
81+
7582
await hf.tokenClassification({
7683
model: 'dbmdz/bert-large-cased-finetuned-conll03-english',
7784
inputs: 'My name is Sarah Jessica Parker but you can call me Jessica'

‎docs/inference/classes/HfInference.md

Lines changed: 110 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515

1616
#### Defined in
1717

18-
[HfInference.ts:513](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L513)
18+
[HfInference.ts:597](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L597)
1919

2020
## Properties
2121

@@ -25,7 +25,7 @@
2525

2626
#### Defined in
2727

28-
[HfInference.ts:510](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L510)
28+
[HfInference.ts:594](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L594)
2929

3030
___
3131

@@ -35,7 +35,7 @@ ___
3535

3636
#### Defined in
3737

38-
[HfInference.ts:511](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L511)
38+
[HfInference.ts:595](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L595)
3939

4040
## Methods
4141

@@ -59,7 +59,7 @@ Recommended model: superb/hubert-large-superb-er
5959

6060
#### Defined in
6161

62-
[HfInference.ts:737](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L737)
62+
[HfInference.ts:831](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L831)
6363

6464
___
6565

@@ -83,7 +83,7 @@ Recommended model (english language): facebook/wav2vec2-large-960h-lv60-self
8383

8484
#### Defined in
8585

86-
[HfInference.ts:718](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L718)
86+
[HfInference.ts:812](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L812)
8787

8888
___
8989

@@ -106,7 +106,7 @@ This task corresponds to any chatbot like structure. Models tend to have shorter
106106

107107
#### Defined in
108108

109-
[HfInference.ts:688](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L688)
109+
[HfInference.ts:782](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L782)
110110

111111
___
112112

@@ -129,7 +129,7 @@ This task reads some text and outputs raw float values, that are usually consume
129129

130130
#### Defined in
131131

132-
[HfInference.ts:709](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L709)
132+
[HfInference.ts:803](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L803)
133133

134134
___
135135

@@ -152,7 +152,7 @@ Tries to fill in a hole with a missing word (token to be precise). That’s the
152152

153153
#### Defined in
154154

155-
[HfInference.ts:521](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L521)
155+
[HfInference.ts:605](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L605)
156156

157157
___
158158

@@ -176,7 +176,7 @@ Recommended model: google/vit-base-patch16-224
176176

177177
#### Defined in
178178

179-
[HfInference.ts:757](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L757)
179+
[HfInference.ts:851](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L851)
180180

181181
___
182182

@@ -200,7 +200,44 @@ Recommended model: facebook/detr-resnet-50-panoptic
200200

201201
#### Defined in
202202

203-
[HfInference.ts:805](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L805)
203+
[HfInference.ts:899](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L899)
204+
205+
___
206+
207+
### makeRequestOptions
208+
209+
â–¸ `Private` **makeRequestOptions**(`args`, `options?`): `Object`
210+
211+
Helper that prepares request arguments
212+
213+
#### Parameters
214+
215+
| Name | Type |
216+
| :------ | :------ |
217+
| `args` | [`Args`](../interfaces/Args) & { `data?`: `ArrayBuffer` \| `Blob` ; `stream?`: `boolean` } |
218+
| `options?` | [`Options`](../interfaces/Options) & { `binary?`: `boolean` ; `blob?`: `boolean` ; `includeCredentials?`: `boolean` } |
219+
220+
#### Returns
221+
222+
`Object`
223+
224+
| Name | Type |
225+
| :------ | :------ |
226+
| `info` | `RequestInit` |
227+
| `mergedOptions` | { `binary?`: `boolean` ; `blob?`: `boolean` ; `dont_load_model?`: `boolean` ; `includeCredentials?`: `boolean` ; `retry_on_error?`: `boolean` ; `use_cache?`: `boolean` ; `use_gpu?`: `boolean` ; `wait_for_model?`: `boolean` } |
228+
| `mergedOptions.binary?` | `boolean` |
229+
| `mergedOptions.blob?` | `boolean` |
230+
| `mergedOptions.dont_load_model?` | `boolean` |
231+
| `mergedOptions.includeCredentials?` | `boolean` |
232+
| `mergedOptions.retry_on_error?` | `boolean` |
233+
| `mergedOptions.use_cache?` | `boolean` |
234+
| `mergedOptions.use_gpu?` | `boolean` |
235+
| `mergedOptions.wait_for_model?` | `boolean` |
236+
| `url` | `string` |
237+
238+
#### Defined in
239+
240+
[HfInference.ts:934](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L934)
204241

205242
___
206243

@@ -224,7 +261,7 @@ Recommended model: facebook/detr-resnet-50
224261

225262
#### Defined in
226263

227-
[HfInference.ts:777](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L777)
264+
[HfInference.ts:871](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L871)
228265

229266
___
230267

@@ -247,7 +284,7 @@ Want to have a nice know-it-all bot that can answer any question?. Recommended m
247284

248285
#### Defined in
249286

250-
[HfInference.ts:555](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L555)
287+
[HfInference.ts:639](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L639)
251288

252289
___
253290

@@ -274,7 +311,36 @@ ___
274311

275312
#### Defined in
276313

277-
[HfInference.ts:837](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L837)
314+
[HfInference.ts:986](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L986)
315+
316+
___
317+
318+
### streamingRequest
319+
320+
â–¸ **streamingRequest**<`T`\>(`args`, `options?`): `AsyncGenerator`<`T`, `any`, `unknown`\>
321+
322+
Make request that uses server-sent events and returns response as a generator
323+
324+
#### Type parameters
325+
326+
| Name |
327+
| :------ |
328+
| `T` |
329+
330+
#### Parameters
331+
332+
| Name | Type |
333+
| :------ | :------ |
334+
| `args` | [`Args`](../interfaces/Args) & { `data?`: `ArrayBuffer` \| `Blob` } |
335+
| `options?` | [`Options`](../interfaces/Options) & { `binary?`: `boolean` ; `blob?`: `boolean` ; `includeCredentials?`: `boolean` } |
336+
337+
#### Returns
338+
339+
`AsyncGenerator`<`T`, `any`, `unknown`\>
340+
341+
#### Defined in
342+
343+
[HfInference.ts:1022](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L1022)
278344

279345
___
280346

@@ -297,7 +363,7 @@ This task is well known to summarize longer text into shorter text. Be careful,
297363

298364
#### Defined in
299365

300-
[HfInference.ts:543](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L543)
366+
[HfInference.ts:627](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L627)
301367

302368
___
303369

@@ -320,7 +386,7 @@ Don’t know SQL? Don’t want to dive into a large spreadsheet? Ask questions i
320386

321387
#### Defined in
322388

323-
[HfInference.ts:573](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L573)
389+
[HfInference.ts:657](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L657)
324390

325391
___
326392

@@ -343,7 +409,7 @@ Usually used for sentiment-analysis this will output the likelihood of classes o
343409

344410
#### Defined in
345411

346-
[HfInference.ts:596](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L596)
412+
[HfInference.ts:680](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L680)
347413

348414
___
349415

@@ -366,7 +432,30 @@ Use to continue text from a prompt. This is a very generic task. Recommended mod
366432

367433
#### Defined in
368434

369-
[HfInference.ts:609](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L609)
435+
[HfInference.ts:693](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L693)
436+
437+
___
438+
439+
### textGenerationStream
440+
441+
â–¸ **textGenerationStream**(`args`, `options?`): `AsyncGenerator`<[`TextGenerationStreamReturn`](../interfaces/TextGenerationStreamReturn), `any`, `unknown`\>
442+
443+
Use to continue text from a prompt. Same as `textGeneration` but returns generator that can be read one token at a time
444+
445+
#### Parameters
446+
447+
| Name | Type |
448+
| :------ | :------ |
449+
| `args` | [`TextGenerationArgs`](../modules#textgenerationargs) |
450+
| `options?` | [`Options`](../interfaces/Options) |
451+
452+
#### Returns
453+
454+
`AsyncGenerator`<[`TextGenerationStreamReturn`](../interfaces/TextGenerationStreamReturn), `any`, `unknown`\>
455+
456+
#### Defined in
457+
458+
[HfInference.ts:705](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L705)
370459

371460
___
372461

@@ -390,7 +479,7 @@ Recommended model: stabilityai/stable-diffusion-2
390479

391480
#### Defined in
392481

393-
[HfInference.ts:825](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L825)
482+
[HfInference.ts:919](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L919)
394483

395484
___
396485

@@ -413,7 +502,7 @@ Usually used for sentence parsing, either grammatical, or Named Entity Recogniti
413502

414503
#### Defined in
415504

416-
[HfInference.ts:621](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L621)
505+
[HfInference.ts:715](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L715)
417506

418507
___
419508

@@ -436,7 +525,7 @@ This task is well known to translate text from one language to another. Recommen
436525

437526
#### Defined in
438527

439-
[HfInference.ts:647](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L647)
528+
[HfInference.ts:741](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L741)
440529

441530
___
442531

@@ -459,4 +548,4 @@ This task is super useful to try out classification with zero code, you simply p
459548

460549
#### Defined in
461550

462-
[HfInference.ts:659](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L659)
551+
[HfInference.ts:753](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L753)
Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
# Enumeration: TextGenerationStreamFinishReason
2+
3+
## Enumeration Members
4+
5+
### EndOfSequenceToken
6+
7+
• **EndOfSequenceToken** = ``"eos_token"``
8+
9+
the model generated its end of sequence token
10+
11+
#### Defined in
12+
13+
[HfInference.ts:275](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L275)
14+
15+
___
16+
17+
### Length
18+
19+
• **Length** = ``"length"``
20+
21+
number of generated tokens == `max_new_tokens`
22+
23+
#### Defined in
24+
25+
[HfInference.ts:273](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L273)
26+
27+
___
28+
29+
### StopSequence
30+
31+
• **StopSequence** = ``"stop_sequence"``
32+
33+
the model generated a text included in `stop_sequences`
34+
35+
#### Defined in
36+
37+
[HfInference.ts:277](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L277)

‎docs/inference/interfaces/Args.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,4 +8,4 @@
88

99
#### Defined in
1010

11-
[HfInference.ts:28](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L28)
11+
[HfInference.ts:32](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L32)

‎docs/inference/interfaces/AudioClassificationReturnValue.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ The label for the class (model specific)
1010

1111
#### Defined in
1212

13-
[HfInference.ts:485](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L485)
13+
[HfInference.ts:569](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L569)
1414

1515
___
1616

@@ -22,4 +22,4 @@ A float that represents how likely it is that the audio file belongs to this cla
2222

2323
#### Defined in
2424

25-
[HfInference.ts:490](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L490)
25+
[HfInference.ts:574](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L574)

‎docs/inference/interfaces/AutomaticSpeechRecognitionReturn.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,4 +10,4 @@ The text that was recognized from the audio
1010

1111
#### Defined in
1212

13-
[HfInference.ts:471](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L471)
13+
[HfInference.ts:555](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L555)

0 commit comments

Comments
 (0)