#### Defined in

-[HfInference.ts:513](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L513)
+[HfInference.ts:597](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L597)

## Properties

#### Defined in

-[HfInference.ts:510](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L510)
+[HfInference.ts:594](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L594)

___

#### Defined in

-[HfInference.ts:511](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L511)
+[HfInference.ts:595](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L595)

## Methods

@@ -59,7 +59,7 @@ Recommended model: superb/hubert-large-superb-er
#### Defined in

-[HfInference.ts:737](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L737)
+[HfInference.ts:831](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L831)

___

@@ -83,7 +83,7 @@ Recommended model (english language): facebook/wav2vec2-large-960h-lv60-self
#### Defined in

-[HfInference.ts:718](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L718)
+[HfInference.ts:812](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L812)

___

@@ -106,7 +106,7 @@ This task corresponds to any chatbot like structure. Models tend to have shorter
#### Defined in

-[HfInference.ts:688](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L688)
+[HfInference.ts:782](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L782)

___

@@ -129,7 +129,7 @@ This task reads some text and outputs raw float values, that are usually consume
#### Defined in

-[HfInference.ts:709](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L709)
+[HfInference.ts:803](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L803)

___

@@ -152,7 +152,7 @@ Tries to fill in a hole with a missing word (token to be precise). That’s the
#### Defined in

-[HfInference.ts:521](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L521)
+[HfInference.ts:605](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L605)

___

@@ -176,7 +176,7 @@ Recommended model: google/vit-base-patch16-224
#### Defined in

-[HfInference.ts:757](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L757)
+[HfInference.ts:851](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L851)

___

@@ -200,7 +200,44 @@ Recommended model: facebook/detr-resnet-50-panoptic
#### Defined in

-[HfInference.ts:805](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L805)
+[HfInference.ts:899](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L899)
+
+___
+
+### makeRequestOptions
+
+▸ `Private` **makeRequestOptions**(`args`, `options?`): `Object`
+
+Helper that prepares request arguments
+
+#### Parameters
+
+| Name | Type |
+| :------ | :------ |
+| `args` | [`Args`](../interfaces/Args) & { `data?`: `ArrayBuffer` \| `Blob` ; `stream?`: `boolean` } |
+| `options?` | [`Options`](../interfaces/Options) & { `binary?`: `boolean` ; `blob?`: `boolean` ; `includeCredentials?`: `boolean` } |
+
+#### Returns
+
+`Object`
+
+| Name | Type |
+| :------ | :------ |
+| `info` | `RequestInit` |
+| `mergedOptions` | { `binary?`: `boolean` ; `blob?`: `boolean` ; `dont_load_model?`: `boolean` ; `includeCredentials?`: `boolean` ; `retry_on_error?`: `boolean` ; `use_cache?`: `boolean` ; `use_gpu?`: `boolean` ; `wait_for_model?`: `boolean` } |
+| `mergedOptions.binary?` | `boolean` |
+| `mergedOptions.blob?` | `boolean` |
+| `mergedOptions.dont_load_model?` | `boolean` |
+| `mergedOptions.includeCredentials?` | `boolean` |
+| `mergedOptions.retry_on_error?` | `boolean` |
+| `mergedOptions.use_cache?` | `boolean` |
+| `mergedOptions.use_gpu?` | `boolean` |
+| `mergedOptions.wait_for_model?` | `boolean` |
+| `url` | `string` |
+
+#### Defined in
+
+[HfInference.ts:934](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L934)
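A rough sketch, assuming the documented `{ url, info, mergedOptions }` return shape, of how a caller such as the public `request` method might consume it. `makeRequestOptions` is `Private`, so the object below is hand-written to mirror the return table; the endpoint URL, headers, and token are illustrative assumptions, not values produced by the library.

```ts
// Hypothetical object matching the documented return shape of makeRequestOptions.
const prepared: {
  url: string;
  info: RequestInit;
  mergedOptions: { blob?: boolean; wait_for_model?: boolean };
} = {
  url: "https://api-inference.huggingface.co/models/gpt2", // assumed endpoint format
  info: {
    method: "POST",
    headers: { Authorization: "Bearer hf_xxx" }, // placeholder access token
    body: JSON.stringify({ inputs: "Hello" }),
  },
  mergedOptions: { wait_for_model: true },
};

// A caller would then issue the HTTP request and decode the body
// according to mergedOptions (blob vs. JSON).
const response = await fetch(prepared.url, prepared.info);
const result = prepared.mergedOptions.blob ? await response.blob() : await response.json();
console.log(result);
```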
___

@@ -224,7 +261,7 @@ Recommended model: facebook/detr-resnet-50
#### Defined in

-[HfInference.ts:777](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L777)
+[HfInference.ts:871](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L871)

___

@@ -247,7 +284,7 @@ Want to have a nice know-it-all bot that can answer any question?. Recommended m
#### Defined in

-[HfInference.ts:555](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L555)
+[HfInference.ts:639](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L639)

___

#### Defined in

-[HfInference.ts:837](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L837)
+[HfInference.ts:986](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L986)
+
+___
+
+### streamingRequest
+
+▸ **streamingRequest**<`T`\>(`args`, `options?`): `AsyncGenerator`<`T`, `any`, `unknown`\>
+
+Make request that uses server-sent events and returns response as a generator
+
+#### Type parameters
+
+| Name |
+| :------ |
+| `T` |
+
+#### Parameters
+
+| Name | Type |
+| :------ | :------ |
+| `args` | [`Args`](../interfaces/Args) & { `data?`: `ArrayBuffer` \| `Blob` } |
+| `options?` | [`Options`](../interfaces/Options) & { `binary?`: `boolean` ; `blob?`: `boolean` ; `includeCredentials?`: `boolean` } |
+
+#### Returns
+
+`AsyncGenerator`<`T`, `any`, `unknown`\>
+
+#### Defined in
+
+[HfInference.ts:1022](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L1022)
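A minimal consumption sketch, assuming an `HfInference` instance built with a placeholder access token, an illustrative model name, and a text-generation style payload; the event payload type `T` is whatever JSON the endpoint sends per server-sent event.

```ts
import { HfInference } from "@huggingface/inference";

const hf = new HfInference("hf_xxx"); // placeholder access token

// Request fields beyond Args are forwarded to the endpoint; the payload
// below assumes a text-generation style request.
const args = {
  model: "gpt2", // illustrative model name
  inputs: "The answer to the universe is",
};

// Events are yielded one at a time as they arrive over server-sent events.
for await (const event of hf.streamingRequest<Record<string, unknown>>(args)) {
  console.log(event);
}
```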
___

@@ -297,7 +363,7 @@ This task is well known to summarize longer text into shorter text. Be careful,
#### Defined in

-[HfInference.ts:543](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L543)
+[HfInference.ts:627](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L627)

___

@@ -320,7 +386,7 @@ Don’t know SQL? Don’t want to dive into a large spreadsheet? Ask questions i
#### Defined in

-[HfInference.ts:573](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L573)
+[HfInference.ts:657](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L657)

___

@@ -343,7 +409,7 @@ Usually used for sentiment-analysis this will output the likelihood of classes o
#### Defined in

-[HfInference.ts:596](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L596)
+[HfInference.ts:680](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L680)

___

@@ -366,7 +432,30 @@ Use to continue text from a prompt. This is a very generic task. Recommended mod
#### Defined in

-[HfInference.ts:609](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L609)
+[HfInference.ts:693](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L693)
+
+___
+
+### textGenerationStream
+
+▸ **textGenerationStream**(`args`, `options?`): `AsyncGenerator`<[`TextGenerationStreamReturn`](../interfaces/TextGenerationStreamReturn), `any`, `unknown`\>
+
+Use to continue text from a prompt. Same as `textGeneration` but returns generator that can be read one token at a time
+
+#### Parameters
+
+| Name | Type |
+| :------ | :------ |
+| `args` | [`TextGenerationArgs`](../modules#textgenerationargs) |
+| `options?` | [`Options`](../interfaces/Options) |
+
+#### Returns
+
+`AsyncGenerator`<[`TextGenerationStreamReturn`](../interfaces/TextGenerationStreamReturn), `any`, `unknown`\>
+
+#### Defined in
+
+[HfInference.ts:705](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L705)
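A minimal usage sketch with a placeholder access token and an illustrative model name, assuming the streamed `TextGenerationStreamReturn` events expose `token.text` for each generated token and `generated_text` for the full text on the final event.

```ts
import { HfInference } from "@huggingface/inference";

const hf = new HfInference("hf_xxx"); // placeholder access token

// Read the completion token by token instead of waiting for the full text.
for await (const output of hf.textGenerationStream({
  model: "google/flan-t5-xxl", // illustrative model; must support streaming
  inputs: "The answer to the universe is",
  parameters: { max_new_tokens: 250 },
})) {
  // token.text is the newly generated token; generated_text is non-null on the final event.
  console.log(output.token.text);
  if (output.generated_text) {
    console.log("Full text:", output.generated_text);
  }
}
```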
___

@@ -390,7 +479,7 @@ Recommended model: stabilityai/stable-diffusion-2
#### Defined in

-[HfInference.ts:825](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L825)
+[HfInference.ts:919](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L919)

___

@@ -413,7 +502,7 @@ Usually used for sentence parsing, either grammatical, or Named Entity Recogniti
#### Defined in

-[HfInference.ts:621](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L621)
+[HfInference.ts:715](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L715)

___

@@ -436,7 +525,7 @@ This task is well known to translate text from one language to another. Recommen
#### Defined in

-[HfInference.ts:647](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L647)
+[HfInference.ts:741](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L741)

___

@@ -459,4 +548,4 @@ This task is super useful to try out classification with zero code, you simply p
#### Defined in

-[HfInference.ts:659](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L659)
+[HfInference.ts:753](https://github.com/huggingface/huggingface.js/blob/main/packages/inference/src/HfInference.ts#L753)