Skip to content

Commit 4a6a7f1

Browse files
authored
Merge branch 'main' into add-contexttab
2 parents 43696e8 + 4732260 commit 4a6a7f1

File tree

19 files changed

+52
-95
lines changed

19 files changed

+52
-95
lines changed

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -97,7 +97,7 @@ You can run our packages with vanilla JS, without any bundler, by using a CDN or
9797

9898
```html
9999
<script type="module">
100-
import { InferenceClient } from 'https://cdn.jsdelivr.net/npm/@huggingface/inference@4.0.4/+esm';
100+
import { InferenceClient } from 'https://cdn.jsdelivr.net/npm/@huggingface/inference@4.0.6/+esm';
101101
import { createRepo, commit, deleteRepo, listFiles } from "https://cdn.jsdelivr.net/npm/@huggingface/[email protected]/+esm";
102102
</script>
103103
```

packages/inference/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@huggingface/inference",
3-
"version": "4.0.4",
3+
"version": "4.0.6",
44
"packageManager": "[email protected]",
55
"license": "MIT",
66
"author": "Hugging Face and Tim Mikeladze <[email protected]>",

packages/inference/src/lib/getInferenceProviderMapping.ts

Lines changed: 1 addition & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -2,23 +2,12 @@ import type { WidgetType } from "@huggingface/tasks";
22
import { HF_HUB_URL } from "../config.js";
33
import { HARDCODED_MODEL_INFERENCE_MAPPING } from "../providers/consts.js";
44
import { EQUIVALENT_SENTENCE_TRANSFORMERS_TASKS } from "../providers/hf-inference.js";
5-
import type { InferenceProvider, InferenceProviderOrPolicy, ModelId } from "../types.js";
5+
import type { InferenceProvider, InferenceProviderMappingEntry, InferenceProviderOrPolicy, ModelId } from "../types.js";
66
import { typedInclude } from "../utils/typedInclude.js";
77
import { InferenceClientHubApiError, InferenceClientInputError } from "../errors.js";
88

99
export const inferenceProviderMappingCache = new Map<ModelId, InferenceProviderMappingEntry[]>();
1010

11-
export interface InferenceProviderMappingEntry {
12-
adapter?: string;
13-
adapterWeightsPath?: string;
14-
hfModelId: ModelId;
15-
provider: string;
16-
providerId: string;
17-
status: "live" | "staging";
18-
task: WidgetType;
19-
type?: "single-model" | "tag-filter";
20-
}
21-
2211
/**
2312
* Normalize inferenceProviderMapping to always return an array format.
2413
* This provides backward and forward compatibility for the API changes.

packages/inference/src/lib/makeRequestOptions.ts

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
import { HF_HEADER_X_BILL_TO, HF_HUB_URL } from "../config.js";
22
import { PACKAGE_NAME, PACKAGE_VERSION } from "../package.js";
3-
import type { InferenceTask, Options, RequestArgs } from "../types.js";
4-
import type { InferenceProviderMappingEntry } from "./getInferenceProviderMapping.js";
3+
import type { InferenceTask, InferenceProviderMappingEntry, Options, RequestArgs } from "../types.js";
54
import { getInferenceProviderMapping } from "./getInferenceProviderMapping.js";
65
import type { getProviderHelper } from "./getProviderHelper.js";
76
import { isUrl } from "./isUrl.js";

packages/inference/src/package.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
// Generated file from package.json. Issues importing JSON directly when publishing on commonjs/ESM - see https://github.com/microsoft/TypeScript/issues/51783
2-
export const PACKAGE_VERSION = "4.0.4";
2+
export const PACKAGE_VERSION = "4.0.6";
33
export const PACKAGE_NAME = "@huggingface/inference";

packages/inference/src/providers/consts.ts

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
1-
import type { InferenceProviderMappingEntry } from "../lib/getInferenceProviderMapping.js";
2-
import type { InferenceProvider } from "../types.js";
1+
import type { InferenceProvider, InferenceProviderMappingEntry } from "../types.js";
32
import { type ModelId } from "../types.js";
43

54
/**

packages/inference/src/snippets/getInferenceSnippets.ts

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -8,10 +8,9 @@ import {
88
} from "@huggingface/tasks";
99
import type { PipelineType, WidgetType } from "@huggingface/tasks";
1010
import type { ChatCompletionInputMessage, GenerationParameters } from "@huggingface/tasks";
11-
import type { InferenceProviderMappingEntry } from "../lib/getInferenceProviderMapping.js";
1211
import { getProviderHelper } from "../lib/getProviderHelper.js";
1312
import { makeRequestOptionsFromResolvedModel } from "../lib/makeRequestOptions.js";
14-
import type { InferenceProviderOrPolicy, InferenceTask, RequestArgs } from "../types.js";
13+
import type { InferenceProviderMappingEntry, InferenceProviderOrPolicy, InferenceTask, RequestArgs } from "../types.js";
1514
import { templates } from "./templates.exported.js";
1615

1716
export type InferenceSnippetOptions = {
@@ -466,7 +465,7 @@ function replaceAccessTokenPlaceholder(
466465
!endpointUrl && // custom endpointUrl => use a generic API_TOKEN
467466
(provider == "hf-inference" || // hf-inference provider => use $HF_TOKEN
468467
(!directRequest && // if explicit directRequest => use provider-specific token
469-
(!snippet.includes("https://") || // no URL provided => using a client => use $HF_TOKEN
468+
(snippet.includes("InferenceClient") || // using a client => use $HF_TOKEN
470469
snippet.includes("https://router.huggingface.co")))); // explicit routed request => use $HF_TOKEN
471470
const accessTokenEnvVar = useHfToken
472471
? "HF_TOKEN" // e.g. routed request or hf-inference

packages/inference/src/types.ts

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
1-
import type { ChatCompletionInput, PipelineType } from "@huggingface/tasks";
2-
import type { InferenceProviderMappingEntry } from "./lib/getInferenceProviderMapping.js";
1+
import type { ChatCompletionInput, PipelineType, WidgetType } from "@huggingface/tasks";
32

43
/**
54
* HF model id, like "meta-llama/Llama-3.3-70B-Instruct"
@@ -63,6 +62,17 @@ export type InferenceProvider = (typeof INFERENCE_PROVIDERS)[number];
6362

6463
export type InferenceProviderOrPolicy = (typeof PROVIDERS_OR_POLICIES)[number];
6564

65+
export interface InferenceProviderMappingEntry {
66+
adapter?: string;
67+
adapterWeightsPath?: string;
68+
hfModelId: ModelId;
69+
provider: string;
70+
providerId: string;
71+
status: "live" | "staging";
72+
task: WidgetType;
73+
type?: "single-model" | "tag-filter";
74+
}
75+
6676
export interface BaseArgs {
6777
/**
6878
* The access token to use. Without it, you'll get rate-limited quickly.

packages/jinja/test/templates.test.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4337,7 +4337,7 @@ describe("Templates", () => {
43374337
// TODO add failure cases
43384338
});
43394339

4340-
describe("Parsing and intepretation", () => {
4340+
describe("Parsing and interpretation", () => {
43414341
describe("should interpret an AST", () => {
43424342
for (const [name, text] of Object.entries(TEST_PARSED)) {
43434343
const ast = parse(text);

packages/languages/README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,6 @@ import { language, wikiLink, type Language } from "https://esm.sh/@huggingface/languages"
2121
import { language, wikiLink, type Language } from "npm:@huggingface/languages"
2222
```
2323

24-
Check out the [full documentation](https://huggingface.co/docs/huggingface.js/languages/README).
24+
Check out the [full documentation](https://huggingface.co/docs/huggingface.js/index).
2525

2626
Acknowledging Loïck Bourdois (https://github.com/lbourdois)'s help with this package and more generally supporting languages on the HF Hub.

0 commit comments

Comments
 (0)