Skip to content

Commit 01ccbf6

Browse files
committed
lint
1 parent e801bfa commit 01ccbf6

File tree

5 files changed

+24
-27
lines changed

5 files changed

+24
-27
lines changed

packages/inference/src/lib/makeRequestOptions.ts

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,7 @@ export async function makeRequestOptions(
126126
throw new Error("Inference proxying is not implemented yet");
127127
} else {
128128
switch (provider) {
129-
case 'fal-ai':
129+
case "fal-ai":
130130
return `${FAL_AI_API_BASE_URL}/${model}`;
131131
case "replicate":
132132
return `${REPLICATE_API_BASE_URL}/v1/models/${model}/predictions`;
@@ -166,10 +166,10 @@ export async function makeRequestOptions(
166166
body: binary
167167
? args.data
168168
: JSON.stringify({
169-
...((otherArgs.model && isUrl(otherArgs.model)) || provider === "replicate" || provider === "fal-ai"
170-
? omit(otherArgs, "model")
171-
: { ...otherArgs, model }),
172-
}),
169+
...((otherArgs.model && isUrl(otherArgs.model)) || provider === "replicate" || provider === "fal-ai"
170+
? omit(otherArgs, "model")
171+
: { ...otherArgs, model }),
172+
}),
173173
...(credentials ? { credentials } : undefined),
174174
signal: options?.signal,
175175
};
Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import type { ModelId } from "../types";
22

3-
export const FAL_AI_API_BASE_URL = "https://fal.run"
3+
export const FAL_AI_API_BASE_URL = "https://fal.run";
44

55
type FalAiId = string;
66

@@ -10,4 +10,4 @@ export const FAL_AI_MODEL_IDS: Record<ModelId, FalAiId> = {
1010
"black-forest-labs/FLUX.1-Redux-dev": "fal-ai/flux/dev/redux",
1111
"openai/whisper-large-v3": "fal-ai/wizper",
1212
"TencentARC/PhotoMaker": "fal-ai/photomaker",
13-
}
13+
};

packages/inference/src/tasks/audio/automaticSpeechRecognition.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -25,10 +25,11 @@ export async function automaticSpeechRecognition(
2525
args: AutomaticSpeechRecognitionArgs,
2626
options?: Options
2727
): Promise<AutomaticSpeechRecognitionOutput> {
28-
2928
if (args.provider === "fal-ai") {
3029
const contentType = args.data instanceof Blob ? args.data.type : "audio/mpeg";
31-
const base64audio = base64FromBytes(new Uint8Array(args.data instanceof ArrayBuffer ? args.data : await args.data.arrayBuffer()));
30+
const base64audio = base64FromBytes(
31+
new Uint8Array(args.data instanceof ArrayBuffer ? args.data : await args.data.arrayBuffer())
32+
);
3233
(args as RequestArgs & { audio_url: string }).audio_url = `data:${contentType};base64,${base64audio}`;
3334
delete (args as RequestArgs & { data: unknown }).data;
3435
}

packages/inference/src/tasks/custom/request.ts

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -27,14 +27,12 @@ export async function request<T>(
2727

2828
if (!response.ok) {
2929
const contentType = response.headers.get("Content-Type");
30-
if (
31-
["application/json", "application/problem+json"].some(
32-
(ct) => contentType?.startsWith(ct)
33-
)
34-
) {
30+
if (["application/json", "application/problem+json"].some((ct) => contentType?.startsWith(ct))) {
3531
const output = await response.json();
3632
if ([400, 422, 404, 500].includes(response.status) && options?.chatCompletion) {
37-
throw new Error(`Server ${args.model} does not seem to support chat completion. Error: ${JSON.stringify(output.error)}`);
33+
throw new Error(
34+
`Server ${args.model} does not seem to support chat completion. Error: ${JSON.stringify(output.error)}`
35+
);
3836
}
3937
if (output.error || output.detail) {
4038
throw new Error(JSON.stringify(output.error ?? output.detail));

packages/inference/test/HfInference.spec.ts

Lines changed: 10 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@ import type { ChatCompletionStreamOutput } from "@huggingface/tasks";
55
import { HfInference } from "../src";
66
import "./vcr";
77
import { readTestFile } from "./test-files";
8-
import { fail } from "assert";
98

109
const TIMEOUT = 60000 * 3;
1110
const env = import.meta.env;
@@ -551,7 +550,8 @@ describe.concurrent(
551550
});
552551
it("textToImage", async () => {
553552
const res = await hf.textToImage({
554-
inputs: "award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]",
553+
inputs:
554+
"award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]",
555555
model: "stabilityai/stable-diffusion-2",
556556
});
557557
expect(res).toBeInstanceOf(Blob);
@@ -563,7 +563,8 @@ describe.concurrent(
563563
const num_inference_steps = 10;
564564

565565
const res = await hf.textToImage({
566-
inputs: "award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]",
566+
inputs:
567+
"award winning high resolution photo of a giant tortoise/((ladybird)) hybrid, [trending on artstation]",
567568
model: "stabilityai/stable-diffusion-2",
568569
parameters: {
569570
negative_prompt: "blurry",
@@ -758,11 +759,10 @@ describe.concurrent(
758759
}
759760
expect(out).toContain("two");
760761
});
761-
762762
});
763763
/**
764-
* Compatibility with third-party Inference Providers
765-
*/
764+
* Compatibility with third-party Inference Providers
765+
*/
766766
describe.concurrent("SambaNova", () => {
767767
const client = new HfInference(env.HF_SAMBANOVA_KEY);
768768

@@ -831,8 +831,7 @@ describe.concurrent(
831831
});
832832
expect(res).toBeInstanceOf(Blob);
833833
});
834-
})
835-
834+
});
836835

837836
describe.concurrent("Replicate", () => {
838837
const client = new HfInference(env.HF_REPLICATE_KEY);
@@ -857,7 +856,7 @@ describe.concurrent(
857856
inputs: "black forest gateau cake spelling out the words FLUX SCHNELL, tasty, food photography, dynamic shot",
858857
});
859858
expect(res).toBeInstanceOf(Blob);
860-
})
859+
});
861860

862861
it("speechToText fal-ai", async () => {
863862
const res = await client.automaticSpeechRecognition({
@@ -868,9 +867,8 @@ describe.concurrent(
868867
expect(res).toMatchObject({
869868
text: "HE HAS GRAVE DOUBTS WHETHER SIR FREDERICK LEIGHTON'S WORK IS REALLY GREEK AFTER ALL AND CAN DISCOVER IN IT BUT LITTLE OF ROCKY ITHACA",
870869
});
871-
})
872-
})
873-
870+
});
871+
});
874872
},
875873
TIMEOUT
876874
);

0 commit comments

Comments (0)