From f416a617a78e938e53279a0b8ec7e40772db0319 Mon Sep 17 00:00:00 2001
From: Wauplin
Date: Tue, 5 Nov 2024 17:39:51 +0100
Subject: [PATCH 1/3] Document python text to image snippets

---
 packages/tasks/src/snippets/python.spec.ts | 39 ++++++++++++++++++++--
 packages/tasks/src/snippets/python.ts      | 25 +++++++++++---
 2 files changed, 57 insertions(+), 7 deletions(-)

diff --git a/packages/tasks/src/snippets/python.spec.ts b/packages/tasks/src/snippets/python.spec.ts
index 3f1ee4979a..6b1be11aff 100644
--- a/packages/tasks/src/snippets/python.spec.ts
+++ b/packages/tasks/src/snippets/python.spec.ts
@@ -1,6 +1,6 @@
-import type { ModelDataMinimal } from "./types";
+import type { InferenceSnippet, ModelDataMinimal } from "./types";
 import { describe, expect, it } from "vitest";
-import { snippetConversational } from "./python";
+import { snippetConversational, getPythonInferenceSnippet } from "./python";
 
 describe("inference API snippets", () => {
 	it("conversational llm", async () => {
@@ -75,4 +75,39 @@ stream = client.chat.completions.create(
 for chunk in stream:
 	print(chunk.choices[0].delta.content, end="")`);
 	});
+
+	it("text-to-image", async () => {
+		const model: ModelDataMinimal = {
+			id: "black-forest-labs/FLUX.1-schnell",
+			pipeline_tag: "text-to-image",
+			tags: [],
+			inference: "",
+		};
+		const snippets = getPythonInferenceSnippet(model, "api_token") as InferenceSnippet[];
+
+		expect(snippets.length).toEqual(2);
+
+		expect(snippets[0].client).toEqual("huggingface_hub");
+		expect(snippets[0].content).toEqual(`from huggingface_hub import InferenceClient
+client = InferenceClient("black-forest-labs/FLUX.1-schnell", token="api_token")
+# output is a PIL.Image object
+image = client.text_to_image("Astronaut riding a horse")`);
+
+		expect(snippets[1].client).toEqual("requests");
+		expect(snippets[1].content).toEqual(`import requests
+
+API_URL = "https://api-inference.huggingface.co/models/black-forest-labs/FLUX.1-schnell"
+headers = {"Authorization": "Bearer api_token"}
+
+def query(payload):
+	response = requests.post(API_URL, headers=headers, json=payload)
+	return response.content
+image_bytes = query({
+	"inputs": "Astronaut riding a horse",
+})
+# You can access the image with PIL.Image for example
+import io
+from PIL import Image
+image = Image.open(io.BytesIO(image_bytes))`);
+	});
 });
diff --git a/packages/tasks/src/snippets/python.ts b/packages/tasks/src/snippets/python.ts
index 31ce47a10b..ff1ee5e2e9 100644
--- a/packages/tasks/src/snippets/python.ts
+++ b/packages/tasks/src/snippets/python.ts
@@ -4,6 +4,10 @@ import { stringifyGenerationConfig, stringifyMessages } from "./common.js";
 import { getModelInputSnippet } from "./inputs.js";
 import type { InferenceSnippet, ModelDataMinimal } from "./types.js";
 
+const snippetImportInferenceClient = (model: ModelDataMinimal, accessToken: string): string =>
+	`from huggingface_hub import InferenceClient
+client = InferenceClient("${model.id}", token="${accessToken || "{API_TOKEN}"}")`;
+
 export const snippetConversational = (
 	model: ModelDataMinimal,
 	accessToken: string,
@@ -161,8 +165,16 @@ export const snippetFile = (model: ModelDataMinimal): InferenceSnippet => ({
 output = query(${getModelInputSnippet(model)})`,
 });
 
-export const snippetTextToImage = (model: ModelDataMinimal): InferenceSnippet => ({
-	content: `def query(payload):
+export const snippetTextToImage = (model: ModelDataMinimal, accessToken: string): InferenceSnippet[] => [
+	{
+		client: "huggingface_hub",
+		content: `${snippetImportInferenceClient(model, accessToken)}
+# output is a PIL.Image object
+image = client.text_to_image(${getModelInputSnippet(model)})`,
+	},
+	{
+		client: "requests",
+		content: `def query(payload):
 	response = requests.post(API_URL, headers=headers, json=payload)
 	return response.content
 image_bytes = query({
@@ -172,7 +184,8 @@ image_bytes = query({
 import io
 from PIL import Image
 image = Image.open(io.BytesIO(image_bytes))`,
-});
+	},
+];
 
 export const snippetTabular = (model: ModelDataMinimal): InferenceSnippet => ({
 	content: `def query(payload):
@@ -288,12 +301,14 @@ export function getPythonInferenceSnippet(
 		return snippets.map((snippet) => {
 			return {
 				...snippet,
-				content: `import requests
+				content: snippet.content.includes("requests")
+					? `import requests
 
 API_URL = "https://api-inference.huggingface.co/models/${model.id}"
 headers = {"Authorization": ${accessToken ? `"Bearer ${accessToken}"` : `f"Bearer {API_TOKEN}"`}}
 
-${snippet.content}`,
+${snippet.content}`
+					: snippet.content,
 			};
 		});
 	}

From 46027fecbc1b59ac2287ba9497b0ad30700e5efb Mon Sep 17 00:00:00 2001
From: Lucain
Date: Thu, 14 Nov 2024 14:50:37 +0100
Subject: [PATCH 2/3] Apply suggestions from code review

Co-authored-by: Mishig
---
 packages/tasks/src/snippets/python.spec.ts | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/packages/tasks/src/snippets/python.spec.ts b/packages/tasks/src/snippets/python.spec.ts
index 6b1be11aff..9dff1606f7 100644
--- a/packages/tasks/src/snippets/python.spec.ts
+++ b/packages/tasks/src/snippets/python.spec.ts
@@ -90,6 +90,7 @@ for chunk in stream:
 		expect(snippets[0].client).toEqual("huggingface_hub");
 		expect(snippets[0].content).toEqual(`from huggingface_hub import InferenceClient
 client = InferenceClient("black-forest-labs/FLUX.1-schnell", token="api_token")
+
 # output is a PIL.Image object
 image = client.text_to_image("Astronaut riding a horse")`);
 
@@ -105,6 +106,7 @@ def query(payload):
 image_bytes = query({
 	"inputs": "Astronaut riding a horse",
 })
+
 # You can access the image with PIL.Image for example
 import io
 from PIL import Image

From e4957cd7037062673ccfbfe35aab7974b82953a9 Mon Sep 17 00:00:00 2001
From: Wauplin
Date: Thu, 14 Nov 2024 15:00:20 +0100
Subject: [PATCH 3/3] fix tests

---
 packages/tasks/src/snippets/python.ts | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/packages/tasks/src/snippets/python.ts b/packages/tasks/src/snippets/python.ts
index ff1ee5e2e9..bdb148e391 100644
--- a/packages/tasks/src/snippets/python.ts
+++ b/packages/tasks/src/snippets/python.ts
@@ -6,7 +6,8 @@ import type { InferenceSnippet, ModelDataMinimal } from "./types.js";
 const snippetImportInferenceClient = (model: ModelDataMinimal, accessToken: string): string =>
 	`from huggingface_hub import InferenceClient
-client = InferenceClient("${model.id}", token="${accessToken || "{API_TOKEN}"}")`;
+client = InferenceClient("${model.id}", token="${accessToken || "{API_TOKEN}"}")
+`;
 
 export const snippetConversational = (
 	model: ModelDataMinimal,
 	accessToken: string,
@@ -180,6 +181,7 @@ image = client.text_to_image(${getModelInputSnippet(model)})`,
 image_bytes = query({
 	"inputs": ${getModelInputSnippet(model)},
 })
+
 # You can access the image with PIL.Image for example
 import io
 from PIL import Image