diff --git a/packages/tasks/src/model-libraries-snippets.ts b/packages/tasks/src/model-libraries-snippets.ts
index ad4c771ac4..8d8532e156 100644
--- a/packages/tasks/src/model-libraries-snippets.ts
+++ b/packages/tasks/src/model-libraries-snippets.ts
@@ -788,11 +788,11 @@ export const transformers = (model: ModelData): string[] => {
 	if (model.pipeline_tag && LIBRARY_TASK_MAPPING.transformers?.includes(model.pipeline_tag)) {
 		const pipelineSnippet = ["# Use a pipeline as a high-level helper", "from transformers import pipeline", ""];
 
-		if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
+		if (model.tags?.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
 			pipelineSnippet.push("messages = [", '	{"role": "user", "content": "Who are you?"},', "]");
 		}
 		pipelineSnippet.push(`pipe = pipeline("${model.pipeline_tag}", model="${model.id}"` + remote_code_snippet + ")");
-		if (model.tags.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
+		if (model.tags?.includes("conversational") && model.config?.tokenizer_config?.chat_template) {
 			pipelineSnippet.push("pipe(messages)");
 		}
 
diff --git a/packages/tasks/src/snippets/curl.ts b/packages/tasks/src/snippets/curl.ts
index c8f39b677e..ed465a7b06 100644
--- a/packages/tasks/src/snippets/curl.ts
+++ b/packages/tasks/src/snippets/curl.ts
@@ -10,7 +10,7 @@ export const snippetBasic = (model: ModelDataMinimal, accessToken: string): stri
 	-H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}"`;
 
 export const snippetTextGeneration = (model: ModelDataMinimal, accessToken: string): string => {
-	if (model.tags.includes("conversational")) {
+	if (model.tags?.includes("conversational")) {
 		// Conversational model detected, so we display a code snippet that features the Messages API
 		return `curl 'https://api-inference.huggingface.co/models/${model.id}/v1/chat/completions' \\
 -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}" \\
@@ -28,7 +28,7 @@ export const snippetTextGeneration = (model: ModelDataMinimal, accessToken: stri
 };
 
 export const snippetImageTextToTextGeneration = (model: ModelDataMinimal, accessToken: string): string => {
-	if (model.tags.includes("conversational")) {
+	if (model.tags?.includes("conversational")) {
 		// Conversational model detected, so we display a code snippet that features the Messages API
 		return `curl 'https://api-inference.huggingface.co/models/${model.id}/v1/chat/completions' \\
 -H "Authorization: Bearer ${accessToken || `{API_TOKEN}`}" \\
diff --git a/packages/tasks/src/snippets/js.ts b/packages/tasks/src/snippets/js.ts
index 746a4de377..54ff31c53e 100644
--- a/packages/tasks/src/snippets/js.ts
+++ b/packages/tasks/src/snippets/js.ts
@@ -24,7 +24,7 @@ query({"inputs": ${getModelInputSnippet(model)}}).then((response) => {
 });`;
 
 export const snippetTextGeneration = (model: ModelDataMinimal, accessToken: string): string => {
-	if (model.tags.includes("conversational")) {
+	if (model.tags?.includes("conversational")) {
 		// Conversational model detected, so we display a code snippet that features the Messages API
 		return `import { HfInference } from "@huggingface/inference";
 
@@ -43,7 +43,7 @@ for await (const chunk of inference.chatCompletionStream({
 };
 
 export const snippetImageTextToTextGeneration = (model: ModelDataMinimal, accessToken: string): string => {
-	if (model.tags.includes("conversational")) {
+	if (model.tags?.includes("conversational")) {
 		// Conversational model detected, so we display a code snippet that features the Messages API
 		return `import { HfInference } from "@huggingface/inference";
 
diff --git a/packages/tasks/src/snippets/python.ts b/packages/tasks/src/snippets/python.ts
index 2e383a275d..f93fb4ef6a 100644
--- a/packages/tasks/src/snippets/python.ts
+++ b/packages/tasks/src/snippets/python.ts
@@ -175,10 +175,10 @@ export const pythonSnippets: Partial