
Commit e8c0ecb

machineuser (HuggingFaceInfra) and ngxson authored
[ollama-utils] 🤖 Auto-update chat templates (2025-05-26) (#1494)
This PR is auto-generated by [generate-automap.ts](https://github.com/huggingface/huggingface.js/blob/main/packages/ollama-utils/scripts/generate-automap.ts).

Co-authored-by: machineuser <[email protected]>
Co-authored-by: Xuan-Son Nguyen <[email protected]>
1 parent fafe109 · commit e8c0ecb


packages/ollama-utils/src/chat-template-automap.ts

Lines changed: 11 additions & 12 deletions
@@ -5,19 +5,9 @@ import type { OllamaChatTemplateMapEntry } from "./types";
 
 /**
  * Skipped these models due to error:
+ * - library/llama4:latest
  * - library/llama3.1:latest
- * - library/gemma3:latest
- * - library/llama3.3:70b
- * - library/command-r-plus:latest
- * - library/codeqwen:latest
- * - library/bakllava:latest
- * - library/command-r-plus:latest
- * - library/qwen2-math:1.5b
- * - library/reflection:latest
- * - library/deepscaler:1.5b
- * - library/nemotron:latest
- * - library/nemotron:70b
- * - library/tulu3:latest
+ * - library/granite3.2:latest
  */
 
 export const OLLAMA_CHAT_TEMPLATE_MAPPING: OllamaChatTemplateMapEntry[] = [
@@ -337,6 +327,15 @@ export const OLLAMA_CHAT_TEMPLATE_MAPPING: OllamaChatTemplateMapEntry[] = [
 			},
 		},
 	},
+	{
+		model: "library/devstral:latest",
+		gguf: "{%- set today = strftime_now(\"%Y-%m-%d\") %}\n{%- set default_system_message = \"You are Devstral, a Large Language Model (LLM) created by Mistral AI, a French startup headquartered in Paris.\\nYour knowledge base was last updated on 2023-10-01. The current date is \" + today + \".\\n\\nWhen you're not sure about some information, you say that you don't have the information and don't make up anything.\\nIf the user's question is not clear, ambiguous, or does not provide enough context for you to accurately answer the question, you do not try to answer it right away and you rather ask the user to clarify their request (e.g. \\\"What are some good restaurants around me?\\\" => \\\"Where are you?\\\" or \\\"When is the next flight to Tokyo\\\" => \\\"Where do you travel from?\\\")\" %}\n\n{{- bos_token }}\n\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content'] %}\n {%- set loop_messages = messages[1:] %}\n{%- else %}\n {%- set system_message = default_system_message %}\n {%- set loop_messages = messages %}\n{%- endif %}\n{{- '[SYSTEM_PROMPT]' + system_message + '[/SYSTEM_PROMPT]' }}\n\n{%- for message in loop_messages %}\n {%- if message['role'] == 'user' %}\n {{- '[INST]' + message['content'] + '[/INST]' }}\n {%- elif message['role'] == 'system' %}\n {{- '[SYSTEM_PROMPT]' + message['content'] + '[/SYSTEM_PROMPT]' }}\n {%- elif message['role'] == 'assistant' %}\n {{- message['content'] + eos_token }}\n {%- else %}\n {{- raise_exception('Only user, system and assistant roles are supported!') }}\n {%- endif %}\n{%- endfor %}",
+		ollama: {
+			template:
+				'{{- range $index, $_ := .Messages }}\n{{- if eq .Role "system" }}[SYSTEM_PROMPT]{{ .Content }}[/SYSTEM_PROMPT]\n{{- else if eq .Role "user" }}\n{{- if and (le (len (slice $.Messages $index)) 2) $.Tools }}[AVAILABLE_TOOLS]{{ $.Tools }}[/AVAILABLE_TOOLS]\n{{- end }}[INST]{{ .Content }}[/INST]\n{{- else if eq .Role "assistant" }}\n{{- if .Content }}{{ .Content }}\n{{- if not (eq (len (slice $.Messages $index)) 1) }}</s>\n{{- end }}\n{{- else if .ToolCalls }}[TOOL_CALLS][\n{{- range .ToolCalls }}{"name": "{{ .Function.Name }}", "arguments": {{ .Function.Arguments }}}\n{{- end }}]</s>\n{{- end }}\n{{- else if eq .Role "tool" }}[TOOL_RESULTS]{"content": {{ .Content }}}[/TOOL_RESULTS]\n{{- end }}\n{{- end }}',
+			tokens: ["[INST]"],
+		},
+	},
 	{
 		model: "library/dolphin-llama3:8b",
 		gguf: "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}",
