
Commit 297439d

llama_chat_format: Add Gemma3ChatHandler for gemma3 multimodal support
1 parent 3c4ef83 commit 297439d

File tree: 1 file changed (+61, -1 lines)


llama_cpp/llama_chat_format.py

Lines changed: 61 additions & 1 deletion
@@ -57,9 +57,10 @@
LLAMA3_INSTRUCT_CHAT_TEMPLATE = "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{% if add_generation_prompt %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}{% endif %}"

# Source: https://huggingface.co/meta-llama/Llama-4-Scout-17B-16E-Instruct/blob/main/tokenizer_config.json
-LLAMA4_INSTRUCT_CHAT_TEMPLATE = "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- if strftime_now is defined %}\n {%- set date_string = strftime_now(\"%d %b %Y\") %}\n {%- else %}\n {%- set date_string = \"26 Jul 2024\" %}\n {%- endif %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %} \n {%- if messages[0]['content'] is string %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- else %}\n {#- FIXME: The processor requires an array, always. #}\n {%- set system_message = messages[0]['content'][0]['text']|trim %}\n {%- endif %}\n {%- set messages = messages[1:] %}\n {%- set user_supplied_system_message = true %}\n{%- else %}\n {%- set system_message = \"\" %}\n {%- set user_supplied_system_message = false %}\n{%- endif %}\n\n{#- System message if the user supplied one #}\n{%- if user_supplied_system_message %}\n {{- \"<|header_start|>system<|header_end|>\\n\\n\" }}\n {%- if tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n {%- endif %}\n {%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {%- endif %}\n {{- system_message }}\n {{- \"<|eot|>\" }}\n{%- endif %}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|header_start|>user<|header_end|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|header_start|>' + message['role'] + '<|header_end|>\\n\\n' }}\n {%- if message['content'] is string %}\n {{- message['content'] }}\n {%- else %}\n {%- for content in message['content'] %}\n {%- if content['type'] == 'image' %}\n {{- '<|image|>' }}\n {%- elif content['type'] == 'text' %}\n {{- content['text'] }}\n {%- endif %}\n {%- endfor %}\n {%- endif %}\n {{- \"<|eot|>\" }}\n {%- elif 'tool_calls' in message and message.tool_calls|length > 0 %}\n {{- '<|header_start|>assistant<|header_end|>\\n\\n' -}}\n {{- '<|python_start|>' }}\n {%- if message['content'] is string %}\n {{- message['content'] }}\n {%- else %}\n {%- for content in message['content'] %}\n {%- if content['type'] == 'image' %}\n {{- '<|image|>' }}\n {%- elif content['type'] == 'text' %}\n {{- content['text'] }}\n {%- endif %}\n {%- endfor %}\n {%- endif %}\n {{- '<|python_end|>' }}\n {%- for tool_call in message.tool_calls %}\n {{- '{\"name\": \"' + tool_call.function.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.function.arguments | tojson }}\n {{- \"}\" }}\n {%- endfor %}\n {{- \"<|eot|>\" }}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|header_start|>ipython<|header_end|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|header_start|>assistant<|header_end|>\\n\\n' }}\n{%- endif %}\n"
LLAMA4_INSTRUCT_BOS_TOKEN = "<|begin_of_text|>"
LLAMA4_INSTRUCT_EOS_TOKEN = "<|eot|>"
+LLAMA4_INSTRUCT_CHAT_TEMPLATE = "{% if custom_tools is defined %}\n {% set tools = custom_tools %}\n{% endif %}\n{% if not tools_in_user_message is defined %}\n {% set tools_in_user_message = true %}\n{% endif %}\n{% if not date_string is defined %}\n {% if strftime_now is defined %}\n {% set date_string = strftime_now(\"%d %b %Y\") %}\n {% else %}\n {% set date_string = \"26 Jul 2024\" %}\n {% endif %}\n{% endif %}\n{% if not tools is defined %}\n {% set tools = none %}\n{% endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{% if messages[0]['role'] == 'system' %} \n {% if messages[0]['content'] is string %}\n {% set system_message = messages[0]['content']|trim %}\n {% else %}\n {#- FIXME: The processor requires an array, always. #}\n {% set system_message = messages[0]['content'][0]['text']|trim %}\n {% endif %}\n {% set messages = messages[1:] %}\n {% set user_supplied_system_message = true %}\n{% else %}\n {% set system_message = \"\" %}\n {% set user_supplied_system_message = false %}\n{% endif %}\n\n{#- System message if the user supplied one #}\n{% if user_supplied_system_message %}\n {{ \"<|header_start|>system<|header_end|>\\n\\n\" }}\n {% if tools is not none %}\n {{ \"Environment: ipython\\n\" }}\n {% endif %}\n {% if tools is not none and not tools_in_user_message %}\n {{ \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{ 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{ \"Do not use variables.\\n\\n\" }}\n {% for t in tools %}\n {{ t | tojson(indent=4) }}\n {{ \"\\n\\n\" }}\n {% endfor %}\n {% endif %}\n {{ system_message }}\n {{ \"<|eot|>\" }}\n{% endif %}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{% if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {% if messages | length != 0 %}\n {% set first_user_message = messages[0]['content']|trim %}\n {% set messages = messages[1:] %}\n {% else %}\n {{ raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{% endif %}\n {{ '<|header_start|>user<|header_end|>\\n\\n' -}}\n {{ \"Given the following functions, please respond with a JSON for a function call \" }}\n {{ \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{ 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{ \"Do not use variables.\\n\\n\" }}\n {% for t in tools %}\n {{ t | tojson(indent=4) }}\n {{ \"\\n\\n\" }}\n {% endfor %}\n {{ first_user_message + \"<|eot|>\"}}\n{% endif %}\n\n{% for message in messages %}\n {% if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{ '<|header_start|>' + message['role'] + '<|header_end|>\\n\\n' }}\n {% if message['content'] is string %}\n {{ message['content'] }}\n {% else %}\n {% for content in message['content'] %}\n {% if content['type'] == 'image' %}\n {{ '<|image|>' }}\n {% elif content['type'] == 'text' %}\n {{ content['text'] }}\n {% endif %}\n {% endfor %}\n {% endif %}\n {{ \"<|eot|>\" }}\n {% elif 'tool_calls' in message and message.tool_calls|length > 0 %}\n {{ '<|header_start|>assistant<|header_end|>\\n\\n' -}}\n {{ '<|python_start|>' }}\n {% if message['content'] is string %}\n {{ message['content'] }}\n {% else %}\n {% for content in message['content'] %}\n {% if content['type'] == 'image' %}\n {{ '<|image|>' }}\n {% elif content['type'] == 'text' %}\n {{ content['text'] }}\n {% endif %}\n {% endfor %}\n {% endif %}\n {{ '<|python_end|>' }}\n {% for tool_call in message.tool_calls %}\n {{ '{\"name\": \"' + tool_call.function.name + '\", ' }}\n {{ '\"parameters\": ' }}\n {{ tool_call.function.arguments | tojson }}\n {{ \"}\" }}\n {% endfor %}\n {{ \"<|eot|>\" }}\n {% elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{ \"<|header_start|>ipython<|header_end|>\\n\\n\" }}\n {% if message.content is mapping or message.content is iterable %}\n {{ message.content | tojson }}\n {% else %}\n {{ message.content }}\n {% endif %}\n {{ \"<|eot|>\" }}\n {% endif %}\n{% endfor %}\n{% if add_generation_prompt %}\n {{ '<|header_start|>assistant<|header_end|>\\n\\n' }}\n{% endif %}\n"
+

# Source: https://huggingface.co/openai/gpt-oss-20b/blob/main/tokenizer_config.json
GPT_OSS_BOS_TOKEN = "<|startoftext|>"
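
Aside (not part of this commit): the chat template constants above are plain Jinja2 source, so they can be rendered outside the library to inspect the prompt text a given format produces. A minimal sketch, assuming jinja2 is installed (llama-cpp-python already uses it for chat templates) and using a made-up message list:

# Illustration only: render the relocated Llama 4 template and print the result.
from jinja2 import Template

from llama_cpp.llama_chat_format import LLAMA4_INSTRUCT_CHAT_TEMPLATE

prompt = Template(LLAMA4_INSTRUCT_CHAT_TEMPLATE).render(
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello!"},
    ],
    add_generation_prompt=True,
)
print(prompt)  # prints the assembled prompt, including the <|header_start|>/<|eot|> markers

Note that the re-added template drops the leading {{- bos_token }} and the Jinja whitespace-control dashes of the removed line, so the rendered text keeps the template's own newlines and indentation.
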
@@ -3570,6 +3571,65 @@ class MiniCPMv26ChatHandler(Llava15ChatHandler):
    )


+class Gemma3ChatHandler(Llava15ChatHandler):
+    DEFAULT_SYSTEM_MESSAGE = "You are a helpful assistant."
+
+    GEMMA3_BOI_TOKEN = "<start_of_image>"
+    GEMMA3_EOI_TOKEN = "<end_of_image>"
+    GEMMA3_BOS_TOKEN = "<bos>"
+    GEMMA3_EOS_TOKEN = "<eos>"
+
+    CHAT_FORMAT = (
+        "{% if messages[0]['role'] == 'system' %}"
+        "{% set loop_messages = messages[1:] %}"
+        "{% if messages[0]['content'] is string %}"
+        "{% set first_user_prefix = messages[0]['content'] + '\n\n' %}"
+        "{% else %}"
+        "{% set first_user_prefix = messages[0]['content'][0]['text'] + '\n\n' %}"
+        "{% endif %}"
+        "{% else %}"
+        "{% set loop_messages = messages %}"
+        "{% set first_user_prefix = '' %}"
+        "{% endif %}"
+
+        "{% for message in loop_messages %}"
+        "{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}"
+        "{{ raise_exception(\"Conversation roles must alternate user/assistant/user/assistant/...\") }}"
+        "{% endif %}"
+
+        "{% if message['role'] == 'assistant' %}"
+        "{% set role = 'model' %}"
+        "{% else %}"
+        "{% set role = message['role'] %}"
+        "{% endif %}"
+
+        "{{ '<start_of_turn>' + role + '\n' + (first_user_prefix if loop.first else '') }}"
+
+        "{% if message['content'] is string %}"
+        "{{ message['content'] | trim }}"
+        "{% elif message['content'] is iterable %}"
+        "{% for item in message['content'] %}"
+        "{% if item['type'] == 'image_url' and item['image_url'] is string %}"
+        "{{ '<start_of_image>' + item['image_url'] + '<end_of_image>' }}"
+        "{% elif item['type'] == 'image_url' and item['image_url'] is mapping %}"
+        "{{ '<start_of_image>' + item['image_url']['url'] + '<end_of_image>' }}"
+        "{% elif item['type'] == 'text' %}"
+        "{{ item['text'] | trim }}"
+        "{% endif %}"
+        "{% endfor %}"
+        "{% else %}"
+        "{{ raise_exception('Invalid content type') }}"
+        "{% endif %}"
+
+        "<end_of_turn>\n"
+        "{% endfor %}"
+
+        "{% if add_generation_prompt %}"
+        "<start_of_turn>model\n"
+        "{% endif %}"
+    )
+
+
class Qwen25VLChatHandler(Llava15ChatHandler):
    DEFAULT_SYSTEM_MESSAGE = "You are a helpful assistant."

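Usage note (not part of the diff): Gemma3ChatHandler subclasses Llava15ChatHandler, so it should plug into Llama the same way the other multimodal handlers in llama_chat_format.py do. A hedged sketch, assuming the constructor takes the usual clip_model_path for the vision projector; the file names and URL below are placeholders, not files shipped by this commit:

# Hypothetical usage sketch; model/projector paths and the image URL are placeholders.
from llama_cpp import Llama
from llama_cpp.llama_chat_format import Gemma3ChatHandler

chat_handler = Gemma3ChatHandler(clip_model_path="mmproj-gemma3.gguf")  # vision projector (placeholder)
llm = Llama(
    model_path="gemma-3-4b-it-Q4_K_M.gguf",  # placeholder Gemma 3 GGUF
    chat_handler=chat_handler,
    n_ctx=4096,  # leave room for image tokens plus the reply
)

response = llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {
            "role": "user",
            "content": [
                {"type": "image_url", "image_url": {"url": "https://example.com/cat.png"}},
                {"type": "text", "text": "Describe this image."},
            ],
        },
    ],
)
print(response["choices"][0]["message"]["content"])

The CHAT_FORMAT added above accepts image_url parts given either as a bare string or as an OpenAI-style {"url": ...} mapping, and it maps the assistant role to Gemma's "model" turn, so OpenAI-shaped messages like the ones here should work unchanged.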