
Commit 9d84a7c

Revert "fix: apply_prompt_template"
This reverts commit 3e0e40b.
1 parent: 3e0e40b

File tree

1 file changed: +0 additions, -45 deletions

litellm/llms/watsonx/chat/transformation.py

Lines changed: 0 additions & 45 deletions
@@ -120,48 +120,3 @@ def _prepare_payload(self, model: str, api_params: WatsonXAPIParams) -> dict:
             None if model.startswith("deployment/") else api_params["project_id"]
         )
         return payload
-
-    @staticmethod
-    def apply_prompt_template(model: str, messages: List[Dict[str, str]]) -> dict:
-        """
-        Apply prompt template to messages for WatsonX Provider
-        """
-        if "granite" in model and "chat" in model:
-            # granite-13b-chat-v1 and granite-13b-chat-v2 use a specific prompt template
-            return ibm_granite_pt(messages=messages)
-        elif "ibm-mistral" in model and "instruct" in model:
-            # models like ibm-mistral/mixtral-8x7b-instruct-v01-q use the mistral instruct prompt template
-            return mistral_instruct_pt(messages=messages)
-        elif "openai/gpt-oss" in model:
-            # gpt-oss models (e.g., openai/gpt-oss-120b) use HuggingFace chat templates
-            # These models have chat templates in separate .jinja files, not in tokenizer_config.json
-            # Extract the model name for HuggingFace lookup
-            hf_model = model.split("watsonx/")[-1] if "watsonx/" in model else model
-            try:
-                return hf_chat_template(model=hf_model, messages=messages)
-            except Exception:
-                # If HF template fetch fails, fall back to trying the generic handler below
-                pass
-        elif "meta-llama/llama-3" in model and "instruct" in model:
-            # https://llama.meta.com/docs/model-cards-and-prompt-formats/meta-llama-3/
-            return custom_prompt(
-                role_dict={
-                    "system": {
-                        "pre_message": "<|start_header_id|>system<|end_header_id|>\n",
-                        "post_message": "<|eot_id|>",
-                    },
-                    "user": {
-                        "pre_message": "<|start_header_id|>user<|end_header_id|>\n",
-                        "post_message": "<|eot_id|>",
-                    },
-                    "assistant": {
-                        "pre_message": "<|start_header_id|>assistant<|end_header_id|>\n",
-                        "post_message": "<|eot_id|>",
-                    },
-                },
-                messages=messages,
-                initial_prompt_value="<|begin_of_text|>",
-                final_prompt_value="<|start_header_id|>assistant<|end_header_id|>\n",
-            )
-        else:
-            return messages
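
For context, the llama-3 branch removed above wrapped each chat message in role-specific header tokens via custom_prompt. The sketch below is only an illustration of that wrapping behavior, not litellm's implementation; render_llama3_prompt and ROLE_DICT are hypothetical names introduced here for the example.

# Minimal sketch (assumed helper, not litellm code): how the removed role_dict /
# initial_prompt_value / final_prompt_value arguments translate into a Llama 3 prompt string.
from typing import Dict, List

ROLE_DICT = {
    "system": {
        "pre_message": "<|start_header_id|>system<|end_header_id|>\n",
        "post_message": "<|eot_id|>",
    },
    "user": {
        "pre_message": "<|start_header_id|>user<|end_header_id|>\n",
        "post_message": "<|eot_id|>",
    },
    "assistant": {
        "pre_message": "<|start_header_id|>assistant<|end_header_id|>\n",
        "post_message": "<|eot_id|>",
    },
}

def render_llama3_prompt(messages: List[Dict[str, str]]) -> str:
    # Concatenate each message between its role's pre/post markers,
    # then append the assistant header so the model continues the reply.
    prompt = "<|begin_of_text|>"
    for msg in messages:
        role = ROLE_DICT.get(msg["role"], ROLE_DICT["user"])
        prompt += role["pre_message"] + msg["content"] + role["post_message"]
    prompt += "<|start_header_id|>assistant<|end_header_id|>\n"
    return prompt

if __name__ == "__main__":
    print(render_llama3_prompt([{"role": "user", "content": "Hello!"}]))

Run on a single user message, this yields the begin-of-text token, the wrapped user turn, and a trailing assistant header, mirroring the initial_prompt_value and final_prompt_value passed in the deleted call.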

0 commit comments
