@@ -412,10 +412,27 @@ def test_get_prompt_with_chat_template():
412412 assert prompt == "Mock chat template applied"
413413
414414
def test_get_prompt_with_default_chat_template():
    """When `chat_template` is None but `default_chat_template` is set, the
    prompt comes from the tokenizer's `apply_chat_template` output."""
    with patch('transformers.PreTrainedTokenizer') as tokenizer_cls:
        tokenizer = tokenizer_cls.return_value
        tokenizer.chat_template = None
        tokenizer.default_chat_template = "Mock default chat template"
        tokenizer.apply_chat_template.return_value = "Mock default chat template applied"

        # Two-turn conversation built from (content, role) pairs.
        conversation = [
            PromptMessage(content=text, role=role)
            for text, role in (
                ("Alright?", PromptRole.USER.value),
                ("Yeah.", PromptRole.ASSISTANT.value),
            )
        ]

        prompt = get_prompt_from_messages(tokenizer, conversation)

        assert prompt == "Mock default chat template applied"
429+
430+
415431def test_get_prompt_without_chat_template ():
416432 with patch ('transformers.PreTrainedTokenizer' ) as tok :
417433 mock_tokenizer = tok .return_value
418434 mock_tokenizer .chat_template = None
435+ mock_tokenizer .default_chat_template = None
419436 messages = [
420437 PromptMessage (content = "You are a helpful assistant." , role = PromptRole .SYSTEM .value ),
421438 PromptMessage (content = "Alright?" , role = PromptRole .USER .value ),
@@ -432,9 +449,9 @@ def test_get_prompt_with_no_messages():
432449 with patch ('transformers.PreTrainedTokenizer' ) as tok :
433450 mock_tokenizer = tok .return_value
434451 mock_tokenizer .chat_template = None
452+ mock_tokenizer .default_chat_template = None
435453 messages = []
436454
437455 prompt = get_prompt_from_messages (mock_tokenizer , messages )
438456
439- expected_prompt = "\n <|assistant|>\n "
440- assert prompt == expected_prompt
457+ assert prompt == "\n <|assistant|>\n "
0 commit comments