1 parent 903fd21 commit bb206ca
test_llm_serialization.py
@@ -32,7 +32,7 @@ def test_serialization(tokenizer_path: str, model_path: str):
     )

     output_text = tokenizer.decode(output_tokens)
-    print(f🧠 Prompt: {prompt} Generated Text from Serialized Model: '{output_text}'")
+    print(f"🧠 (serialized) Prompt: {prompt} Generated Text from Serialized Model: '{output_text}'")

     return True
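
The change itself is a one-line fix: the removed line's f-string is missing its opening quote, so the file does not parse as written, and the added line also tags the output as coming from the serialized model. Below is a minimal standalone sketch of the before/after; the prompt and output_text values are placeholders I introduced for illustration, and only the two print lines come from the diff.

# Standalone illustration of the fix in this commit.
# Placeholder sample values (assumptions, not taken from the test):
prompt = "Tell me about model serialization."
output_text = "Serialization stores a model as a byte stream."

# Old form (removed line) -- missing opening quote, a SyntaxError as written:
#   print(f🧠 Prompt: {prompt} Generated Text from Serialized Model: '{output_text}'")

# New form (added line) -- valid f-string, labeled "(serialized)":
print(f"🧠 (serialized) Prompt: {prompt} Generated Text from Serialized Model: '{output_text}'")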