Skip to content
This repository was archived by the owner on Sep 10, 2025. It is now read-only.

Commit 54eeece

Browse files
committed
add formatting exception for llama2 "".join(res)
1 parent dfe6fe2 commit 54eeece

File tree

1 file changed

+6
-1
lines changed

1 file changed

+6
-1
lines changed

dist_run.py

Lines changed: 6 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -456,7 +456,12 @@ def main(args):
456456
if pp_rank == last_pp_rank and tp_rank == 0:
457457
for i in range(len(prompt_lengths)):
458458
logger.info(f"\nPrompt:{color.green} {prompt[i]} {color.reset}")
459-
formatted_response = "".join(res[i])
459+
460+
# TODO: resolve issue with llama2-7b-chat model and "".join
461+
if model_name != "llama2-7b-chat":
462+
formatted_response = "".join(res[i])
463+
else:
464+
formatted_response = " ".join(res[i])
460465
logger.info(f"$$ {color.red}{formatted_response} {color.reset} $$\n")
461466

462467
# Cleanup

0 commit comments

Comments
 (0)