@@ -1177,7 +1177,7 @@ def test_text(test_prompt: str, max_new_tokens: int, sample_number: int, result:
         repetition_penalty=perm_0['repetition_penalty'],
         presence_penalty=perm_0['presence_penalty'],
         frequency_penalty=perm_0['frequency_penalty'])
-    f"Trial #: {trial_id} Text Sample #: {text_sample_number} GENERATE PARAMS: temperature={perm_0['temperature']}, top_k={perm_0['top_k']}, top_p={perm_0['top_p']}, repetition_penalty={perm_0['repetition_penalty']} presence_penalty={perm_0['presence_penalty']} frequency_penalty{perm_0['frequency_penalty']} PROMPT: {test_prompt} RESPONSE: {response_0}"
+    print(f"Trial #: {trial_id} Text Sample #: {test_sample_number} GENERATE PARAMS: temperature={perm_0['temperature']}, top_k={perm_0['top_k']}, top_p={perm_0['top_p']}, repetition_penalty={perm_0['repetition_penalty']} presence_penalty={perm_0['presence_penalty']} frequency_penalty{perm_0['frequency_penalty']} PROMPT: {test_prompt} RESPONSE: {response_0}")
     #
     # print(f"Sample {sample_number}: I ask the generator (Beam: - max_new_tokens: 10, temperature=0.6, top_k=75, top_p=0.98, repetition_penalty=None, presence_penalty = 1.3, frequency_penalty = 1.4): {test_prompt}... It responds: '{response_3}'.")
     # response_4 = complete_text_beam(text=test_prompt, max_new_tokens=max_new_tokens, temperature=0.7, top_k=75, top_p=0.98, repetition_penalty=None, presence_penalty = 1.3, frequency_penalty = 1.4)
@@ -1200,7 +1200,14 @@ def test_text(test_prompt: str, max_new_tokens: int, sample_number: int, result:

 counter = 0
 for sample in prompt_samples:
-    test_text(test_prompt=sample, max_new_tokens=MAX_NEW_TOKENS, sample_number=counter, result=result, result_cutoff=RESULT_CUTOFF, trial_id=trial_number, test_sample_number=counter)
+    test_text(
+        test_prompt=sample,
+        max_new_tokens=MAX_NEW_TOKENS,
+        sample_number=counter,
+        result=result,
+        result_cutoff=RESULT_CUTOFF,
+        trial_id=trial_number,
+        test_sample_number=counter)
     counter += 1

 # # Tokenize the text without padding first to get actual tokens
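For context, the changed code implements a sampling-parameter sweep: for each prompt, one permutation of generation parameters (perm_0) is passed to the generator, and the trial number, parameters, prompt, and response are printed. Below is a minimal, self-contained sketch of that pattern; all names here (generate_stub, PARAM_GRID, run_sweep) are illustrative stand-ins, not the repository's actual API, and generate_stub only simulates the model call.

# Sketch only: hypothetical names, no real model is invoked.
from itertools import product

PARAM_GRID = {
    "temperature": [0.6, 0.7],
    "top_k": [50, 75],
    "top_p": [0.9, 0.98],
}

def generate_stub(prompt: str, max_new_tokens: int, **params) -> str:
    # Placeholder for the real generation call (e.g. complete_text in the repo).
    return f"<{max_new_tokens} tokens for {prompt!r} with {params}>"

def run_sweep(prompt_samples, max_new_tokens=10, trial_id=0):
    keys = list(PARAM_GRID)
    for sample_number, prompt in enumerate(prompt_samples):
        # Try every combination of the parameter grid for this prompt.
        for values in product(*(PARAM_GRID[k] for k in keys)):
            perm = dict(zip(keys, values))
            response = generate_stub(prompt, max_new_tokens, **perm)
            print(f"Trial #: {trial_id} Text Sample #: {sample_number} "
                  f"GENERATE PARAMS: {perm} PROMPT: {prompt} RESPONSE: {response}")

if __name__ == "__main__":
    run_sweep(["Once upon a time", "The quick brown fox"])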