Skip to content

Commit 888f1f3

Browse files
authored
[Fix] Update loglikelihood compatibility (#1659)
1 parent f7d8998 commit 888f1f3

File tree

1 file changed

+20
-9
lines changed

1 file changed

+20
-9
lines changed

opencompass/models/turbomind.py

Lines changed: 20 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -189,15 +189,26 @@ def get_loglikelihood(
189189
assert isinstance(
190190
inputs, List), f'List(str) is expected, but got {type(inputs)}'
191191
results = []
192-
for text, cont in zip(inputs, conts):
193-
input_ids = self.tokenizer.encode(text)
194-
res = self.pipe.get_ppl(input_ids)
195-
logit_sum = res * len(input_ids)
196-
input_ids = self.tokenizer.encode(text.replace(cont, ''))
197-
res = self.pipe.get_ppl(input_ids)
198-
logit_part = res * len(input_ids)
199-
results.append(-(logit_sum - logit_part))
200-
results = np.concatenate(results)
192+
if self.version_info <= (0, 6, 0):
193+
for text, cont in zip(inputs, conts):
194+
input_ids = self.tokenizer.encode(text)
195+
res = self.pipe.get_ppl(input_ids)
196+
logit_sum = res * len(input_ids)
197+
input_ids = self.tokenizer.encode(text.replace(cont, ''))
198+
res = self.pipe.get_ppl(input_ids)
199+
logit_part = res * len(input_ids)
200+
results.append(-(logit_sum - logit_part))
201+
results = np.concatenate(results)
202+
else:
203+
for text, cont in zip(inputs, conts):
204+
input_ids = self.tokenizer.encode(text)
205+
res = self.pipe.get_ppl(input_ids)
206+
logit_sum = res * len(input_ids)
207+
input_ids = self.tokenizer.encode(text.replace(cont, ''))
208+
res = self.pipe.get_ppl(input_ids)
209+
logit_part = res * len(input_ids)
210+
results.append(-(logit_sum[0] - logit_part[0]))
211+
results = np.array(results)
201212
return results
202213

203214
def _build_pipe(self, model_path, backend, engine_config):

0 commit comments

Comments
 (0)