Commit 5abaaec

fix bugs in doccano.py and task.py (#2417)
* optimize code
* optimize code
1 parent 41daa25 commit 5abaaec

File tree

model_zoo/uie/doccano.py
paddlenlp/taskflow/task.py

2 files changed (+10, -10 lines changed)


model_zoo/uie/doccano.py

Lines changed: 6 additions & 1 deletion
@@ -16,6 +16,7 @@
 import time
 import argparse
 import json
+from decimal import Decimal
 import numpy as np
 from paddlenlp.utils.log import logger
 
@@ -35,7 +36,11 @@ def do_convert():
     if len(args.splits) != 0 and len(args.splits) != 3:
         raise ValueError("Only []/ len(splits)==3 accepted for splits.")
 
-    if args.splits and sum(args.splits) != 1:
+    def _check_sum(splits):
+        return Decimal(str(splits[0])) + Decimal(str(splits[1])) + Decimal(
+            str(splits[2])) == Decimal("1")
+
+    if len(args.splits) == 3 and not _check_sum(args.splits):
         raise ValueError(
             "Please set correct splits, sum of elements in splits should be equal to 1."
         )
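
Note on the doccano.py fix (not part of the commit itself): the old guard `sum(args.splits) != 1` compares a binary floating-point sum against 1, so a legitimate split such as 0.7/0.2/0.1 can be rejected purely because of rounding error. Converting each value to Decimal via its string form, as the new _check_sum does, compares the numbers as the user typed them. A minimal sketch of the difference, assuming a three-way split passed as plain Python floats:

    from decimal import Decimal

    splits = [0.7, 0.2, 0.1]

    # Plain float addition accumulates binary rounding error, so the old
    # check `sum(args.splits) != 1` would reject this valid split.
    print(sum(splits))            # 0.9999999999999999
    print(sum(splits) == 1)       # False

    # Converting each element to Decimal via str() keeps the values exactly
    # as written, so the total compares equal to Decimal("1"), mirroring
    # what _check_sum does for the three split elements.
    total = sum(Decimal(str(s)) for s in splits)
    print(total == Decimal("1"))  # True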

paddlenlp/taskflow/task.py

Lines changed: 4 additions & 9 deletions
@@ -208,15 +208,10 @@ def _prepare_onnx_mode(self):
         sess_options.inter_op_num_threads = self._num_threads
         self.predictor = ort.InferenceSession(
             fp16_model_file, sess_options=sess_options, providers=providers)
-        try:
-            assert 'CUDAExecutionProvider' in self.predictor.get_providers()
-        except AssertionError:
-            raise AssertionError(
-                f"The environment for GPU inference is not set properly. "
-                "A possible cause is that you had installed both onnxruntime and onnxruntime-gpu. "
-                "Please run the following commands to reinstall: \n "
-                "1) pip uninstall -y onnxruntime onnxruntime-gpu \n 2) pip install onnxruntime-gpu"
-            )
+        assert 'CUDAExecutionProvider' in self.predictor.get_providers(), f"The environment for GPU inference is not set properly. " \
+            "A possible cause is that you had installed both onnxruntime and onnxruntime-gpu. " \
+            "Please run the following commands to reinstall: \n " \
+            "1) pip uninstall -y onnxruntime onnxruntime-gpu \n 2) pip install onnxruntime-gpu"
 
     def _get_inference_model(self):
         """
