
Commit c03cafa

fix attribute not found np.long (#3654)
1 parent: e6ef5fb

File tree

1 file changed: +5 -5 lines changed


tools/accuracy_checker/openvino/tools/accuracy_checker/evaluators/custom_evaluators/opennmt_encoder_decoder_generator_evaluator.py

Lines changed: 5 additions & 5 deletions
@@ -286,19 +286,19 @@ def __init__(self, config):
 
         # beam parameters
         self.top_beam_finished = np.zeros([self.batch_size], dtype=np.uint8)
-        self._batch_offset = np.arange(self.batch_size, dtype=np.long)
+        self._batch_offset = np.arange(self.batch_size, dtype="long")
         self.select_indices = None
         self.done = False
 
-        self.alive_seq = np.full([self.batch_size * self.beam_size, 1], self.bos, dtype=np.long)
+        self.alive_seq = np.full([self.batch_size * self.beam_size, 1], self.bos, dtype="long")
         self.is_finished = np.zeros((self.batch_size, self.beam_size), dtype=np.uint8)
         self.best_scores = np.full([self.batch_size], -1e10, dtype=float)
-        self._beam_offset = np.arange(0, self.batch_size * self.beam_size, step=self.beam_size, dtype=np.long)
+        self._beam_offset = np.arange(0, self.batch_size * self.beam_size, step=self.beam_size, dtype="long")
         self.topk_log_probs = np.asarray([0.0] + [float("-inf")] * (self.beam_size - 1))
         self.topk_log_probs = self.topk_log_probs.repeat(self.batch_size).reshape(self.batch_size, self.beam_size)
         self.topk_scores = np.empty((self.batch_size, self.beam_size), dtype=float)
-        self.topk_ids = np.empty((self.batch_size, self.beam_size), dtype=np.long)
-        self._batch_index = np.empty([self.batch_size, self.beam_size], dtype=np.long)
+        self.topk_ids = np.empty((self.batch_size, self.beam_size), dtype="long")
+        self._batch_index = np.empty([self.batch_size, self.beam_size], dtype="long")
 
     @property
     def current_predictions(self):
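Background on the change: np.long was a deprecated alias for Python's built-in int, deprecated in NumPy 1.20 and removed in NumPy 1.24, so code that still references it fails with the AttributeError this commit addresses. The diff switches to passing the dtype by its string name instead. The minimal sketch below reproduces the failure and builds similar state arrays with an explicit fixed-width dtype; batch_size, beam_size, bos, and the choice of np.int64 are illustrative stand-ins, not what the evaluator or the commit itself uses.

import numpy as np

# On NumPy >= 1.24 the removed alias fails at attribute lookup:
#     np.arange(4, dtype=np.long)
#     AttributeError: module 'numpy' has no attribute 'long'

# Hypothetical stand-ins for the evaluator's beam-search state; np.int64 is
# one explicit replacement, not the dtype string the commit switches to.
batch_size, beam_size, bos = 4, 2, 1
batch_offset = np.arange(batch_size, dtype=np.int64)
alive_seq = np.full([batch_size * beam_size, 1], bos, dtype=np.int64)

print(batch_offset.dtype, alive_seq.dtype)  # int64 int64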
