
Commit bf8e1c4

dudalev and wenbingl authored
Fix PReLU conversion from CoreML (#425)
Signed-off-by: Michael Dudalev <[email protected]>
Co-authored-by: Wenbing Li <[email protected]>
1 parent 7631c1a commit bf8e1c4

File tree

1 file changed: +4 −2 lines changed

  • onnxmltools/convert/coreml/operator_converters/neural_network/Activation.py


onnxmltools/convert/coreml/operator_converters/neural_network/Activation.py

Lines changed: 4 additions, 2 deletions
@@ -4,8 +4,10 @@
 # license information.
 # --------------------------------------------------------------------------
 
+import numpy as np
+
 from ....common._apply_operation import apply_elu, apply_hard_sigmoid, apply_leaky_relu, apply_prelu, apply_relu, \
-    apply_sigmoid, apply_tanh, apply_affine, apply_parametric_softplus, apply_scaled_tanh
+    apply_sigmoid, apply_tanh, apply_affine, apply_parametric_softplus, apply_scaled_tanh, apply_thresholded_relu
 from ....common._registration import register_converter
 
 
@@ -24,7 +26,7 @@ def convert_activation(scope, operator, container):
     elif activation_type == 'ReLU':
         apply_relu(scope, inputs, outputs, container, operator_name=attrs['name'])
     elif activation_type == 'PReLU':
-        apply_prelu(scope, inputs, outputs, container, operator_name=attrs['name'], slope=[params.PReLU.alpha])
+        apply_prelu(scope, inputs[0], outputs, container, operator_name=attrs['name'], slope=np.asarray([params.PReLU.alpha.floatValue]))
     elif activation_type == 'ELU':
         apply_elu(scope, inputs, outputs, container, operator_name=attrs['name'], alpha=params.ELU.alpha)
     elif activation_type == 'tanh':
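
Why the change matters: in the CoreML protobuf, params.PReLU.alpha is a WeightParams message, not a number, so the old slope=[params.PReLU.alpha] handed a whole protobuf message to apply_prelu. The fix reads the repeated floatValue field of that message and passes the slope as a numpy array; it also passes inputs[0] rather than the full input list, since the PReLU data input is a single tensor and the slope is supplied separately. Below is a minimal sketch of what the fixed line computes, assuming coremltools is installed; the alpha values are made up for illustration and this is not a test from the repo.

import numpy as np
from coremltools.proto import NeuralNetwork_pb2

# Build a stand-in for the CoreML activation params the converter receives.
params = NeuralNetwork_pb2.ActivationParams()
params.PReLU.alpha.floatValue.extend([0.1, 0.2, 0.3])  # one slope per channel

# Before the fix: slope=[params.PReLU.alpha] wrapped the WeightParams message itself.
# After the fix: the repeated floatValue field becomes a numeric numpy array.
slope = np.asarray([params.PReLU.alpha.floatValue])
print(slope.shape)  # (1, 3)
print(slope)        # [[0.1 0.2 0.3]] (up to float32 rounding)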
