@@ -4,8 +4,10 @@
 # license information.
 # --------------------------------------------------------------------------
 
+import numpy as np
+
 from ....common._apply_operation import apply_elu, apply_hard_sigmoid, apply_leaky_relu, apply_prelu, apply_relu, \
-    apply_sigmoid, apply_tanh, apply_affine, apply_parametric_softplus, apply_scaled_tanh
+    apply_sigmoid, apply_tanh, apply_affine, apply_parametric_softplus, apply_scaled_tanh, apply_thresholded_relu
 from ....common._registration import register_converter
 
 
@@ -24,7 +26,7 @@ def convert_activation(scope, operator, container):
     elif activation_type == 'ReLU':
         apply_relu(scope, inputs, outputs, container, operator_name=attrs['name'])
     elif activation_type == 'PReLU':
-        apply_prelu(scope, inputs, outputs, container, operator_name=attrs['name'], slope=[params.PReLU.alpha])
+        apply_prelu(scope, inputs[0], outputs, container, operator_name=attrs['name'], slope=np.asarray([params.PReLU.alpha.floatValue]))
     elif activation_type == 'ELU':
         apply_elu(scope, inputs, outputs, container, operator_name=attrs['name'], alpha=params.ELU.alpha)
     elif activation_type == 'tanh':
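
For context, a minimal sketch (not from the repository) of why the PReLU slope change is needed: CoreML stores the per-channel slopes in a WeightParams protobuf message whose repeated `floatValue` field holds the actual numbers, so the old call passed a protobuf message where a numeric tensor is expected. The `FakeWeightParams` and `FakePReLUParams` classes below are hypothetical stand-ins for the CoreML proto types, used only to make the sketch self-contained.

import numpy as np

# Hypothetical stand-ins for CoreML's protobuf types: WeightParams has a
# repeated-float field named floatValue that holds the slope values.
class FakeWeightParams:
    def __init__(self, values):
        self.floatValue = list(values)

class FakePReLUParams:
    def __init__(self, alphas):
        self.alpha = FakeWeightParams(alphas)

params_PReLU = FakePReLUParams([0.25, 0.10, 0.30])

# Old behavior: a list containing the protobuf message itself, which is
# not numeric data and cannot serve as a slope tensor.
broken_slope = [params_PReLU.alpha]

# Fixed behavior: unpack the repeated floats and wrap them in a numpy
# array, matching the slope argument in the updated apply_prelu call.
slope = np.asarray([params_PReLU.alpha.floatValue])
print(slope.shape, slope.dtype)  # (1, 3) float64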