Commit 3c09622

Fixed bug in tflite relu6 (#1280)
Signed-off-by: Tom Wildenhain <[email protected]>
1 parent 72fb208 · commit 3c09622

1 file changed: 5 additions & 2 deletions

tf2onnx/tflite_handlers/tfl_math.py

Lines changed: 5 additions & 2 deletions
@@ -22,8 +22,11 @@ def separate_fused_activation_function(ctx, node):
     if activation_fn == b'RELU':
         ctx.insert_new_node_on_output("Relu", node.output[0])
     elif activation_fn == b'RELU6':
-        new_node = ctx.insert_new_node_on_output("Relu6", node.output[0])
-        new_node.skip_conversion = False
+        # This is a TF op. We will convert it on the 2nd pass.
+        shape = ctx.get_shape(node.output[0])
+        dtype = ctx.get_dtype(node.output[0])
+        new_node = ctx.make_node("Relu6", [node.output[0]], skip_conversion=False, shapes=[shape], dtypes=[dtype])
+        ctx.insert_node_on_output(new_node, node.output[0])
     elif activation_fn == b'TANH':
         ctx.insert_new_node_on_output("Tanh", node.output[0])
     else:
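
For reference, here is a minimal Python sketch of what the patched RELU6 branch does after this commit. It restates only what the diff above shows, using the tf2onnx Graph helpers that appear in it (get_shape, get_dtype, make_node, insert_node_on_output); the wrapper function name and the comments are added for illustration and are not part of the committed source.

def split_out_relu6(ctx, node):
    """Sketch: materialize the fused RELU6 activation of `node` as its own node.

    Assumes `ctx` is a tf2onnx Graph and `node` is the TFLite op whose fused
    activation function is b'RELU6'.
    """
    # Relu6 is a TF op (ONNX has no Relu6), so the new node must remain
    # convertible (skip_conversion=False) and be handled on the 2nd pass.
    shape = ctx.get_shape(node.output[0])   # carry the output shape ...
    dtype = ctx.get_dtype(node.output[0])   # ... and dtype over to the new node
    new_node = ctx.make_node(
        "Relu6", [node.output[0]],
        skip_conversion=False,
        shapes=[shape], dtypes=[dtype],
    )
    # Splice the new node onto the original node's output edge.
    ctx.insert_node_on_output(new_node, node.output[0])
    return new_node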
