Skip to content

Commit 43cdfec

Browse files
committed
make squeeze op more robust, turn down logging for some warnings
1 parent 9fcb069 commit 43cdfec

File tree

2 files changed

+8
-6
lines changed

2 files changed

+8
-6
lines changed

tf2onnx/graph.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -90,7 +90,7 @@ def attr_onnx(self):
9090
"""Return onnx valid attributes"""
9191
schema = get_schema(self.type, self.graph.opset, self.domain)
9292
if schema is None and not (self.is_const() or self.is_graph_input()):
93-
log.warning("Node %s uses non-stardard onnx op <%s, %s>, skip attribute check", self.name, self.domain,
93+
log.debug("Node %s uses non-stardard onnx op <%s, %s>, skip attribute check", self.name, self.domain,
9494
self.type)
9595

9696
onnx_attrs = {}

tf2onnx/tfonnx.py

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -279,12 +279,15 @@ def squeeze_op(ctx, node, name, args):
279279
del node.attr["axis"]
280280

281281
shape = ctx.get_shape(node.input[0])
282-
utils.make_sure(shape is not None, "squeeze input shape cannot be None")
283-
shape_len = len(shape)
284282
if axis and axis.ints:
285283
axis = axis.ints
286-
axis = [a + shape_len if a < 0 else a for a in axis]
284+
neg_axis = any([val < 0 for val in axis])
285+
if neg_axis:
286+
utils.make_sure(shape is not None, "squeeze input shape cannot be None")
287+
shape_len = len(shape)
288+
axis = [a + shape_len if a < 0 else a for a in axis]
287289
else:
290+
utils.make_sure(shape is not None, "squeeze input shape cannot be None")
288291
axis = [i for i, j in enumerate(shape) if j == 1]
289292
node.set_attr("axes", axis)
290293

@@ -460,7 +463,7 @@ def add_padding(ctx, node, kernel_shape, strides, dilations=None, spatial=2):
460463
output_shape = spatial_map(output_shape, NHWC_TO_NCHW)
461464
# calculate pads
462465
if any(input_shape[i + 2] == -1 for i in range(spatial)):
463-
log.warning("node %s has unknown dim %s for pads calculation, fallback to auto_pad" % (
466+
log.debug("node %s has unknown dim %s for pads calculation, fallback to auto_pad" % (
464467
node.name, str(input_shape)))
465468
node.set_attr("auto_pad", "SAME_UPPER")
466469
else:
@@ -1217,7 +1220,6 @@ def minmax_op(ctx, node, name, args):
12171220
# get a tensor with zeros (since there is no Fill op as of opset8)
12181221
sub_node = ctx.make_node("Sub", [has_correct_shape, has_correct_shape],
12191222
op_name_scope=input_node.name)
1220-
12211223
# use add as 'broadcast' op
12221224
add_node = ctx.make_node("Add", [input_node.output[0], sub_node.output[0]],
12231225
op_name_scope=input_node.name)

0 commit comments

Comments
 (0)