@@ -87,7 +87,7 @@ def create_onnx_random_uniform_op(g, tmax, tmin, ru_op, output, to_delete):
             # to make that work for onnx we just need to remove the shape op.
             new_node = g.make_node("RandomUniformLike", inputs=[shape_node.input[0]], name=op_name,
                                    attr={"low": tmin, "high": tmax, "dtype": dtype},
-                                   shapes=shape, dtypes=[dtype])
+                                   shapes=[shape], dtypes=[dtype])
         else:
             # if the shape is calculated we need to create a tensor so RandomUniformLike
             # can take the shape from there. Pre opset9 this is somewhat hacky because there is
@@ -99,11 +99,11 @@ def create_onnx_random_uniform_op(g, tmax, tmin, ru_op, output, to_delete):
             # create a fill op with the shape of the value of the input tensor
             zero = g.make_const(utils.make_name("zero"), np.zeros((), dtype=np.float32))
             fill_node = g.make_node("Fill", inputs=[shape_node.output[0], zero.name],
-                                    shapes=shape, dtypes=[dtype])
+                                    shapes=[shape], dtypes=[dtype])
             func, _ = handler.tf_op.find_effective_op("Fill")
             func(g, fill_node)
             # and use RandomUniformLike to create the random tensor
             new_node = g.make_node("RandomUniformLike", inputs=[fill_node.output[0]], name=op_name,
                                    attr={"low": tmin, "high": tmax, "dtype": dtype},
-                                   shapes=shape, dtypes=[dtype])
+                                   shapes=[shape], dtypes=[dtype])
     return new_node
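The change is the same in all three call sites: `make_node`'s `shapes` and `dtypes` arguments are lists with one entry per node output, so a single output shape has to be wrapped as `[shape]`; passing the bare `shape` either trips a length check or records a wrong per-output shape, depending on the tf2onnx version. A minimal sketch of that per-output convention, using a toy `TinyGraph` stand-in rather than the real tf2onnx `Graph` class:

```python
# Simplified illustration of the per-output shapes/dtypes convention;
# TinyGraph is a toy stand-in, not the real tf2onnx Graph API.
from typing import List, Optional


class TinyGraph:
    def __init__(self):
        self._shapes = {}   # output name -> shape (list of ints)
        self._dtypes = {}   # output name -> dtype enum

    def make_node(self, op_type: str, inputs: List[str], output_count: int = 1,
                  shapes: Optional[List[List[int]]] = None,
                  dtypes: Optional[List[int]] = None):
        outputs = [f"{op_type}:{i}" for i in range(output_count)]
        # shapes[i] / dtypes[i] belong to outputs[i] -- one entry per output.
        if shapes:
            for i in range(output_count):
                self._shapes[outputs[i]] = shapes[i]
        if dtypes:
            for i in range(output_count):
                self._dtypes[outputs[i]] = dtypes[i]
        return outputs


shape = [2, 3]  # desired shape of the single output

# Correct: wrap the single output shape in a list.
g1 = TinyGraph()
g1.make_node("RandomUniformLike", ["X"], shapes=[shape], dtypes=[1])
assert g1._shapes["RandomUniformLike:0"] == [2, 3]

# Buggy pattern from before the fix: shapes=shape records shape[0]
# (the int 2) as the output shape instead of [2, 3].
g2 = TinyGraph()
g2.make_node("RandomUniformLike", ["X"], shapes=shape, dtypes=[1])
assert g2._shapes["RandomUniformLike:0"] == 2
```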