@@ -153,7 +153,7 @@ def run_tensorflow(self, sess, inputs):
     def to_onnx(self, tf_graph, opset=None, shape_override=None, input_names=None):
"""Convert graph to tensorflow."""
156
-        return process_tf_graph(tf_graph, continue_on_error=True, verbose=True, opset=opset,
+        return process_tf_graph(tf_graph, continue_on_error=False, verbose=True, opset=opset,
                                 target=Test.target, shape_override=shape_override,
                                 input_names=input_names, output_names=self.output_names)
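
For context on this hunk: with continue_on_error=False, unsupported ops now abort the conversion instead of producing a model that only fails later at inference time. Below is a minimal, hedged sketch of how such a conversion is typically driven outside the patch; the frozen-graph import, the make_model doc string, and the output names are illustrative assumptions, not part of this change.

import tensorflow as tf
from tf2onnx.tfonnx import process_tf_graph

def convert_frozen_graph(frozen_graph_def, output_names, opset=None):
    """Convert a frozen TensorFlow GraphDef to an ONNX ModelProto (sketch)."""
    # Import the GraphDef into a fresh tf.Graph (TF 1.x style, as in this script).
    with tf.Graph().as_default() as tf_graph:
        tf.import_graph_def(frozen_graph_def, name="")
    # Fail fast on unsupported ops rather than emitting a broken model.
    g = process_tf_graph(tf_graph, continue_on_error=False, opset=opset,
                         output_names=output_names)
    return g.make_model("converted from tensorflow")
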
@@ -186,7 +186,6 @@ def run_onnxruntime(self, name, model_proto, inputs):
         """Run test against msrt-next backend."""
         import onnxruntime as rt
         model_path = utils.save_onnx_model(TEMP_DIR, name, inputs, model_proto, include_test_data=True)
-        utils.save_onnx_model(TEMP_DIR, name, inputs, model_proto, include_test_data=False, as_text=True)
         print("\t\t" + model_path)
         m = rt.InferenceSession(model_path)
         results = m.run(self.output_names, inputs)
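
For the second hunk, the redundant text dump of the model (as_text=True) is dropped; only the binary model plus its test data is saved and fed to onnxruntime. A hedged sketch of that round trip, using the same calls that appear above (assuming utils here is tf2onnx.utils; the temp directory, model name, and input feed are placeholders):

import onnxruntime as rt
from tf2onnx import utils

TEMP_DIR = "/tmp/tf2onnx_tests"  # placeholder; the real script defines its own TEMP_DIR

def run_with_onnxruntime(name, model_proto, inputs, output_names):
    """Save the ONNX model with its test data, then execute it with onnxruntime (sketch)."""
    model_path = utils.save_onnx_model(TEMP_DIR, name, inputs, model_proto,
                                       include_test_data=True)
    sess = rt.InferenceSession(model_path)
    # inputs maps graph input names to numpy arrays,
    # e.g. {"input:0": np.zeros((1, 224, 224, 3), dtype=np.float32)}
    return sess.run(output_names, inputs)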