import time
import traceback
import zipfile
-import logging

import PIL.Image
import numpy as np
@@ -28,15 +27,12 @@
import yaml

import tf2onnx
-from tf2onnx import loader
-from tf2onnx import utils
-from tf2onnx import optimizer
+from tf2onnx import loader, logging, optimizer, utils
from tf2onnx.tfonnx import process_tf_graph

# pylint: disable=broad-except,logging-not-lazy,unused-argument,unnecessary-lambda

-logging.basicConfig(level=logging.INFO)
-log = logging.getLogger("tf2onnx")
+logger = logging.getLogger("run_pretrained")

TEMP_DIR = os.path.join(utils.get_temp_directory(), "run_pretrained")
PERFITER = 1000
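The hunks above replace the standard-library logging setup with tf2onnx's bundled logging helper, so the test driver and the converter share one configuration and the script gets its own named logger. A minimal sketch of the assumed usage, built only from calls visible in this diff (getLogger, basicConfig, get_verbosity_level); how the helper maps the -v count to a level is an assumption, not documented here:

    # Sketch: tf2onnx's logging wrapper mirrors the stdlib API closely enough that
    # getLogger/basicConfig keep working, while the level is derived from the
    # additive -v count parsed in get_args() (see the argparse hunk below).
    from tf2onnx import logging

    logger = logging.getLogger("run_pretrained")                 # module-level logger, as above
    logging.basicConfig(level=logging.get_verbosity_level(2))    # e.g. two -v flags; mapping assumed
    logger.warning("input dtype doesn't match tensorflow's")     # existing call sites stay unchanged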
@@ -157,7 +153,7 @@ def run_tensorflow(self, sess, inputs):

    def to_onnx(self, tf_graph, opset=None, extra_opset=None, shape_override=None, input_names=None):
        """Convert graph to onnx."""
-        return process_tf_graph(tf_graph, continue_on_error=False, verbose=True, opset=opset,
+        return process_tf_graph(tf_graph, continue_on_error=False, opset=opset,
                                extra_opset=extra_opset, target=Test.target, shape_override=shape_override,
                                input_names=input_names, output_names=self.output_names)
@@ -207,7 +203,7 @@ def create_onnx_file(name, model_proto, inputs, outdir):
        utils.save_protobuf(model_path, model_proto)
        print("\tcreated", model_path)

-    def run_test(self, name, backend="caffe2", debug=False, onnx_file=None, opset=None, extra_opset=None,
+    def run_test(self, name, backend="caffe2", onnx_file=None, opset=None, extra_opset=None,
                 perf=None, fold_const=None):
        """Run complete test against backend."""
        print(name)
@@ -222,18 +218,20 @@ def run_test(self, name, backend="caffe2", debug=False, onnx_file=None, opset=No
            dir_name = os.path.dirname(self.local)
        print("\tdownloaded", model_path)

-        inputs = list(self.input_names.keys())
+        input_names = list(self.input_names.keys())
        outputs = self.output_names
        if self.model_type in ["checkpoint"]:
-            graph_def, inputs, outputs = loader.from_checkpoint(model_path, inputs, outputs)
+            graph_def, input_names, outputs = loader.from_checkpoint(model_path, input_names, outputs)
        elif self.model_type in ["saved_model"]:
-            graph_def, inputs, outputs = loader.from_saved_model(model_path, inputs, outputs)
+            graph_def, input_names, outputs = loader.from_saved_model(model_path, input_names, outputs)
        else:
-            graph_def, inputs, outputs = loader.from_graphdef(model_path, inputs, outputs)
+            graph_def, input_names, outputs = loader.from_graphdef(model_path, input_names, outputs)

        # create the input data
        inputs = {}
        for k, v in self.input_names.items():
+            if k not in input_names:
+                continue
            if isinstance(v, six.text_type) and v.startswith("np."):
                inputs[k] = eval(v)  # pylint: disable=eval-used
            else:
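Renaming the loader results to input_names (and adding the `if k not in input_names: continue` guard) keeps that list from being shadowed by the `inputs` dict of feed values rebuilt just below, and skips any declared input that the loader pruned from the graph. A tiny illustration of the guard; the names and values are invented for the example:

    # Illustration only: feed values are kept only for inputs that survived loading.
    input_names = ["image:0"]                                  # names returned by the loader (hypothetical)
    declared = {"image:0": "np.zeros((1, 224, 224, 3))",       # entries from the yaml config (hypothetical)
                "is_training:0": False}
    inputs = {}
    for k, v in declared.items():
        if k not in input_names:   # e.g. a training-only placeholder removed from the graph
            continue
        inputs[k] = v
    # inputs == {"image:0": "np.zeros((1, 224, 224, 3))"}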
@@ -243,7 +241,7 @@ def run_test(self, name, backend="caffe2", debug=False, onnx_file=None, opset=No
                inputs[k] = v

        graph_def = tf2onnx.tfonnx.tf_optimize(inputs.keys(), self.output_names, graph_def, fold_const)
-        if debug:
+        if utils.is_debug_mode():
            utils.save_protobuf(os.path.join(TEMP_DIR, name + "_after_tf_optimize.pb"), graph_def)
        shape_override = {}
        g = tf.import_graph_def(graph_def, name='')
@@ -255,7 +253,7 @@ def run_test(self, name, backend="caffe2", debug=False, onnx_file=None, opset=No
                dtype = tf.as_dtype(t.dtype).name
                v = inputs[k]
                if dtype != v.dtype:
-                    log.warning("input dtype doesn't match tensorflow's")
+                    logger.warning("input dtype doesn't match tensorflow's")
                    inputs[k] = np.array(v, dtype=dtype)
        if self.force_input_shape:
            for k, v in inputs.items():
@@ -273,13 +271,13 @@ def run_test(self, name, backend="caffe2", debug=False, onnx_file=None, opset=No
            onnx_graph = self.to_onnx(sess.graph, opset=opset, extra_opset=extra_opset,
                                      shape_override=shape_override, input_names=inputs.keys())
            model_proto = onnx_graph.make_model("converted from tf2onnx")
-            new_model_proto = optimizer.optimize_graph(onnx_graph, debug=debug).make_model("optimized")
+            new_model_proto = optimizer.optimize_graph(onnx_graph).make_model("optimized")
            if new_model_proto:
                model_proto = new_model_proto
            else:
                print("\tNON-CRITICAL, optimizers are not applied successfully")
            print("\tto_onnx", "OK")
-            if debug:
+            if utils.is_debug_mode():
                onnx_graph.dump_graph()
            if onnx_file:
                self.create_onnx_file(name, model_proto, inputs, onnx_file)
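Instead of threading a debug flag through run_test, optimize_graph and the call sites above, the script now flips a process-wide switch once (utils.set_debug_mode(True) in main(), last hunk) and queries it with utils.is_debug_mode() wherever an intermediate artifact might be dumped. A rough sketch of the pattern, assuming the pair simply toggles a module-level flag inside tf2onnx.utils:

    from tf2onnx import utils

    utils.set_debug_mode(True)       # set once when --debug is passed on the command line
    # ... anywhere later, without passing debug= through every signature:
    if utils.is_debug_mode():
        print("dumping intermediate graphs and protobufs for inspection")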
@@ -312,10 +310,12 @@ def run_test(self, name, backend="caffe2", debug=False, onnx_file=None, opset=No
                print("\tResults: OK")
                return True
            except Exception as ex:
-                print("\tResults: ", ex)
+                tb = traceback.format_exc()
+                print("\tResults", ex, tb)

        except Exception as ex:
-            print("\trun_onnx", "FAIL", ex)
+            tb = traceback.format_exc()
+            print("\trun_onnx", "FAIL", ex, tb)

        return False
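Both failure paths now append traceback.format_exc() to the printed message, so a failing model leaves the full stack trace in the test log rather than only the exception text; format_exc() has to run inside the except block, while the exception is still being handled. A self-contained sketch of the pattern:

    import traceback

    try:
        raise ValueError("backend produced mismatching results")   # stand-in for a failing ONNX run
    except Exception as ex:                                         # pylint: disable=broad-except
        tb = traceback.format_exc()                                 # full stack trace as a string
        print("\trun_onnx", "FAIL", ex, tb)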
@@ -329,11 +329,11 @@ def get_args():
    parser.add_argument("--target", default="", help="target platform")
    parser.add_argument("--backend", default="onnxruntime",
                        choices=["caffe2", "onnxmsrtnext", "onnxruntime"], help="backend to use")
-    parser.add_argument("--verbose", help="verbose output", action="store_true")
    parser.add_argument("--opset", type=int, default=None, help="opset to use")
    parser.add_argument("--extra_opset", default=None,
                        help="extra opset with format like domain:version, e.g. com.microsoft:1")
-    parser.add_argument("--debug", help="debug vlog", action="store_true")
+    parser.add_argument("--verbose", "-v", help="verbose output, option is additive", action="count")
+    parser.add_argument("--debug", help="debug mode", action="store_true")
    parser.add_argument("--list", help="list tests", action="store_true")
    parser.add_argument("--onnx-file", help="create onnx file in directory")
    parser.add_argument("--perf", help="capture performance numbers")
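--verbose switches from a boolean to argparse's count action: repeating -v raises the verbosity, args.verbose is None when the flag is absent and otherwise the number of occurrences, and that count is what main() hands to logging.get_verbosity_level(). A self-contained sketch of the flag semantics:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", "-v", help="verbose output, option is additive", action="count")
    parser.add_argument("--debug", help="debug mode", action="store_true")

    print(parser.parse_args([]).verbose)            # None
    print(parser.parse_args(["-v"]).verbose)        # 1
    print(parser.parse_args(["-v", "-v"]).verbose)  # 2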
@@ -370,11 +370,11 @@ def tests_from_yaml(fname):


def main():
-    # suppress log info of tensorflow so that result of test can be seen much easier
-    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
-    tf.logging.set_verbosity(tf.logging.WARN)
-
    args = get_args()
+    logging.basicConfig(level=logging.get_verbosity_level(args.verbose))
+    if args.debug:
+        utils.set_debug_mode(True)
+
    Test.cache_dir = args.cache
    Test.target = args.target
    tests = tests_from_yaml(args.config)
@@ -394,14 +394,15 @@ def main():
            continue
        count += 1
        try:
-            ret = t.run_test(test, backend=args.backend, debug=args.debug, onnx_file=args.onnx_file,
+            ret = t.run_test(test, backend=args.backend, onnx_file=args.onnx_file,
                             opset=args.opset, extra_opset=args.extra_opset, perf=args.perf,
                             fold_const=args.fold_const)
        except Exception as ex:
            ret = None
-            print(ex)
+            tb = traceback.format_exc()
+            print(ex, tb)
        finally:
-            if not args.debug:
+            if not utils.is_debug_mode():
                utils.delete_directory(TEMP_DIR)
        if not ret:
            failed += 1