This repository was archived by the owner on Feb 3, 2025. It is now read-only.

Commit 653ea0a: Refactoring of folder - Multiple Bug Fixes
1 parent: 3265a3f

File tree: 75 files changed, +734 -133 lines changed

Note: large commits have some content hidden by default, so only a subset of the 75 changed files is shown below.

.gitignore

Lines changed: 3 additions & 0 deletions
@@ -138,3 +138,6 @@ variables.data*
 
 # Local files
 /*.txt
+
+# Top Level Script Files
+/*.sh

tftrt/examples/benchmark_args.py

Lines changed: 9 additions & 7 deletions
@@ -140,13 +140,6 @@ def __init__(self):
             help="If set to True, the benchmark will use XLA JIT Compilation."
         )
 
-        self._add_bool_argument(
-            name="skip_accuracy_testing",
-            default=False,
-            required=False,
-            help="If set to True, accuracy calculation will be skipped."
-        )
-
         self._add_bool_argument(
             name="use_synthetic_data",
             default=False,

@@ -187,6 +180,15 @@ def __init__(self):
             help="Minimum number of TensorFlow ops in a TRT engine."
         )
 
+        self._parser.add_argument(
+            "--num_build_batches",
+            type=int,
+            default=1,
+            help="How many iterations(batches) to use to build the TF-TRT "
+            "engines. If not supplied, only one batch will be used. This "
+            "parameter has only an effect if `--optimize_offline=True`"
+        )
+
         self._parser.add_argument(
            "--num_calib_batches",
            type=int,
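
Note: for context, a minimal, self-contained sketch of how an integer flag like the new --num_build_batches is declared and parsed with argparse. This is not the repository's own argument-parser class, only the standard-library pattern it builds on; everything outside the flag name, type, and default is illustrative.

    import argparse

    # Hypothetical stand-alone parser; flag name, type and default mirror
    # the hunk above, the rest is for illustration only.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--num_build_batches",
        type=int,
        default=1,
        help="Number of batches used to build the TF-TRT engines."
    )
    args = parser.parse_args(["--num_build_batches", "4"])
    print(args.num_build_batches)  # -> 4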

tftrt/examples/benchmark_runner.py

Lines changed: 12 additions & 6 deletions
@@ -169,7 +169,11 @@ def get_trt_precision(precision):
         print("\n[*] TF-TRT Converter Parameters:")
         print_dict(trt_converter_params)
 
-        converter = trt.TrtGraphConverterV2(**trt_converter_params)
+        try:
+            converter = trt.TrtGraphConverterV2(**trt_converter_params)
+        except TypeError:
+            del trt_converter_params["enable_sparse_compute"]
+            converter = trt.TrtGraphConverterV2(**trt_converter_params)
 
         def engine_build_input_fn(num_batches, model_phase):
             dataset, _ = self.get_dataset_batches()
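
Note: the try/except above is a backward-compatibility guard. Older TF-TRT builds reject the enable_sparse_compute keyword with a TypeError, so the parameter is dropped and the converter is constructed again. A minimal sketch of the same pattern, with a hypothetical constructor standing in for trt.TrtGraphConverterV2:

    def create_converter(converter_cls, **params):
        """Try the newer keyword first; retry without it on older APIs."""
        try:
            return converter_cls(**params)
        except TypeError:
            # Assumed older API: the keyword is not recognised, drop it.
            params.pop("enable_sparse_compute", None)
            return converter_cls(**params)
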
@@ -218,14 +222,14 @@ def engine_build_input_fn(num_batches, model_phase):
         except AttributeError:
             pass
 
-        if strtobool(os.environ.get("TF_TRT_BENCHMARK_QUIT_AFTER_SUMMARY",
-                                    "0")):
+        if strtobool(os.environ.get("TF_TRT_BENCHMARK_EARLY_QUIT", "0")):
             sys.exit(0)
 
-        if self._args.optimize_offline or self._args.use_dynamic_shape:
+        if self._args.optimize_offline:
 
             offline_opt_input_fn = lambda: engine_build_input_fn(
-                num_batches=1, model_phase="Building"
+                num_batches=self._args.num_build_batches,
+                model_phase="Building"
             )
 
             with timed_section("Building TensorRT engines"):
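
Note: two behavioural changes in this hunk: the early-exit switch is now read from the TF_TRT_BENCHMARK_EARLY_QUIT environment variable, and the offline engine build consumes --num_build_batches batches instead of a hard-coded single batch. A minimal sketch of the environment-variable toggle, assuming nothing beyond the standard library:

    import os
    from distutils.util import strtobool

    # Any value strtobool() treats as true ("1", "true", "yes", ...) makes
    # the benchmark stop right after printing its summary.
    if strtobool(os.environ.get("TF_TRT_BENCHMARK_EARLY_QUIT", "0")):
        raise SystemExit(0)
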
@@ -248,7 +252,9 @@ def engine_build_input_fn(num_batches, model_phase):
             del converter
             del graph_func
             graph_func = load_model_from_disk(
-                self._args.output_saved_model_dir
+                self._args.output_saved_model_dir,
+                tags=self._args.model_tag.split(","),
+                signature_key=self._args.input_signature_key
             )
 
             if isinstance(graph_func.structured_outputs, (tuple, list)):
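
Note: load_model_from_disk is a repository helper; the extra arguments correspond roughly to the tag and signature selection exposed by the public SavedModel API. A hedged sketch of that mapping, with a placeholder path and keys:

    import tensorflow as tf

    tags = "serve".split(",")            # e.g. from --model_tag="serve"
    signature_key = "serving_default"    # e.g. from --input_signature_key
    loaded = tf.saved_model.load("/tmp/output_saved_model", tags=tags)
    graph_func = loaded.signatures[signature_key]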

tftrt/examples/image_classification/scripts/base_script.sh renamed to tftrt/examples/image_classification/base_run_inference.sh

Lines changed: 3 additions & 3 deletions
@@ -40,7 +40,7 @@ do
         shift # Remove --output_tensors_name= from processing
         ;;
         --use_xla_auto_jit)
-        TF_AUTO_JIT_XLA_FLAG="TF_XLA_FLAGS=--tf_xla_auto_jit=2"
+        TF_AUTO_JIT_XLA_FLAG="TF_XLA_FLAGS=\"--tf_xla_auto_jit=2 --tf_xla_cpu_global_jit\""
         shift # Remove --use_xla_auto_jit from processing
         ;;
         *)

@@ -54,7 +54,7 @@ done
 INPUT_SIZE=224
 PREPROCESS_METHOD="vgg"
 NUM_CLASSES=1001
-MAX_SAMPLES=49920
+MAX_SAMPLES=50000
 OUTPUT_TENSORS_NAME="logits"
 
 case ${MODEL_NAME} in

@@ -153,7 +153,7 @@ fi
 
 # %%%%%%%%%%%%%%%%%%%%%%% ARGUMENT VALIDATION %%%%%%%%%%%%%%%%%%%%%%% #
 
-BENCH_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." >/dev/null 2>&1 && pwd )"
+BENCH_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 cd ${BENCH_DIR}
 
 # Execute the example
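
Note: the --use_xla_auto_jit branch now also enables the CPU global JIT and quotes the value, and MAX_SAMPLES is raised to 50000, the full size of the ImageNet validation set. The quoted flags end up in the TF_XLA_FLAGS environment variable that the Python benchmark reads; a small illustrative sketch of the equivalent setting from Python (set before TensorFlow is imported):

    import os

    # Equivalent of what the shell wrapper exports for the benchmark run.
    os.environ["TF_XLA_FLAGS"] = "--tf_xla_auto_jit=2 --tf_xla_cpu_global_jit"

    import tensorflow as tf  # import after the variable is set so XLA sees it
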
Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
+
+MetaGraphDef with tag-set: 'serve' contains the following SignatureDefs:
+
+signature_def['serving_default']:
+  The given SavedModel SignatureDef contains the following input(s):
+    inputs['input'] tensor_info:
+        dtype: DT_FLOAT
+        shape: (-1, 299, 299, 3)
+        name: input:0
+  The given SavedModel SignatureDef contains the following output(s):
+    outputs['logits'] tensor_info:
+        dtype: DT_FLOAT
+        shape: (-1, 1001)
+        name: logits:0
+  Method name is: tensorflow/serving/predict
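
Note: this is the signature listing that TensorFlow's saved_model_cli produces for one of the image-classification models (299x299 input, 1001-class logits). A hedged sketch of calling that signature from Python, with a hypothetical model path:

    import tensorflow as tf

    loaded = tf.saved_model.load("/models/inception_v3")  # hypothetical path
    infer = loaded.signatures["serving_default"]
    images = tf.zeros([1, 299, 299, 3], dtype=tf.float32)  # dummy batch
    logits = infer(input=images)["logits"]
    print(logits.shape)  # (1, 1001)
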
Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+BASE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )/../.."
+
+bash ${BASE_DIR}/base_run_inference.sh --model_name="inception_v3" ${@}
Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
+
+MetaGraphDef with tag-set: 'serve' contains the following SignatureDefs:
+
+signature_def['serving_default']:
+  The given SavedModel SignatureDef contains the following input(s):
+    inputs['input'] tensor_info:
+        dtype: DT_FLOAT
+        shape: (-1, 299, 299, 3)
+        name: input:0
+  The given SavedModel SignatureDef contains the following output(s):
+    outputs['logits'] tensor_info:
+        dtype: DT_FLOAT
+        shape: (-1, 1001)
+        name: logits:0
+  Method name is: tensorflow/serving/predict
Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+BASE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )/../.."
+
+bash ${BASE_DIR}/base_run_inference.sh --model_name="inception_v4" ${@}
Lines changed: 15 additions & 0 deletions
@@ -0,0 +1,15 @@
+
+MetaGraphDef with tag-set: 'serve' contains the following SignatureDefs:
+
+signature_def['serving_default']:
+  The given SavedModel SignatureDef contains the following input(s):
+    inputs['input'] tensor_info:
+        dtype: DT_FLOAT
+        shape: (-1, 224, 224, 3)
+        name: input:0
+  The given SavedModel SignatureDef contains the following output(s):
+    outputs['logits'] tensor_info:
+        dtype: DT_FLOAT
+        shape: (-1, 1001)
+        name: logits:0
+  Method name is: tensorflow/serving/predict
Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+BASE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )/../.."
+
+bash ${BASE_DIR}/base_run_inference.sh --model_name="mobilenet_v1" ${@}
