Skip to content
This repository was archived by the owner on Feb 3, 2025. It is now read-only.

Commit ba4865d

Browse files
author
DEKHTIARJonathan
committed
[TF-TRT] Adding TF-Hub Electra
1 parent 7ba8937 commit ba4865d

File tree

9 files changed

+1558
-0
lines changed

9 files changed

+1558
-0
lines changed
Lines changed: 118 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,118 @@
#!/bin/bash
#
# Launches infer.py for a TF-Hub Electra model under TF-TRT.
# Parses --key=value style flags, validates the data/model/tokenizer
# directories, and forwards any unrecognized flags to infer.py verbatim.

nvidia-smi

# Absolute directory containing this script.
BASE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"

# Install packages needed by the rest of the script.
pip install tensorflow_text

# Runtime Parameters
MODEL_NAME=""
OUTPUT_TENSOR_NAMES=""

# Default Argument Values
BATCH_SIZE=32
SEQ_LEN=128
VOCAB_SIZE=30522
TOTAL_MAX_SAMPLES=50000

BYPASS_ARGUMENTS=""

# Loop through arguments and process them.
# NOTE: the original `shift` calls inside this loop were dead code — the
# `for arg in "$@"` list is expanded once before the loop body runs, so
# shifting the positional parameters had no effect. They have been removed.
for arg in "$@"
do
    case "$arg" in
        --model_name=*)
            MODEL_NAME="${arg#*=}"
            ;;
        --output_tensors_name=*)
            OUTPUT_TENSOR_NAMES="${arg#*=}"
            ;;
        --batch_size=*)
            BATCH_SIZE="${arg#*=}"
            ;;
        --sequence_length=*)
            SEQ_LEN="${arg#*=}"
            ;;
        --vocab_size=*)
            VOCAB_SIZE="${arg#*=}"
            ;;
        --data_dir=*)
            DATA_DIR="${arg#*=}"
            ;;
        --input_saved_model_dir=*)
            MODEL_DIR="${arg#*=}"
            ;;
        --tokenizer_dir=*)
            TOKENIZER_DIR="${arg#*=}"
            ;;
        --total_max_samples=*)
            TOTAL_MAX_SAMPLES="${arg#*=}"
            ;;
        *)
            # Unrecognized flag: forward it to infer.py untouched.
            BYPASS_ARGUMENTS="${BYPASS_ARGUMENTS} ${arg}"
            ;;
    esac
done

# Trimming front and back whitespaces
BYPASS_ARGUMENTS=$(echo ${BYPASS_ARGUMENTS} | tr -s " ")

echo -e "\n********************************************************************"
echo "[*] MODEL_NAME: ${MODEL_NAME}"
echo ""
echo "[*] SEQ_LEN: ${SEQ_LEN}"
echo "[*] VOCAB_SIZE: ${VOCAB_SIZE}"
echo ""
echo "[*] DATA_DIR: ${DATA_DIR}"
echo "[*] MODEL_DIR: ${MODEL_DIR}"
echo "[*] TOKENIZER_DIR: ${TOKENIZER_DIR}"
echo ""
echo "[*] BATCH_SIZE: ${BATCH_SIZE}"
echo "[*] OUTPUT_TENSOR_NAMES: ${OUTPUT_TENSOR_NAMES}"
echo "[*] TOTAL_MAX_SAMPLES: ${TOTAL_MAX_SAMPLES}"
echo ""
echo "[*] BYPASS_ARGUMENTS: ${BYPASS_ARGUMENTS}"

echo -e "********************************************************************\n"

# The saved model is expected under <input_saved_model_dir>/<model_name>.
MODEL_DIR="${MODEL_DIR}/${MODEL_NAME}"

if [[ ! -d "${DATA_DIR}" ]]; then
    echo "ERROR: \`--data_dir=/path/to/directory\` does not exist. [Received: \`${DATA_DIR}\`]"
    exit 1
fi

if [[ ! -d "${MODEL_DIR}" ]]; then
    echo "ERROR: \`--input_saved_model_dir=/path/to/directory\` does not exist. [Received: \`${MODEL_DIR}\`]"
    exit 1
fi

if [[ ! -d "${TOKENIZER_DIR}" ]]; then
    echo "ERROR: \`--tokenizer_dir=/path/to/directory\` does not exist. [Received: \`${TOKENIZER_DIR}\`]"
    exit 1
fi

set -x

# BUGFIX: the original had no space before the line-continuation backslash
# after `--tokenizer_dir=${TOKENIZER_DIR}`; with no leading whitespace on the
# following line, backslash-newline fused it with `--batch_size=...` into a
# single malformed argument. A space is now guaranteed before every `\`.
# NOTE: BYPASS_ARGUMENTS is intentionally UNQUOTED so it word-splits into
# separate arguments for infer.py.
python "${BASE_DIR}/infer.py" \
    --data_dir="${DATA_DIR}" \
    --calib_data_dir="${DATA_DIR}" \
    --sequence_length="${SEQ_LEN}" \
    --vocab_size="${VOCAB_SIZE}" \
    --input_saved_model_dir="${MODEL_DIR}" \
    --tokenizer_dir="${TOKENIZER_DIR}" \
    --batch_size="${BATCH_SIZE}" \
    --output_tensors_name="${OUTPUT_TENSOR_NAMES}" \
    --total_max_samples="${TOTAL_MAX_SAMPLES}" \
    ${BYPASS_ARGUMENTS}

0 commit comments

Comments
 (0)