Skip to content

Commit db74464

Browse files
Srikanth MADIKERI and qindazhu
authored and committed
modified architecture of egs/babel_multilang/s5/local/chain2/run_tdnn.sh
1 parent 90bfb5c commit db74464

File tree

1 file changed

+5
-3
lines changed

1 file changed

+5
-3
lines changed

egs/babel_multilang/s5/local/chain2/run_tdnn.sh

Lines changed: 5 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -87,6 +87,7 @@ done
8787

8888
if [ "$speed_perturb" == "true" ]; then suffix=_sp; fi
8989
dir=${dir}${suffix}
90+
dir=exp/chain2_cleaned/tdnn_multi_sp_v7/
9091

9192
ivec_feat_suffix=${feat_suffix}
9293
if $use_pitch; then feat_suffix=${feat_suffix}_pitch ; fi
@@ -222,6 +223,7 @@ if [ $stage -le 9 ]; then
222223
steps/align_fmllr_lats.sh --nj $nj --cmd "$train_cmd" ${lores_train_data_dir} \
223224
$langdir $gmm_dir $lat_dir
224225
rm $lat_dir/fsts.*.gz # save space
226+
exit
225227
done
226228
fi
227229

@@ -248,7 +250,7 @@ fi
248250
if [ $stage -le 11 ]; then
249251
echo "$0: creating multilingual neural net configs using the xconfig parser";
250252
if [ -z $bnf_dim ]; then
251-
bnf_dim=1024
253+
bnf_dim=80
252254
fi
253255
mkdir -p $dir/configs
254256
ivector_node_xconfig=""
@@ -268,13 +270,13 @@ if [ $stage -le 11 ]; then
268270
# as the layer immediately preceding the fixed-affine-layer to enable
269271
# the use of short notation for the descriptor
270272
# the first splicing is moved before the lda layer, so no splicing here
271-
relu-renorm-layer name=tdnn1 input=Append(input@-2,input@-1,input,input@1,input@2$ivector_to_append) dim=450
273+
relu-batchnorm-layer name=tdnn1 input=Append(input@-2,input@-1,input,input@1,input@2$ivector_to_append) dim=450
272274
relu-batchnorm-layer name=tdnn2 input=Append(-1,0,1,2) dim=450
273275
relu-batchnorm-layer name=tdnn4 input=Append(-3,0,3) dim=450
274276
relu-batchnorm-layer name=tdnn5 input=Append(-3,0,3) dim=450
275277
relu-batchnorm-layer name=tdnn6 input=Append(-3,0,3) dim=450
276278
relu-batchnorm-layer name=tdnn7 input=Append(-6,-3,0) dim=450
277-
relu-renorm-layer name=tdnn_bn dim=$bnf_dim
279+
relu-batchnorm-layer name=tdnn_bn dim=$bnf_dim
278280
# adding the layers for diffrent language's output
279281
# dummy output node
280282
output-layer name=output dim=$num_targets max-change=1.5

0 commit comments

Comments (0)