
Commit 953a635

yolov2: update yolov2 train and solver prototxt

Signed-off-by: Huaqi Fang <[email protected]>

Parent: 8e2084c

2 files changed: +32 −31 lines

Lines changed: 9 additions & 8 deletions

@@ -1,16 +1,17 @@
 net: "yolo_voc_trainval.prototxt"
 test_iter: 1238
-test_interval: 5000000
+test_interval: 50000
 test_initialization: false
-display: 100
-average_loss: 100
+display: 10
+average_loss: 10
+iter_size: 10
 lr_policy: "step"
 base_lr: 0.00001
-gamma: 0.8
-stepsize: 10000
-max_iter: 100000
+gamma: 0.95
+stepsize: 200
+max_iter: 20000
 momentum: 0.9
 weight_decay: 0.0005
-snapshot: 5000
+snapshot: 500
 snapshot_prefix: "snapshot/"
-#type: "Adam"
+type: "Adam"

caffe_models/yolo_v2_voc/caffe_model/yolo_voc_trainval.prototxt

Lines changed: 23 additions & 23 deletions

@@ -28,7 +28,7 @@ layer {
     batch_size: 16
     backend: LMDB
     side: 13
-    prefetch: 3
+    #prefetch: 2
   }
 }
 layer {
@@ -83,7 +83,7 @@ layer {
   bottom: "conv1"
   top: "bn1"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -139,7 +139,7 @@ layer {
   bottom: "conv2"
   top: "bn2"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -195,7 +195,7 @@ layer {
   bottom: "conv3"
   top: "bn3"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
  }
 }
 layer {
@@ -238,7 +238,7 @@ layer {
   bottom: "conv4"
   top: "bn4"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -282,7 +282,7 @@ layer {
   bottom: "conv5"
   top: "bn5"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -338,7 +338,7 @@ layer {
   bottom: "conv6"
   top: "bn6"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -381,7 +381,7 @@ layer {
   bottom: "conv7"
   top: "bn7"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -425,7 +425,7 @@ layer {
   bottom: "conv8"
   top: "bn8"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -481,7 +481,7 @@ layer {
   bottom: "conv9"
   top: "bn9"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -524,7 +524,7 @@ layer {
   bottom: "conv10"
   top: "bn10"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -568,7 +568,7 @@ layer {
   bottom: "conv11"
   top: "bn11"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -611,7 +611,7 @@ layer {
   bottom: "conv12"
   top: "bn12"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -655,7 +655,7 @@ layer {
   bottom: "conv13"
   top: "bn13"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -711,7 +711,7 @@ layer {
   bottom: "conv14"
   top: "bn14"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -754,7 +754,7 @@ layer {
   bottom: "conv15"
   top: "bn15"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -798,7 +798,7 @@ layer {
   bottom: "conv16"
   top: "bn16"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -841,7 +841,7 @@ layer {
   bottom: "conv17"
   top: "bn17"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -885,7 +885,7 @@ layer {
   bottom: "conv18"
   top: "bn18"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -929,7 +929,7 @@ layer {
   bottom: "conv19"
   top: "bn19"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -973,7 +973,7 @@ layer {
   bottom: "conv20"
   top: "bn20"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -1023,7 +1023,7 @@ layer {
   bottom: "conv22"
   top: "bn22"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
@@ -1084,7 +1084,7 @@ layer {
   bottom: "conv25"
   top: "bn25"
   batch_norm_param {
-    use_global_stats: false
+    #use_global_stats: false
   }
 }
 layer {
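Commenting out use_global_stats leaves the choice to the framework: in stock BVLC Caffe, a BatchNorm layer with the field unset uses minibatch statistics during the TRAIN phase and the accumulated moving averages during the TEST phase, whereas the old hard-coded false would have normalized test batches by their own statistics. Similarly, commenting out prefetch reverts the data layer to its built-in prefetch depth. A minimal NumPy sketch of the two normalization modes (illustrative only, not Caffe's implementation):

import numpy as np

# training=True mirrors use_global_stats: false (per-minibatch statistics);
# training=False mirrors use_global_stats: true (stored moving averages).
def batch_norm(x, moving_mean, moving_var, training, eps=1e-5):
    if training:
        mean, var = x.mean(axis=0), x.var(axis=0)  # minibatch stats
    else:
        mean, var = moving_mean, moving_var        # global (moving) stats
    return (x - mean) / np.sqrt(var + eps)

# Example: a batch of 16 four-channel activations.
x = np.random.randn(16, 4).astype(np.float32)
y_train = batch_norm(x, None, None, training=True)
y_test = batch_norm(x, np.zeros(4), np.ones(4), training=False)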
