3 files changed: +89, -3 lines

File 1 of 3 — the protostr-generation shell script under trainer_config_helpers/tests/configs:

@@ -11,12 +11,18 @@ test_sequence_pooling test_lstmemory_layer test_grumemory_layer
 last_first_seq test_expand_layer test_ntm_layers test_hsigmoid
 img_layers img_trans_layers util_layers simple_rnn_layers unused_layers test_cost_layers
 test_rnn_group shared_fc shared_lstm test_cost_layers_with_weight
-test_spp_layer test_bilinear_interp test_maxout test_bi_grumemory math_ops
-test_split_datasource)
+test_spp_layer test_bilinear_interp test_maxout test_bi_grumemory math_ops)
+
+whole_configs=(test_split_datasource)

 for conf in ${configs[*]}
 do
     echo "Generating " $conf
     python -m paddle.utils.dump_config $conf.py > $protostr/$conf.protostr.unitest
 done
+
+for conf in ${whole_configs[*]}
+do
+    echo "Generating " $conf
+    python -m paddle.utils.dump_config $conf.py "" --whole > $protostr/$conf.protostr.unitest
+done
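Functionally, the split means configs listed in configs are still dumped as the bare network definition, while anything in whole_configs is dumped as the complete trainer configuration. A minimal sketch of the difference, assuming parse_config is importable from paddle.trainer.config_parser (the import dump_config.py itself uses); the empty second argument mirrors the shell invocation, and the file names are taken from the script:

    # Sketch only: what the two loops above print. Python 2, matching the repo.
    from paddle.trainer.config_parser import parse_config

    # Default mode ("configs" loop): only the network definition.
    conf = parse_config('test_fc.py', '')
    print conf.model_config    # model_config section only

    # --whole mode ("whole_configs" loop): the entire TrainerConfig,
    # including data_config, opt_config and test_data_config.
    whole = parse_config('test_split_datasource.py', '')
    print whole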
File 2 of 3 — the new expected protostr for test_split_datasource (a complete TrainerConfig dump, not just the model_config):

@@ -0,0 +1,72 @@
+model_config {
+  type: "nn"
+  layers {
+    name: "a"
+    type: "data"
+    size: 10
+    active_type: ""
+  }
+  input_layer_names: "a"
+  output_layer_names: "a"
+  sub_models {
+    name: "root"
+    layer_names: "a"
+    input_layer_names: "a"
+    output_layer_names: "a"
+    is_recurrent_layer_group: false
+  }
+}
+data_config {
+  type: "py2"
+  files: "train.list"
+  async_load_data: true
+  for_test: false
+  load_data_module: "a"
+  load_data_object: "c"
+  load_data_args: ""
+  data_ratio: 1
+  is_main_data: true
+  usage_ratio: 1.0
+}
+opt_config {
+  batch_size: 1000
+  algorithm: "sgd"
+  learning_rate: 0.001
+  learning_rate_decay_a: 0.0
+  learning_rate_decay_b: 0.0
+  l1weight: 0.1
+  l2weight: 0.0
+  c1: 0.0001
+  backoff: 0.5
+  owlqn_steps: 10
+  max_backoff: 5
+  l2weight_zero_iter: 0
+  average_window: 0
+  learning_method: "momentum"
+  ada_epsilon: 1e-06
+  do_average_in_cpu: false
+  ada_rou: 0.95
+  learning_rate_schedule: "poly"
+  delta_add_rate: 1.0
+  shrink_parameter_value: 0
+  adam_beta1: 0.9
+  adam_beta2: 0.999
+  adam_epsilon: 1e-08
+  learning_rate_args: ""
+  async_lagged_grad_discard_ratio: 1.5
+}
+test_data_config {
+  type: "py2"
+  files: "test.list"
+  async_load_data: true
+  for_test: true
+  load_data_module: "b"
+  load_data_object: "d"
+  load_data_args: ""
+  data_ratio: 1
+  is_main_data: true
+  usage_ratio: 1.0
+}
+save_dir: "./output/model"
+start_pass: 0
+
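One way a generated .protostr.unitest file can be checked against expected text like the above is to parse both back into messages and compare them semantically rather than as raw text. A sketch, assuming the generated TrainerConfig_pb2 module is importable from paddle.proto (the diff below only shows it is in scope as TrainerConfig_pb2); the file names are illustrative, not the test suite's actual paths:

    # Sketch: semantic comparison of protostr files.
    from google.protobuf import text_format
    from paddle.proto import TrainerConfig_pb2

    def load_protostr(path):
        # Parse proto text format back into a TrainerConfig message.
        conf = TrainerConfig_pb2.TrainerConfig()
        with open(path) as f:
            text_format.Merge(f.read(), conf)
        return conf

    expected = load_protostr('test_split_datasource.protostr')
    generated = load_protostr('test_split_datasource.protostr.unitest')
    assert expected == generated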
File 3 of 3 — the dump_config module invoked above (paddle.utils.dump_config):

@@ -19,13 +19,21 @@
 __all__ = []

 if __name__ == '__main__':
+    whole_conf = False
     if len(sys.argv) == 2:
         conf = parse_config(sys.argv[1], '')
     elif len(sys.argv) == 3:
         conf = parse_config(sys.argv[1], sys.argv[2])
+    elif len(sys.argv) == 4:
+        conf = parse_config(sys.argv[1], sys.argv[2])
+        if sys.argv[3] == '--whole':
+            whole_conf = True
     else:
         raise RuntimeError()

     assert isinstance(conf, TrainerConfig_pb2.TrainerConfig)

-    print conf.model_config
+    if whole_conf:
+        print conf
+    else:
+        print conf.model_config
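Note that the new branch only recognizes --whole as the fourth argument (sys.argv[3]), which is why the shell loop above passes an empty config-args string before the flag. For illustration only, a hypothetical argparse equivalent of the same calling convention (dump_config itself sticks to plain sys.argv, as shown):

    # Hypothetical sketch, not part of the change: the same argv contract
    # expressed with argparse to make the positional arguments explicit.
    import argparse

    parser = argparse.ArgumentParser(
        description='Dump a trainer config as protobuf text.')
    parser.add_argument('config', help="config file, e.g. 'test_fc.py'")
    parser.add_argument('config_args', nargs='?', default='',
                        help="extra args for parse_config; '' when unused")
    parser.add_argument('--whole', action='store_true',
                        help='print the whole TrainerConfig instead of '
                             'just model_config')
    args = parser.parse_args()
    # parse_config(args.config, args.config_args) would then be printed
    # whole or as model_config depending on args.whole.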