Skip to content

Commit 4607d51

Browse files
committed
Add unittest for split datasource
* Fix #436
Parent commit: afba3a2 · This commit: 4607d51

File tree

3 files changed

+89
-3
lines changed

3 files changed

+89
-3
lines changed

python/paddle/trainer_config_helpers/tests/configs/generate_protostr.sh

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -11,12 +11,18 @@ test_sequence_pooling test_lstmemory_layer test_grumemory_layer
1111
last_first_seq test_expand_layer test_ntm_layers test_hsigmoid
1212
img_layers img_trans_layers util_layers simple_rnn_layers unused_layers test_cost_layers
1313
test_rnn_group shared_fc shared_lstm test_cost_layers_with_weight
14-
test_spp_layer test_bilinear_interp test_maxout test_bi_grumemory math_ops
15-
test_split_datasource)
14+
test_spp_layer test_bilinear_interp test_maxout test_bi_grumemory math_ops)
1615

16+
whole_configs=(test_split_datasource)
1717

1818
for conf in ${configs[*]}
1919
do
2020
echo "Generating " $conf
2121
python -m paddle.utils.dump_config $conf.py > $protostr/$conf.protostr.unitest
2222
done
23+
24+
for conf in ${whole_configs[*]}
25+
do
26+
echo "Generating " $conf
27+
python -m paddle.utils.dump_config $conf.py "" --whole > $protostr/$conf.protostr.unitest
28+
done
Lines changed: 72 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,72 @@
1+
model_config {
2+
type: "nn"
3+
layers {
4+
name: "a"
5+
type: "data"
6+
size: 10
7+
active_type: ""
8+
}
9+
input_layer_names: "a"
10+
output_layer_names: "a"
11+
sub_models {
12+
name: "root"
13+
layer_names: "a"
14+
input_layer_names: "a"
15+
output_layer_names: "a"
16+
is_recurrent_layer_group: false
17+
}
18+
}
19+
data_config {
20+
type: "py2"
21+
files: "train.list"
22+
async_load_data: true
23+
for_test: false
24+
load_data_module: "a"
25+
load_data_object: "c"
26+
load_data_args: ""
27+
data_ratio: 1
28+
is_main_data: true
29+
usage_ratio: 1.0
30+
}
31+
opt_config {
32+
batch_size: 1000
33+
algorithm: "sgd"
34+
learning_rate: 0.001
35+
learning_rate_decay_a: 0.0
36+
learning_rate_decay_b: 0.0
37+
l1weight: 0.1
38+
l2weight: 0.0
39+
c1: 0.0001
40+
backoff: 0.5
41+
owlqn_steps: 10
42+
max_backoff: 5
43+
l2weight_zero_iter: 0
44+
average_window: 0
45+
learning_method: "momentum"
46+
ada_epsilon: 1e-06
47+
do_average_in_cpu: false
48+
ada_rou: 0.95
49+
learning_rate_schedule: "poly"
50+
delta_add_rate: 1.0
51+
shrink_parameter_value: 0
52+
adam_beta1: 0.9
53+
adam_beta2: 0.999
54+
adam_epsilon: 1e-08
55+
learning_rate_args: ""
56+
async_lagged_grad_discard_ratio: 1.5
57+
}
58+
test_data_config {
59+
type: "py2"
60+
files: "test.list"
61+
async_load_data: true
62+
for_test: true
63+
load_data_module: "b"
64+
load_data_object: "d"
65+
load_data_args: ""
66+
data_ratio: 1
67+
is_main_data: true
68+
usage_ratio: 1.0
69+
}
70+
save_dir: "./output/model"
71+
start_pass: 0
72+

python/paddle/utils/dump_config.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,13 +19,21 @@
1919
__all__ = []
2020

2121
if __name__ == '__main__':
22+
whole_conf = False
2223
if len(sys.argv) == 2:
2324
conf = parse_config(sys.argv[1], '')
2425
elif len(sys.argv) == 3:
2526
conf = parse_config(sys.argv[1], sys.argv[2])
27+
elif len(sys.argv) == 4:
28+
conf = parse_config(sys.argv[1], sys.argv[2])
29+
if sys.argv[3] == '--whole':
30+
whole_conf = True
2631
else:
2732
raise RuntimeError()
2833

2934
assert isinstance(conf, TrainerConfig_pb2.TrainerConfig)
3035

31-
print conf.model_config
36+
if whole_conf:
37+
print conf
38+
else:
39+
print conf.model_config

0 commit comments

Comments (0)