
Commit 253da41

Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into develop

2 parents: 8ed7463 + 349e799

35 files changed: 973 additions (+), 137 deletions (-)

cmake/coverallsGcovJsons.cmake
Lines changed: 3 additions & 5 deletions

@@ -110,14 +110,13 @@ endmacro()
 
 # Get the coverage data.
 file(GLOB_RECURSE GCDA_FILES "${COV_PATH}" "*.gcda")
-message("GCDA files:")
+message("Process GCDA files:")
+message("===============================")
 
 # Get a list of all the object directories needed by gcov
 # (The directories the .gcda files and .o files are found in)
 # and run gcov on those.
 foreach(GCDA ${GCDA_FILES})
-    message("Process: ${GCDA}")
-    message("------------------------------------------------------------------------------")
     get_filename_component(GCDA_DIR ${GCDA} PATH)
 
     #
@@ -135,7 +134,7 @@ foreach(GCDA ${GCDA_FILES})
     # If -p is not specified then the file is named only "the_file.c.gcov"
     #
     execute_process(
-        COMMAND ${GCOV_EXECUTABLE} -p -o ${GCDA_DIR} ${GCDA}
+        COMMAND "${GCOV_EXECUTABLE} -p -o ${GCDA_DIR} ${GCDA} >/dev/null"
         WORKING_DIRECTORY ${GCDA_DIR}
     )
 endforeach()
@@ -383,7 +382,6 @@ foreach(NOT_COVERED_SRC ${COVERAGE_SRCS_REMAINING})
     set(GCOV_FILE_COVERAGE "${GCOV_FILE_COVERAGE}]")
 
     # Generate the final JSON for this file.
-    message("Generate JSON for non-gcov file: ${NOT_COVERED_SRC}...")
     string(CONFIGURE ${SRC_FILE_TEMPLATE} FILE_JSON)
     set(JSON_GCOV_FILES "${JSON_GCOV_FILES}${FILE_JSON}, ")
 endforeach()

demo/introduction/api_train_v2.py
Lines changed: 58 additions & 0 deletions

@@ -0,0 +1,58 @@
+import paddle.v2 as paddle
+import paddle.v2.dataset.uci_housing as uci_housing
+
+
+def main():
+    # init
+    paddle.init(use_gpu=False, trainer_count=1)
+
+    # network config
+    x = paddle.layer.data(name='x', type=paddle.data_type.dense_vector(13))
+    y_predict = paddle.layer.fc(input=x,
+                                param_attr=paddle.attr.Param(name='w'),
+                                size=1,
+                                act=paddle.activation.Linear(),
+                                bias_attr=paddle.attr.Param(name='b'))
+    y = paddle.layer.data(name='y', type=paddle.data_type.dense_vector(1))
+    cost = paddle.layer.regression_cost(input=y_predict, label=y)
+
+    # create parameters
+    parameters = paddle.parameters.create(cost)
+
+    # create optimizer
+    optimizer = paddle.optimizer.Momentum(momentum=0)
+
+    trainer = paddle.trainer.SGD(cost=cost,
+                                 parameters=parameters,
+                                 update_equation=optimizer)
+
+    # event_handler to print training and testing info
+    def event_handler(event):
+        if isinstance(event, paddle.event.EndIteration):
+            if event.batch_id % 100 == 0:
+                print "Pass %d, Batch %d, Cost %f, %s" % (
+                    event.pass_id, event.batch_id, event.cost, event.metrics)
+
+        if isinstance(event, paddle.event.EndPass):
+            result = trainer.test(
+                reader=paddle.reader.batched(
+                    uci_housing.test(), batch_size=2),
+                reader_dict={'x': 0,
+                             'y': 1})
+            if event.pass_id % 10 == 0:
+                print "Test %d, %s" % (event.pass_id, result.metrics)
+
+    # training
+    trainer.train(
+        reader=paddle.reader.batched(
+            paddle.reader.shuffle(
+                uci_housing.train(), buf_size=500),
+            batch_size=2),
+        reader_dict={'x': 0,
+                     'y': 1},
+        event_handler=event_handler,
+        num_passes=30)
+
+
+if __name__ == '__main__':
+    main()
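
In the v2 trainer API used above, reader_dict wires each data layer name to the position of the matching field inside every sample tuple the reader yields. Below is a minimal, hedged sketch of that contract; the toy reader and its values are hypothetical and not part of this commit.

# Each uci_housing sample is a (features, target) tuple, so reader_dict={'x': 0, 'y': 1}
# tells the trainer that field 0 feeds the data layer named 'x' (dense_vector(13))
# and field 1 feeds the data layer named 'y' (dense_vector(1)).
def toy_reader():
    # Hypothetical values, shaped like uci_housing samples.
    yield [0.1] * 13, [21.6]
    yield [0.2] * 13, [31.2]

for features, target in toy_reader():
    print len(features), len(target)   # prints: 13 1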

demo/mnist/api_train_v2.py
Lines changed: 87 additions & 21 deletions

@@ -1,6 +1,59 @@
 import paddle.v2 as paddle
 
 
+def softmax_regression(img):
+    predict = paddle.layer.fc(input=img,
+                              size=10,
+                              act=paddle.activation.Softmax())
+    return predict
+
+
+def multilayer_perceptron(img):
+    # The first fully-connected layer
+    hidden1 = paddle.layer.fc(input=img, size=128, act=paddle.activation.Relu())
+    # The second fully-connected layer and its activation function
+    hidden2 = paddle.layer.fc(input=hidden1,
+                              size=64,
+                              act=paddle.activation.Relu())
+    # The third fully-connected layer; note that the hidden size should be 10,
+    # which is the number of unique digits
+    predict = paddle.layer.fc(input=hidden2,
+                              size=10,
+                              act=paddle.activation.Softmax())
+    return predict
+
+
+def convolutional_neural_network(img):
+    # first conv layer
+    conv_pool_1 = paddle.networks.simple_img_conv_pool(
+        input=img,
+        filter_size=5,
+        num_filters=20,
+        num_channel=1,
+        pool_size=2,
+        pool_stride=2,
+        act=paddle.activation.Tanh())
+    # second conv layer
+    conv_pool_2 = paddle.networks.simple_img_conv_pool(
+        input=conv_pool_1,
+        filter_size=5,
+        num_filters=50,
+        num_channel=20,
+        pool_size=2,
+        pool_stride=2,
+        act=paddle.activation.Tanh())
+    # The first fully-connected layer
+    fc1 = paddle.layer.fc(input=conv_pool_2,
+                          size=128,
+                          act=paddle.activation.Tanh())
+    # The softmax layer; note that the hidden size should be 10,
+    # which is the number of unique digits
+    predict = paddle.layer.fc(input=fc1,
+                              size=10,
+                              act=paddle.activation.Softmax())
+    return predict
+
+
 def main():
     paddle.init(use_gpu=False, trainer_count=1)
 
@@ -9,45 +62,58 @@ def main():
         name='pixel', type=paddle.data_type.dense_vector(784))
     label = paddle.layer.data(
         name='label', type=paddle.data_type.integer_value(10))
-    hidden1 = paddle.layer.fc(input=images, size=200)
-    hidden2 = paddle.layer.fc(input=hidden1, size=200)
-    inference = paddle.layer.fc(input=hidden2,
-                                size=10,
-                                act=paddle.activation.Softmax())
-    cost = paddle.layer.classification_cost(input=inference, label=label)
+
+    # Here we can build the prediction network in different ways. Please
+    # choose one by uncommenting the corresponding line.
+    predict = softmax_regression(images)
+    #predict = multilayer_perceptron(images)
+    #predict = convolutional_neural_network(images)
+
+    cost = paddle.layer.classification_cost(input=predict, label=label)
 
     parameters = paddle.parameters.create(cost)
 
-    adam_optimizer = paddle.optimizer.Adam(learning_rate=0.01)
+    optimizer = paddle.optimizer.Momentum(
+        learning_rate=0.1 / 128.0,
+        momentum=0.9,
+        regularization=paddle.optimizer.L2Regularization(rate=0.0005 * 128))
 
     trainer = paddle.trainer.SGD(cost=cost,
                                  parameters=parameters,
-                                 update_equation=adam_optimizer)
+                                 update_equation=optimizer)
+
+    lists = []
 
     def event_handler(event):
         if isinstance(event, paddle.event.EndIteration):
-            if event.batch_id % 1000 == 0:
-                result = trainer.test(reader=paddle.reader.batched(
-                    paddle.dataset.mnist.test(), batch_size=256))
-
-                print "Pass %d, Batch %d, Cost %f, %s, Testing metrics %s" % (
-                    event.pass_id, event.batch_id, event.cost, event.metrics,
-                    result.metrics)
-
-        else:
-            pass
+            if event.batch_id % 100 == 0:
+                print "Pass %d, Batch %d, Cost %f, %s" % (
+                    event.pass_id, event.batch_id, event.cost, event.metrics)
+        if isinstance(event, paddle.event.EndPass):
+            result = trainer.test(reader=paddle.reader.batched(
+                paddle.dataset.mnist.test(), batch_size=128))
+            print "Test with Pass %d, Cost %f, %s\n" % (
+                event.pass_id, result.cost, result.metrics)
+            lists.append((event.pass_id, result.cost,
+                          result.metrics['classification_error_evaluator']))
 
     trainer.train(
         reader=paddle.reader.batched(
             paddle.reader.shuffle(
                 paddle.dataset.mnist.train(), buf_size=8192),
-            batch_size=32),
-        event_handler=event_handler)
+            batch_size=128),
+        event_handler=event_handler,
+        num_passes=100)
+
+    # find the best pass
+    best = sorted(lists, key=lambda list: float(list[1]))[0]
+    print 'Best pass is %s, testing Avgcost is %s' % (best[0], best[1])
+    print 'The classification accuracy is %.2f%%' % (100 - float(best[2]) * 100)
 
     # output is a softmax layer. It returns probabilities.
     # Shape should be (100, 10)
     probs = paddle.infer(
-        output=inference,
+        output=predict,
         parameters=parameters,
         reader=paddle.reader.batched(
            paddle.reader.firstn(
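
The bookkeeping added to this demo records one (pass_id, test cost, classification error) tuple per pass, then selects the pass with the lowest test cost and reports accuracy as 100 minus the error rate in percent. A small worked example with made-up numbers, illustrative only and not part of the commit:

# Hypothetical per-pass results: (pass_id, test cost, classification error).
lists = [(0, 0.45, 0.12), (1, 0.31, 0.09), (2, 0.36, 0.10)]
best = sorted(lists, key=lambda entry: float(entry[1]))[0]                     # -> (1, 0.31, 0.09)
print 'Best pass is %s, testing Avgcost is %s' % (best[0], best[1])            # pass 1, cost 0.31
print 'The classification accuracy is %.2f%%' % (100 - float(best[2]) * 100)   # 91.00%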

demo/semantic_role_labeling/api_train_v2.py
Lines changed: 16 additions & 1 deletion

@@ -167,8 +167,23 @@ def event_handler(event):
         paddle.reader.shuffle(
             conll05.test(), buf_size=8192), batch_size=10)
 
+    reader_dict = {
+        'word_data': 0,
+        'ctx_n2_data': 1,
+        'ctx_n1_data': 2,
+        'ctx_0_data': 3,
+        'ctx_p1_data': 4,
+        'ctx_p2_data': 5,
+        'verb_data': 6,
+        'mark_data': 7,
+        'target': 8
+    }
+
     trainer.train(
-        reader=trn_reader, event_handler=event_handler, num_passes=10000)
+        reader=trn_reader,
+        event_handler=event_handler,
+        num_passes=10000,
+        reader_dict=reader_dict)
 
 
 if __name__ == '__main__':
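
As with the other demos in this commit, reader_dict maps each data layer name to the index of the matching field in every sample tuple, so the field order the conll05 readers are expected to yield can be read off by sorting on the index. A hedged sketch, not part of the commit:

# Copy of the mapping added above, repeated here so the sketch is self-contained.
reader_dict = {'word_data': 0, 'ctx_n2_data': 1, 'ctx_n1_data': 2,
               'ctx_0_data': 3, 'ctx_p1_data': 4, 'ctx_p2_data': 5,
               'verb_data': 6, 'mark_data': 7, 'target': 8}
field_order = sorted(reader_dict, key=reader_dict.get)
print field_order   # word_data first, ..., target last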
