
Commit 349e799

Merge pull request #1534 from helinwang/arg_sum
Rename Argument::sumCost to Argument::cost since Argument should not …
2 parents 044ad94 + 3219c83

8 files changed: +13 -15 lines
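The change is mechanical at every call site; the whole diff boils down to this before/after pair, taken directly from the hunks below:

    // Before: the helper's name tied Argument to the notion of a cost.
    real cost = Argument::sumCosts(outArgs);

    // After: Argument simply sums its outputs; whether that sum is a
    // cost is the caller's concern.
    real cost = Argument::sum(outArgs);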

paddle/api/Arguments.cpp

Lines changed: 1 addition & 3 deletions
@@ -144,9 +144,7 @@ void Arguments::setSlotSequenceDim(size_t idx, IVector* vec) throw(RangeError) {
   a.cpuSequenceDims = m->cast<paddle::IVector>(vec->getSharedPtr());
 }
 
-float Arguments::sumCosts() const {
-  return paddle::Argument::sumCosts(m->outputs);
-}
+float Arguments::sum() const { return paddle::Argument::sum(m->outputs); }
 
 int64_t Arguments::getBatchSize(size_t idx) const throw(RangeError) {
   auto& a = m->getArg(idx);

paddle/api/PaddleAPI.h

Lines changed: 1 addition & 1 deletion
@@ -453,7 +453,7 @@ class Arguments {
                               IVector* vec) throw(RangeError);
   void setSlotSequenceDim(size_t idx, IVector* vec) throw(RangeError);
 
-  float sumCosts() const;
+  float sum() const;
 
  private:
   static Arguments* createByPaddleArgumentVector(void* ptr);

paddle/api/test/testArguments.py

Lines changed: 1 addition & 1 deletion
@@ -22,7 +22,7 @@ def test_load_arguments(self):
         args = swig_paddle.Arguments.createArguments(1)
         args.setSlotValue(0, m)
 
-        self.assertAlmostEqual(27.0, args.sumCosts())
+        self.assertAlmostEqual(27.0, args.sum())
 
         mat = args.getSlotValue(0)
         assert isinstance(mat, swig_paddle.Matrix)

paddle/gserver/tests/LayerGradUtil.cpp

Lines changed: 3 additions & 3 deletions
@@ -24,7 +24,7 @@ real getCostSum(LayerPtr& testLayer, MatrixPtr weights) {
   if (weights) {
     outArgs[0].value->dotMul(*outArgs[0].value, *weights);
   }
-  return Argument::sumCosts(outArgs);
+  return Argument::sum(outArgs);
 }
 
 real getDiffAndPrint(real newCost1,
@@ -241,7 +241,7 @@ void testBatchState(LayerPtr testLayer,
 
   std::vector<Argument> args;
   args.push_back(out);
-  EXPECT_EQ(0, Argument::sumCosts(args)) << "testBatchState failed";
+  EXPECT_EQ(0, Argument::sum(args)) << "testBatchState failed";
   for (size_t seqId = 0; seqId < numSequences; ++seqId) {
     start[seqId] += seqLens[seqId];
   }
@@ -672,7 +672,7 @@ void testLayerGradKernel(TestConfig testConf,
     outArgs[0].value->dotMul(*testLayer->getOutput().value, *weights);
   }
 
-  real cost = Argument::sumCosts(outArgs);
+  real cost = Argument::sum(outArgs);
   LOG(INFO) << " cost " << cost;
   EXPECT_FALSE(std::isnan(cost));
 
paddle/parameter/Argument.h

Lines changed: 1 addition & 1 deletion
@@ -163,7 +163,7 @@ struct Argument {
                : sequenceStartPositions->getData(false);
   }
 
-  static inline real sumCosts(const std::vector<Argument>& arguments) {
+  static inline real sum(const std::vector<Argument>& arguments) {
     real cost = 0;
     for (auto& arg : arguments) {
      if (arg.value) {
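The hunk above cuts off mid-body. For reference, a minimal sketch of what the renamed helper plausibly looks like in full; the getSum() reduction over arg.value is an assumption, since that part of the body is not shown in this diff:

    // Sketch only: reconstructed from the truncated hunk above; the
    // loop body is assumed, not shown in the diff.
    static inline real sum(const std::vector<Argument>& arguments) {
      real cost = 0;
      for (auto& arg : arguments) {
        if (arg.value) {
          // Assumption: Matrix::getSum() totals every element of the
          // output matrix.
          cost += arg.value->getSum();
        }
      }
      return cost;
    }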

paddle/trainer/Tester.cpp

Lines changed: 1 addition & 1 deletion
@@ -208,7 +208,7 @@ real Tester::forwardOneBatch(const DataBatch& dataBatch,
     return 0.0;  // In this case, there is no meaning to calculate cost
   }
 
-  return Argument::sumCosts(outArgs);
+  return Argument::sum(outArgs);
 }
 
 void Tester::testOnePassBatch(int passId) {

paddle/trainer/Trainer.cpp

Lines changed: 4 additions & 4 deletions
@@ -310,7 +310,7 @@ real Trainer::checkGradient() {
   std::vector<Argument> outArgs;
 
   trainerInternal_.getGradientMachine()->forward(inArgs, &outArgs, PASS_GC);
-  real cost = Argument::sumCosts(outArgs);
+  real cost = Argument::sum(outArgs);
   LOG(INFO) << "original cost=" << cost;
   trainerInternal_.getGradientMachine()->backward();
 
@@ -340,7 +340,7 @@ real Trainer::checkGradient() {
   parameter->getBuf(PARAMETER_VALUE)->copyFrom(newPara);
   parameter->setValueUpdated();
   trainerInternal_.getGradientMachine()->forward(inArgs, &outArgs, PASS_GC);
-  real newCost1 = Argument::sumCosts(outArgs);
+  real newCost1 = Argument::sum(outArgs);
 
   for (size_t i = 0; i < dim; ++i) {
     newp[i] = oldp[i] - step * d[i];
@@ -349,7 +349,7 @@ real Trainer::checkGradient() {
   parameter->getBuf(PARAMETER_VALUE)->copyFrom(newPara);
   parameter->setValueUpdated();
   trainerInternal_.getGradientMachine()->forward(inArgs, &outArgs, PASS_GC);
-  real newCost2 = Argument::sumCosts(outArgs);
+  real newCost2 = Argument::sum(outArgs);
 
   real trueDelta = 0.5 * (newCost1 - newCost2);
   real diff = (1e-20 + trueDelta) / (1e-20 + delta) - 1;
@@ -575,7 +575,7 @@ real Trainer::calcGradient(const DataBatch& dataBatch,
 
   trainerInternal_.getGradientMachine()->forwardBackward(
       inArgs, &outArgs, PASS_TRAIN);
-  real cost = Argument::sumCosts(outArgs);
+  real cost = Argument::sum(outArgs);
 
   offset = 0;
   for (auto& para : parameters) {
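Trainer::checkGradient() above perturbs a parameter along a probe direction d by ±step, re-runs the forward pass, and compares trueDelta = 0.5 * (newCost1 - newCost2) against the delta predicted by the analytic gradient. A self-contained sketch of that central-difference pattern; all names here are illustrative, not Paddle's API:

    #include <functional>
    #include <vector>

    // Central-difference gradient check, mirroring Trainer::checkGradient():
    // cost(p + step*d) and cost(p - step*d) bracket the analytic directional
    // derivative step * <grad, d>. Returns the same relative error computed
    // in the hunks above (near zero when the gradient is correct).
    double checkDirectionalGradient(
        const std::function<double(const std::vector<double>&)>& cost,
        const std::vector<double>& p,     // parameter values
        const std::vector<double>& grad,  // analytic gradient at p
        const std::vector<double>& d,     // probe direction
        double step) {
      std::vector<double> plus = p, minus = p;
      double delta = 0;  // analytic prediction of 0.5 * (cost(plus) - cost(minus))
      for (std::size_t i = 0; i < p.size(); ++i) {
        plus[i] += step * d[i];
        minus[i] -= step * d[i];
        delta += step * grad[i] * d[i];
      }
      const double trueDelta = 0.5 * (cost(plus) - cost(minus));
      return (1e-20 + trueDelta) / (1e-20 + delta) - 1;
    }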

paddle/trainer/TrainerInternal.cpp

Lines changed: 1 addition & 1 deletion
@@ -134,7 +134,7 @@ void TrainerInternal::trainOneBatch(int64_t batchId,
   real cost = 0;
   {
     REGISTER_TIMER("sumCost");
-    cost = Argument::sumCosts(*outArgs);
+    cost = Argument::sum(*outArgs);
   }
 
   if (batchId % intconfig_->log_period == 0) {
