@@ -310,7 +310,7 @@ real Trainer::checkGradient() {
   std::vector<Argument> outArgs;

   trainerInternal_.getGradientMachine()->forward(inArgs, &outArgs, PASS_GC);
-  real cost = Argument::sumCosts(outArgs);
+  real cost = Argument::sum(outArgs);
   LOG(INFO) << "original cost=" << cost;
   trainerInternal_.getGradientMachine()->backward();
@@ -340,7 +340,7 @@ real Trainer::checkGradient() {
     parameter->getBuf(PARAMETER_VALUE)->copyFrom(newPara);
     parameter->setValueUpdated();
     trainerInternal_.getGradientMachine()->forward(inArgs, &outArgs, PASS_GC);
-    real newCost1 = Argument::sumCosts(outArgs);
+    real newCost1 = Argument::sum(outArgs);

     for (size_t i = 0; i < dim; ++i) {
       newp[i] = oldp[i] - step * d[i];
@@ -349,7 +349,7 @@ real Trainer::checkGradient() {
     parameter->getBuf(PARAMETER_VALUE)->copyFrom(newPara);
     parameter->setValueUpdated();
     trainerInternal_.getGradientMachine()->forward(inArgs, &outArgs, PASS_GC);
-    real newCost2 = Argument::sumCosts(outArgs);
+    real newCost2 = Argument::sum(outArgs);

     real trueDelta = 0.5 * (newCost1 - newCost2);
     real diff = (1e-20 + trueDelta) / (1e-20 + delta) - 1;
@@ -575,7 +575,7 @@ real Trainer::calcGradient(const DataBatch& dataBatch,

   trainerInternal_.getGradientMachine()->forwardBackward(
       inArgs, &outArgs, PASS_TRAIN);
-  real cost = Argument::sumCosts(outArgs);
+  real cost = Argument::sum(outArgs);

   offset = 0;
   for (auto& para : parameters) {
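For context, the Trainer::checkGradient hunks above implement a standard central-difference gradient check: the analytic directional derivative delta (taken from the backward pass) is compared against trueDelta = 0.5 * (newCost1 - newCost2), the symmetric finite difference of the cost along a random probe direction d. Below is a minimal self-contained sketch of the same technique; computeCost and computeGradient are hypothetical stand-ins for the GradientMachine's forward and backward passes, using a toy quadratic cost.

// Minimal central-difference gradient check (illustrative sketch only;
// computeCost/computeGradient are hypothetical stand-ins for the
// GradientMachine forward/backward passes in the diff above).
#include <cmath>
#include <cstdio>
#include <random>
#include <vector>

using real = double;

// Toy cost for the sketch: f(p) = 0.5 * ||p||^2, so grad f(p) = p.
real computeCost(const std::vector<real>& p) {
  real c = 0;
  for (real v : p) c += 0.5 * v * v;
  return c;
}

std::vector<real> computeGradient(const std::vector<real>& p) {
  return p;  // analytic gradient of the toy cost above
}

int main() {
  const size_t dim = 8;
  const real step = 1e-4;

  std::mt19937 rng(0);
  std::normal_distribution<real> gauss(0.0, 1.0);

  std::vector<real> oldp(dim), d(dim);
  for (size_t i = 0; i < dim; ++i) {
    oldp[i] = gauss(rng);
    d[i] = gauss(rng);  // random probe direction
  }

  // Analytic directional derivative: delta = step * <grad, d>.
  std::vector<real> grad = computeGradient(oldp);
  real delta = 0;
  for (size_t i = 0; i < dim; ++i) delta += step * grad[i] * d[i];

  // Perturb along +d, then -d, as the diff does for newCost1/newCost2.
  std::vector<real> newp(dim);
  for (size_t i = 0; i < dim; ++i) newp[i] = oldp[i] + step * d[i];
  real newCost1 = computeCost(newp);
  for (size_t i = 0; i < dim; ++i) newp[i] = oldp[i] - step * d[i];
  real newCost2 = computeCost(newp);

  // Central difference vs. analytic value; the 1e-20 terms guard against
  // division by zero, mirroring the diff in Trainer::checkGradient.
  real trueDelta = 0.5 * (newCost1 - newCost2);
  real diff = (1e-20 + trueDelta) / (1e-20 + delta) - 1;
  std::printf("delta=%g trueDelta=%g relative diff=%g\n", delta, trueDelta, diff);
  return 0;
}

For a correct gradient, diff should be on the order of the truncation error of the central difference (O(step^2) for a smooth cost), which is why the check compares the ratio rather than the absolute difference.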