Skip to content

Commit 996a747

Browse files
chengduo and Yibing Liu
authored and committed
Fix cross_entropy bug (#16237)
test=release/1.3
1 parent 2c5fee5 commit 996a747

File tree

1 file changed

+2
-1
lines changed

1 file changed

+2
-1
lines changed

paddle/fluid/operators/softmax_with_cross_entropy_op.cu

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -439,7 +439,8 @@ class SoftmaxWithCrossEntropyGradCUDAKernel : public framework::OpKernel<T> {
439439
context.Input<Tensor>(framework::GradVarName("Loss"))->data<T>();
440440
Tensor* logit_grad =
441441
context.Output<Tensor>(framework::GradVarName("Logits"));
442-
logit_grad->ShareDataWith(*context.Input<Tensor>("Softmax"));
442+
framework::TensorCopy(*context.Input<Tensor>("Softmax"), context.GetPlace(),
443+
context.device_context(), logit_grad);
443444
T* logit_grad_data = logit_grad->data<T>();
444445

445446
const int batch_size = logit_grad->dims()[0];

0 commit comments

Comments
 (0)