
Commit 9f83f0f

API/OP (group_norm, layer_norm, random_crop, unpool) error message enhancement (#24413)

* API/OP (group_norm, layer_norm, unpool) error message enhancement test=develop

1 parent 7f85dfe commit 9f83f0f

File tree

5 files changed: +48 −17 lines changed

paddle/fluid/operators/group_norm_op.cc
paddle/fluid/operators/layer_norm_op.cc
paddle/fluid/operators/layer_norm_op.cu
paddle/fluid/operators/random_crop_op.h
paddle/fluid/operators/unpool_op.cc
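All five diffs apply the same pattern: a bare PADDLE_ENFORCE or PADDLE_THROW with a raw message string becomes a comparison-specific check (PADDLE_ENFORCE_EQ, PADDLE_ENFORCE_GT, PADDLE_ENFORCE_NOT_NULL) carrying a typed error object such as platform::errors::InvalidArgument or platform::errors::NotFound, with the offending values interpolated into the message. A minimal self-contained sketch of the idea follows; EnforceEq and the error struct are illustrative stand-ins, not Paddle's actual implementation.

#include <cstdio>
#include <sstream>
#include <stdexcept>
#include <string>

// Illustrative typed-error class, standing in for
// platform::errors::InvalidArgument.
struct InvalidArgument : std::invalid_argument {
  explicit InvalidArgument(const std::string &msg)
      : std::invalid_argument(msg) {}
};

// Stand-in for PADDLE_ENFORCE_EQ: compare two values and, on mismatch,
// throw the typed error built by the caller.
template <typename T, typename U, typename Err>
void EnforceEq(const T &a, const U &b, const Err &err) {
  if (!(a == b)) throw err;
}

int main() {
  float epsilon = 2.0f;  // out of range on purpose
  std::ostringstream msg;
  msg << "'epsilon' in Op(GroupNorm) should be between 0.0 and 1.0, "
         "but received [" << epsilon << "].";
  try {
    EnforceEq(epsilon >= 0.0f && epsilon <= 1.0f, true,
              InvalidArgument(msg.str()));
  } catch (const InvalidArgument &e) {
    std::puts(e.what());  // the enhanced message carries the bad value
  }
}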

paddle/fluid/operators/group_norm_op.cc
Lines changed: 11 additions & 3 deletions

@@ -122,12 +122,20 @@ class GroupNormOpMaker : public framework::OpProtoAndCheckerMaker {
              "Constant for numerical stability [default 1e-5].")
         .SetDefault(1e-5)
         .AddCustomChecker([](const float &epsilon) {
-          PADDLE_ENFORCE(epsilon >= 0.0f && epsilon <= 1.0f,
-                         "'epsilon' should be between 0.0 and 1.0.");
+          PADDLE_ENFORCE_EQ(epsilon >= 0.0f && epsilon <= 1.0f, true,
+                            platform::errors::InvalidArgument(
+                                "'epsilon' in Op(GroupNorm) should be between "
+                                "0.0 and 1.0, but received [%s].",
+                                epsilon));
         });
     AddAttr<int>("groups", "The number of groups that divided from channels.")
         .AddCustomChecker([](const int &groups) {
-          PADDLE_ENFORCE_GT(groups, 0, "'groups' should be greater than zero.");
+          PADDLE_ENFORCE_GT(
+              groups, 0,
+              platform::errors::InvalidArgument(
+                  "'groups' in Op(GroupNorm) should be greater than zero, "
+                  "but received [%s].",
+                  groups));
         });
     AddAttr<std::string>("data_layout",
                          "An optional string from: \"NHWC\", \"NCHW\". ")

paddle/fluid/operators/layer_norm_op.cc
Lines changed: 4 additions & 6 deletions

@@ -179,18 +179,16 @@ class LayerNormGradOp : public framework::OperatorWithKernel {
   framework::OpKernelType GetExpectedKernelType(
       const framework::ExecutionContext &ctx) const override {
     const auto *var = ctx.InputVar(framework::GradVarName("Y"));
-    if (var == nullptr) {
-      PADDLE_THROW("can't find Y@GRAD");
-    }
+    PADDLE_ENFORCE_NOT_NULL(var, platform::errors::NotFound(
+                                     "Y@GRAD of LayerNorm Op is not found."));
     const Tensor *t = nullptr;
     if (var->IsType<Tensor>()) {
       t = &var->Get<Tensor>();
     } else if (var->IsType<LoDTensor>()) {
       t = &var->Get<LoDTensor>();
     }
-    if (t == nullptr) {
-      PADDLE_THROW("can't find Y@GRAD");
-    }
+    PADDLE_ENFORCE_NOT_NULL(
+        t, platform::errors::NotFound("Y@GRAD of LayerNorm Op is not found."));
     return framework::OpKernelType(t->type(), ctx.GetPlace());
   }
 };
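The layer_norm change collapses each if (p == nullptr) { PADDLE_THROW(...) } block into a single not-null check carrying a typed NotFound error, so the failed lookup is named in one place. A hedged sketch; EnforceNotNull and NotFound below are illustrative, not Paddle's macro or class:

#include <cstdio>
#include <stdexcept>
#include <string>

// Illustrative typed error, standing in for platform::errors::NotFound.
struct NotFound : std::runtime_error {
  explicit NotFound(const std::string &msg) : std::runtime_error(msg) {}
};

// Stand-in for PADDLE_ENFORCE_NOT_NULL: one call replaces the old
// if-null-then-throw block.
template <typename T>
void EnforceNotNull(const T *ptr, const NotFound &err) {
  if (ptr == nullptr) throw err;
}

int main() {
  const int *var = nullptr;  // simulate a missing Y@GRAD variable
  try {
    EnforceNotNull(var, NotFound("Y@GRAD of LayerNorm Op is not found."));
  } catch (const NotFound &e) {
    std::puts(e.what());
  }
}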

paddle/fluid/operators/layer_norm_op.cu
Lines changed: 2 additions & 2 deletions

@@ -528,8 +528,8 @@ class LayerNormKernel<platform::CUDADeviceContext, T>
             x_data, scale_data, bias_data, y_data, mean_data, var_data,
             epsilon, feature_size));
       default:
-        PADDLE_THROW(
-            "Product from begin_norm_axis to end must be larger than 1");
+        PADDLE_THROW(platform::errors::InvalidArgument(
+            "Product from begin_norm_axis to end must be larger than 1"));
         break;
     }
   }
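In the CUDA kernel the same rewrite applies inside a switch: the default branch is the single place that reports an unsupported configuration, and it now throws a typed InvalidArgument instead of a bare string. A loose stand-alone analogue of that shape (the function name, case values, and dispatch below are invented for illustration):

#include <cstdio>
#include <stdexcept>

// Invented dispatcher mimicking the switch-with-throwing-default shape of
// the kernel above: supported sizes are handled, anything else throws.
int PickBlockDim(int feature_size) {
  switch (feature_size) {
    case 256:
      return 256;
    case 512:
      return 512;
    default:
      throw std::invalid_argument(
          "Product from begin_norm_axis to end must be larger than 1");
  }
}

int main() {
  try {
    PickBlockDim(3);  // unsupported on purpose
  } catch (const std::invalid_argument &e) {
    std::puts(e.what());
  }
}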

paddle/fluid/operators/random_crop_op.h
Lines changed: 22 additions & 3 deletions

@@ -106,8 +106,21 @@ struct RandomCropFunctor {
         num_batchsize_dims_(num_batchsize_dims),
         rank_(x_dims.size()),
         seed_(seed) {
-    PADDLE_ENFORCE_EQ(x_dims.size(), out_dims.size());
-    PADDLE_ENFORCE_GT(rank_, num_batchsize_dims_);
+    PADDLE_ENFORCE_EQ(
+        x_dims.size(), out_dims.size(),
+        platform::errors::InvalidArgument(
+            "The number of dimensions of Input(X) must be equal to the "
+            "number of dimensions of Output(Out), but received Input(X)'s "
+            "dimensions as [%d] and Output(Out)'s dimensions as [%d].",
+            x_dims.size(), out_dims.size()));
+    PADDLE_ENFORCE_GT(
+        rank_, num_batchsize_dims_,
+        platform::errors::InvalidArgument(
+            "The number of dimensions of Input(X) must be greater than the "
+            "number of batch-size dimensions, i.e. Input(X)'s dimensions "
+            "minus Attr(shape)'s dimensions, but received [%d] and [%d] "
+            "respectively.",
+            rank_, num_batchsize_dims_));
     prod_batchsize_dims_ = 1;
     prod_x_ins_dims_ = 1;
     prod_out_ins_dims_ = 1;
@@ -117,7 +130,13 @@ struct RandomCropFunctor {
       x_dims_[i] = x_dim_i;
       out_dims_[i] = out_dim_i;
       if (i < static_cast<size_t>(num_batchsize_dims_)) {
-        PADDLE_ENFORCE_EQ(x_dim_i, out_dim_i);
+        PADDLE_ENFORCE_EQ(
+            x_dim_i, out_dim_i,
+            platform::errors::InvalidArgument(
+                "The first [%d] dimension values of Input(X) and Output(Out) "
+                "must be equal, but received the [%d]-th dimension values of "
+                "Input(X) and Output(Out) as [%d] and [%d] respectively.",
+                num_batchsize_dims_, i, x_dim_i, out_dim_i));
         prod_batchsize_dims_ *= x_dim_i;
       } else {
         prod_x_ins_dims_ *= x_dim_i;
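The random_crop checks interpolate several values into one printf-style message. A minimal stand-alone equivalent of that formatting, under the assumption that an snprintf-based helper is acceptable (Format and EnforceGt are illustrative names, not Paddle's API):

#include <cstdio>
#include <stdexcept>
#include <string>

// Illustrative printf-style formatter for building error messages that
// carry the offending values, as the checks above do.
template <typename... Args>
std::string Format(const char *fmt, Args... args) {
  int n = std::snprintf(nullptr, 0, fmt, args...);  // measure
  std::string out(n + 1, '\0');
  std::snprintf(&out[0], out.size(), fmt, args...);  // fill
  out.resize(n);  // drop the trailing null written by snprintf
  return out;
}

// Stand-in for PADDLE_ENFORCE_GT.
template <typename T>
void EnforceGt(const T &a, const T &b, const std::string &msg) {
  if (!(a > b)) throw std::invalid_argument(msg);
}

int main() {
  int rank = 2, num_batchsize_dims = 2;  // violates rank > num_batchsize_dims
  try {
    EnforceGt(rank, num_batchsize_dims,
              Format("The number of dimensions of Input(X) must be greater "
                     "than the number of batch-size dimensions, but received "
                     "[%d] and [%d].",
                     rank, num_batchsize_dims));
  } catch (const std::invalid_argument &e) {
    std::puts(e.what());
  }
}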

paddle/fluid/operators/unpool_op.cc
Lines changed: 9 additions & 3 deletions

@@ -95,10 +95,16 @@ class UnpoolOp : public framework::OperatorWithKernel {
     std::vector<int> paddings = ctx->Attrs().Get<std::vector<int>>("paddings");
     PADDLE_ENFORCE_EQ(in_x_dims.size() == 4, true,
                       platform::errors::InvalidArgument(
-                          "Unpooling Intput(X) must be of 4-dimensional, but "
-                          "received Input(X)'s dimension is %d.",
+                          "Unpool Input(X) must be 4-dimensional, but "
+                          "received Input(X)'s dimension is %d.",
                           in_x_dims.size()));
-    PADDLE_ENFORCE_EQ(in_x_dims, in_y_dims);
+    PADDLE_ENFORCE_EQ(in_x_dims, in_y_dims,
+                      platform::errors::InvalidArgument(
+                          "The dimensions of Input(X) must be equal to the "
+                          "dimensions of Input(Indices), but received the "
+                          "dimensions of Input(X) as [%s] and the dimensions "
+                          "of Input(Indices) as [%s].",
+                          in_x_dims, in_y_dims));
 
     std::vector<int64_t> output_shape({in_x_dims[0], in_x_dims[1]});
     for (size_t i = 0; i < ksize.size(); ++i) {
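For shape-vs-shape checks like the unpool one, the enhanced message prints both shapes rather than just reporting that a mismatch occurred. A sketch of that idea with a plain std::vector standing in for Paddle's DDim (DimsToString is an invented helper):

#include <cstdio>
#include <sstream>
#include <string>
#include <vector>

// Render a shape as "[N, C, H, W]" for inclusion in an error message.
std::string DimsToString(const std::vector<int> &dims) {
  std::ostringstream os;
  os << "[";
  for (size_t i = 0; i < dims.size(); ++i) os << (i ? ", " : "") << dims[i];
  os << "]";
  return os.str();
}

int main() {
  std::vector<int> x_dims = {8, 3, 32, 32};
  std::vector<int> indices_dims = {8, 3, 16, 16};  // mismatched on purpose
  if (x_dims != indices_dims) {
    // The enhanced style: name the inputs that disagree and show both shapes.
    std::string msg =
        "The dimensions of Input(X) must be equal to the dimensions of "
        "Input(Indices), but received " + DimsToString(x_dims) + " and " +
        DimsToString(indices_dims) + ".";
    std::puts(msg.c_str());
  }
}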
