Skip to content

Commit a77d75c

Browse files
authored
[Cherry-pick 1.6]Fix op run log when memory optimization strategy is enabled (#20696)
* Fix the op run log bug, test=release/1.6
* Add unit tests, test=release/1.6
1 parent 8c1e1de commit a77d75c

File tree

3 files changed

+84
-4
lines changed

3 files changed

+84
-4
lines changed

paddle/fluid/framework/operator.cc

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -132,9 +132,6 @@ static LoD GetLoDDebug(const Scope& scope, const std::string& name) {
132132

133133
if (var->IsType<LoDTensor>()) {
134134
const LoDTensor& tensor = var->Get<LoDTensor>();
135-
if (UNLIKELY(!tensor.IsInitialized())) {
136-
return default_lod;
137-
}
138135
return tensor.lod();
139136
} else {
140137
return default_lod;
@@ -238,8 +235,16 @@ const std::vector<std::string>& OperatorBase::Outputs(
238235
std::string OperatorBase::DebugStringEx(const Scope* scope) const {
239236
std::stringstream ss;
240237
ss << "Op(" << type_ << "), inputs:{";
238+
239+
std::unordered_set<std::string> no_need_buffer_vars;
240+
if (info_ && info_->NoNeedBufferVarsInferer()) {
241+
no_need_buffer_vars =
242+
Info().NoNeedBufferVarsInferer()(Inputs(), Outputs(), Attrs());
243+
}
244+
241245
for (auto it = inputs_.begin(); it != inputs_.end();) {
242246
auto& input = *it;
247+
bool is_no_need_buffer_var = (no_need_buffer_vars.count(input.first) > 0);
243248
ss << input.first << "[";
244249
for (size_t i = 0; i < input.second.size(); ++i) {
245250
auto var_name = input.second[i];
@@ -252,7 +257,9 @@ std::string OperatorBase::DebugStringEx(const Scope* scope) const {
252257
if (row_size >= 0) {
253258
ss << "[row_size=" << row_size << "]";
254259
}
255-
std::string dtype = GetDtype(*scope, var_name);
260+
std::string dtype = is_no_need_buffer_var
261+
? "unknown_dtype"
262+
: GetDtype(*scope, var_name);
256263
ss << ":" << dtype;
257264
ss << "[" << GetDimsDebug(*scope, var_name, true) << "]";
258265
ss << "(" << GetLoDDebug(*scope, var_name) << ")";

paddle/fluid/operators/CMakeLists.txt

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -128,3 +128,5 @@ endif()
128128

129129
set(GLOB_OP_LIB ${OP_LIBRARY} CACHE INTERNAL "Global OP library")
130130
add_subdirectory(benchmark)
131+
132+
cc_test(op_debug_string_test SRCS op_debug_string_test.cc DEPS elementwise_add_op)
Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,71 @@
1+
// Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
2+
//
3+
// Licensed under the Apache License, Version 2.0 (the "License");
4+
// you may not use this file except in compliance with the License.
5+
// You may obtain a copy of the License at
6+
//
7+
// http://www.apache.org/licenses/LICENSE-2.0
8+
//
9+
// Unless required by applicable law or agreed to in writing, software
10+
// distributed under the License is distributed on an "AS IS" BASIS,
11+
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
// See the License for the specific language governing permissions and
13+
// limitations under the License.
14+
15+
#include <string>
16+
#include "glog/logging.h"
17+
#include "gtest/gtest.h"
18+
#include "paddle/fluid/framework/op_registry.h"
19+
#include "paddle/fluid/framework/operator.h"
20+
21+
USE_OP(elementwise_add_grad);
22+
23+
namespace paddle {
24+
namespace operators {
25+
26+
// Checks that OperatorBase::DebugStringEx() prints "unknown_dtype" for
// no-need-buffer input variables instead of touching their (possibly
// uninitialized or freed) tensor buffers — both before the op runs and
// after it has run. elementwise_add_grad is used because it declares
// no-need-buffer inputs (presumably via its NoNeedBufferVarsInferer;
// NOTE(review): confirm against the op's registration).
TEST(op_debug_str, test_unknown_dtype) {
  const std::string kUnknownDtype = "unknown_dtype";
  const framework::DDim dims{3, 4, 5, 6};
  platform::Place place = platform::CPUPlace();

  framework::OpDesc desc;
  framework::Scope scope;

  // Describe an elementwise_add_grad op. The forward inputs' buffers are
  // not all required by the grad kernel, which is what triggers the
  // "unknown_dtype" path in the debug string.
  desc.SetType("elementwise_add_grad");
  desc.SetInput("Y", {"Y"});
  desc.SetInput(framework::GradVarName("Out"), {framework::GradVarName("Out")});
  desc.SetOutput(framework::GradVarName("X"), {framework::GradVarName("X")});
  desc.SetOutput(framework::GradVarName("Y"), {framework::GradVarName("Y")});
  desc.SetAttr("axis", -1);
  desc.SetAttr("use_mkldnn", false);
  desc.SetAttr("x_data_format", "");
  desc.SetAttr("y_data_format", "");

  // Materialize an input variable with an allocated float buffer.
  auto init_input = [&](const std::string& name) {
    auto* tensor = scope.Var(name)->GetMutable<framework::LoDTensor>();
    tensor->Resize(dims);
    tensor->mutable_data<float>(place);
  };
  init_input("Y");
  init_input(framework::GradVarName("Out"));

  // Output variables exist but are left unallocated; the op fills them in.
  scope.Var(framework::GradVarName("X"))->GetMutable<framework::LoDTensor>();
  scope.Var(framework::GradVarName("Y"))->GetMutable<framework::LoDTensor>();

  auto op = framework::OpRegistry::CreateOp(desc);

  // The marker must appear even before Run(), when buffers may not exist yet.
  const std::string pre_run_log = op->DebugStringEx(&scope);
  LOG(INFO) << pre_run_log;
  ASSERT_TRUE(pre_run_log.find(kUnknownDtype) != std::string::npos);

  op->Run(scope, place);
  platform::DeviceContextPool::Instance().Get(place)->Wait();

  // And it must still appear after Run() for the no-need-buffer inputs.
  const std::string post_run_log = op->DebugStringEx(&scope);
  LOG(INFO) << post_run_log;
  ASSERT_TRUE(post_run_log.find(kUnknownDtype) != std::string::npos);
}
69+
70+
} // namespace operators
71+
} // namespace paddle

0 commit comments

Comments
 (0)