Skip to content

Commit f09aed0

Browse files
authored
Fix CPPLint issues in framework/data_transform framework/prune.cc (#10178)
* Fix CPPLint issues with data_transform * Fix CPPLint issues with prune.cc
1 parent 72ee737 commit f09aed0

File tree

4 files changed

+14
-14
lines changed

4 files changed

+14
-14
lines changed

paddle/fluid/framework/data_transform.cc

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -63,16 +63,16 @@ void DataTransform(const OpKernelType& expected_kernel_type,
6363
}
6464

6565
void CopyVariableWithTensor(const Variable& in_var, const Tensor& tensor,
66-
Variable& out_var) {
66+
Variable* out_var) {
6767
if (in_var.IsType<LoDTensor>()) {
6868
auto& in_lod_tensor = in_var.Get<LoDTensor>();
69-
auto* tran_lod_tensor = out_var.GetMutable<LoDTensor>();
69+
auto* tran_lod_tensor = out_var->GetMutable<LoDTensor>();
7070
tran_lod_tensor->set_lod(in_lod_tensor.lod());
7171
tran_lod_tensor->set_layout(in_lod_tensor.layout());
7272
tran_lod_tensor->ShareDataWith(tensor);
7373
} else if (in_var.IsType<SelectedRows>()) {
7474
auto& in_selected_rows = in_var.Get<SelectedRows>();
75-
auto* trans_selected_rows = out_var.GetMutable<SelectedRows>();
75+
auto* trans_selected_rows = out_var->GetMutable<SelectedRows>();
7676
trans_selected_rows->set_height(in_selected_rows.height());
7777
trans_selected_rows->set_rows(in_selected_rows.rows());
7878
trans_selected_rows->mutable_value()->ShareDataWith(tensor);

paddle/fluid/framework/data_transform.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ void DataTransform(const OpKernelType& expected_kernel_type,
3535
const Tensor& input_tensor, Tensor* out);
3636

3737
void CopyVariableWithTensor(const Variable& in_var, const Tensor& tensor,
38-
Variable& out_var);
38+
Variable* out_var);
3939

4040
} // namespace framework
4141
} // namespace paddle

paddle/fluid/framework/operator.cc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -554,7 +554,7 @@ void OperatorWithKernel::RunImpl(const Scope& scope,
554554
std::shared_ptr<Tensor> out(new Tensor);
555555
DataTransform(expected_kernel_key, kernel_type_for_var, *tensor_in,
556556
out.get());
557-
CopyVariableWithTensor(*var, *(out.get()), *trans_var);
557+
CopyVariableWithTensor(*var, *(out.get()), trans_var);
558558
}
559559
}
560560
}

paddle/fluid/framework/prune.cc

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -14,19 +14,19 @@ limitations under the License. */
1414

1515
#include "paddle/fluid/framework/prune.h"
1616

17+
#include <glog/logging.h>
18+
1719
#include <algorithm>
1820
#include <set>
1921
#include <string>
2022
#include <unordered_map>
2123
#include <vector>
2224

23-
#include <glog/logging.h>
24-
2525
namespace paddle {
2626
namespace framework {
2727

28-
const std::string kFeedOpType = "feed";
29-
const std::string kFetchOpType = "fetch";
28+
const char kFeedOpType[] = "feed";
29+
const char kFetchOpType[] = "fetch";
3030

3131
bool HasDependentVar(const proto::OpDesc& op_desc,
3232
const std::set<std::string>& dependent_vars) {
@@ -68,7 +68,7 @@ bool HasSubBlock(const proto::OpDesc& op_desc) {
6868
// the child block to help pruning
6969
void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
7070
int block_id, int parent_block_id,
71-
std::set<std::string>& dependent_vars) {
71+
std::set<std::string>* dependent_vars) {
7272
auto& block = input.blocks(block_id);
7373
auto& ops = block.ops();
7474

@@ -90,11 +90,11 @@ void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
9090
std::vector<bool> should_run;
9191
for (auto op_iter = ops.rbegin(); op_iter != ops.rend(); ++op_iter) {
9292
auto& op_desc = *op_iter;
93-
if (IsTarget(op_desc) || HasDependentVar(op_desc, dependent_vars)) {
93+
if (IsTarget(op_desc) || HasDependentVar(op_desc, *dependent_vars)) {
9494
// insert its input to the dependency graph
9595
for (auto& var : op_desc.inputs()) {
9696
for (auto& argu : var.arguments()) {
97-
dependent_vars.insert(argu);
97+
dependent_vars->insert(argu);
9898
}
9999
}
100100
should_run.push_back(true);
@@ -138,7 +138,7 @@ void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
138138
// GetSubBlockIndex(*op) is the idx of the sub_block in the input desc
139139
// output_block_id is the idx of the current block in the output desc
140140
prune_impl(input, output, GetSubBlockIndex(*op), output_block_id,
141-
sub_block_dependent_vars);
141+
&sub_block_dependent_vars);
142142
}
143143
}
144144
}
@@ -181,7 +181,7 @@ void prune_impl(const proto::ProgramDesc& input, proto::ProgramDesc* output,
181181
void Prune(const proto::ProgramDesc& input, proto::ProgramDesc* output) {
182182
std::set<std::string> dependent_vars;
183183
output->clear_blocks();
184-
prune_impl(input, output, 0, -1, dependent_vars);
184+
prune_impl(input, output, 0, -1, &dependent_vars);
185185
}
186186

187187
void inference_optimize_impl(proto::ProgramDesc* input, int block_id) {

0 commit comments

Comments
 (0)