Skip to content

Commit 1a9008c

Browse files
committed
code style fix
test=develop
1 parent b2a770c commit 1a9008c

File tree

12 files changed

+67
-67
lines changed

12 files changed

+67
-67
lines changed

paddle/fluid/framework/ir/attention_lstm_fuse_pass.cc

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -213,10 +213,10 @@ void PrepareLSTMWeight(const LoDTensor& W_forget_w0,
213213
float* out_data = out->mutable_data<float>(platform::CPUPlace());
214214
std::array<const float*, 4> tensors{
215215
W_forget_w0.data<float>(), W_input_w0.data<float>(),
216-
W_output_w0.data<float>(), W_cell_w0.data<float>()};
216+
W_output_w0.data<float>(), W_cell_w0.data<float>()};
217217
std::array<const float*, 4> tensors1{
218218
W_forget_w1.data<float>(), W_input_w1.data<float>(),
219-
W_output_w1.data<float>(), W_cell_w1.data<float>()};
219+
W_output_w1.data<float>(), W_cell_w1.data<float>()};
220220

221221
for (int row = 0; row < D; row++) {
222222
for (int col = 0; col < 4; col++) {
@@ -240,7 +240,7 @@ void PrepareLSTMBias(const LoDTensor& B_forget, const LoDTensor& B_input,
240240
LoDTensor* out) {
241241
std::array<const float*, 4> tensors{
242242
B_forget.data<float>(), B_input.data<float>(), B_output.data<float>(),
243-
B_cell.data<float>()};
243+
B_cell.data<float>()};
244244

245245
PADDLE_ENFORCE_EQ(B_forget.dims().size(), 1);
246246
int D = B_forget.dims()[0];

paddle/fluid/framework/ir/node.cc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -19,9 +19,9 @@ namespace framework {
1919
namespace ir {
2020
// msvc15 doesn't support constexpr correctly.
2121
#if !defined(_WIN32)
22-
constexpr char Node::kControlDepVarName[];
22+
constexpr char Node::kControlDepVarName[];
2323
#else
24-
const char Node::kControlDepVarName[] = "__control_var";
24+
const char Node::kControlDepVarName[] = "__control_var";
2525
#endif
2626

2727
std::unique_ptr<Node> CreateNodeForTest(const std::string& name,

paddle/fluid/framework/ir/node.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -56,9 +56,9 @@ class Node {
5656

5757
enum class Type { kOperation, kVariable };
5858
#if !defined(_WIN32)  // msvc doesn't support constexpr correctly.
59-
static constexpr char kControlDepVarName[] = "__control_var";
59+
static constexpr char kControlDepVarName[] = "__control_var";
6060
#else
61-
static const char kControlDepVarName[];
61+
static const char kControlDepVarName[];
6262
#endif
6363

6464
Type NodeType() const { return type_; }

paddle/fluid/framework/ir/pass.h

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -197,26 +197,26 @@ struct PassRegistrar : public Registrar {
197197
msg)
198198

199199
// Register a new pass that can be applied on the IR.
200-
#define REGISTER_PASS(pass_type, pass_class) \
201-
STATIC_ASSERT_PASS_GLOBAL_NAMESPACE( \
202-
__reg_pass__##pass_type, \
203-
"REGISTER_PASS must be called in global namespace"); \
204-
static ::paddle::framework::ir::PassRegistrar<pass_class> \
205-
__pass_registrar_##pass_type##__(#pass_type); \
206-
int TouchPassRegistrar_##pass_type() { \
207-
__pass_registrar_##pass_type##__.Touch(); \
208-
return 0; \
209-
} \
210-
static ::paddle::framework::ir::PassRegistrar<pass_class> \
211-
&__pass_tmp_registrar_##pass_type##__ UNUSED = \
200+
#define REGISTER_PASS(pass_type, pass_class) \
201+
STATIC_ASSERT_PASS_GLOBAL_NAMESPACE( \
202+
__reg_pass__##pass_type, \
203+
"REGISTER_PASS must be called in global namespace"); \
204+
static ::paddle::framework::ir::PassRegistrar<pass_class> \
205+
__pass_registrar_##pass_type##__(#pass_type); \
206+
int TouchPassRegistrar_##pass_type() { \
207+
__pass_registrar_##pass_type##__.Touch(); \
208+
return 0; \
209+
} \
210+
static ::paddle::framework::ir::PassRegistrar<pass_class> \
211+
&__pass_tmp_registrar_##pass_type##__ UNUSED = \
212212
__pass_registrar_##pass_type##__
213213

214-
#define USE_PASS(pass_type) \
215-
STATIC_ASSERT_PASS_GLOBAL_NAMESPACE( \
216-
__use_pass_itself_##pass_type, \
217-
"USE_PASS must be called in global namespace"); \
218-
extern int TouchPassRegistrar_##pass_type(); \
219-
static int use_pass_itself_##pass_type##_ UNUSED = \
214+
#define USE_PASS(pass_type) \
215+
STATIC_ASSERT_PASS_GLOBAL_NAMESPACE( \
216+
__use_pass_itself_##pass_type, \
217+
"USE_PASS must be called in global namespace"); \
218+
extern int TouchPassRegistrar_##pass_type(); \
219+
static int use_pass_itself_##pass_type##_ UNUSED = \
220220
TouchPassRegistrar_##pass_type()
221221

222222
} // namespace ir

paddle/fluid/framework/operator.cc

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -150,9 +150,9 @@ void OperatorBase::Run(const Scope& scope, const platform::Place& place) {
150150
#endif
151151
}
152152

153-
// The profile has a process-wide mutex, results in serious performance issue
154-
// in concurrency scenerio. Here use an `if` to fix this issue.
155-
// Please not remove the `if`, ask @Superjomn if there are any concern.
153+
// The profiler has a process-wide mutex, which results in a serious performance
154+
// issue in concurrency scenarios. Here an `if` is used to fix this issue.
155+
// Please do not remove the `if`; ask @Superjomn if there are any concerns.
156156
#ifndef _WIN32
157157
if (platform::IsProfileEnabled()) {
158158
platform::DeviceContextPool& pool = platform::DeviceContextPool::Instance();

paddle/fluid/inference/api/helper.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,9 +20,9 @@
2020
#else
2121
#endif
2222

23-
#include <iterator>
2423
#include <algorithm>
2524
#include <chrono> // NOLINT
25+
#include <iterator>
2626
#include <numeric>
2727
#include <sstream>
2828
#include <string>

paddle/fluid/operators/elementwise_op_function.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -112,7 +112,7 @@ class RowwiseTransformIterator<T, platform::CPUDeviceContext>
112112
}
113113

114114
RowwiseTransformIterator<T, platform::CPUDeviceContext> &operator+(int n) {
115-
while(n-- > 0) {
115+
while (n-- > 0) {
116116
++i_;
117117
if (UNLIKELY(i_ == n_)) {
118118
i_ = 0;
@@ -161,7 +161,7 @@ class MidWiseTransformIterator<T, platform::CPUDeviceContext>
161161
}
162162

163163
MidWiseTransformIterator<T, platform::CPUDeviceContext> &operator+(int n) {
164-
while(n-- > 0) {
164+
while (n-- > 0) {
165165
++j_;
166166
if (UNLIKELY(j_ == post_)) {
167167
++i_;

paddle/fluid/operators/grid_sampler_op.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -67,10 +67,10 @@ static void CalcGridLocations(const platform::CPUDeviceContext& ctx,
6767
Tensor half_ymax;
6868
half_xmax.mutable_data<T>({n, h, w}, ctx.GetPlace());
6969
auto half_xmax_t =
70-
EigenTensor<T, 3>::From(half_xmax).setConstant(0.5 * x_max);
70+
EigenTensor<T, 3>::From(half_xmax).setConstant(0.5 * x_max);
7171
half_ymax.mutable_data<T>({n, h, w}, ctx.GetPlace());
7272
auto half_ymax_t =
73-
EigenTensor<T, 3>::From(half_ymax).setConstant(0.5 * y_max);
73+
EigenTensor<T, 3>::From(half_ymax).setConstant(0.5 * y_max);
7474

7575
// scale grid to [0, h-1/w-1]
7676
auto grid_x_t = EigenTensor<T, 3>::From(grid_x);

paddle/fluid/platform/init.cc

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -115,9 +115,9 @@ void InitDevices(bool init_p2p, const std::vector<int> devices) {
115115

116116
// windows has no support for openblas multi-thread
117117
#ifdef _WIN32
118-
if (FLAGS_paddle_num_threads > 1) {
119-
FLAGS_paddle_num_threads = 1;
120-
}
118+
if (FLAGS_paddle_num_threads > 1) {
119+
FLAGS_paddle_num_threads = 1;
120+
}
121121
#endif
122122

123123
#ifndef PADDLE_WITH_MKLDNN

paddle/fluid/platform/port.h

Lines changed: 28 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -24,38 +24,38 @@
2424
#include "glog/logging.h"
2525

2626
#if !defined(_WIN32)
27-
#include <dlfcn.h> // dladdr
28-
#include <execinfo.h> // backtrace
29-
#include <sys/stat.h>
30-
#include <algorithm> // std::accumulate
27+
#include <dlfcn.h> // dladdr
28+
#include <execinfo.h> // backtrace
29+
#include <sys/stat.h>
30+
#include <algorithm> // std::accumulate
3131
#else
32-
#include <stdio.h>
33-
#include <io.h> // _popen, _pclose
34-
#include <windows.h>
35-
#include <numeric> // std::accumulate in msvc
36-
#ifndef S_ISDIR // windows port for sys/stat.h
37-
#define S_ISDIR(mode) (((mode)&S_IFMT) == S_IFDIR)
38-
#endif // S_ISDIR
39-
40-
static void *dlsym(void *handle, const char *symbol_name) {
41-
FARPROC found_symbol;
42-
found_symbol = GetProcAddress((HMODULE)handle, symbol_name);
43-
44-
if (found_symbol == NULL) {
45-
throw std::runtime_error(std::string(symbol_name) + " not found.");
46-
}
47-
return reinterpret_cast<void *>(found_symbol);
32+
#include <io.h> // _popen, _pclose
33+
#include <stdio.h>
34+
#include <windows.h>
35+
#include <numeric> // std::accumulate in msvc
36+
#ifndef S_ISDIR // windows port for sys/stat.h
37+
#define S_ISDIR(mode) (((mode)&S_IFMT) == S_IFDIR)
38+
#endif // S_ISDIR
39+
40+
static void *dlsym(void *handle, const char *symbol_name) {
41+
FARPROC found_symbol;
42+
found_symbol = GetProcAddress((HMODULE)handle, symbol_name);
43+
44+
if (found_symbol == NULL) {
45+
throw std::runtime_error(std::string(symbol_name) + " not found.");
4846
}
47+
return reinterpret_cast<void *>(found_symbol);
48+
}
4949

50-
static void *dlopen(const char *filename, int flag) {
51-
std::string file_name(filename);
52-
file_name.replace(0, file_name.size() - 1, '/', '\\');
53-
HMODULE hModule = LoadLibrary(file_name.c_str());
54-
if (!hModule) {
55-
throw std::runtime_error(file_name + " not found.");
56-
}
57-
return reinterpret_cast<void *>(hModule);
50+
static void *dlopen(const char *filename, int flag) {
51+
std::string file_name(filename);
52+
file_name.replace(0, file_name.size() - 1, '/', '\\');
53+
HMODULE hModule = LoadLibrary(file_name.c_str());
54+
if (!hModule) {
55+
throw std::runtime_error(file_name + " not found.");
5856
}
57+
return reinterpret_cast<void *>(hModule);
58+
}
5959

6060
#endif // !_WIN32
6161

0 commit comments

Comments
 (0)