
Commit a89afd4

Merge pull request #13685 from luotao1/naive_cmake
update libpaddle_fluid.a/so
2 parents: 2f5a7cc + 9cbf202

10 files changed: 148 additions and 173 deletions

paddle/fluid/framework/ir/CMakeLists.txt

Lines changed: 1 addition & 0 deletions
@@ -1,5 +1,6 @@
 set(pass_file ${PADDLE_BINARY_DIR}/paddle/fluid/inference/api/paddle_inference_pass.h)
 file(WRITE ${pass_file} "// Generated by the paddle/fluid/framework/ir/CMakeLists.txt. DO NOT EDIT!\n\n")
+file(APPEND ${pass_file} "\#pragma once\n")
 file(APPEND ${pass_file} "\#include \"paddle/fluid/framework/ir/pass.h\"\n")


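The added line gives the generated pass-registry header an include guard. Going only by the file(WRITE)/file(APPEND) commands visible in this hunk, the top of the generated paddle_inference_pass.h would now begin roughly as follows; anything appended to ${pass_file} elsewhere in the build is not shown, so treat this as an illustrative sketch rather than the complete generated file.

// Generated by the paddle/fluid/framework/ir/CMakeLists.txt. DO NOT EDIT!

#pragma once
#include "paddle/fluid/framework/ir/pass.h"
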
paddle/fluid/inference/CMakeLists.txt

Lines changed: 3 additions & 1 deletion
@@ -20,7 +20,8 @@ cc_library(paddle_fluid_origin DEPS ${fluid_modules} paddle_fluid_api)
 add_subdirectory(api)

 # Create static library
-cc_library(paddle_fluid DEPS ${fluid_modules} paddle_fluid_api paddle_inference_api analysis_predictor)
+cc_library(paddle_fluid DEPS ${fluid_modules} paddle_fluid_api paddle_inference_api
+           analysis_predictor zero_copy_tensor)
 if(NOT APPLE)
   # TODO(liuyiqu: Temporarily disable the link flag because it is not support on Mac.
   set(LINK_FLAGS "-Wl,--retain-symbols-file ${CMAKE_CURRENT_SOURCE_DIR}/paddle_fluid.sym")
@@ -31,6 +32,7 @@ endif()
 cc_library(paddle_fluid_shared SHARED
   SRCS io.cc ${CMAKE_CURRENT_SOURCE_DIR}/api/api.cc ${CMAKE_CURRENT_SOURCE_DIR}/api/api_impl.cc
        ${CMAKE_CURRENT_SOURCE_DIR}/api/analysis_predictor.cc
+       ${CMAKE_CURRENT_SOURCE_DIR}/api/details/zero_copy_tensor.cc
   DEPS ${fluid_modules} paddle_fluid_api)

 set_target_properties(paddle_fluid_shared PROPERTIES OUTPUT_NAME paddle_fluid)

paddle/fluid/inference/api/analysis_predictor.cc

Lines changed: 0 additions & 1 deletion
@@ -24,7 +24,6 @@
 #include "paddle/fluid/inference/api/helper.h"
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
 #include "paddle/fluid/inference/api/paddle_inference_pass.h"
-#include "paddle/fluid/inference/api/timer.h"
 #include "paddle/fluid/inference/utils/singleton.h"
 #include "paddle/fluid/platform/profiler.h"

paddle/fluid/inference/api/api_impl.cc

Lines changed: 0 additions & 1 deletion
@@ -23,7 +23,6 @@ limitations under the License. */
 #include "paddle/fluid/framework/feed_fetch_method.h"
 #include "paddle/fluid/inference/api/api_impl.h"
 #include "paddle/fluid/inference/api/helper.h"
-#include "paddle/fluid/inference/api/timer.h"
 #include "paddle/fluid/platform/profiler.h"

 DEFINE_bool(profile, false, "Turn on profiler for fluid");

paddle/fluid/inference/api/helper.h

Lines changed: 19 additions & 126 deletions
@@ -16,19 +16,34 @@

 #include <glog/logging.h>
 #include <sys/time.h>
-#include <algorithm>
+#include <chrono>  // NOLINT
 #include <numeric>
 #include <sstream>
 #include <string>
 #include <vector>
-#include "paddle/fluid/framework/lod_tensor.h"
-#include "paddle/fluid/inference/api/paddle_inference_api.h"
-#include "paddle/fluid/inference/api/timer.h"
 #include "paddle/fluid/string/printf.h"
+#include "paddle_inference_api.h"

 namespace paddle {
 namespace inference {

+// Timer for timer
+class Timer {
+ public:
+  std::chrono::high_resolution_clock::time_point start;
+  std::chrono::high_resolution_clock::time_point startu;
+
+  void tic() { start = std::chrono::high_resolution_clock::now(); }
+  double toc() {
+    startu = std::chrono::high_resolution_clock::now();
+    std::chrono::duration<double> time_span =
+        std::chrono::duration_cast<std::chrono::duration<double>>(startu -
+                                                                  start);
+    double used_time_ms = static_cast<double>(time_span.count()) * 1000.0;
+    return used_time_ms;
+  }
+};
+
 static void split(const std::string &str, char sep,
                   std::vector<std::string> *pieces) {
   pieces->clear();
@@ -154,127 +169,5 @@ static void PrintTime(int batch_size, int repeat, int num_threads, int tid,
   }
 }

-template <typename T>
-std::string LoDTensorSummary(const framework::LoDTensor &tensor) {
-  std::stringstream ss;
-  ss << "\n---- tensor ---" << '\n';
-  ss << "lod: [";
-  for (const auto &level : tensor.lod()) {
-    ss << "[ ";
-    for (auto i : level) {
-      ss << i << ", ";
-    }
-    ss << "]";
-  }
-  ss << "]\n";
-
-  ss << "shape: [";
-  int size = 1;
-  for (int i = 0; i < tensor.dims().size(); i++) {
-    int dim = tensor.dims()[i];
-    ss << dim << ", ";
-    size *= dim;
-  }
-  ss << "]\n";
-
-  ss << "data: ";
-  for (int i = 0; i < std::min(20, size); i++) {
-    ss << tensor.data<T>()[i] << " ";
-  }
-  ss << "\n";
-
-  return ss.str();
-}
-
-static bool CompareLoD(const framework::LoD &a, const framework::LoD &b) {
-  if (a.size() != b.size()) {
-    LOG(ERROR) << string::Sprintf("lod size not match %d != %d", a.size(),
-                                  b.size());
-    return false;
-  }
-  for (size_t i = 0; i < a.size(); i++) {
-    auto &al = a[i];
-    auto &bl = b[i];
-    if (al.size() != bl.size()) {
-      LOG(ERROR) << string::Sprintf("level size %d != %d", al.size(),
-                                    bl.size());
-      return false;
-    }
-  }
-  return true;
-}
-
-static bool CompareShape(const std::vector<int64_t> &a,
-                         const std::vector<int64_t> &b) {
-  if (a.size() != b.size()) {
-    LOG(ERROR) << string::Sprintf("shape size not match %d != %d", a.size(),
-                                  b.size());
-    return false;
-  }
-  for (size_t i = 0; i < a.size(); i++) {
-    if (a[i] != b[i]) {
-      LOG(ERROR) << string::Sprintf("shape %d-th element not match %d != %d", i,
-                                    a[i], b[i]);
-      return false;
-    }
-  }
-  return true;
-}
-
-static bool CompareTensorData(const framework::LoDTensor &a,
-                              const framework::LoDTensor &b) {
-  auto a_shape = framework::vectorize(a.dims());
-  auto b_shape = framework::vectorize(b.dims());
-  size_t a_size = std::accumulate(a_shape.begin(), a_shape.end(), 1,
-                                  [](int a, int b) { return a * b; });
-  size_t b_size = std::accumulate(b_shape.begin(), b_shape.end(), 1,
-                                  [](int a, int b) { return a * b; });
-  if (a_size != b_size) {
-    LOG(ERROR) << string::Sprintf("tensor data size not match, %d != %d",
-                                  a_size, b_size);
-  }
-
-  for (size_t i = 0; i < a_size; i++) {
-    if (a.type() == typeid(float)) {
-      const auto *a_data = a.data<float>();
-      const auto *b_data = b.data<float>();
-      if (std::abs(a_data[i] - b_data[i]) > 1e-3) {
-        LOG(ERROR) << string::Sprintf(
-            "tensor data %d-th element not match, %f != %f", i, a_data[i],
-            b_data[i]);
-        return false;
-      }
-    } else if (a.type() == typeid(int64_t)) {
-      const auto *a_data = a.data<int64_t>();
-      const auto *b_data = b.data<int64_t>();
-      if (std::abs(a_data[i] - b_data[i]) > 1e-3) {
-        LOG(ERROR) << string::Sprintf(
-            "tensor data %d-th element not match, %f != %f", i, a_data[i],
-            b_data[i]);
-        return false;
-      }
-    }
-  }
-
-  return true;
-}
-
-static bool CompareTensor(const framework::LoDTensor &a,
-                          const framework::LoDTensor &b) {
-  if (!CompareLoD(a.lod(), b.lod())) {
-    return false;
-  }
-  if (!CompareShape(framework::vectorize(a.dims()),
-                    framework::vectorize(b.dims()))) {
-    return false;
-  }
-
-  if (!CompareTensorData(a, b)) {
-    return false;
-  }
-
-  return true;
-}
-
 } // namespace inference
 } // namespace paddle
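With timer.h removed (see the deleted file below), code that previously included paddle/fluid/inference/api/timer.h now picks up the same Timer through helper.h. A minimal usage sketch, assuming only the class as it appears in the hunk above (tic() records the start point, toc() returns the elapsed time in milliseconds); TimedSection is a hypothetical wrapper used purely for illustration:

#include "paddle/fluid/inference/api/helper.h"

void TimedSection() {
  paddle::inference::Timer timer;
  timer.tic();                      // mark the start time point
  // ... run the code to be measured, e.g. a predictor Run() call ...
  double elapsed_ms = timer.toc();  // milliseconds elapsed since tic()
  LOG(INFO) << "section took " << elapsed_ms << " ms";  // glog is already pulled in by helper.h
}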

paddle/fluid/inference/api/paddle_inference_api.h

Lines changed: 2 additions & 3 deletions
@@ -268,9 +268,8 @@ struct AnalysisConfig : public NativeConfig {
   // NOT stable yet.
   bool use_feed_fetch_ops{true};

-  // NOTE this is just for internal development, please not use it. NOT
-  // stable
-  // yet.
+  // NOTE this is just for internal development, please not use it.
+  // NOT stable yet.
   bool _use_mkldnn{false};
 };

paddle/fluid/inference/api/timer.h

Lines changed: 0 additions & 39 deletions
This file was deleted.

paddle/fluid/inference/tests/api/anakin_rnn1_tester.cc

Lines changed: 0 additions & 1 deletion
@@ -22,7 +22,6 @@ limitations under the License. */
 #include <vector>
 #include "paddle/fluid/inference/api/helper.h"
 #include "paddle/fluid/inference/api/paddle_inference_api.h"
-#include "paddle/fluid/inference/api/timer.h"
 #include "utils/logger/logger.h"

 DEFINE_string(model, "", "Directory of the inference model.");

paddle/fluid/inference/tests/api/analyzer_rnn1_tester.cc

Lines changed: 0 additions & 1 deletion
@@ -12,7 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-#include "paddle/fluid/inference/api/analysis_predictor.h"
 #include "paddle/fluid/inference/tests/api/tester_helper.h"

 DEFINE_bool(with_precision_check, true, "turn on test");
