
Commit dce0383

Merge pull request #8404 from Xreki/core_refine_inference
Refine the inference API and unittests
2 parents: 0dbaad5 + efb6ba3

16 files changed (+450, -351 lines)

paddle/fluid/framework/lod_tensor.cc

Lines changed: 7 additions & 1 deletion
@@ -31,8 +31,14 @@ std::ostream &operator<<(std::ostream &os, const LoD &lod) {
   os << "{";
   for (auto &v : lod) {
     os << "{";
+    bool is_first = true;
     for (auto &i : v) {
-      os << i << ",";
+      if (is_first) {
+        os << i;
+        is_first = false;
+      } else {
+        os << ", " << i;
+      }
     }
     os << "}";
   }
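
For reference, the separator idiom introduced here can be exercised in isolation. The following stand-alone sketch (with an illustrative std::vector in place of a real LoD level) prints {0, 4, 10}, i.e. without the trailing comma the old code produced:

#include <iostream>
#include <vector>

int main() {
  // Illustrative data; a real LoD level holds offsets like these.
  std::vector<size_t> level{0, 4, 10};
  std::cout << "{";
  bool is_first = true;
  for (auto &i : level) {
    if (is_first) {
      // First element: no leading separator.
      std::cout << i;
      is_first = false;
    } else {
      // Remaining elements: prefix with ", ".
      std::cout << ", " << i;
    }
  }
  std::cout << "}" << std::endl;  // prints {0, 4, 10}
  return 0;
}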

paddle/fluid/inference/io.cc

Lines changed: 7 additions & 20 deletions
@@ -32,23 +32,11 @@ void ReadBinaryFile(const std::string& filename, std::string& contents) {
   inputfs.close();
 }

-bool IsParameter(const framework::VarDesc* var,
-                 const framework::ProgramDesc& main_program) {
-  if (var->Persistable()) {
-    // There are many unreachable variables in the program
-    for (size_t i = 0; i < main_program.Size(); ++i) {
-      const framework::BlockDesc& block = main_program.Block(i);
-      for (auto* op : block.AllOps()) {
-        if (op->Type() == framework::kFeedOpType) {
-          continue;
-        }
-        for (auto input_argument_name : op->InputArgumentNames()) {
-          if (input_argument_name == var->Name()) {
-            return true;
-          }
-        }
-      }
-    }
+bool IsPersistable(const framework::VarDesc* var) {
+  if (var->Persistable() &&
+      var->GetType() != framework::proto::VarType::FEED_MINIBATCH &&
+      var->GetType() != framework::proto::VarType::FETCH_LIST) {
+    return true;
   }
   return false;
 }
@@ -65,8 +53,8 @@ void LoadPersistables(framework::Executor& executor,
   std::vector<std::string> paramlist;

   for (auto* var : global_block.AllVars()) {
-    if (IsParameter(var, main_program)) {
-      VLOG(3) << "parameter's name: " << var->Name();
+    if (IsPersistable(var)) {
+      VLOG(3) << "persistable variable's name: " << var->Name();

       framework::VarDesc* new_var = load_block->Var(var->Name());
       new_var->SetShape(var->GetShape());
@@ -101,7 +89,6 @@ void LoadPersistables(framework::Executor& executor,

   executor.Run(*load_program, &scope, 0, true, true);

-  VLOG(3) << "Ran loading successfully";
   delete load_program;
 }
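
Pulled out of the diff, the new rule is simple: a variable is loaded iff it is persistable and is not one of the feed/fetch bookkeeping variables. Below is a minimal, self-contained sketch of that filter using illustrative stand-in types (a plain VarDesc struct and VarType enum, not the real framework classes):

#include <iostream>
#include <string>
#include <vector>

// Illustrative stand-ins for framework::VarDesc / framework::proto::VarType.
enum class VarType { LOD_TENSOR, FEED_MINIBATCH, FETCH_LIST };

struct VarDesc {
  std::string name;
  VarType type;
  bool persistable;
};

// Same decision rule as the new IsPersistable(): keep persistable variables,
// but skip the feed/fetch bookkeeping variables.
bool IsPersistable(const VarDesc& var) {
  return var.persistable && var.type != VarType::FEED_MINIBATCH &&
         var.type != VarType::FETCH_LIST;
}

int main() {
  std::vector<VarDesc> vars = {
      {"fc_0.w_0", VarType::LOD_TENSOR, true},  // loaded
      {"feed", VarType::FEED_MINIBATCH, true},  // skipped
      {"fetch", VarType::FETCH_LIST, true},     // skipped
      {"tmp_0", VarType::LOD_TENSOR, false},    // skipped (not persistable)
  };
  for (const auto& var : vars) {
    if (IsPersistable(var)) {
      std::cout << "persistable variable's name: " << var.name << std::endl;
    }
  }
  return 0;
}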

paddle/fluid/inference/tests/book/CMakeLists.txt

Lines changed: 1 addition & 1 deletion
@@ -30,5 +30,5 @@ inference_test(label_semantic_roles)
 inference_test(recognize_digits ARGS mlp conv)
 inference_test(recommender_system)
 #inference_test(rnn_encoder_decoder)
-inference_test(understand_sentiment)
+inference_test(understand_sentiment ARGS conv)
 inference_test(word2vec)

paddle/fluid/inference/tests/book/test_inference_label_semantic_roles.cc

Lines changed: 36 additions & 10 deletions
@@ -32,16 +32,42 @@ TEST(inference, label_semantic_roles) {
   paddle::framework::LoDTensor word, predicate, ctx_n2, ctx_n1, ctx_0, ctx_p1,
       ctx_p2, mark;
   paddle::framework::LoD lod{{0, 4, 10}};
-
-  SetupLoDTensor(word, lod, static_cast<int64_t>(0), static_cast<int64_t>(1));
-  SetupLoDTensor(
-      predicate, lod, static_cast<int64_t>(0), static_cast<int64_t>(1));
-  SetupLoDTensor(ctx_n2, lod, static_cast<int64_t>(0), static_cast<int64_t>(1));
-  SetupLoDTensor(ctx_n1, lod, static_cast<int64_t>(0), static_cast<int64_t>(1));
-  SetupLoDTensor(ctx_0, lod, static_cast<int64_t>(0), static_cast<int64_t>(1));
-  SetupLoDTensor(ctx_p1, lod, static_cast<int64_t>(0), static_cast<int64_t>(1));
-  SetupLoDTensor(ctx_p2, lod, static_cast<int64_t>(0), static_cast<int64_t>(1));
-  SetupLoDTensor(mark, lod, static_cast<int64_t>(0), static_cast<int64_t>(1));
+  int64_t word_dict_len = 44068;
+  int64_t predicate_dict_len = 3162;
+  int64_t mark_dict_len = 2;
+
+  SetupLoDTensor(word,
+                 lod,
+                 static_cast<int64_t>(0),
+                 static_cast<int64_t>(word_dict_len - 1));
+  SetupLoDTensor(predicate,
+                 lod,
+                 static_cast<int64_t>(0),
+                 static_cast<int64_t>(predicate_dict_len - 1));
+  SetupLoDTensor(ctx_n2,
+                 lod,
+                 static_cast<int64_t>(0),
+                 static_cast<int64_t>(word_dict_len - 1));
+  SetupLoDTensor(ctx_n1,
+                 lod,
+                 static_cast<int64_t>(0),
+                 static_cast<int64_t>(word_dict_len - 1));
+  SetupLoDTensor(ctx_0,
+                 lod,
+                 static_cast<int64_t>(0),
+                 static_cast<int64_t>(word_dict_len - 1));
+  SetupLoDTensor(ctx_p1,
+                 lod,
+                 static_cast<int64_t>(0),
+                 static_cast<int64_t>(word_dict_len - 1));
+  SetupLoDTensor(ctx_p2,
+                 lod,
+                 static_cast<int64_t>(0),
+                 static_cast<int64_t>(word_dict_len - 1));
+  SetupLoDTensor(mark,
+                 lod,
+                 static_cast<int64_t>(0),
+                 static_cast<int64_t>(mark_dict_len - 1));

   std::vector<paddle::framework::LoDTensor*> cpu_feeds;
   cpu_feeds.push_back(&word);

paddle/fluid/inference/tests/book/test_inference_understand_sentiment.cc

Lines changed: 6 additions & 1 deletion
@@ -31,7 +31,12 @@ TEST(inference, understand_sentiment) {

   paddle::framework::LoDTensor words;
   paddle::framework::LoD lod{{0, 4, 10}};
-  SetupLoDTensor(words, lod, static_cast<int64_t>(0), static_cast<int64_t>(10));
+  int64_t word_dict_len = 5147;
+
+  SetupLoDTensor(words,
+                 lod,
+                 static_cast<int64_t>(0),
+                 static_cast<int64_t>(word_dict_len - 1));

   std::vector<paddle::framework::LoDTensor*> cpu_feeds;
   cpu_feeds.push_back(&words);

paddle/fluid/inference/tests/book/test_inference_word2vec.cc

Lines changed: 5 additions & 5 deletions
@@ -31,12 +31,12 @@ TEST(inference, word2vec) {

   paddle::framework::LoDTensor first_word, second_word, third_word, fourth_word;
   paddle::framework::LoD lod{{0, 1}};
-  int64_t dict_size = 2072;  // Hard-coding the size of dictionary
+  int64_t dict_size = 2073;  // The size of dictionary

-  SetupLoDTensor(first_word, lod, static_cast<int64_t>(0), dict_size);
-  SetupLoDTensor(second_word, lod, static_cast<int64_t>(0), dict_size);
-  SetupLoDTensor(third_word, lod, static_cast<int64_t>(0), dict_size);
-  SetupLoDTensor(fourth_word, lod, static_cast<int64_t>(0), dict_size);
+  SetupLoDTensor(first_word, lod, static_cast<int64_t>(0), dict_size - 1);
+  SetupLoDTensor(second_word, lod, static_cast<int64_t>(0), dict_size - 1);
+  SetupLoDTensor(third_word, lod, static_cast<int64_t>(0), dict_size - 1);
+  SetupLoDTensor(fourth_word, lod, static_cast<int64_t>(0), dict_size - 1);

   std::vector<paddle::framework::LoDTensor*> cpu_feeds;
   cpu_feeds.push_back(&first_word);
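
The switch from dict_size to dict_size - 1 (here and in the other unittests above) suggests the upper bound passed to SetupLoDTensor is treated as an inclusive cap on the randomly generated word IDs, so valid IDs must stay in [0, dict_size - 1]; an ID equal to dict_size would fall outside the word dictionary. A small sketch of generating IDs under such an inclusive bound (the helper name and fixed seed are illustrative, not the actual test_helper.h code):

#include <cstdint>
#include <iostream>
#include <random>
#include <vector>

// Fill a vector with random word IDs in the closed range [low, high].
// std::uniform_int_distribution includes both endpoints, so passing
// dict_size - 1 as `high` keeps every ID a valid dictionary index.
std::vector<int64_t> RandomIds(size_t n, int64_t low, int64_t high) {
  std::mt19937_64 rng(0);  // fixed seed for a reproducible sketch
  std::uniform_int_distribution<int64_t> dist(low, high);
  std::vector<int64_t> ids(n);
  for (auto& id : ids) {
    id = dist(rng);
  }
  return ids;
}

int main() {
  int64_t dict_size = 2073;
  for (auto id : RandomIds(10, 0, dict_size - 1)) {
    std::cout << id << " ";  // every value is < dict_size
  }
  std::cout << std::endl;
  return 0;
}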

paddle/fluid/inference/tests/test_helper.h

Lines changed: 2 additions & 2 deletions
@@ -101,8 +101,8 @@ void TestInference(const std::string& dirname,
   if (IsCombined) {
     // All parameters are saved in a single file.
     // Hard-coding the file names of program and parameters in unittest.
-    // Users are free to specify different filename
-    // (provided: the filenames are changed in the python api as well: io.py)
+    // The file names should be consistent with that used in Python API
+    // `fluid.io.save_inference_model`.
     std::string prog_filename = "__model_combined__";
     std::string param_filename = "__params_combined__";
     inference_program = paddle::inference::Load(executor,
