
Commit 08d8a62

Merge branch 'develop' of https://github.com/PaddlePaddle/Paddle into develop
2 parents: 4231625 + 9ff5184

5 files changed: +59 −31 lines


paddle/fluid/framework/operator.cc

Lines changed: 6 additions & 18 deletions
@@ -544,11 +544,13 @@ class RuntimeInferShapeContext : public InferShapeContext {
 
   void ShareLoD(const std::string& in, const std::string& out, size_t i = 0,
                 size_t j = 0) const override {
-    PADDLE_ENFORCE_LT(i, Inputs(in).size());
-    PADDLE_ENFORCE_LT(j, Outputs(out).size());
-    Variable* in_var = scope_.FindVar(Inputs(in)[i]);
-    Variable* out_var = scope_.FindVar(Outputs(out)[j]);
+    const std::vector<std::string>& inputs = Inputs(in);
+    const std::vector<std::string>& outputs = Outputs(out);
+    PADDLE_ENFORCE_LT(i, inputs.size());
+    PADDLE_ENFORCE_LT(j, outputs.size());
+    Variable* in_var = scope_.FindVar(inputs.at(i));
     if (!in_var->IsType<LoDTensor>()) return;
+    Variable* out_var = scope_.FindVar(outputs.at(j));
     PADDLE_ENFORCE(out_var->IsType<LoDTensor>(),
                    "The %d-th output of Output(%s) must be LoDTensor.", j, out);
     auto in_tensor = in_var->Get<LoDTensor>();
@@ -576,20 +578,6 @@ class RuntimeInferShapeContext : public InferShapeContext {
     out_tensor->set_layout(in_tensor.layout());
   }
 
-  void ShareLayout(const std::string& in, const std::string& out, size_t i = 0,
-                   size_t j = 0) const {
-    PADDLE_ENFORCE_LT(i, Inputs(in).size());
-    PADDLE_ENFORCE_LT(j, Outputs(out).size());
-    Variable* in_var = scope_.FindVar(Inputs(in)[i]);
-    Variable* out_var = scope_.FindVar(Outputs(out)[j]);
-    if (!in_var->IsType<LoDTensor>()) return;
-    PADDLE_ENFORCE(out_var->IsType<LoDTensor>(),
-                   "The %d-th output of Output(%s) must be LoDTensor.", j, out);
-    auto in_tensor = in_var->Get<LoDTensor>();
-    auto* out_tensor = out_var->GetMutable<LoDTensor>();
-    out_tensor->set_layout(in_tensor.layout());
-  }
-
   bool IsRuntime() const override { return true; }
 
  protected:

paddle/fluid/framework/shape_inference.cc

Lines changed: 0 additions & 10 deletions
@@ -46,16 +46,6 @@ std::vector<DDim> InferShapeContext::GetReaderDims(
   return this->GetRepeatedDims(arg_names[0]);
 }
 
-void InferShapeContext::ShareLoDs(const std::string &in,
-                                  const std::string &out) const {
-  PADDLE_ENFORCE_EQ(Inputs(in).size(), Outputs(out).size(),
-                    "The number of arguments in %s and %s is not equal.", in,
-                    out);
-  for (size_t i = 0; i < in.size(); ++i) {
-    ShareLoD(in, out, i, i);
-  }
-}
-
 DDim InferShapeContext::GetInputsElementDim(const std::string &name,
                                             int idx) const {
   const std::vector<std::string> &names = Inputs(name);

paddle/fluid/framework/shape_inference.h

Lines changed: 0 additions & 2 deletions
@@ -56,8 +56,6 @@ class InferShapeContext {
   virtual const std::vector<std::string> &Outputs(
       const std::string &name) const = 0;
 
-  void ShareLoDs(const std::string &in, const std::string &out) const;
-
   virtual void ShareLoD(const std::string &in, const std::string &out,
                         size_t i = 0, size_t j = 0) const = 0;
 

python/paddle/reader/decorator.py

Lines changed: 37 additions & 1 deletion
@@ -15,7 +15,7 @@
 __all__ = [
     'map_readers', 'buffered', 'compose', 'chain', 'shuffle',
     'ComposeNotAligned', 'firstn', 'xmap_readers', 'PipeReader',
-    'multiprocess_reader'
+    'multiprocess_reader', 'Fake'
 ]
 
 from threading import Thread
@@ -504,3 +504,39 @@ def get_line(self, cut_lines=True, line_break="\n"):
                     yield decomp_buff
             else:
                 break
+
+
+class Fake(object):
+    """
+    fake reader will cache the first data it read and yield it out for data_num times.
+    It is used to cache a data from real reader and use it for speed testing.
+
+    :param reader: the origin reader
+    :param data_num: times that this reader will yield data.
+
+    :return: a fake reader.
+
+    Examples:
+        .. code-block:: python
+
+            def reader():
+                for i in range(10):
+                    yield i
+
+            fake_reader = Fake()(reader, 100)
+    """
+
+    def __init__(self):
+        self.data = None
+        self.yield_num = 0
+
+    def __call__(self, reader, data_num):
+        def fake_reader():
+            if self.data is None:
+                self.data = next(reader())
+            while self.yield_num < data_num:
+                yield self.data
+                self.yield_num += 1
+            self.yield_num = 0
+
+        return fake_reader
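The Fake decorator added above replays the first record it pulls from a real reader data_num times, so an input pipeline can be timed without paying the real per-sample reading cost. Below is a minimal usage sketch, assuming a Paddle build that already contains this commit; the slow_reader function and the timing loop are illustrative only and are not part of the diff:

import time

import paddle.reader


def slow_reader():
    # Stands in for a real reader whose per-sample I/O cost we want to exclude.
    for i in range(1000):
        time.sleep(0.01)  # simulated disk/network latency
        yield i


# Only the first sample is actually read; Fake then replays it 1000 times.
fake_reader = paddle.reader.Fake()(slow_reader, 1000)

start = time.time()
for sample in fake_reader():
    pass  # feed `sample` to the model here when benchmarking a trainer
print("pass over cached data took %.3fs" % (time.time() - start))

Because Fake keeps the cached sample and the yield counter on the decorator instance, and resets the counter after each full pass, every pass over fake_reader() yields exactly data_num copies of the same first record, which is what the new test below checks.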

python/paddle/reader/tests/decorator_test.py

Lines changed: 16 additions & 0 deletions
@@ -203,5 +203,21 @@ def test_multi_process_reader(self):
         self.reader_test(use_pipe=True)
 
 
+class TestFakeReader(unittest.TestCase):
+    def test_fake_reader(self):
+        def reader():
+            for i in range(10):
+                yield i
+
+        data_num = 100
+        fake_reader = paddle.reader.Fake()(reader, data_num)
+        for _ in range(10):
+            i = 0
+            for data in fake_reader():
+                self.assertEqual(data, 0)
+                i += 1
+            self.assertEqual(i, data_num)
+
+
 if __name__ == '__main__':
     unittest.main()
