
Commit 2617ac9

committed: "add doc string"
1 parent bddd4bc commit 2617ac9

2 files changed: +20 lines, -12 lines


python/paddle/fluid/tests/unittests/test_memory_optimization_transpiler.py

Lines changed: 6 additions & 2 deletions
@@ -15,6 +15,7 @@
 from __future__ import print_function
 import unittest
 
+import paddle.fluid as fluid
 import paddle.fluid.layers as layers
 import paddle.fluid.optimizer as optimizer
 from paddle.fluid.framework import Program, program_guard
@@ -66,13 +67,16 @@ def test_inplace_ops(self):
         print("after optimization")
         print(str(result_program))
 
+
 class TestMemoryTranspiler3(unittest.TestCase):
     def setUp(self):
         program = Program()
         with program_guard(program, startup_program=Program()):
             word = fluid.layers.data(name='word', shape=[1], dtype='int64')
-            emb = [fluid.layers.embedding(word, size=[65536, 256], param_attr='emb')
-                   for _ in range(6)]
+            emb = [
+                fluid.layers.embedding(
+                    word, size=[65536, 256], param_attr='emb') for _ in range(6)
+            ]
 
             left = emb.pop(0)
             while len(emb) != 0:
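
For context, the tests in this file all follow the same pattern: build a Program, run the transpiler over it, and print the result before and after. A minimal sketch of that pattern, assuming the Fluid 1.x API — the network, test name, and assertions here are illustrative, not part of the commit:

import unittest

import paddle.fluid as fluid
import paddle.fluid.layers as layers
import paddle.fluid.optimizer as optimizer
from paddle.fluid.framework import Program, program_guard


class TestMemoryTranspilerSketch(unittest.TestCase):  # hypothetical test
    def test_fc_net(self):
        program = Program()
        with program_guard(program, startup_program=Program()):
            x = layers.data(name='x', shape=[13], dtype='float32')
            y = layers.data(name='y', shape=[1], dtype='float32')
            y_predict = layers.fc(input=x, size=1)
            cost = layers.square_error_cost(input=y_predict, label=y)
            avg_cost = layers.mean(cost)
            optimizer.SGD(learning_rate=0.001).minimize(avg_cost)
        print("before optimization")
        print(str(program))
        # The transpiler rewrites the program in place so that variables
        # with non-overlapping live ranges can share memory.
        fluid.memory_optimize(program)
        print("after optimization")
        print(str(program))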

python/paddle/fluid/transpiler/memory_optimization_transpiler.py

File mode changed: 100644 → 100755
Lines changed: 14 additions & 10 deletions
@@ -96,7 +96,6 @@ def _update_graph(self, old_name, new_name, begin_idx=0):
                 self._live_out[i].remove(old_name)
                 self._live_out[i].add(new_name)
 
-
     def _dataflow_analyze(self):
         self._build_graph()
         live_in = defaultdict(set)
@@ -121,8 +120,8 @@ def _fill_pool(self, i, is_forward):
         ]
         if can_optimize:
             for var_name in can_optimize:
-                cache = (var_name, self._find_var(
-                    block_desc, var_name, is_forward).shape())
+                cache = (var_name, self._find_var(block_desc, var_name,
+                                                  is_forward).shape())
                 if cache not in self.pool:
                     self.pool.append(cache)

@@ -232,17 +231,22 @@ def compare_shape(x_shape, cache_shape, opt_level):
                 ]
                 for x, x_shape in out_pair:
                     if (x, x_shape) in self.pool:
-                        raise ValueError("x in pool")
+                        raise ValueError("x in pool, %s, %s" % (x, x_shape))
                     # If x is both in uses and defs, it can not be optimized!
                     if x in self._uses[i]:
                         continue
                     for index, cache_pair in enumerate(self.pool):
                         cache_var = cache_pair[0]
                         cache_shape = cache_pair[1]
                         if not self._has_var(block_desc, cache_var, is_forward):
-                            raise ValueError("cache", cpt.to_text(cache_var), " Not exists!")
+                            raise ValueError("cache",
+                                             cpt.to_text(cache_var),
+                                             " Not exists!")
                         if x == cache_var:
-                            raise ValueError("x : ", cpt.to_text(x), " cache : ", cpt.to_text(cache_var), " is same var!")
+                            raise ValueError("x : ",
+                                             cpt.to_text(x), " cache : ",
+                                             cpt.to_text(cache_var),
+                                             " is same var!")
 
                         x_dtype = self._find_var(block_desc, x,
                                                  is_forward).dtype()
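
These message changes are purely diagnostic: the same conditions still raise, but the first one now names the offending variable and shape. A hypothetical collision would read as follows (values are made up); note that the other two raises still pass several arguments to ValueError, so Python renders their message as a tuple:

x, x_shape = "fc_0.tmp_1", (128, 256)  # hypothetical values
raise ValueError("x in pool, %s, %s" % (x, x_shape))
# ValueError: x in pool, fc_0.tmp_1, (128, 256)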
@@ -266,14 +270,14 @@ def compare_shape(x_shape, cache_shape, opt_level):
                         # Rename the var to the cache var already with
                         # memory allocated in order to reuse the memory.
                         _rename_arg_(self._ops, x, cache_var, begin_idx=i)
-                        self._program.block(block_desc.id)._remove_var(cpt.to_text(
-                            x))
+                        self._program.block(block_desc.id).var(cpt.to_text(
+                            x)).desc = self._find_var(block_desc, cache_var,
+                                                      is_forward)
                         self._update_graph(x, cache_var, begin_idx=i)
                         break
             self._fill_pool(i, is_forward)
 
 
-
 def _process_sub_block_pair(pdesc, sub_block_pair):
     """Creates a list of tuple each of which tracks info of a subblock.
 
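
The substantive change of the commit is in this hunk: previously the optimized-away variable x was removed from the block outright; now its Python-side Variable is kept and its desc is re-pointed at the cache variable's descriptor, presumably so that later lookups of x through the Program API still resolve while the two names share one buffer. A self-contained toy of that aliasing idea (stand-in classes, not transpiler code):

class VarDesc(object):   # stand-in for the C++ VarDesc proto
    def __init__(self, name, shape):
        self.name, self.shape = name, shape

class Variable(object):  # stand-in for fluid's Python Variable
    def __init__(self, desc):
        self.desc = desc

block_vars = {
    "x": Variable(VarDesc("x", (128, 256))),
    "cache": Variable(VarDesc("cache", (128, 256))),
}
# Old behavior: del block_vars["x"]  -> later lookups of "x" fail.
# New behavior: alias the descriptor; "x" stays resolvable and the two
# names now refer to the same underlying storage description.
block_vars["x"].desc = block_vars["cache"].desc
assert block_vars["x"].desc is block_vars["cache"].desc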
@@ -379,7 +383,7 @@ def memory_optimize(input_program, skip_opt_set=None, print_log=False, level=0):
 
       Note: it doesn't not support subblock nested in subblock.
 
-    :param input_program: Input Program
+    :param input_program(str): Input Program
     :param print_log: whether to print debug log.
     :param level: If level=0, reuse if the shape is completely equal, o
     :return:
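
For reference, a minimal end-to-end invocation of the entry point whose docstring is touched above, in Fluid 1.x style. The toy network is illustrative; note that the first argument is a Program object, as the tests in this repository pass one, despite the new (str) annotation:

import paddle.fluid as fluid

main = fluid.Program()
with fluid.program_guard(main, fluid.Program()):
    x = fluid.layers.data(name='x', shape=[8], dtype='float32')
    hidden = fluid.layers.fc(input=x, size=4)
    loss = fluid.layers.mean(hidden)
    fluid.optimizer.SGD(learning_rate=0.01).minimize(loss)

# level=0: only reuse when shapes match exactly; print_log reports the
# variable pairs that were merged.
fluid.memory_optimize(main, print_log=True, level=0)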
