
Commit 5fdd85b

Author: lilong12
bug fix, test=develop (#32753)
1 parent: ce27821

File tree

2 files changed: +3 −3 lines

python/paddle/distributed/fleet/base/topology.py

Lines changed: 1 addition & 1 deletion

@@ -147,7 +147,7 @@ def __init__(self, topology):
         debug_str = "HybridParallelInfo: rank_id: %d, dp_degree: %d, " \
             "mp_degree: %d, pp_degree: %d" % (self.global_rank, self._dp_degree,
             self._mp_degree,self._pp_degree)
-        debug_str += "dp_group: %s, mp_group: %s, pp_group: %s, check/clip group: %s" % (
+        debug_str += ", dp_group: %s, mp_group: %s, pp_group: %s, check/clip group: %s" % (
             self._dp_group, self._mp_group, self._pp_group, self._check_group)
         logger.info(debug_str)
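The one-character change above fixes a log-formatting bug visible in the diff itself: the first half of debug_str ends without a trailing separator, so appending the second format string glued "dp_group" directly onto the pp_degree value. A minimal standalone sketch of the effect (plain Python, not the Paddle code):

```python
# Before the fix: the two message halves are concatenated with no separator,
# so the group info runs into the preceding degree value.
msg = "rank_id: %d, pp_degree: %d" % (0, 2)
print(msg + "dp_group: %s" % ([0, 1],))    # rank_id: 0, pp_degree: 2dp_group: [0, 1]

# After the fix: a leading ", " keeps the fields readable.
print(msg + ", dp_group: %s" % ([0, 1],))  # rank_id: 0, pp_degree: 2, dp_group: [0, 1]
```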

python/paddle/distributed/fleet/meta_parallel/pipeline_parallel.py

Lines changed: 2 additions & 2 deletions

@@ -125,9 +125,9 @@ def _forward(self, cache_id):
         self._recv_activations(cache_id)
 
         if isinstance(self.caches['inputs'][cache_id], tuple):
-            inputs = tuple(t.clone() for t in self.caches['inputs'][cache_id])
+            inputs = tuple(t for t in self.caches['inputs'][cache_id])
         else:
-            inputs = self.caches['inputs'][cache_id].clone()
+            inputs = self.caches['inputs'][cache_id]
 
         self._clear_grads(inputs)
         outputs = self._layers.forward(inputs)
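The commit message says only "bug fix", so the intent of this hunk is a guess: by dropping the per-micro-batch .clone(), the subsequent _clear_grads(inputs) and forward pass operate on the tensors actually held in self.caches rather than on fresh copies. A hedged, standalone sketch of that before/after difference, using a stand-in class instead of real Paddle tensors:

```python
class FakeTensor:
    """Stand-in for a framework tensor; only tracks a .grad slot."""
    def __init__(self, grad="stale"):
        self.grad = grad

    def clone(self):
        return FakeTensor(self.grad)  # an independent copy

cached = FakeTensor()

# Before: clearing grads on a clone leaves the cached tensor untouched.
inputs = cached.clone()
inputs.grad = None
print(cached.grad)   # stale

# After: the cached tensor itself is used, so the clear takes effect.
inputs = cached
inputs.grad = None
print(cached.grad)   # None
```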
