Skip to content

Commit 90df7ff

Browse files
authored
transpiler.py code clean (#15555)
* move var struct to vars_distributed.py, add optimizer's block name, test=develop * renamed optimizer's vars seems complex, revert it, test=develop * replace * with details, test=develop
1 parent 294d594 commit 90df7ff

File tree

3 files changed

+279
-259
lines changed

3 files changed

+279
-259
lines changed

python/paddle/fluid/transpiler/details/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,3 +17,4 @@
1717
from .program_utils import *
1818
from .ufind import *
1919
from .checkport import *
20+
from .vars_distributed import *
Lines changed: 269 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,269 @@
1+
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
from __future__ import print_function
15+
from paddle.fluid.framework import Variable
16+
17+
18+
class VarStruct(object):
    """
    Lightweight record of selected properties of a ``Variable``.

    Only the metadata (name, shape, dtype, type, lod_level, persistable)
    is stored; no reference to the framework object itself is kept.
    """

    def __init__(self, name, shape, dtype, type, lod_level, persistable):
        # Snapshot every property in one unpacking assignment.
        (self.name, self.shape, self.dtype, self.type, self.lod_level,
         self.persistable) = (name, shape, dtype, type, lod_level,
                              persistable)
30+
31+
32+
class VarDistributed(object):
    """
    Record how a variable is distributed on parameter servers.

    Keeps the relationship between an origin var and its slice var, plus
    the slice var's properties such as type/shape/offset/endpoint.
    """

    def __init__(self,
                 origin_var,
                 slice_var,
                 is_slice=None,
                 block_id=None,
                 offset=None,
                 vtype=None,
                 endpoint=None):
        """
        Args:
            origin_var(Variable|VarStruct): origin var properties.
            slice_var(Variable|VarStruct): slice var properties.
            is_slice(bool|None): whether the var is a slice of the origin
                var; if None it is inferred by comparing the two vars.
            block_id(int|None): the block number of the slice var.
            offset(int|None): if the slice var is sliced, offset is the
                numel before the var.
            vtype(str|None): a tag, such as Optimizer/Param/RemotePrefetch.
            endpoint(str|None): the parameter server the slice var is on,
                such as "127.0.0.1:1001".
        """
        # Normalize both inputs to VarStruct so later comparisons and
        # formatting only deal with one type.
        if isinstance(origin_var, Variable):
            self.origin = self.__create_var_struct(origin_var)
        else:
            self.origin = origin_var

        if isinstance(slice_var, Variable):
            self.slice = self.__create_var_struct(slice_var)
        else:
            self.slice = slice_var

        # Infer defaults: a slice var that differs from its origin is a
        # slice.  block_id/offset default to 0 in either case; explicit
        # arguments below override the inferred values.
        self.is_slice = not self.equal(self.origin, self.slice)
        self.block_id = 0
        self.offset = 0

        if is_slice is not None:
            self.is_slice = is_slice
        if block_id is not None:
            self.block_id = block_id
        if offset is not None:
            self.offset = offset

        self.vtype = vtype
        self.endpoint = endpoint

    @staticmethod
    def __create_var_struct(var):
        # Snapshot the properties of a framework Variable into a VarStruct.
        return VarStruct(var.name, var.shape, var.dtype, var.type,
                         var.lod_level, var.persistable)

    @staticmethod
    def equal(var1, var2):
        """
        Whether the two VarStructs describe the same variable.

        Returns:
            bool: True if all recorded properties match, else False.
        """
        assert isinstance(var1, VarStruct) and isinstance(var2, VarStruct)

        return var1.name == var2.name and \
            var1.type == var2.type and \
            var1.shape == var2.shape and \
            var1.dtype == var2.dtype and \
            var1.lod_level == var2.lod_level and \
            var1.persistable == var2.persistable

    def __str__(self):
        # NOTE(review): the previous version passed unused i="{"/e="}"
        # keyword args to str.format; they are dropped here since format()
        # silently ignores unused kwargs, so the output is unchanged.
        origin_var_str = "{name} : fluid.{type}.shape{shape}.astype({dtype})". \
            format(name=self.origin.name, type=self.origin.type,
                   shape=self.origin.shape, dtype=self.origin.dtype)

        slice_var_str = "{name} : fluid.{type}.shape{shape}.astype({dtype})" \
            ".slice({is_slice}).block({block_id}).offset({offset})". \
            format(name=self.slice.name, type=self.slice.type,
                   shape=self.slice.shape, dtype=self.slice.dtype,
                   is_slice=self.is_slice, block_id=self.block_id,
                   offset=self.offset)

        return "var owned: {}, origin var: ( {} ), slice var: ( {} ), endpoint: {} ".format(
            self.vtype, origin_var_str, slice_var_str, self.endpoint)
121+
122+
123+
class VarsDistributed(object):
    """
    A collection of VarDistributed records with lookup helpers.

    Through this class we can get an overview of the distributed
    parameters on the parameter servers; it centralizes variable
    distribution info so other modules (such as io.py) can find variables.
    """

    def __init__(self):
        # All VarDistributed records, kept in insertion order.
        self.distributed_vars = []

    def add_distributed_var(self,
                            origin_var,
                            slice_var,
                            is_slice=None,
                            block_id=None,
                            offset=None,
                            vtype=None,
                            endpoint=None):
        """
        Record one distributed var.

        Args:
            origin_var(Variable|VarStruct): origin var properties.
            slice_var(Variable|VarStruct): slice var properties.
            is_slice(bool|None): whether the var is a slice of the origin var.
            block_id(int|None): the block number of the slice var.
            offset(int|None): if the slice var is sliced, offset is the
                numel before the var.
            vtype(str|None): a tag, such as Optimizer/Param/RemotePrefetch.
            endpoint(str|None): the parameter server the slice var is on,
                such as "127.0.0.1:1001".
        Returns:
            None
        """
        self.distributed_vars.append(
            VarDistributed(origin_var, slice_var, is_slice, block_id, offset,
                           vtype, endpoint))

    def get_distributed_var_by_slice(self, var_name):
        """
        Find the record whose slice var has the given name.

        Args:
            var_name(str): slice var name, such as "w.trainer0.block1".
        Returns:
            VarDistributed|None: the matching record, or None if absent.
        """
        for dist_var in self.distributed_vars:
            if dist_var.slice.name == var_name:
                return dist_var
        return None

    @staticmethod
    def equal(var1, var2):
        """
        Whether two var records describe the same variable.

        Unlike VarDistributed.equal this does not assert VarStruct inputs,
        so it works with any objects carrying the compared attributes.

        Returns:
            bool: True if all compared properties match, else False.
        """
        return var1.name == var2.name and \
            var1.type == var2.type and \
            var1.shape == var2.shape and \
            var1.dtype == var2.dtype and \
            var1.lod_level == var2.lod_level and \
            var1.persistable == var2.persistable

    def get_distributed_var_by_origin_and_ep(self, origin_var_name, endpoint):
        """
        Find the record for an origin var on a specific endpoint.

        Args:
            origin_var_name(str): the origin var's name.
            endpoint(str): the parameter server endpoint, such as
                "127.0.0.1:1001".
        Returns:
            VarDistributed|None: the matching record, or None if absent.
        """
        for dist_var in self.distributed_vars:
            if dist_var.origin.name == origin_var_name and \
                    dist_var.endpoint == endpoint:
                return dist_var
        return None

    def get_distributed_vars_by_vtypes(self, vtypes, groupby=False):
        """
        Get distributed vars whose vtype is in ``vtypes``.

        Args:
            vtypes(list): accepted vtypes, such as ["Optimizer",
                "RemotePrefetch"].
            groupby(bool): when True, group the result by origin var name.

        Returns:
            list: matching records when groupby=False.
            dict: origin var name -> list of records when groupby=True.
        """
        vtype_vars = [
            var for var in self.distributed_vars if var.vtype in vtypes
        ]
        if not groupby:
            return vtype_vars

        params_map = {}
        for var in vtype_vars:
            # setdefault replaces the lookup-then-insert dance and the
            # `in dict.keys()` anti-pattern of the previous version.
            params_map.setdefault(var.origin.name, []).append(var)
        return params_map

    def get_distributed_vars_by_ep(self, endpoint, vtype=None):
        """
        Get distributed vars located on one endpoint, optionally filtered
        by vtype.

        Args:
            endpoint(str): the parameter server endpoint, such as
                "127.0.0.1:2001".
            vtype(str|None): distributed var's vtype, such as "Optimizer",
                "RemotePrefetch"; None means no vtype filtering.

        Returns:
            list: matching records.
        """
        endpoint_vars = [
            var for var in self.distributed_vars if var.endpoint == endpoint
        ]
        if not vtype:
            return endpoint_vars

        return [var for var in endpoint_vars if var.vtype == vtype]

    def overview(self):
        """
        Get an overview string of all params on all parameter servers.

        Returns:
            str: one line per distributed var, joined by newlines.
        """
        return "\n".join(str(var) for var in self.distributed_vars)

0 commit comments

Comments
 (0)