@@ -152,7 +152,7 @@ class Variable(object):
152
152
shape(tuple|list|None): The shape of variable. -1 means the batch size.
153
153
Some kinds of variable do not contain shape, just set it to None.
154
154
dtype(np.dtype|core.VarDesc.VarType|str): The data type of variable.
155
- lod_level(int): The level of lod tensor. 0 means there is not a time
155
+ lod_level(int): The level of lod tensor. 0 means it is not a time
156
156
series data.
157
157
persistable(bool): True if the variable should be saved as check point.
158
158
Defaults to False.
@@ -346,7 +346,7 @@ def instance(cls):
346
346
def __init__ (self ):
347
347
assert not hasattr (
348
348
self .__class__ ,
349
- '_instance' ), 'Please use `instance()` to get OpProtoHolder opject !'
349
+ '_instance' ), 'Please use `instance()` to get OpProtoHolder object !'
350
350
op_protos = get_all_op_protos ()
351
351
self .op_proto_map = {}
352
352
for proto in op_protos :
@@ -368,8 +368,8 @@ def get_op_proto(self, type):
368
368
369
369
class Operator (object ):
370
370
"""
371
- Python Operator class. The operator represents the build in instructs in a
372
- Block. Users can use the build in instructs to describe their neural
371
+ Python Operator class. The operator represents the built-in instructions in a
372
+ Block. Users can use the built-in instructions to describe their neural
373
373
network.
374
374
"""
375
375
@@ -478,7 +478,7 @@ def find_name(var_list, name):
478
478
raise TypeError ("'attrs' should be a dict." )
479
479
for attr in proto .attrs :
480
480
attr_name = attr .name
481
- if (not attr_name in attrs ) or (attrs [attr_name ] is None ):
481
+ if (attr_name not in attrs ) or (attrs [attr_name ] is None ):
482
482
continue
483
483
if isinstance (attrs [attr_name ], Block ):
484
484
self .desc .set_block_attr (attr_name , attrs [attr_name ].desc )
@@ -751,7 +751,7 @@ def iter_parameters(self):
751
751
if isinstance (item [1 ], Parameter ))
752
752
753
753
def create_var (self , * args , ** kwargs ):
754
- var = Variable (self , * args , ** kwargs )
754
+ var = Variable (block = self , * args , ** kwargs )
755
755
if 'initializer' in kwargs :
756
756
kwargs ['initializer' ](var , self )
757
757
return var
@@ -822,13 +822,13 @@ def create_parameter(self, *args, **kwargs):
822
822
823
823
def append_op (self , * args , ** kwargs ):
824
824
op_desc = self .desc .append_op ()
825
- op = Operator (self , op_desc , * args , ** kwargs )
825
+ op = Operator (block = self , desc = op_desc , * args , ** kwargs )
826
826
self .ops .append (op )
827
827
return op
828
828
829
829
def delete_ops (self , ops ):
830
830
# remove from cpp
831
- # FIXME(typhoonzero): remove only the first occuracy .
831
+ # FIXME(typhoonzero): remove only the first occurrence .
832
832
try :
833
833
start = list (self .ops ).index (ops [0 ])
834
834
end = list (self .ops ).index (ops [- 1 ])
@@ -846,6 +846,11 @@ def prepend_op(self, *args, **kwargs):
846
846
return op
847
847
848
848
def sync_with_cpp (self ):
849
+ """
850
+ Sync with the desc on the c++ end.
851
+
852
+ This method is used to synchronize the c++ desc instance generated by backward.
853
+ """
849
854
# sync variables from cpp
850
855
for var in self .desc .all_vars ():
851
856
if not self .has_var (var .name ()):
@@ -891,9 +896,9 @@ def sync_with_cpp(self):
891
896
892
897
def copy_param_info_from (self , other ):
893
898
"""
894
- Copy the information of parameters from other block
899
+ Copy the information of parameters from the other block
895
900
Args:
896
- other(Block): other block
901
+ other(Block): the other block
897
902
898
903
Returns:
899
904
None
@@ -1239,6 +1244,6 @@ def get_var(name, program=None):
1239
1244
if program is None :
1240
1245
program = default_main_program ()
1241
1246
assert isinstance (name , str )
1242
- assert isinstance (name , Program )
1247
+ assert isinstance (program , Program )
1243
1248
1244
1249
return program .global_block ().var (name )
0 commit comments