@@ -13,14 +13,14 @@ Sequential model
1313.. code-block :: python
1414
1515 from tensorlayerx.nn import SequentialLayer
16- from tensorlayerx.nn import Dense
16+ from tensorlayerx.nn import Linear
1717 import tensorlayerx as tlx
1818
1919 def get_model ():
2020 layer_list = []
21- layer_list.append(Dense( n_units = 800 , act = tlx.ReLU, in_channels = 784 , name = ' Dense1' ))
22- layer_list.append(Dense( n_units = 800 , act = tlx.ReLU, in_channels = 800 , name = ' Dense2' ))
23- layer_list.append(Dense( n_units = 10 , act = tlx.ReLU, in_channels = 800 , name = ' Dense3' ))
21+ layer_list.append(Linear( out_features = 800 , act = tlx.ReLU, in_features = 784 , name = ' Dense1' ))
22+ layer_list.append(Linear( out_features = 800 , act = tlx.ReLU, in_features = 800 , name = ' Dense2' ))
23+ layer_list.append(Linear( out_features = 10 , act = tlx.ReLU, in_features = 800 , name = ' Dense3' ))
2424 MLP = SequentialLayer(layer_list)
2525 return MLP
2626
@@ -36,18 +36,18 @@ In this case, you need to manually input the output shape of the previous layer
3636
3737 import tensorlayerx as tlx
3838 from tensorlayerx.nn import Module
39- from tensorlayerx.nn import Dropout, Dense
39+ from tensorlayerx.nn import Dropout, Linear
4040 class CustomModel (Module ):
4141
4242 def __init__ (self ):
4343 super (CustomModel, self ).__init__ ()
4444
45- self .dropout1 = Dropout(keep = 0.8 )
46- self .dense1 = Dense( n_units = 800 , act = tlx.ReLU, in_channels = 784 )
47- self .dropout2 = Dropout(keep = 0.8 )
48- self .dense2 = Dense( n_units = 800 , act = tlx.ReLU, in_channels = 800 )
49- self .dropout3 = Dropout(keep = 0.8 )
50- self .dense3 = Dense( n_units = 10 , act = None , in_channels = 800 )
45+ self .dropout1 = Dropout(p = 0.2 )
46+ self .dense1 = Linear( out_features = 800 , act = tlx.ReLU, in_features = 784 )
47+ self .dropout2 = Dropout(p = 0.2 )
48+ self .dense2 = Linear( out_features = 800 , act = tlx.ReLU, in_features = 800 )
49+ self .dropout3 = Dropout(p = 0.2 )
50+ self .dense3 = Linear( out_features = 10 , act = None , in_features = 800 )
5151
5252 def forward (self , x , foo = False ):
5353 z = self .dropout1(x)
@@ -76,18 +76,18 @@ In this case, you do not manually input the output shape of the previous layer t
7676
7777 import tensorlayerx as tlx
7878 from tensorlayerx.nn import Module
79- from tensorlayerx.nn import Dropout, Dense
79+ from tensorlayerx.nn import Dropout, Linear
8080 class CustomModel (Module ):
8181
8282 def __init__ (self ):
8383 super (CustomModel, self ).__init__ ()
8484
85- self .dropout1 = Dropout(keep = 0.8 )
86- self .dense1 = Dense( n_units = 800 , act = tlx.ReLU)
87- self .dropout2 = Dropout(keep = 0.8 )
88- self .dense2 = Dense( n_units = 800 , act = tlx.ReLU)
89- self .dropout3 = Dropout(keep = 0.8 )
90- self .dense3 = Dense( n_units = 10 , act = None )
85+ self .dropout1 = Dropout(p = 0.2 )
86+ self .dense1 = Linear( out_features = 800 , act = tlx.ReLU)
87+ self .dropout2 = Dropout(p = 0.2 )
88+ self .dense2 = Linear( out_features = 800 , act = tlx.ReLU)
89+ self .dropout3 = Dropout(p = 0.2 )
90+ self .dense3 = Linear( out_features = 10 , act = None )
9191
9292 def forward (self , x , foo = False ):
9393 z = self .dropout1(x)
@@ -131,13 +131,13 @@ For dynamic model, call the layer multiple time in forward function
131131.. code-block :: python
132132
133133 import tensorlayerx as tlx
134- from tensorlayerx.nn import Module, Dense , Concat
134+ from tensorlayerx.nn import Module, Linear , Concat
135135 class MyModel (Module ):
136136 def __init__ (self ):
137137 super (MyModel, self ).__init__ ()
138- self .dense_shared = Dense( n_units = 800 , act = tlx.ReLU, in_channels = 784 )
139- self .dense1 = Dense( n_units = 10 , act = tlx.ReLU, in_channels = 800 )
140- self .dense2 = Dense( n_units = 10 , act = tlx.ReLU, in_channels = 800 )
138+ self .dense_shared = Linear( out_features = 800 , act = tlx.ReLU, in_features = 784 )
139+ self .dense1 = Linear( out_features = 10 , act = tlx.ReLU, in_features = 800 )
140+ self .dense2 = Linear( out_features = 10 , act = tlx.ReLU, in_features = 800 )
141141 self .cat = Concat()
142142
143143 def forward (self , x ):
@@ -159,12 +159,12 @@ Print model information
159159
160160 # Model(
161161 # (_inputlayer): Input(shape=[None, 784], name='_inputlayer')
162- # (dropout): Dropout(keep =0.8, name='dropout')
163- # (dense): Dense(n_units =800, relu, in_channels ='784', name='dense')
164- # (dropout_1): Dropout(keep =0.8, name='dropout_1')
165- # (dense_1): Dense(n_units =800, relu, in_channels ='800', name='dense_1')
166- # (dropout_2): Dropout(keep =0.8, name='dropout_2')
167- # (dense_2): Dense(n_units =10, None, in_channels ='800', name='dense_2')
162+ # (dropout): Dropout(p =0.2, name='dropout')
163+ # (dense): Linear(out_features =800, relu, in_features ='784', name='dense')
164+ # (dropout_1): Dropout(p =0.2, name='dropout_1')
165+ # (dense_1): Linear(out_features =800, relu, in_features ='800', name='dense_1')
166+ # (dropout_2): Dropout(p =0.2, name='dropout_2')
167+ # (dense_2): Linear(out_features =10, None, in_features ='800', name='dense_2')
168168 # )
169169
170170 Get specific weights
0 commit comments