@@ -2182,7 +2182,7 @@ def SubpixelConv2d(net, scale=2, n_out_channel=None, act=tf.identity, name='subp
21822182
21832183 scope_name = tf .get_variable_scope ().name
21842184 if scope_name :
2185- name = scope_name + '/' + name
2185+ whole_name = scope_name + '/' + name
21862186
21872187 def _PS (X , r , n_out_channel ):
21882188 if n_out_channel >= 1 :
@@ -2204,7 +2204,7 @@ def _PS(X, r, n_out_channel):
22042204
22052205 print (" [TL] SubpixelConv2d %s: scale: %d n_out_channel: %s act: %s" % (name , scale , n_out_channel , act .__name__ ))
22062206
2207- net_new = Layer (inputs , name = name )
2207+ net_new = Layer (inputs , name = whole_name )
22082208 # with tf.name_scope(name):
22092209 with tf .variable_scope (name ) as vs :
22102210 net_new .outputs = act (_PS (inputs , r = scale , n_out_channel = n_out_channel ))
@@ -5378,6 +5378,41 @@ def __init__(
53785378 self .all_layers .extend ( [self .outputs ] )
53795379 # self.all_params.extend( variables )
53805380
5381+
class TransposeLayer(Layer):
    """
    The :class:`TransposeLayer` class transposes the dimensions of a tensor, see `tf.transpose() <https://www.tensorflow.org/api_docs/python/tf/transpose>`_ .

    Parameters
    ----------
    layer : a :class:`Layer` instance
        The `Layer` class feeding into this layer.
    perm : list, a permutation of the dimensions
        Similar with numpy.transpose.
    name : a string or None
        An optional name to attach to this layer.
    """
    def __init__(
        self,
        layer = None,
        perm = None,
        name = 'transpose',
    ):
        # Register this layer under `name`; the previous layer's symbolic
        # output becomes this layer's input tensor.
        Layer.__init__(self, name=name)
        self.inputs = layer.outputs
        # `perm` has no sensible default: transposing requires an explicit
        # permutation of the input's dimensions.
        assert perm is not None, "perm (a permutation of the dimensions) must be given"

        print(" [TL] TransposeLayer %s: perm:%s" % (self.name, perm))
        # with tf.variable_scope(name) as vs:
        # tf.transpose creates no trainable variables, so no variable scope
        # is needed — `name` is passed straight to the op.
        self.outputs = tf.transpose(self.inputs, perm=perm, name=name)
        # Copy the bookkeeping collections from the previous layer (copies,
        # not aliases, so mutating them here cannot affect earlier layers)
        # and append this layer's output to the running stack.
        self.all_layers = list(layer.all_layers)
        self.all_params = list(layer.all_params)
        self.all_drop = dict(layer.all_drop)
        self.all_layers.extend([self.outputs])
        # self.all_params.extend( variables )
53815416## TF-Slim layer
53825417class SlimNetsLayer (Layer ):
53835418 """
0 commit comments