Commit 08a817e

committed
Delete the unnecessary device parameter and simplify the constructors of some
mathematical layers.
1 parent ce939b3 commit 08a817e

File tree

1 file changed (+45 −82 lines)


python/paddle/trainer/config_parser.py

Lines changed: 45 additions & 82 deletions
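
The diff below applies one pattern throughout: the explicitly declared device=None keyword is dropped from layer constructors that already forward **xargs to LayerBase, since LayerBase accepts device itself. A minimal sketch of that pattern, using illustrative stub classes rather than the real config_parser.py definitions:

# Illustrative stubs only; this LayerBase is a stand-in for the real class.
class LayerBase(object):
    def __init__(self, name, type_name, size, inputs=None, device=None, **xargs):
        self.name = name
        self.type_name = type_name
        self.size = size
        self.inputs = inputs or []
        self.device = device  # the base class keeps handling the device setting


class NormLayer(LayerBase):
    # After this commit: no explicit device=None parameter; a device= keyword
    # passed by a config script simply rides along in **xargs to LayerBase.
    def __init__(self, name, inputs, **xargs):
        super(NormLayer, self).__init__(name, 'norm', 0, inputs=inputs, **xargs)


layer = NormLayer('norm1', inputs=['img'], device=-1)
assert layer.device == -1  # device still reaches the base class
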
@@ -1803,9 +1803,8 @@ class ConvTransLayer(ConvTransLayerBase):
 
 @config_layer('norm')
 class NormLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
-        super(NormLayer, self).__init__(
-            name, 'norm', 0, inputs=inputs, device=device, **xargs)
+    def __init__(self, name, inputs, **xargs):
+        super(NormLayer, self).__init__(name, 'norm', 0, inputs=inputs, **xargs)
         for input_index in xrange(len(self.inputs)):
             input_layer = self.get_input_layer(input_index)
             norm_conf = self.config.inputs[input_index].norm_conf
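
To see where the device keyword ends up under the old and new NormLayer signatures, a quick check with inspect.getcallargs — these are hypothetical stand-in functions, not the real methods:

import inspect

def old_init(self, name, inputs, device=None, **xargs):
    pass

def new_init(self, name, inputs, **xargs):
    pass

call = dict(name='norm1', inputs=['img'], device=-1)
print(inspect.getcallargs(old_init, None, **call))
# device binds to its own parameter: {..., 'device': -1, 'xargs': {}}
print(inspect.getcallargs(new_init, None, **call))
# device moves into the forwarded kwargs: {..., 'xargs': {'device': -1}}
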
@@ -1817,9 +1816,8 @@ def __init__(self, name, inputs, device=None, **xargs):
 
 @config_layer('pool')
 class PoolLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
-        super(PoolLayer, self).__init__(
-            name, 'pool', 0, inputs=inputs, device=device, **xargs)
+    def __init__(self, name, inputs, **xargs):
+        super(PoolLayer, self).__init__(name, 'pool', 0, inputs=inputs, **xargs)
         for input_index in xrange(len(self.inputs)):
             input_layer = self.get_input_layer(input_index)
             pool_conf = self.config.inputs[input_index].pool_conf
@@ -1851,7 +1849,6 @@ def __init__(self,
                  inputs,
                  active_type="linear",
                  bias=True,
-                 device=None,
                  use_global_stats=True,
                  moving_average_fraction=0.9,
                  batch_norm_type=None,
@@ -1893,7 +1890,6 @@ def __init__(self,
             0,
             active_type=active_type,
             inputs=inputs,
-            device=device,
             **xargs)
 
         if use_global_stats is not None:
@@ -1927,9 +1923,9 @@ def calc_parameter_size(self, image_conf):
 
 @config_layer('trans')
 class TransLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, **xargs):
         super(TransLayer, self).__init__(
-            name, 'trans', 0, inputs=inputs, device=device, **xargs)
+            name, 'trans', 0, inputs=inputs, **xargs)
         config_assert(
             len(self.inputs) == 1,
             'TransLayer must have one and only one input')
@@ -1938,9 +1934,9 @@ def __init__(self, name, inputs, device=None, **xargs):
 
 @config_layer('resize')
 class ResizeLayer(LayerBase):
-    def __init__(self, name, size, inputs, device=None, **xargs):
+    def __init__(self, name, size, inputs, **xargs):
         super(ResizeLayer, self).__init__(
-            name, 'resize', size=size, inputs=inputs, device=device, **xargs)
+            name, 'resize', size=size, inputs=inputs, **xargs)
         config_assert(
             len(self.inputs) == 1,
             'ResizeLayer must have one and only one input')
@@ -2265,15 +2261,9 @@ def Generator(
 
 @config_layer('expand')
 class ExpandLayer(LayerBase):
-    def __init__(self,
-                 name,
-                 inputs,
-                 trans_type='non-seq',
-                 device=None,
-                 bias=False,
-                 **xargs):
+    def __init__(self, name, inputs, trans_type='non-seq', bias=False, **xargs):
         super(ExpandLayer, self).__init__(
-            name, 'expand', 0, inputs=inputs, device=device, **xargs)
+            name, 'expand', 0, inputs=inputs, **xargs)
         config_assert(
             len(self.inputs) == 2, 'ExpandLayer takes 2 and only 2 inputs')
         self.config.trans_type = trans_type
@@ -2304,12 +2294,10 @@ def __init__(self,
                  inputs,
                  trans_type='non-seq',
                  active_type='linear',
-                 device=None,
                  bias=False,
                  output_max_index=None,
                  **xargs):
-        super(MaxLayer, self).__init__(
-            name, 'max', 0, inputs=inputs, device=device, **xargs)
+        super(MaxLayer, self).__init__(name, 'max', 0, inputs=inputs, **xargs)
         config_assert(len(self.inputs) == 1, 'MaxLayer must have 1 input')
         self.config.trans_type = trans_type
         self.config.active_type = active_type
@@ -2356,15 +2344,13 @@ def __init__(self,
                  inputs,
                  active_type='linear',
                  trans_type='non-seq',
-                 device=None,
                  bias=False,
                  **xargs):
         super(SequenceLastInstanceLayer, self).__init__(
             name,
             'seqlastins',
             0,
             inputs=inputs,
-            device=device,
             active_type=active_type,
             **xargs)
         config_assert(
@@ -2378,39 +2364,32 @@ def __init__(self,
 
 @config_layer('seqfirstins')
 class SequenceFirstInstanceLayer(SequenceLastInstanceLayer):
-    def __init__(
-            self,
-            name,
-            inputs,
-            active_type='linear',
-            trans_type='non-seq',
-            device=None,
-            bias=False, ):
+    def __init__(self,
+                 name,
+                 inputs,
+                 active_type='linear',
+                 trans_type='non-seq',
+                 bias=False,
+                 **xargs):
         super(SequenceFirstInstanceLayer, self).__init__(
             name,
             inputs=inputs,
             active_type=active_type,
             device=device,
-            bias=bias)
+            bias=bias,
+            **xargs)
         self.config.trans_type = trans_type
         self.config.select_first = True
 
 
 @config_layer('seqconcat')
 class SequenceConcatLayer(LayerBase):
-    def __init__(self,
-                 name,
-                 inputs,
-                 active_type='linear',
-                 device=None,
-                 bias=False,
-                 **xargs):
+    def __init__(self, name, inputs, active_type='linear', bias=False, **xargs):
         super(SequenceConcatLayer, self).__init__(
             name,
             'seqconcat',
             0,
             inputs=inputs,
-            device=device,
             active_type=active_type,
             **xargs)
         config_assert(
@@ -2428,15 +2407,13 @@ def __init__(self,
                  size,
                  inputs,
                  active_type='linear',
-                 device=None,
                  bias=False,
                  **xargs):
         super(SequenceReshapeLayer, self).__init__(
             name,
             'seqreshape',
             size,
             inputs=inputs,
-            device=device,
             active_type=active_type,
             **xargs)
         config_assert(
@@ -2447,21 +2424,9 @@ def __init__(self,
 
 @config_layer('subseq')
 class SubSequenceLayer(LayerBase):
-    def __init__(self,
-                 name,
-                 inputs,
-                 active_type='linear',
-                 device=None,
-                 bias=False,
-                 **xargs):
+    def __init__(self, name, inputs, active_type='linear', bias=False, **xargs):
         super(SubSequenceLayer, self).__init__(
-            name,
-            'subseq',
-            0,
-            inputs=inputs,
-            device=device,
-            active_type=active_type,
-            **xargs)
+            name, 'subseq', 0, inputs=inputs, active_type=active_type, **xargs)
         config_assert(len(inputs) == 3, 'SubSequenceLayer must have 3 inputs')
         input_layer0 = self.get_input_layer(0)
         size = input_layer0.size
@@ -2471,9 +2436,9 @@ def __init__(self,
 
 @config_layer('out_prod')
 class OuterProdLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(OuterProdLayer, self).__init__(
-            name, 'out_prod', 0, inputs=inputs, device=device, **xargs)
+            name, 'out_prod', 0, inputs=inputs, device=device)
         config_assert(len(inputs) == 2, 'OuterProdLayer must have 2 inputs')
         input_layer0 = self.get_input_layer(0)
         input_layer1 = self.get_input_layer(1)
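
Layers such as OuterProdLayer go the other way: device stays an explicit keyword, but **xargs is dropped, so these constructors now reject any other stray keyword. A hypothetical stub mirroring just the new signature, not the real config_parser.py class:

class OuterProdLayerStub(object):
    # Mirrors only the new signature: explicit device, no **xargs.
    def __init__(self, name, inputs, device=None):
        self.name = name
        self.inputs = inputs
        self.device = device

OuterProdLayerStub('prod', inputs=['a', 'b'], device=0)  # still accepted
try:
    OuterProdLayerStub('prod', inputs=['a', 'b'], dropout=0.5)
except TypeError as e:
    print(e)  # ... got an unexpected keyword argument 'dropout'
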
@@ -2482,9 +2447,9 @@ def __init__(self, name, inputs, device=None, **xargs):
 
 @config_layer('power')
 class PowerLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(PowerLayer, self).__init__(
-            name, 'power', 0, inputs=inputs, device=device, **xargs)
+            name, 'power', 0, inputs=inputs, device=device)
         config_assert(len(inputs) == 2, 'PowerLayer must have 2 inputs')
         input_layer1 = self.get_input_layer(1)
         self.set_layer_size(input_layer1.size)
@@ -2495,8 +2460,13 @@ def __init__(self, name, inputs, device=None, **xargs):
 
 @config_layer('slope_intercept')
 class SlopeInterceptLayer(LayerBase):
-    def __init__(self, name, inputs, slope=1.0, intercept=0.0,
-                 device=None, **xargs):
+    def __init__(self,
+                 name,
+                 inputs,
+                 slope=1.0,
+                 intercept=0.0,
+                 device=None,
+                 **xargs):
         super(SlopeInterceptLayer, self).__init__(
             name, 'slope_intercept', 0, inputs=inputs, device=device, **xargs)
         self.config.slope = slope
@@ -2508,9 +2478,9 @@ def __init__(self, name, inputs, slope=1.0, intercept=0.0,
 
 @config_layer('scaling')
 class ScalingLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(ScalingLayer, self).__init__(
-            name, 'scaling', 0, inputs=inputs, device=device, **xargs)
+            name, 'scaling', 0, inputs=inputs, device=device)
         config_assert(len(inputs) == 2, 'ScalingLayer must have 2 inputs')
         input_layer1 = self.get_input_layer(1)
         self.set_layer_size(input_layer1.size)
@@ -2521,19 +2491,19 @@ def __init__(self, name, inputs, device=None, **xargs):
 
 @config_layer('conv_shift')
 class ConvShiftLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(ConvShiftLayer, self).__init__(
-            name, 'conv_shift', 0, inputs=inputs, device=device, **xargs)
+            name, 'conv_shift', 0, inputs=inputs, device=device)
         config_assert(len(inputs) == 2, 'ConvShiftLayer must have 2 inputs')
         input_layer0 = self.get_input_layer(0)
         self.set_layer_size(input_layer0.size)
 
 
 @config_layer('convex_comb')
 class ConvexCombinationLayer(LayerBase):
-    def __init__(self, name, size, inputs, device=None, **xargs):
+    def __init__(self, name, size, inputs, device=None):
         super(ConvexCombinationLayer, self).__init__(
-            name, 'convex_comb', size, inputs=inputs, device=device, **xargs)
+            name, 'convex_comb', size, inputs=inputs, device=device)
         config_assert(
             len(self.inputs) == 2, 'ConvexCombinationLayer must have 2 inputs')
         config_assert(
@@ -2572,9 +2542,9 @@ def __init__(self, name, inputs, **xargs):
 
 @config_layer('sum_to_one_norm')
 class SumToOneNormLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(SumToOneNormLayer, self).__init__(
-            name, 'sum_to_one_norm', 0, inputs=inputs, device=device, **xargs)
+            name, 'sum_to_one_norm', 0, inputs=inputs, device=device)
         config_assert(
             len(self.inputs) == 1, 'SumToOneNormLayer must have 1 input')
         input_layer0 = self.get_input_layer(0)
@@ -2619,17 +2589,10 @@ def __init__(self,
                  average_strategy='average',
                  trans_type='non-seq',
                  active_type='linear',
-                 device=None,
                  bias=False,
                  **xargs):
         super(AverageLayer, self).__init__(
-            name,
-            'average',
-            0,
-            inputs=inputs,
-            device=device,
-            active_type=active_type,
-            **xargs)
+            name, 'average', 0, inputs=inputs, active_type=active_type, **xargs)
         self.config.average_strategy = average_strategy
         self.config.trans_type = trans_type
         config_assert(len(inputs) == 1, 'AverageLayer must have 1 input')
@@ -2653,9 +2616,9 @@ def __init__(self, name, inputs, cos_scale=5, device=None):
 
 @config_layer('tensor')
 class TensorLayer(LayerBase):
-    def __init__(self, name, size, inputs, device=None, bias=True, **xargs):
+    def __init__(self, name, size, inputs, bias=True, **xargs):
         super(TensorLayer, self).__init__(
-            name, 'tensor', size, inputs=inputs, device=device, **xargs)
+            name, 'tensor', size, inputs=inputs, **xargs)
         config_assert(len(self.inputs) == 2, 'TensorLayer must have 2 inputs')
         config_assert(size > 0, 'size must be positive')
         config_assert(inputs[1].parameter_name == None,
