@@ -1803,9 +1803,8 @@ class ConvTransLayer(ConvTransLayerBase):
 
 @config_layer('norm')
 class NormLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
-        super(NormLayer, self).__init__(
-            name, 'norm', 0, inputs=inputs, device=device, **xargs)
+    def __init__(self, name, inputs, **xargs):
+        super(NormLayer, self).__init__(name, 'norm', 0, inputs=inputs, **xargs)
         for input_index in xrange(len(self.inputs)):
             input_layer = self.get_input_layer(input_index)
             norm_conf = self.config.inputs[input_index].norm_conf
@@ -1817,9 +1816,8 @@ def __init__(self, name, inputs, device=None, **xargs):
 
 @config_layer('pool')
 class PoolLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
-        super(PoolLayer, self).__init__(
-            name, 'pool', 0, inputs=inputs, device=device, **xargs)
+    def __init__(self, name, inputs, **xargs):
+        super(PoolLayer, self).__init__(name, 'pool', 0, inputs=inputs, **xargs)
         for input_index in xrange(len(self.inputs)):
             input_layer = self.get_input_layer(input_index)
             pool_conf = self.config.inputs[input_index].pool_conf
@@ -1851,7 +1849,6 @@ def __init__(self,
                  inputs,
                  active_type="linear",
                  bias=True,
-                 device=None,
                  use_global_stats=True,
                  moving_average_fraction=0.9,
                  batch_norm_type=None,
@@ -1893,7 +1890,6 @@ def __init__(self,
             0,
             active_type=active_type,
             inputs=inputs,
-            device=device,
             **xargs)
 
         if use_global_stats is not None:
@@ -1927,9 +1923,9 @@ def calc_parameter_size(self, image_conf):
 
 @config_layer('trans')
 class TransLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, **xargs):
         super(TransLayer, self).__init__(
-            name, 'trans', 0, inputs=inputs, device=device, **xargs)
+            name, 'trans', 0, inputs=inputs, **xargs)
         config_assert(
             len(self.inputs) == 1,
             'TransLayer must have one and only one input')
@@ -1938,9 +1934,9 @@ def __init__(self, name, inputs, device=None, **xargs):
 
 @config_layer('resize')
 class ResizeLayer(LayerBase):
-    def __init__(self, name, size, inputs, device=None, **xargs):
+    def __init__(self, name, size, inputs, **xargs):
         super(ResizeLayer, self).__init__(
-            name, 'resize', size=size, inputs=inputs, device=device, **xargs)
+            name, 'resize', size=size, inputs=inputs, **xargs)
         config_assert(
             len(self.inputs) == 1,
             'ResizeLayer must have one and only one input')
@@ -2265,15 +2261,9 @@ def Generator(
 
 @config_layer('expand')
 class ExpandLayer(LayerBase):
-    def __init__(self,
-                 name,
-                 inputs,
-                 trans_type='non-seq',
-                 device=None,
-                 bias=False,
-                 **xargs):
+    def __init__(self, name, inputs, trans_type='non-seq', bias=False, **xargs):
         super(ExpandLayer, self).__init__(
-            name, 'expand', 0, inputs=inputs, device=device, **xargs)
+            name, 'expand', 0, inputs=inputs, **xargs)
         config_assert(
             len(self.inputs) == 2, 'ExpandLayer takes 2 and only 2 inputs')
         self.config.trans_type = trans_type
@@ -2304,12 +2294,10 @@ def __init__(self,
                  inputs,
                  trans_type='non-seq',
                  active_type='linear',
-                 device=None,
                  bias=False,
                  output_max_index=None,
                  **xargs):
-        super(MaxLayer, self).__init__(
-            name, 'max', 0, inputs=inputs, device=device, **xargs)
+        super(MaxLayer, self).__init__(name, 'max', 0, inputs=inputs, **xargs)
         config_assert(len(self.inputs) == 1, 'MaxLayer must have 1 input')
         self.config.trans_type = trans_type
         self.config.active_type = active_type
@@ -2356,15 +2344,13 @@ def __init__(self,
                  inputs,
                  active_type='linear',
                  trans_type='non-seq',
-                 device=None,
                  bias=False,
                  **xargs):
         super(SequenceLastInstanceLayer, self).__init__(
             name,
             'seqlastins',
             0,
             inputs=inputs,
-            device=device,
             active_type=active_type,
             **xargs)
         config_assert(
@@ -2378,39 +2364,31 @@ def __init__(self,
 
 @config_layer('seqfirstins')
 class SequenceFirstInstanceLayer(SequenceLastInstanceLayer):
-    def __init__(
-            self,
-            name,
-            inputs,
-            active_type='linear',
-            trans_type='non-seq',
-            device=None,
-            bias=False, ):
+    def __init__(self,
+                 name,
+                 inputs,
+                 active_type='linear',
+                 trans_type='non-seq',
+                 bias=False,
+                 **xargs):
         super(SequenceFirstInstanceLayer, self).__init__(
             name,
             inputs=inputs,
             active_type=active_type,
-            device=device,
-            bias=bias)
+            bias=bias,
+            **xargs)
         self.config.trans_type = trans_type
         self.config.select_first = True
 
 
 @config_layer('seqconcat')
 class SequenceConcatLayer(LayerBase):
-    def __init__(self,
-                 name,
-                 inputs,
-                 active_type='linear',
-                 device=None,
-                 bias=False,
-                 **xargs):
+    def __init__(self, name, inputs, active_type='linear', bias=False, **xargs):
         super(SequenceConcatLayer, self).__init__(
             name,
             'seqconcat',
             0,
             inputs=inputs,
-            device=device,
             active_type=active_type,
             **xargs)
         config_assert(
@@ -2428,15 +2406,13 @@ def __init__(self,
                  size,
                  inputs,
                  active_type='linear',
-                 device=None,
                  bias=False,
                  **xargs):
         super(SequenceReshapeLayer, self).__init__(
             name,
             'seqreshape',
             size,
             inputs=inputs,
-            device=device,
             active_type=active_type,
             **xargs)
         config_assert(
@@ -2447,21 +2423,9 @@ def __init__(self,
 
 @config_layer('subseq')
 class SubSequenceLayer(LayerBase):
-    def __init__(self,
-                 name,
-                 inputs,
-                 active_type='linear',
-                 device=None,
-                 bias=False,
-                 **xargs):
+    def __init__(self, name, inputs, active_type='linear', bias=False, **xargs):
         super(SubSequenceLayer, self).__init__(
-            name,
-            'subseq',
-            0,
-            inputs=inputs,
-            device=device,
-            active_type=active_type,
-            **xargs)
+            name, 'subseq', 0, inputs=inputs, active_type=active_type, **xargs)
         config_assert(len(inputs) == 3, 'SubSequenceLayer must have 3 inputs')
         input_layer0 = self.get_input_layer(0)
         size = input_layer0.size
@@ -2471,9 +2435,9 @@ def __init__(self,
 
 @config_layer('out_prod')
 class OuterProdLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(OuterProdLayer, self).__init__(
-            name, 'out_prod', 0, inputs=inputs, device=device, **xargs)
+            name, 'out_prod', 0, inputs=inputs, device=device)
         config_assert(len(inputs) == 2, 'OuterProdLayer must have 2 inputs')
         input_layer0 = self.get_input_layer(0)
         input_layer1 = self.get_input_layer(1)
@@ -2482,9 +2446,9 @@ def __init__(self, name, inputs, device=None, **xargs):
 
 @config_layer('power')
 class PowerLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(PowerLayer, self).__init__(
-            name, 'power', 0, inputs=inputs, device=device, **xargs)
+            name, 'power', 0, inputs=inputs, device=device)
         config_assert(len(inputs) == 2, 'PowerLayer must have 2 inputs')
         input_layer1 = self.get_input_layer(1)
         self.set_layer_size(input_layer1.size)
@@ -2495,8 +2459,13 @@ def __init__(self, name, inputs, device=None, **xargs):
 
 @config_layer('slope_intercept')
 class SlopeInterceptLayer(LayerBase):
-    def __init__(self, name, inputs, slope=1.0, intercept=0.0,
-                 device=None, **xargs):
+    def __init__(self,
+                 name,
+                 inputs,
+                 slope=1.0,
+                 intercept=0.0,
+                 device=None,
+                 **xargs):
         super(SlopeInterceptLayer, self).__init__(
             name, 'slope_intercept', 0, inputs=inputs, device=device, **xargs)
         self.config.slope = slope
@@ -2508,9 +2477,9 @@ def __init__(self, name, inputs, slope=1.0, intercept=0.0,
 
 @config_layer('scaling')
 class ScalingLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(ScalingLayer, self).__init__(
-            name, 'scaling', 0, inputs=inputs, device=device, **xargs)
+            name, 'scaling', 0, inputs=inputs, device=device)
         config_assert(len(inputs) == 2, 'ScalingLayer must have 2 inputs')
         input_layer1 = self.get_input_layer(1)
         self.set_layer_size(input_layer1.size)
@@ -2521,19 +2490,19 @@ def __init__(self, name, inputs, device=None, **xargs):
 
 @config_layer('conv_shift')
 class ConvShiftLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(ConvShiftLayer, self).__init__(
-            name, 'conv_shift', 0, inputs=inputs, device=device, **xargs)
+            name, 'conv_shift', 0, inputs=inputs, device=device)
         config_assert(len(inputs) == 2, 'ConvShiftLayer must have 2 inputs')
         input_layer0 = self.get_input_layer(0)
         self.set_layer_size(input_layer0.size)
 
 
 @config_layer('convex_comb')
 class ConvexCombinationLayer(LayerBase):
-    def __init__(self, name, size, inputs, device=None, **xargs):
+    def __init__(self, name, size, inputs, device=None):
         super(ConvexCombinationLayer, self).__init__(
-            name, 'convex_comb', size, inputs=inputs, device=device, **xargs)
+            name, 'convex_comb', size, inputs=inputs, device=device)
         config_assert(
             len(self.inputs) == 2, 'ConvexCombinationLayer must have 2 inputs')
         config_assert(
@@ -2572,9 +2541,9 @@ def __init__(self, name, inputs, **xargs):
 
 @config_layer('sum_to_one_norm')
 class SumToOneNormLayer(LayerBase):
-    def __init__(self, name, inputs, device=None, **xargs):
+    def __init__(self, name, inputs, device=None):
         super(SumToOneNormLayer, self).__init__(
-            name, 'sum_to_one_norm', 0, inputs=inputs, device=device, **xargs)
+            name, 'sum_to_one_norm', 0, inputs=inputs, device=device)
         config_assert(
             len(self.inputs) == 1, 'SumToOneNormLayer must have 1 input')
         input_layer0 = self.get_input_layer(0)
@@ -2619,17 +2588,10 @@ def __init__(self,
                  average_strategy='average',
                  trans_type='non-seq',
                  active_type='linear',
-                 device=None,
                  bias=False,
                  **xargs):
         super(AverageLayer, self).__init__(
-            name,
-            'average',
-            0,
-            inputs=inputs,
-            device=device,
-            active_type=active_type,
-            **xargs)
+            name, 'average', 0, inputs=inputs, active_type=active_type, **xargs)
         self.config.average_strategy = average_strategy
         self.config.trans_type = trans_type
         config_assert(len(inputs) == 1, 'AverageLayer must have 1 input')
@@ -2653,9 +2615,9 @@ def __init__(self, name, inputs, cos_scale=5, device=None):
 
 @config_layer('tensor')
 class TensorLayer(LayerBase):
-    def __init__(self, name, size, inputs, device=None, bias=True, **xargs):
+    def __init__(self, name, size, inputs, bias=True, **xargs):
         super(TensorLayer, self).__init__(
-            name, 'tensor', size, inputs=inputs, device=device, **xargs)
+            name, 'tensor', size, inputs=inputs, **xargs)
         config_assert(len(self.inputs) == 2, 'TensorLayer must have 2 inputs')
         config_assert(size > 0, 'size must be positive')
         config_assert(inputs[1].parameter_name == None,
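The refactoring pattern throughout this diff is uniform: each layer subclass stops declaring its own `device` keyword and lets `**xargs` forward it, together with any other generic option, to the base constructor. Below is a minimal sketch of that pattern using simplified stand-in classes, not the real PaddlePaddle `LayerBase`/`NormLayer`, and assuming (as the diff implies) that the base constructor already accepts `device`:

# Simplified sketch of the pattern applied in this diff (hypothetical
# stand-in classes; the real LayerBase takes more parameters).
class LayerBase(object):
    def __init__(self, name, type, size, inputs, device=None, **xargs):
        self.name = name
        self.type = type
        self.size = size
        self.inputs = inputs
        self.device = device  # the base class is the single owner of `device`


class NormLayer(LayerBase):
    # After the change: no explicit `device` parameter; it rides in **xargs.
    def __init__(self, name, inputs, **xargs):
        super(NormLayer, self).__init__(name, 'norm', 0, inputs=inputs, **xargs)


layer = NormLayer('img_norm', inputs=[], device=0)
assert layer.device == 0  # `device` still reaches the base class via **xargs

Note that a few element-wise layers (OuterProdLayer, PowerLayer, ScalingLayer, ConvShiftLayer, ConvexCombinationLayer, SumToOneNormLayer) move in the opposite direction, keeping an explicit `device=None` but dropping `**xargs`, so their constructors accept only the keywords they actually handle.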