@@ -2301,14 +2301,9 @@ def Generator(
 
 @config_layer('expand')
 class ExpandLayer(LayerBase):
-    def __init__(self,
-                 name,
-                 inputs,
-                 trans_type='non-seq',
-                 device=None,
-                 bias=False):
+    def __init__(self, name, inputs, trans_type='non-seq', bias=False, **xargs):
         super(ExpandLayer, self).__init__(
-            name, 'expand', 0, inputs=inputs, device=device)
+            name, 'expand', 0, inputs=inputs, **xargs)
         config_assert(
             len(self.inputs) == 2, 'ExpandLayer takes 2 and only 2 inputs')
         self.config.trans_type = trans_type
@@ -2339,11 +2334,10 @@ def __init__(self,
                  inputs,
                  trans_type='non-seq',
                  active_type='linear',
-                 device=None,
                  bias=False,
-                 output_max_index=None):
-        super(MaxLayer, self).__init__(
-            name, 'max', 0, inputs=inputs, device=device)
+                 output_max_index=None,
+                 **xargs):
+        super(MaxLayer, self).__init__(name, 'max', 0, inputs=inputs, **xargs)
         config_assert(len(self.inputs) == 1, 'MaxLayer must have 1 input')
         self.config.trans_type = trans_type
         self.config.active_type = active_type
@@ -2390,15 +2384,15 @@ def __init__(self,
                  inputs,
                  active_type='linear',
                  trans_type='non-seq',
-                 device=None,
-                 bias=False):
+                 bias=False,
+                 **xargs):
         super(SequenceLastInstanceLayer, self).__init__(
             name,
             'seqlastins',
             0,
             inputs=inputs,
-            device=device,
-            active_type=active_type)
+            active_type=active_type,
+            **xargs)
         config_assert(
             len(inputs) == 1, 'SequenceLastInstanceLayer must have 1 input')
         self.config.trans_type = trans_type
@@ -2410,39 +2404,29 @@ def __init__(self,
 
 @config_layer('seqfirstins')
 class SequenceFirstInstanceLayer(SequenceLastInstanceLayer):
-    def __init__(
-            self,
-            name,
-            inputs,
-            active_type='linear',
-            trans_type='non-seq',
-            device=None,
-            bias=False, ):
+    def __init__(self,
+                 name,
+                 inputs,
+                 active_type='linear',
+                 trans_type='non-seq',
+                 bias=False,
+                 **xargs):
         super(SequenceFirstInstanceLayer, self).__init__(
-            name,
-            inputs=inputs,
-            active_type=active_type,
-            device=device,
-            bias=bias)
+            name, inputs=inputs, active_type=active_type, bias=bias, **xargs)
         self.config.trans_type = trans_type
         self.config.select_first = True
 
 
 @config_layer('seqconcat')
 class SequenceConcatLayer(LayerBase):
-    def __init__(self,
-                 name,
-                 inputs,
-                 active_type='linear',
-                 device=None,
-                 bias=False):
+    def __init__(self, name, inputs, active_type='linear', bias=False, **xargs):
         super(SequenceConcatLayer, self).__init__(
             name,
             'seqconcat',
             0,
             inputs=inputs,
-            device=device,
-            active_type=active_type)
+            active_type=active_type,
+            **xargs)
         config_assert(
             len(inputs) == 2, 'SequenceConcatLayer must have 2 inputs')
         for input_index in xrange(len(self.inputs)):
@@ -2458,15 +2442,15 @@ def __init__(self,
                  size,
                  inputs,
                  active_type='linear',
-                 device=None,
-                 bias=False):
+                 bias=False,
+                 **xargs):
         super(SequenceReshapeLayer, self).__init__(
             name,
             'seqreshape',
             size,
             inputs=inputs,
-            device=device,
-            active_type=active_type)
+            active_type=active_type,
+            **xargs)
         config_assert(
             len(inputs) == 1, 'SequenceReshapeLayer must have 1 inputs')
         self.set_layer_size(size)
@@ -2475,19 +2459,9 @@ def __init__(self,
 
 @config_layer('subseq')
 class SubSequenceLayer(LayerBase):
-    def __init__(self,
-                 name,
-                 inputs,
-                 active_type='linear',
-                 device=None,
-                 bias=False):
+    def __init__(self, name, inputs, active_type='linear', bias=False, **xargs):
         super(SubSequenceLayer, self).__init__(
-            name,
-            'subseq',
-            0,
-            inputs=inputs,
-            device=device,
-            active_type=active_type)
+            name, 'subseq', 0, inputs=inputs, active_type=active_type, **xargs)
         config_assert(len(inputs) == 3, 'SubSequenceLayer must have 3 inputs')
         input_layer0 = self.get_input_layer(0)
         size = input_layer0.size
@@ -2644,15 +2618,10 @@ def __init__(self,
                  average_strategy='average',
                  trans_type='non-seq',
                  active_type='linear',
-                 device=None,
-                 bias=False):
+                 bias=False,
+                 **xargs):
         super(AverageLayer, self).__init__(
-            name,
-            'average',
-            0,
-            inputs=inputs,
-            device=device,
-            active_type=active_type)
+            name, 'average', 0, inputs=inputs, active_type=active_type, **xargs)
         self.config.average_strategy = average_strategy
         self.config.trans_type = trans_type
         config_assert(len(inputs) == 1, 'AverageLayer must have 1 input')
@@ -2676,9 +2645,9 @@ def __init__(self, name, inputs, cos_scale=1, device=None):
 
 @config_layer('tensor')
 class TensorLayer(LayerBase):
-    def __init__(self, name, size, inputs, device=None, bias=True, **xargs):
+    def __init__(self, name, size, inputs, bias=True, **xargs):
         super(TensorLayer, self).__init__(
-            name, 'tensor', size, inputs=inputs, device=device, **xargs)
+            name, 'tensor', size, inputs=inputs, **xargs)
         config_assert(len(self.inputs) == 2, 'TensorLayer must have 2 inputs')
         config_assert(size > 0, 'size must be positive')
         config_assert(inputs[1].parameter_name == None,
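Taken together, these hunks apply one refactor: each layer drops its explicit `device=None` parameter and instead forwards unrecognized keyword arguments to `LayerBase.__init__` through `**xargs`, so `device` (and any future base-class option) passes through without every subclass re-declaring it. Below is a minimal sketch of the pattern; `LayerBase` and `ExpandLayer` here are simplified stand-ins, not the real config-parser classes.

# Minimal sketch of the **xargs forwarding pattern (simplified stand-in
# classes, not the real trainer/config_parser definitions).


class LayerBase(object):
    def __init__(self, name, type, size, inputs=None, device=None,
                 active_type=''):
        # The base class is the one place that interprets shared options
        # such as `device` and `active_type`.
        self.name = name
        self.type = type
        self.size = size
        self.inputs = inputs or []
        self.device = device
        self.active_type = active_type


class ExpandLayer(LayerBase):
    def __init__(self, name, inputs, trans_type='non-seq', bias=False,
                 **xargs):
        # Anything the subclass does not name (e.g. device=...) rides
        # along in **xargs and lands in LayerBase.__init__ unchanged.
        super(ExpandLayer, self).__init__(
            name, 'expand', 0, inputs=inputs, **xargs)
        self.trans_type = trans_type


# Callers can still pin a layer to a device; the keyword simply passes
# through the subclass:
layer = ExpandLayer('expand1', inputs=['in1', 'in2'], device=-1)
assert layer.device == -1

The payoff of this style is that adding a new shared option to `LayerBase` no longer requires touching every subclass signature.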