@@ -552,6 +552,26 @@ def convert_tanh(params, w_name, scope_name, inputs, layers, weights):
552
552
layers [scope_name ] = tanh (layers [inputs [0 ]])
553
553
554
554
555
def convert_selu(params, w_name, scope_name, inputs, layers, weights):
    """
    Convert a SELU activation node to a keras Activation layer.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict
    """
    print('Converting selu ...')

    # Keras layer names must be unique within a model; suffix with a
    # random float, matching the convention used by the other converters.
    keras_name = w_name + str(random.random())
    activation = keras.layers.Activation('selu', name=keras_name)
    layers[scope_name] = activation(layers[inputs[0]])
555
575
def convert_transpose (params , w_name , scope_name , inputs , layers , weights ):
556
576
"""
557
577
Convert transpose layer.
@@ -737,6 +757,38 @@ def convert_upsample(params, w_name, scope_name, inputs, layers, weights):
737
757
layers [scope_name ] = upsampling (layers [inputs [0 ]])
738
758
739
759
760
def convert_padding(params, w_name, scope_name, inputs, layers, weights):
    """
    Convert an ONNX Pad node to a keras ZeroPadding2D layer.

    Only constant, zero-valued padding on the spatial (H, W) axes of an
    NCHW tensor is supported; anything else raises.

    Args:
        params: dictionary with layer parameters
        w_name: name prefix in state_dict
        scope_name: pytorch scope name
        inputs: pytorch node inputs
        layers: dictionary with keras tensors
        weights: pytorch state_dict

    Raises:
        AssertionError: if the padding mode is not 'constant' or the pad
            value is non-zero.
    """
    print('Converting padding...')

    if params['mode'] != 'constant':
        raise AssertionError('Cannot convert non-constant padding')

    if params['value'] != 0.0:
        raise AssertionError('Cannot convert non-zero padding')

    tf_name = w_name + str(random.random())

    # ONNX 'pads' for a 4D NCHW input is laid out as
    # [N_begin, C_begin, H_begin, W_begin, N_end, C_end, H_end, W_end],
    # while ZeroPadding2D expects ((top, bottom), (left, right)) —
    # hence indices (2, 6) for H and (3, 7) for W.
    padding_layer = keras.layers.ZeroPadding2D(
        padding=((params['pads'][2], params['pads'][6]),
                 (params['pads'][3], params['pads'][7])),
        name=tf_name
    )

    layers[scope_name] = padding_layer(layers[inputs[0]])
740
792
AVAILABLE_CONVERTERS = {
741
793
'onnx::Conv' : convert_conv ,
742
794
'onnx::ConvTranspose' : convert_convtranspose ,
@@ -756,11 +808,13 @@ def convert_upsample(params, w_name, scope_name, inputs, layers, weights):
756
808
'onnx::Sigmoid' : convert_sigmoid ,
757
809
'onnx::Softmax' : convert_softmax ,
758
810
'onnx::Tanh' : convert_tanh ,
811
+ 'onnx::Selu' : convert_selu ,
759
812
'onnx::Transpose' : convert_transpose ,
760
813
'onnx::Reshape' : convert_reshape ,
761
814
'onnx::MatMul' : convert_matmul ,
762
815
'onnx::Gather' : convert_gather ,
763
816
'onnx::ReduceSum' : convert_reduce_sum ,
764
817
'onnx::Constant' : convert_constant ,
765
818
'onnx::Upsample' : convert_upsample ,
819
+ 'onnx::Pad' : convert_padding ,
766
820
}
0 commit comments