Commit 946ad65

Added LeakyReLU layer support.
1 parent 1a295ca commit 946ad65

File tree

3 files changed: +71 −1 lines changed


README.md

Lines changed: 1 addition & 1 deletion
@@ -81,7 +81,7 @@ Reshape:
 Activations:
 
 * ReLU
-* LeakyReLU (only with 0.2)
+* LeakyReLU
 * PReLU (only with 0.2)
 * SELU (only with 0.2)
 * Tanh
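
Since the README no longer restricts LeakyReLU to a slope of 0.2, a quick numeric sanity check of the PyTorch/Keras correspondence can use any slope. This sketch is illustrative and not part of the commit; it only assumes stock `torch`, `keras`, and `numpy` APIs:

```python
# Illustrative check (not part of the commit): PyTorch's negative_slope and
# Keras's alpha describe the same LeakyReLU, so any slope should now convert.
import numpy as np
import torch
import keras

slope = 0.01  # PyTorch's default; previously only 0.2 was supported
x = np.linspace(-3.0, 3.0, 7).astype(np.float32)

torch_out = torch.nn.LeakyReLU(slope)(torch.from_numpy(x)).detach().numpy()

k_model = keras.models.Sequential(
    [keras.layers.LeakyReLU(alpha=slope, input_shape=(7,))])
keras_out = k_model.predict(x[None, :])[0]

np.testing.assert_allclose(torch_out, keras_out, rtol=1e-5)
```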

pytorch2keras/layers.py

Lines changed: 21 additions & 0 deletions
@@ -475,6 +475,26 @@ def convert_relu(params, w_name, scope_name, inputs, layers, weights):
     layers[scope_name] = relu(layers[inputs[0]])
 
 
+def convert_lrelu(params, w_name, scope_name, inputs, layers, weights):
+    """
+    Convert leaky relu layer.
+
+    Args:
+        params: dictionary with layer parameters
+        w_name: name prefix in state_dict
+        scope_name: pytorch scope name
+        inputs: pytorch node inputs
+        layers: dictionary with keras tensors
+        weights: pytorch state_dict
+    """
+    print('Converting lrelu ...')
+
+    tf_name = w_name + str(random.random())
+    leakyrelu = \
+        keras.layers.LeakyReLU(alpha=params['alpha'], name=tf_name)
+    layers[scope_name] = leakyrelu(layers[inputs[0]])
+
+
 def convert_sigmoid(params, w_name, scope_name, inputs, layers, weights):
     """
     Convert sigmoid layer.
@@ -732,6 +752,7 @@ def convert_upsample(params, w_name, scope_name, inputs, layers, weights):
     'Sub': convert_elementwise_sub,
     'Concat': convert_concat,
     'Relu': convert_relu,
+    'LeakyRelu': convert_lrelu,
     'Sigmoid': convert_sigmoid,
     'Softmax': convert_softmax,
     'Tanh': convert_tanh,
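
For reference, the new `convert_lrelu` hook can be exercised in isolation. The sketch below is hedged: the direct module import and the names `'input_0'` / `'lrelu_out'` are illustrative only, but the call signature and the `params['alpha']` attribute match the code added above:

```python
# Hedged sketch of driving convert_lrelu by hand; in normal use the converter
# walks the traced PyTorch graph and fills these arguments itself.
import keras

import layers as p2k_layers  # assumes pytorch2keras/ is on sys.path, as in the tests

inp = keras.layers.Input(shape=(16,), name='input_0')
layer_dict = {'input_0': inp}

p2k_layers.convert_lrelu(
    params={'alpha': 0.2},    # negative slope recorded for the LeakyRelu node
    w_name='lrelu0',          # prefix used to build a unique Keras layer name
    scope_name='lrelu_out',   # key under which the output tensor is stored
    inputs=['input_0'],
    layers=layer_dict,
    weights={},               # LeakyReLU contributes nothing to the state_dict
)

k_model = keras.models.Model(inputs=inp, outputs=layer_dict['lrelu_out'])
```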

tests/lrelu.py

Lines changed: 49 additions & 0 deletions
@@ -0,0 +1,49 @@
+import keras # work around segfault
+import sys
+import numpy as np
+
+import torch
+import torch.nn as nn
+from torch.autograd import Variable
+
+sys.path.append('../pytorch2keras')
+from converter import pytorch_to_keras
+
+
+class TestLeakyRelu(nn.Module):
+    """Module for LeakyReLu conversion testing
+    """
+
+    def __init__(self, inp=10, out=16, bias=True):
+        super(TestLeakyRelu, self).__init__()
+        self.linear = nn.Linear(inp, out, bias=True)
+        self.relu = nn.LeakyReLU(inplace=True)
+
+    def forward(self, x):
+        x = self.linear(x)
+        x = self.relu(x)
+        return x
+
+
+if __name__ == '__main__':
+    max_error = 0
+    for i in range(100):
+        inp = np.random.randint(1, 100)
+        out = np.random.randint(1, 100)
+        model = TestLeakyRelu(inp, out, inp % 2)
+
+        input_np = np.random.uniform(0, 1, (1, inp))
+        input_var = Variable(torch.FloatTensor(input_np))
+        output = model(input_var)
+
+        k_model = pytorch_to_keras(model, input_var, (inp,), verbose=True)
+
+        pytorch_output = output.data.numpy()
+        keras_output = k_model.predict(input_np)
+
+        error = np.max(pytorch_output - keras_output)
+        print(error)
+        if max_error < error:
+            max_error = error
+
+    print('Max error: {0}'.format(max_error))
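
The test above relies on `nn.LeakyReLU`'s default slope. A hedged variant that also covers a non-default slope, which the pre-commit converter could not handle, might look like this (same local-import assumptions as `tests/lrelu.py`; run from the `tests/` directory):

```python
import keras  # work around segfault, as in lrelu.py
import sys
import numpy as np

import torch
import torch.nn as nn
from torch.autograd import Variable

sys.path.append('../pytorch2keras')
from converter import pytorch_to_keras


class TestLeakySlope(nn.Module):
    """Linear layer followed by LeakyReLU with an explicit negative slope."""

    def __init__(self, inp=10, out=10, slope=0.3):
        super(TestLeakySlope, self).__init__()
        self.linear = nn.Linear(inp, out)
        self.relu = nn.LeakyReLU(negative_slope=slope)

    def forward(self, x):
        return self.relu(self.linear(x))


if __name__ == '__main__':
    model = TestLeakySlope(10, 10, slope=0.3)

    input_np = np.random.uniform(-1, 1, (1, 10))
    input_var = Variable(torch.FloatTensor(input_np))

    k_model = pytorch_to_keras(model, input_var, (10,), verbose=True)

    error = np.max(np.abs(model(input_var).data.numpy() - k_model.predict(input_np)))
    print('Max error: {0}'.format(error))
```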

0 commit comments
