Skip to content

Commit 1ef598a

Browse files
authored
Merge pull request #106 from lucasb-eyer/crelu
Add CReLU layer.
2 parents 4bbf175 + 0cfb6ee commit 1ef598a

File tree

1 file changed

+12
-3
lines changed

1 file changed

+12
-3
lines changed

DeepFried2/layers/ReLU.py

Lines changed: 12 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,18 @@
44

55
class ReLU(df.Module):
    def __init__(self, alpha=0, caxis=None):
        """Fancy Rectified Linear Unit.

        - `alpha`: the "leakyness", i.e. the slope of the negative part
          (0 gives a plain ReLU, 1 is linear/identity).
        - `caxis`: when given, turns the layer into a CReLU, producing
          the concatenation [relu(x), relu(-x)] along that axis.
        """
        df.Module.__init__(self)
        self.alpha = alpha
        self.caxis = caxis

    def symb_forward(self, x):
        # No concatenation axis set: plain (possibly leaky) rectification.
        if self.caxis is None:
            return relu(x, self.alpha)
        # CReLU: rectify both the positive and the negated input and
        # stack the two results along the configured axis.
        halves = [relu(x, self.alpha), relu(-x, self.alpha)]
        return df.T.concatenate(halves, axis=self.caxis)

0 commit comments

Comments
 (0)