2 parents 4bbf175 + 0cfb6ee commit 1ef598a
DeepFried2/layers/ReLU.py
@@ -4,9 +4,18 @@
 
 class ReLU(df.Module):
 
-    def __init__(self, alpha = 0):
+    def __init__(self, alpha=0, caxis=None):
+        """ Fancy Rectified Linear Unit.
+        - `alpha` is the "leakyness", i.e. slope of negative part (0=relu, 1=linear).
+        - `caxis` can be specified to create a CReLU, [relu, -relu] along that axis.
+        """
         df.Module.__init__(self)
         self.alpha = alpha
+        self.caxis = caxis
 
-    def symb_forward(self, symb_input):
-        return relu(symb_input, self.alpha)
+    def symb_forward(self, x):
+        if self.caxis is None:
+            return relu(x, self.alpha)
+        else:
+            return df.T.concatenate([relu(x, self.alpha),
+                                     relu(-x, self.alpha)], axis=self.caxis)
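For context, the new caxis option implements a (leaky) CReLU: it keeps both the positive and negative rectifications of the input by concatenating relu(x) and relu(-x) along the given axis, doubling that dimension. A minimal NumPy sketch of what the new symb_forward computes (the crelu helper and the shapes below are illustrative, not part of the DeepFried2 API):

import numpy as np

def crelu(x, alpha=0.0, caxis=1):
    # Leaky-rectify x and -x, then concatenate along `caxis`,
    # mirroring the caxis branch added to symb_forward above.
    pos = np.where(x > 0, x, alpha * x)     # relu(x, alpha)
    neg = np.where(x < 0, -x, -alpha * x)   # relu(-x, alpha)
    return np.concatenate([pos, neg], axis=caxis)

x = np.random.randn(2, 3, 5, 5)   # e.g. (batch, channels, H, W)
y = crelu(x, caxis=1)
assert y.shape == (2, 6, 5, 5)    # channel axis doubled
assert np.allclose(y[:, :3], np.maximum(x, 0))  # alpha=0: first half is plain relu

With alpha=0 this is the standard CReLU; a nonzero alpha applies the same leak to both halves.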