Skip to content

Commit a1755a3

Browse files
authored
Merge pull request #1068 from Laicheng0830/ADD_Mish
Add mish
2 parents 01acd2b + d3f6af1 commit a1755a3

File tree

3 files changed

+34
-1
lines changed

3 files changed

+34
-1
lines changed

docs/modules/activation.rst

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ For more complex activation, TensorFlow API will be required.
3535
sign
3636
hard_tanh
3737
pixel_wise_softmax
38+
mish
3839

3940
Ramp
4041
------
@@ -68,6 +69,10 @@ Pixel-wise softmax
6869
--------------------
6970
.. autofunction:: pixel_wise_softmax
7071

72+
mish
73+
---------
74+
.. autofunction:: mish
75+
7176
Parametric activation
7277
------------------------------
7378
See ``tensorlayer.layers``.

tensorlayer/activation.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
'htanh',
2020
'hard_tanh',
2121
'pixel_wise_softmax',
22+
'mish',
2223
]
2324

2425

@@ -339,6 +340,25 @@ def pixel_wise_softmax(x, name='pixel_wise_softmax'):
339340
return tf.nn.softmax(x)
340341

341342

343+
def mish(x):
    """Mish activation function: ``x * tanh(softplus(x))``.

    Reference: `Mish: A Self Regularized Non-Monotonic Neural Activation
    Function (Diganta Misra, 2019) <https://arxiv.org/abs/1908.08681>`__

    Parameters
    ----------
    x : Tensor
        input.

    Returns
    -------
    Tensor
        A ``Tensor`` in the same type as ``x``.

    """
    # softplus(x) = log(1 + exp(x)); computed by TF in a numerically stable way.
    softplus_x = tf.math.softplus(x)
    return x * tf.math.tanh(softplus_x)
360+
361+
342362
# Alias
343363
lrelu = leaky_relu
344364
lrelu6 = leaky_relu6

tests/test_activations.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
import unittest
66

77
import tensorflow as tf
8-
8+
import numpy as np
99
import tensorlayer as tl
1010
from tests.utils import CustomTestCase
1111

@@ -116,6 +116,14 @@ def test_swish(self):
116116

117117
self.assertAlmostEqual(computed_output.numpy(), good_output, places=5)
118118

119+
def test_mish(self):
    """Compare tl.act.mish against a NumPy reference over a range of scalars."""
    for i in range(-5, 15):
        # Reference: mish(x) = x * tanh(softplus(x)).
        # np.logaddexp(0, i) == log(1 + exp(i)) is the public, numerically
        # stable softplus; the original `np.math.log(1 + np.math.exp(i))`
        # relied on `np.math`, a non-public alias removed in NumPy 2.0.
        good_output = i * np.tanh(np.logaddexp(0.0, i))

        computed_output = tl.act.mish(float(i))

        self.assertAlmostEqual(computed_output.numpy(), good_output, places=5)
126+
119127

120128
if __name__ == '__main__':
121129

0 commit comments

Comments
 (0)