@@ -18,6 +18,8 @@ public enum ActivationFunctionType {
     case softsign /// SoftSign Activation Function
     case sinusoid /// Sinusoid Activation Function
     case gaussian /// Gaussian Activation Function
+    case ReLU /// ReLU Activation Function
+    case LeakyReLU /// Leaky ReLU Activation Function
 
     /**
     The activate method returns an activation function.
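The body of activate's switch is not part of this diff, so the following is only a sketch of the dispatch pattern, using made-up case names (linear, relu, leakyReLU) rather than the repository's actual ones; it shows how each case hands back a (Float) -> Float closure:

// Minimal standalone sketch (not this repository's actual activate method).
enum ActivationSketch {
    case linear, relu, leakyReLU

    // Return the activation function that matches the chosen case.
    func activate() -> (Float) -> Float {
        switch self {
        case .linear:    return { $0 }
        case .relu:      return { max(0, $0) }
        case .leakyReLU: return { max(0.01 * $0, $0) }
        }
    }
}

let relu = ActivationSketch.relu.activate()
print(relu(-2.0), relu(3.0))   // 0.0 3.0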
@@ -48,6 +50,8 @@ public enum ActivationFunctionType {
 
     /**
     The derivative method returns the derivative of the activation function a Layer object is using.
+    Note that a Layer object is instantiated with an ActivationFunctionType. This method simply checks which
+    ActivationFunctionType the layer is using and returns the derivative for that layer's particular ActivationFunctionType.
 
     - returns: A method that takes in a Float as a parameter and returns a Float.
     */
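To make the activate/derivative pair concrete, here is a hedged usage sketch of a single backpropagation step. None of the call sites or names below (z, dLda, f, df) come from this repository; the Leaky ReLU formula is taken from the helpers added further down in this diff.

// Hypothetical backprop step: forward a = f(z), backward dL/dz = dL/da * f'(z).
let z: Float = -0.5      // pre-activation value
let dLda: Float = 0.8    // gradient flowing back from the next layer

// Leaky ReLU and its derivative, matching the formulas added in this diff.
let f:  (Float) -> Float = { max(0.01 * $0, $0) }
let df: (Float) -> Float = { $0 < 0 ? 0.01 : 1.0 }

let a    = f(z)           // forward activation: -0.005
let dLdz = dLda * df(z)   // backward gradient:  0.8 * 0.01 ≈ 0.008
print(a, dLdz)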
@@ -101,6 +105,14 @@ public enum ActivationFunctionType {
         return exp ( -powf ( val , 2 ) )
     }
 
+    private func fncReLU( val: Float ) -> Float {
+        return max ( 0 , val)
+    }
+
+    private func fncLeakyReLU( val: Float ) -> Float {
+        return max ( 0.01 * val, val)
+    }
+
     // MARK: Derivatives
     private func derivativeOfLinear( val: Float ) -> Float {
         return 1.0
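fncReLU and fncLeakyReLU are private to the enum, so here is a quick standalone sanity check of the two formulas above, using free-function copies with names of my own choosing:

// Free-function copies of the formulas added above, for a standalone check.
func relu(_ x: Float) -> Float      { return max(0, x) }
func leakyReLU(_ x: Float) -> Float { return max(0.01 * x, x) }

print(relu(-3.0), relu(2.5))             // 0.0 2.5   (negatives clamp to zero)
print(leakyReLU(-3.0), leakyReLU(2.5))   // -0.03 2.5 (negatives scaled by 0.01)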
@@ -126,6 +138,14 @@ public enum ActivationFunctionType {
         return - 2 * val * exp ( -powf ( val , 2 ) )
     }
 
+    private func derivativeOfReLU( val: Float ) -> Float {
+        return ( val < 0.0 ) ? 0.0 : 1.0
+    }
+
+    private func derivativeOfLeakyReLU( val: Float ) -> Float {
+        return ( val < 0.0 ) ? 0.01 : 1.0
+    }
+
     /// A fallback method to satisfy the switch statements in the activate and derivative methods. It returns -1 to indicate that an error has occurred (a non-existent enum case was encountered).
     private func error( val: Float ) -> Float {
         return - 1
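A cheap way to sanity-check the new derivatives is a central-difference comparison. This standalone sketch re-declares the Leaky ReLU formulas (the enum's methods are private), and all names in it are illustrative. Note that at val == 0 the code above picks 1.0, a common subgradient convention since ReLU and Leaky ReLU are not differentiable at zero.

// Compare the analytic Leaky ReLU derivative with a central finite difference.
func leakyReLU(_ x: Float) -> Float  { return max(0.01 * x, x) }
func dLeakyReLU(_ x: Float) -> Float { return x < 0 ? 0.01 : 1.0 }

let h: Float = 1e-3
let samples: [Float] = [-2.0, -0.5, 0.5, 2.0]
for x in samples {
    let numeric = (leakyReLU(x + h) - leakyReLU(x - h)) / (2 * h)
    print(x, dLeakyReLU(x), numeric)   // analytic and numeric slopes agree up to Float rounding
}
// At x == 0 the central difference straddles the kink and gives (0.01 + 1) / 2 instead.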