
Commit 9542ce0

Author: Guled (committed)
Added ReLU & Leaky ReLU
1 parent 9f2aca6 · commit 9542ce0

File tree

2 files changed: 25 additions (+), 3 deletions (-)


Example/MLKit/GameScene.swift

Lines changed: 5 additions & 3 deletions
@@ -51,6 +51,8 @@ class GameScene: SKScene, SKPhysicsContactDelegate {
 
     /// Best score (regardless of generation)
     var bestScore: Int = 0
+
+    /// Label that displays the best score (bestScore attribute)
     var bestScoreLabel: SKLabelNode!
 
 
@@ -319,7 +321,7 @@ class GameScene: SKScene, SKPhysicsContactDelegate {
         if generationCounter >= 3 {
 
             // Experiment: Keep some of the last best birds and put them back into the population
-            lastBestGen = (flappyBirdGenerationContainer?.filter({$0.fitness >= 9.0}))!
+            lastBestGen = (flappyBirdGenerationContainer?.filter({$0.fitness >= 7.0}))!
         }
 
         if (currentBird?.fitness)! > maxFitness {
@@ -352,8 +354,8 @@ class GameScene: SKScene, SKPhysicsContactDelegate {
             var offspring = BiologicalProcessManager.onePointCrossover(crossoverRate: 0.5, parentOneGenotype: parents.0.genotypeRepresentation, parentTwoGenotype: parents.1.genotypeRepresentation)
 
             // Mutate their genes
-            BiologicalProcessManager.scrambleMutation(mutationRate: 0.7, genotype: &offspring.0)
-            BiologicalProcessManager.scrambleMutation(mutationRate: 0.7, genotype: &offspring.1)
+            BiologicalProcessManager.inverseMutation(mutationRate: 0.7, genotype: &offspring.0)
+            BiologicalProcessManager.inverseMutation(mutationRate: 0.7, genotype: &offspring.1)
 
 
             // Create a separate neural network for the birds based on their genes
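
The GameScene changes lower the survivor filter from fitness >= 9.0 to 7.0 and swap the mutation operator from scrambleMutation to inverseMutation. The bodies of those operators are not part of this diff, so the following is only a minimal standalone sketch of the usual difference between the two, assuming a genotype stored as an array of Float and the same (mutationRate:genotype:) call shape used above; it is not MLKit's implementation.

import Foundation

// Sketch (assumption, not MLKit's code): scramble mutation shuffles a random slice of
// the genotype, while inverse mutation reverses a random slice, keeping the genes
// themselves intact but flipping their order.
func scrambleMutationSketch(mutationRate: Float, genotype: inout [Float]) {
    guard Float.random(in: 0...1) < mutationRate, genotype.count > 1 else { return }
    let lower = Int.random(in: 0..<(genotype.count - 1))
    let upper = Int.random(in: (lower + 1)..<genotype.count)
    genotype.replaceSubrange(lower...upper, with: genotype[lower...upper].shuffled())
}

func inverseMutationSketch(mutationRate: Float, genotype: inout [Float]) {
    guard Float.random(in: 0...1) < mutationRate, genotype.count > 1 else { return }
    let lower = Int.random(in: 0..<(genotype.count - 1))
    let upper = Int.random(in: (lower + 1)..<genotype.count)
    genotype.replaceSubrange(lower...upper, with: genotype[lower...upper].reversed())
}

// Example: the same offspring genotype under each operator.
var offspringA: [Float] = [0.1, 0.2, 0.3, 0.4, 0.5]
var offspringB = offspringA
scrambleMutationSketch(mutationRate: 0.7, genotype: &offspringA)
inverseMutationSketch(mutationRate: 0.7, genotype: &offspringB)
print(offspringA, offspringB)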

MLKit/Classes/ANN/ActivationFunctionType.swift

Lines changed: 20 additions & 0 deletions
@@ -18,6 +18,8 @@ public enum ActivationFunctionType {
     case softsign /// SoftSign Activation Function
     case sinusoid /// Sinusoid Activation Function
     case gaussian /// Sinusoid Activation Function
+    case ReLU /// ReLU Activation Function
+    case LeakyReLU /// Leaky ReLU Activation Function
 
     /**
      The activate method returns an activation function.
@@ -48,6 +50,8 @@ public enum ActivationFunctionType {
 
     /**
      The activate method returns the derivative of the activation function a Layer object is using.
+     Note that a Layer object is instantiated with an ActivationFunctionType. This method simply observes the
+     ActivationFunctionType that the layer is using and returns the derivative for that layers particular ActivationFunctionType.
 
      - returns: A method that taeks in a Float as a parameter and returns a Float.
      */
@@ -101,6 +105,14 @@ public enum ActivationFunctionType {
         return exp(powf((-val), 2))
     }
 
+    private func fncReLU(val: Float) -> Float {
+        return max(0, val)
+    }
+
+    private func fncLeakyReLU(val: Float) -> Float {
+        return max(0.01*val, val)
+    }
+
     // MARK: Derivatives
     private func derivativeOfLinear(val: Float) -> Float {
         return 1.0
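
These two forward functions pair with the derivative functions added further down. The hunks in this commit do not appear to touch the activate() and derivative() switch statements that the error() comment below refers to, so the new cases would still need dispatch arms there. A self-contained sketch of that dispatch pattern, using a hypothetical mini enum rather than MLKit's actual ActivationFunctionType:

import Foundation

// Hypothetical mini enum (not MLKit's ActivationFunctionType) showing the dispatch
// pattern the new cases would need in activate() and derivative().
enum ActivationSketch {
    case relu
    case leakyReLU

    // Returns the forward function for the case.
    func activate() -> (Float) -> Float {
        switch self {
        case .relu:      return { max(0, $0) }
        case .leakyReLU: return { max(0.01 * $0, $0) }
        }
    }

    // Returns the derivative for the case (slope at exactly 0 taken as 1.0).
    func derivative() -> (Float) -> Float {
        switch self {
        case .relu:      return { $0 < 0.0 ? 0.0 : 1.0 }
        case .leakyReLU: return { $0 < 0.0 ? 0.01 : 1.0 }
        }
    }
}

let f = ActivationSketch.leakyReLU.activate()
print(f(-2.0))  // -0.02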
@@ -126,6 +138,14 @@ public enum ActivationFunctionType {
         return -2 * val * exp(powf((-val), 2))
     }
 
+    private func derivativeOfReLU(val: Float) -> Float {
+        return (val < 0.0) ? 0.0 : 1.0
+    }
+
+    private func derivativeOfLeakyReLU(val: Float) -> Float {
+        return (val < 0.0) ? 0.01 : 1.0
+    }
+
     /// Simply a method to satisfy the switch statements located in the activate and derivative methods. The method simply returns -1 which indicates that an error has occurred (A non-existant enum was discovered).
     private func error(val: Float) -> Float {
         return -1
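
Together with the forward functions above, these derivatives complete the addition. Below is a quick standalone check of the four bodies from this diff, copied into free functions here only so the snippet runs on its own; note that the val < 0.0 test puts an input of exactly 0 on the positive branch, so the reported slope at 0 is 1.0.

import Foundation

// Standalone check of the functions added in this commit.
func fncReLU(val: Float) -> Float { return max(0, val) }
func fncLeakyReLU(val: Float) -> Float { return max(0.01 * val, val) }
func derivativeOfReLU(val: Float) -> Float { return (val < 0.0) ? 0.0 : 1.0 }
func derivativeOfLeakyReLU(val: Float) -> Float { return (val < 0.0) ? 0.01 : 1.0 }

let samples: [Float] = [-2.0, 0.0, 3.0]
for x in samples {
    print(x, fncReLU(val: x), derivativeOfReLU(val: x),
          fncLeakyReLU(val: x), derivativeOfLeakyReLU(val: x))
}
// -2.0 -> ReLU 0.0 (slope 0.0), LeakyReLU -0.02 (slope 0.01)
//  0.0 -> both 0.0; slope taken as 1.0 at exactly 0
//  3.0 -> both 3.0 (slope 1.0)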
