Skip to content

Commit 914dad4

Browse files
author
magicindian
committed
Perceptron Implemented
1 parent cbe0214 commit 914dad4

20 files changed

+66
-11
lines changed

src/aima/learning/statistics/ActivationFunction.java renamed to src/aima/learning/neural/ActivationFunction.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
* Created on Aug 3, 2005
33
*
44
*/
5-
package aima.learning.statistics;
5+
package aima.learning.neural;
66

77
/**
88
* @author Ravi Mohan

src/aima/learning/neural/BackPropLearning.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,8 @@ public BackPropLearning(double learningRate, double momentum) {
1818

1919
}
2020

21-
public void setNeuralNetwork(FeedForwardNeuralNetwork ffnn) {
22-
21+
public void setNeuralNetwork(FunctionApproximator fapp) {
22+
FeedForwardNeuralNetwork ffnn = (FeedForwardNeuralNetwork) fapp;
2323
this.hiddenLayer = ffnn.getHiddenLayer();
2424
this.outputLayer = ffnn.getOutputLayer();
2525
this.hiddenSensitivity = new LayerSensitivity(hiddenLayer);
Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
package aima.learning.neural;
2+
3+
4+
public class HardLimitActivationFunction implements ActivationFunction {
5+
6+
public double activation(double parameter) {
7+
8+
if (parameter < 0.0) {
9+
return 0.0;
10+
} else {
11+
return 1.0;
12+
}
13+
}
14+
15+
public double deriv(double parameter) {
16+
17+
return 0.0;
18+
}
19+
20+
}

src/aima/learning/neural/Layer.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
package aima.learning.neural;
22

3-
import aima.learning.statistics.ActivationFunction;
43
import aima.util.Matrix;
54
import aima.util.Util;
65

src/aima/learning/neural/LogSigActivationFunction.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
package aima.learning.neural;
22

3-
import aima.learning.statistics.ActivationFunction;
43

54
public class LogSigActivationFunction implements ActivationFunction {
65

src/aima/learning/neural/NNDataSet.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@
88

99
import aima.learning.framework.DataSet;
1010
import aima.learning.framework.Example;
11-
import aima.learning.statistics.Numerizer;
1211
import aima.util.Pair;
1312
import aima.util.Util;
1413

src/aima/learning/neural/NNTrainingScheme.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,5 +5,5 @@ public interface NNTrainingScheme {
55

66
void processError(FeedForwardNeuralNetwork network, Vector error);
77

8-
void setNeuralNetwork(FeedForwardNeuralNetwork ffnn);
8+
void setNeuralNetwork(FunctionApproximator ffnn);
99
}

src/aima/learning/statistics/Numerizer.java renamed to src/aima/learning/neural/Numerizer.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
* Created on Aug 6, 2005
33
*
44
*/
5-
package aima.learning.statistics;
5+
package aima.learning.neural;
66

77
import java.util.List;
88

src/aima/learning/neural/PureLinearActivationFunction.java

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
package aima.learning.neural;
22

3-
import aima.learning.statistics.ActivationFunction;
43

54
public class PureLinearActivationFunction implements ActivationFunction {
65

src/aima/learning/neural/Vector.java

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -46,6 +46,14 @@ public Vector minus(Vector v) {
4646
return result;
4747
}
4848

49+
public Vector plus(Vector v) {
50+
Vector result = new Vector(size());
51+
for (int i = 0; i < size(); i++) {
52+
result.setValue(i, getValue(i) + v.getValue(i));
53+
}
54+
return result;
55+
}
56+
4957
public int indexHavingMaxValue() {
5058
if (size() <= 0) {
5159
throw new RuntimeException("can't perform this op on empty vector");

0 commit comments

Comments (0)