Skip to content

Commit cbe0214

Browse files
author
magicindian
committed
Neural Network works!!! TODO remove old code, release!
1 parent 599574b commit cbe0214

File tree

8 files changed

+230
-9
lines changed

8 files changed

+230
-9
lines changed

src/aima/learning/neural/FeedForwardNeuralNetwork.java

Lines changed: 43 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,11 @@
55

66
public class FeedForwardNeuralNetwork implements FunctionApproximator {
77

8+
public static final String UPPER_LIMIT_WEIGHTS = "upper_limit_weights";
9+
public static final String LOWER_LIMIT_WEIGHTS = "lower_limit_weights";
10+
public static final String NUMBER_OF_OUTPUTS = "number_of_outputs";
11+
public static final String NUMBER_OF_HIDDEN_NEURONS = "number_of_hidden_neurons";
12+
public static final String NUMBER_OF_INPUTS = "number_of_inputs";
813
private final Layer hiddenLayer;
914
private final Layer outputLayer;
1015

@@ -16,16 +21,16 @@ public class FeedForwardNeuralNetwork implements FunctionApproximator {
1621
public FeedForwardNeuralNetwork(NNConfig config) {
1722

1823
int numberOfInputNeurons = config
19-
.getParameterAsInteger("number_of_inputs");
24+
.getParameterAsInteger(NUMBER_OF_INPUTS);
2025
int numberOfHiddenNeurons = config
21-
.getParameterAsInteger("number_of_hidden_neurons");
26+
.getParameterAsInteger(NUMBER_OF_HIDDEN_NEURONS);
2227
int numberOfOutputNeurons = config
23-
.getParameterAsInteger("number_of_outputs");
28+
.getParameterAsInteger(NUMBER_OF_OUTPUTS);
2429

2530
double lowerLimitForWeights = config
26-
.getParameterAsDouble("lower_limit_weights");
31+
.getParameterAsDouble(LOWER_LIMIT_WEIGHTS);
2732
double upperLimitForWeights = config
28-
.getParameterAsDouble("upper_limit_weights");
33+
.getParameterAsDouble(UPPER_LIMIT_WEIGHTS);
2934

3035
hiddenLayer = new Layer(numberOfHiddenNeurons, numberOfInputNeurons,
3136
lowerLimitForWeights, upperLimitForWeights,
@@ -63,6 +68,39 @@ public Vector processInput(Vector input) {
6368
return trainingScheme.processInput(this, input);
6469
}
6570

71+
public void trainOn(NNDataSet innds, int numberofEpochs) {
72+
for (int i = 0; i < numberofEpochs; i++) {
73+
innds.refreshDataset();
74+
while (innds.hasMoreExamples()) {
75+
NNExample nne = innds.getExampleAtRandom();
76+
processInput(nne.getInput());
77+
Vector error = getOutputLayer()
78+
.errorVectorFrom(nne.getTarget());
79+
processError(error);
80+
}
81+
}
82+
83+
}
84+
85+
public Vector predict(NNExample nne) {
86+
return processInput(nne.getInput());
87+
}
88+
89+
public int[] testOnDataSet(NNDataSet nnds) {
90+
int[] result = new int[] { 0, 0 };
91+
nnds.refreshDataset();
92+
while (nnds.hasMoreExamples()) {
93+
NNExample nne = nnds.getExampleAtRandom();
94+
Vector prediction = predict(nne);
95+
if (nne.isCorrect(prediction)) {
96+
result[0] = result[0] + 1;
97+
} else {
98+
result[1] = result[1] + 1;
99+
}
100+
}
101+
return result;
102+
}
103+
66104
// NOTE(review): unimplemented stub kept to satisfy the FunctionApproximator
// interface; testOnDataSet(NNDataSet) is the working evaluation path.
public void testOn(DataSet ds) {
    // TODO Auto-generated method stub
}

src/aima/learning/neural/Layer.java

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ public Layer(int numberOfNeurons, int numberOfInputs,
4141
ActivationFunction af) {
4242

4343
activationFunction = af;
44-
this.weightMatrix = new Matrix(numberOfNeurons, numberOfInputs());
44+
this.weightMatrix = new Matrix(numberOfNeurons, numberOfInputs);
4545
lastWeightUpdateMatrix = new Matrix(weightMatrix.getRowDimension(),
4646
weightMatrix.getColumnDimension());
4747
penultimateWeightUpdateMatrix = new Matrix(weightMatrix
@@ -104,7 +104,7 @@ public Vector getLastInducedField() {
104104
private static void initializeMatrix(Matrix aMatrix, double lowerLimit,
105105
double upperLimit) {
106106
for (int i = 0; i < aMatrix.getRowDimension(); i++) {
107-
for (int j = 0; j < aMatrix.getColumnDimension(); i++) {
107+
for (int j = 0; j < aMatrix.getColumnDimension(); j++) {
108108
double random = Util.generateRandomDoubleBetween(lowerLimit,
109109
upperLimit);
110110
aMatrix.set(i, j, random);
@@ -192,4 +192,9 @@ public void acceptNewBiasUpdate(Vector biasUpdate) {
192192
setLastBiasUpdateVector(biasUpdate);
193193
}
194194

195+
public Vector errorVectorFrom(Vector target) {
196+
return target.minus(getLastActivationValues());
197+
198+
}
199+
195200
}

src/aima/learning/neural/NNConfig.java

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,10 @@ public NNConfig(Hashtable<String, Object> hash) {
1313
this.hash = hash;
1414
}
1515

16+
/** Creates an empty configuration; populate it via the setConfig overloads. */
public NNConfig() {
    this.hash = new Hashtable<String, Object>();
}
19+
1620
public double getParameterAsDouble(String key) {
1721

1822
return (Double) hash.get(key);
@@ -23,4 +27,11 @@ public int getParameterAsInteger(String key) {
2327
return (Integer) hash.get(key);
2428
}
2529

30+
/** Stores a double-valued parameter under the given key. */
public void setConfig(String key, Double value) {
    hash.put(key, value);
}
33+
34+
/** Stores an integer-valued parameter under the given key (autoboxed). */
public void setConfig(String key, int value) {
    hash.put(key, value);
}
2637
}

src/aima/learning/neural/NNDataSet.java

Lines changed: 62 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,10 @@
66
import java.util.Arrays;
77
import java.util.List;
88

9+
import aima.learning.framework.DataSet;
10+
import aima.learning.framework.Example;
11+
import aima.learning.statistics.Numerizer;
12+
import aima.util.Pair;
913
import aima.util.Util;
1014

1115
public abstract class NNDataSet {
@@ -21,7 +25,7 @@ public abstract class NNDataSet {
2125
/*
2226
* a copy from which examples are drawn.
2327
*/
24-
private List<NNExample> presentlyProcessed;
28+
private List<NNExample> presentlyProcessed = new ArrayList<NNExample>();;
2529

2630
/*
2731
* list of mean Values for all components of raw data set
@@ -35,7 +39,7 @@ public abstract class NNDataSet {
3539
/*
3640
* the normalized data set
3741
*/
38-
private List<List<Double>> nds;
42+
protected List<List<Double>> nds;
3943

4044
/*
4145
* the column numbers of the "target"
@@ -91,6 +95,21 @@ public void createNormalizedDataFromFile(String filename) throws Exception {
9195
nds = normalize(rds);
9296
}
9397

98+
/*
 * Creates a normalized data "table" from the DataSet using the given
 * numerizer. At this stage the data is *not* yet split into input pattern
 * and targets. TODO remove the redundancy of recreating the target columns;
 * the numerizer has already isolated the targets.
 */
public void createNormalizedDataFromDataSet(DataSet ds, Numerizer numerizer)
        throws Exception {

    List<List<Double>> rds = rawExamplesFromDataSet(ds, numerizer);
    // normalize raw dataset
    nds = normalize(rds);
}
112+
94113
private List<List<Double>> normalize(List<List<Double>> rds) {
95114
int rawDataLength = rds.get(0).size();
96115
List<List<Double>> nds = new ArrayList<List<Double>>();
@@ -140,6 +159,27 @@ private List<Double> exampleFromString(String line, String separator) {
140159
return rexample;
141160
}
142161

162+
private List<List<Double>> rawExamplesFromDataSet(DataSet ds,
163+
Numerizer numerizer) {
164+
// assumes all values for inout and target are doubles
165+
List<List<Double>> rds = new ArrayList<List<Double>>();
166+
for (int i = 0; i < ds.size(); i++) {
167+
List<Double> rexample = new ArrayList<Double>();
168+
Example e = ds.getExample(i);
169+
Pair<List<Double>, List<Double>> p = numerizer.numerize(e);
170+
List<Double> attributes = p.getFirst();
171+
for (Double d : attributes) {
172+
rexample.add(d);
173+
}
174+
List<Double> targets = p.getSecond();
175+
for (Double d : targets) {
176+
rexample.add(d);
177+
}
178+
rds.add(rexample);
179+
}
180+
return rds;
181+
}
182+
143183
/*
144184
* Gets (and removes) a random example from the 'presentlyProcessed'
145185
*/
@@ -149,6 +189,14 @@ public NNExample getExampleAtRandom() {
149189
return presentlyProcessed.remove(i);
150190
}
151191

192+
/*
 * Gets (and removes) the example at the given index from
 * 'presentlyProcessed'. Note: unlike getExampleAtRandom, this is
 * deterministic.
 */
public NNExample getExample(int index) {

    return presentlyProcessed.remove(index);
}
199+
152200
/*
153201
* check if any more examples remain to be processed
154202
*/
@@ -185,6 +233,18 @@ public void createExamplesFromFile(String filename) throws Exception {
185233

186234
}
187235

236+
/*
 * Method called by clients to set up the data set from a DataSet and a
 * Numerizer and make it ready for processing: normalizes the data, then
 * identifies the target columns, then builds the example list (order
 * matters — createExamples reads the target columns).
 */
public void createExamplesFromDataSet(DataSet ds, Numerizer numerizer)
        throws Exception {
    createNormalizedDataFromDataSet(ds, numerizer);
    setTargetColumns();
    createExamples();

}
247+
188248
/* Returns the normalized data table (one row per example). */
public List<List<Double>> getNormalizedData() {
    return nds;
}

src/aima/learning/neural/NNExample.java

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,4 +23,25 @@ public NNExample copyExample() {
2323
return new NNExample(newInput, newTarget);
2424
}
2525

26+
public Vector getInput() {
27+
Vector v = new Vector(normalizedInput);
28+
return v;
29+
30+
}
31+
32+
public Vector getTarget() {
33+
Vector v = new Vector(normalizedTarget);
34+
return v;
35+
36+
}
37+
38+
public boolean isCorrect(Vector prediction) {
39+
/*
40+
* compares the index having greatest value in target to indec having
41+
* greatest value in prediction. Ifidentical, correct
42+
*/
43+
return getTarget().indexHavingMaxValue() == prediction
44+
.indexHavingMaxValue();
45+
}
46+
2647
}

src/aima/learning/neural/Vector.java

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
package aima.learning.neural;
22

3+
import java.util.List;
4+
35
import aima.util.Matrix;
46

57
public class Vector extends Matrix {
@@ -9,6 +11,13 @@ public Vector(int size) {
911
super(size, 1);
1012
}
1113

14+
public Vector(List<Double> lst) {
15+
super(lst.size(), 1);
16+
for (int i = 0; i < lst.size(); i++) {
17+
setValue(i, lst.get(i));
18+
}
19+
}
20+
1221
public double getValue(int i) {
1322
return super.get(i, 0);
1423
}
@@ -29,4 +38,25 @@ public int size() {
2938
return getRowDimension();
3039
}
3140

41+
public Vector minus(Vector v) {
42+
Vector result = new Vector(size());
43+
for (int i = 0; i < size(); i++) {
44+
result.setValue(i, getValue(i) - v.getValue(i));
45+
}
46+
return result;
47+
}
48+
49+
public int indexHavingMaxValue() {
50+
if (size() <= 0) {
51+
throw new RuntimeException("can't perform this op on empty vector");
52+
}
53+
int res = 0;
54+
for (int i = 0; i < size(); i++) {
55+
if (getValue(i) > getValue(res)) {
56+
res = i;
57+
}
58+
}
59+
return res;
60+
}
61+
3262
}

src/aima/test/learningtest/neural/BackPropagationTests.java

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,15 @@
11
package aima.test.learningtest.neural;
22

33
import junit.framework.TestCase;
4+
import aima.learning.framework.DataSet;
5+
import aima.learning.framework.DataSetFactory;
46
import aima.learning.neural.BackPropLearning;
57
import aima.learning.neural.FeedForwardNeuralNetwork;
8+
import aima.learning.neural.NNConfig;
9+
import aima.learning.neural.NNDataSet;
610
import aima.learning.neural.Vector;
11+
import aima.learning.statistics.IrisDataSetNumerizer;
12+
import aima.learning.statistics.Numerizer;
713
import aima.util.Matrix;
814

915
public class BackPropagationTests extends TestCase {
@@ -108,4 +114,29 @@ public void testFeedForwardAndBAckLoopWorksWithMomentum() {
108114
assertEquals(0.6061, outputLayerBias.getValue(0), 0.001);
109115

110116
}
117+
118+
public void testDataSetPopulation() throws Exception {
119+
DataSet irisDataSet = DataSetFactory.getIrisDataSet();
120+
Numerizer numerizer = new IrisDataSetNumerizer();
121+
NNDataSet innds = new IrisNNDataSet();
122+
123+
innds.createExamplesFromDataSet(irisDataSet, numerizer);
124+
125+
NNConfig config = new NNConfig();
126+
config.setConfig(FeedForwardNeuralNetwork.NUMBER_OF_INPUTS, 4);
127+
config.setConfig(FeedForwardNeuralNetwork.NUMBER_OF_OUTPUTS, 3);
128+
config.setConfig(FeedForwardNeuralNetwork.NUMBER_OF_HIDDEN_NEURONS, 6);
129+
config.setConfig(FeedForwardNeuralNetwork.LOWER_LIMIT_WEIGHTS, -2.0);
130+
config.setConfig(FeedForwardNeuralNetwork.UPPER_LIMIT_WEIGHTS, 2.0);
131+
132+
FeedForwardNeuralNetwork ffnn = new FeedForwardNeuralNetwork(config);
133+
ffnn.setTrainingScheme(new BackPropLearning(0.1, 0.9));
134+
135+
ffnn.trainOn(innds, 10);
136+
137+
innds.refreshDataset();
138+
int[] result = ffnn.testOnDataSet(innds);
139+
System.out.println(result[0] + " right, " + result[1] + " wrong");
140+
141+
}
111142
}
Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
package aima.test.learningtest.neural;
2+
3+
import java.util.ArrayList;
4+
5+
import aima.learning.neural.NNDataSet;
6+
7+
public class IrisNNDataSet extends NNDataSet {
8+
9+
@Override
10+
public void setTargetColumns() {
11+
// assumed that data from file has been pre processed
12+
// TODO this should be
13+
// somewhere else,in the
14+
// super class.
15+
// Type != class Aargh! I want more
16+
// powerful type systems
17+
targetColumnNumbers = new ArrayList<Integer>();
18+
int size = nds.get(0).size();
19+
targetColumnNumbers.add(size - 1); // last column
20+
targetColumnNumbers.add(size - 2); // last but one column
21+
targetColumnNumbers.add(size - 3); // and the one before that
22+
23+
}
24+
25+
}

0 commit comments

Comments
 (0)