
Commit cf14eef

1.1.0 - Changed progress functions to async and added success rate function
+ Changed progress functions to async
+ Added async success rate function
+ Improved default values
+ Added learning rate and training iteration function
+ Added more comments to code
+ Improved performance in some areas
1 parent 9686c0e commit cf14eef

File tree

4 files changed (+148, -90 lines)

Java-Neural-Network/.classpath

Lines changed: 1 addition & 1 deletion
@@ -13,7 +13,7 @@
 			<attribute name="test" value="true"/>
 		</attributes>
 	</classpathentry>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8">
 		<attributes>
 			<attribute name="maven.pomderived" value="true"/>
 		</attributes>

Java-Neural-Network/.project

Lines changed: 11 additions & 0 deletions
@@ -20,4 +20,15 @@
 		<nature>org.eclipse.jdt.core.javanature</nature>
 		<nature>org.eclipse.m2e.core.maven2Nature</nature>
 	</natures>
+	<filteredResources>
+		<filter>
+			<id>1600964252595</id>
+			<name></name>
+			<type>30</type>
+			<matcher>
+				<id>org.eclipse.core.resources.regexFilterMatcher</id>
+				<arguments>node_modules|.git|__CREATED_BY_JAVA_LANGUAGE_SERVER__</arguments>
+			</matcher>
+		</filter>
+	</filteredResources>
 </projectDescription>

Java-Neural-Network/pom.xml

Lines changed: 3 additions & 3 deletions
@@ -6,16 +6,16 @@
 
   <groupId>unprotesting.com.github</groupId>
   <artifactId>Java-Neural-Network</artifactId>
-  <version>1.0.1</version>
+  <version>1.1.0</version>
 
   <name>Java-Neural-Network</name>
 
   <url>https://github.com/Unprotesting/Java-Neural-Network</url>
 
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <maven.compiler.source>1.7</maven.compiler.source>
-    <maven.compiler.target>1.7</maven.compiler.target>
+    <maven.compiler.source>1.8</maven.compiler.source>
+    <maven.compiler.target>1.8</maven.compiler.target>
   </properties>
 
   <build>

Java-Neural-Network/src/main/java/unprotesting/com/github/NeuralNetwork.java

Lines changed: 133 additions & 86 deletions
@@ -2,101 +2,110 @@
 
 import java.util.Arrays;
 import java.util.Random;
+import java.io.FileWriter;
+import java.io.IOException;
 
 public class NeuralNetwork {
     // Variable Declaration
-
-    // Layers
-    static Layer[] layers;
-
-    // Training data
-    static TrainingData[] tData1, tData200, tData5000; // My changes
 
-    static String[] list;
-
-    // Main Method
-    public static void main(String[] args) throws InterruptedException {
-        // My changes
-        // Set the Min and Max weight value for all Neurons
-        Neuron.setRangeWeight(-1,1);
-
-        // Create the layers
-        layers = new Layer[5];
-        layers[0] = null; // Input Layer 0,10
-        layers[1] = new Layer(10,24); // Hidden Layer 10,24
-        layers[2] = new Layer(24,25); // Hidden Layer 24,25
-        layers[3] = new Layer(25,32); // Hidden Layer 25,32
-        layers[4] = new Layer(32,2); // Output Layer 32,2
-
-        // Create the training data
-        tData200 = loadInputs(200);
-
-        System.out.println("============");
-        System.out.println("Output before training - 1");
-        System.out.println("============");
-        for(int i = 0; i < tData200.length; i++) {
-            forward(tData200[i].data);
-            System.out.println(layers[4].neurons[0].value + " - " + layers[4].neurons[1].value);
-        }
-
-        Thread.sleep(5);
-
-        // Train the data
-        // First pass; low data-set size, high training amount, high learning rate
-        train(25000, 0.0075f, tData200);
+    // Layers
+    static Layer[] layers;
 
-        System.out.println("============");
-        System.out.println("Output after training - 1");
-        System.out.println("============");
-        for(int i = 0; i < tData200.length; i++) {
-            forward(tData200[i].data);
-            System.out.println((layers[4].neurons[0].value)+ " - "+ (layers[4].neurons[1].value) + " =(this should equal)= " + list[i]);
-        }
+    // Training data
+    static TrainingData[] tData1, tDataFull, testData1000;
+
+    // Boolean for async threads
+    public static boolean isComplete = true;
+
+    // Async references to progress
+    public static int time = 0;
+    public static int i_stat = 0;
+    public static float currentChange = 0f;
+
+    // CSV data writer
+    public static FileWriter csvWriter;
+
+    // Main Method
+    public static void main(String[] args) throws InterruptedException, IOException {
+        // Set the Min and Max weight value for all Neurons
+        Neuron.setRangeWeight(-1, 1);
+
+        csvWriter = new FileWriter("NeuralData.csv");
+        csvWriter.append("\n");
+
+        // Create the layers
+        layers = new Layer[5];
+        layers[0] = null; // Input Layer 0,10
+        layers[1] = new Layer(10, 24); // Hidden Layer 10,24
+        layers[2] = new Layer(24, 45); // Hidden Layer 24,45
+        layers[3] = new Layer(45, 32); // Hidden Layer 45,32
+        layers[4] = new Layer(32, 2); // Output Layer 32,2
 
         Thread.sleep(5);
 
-        // Create the training data
-        tData5000 = loadInputs(5000);
-
-        System.out.println("============");
-        System.out.println("Output before training - 2");
-        System.out.println("============");
-        for(int i = 0; i < tData5000.length; i++) {
-            forward(tData5000[i].data);
-            System.out.println((layers[4].neurons[0].value)+ " - " +(layers[4].neurons[1].value) + " =(this should equal)= " + list[i]);
+        // Create the test data
+        testData1000 = loadInputs(1000);
+
+        // Asynchronous function to check progress and success rate
+        Thread asyncProgressThread = new Thread(() -> {
+            while (!isComplete) {
+                try {
+                    Thread.sleep(50);
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+                // Check progress function
+                checkProgress();
+                try {
+                    // Check success rate function
+                    checkSuccessRate();
+                } catch (IOException e) {
+                    e.printStackTrace();
+                }
+                try {
+                    Thread.sleep(2950);
+                } catch (InterruptedException e) {
+                    e.printStackTrace();
+                }
+            }
+        });
+
+        // Start async function
+        asyncProgressThread.start();
+        isComplete = false;
+
+        // Train data under multiple configurations
+        // Decreasing training iterations and learning rate
+        for (int i = 1; i < 10; i++){
+            System.out.println("Starting Training: " + i);
+            // Training data changes each iteration
+            tDataFull = loadInputs((int)(i * 1000 * 1.25));
+            train((int)(1000/i), (0.01f/i), tDataFull);
         }
-
-        Thread.sleep(5);
 
-        // Train the data
-        // First pass; high data-set size, low training amount, low learning rate
-        train(1000, 0.0015f, tData5000);
+        Thread.sleep(5);
 
-        System.out.println("============");
-        System.out.println("Output after training - 2");
-        System.out.println("============");
-        for(int i = 0; i < tData5000.length; i++) {
-            forward(tData5000[i].data);
-            System.out.println((layers[4].neurons[0].value)+ " - "+ (layers[4].neurons[1].value) + " =(this should equal)= " + list[i]);
-        }
+        // Stop async task
+        isComplete = true;
 
-        Thread.sleep(100);
+        Thread.sleep(250);
 
+        // Create testing data
         tData1 = loadInputs(1);
-        System.out.println("============");
-        System.out.println("Output before testing");
-        System.out.println("============");
-        float[] test = tData1[0].data;
-        System.out.println(Arrays.toString(test));
 
-        train(5, 0.001f, tData1);
+        // Test network
+        train(10, 0.0001f, tData1);
         System.out.println("============");
         System.out.println("Output after testing");
         System.out.println("============");
         for(int i = 0; i < tData1.length; i++) {
            forward(tData1[i].data);
-            System.out.println((layers[4].neurons[0].value)+ " - "+ (layers[4].neurons[1].value) + " =(this should equal)= " + list[i]);
+            System.out.println((layers[4].neurons[0].value)+ " - "+ (layers[4].neurons[1].value) + " =(this should equal)= " + Arrays.toString(tData1[i].expectedOutput));
         }
+
+        // Close writer
+        csvWriter.flush();
+        csvWriter.close();
     }
 

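The schedule in the new training loop is the core change here: each of the nine passes enlarges the data set while shrinking both the iteration count and the learning rate. A minimal standalone sketch, with an illustrative class name that is not part of the commit, printing the configuration each pass produces:

public class ScheduleSketch {
    public static void main(String[] args) {
        for (int i = 1; i < 10; i++) {
            int dataSize = (int) (i * 1000 * 1.25); // 1250, 2500, ... 11250 samples
            int iterations = 1000 / i;              // 1000, 500, 333, ... (integer division)
            float learningRate = 0.01f / i;         // 0.01, 0.005, 0.00333...
            System.out.printf("pass %d: %d samples, %d iterations, lr=%.5f%n",
                    i, dataSize, iterations, learningRate);
        }
    }
}

The lambda used by asyncProgressThread is also why this commit bumps the Maven compiler source and target from 1.7 to 1.8 in pom.xml above: lambdas are a Java 8 feature.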
@@ -116,24 +125,22 @@ public static void forward(float[] inputs) {
         }
     }
 
+    // Create training data by loading float arrays sorted ascendingly, descendingly or not at all
     public static TrainingData[] loadInputs(int inputs) {
         TrainingData[] outputTrainingData = new TrainingData[inputs];
-        list = new String[inputs];
         int i = 0;
         for (;i<inputs;){
             float temp = loadRandomFloat();
             if (temp > 7000){
                 float[] z = { loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat()};
                 float[] sortedInput = ascendingBubbleSortFloatArray(z);
                 outputTrainingData[i] = new TrainingData(sortedInput, new float[]{1, 0});
-                list[i] = "one, zero";
                 i++;
             }
             if (temp <= 3000){
                 float[] z = { loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat(), loadRandomFloat()};
                 float[] sortedInput = descendingBubbleSortFloatArray(z);
                 outputTrainingData[i] = new TrainingData(sortedInput, new float[]{0, 0});
-                list[i] = "zero, zero";
                 i++;
             }
             if (7000 > temp && temp > 3000){
@@ -142,17 +149,14 @@ public static TrainingData[] loadInputs(int inputs) {
             boolean isdsorted = isDecsendingSorted(z);
             if (isdsorted == true){
                 outputTrainingData[i] = new TrainingData(z, new float[]{0, 0});
-                list[i] = "zero, zero";
                 i++;
             }
             else if (isasorted == true){
                 outputTrainingData[i] = new TrainingData(z, new float[]{1, 0});
-                list[i] = "one, zero";
                 i++;
             }
             else {
                 outputTrainingData[i] = new TrainingData(z, new float[]{0, 1});
-                list[i] = "zero, one";
                 i++;
             }
         }
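Taken together, the two hunks above encode one labelling rule: ascending inputs map to {1, 0}, descending inputs (including constant ones, since the descending check runs first) map to {0, 0}, and everything else maps to {0, 1}. A self-contained sketch of that rule, with hypothetical names not taken from the commit:

import java.util.Arrays;
import java.util.Random;

public class LabelSketch {
    private static final Random RNG = new Random();

    // Mirrors loadInputs(): descending -> {0,0}, ascending -> {1,0}, unsorted -> {0,1}
    static float[] label(float[] a) {
        boolean ascending = true, descending = true;
        for (int i = 0; i < a.length - 1; i++) {
            if (a[i] > a[i + 1]) ascending = false;
            if (a[i] < a[i + 1]) descending = false;
        }
        if (descending) return new float[]{0, 0};
        if (ascending) return new float[]{1, 0};
        return new float[]{0, 1};
    }

    public static void main(String[] args) {
        float[] sample = new float[10];
        for (int i = 0; i < sample.length; i++) {
            sample[i] = 1 + RNG.nextFloat() * 9998; // same 1-9999 range as loadRandomFloat()
        }
        System.out.println(Arrays.toString(sample) + " -> " + Arrays.toString(label(sample)));
    }
}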
@@ -161,6 +165,7 @@ else if (isasorted == true){
 
     }
 
+    // Test if an array is sorted ascendingly
     public static boolean isAscendingSorted(float[] a) {
         for (int i = 0; i < a.length - 1; i++) {
             if (a[i] > a[i + 1]) {
@@ -171,6 +176,7 @@ public static boolean isAscendingSorted(float[] a) {
         return true; // If this part has been reached, the array must be sorted.
     }
 
+    // Test if an array is sorted descendingly
     public static boolean isDecsendingSorted(float[] a) {
         for (int i = 0; i < a.length - 1; i++) {
             if (a[i] < a[i + 1]) {
@@ -181,6 +187,7 @@ public static boolean isDecsendingSorted(float[] a) {
         return true; // If this part has been reached, the array must be sorted.
     }
 
+    // Function to load a random float from 1-9999
     public static float loadRandomFloat() {
         int leftLimit = 1;
         int rightLimit = 9999;
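Only the limits appear in this hunk; a plausible stand-in, assuming loadRandomFloat() does exactly what its new comment says (this is a guess at the body, not the committed code):

private static final java.util.Random RNG = new java.util.Random();

// Hypothetical body: a uniform float in [1, 9999]
static float loadRandomFloatSketch() {
    int leftLimit = 1;
    int rightLimit = 9999;
    return leftLimit + RNG.nextFloat() * (rightLimit - leftLimit);
}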
@@ -285,19 +292,59 @@ public static float sumGradient(int n_index,int l_index) {
             gradient_sum += current_neuron.weights[n_index]*current_neuron.gradient;
         }
         return gradient_sum;
-    }
-
+    }
+
+    // Check progress of training
+    public static void checkProgress(){
+        System.out.println("Total percentage complete: %" + NeuralNetwork.time + ". Current training: No." + NeuralNetwork.i_stat + ". ");
+    }
+
+    // Check success rate by loading new test data
+    public static void checkSuccessRate() throws IOException {
+        int checks = 0;
+        float diff = 0;
+        float totalDif = 0;
+        for (int i = 0; i < testData1000.length; i++){
+            if (testData1000[i] != null){
+                forward(testData1000[i].data);
+                float[] val = {layers[4].neurons[0].value, layers[4].neurons[1].value};
+                float[] optimalVal = testData1000[i].expectedOutput;
+                for (int x = 0; x < 2; x++){
+                    if (Math.round(val[x]) == optimalVal[x]){
+                        diff = diff + optimalVal[x] - Math.round(val[x]);
+                    }
+                    if ((Math.round(val[x])) != optimalVal[x]){
+                        diff = diff + 1;
+                    }
+                    if (val == optimalVal){
+                        diff = diff + 0;
+                    }
+                }
+                diff = diff*50;
+                totalDif = totalDif + diff;
+                diff = 0;
+                checks++;
+            }
+        }
+        totalDif = 100-(totalDif/checks);
+        currentChange = totalDif - currentChange;
+        System.out.println("Success Rate: %" + totalDif + ". Change: %" + currentChange);
+        csvWriter.append("\n");
+        csvWriter.append(String.valueOf(totalDif));
+        csvWriter.append(String.valueOf(","));
+        csvWriter.append(String.valueOf(currentChange));
+        currentChange = totalDif;
+    }
 
-    // This function is used to train being forward and backward.
+    // This function is used to train data by pushing it forward and backward.
    public static void train(int training_iterations,float learning_rate, TrainingData[] traningData) {
         for(int i = 0; i < training_iterations; i++) {
+            time = (i * 100 / training_iterations);
+            i_stat = i;
             for(int j = 0; j < traningData.length; j++) {
                 forward(traningData[j].data);
                 backward(learning_rate,traningData[j]);
             }
-            if (i%267 == 0 && i != 0){
-                System.out.println("Percentage complete: %" + (i*100/training_iterations));
-            }
 
         }
 
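Stripped of the CSV bookkeeping, checkSuccessRate() is a rounding-based accuracy metric: each of the two output neurons is rounded to the nearest integer and compared with the expected output, and every mismatch costs the sample 50 percentage points. (The val == optimalVal branch compares array references and only ever adds zero, so the mismatch count alone drives the score.) A condensed sketch of the same computation, with illustrative names:

// Success rate as checkSuccessRate() computes it, minus the CSV output:
// per sample, each wrongly rounded output neuron adds 50% error.
static float successRate(float[][] predictions, float[][] expected) {
    float totalError = 0f;
    for (int i = 0; i < predictions.length; i++) {
        float sampleError = 0f;
        for (int x = 0; x < 2; x++) {
            if (Math.round(predictions[i][x]) != expected[i][x]) sampleError += 1f;
        }
        totalError += sampleError * 50f; // 0, 50 or 100 per sample
    }
    return 100f - totalError / predictions.length;
}

For example, a prediction of {0.9f, 0.1f} scores 100 for a sample whose expected output is {1, 0}, and 0 for one whose expected output is {0, 1}.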
