diff --git a/Javascript/p005-ReLU-Activation.js b/Javascript/p005-ReLU-Activation.js
index 8a2b87c..6aad1d5 100644
--- a/Javascript/p005-ReLU-Activation.js
+++ b/Javascript/p005-ReLU-Activation.js
@@ -3,7 +3,7 @@ Creates a dense layer of neurons with a ReLU activation function, and feeds forw
 Associated YT tutorial: https://www.youtu.be/gmjzbpSVY1A
 */
 
-const math = require("mathjs");
+//const math = require("mathjs");
 
 // Moved this code from spiral-data.js written by @vancegillies
 // Updated by @daniel-kukiela
@@ -47,7 +47,7 @@ class Layer_Dense {
         this.weights = math.random([n_inputs, n_neurons], -1.0, 1.0);
         this.biases = math.zeros(1, n_neurons);
     }
-    
+
     forward (inputs) {
         var biasesmat = this.biases;
         // Since only adding matrices elementwise is supported, you need to make the biases into a matrix and not a vector.
@@ -58,16 +58,16 @@ class Layer_Dense {
 
 class Activation_ReLU {
     constructor () {}
-    
+
     forward (inputs) {
         this.output = math.matrix(inputs._data.map(layer => layer.map(i => i<0?0:i)));
     }
 }
 
-var layer1 = new Layer_Dense(4, 5);
+var layer1 = new Layer_Dense(2, 5);
 var activation1 = new Activation_ReLU();
 
 layer1.forward(X);
 //console.log(layer1.output);
 activation1.forward(layer1.output);
-console.log(activation1.output);
\ No newline at end of file
+console.log(activation1.output);
diff --git a/Javascript/p006-Softmax-Activation.js b/Javascript/p006-Softmax-Activation.js
new file mode 100644
index 0000000..ea6fd88
--- /dev/null
+++ b/Javascript/p006-Softmax-Activation.js
@@ -0,0 +1,102 @@
+/* This is a JavaScript implementation of the Neural Networks from Scratch in Python series.
+*
+* The part 6 bits, i.e. the Softmax activation class, are declared and defined after line 718
+*
+* Link to the series on youtube: https://www.youtube.com/watch?v=Wo5dMEP_BbI&list=PLQVvvaa0QuDcjD5BAw2DxE6OF2tius3V3
+*/
+
+// const math = require("mathjs");
+
+// Moved this code from spiral-data.js written by @vancegillies
+// Updated by @daniel-kukiela
+function spiral_data(points, classes) {
+    // Using MathJs functions to make matrices with zeros but converting to arrays for simplicity
+    const X = math.zeros(points * classes, 2).toArray();
+    const y = math.zeros(points * classes, "dense").toArray();
+    let ix = 0;
+    for (let class_number = 0; class_number < classes; class_number++) {
+        let r = 0;
+        let t = class_number * 4;
+
+        while (r <= 1 && t <= (class_number + 1) * 4) {
+            // adding some randomness to t
+            const random_t = t + math.random(points) * 0.008;
+            // Was `* 0.2` but reduced so you can somewhat see the arms of spiral in visualization
+            // Feel free to change it back
+
+            // converting from polar to cartesian coordinates
+            X[ix][0] = r * math.sin(random_t * 2.5);
+            X[ix][1] = r * math.cos(random_t * 2.5);
+            y[ix] = class_number;
+
+            // the below two statements achieve linspace-like functionality
+            r += 1.0 / (points - 1);
+            t += 4.0 / (points - 1);
+
+            ix++; // increment index
+        }
+    }
+    // Returning as MathJs matrices, could be arrays, doesn't really matter
+    return [math.matrix(X), math.matrix(y)];
+}
+
+let [X, y] = spiral_data(100, 3);
+
+class Layer_Dense {
+    constructor (n_inputs, n_neurons){
+        this.weights = math.random([n_inputs, n_neurons], -1.0, 1.0);
+        this.biases = math.zeros(1, n_neurons);
+    }
+
+    forward (inputs) {
+        var biasesmat = this.biases;
+        // Since only adding matrices elementwise is supported, you need to make the biases into a matrix and not a vector.
+        for (var i=0; i<inputs._data.length-1; i++) {
+            biasesmat = math.concat(biasesmat, this.biases, 0);
+        }
+        this.output = math.add(math.multiply(inputs, this.weights), biasesmat);
+    }
+}
+
+class Activation_ReLU {
+    constructor () {}
+
+    forward (inputs) {
+        this.output = math.matrix(inputs._data.map(layer => layer.map(i => i<0?0:i)));
+    }
+}
+
+class Activation_Softmax {
+    forward (inputs) {
+        let exp_values = new Array;
+
+        // exponentiate every value (nested rows or single values)
+        inputs.forEach ((input) => {
+            if (Array.isArray(input)) {
+                input.forEach ((element) => {
+                    exp_values.push(math.exp(element));
+                });
+            } else {
+                exp_values.push(math.exp(input));
+            }
+        });
+
+        // normalize so the exponentiated values sum to 1
+        let norm_base = math.sum(exp_values);
+        let norm_values = new Array;
+
+        exp_values.forEach ((element) => {
+            norm_values.push(element / norm_base);
+        });
+
+        this.output = norm_values;
+    }
+}
+
+var dense1 = new Layer_Dense(2, 3);
+var activation1 = new Activation_ReLU();
+
+var dense2 = new Layer_Dense(3, 3);
+var activation2 = new Activation_Softmax();
+
+
+dense1.forward(X);
+activation1.forward(dense1.output);
+
+dense2.forward(activation1.output);
+activation2.forward(dense2.output);
+
+console.log(activation2.output);
diff --git a/Javascript/p007-Categorical-Cross-Entropy-Loss.js b/Javascript/p007-Categorical-Cross-Entropy-Loss.js
new file mode 100644
index 0000000..0437fcf
--- /dev/null
+++ b/Javascript/p007-Categorical-Cross-Entropy-Loss.js
@@ -0,0 +1,16 @@
+/*
+* Calculating the loss with Categorical Cross Entropy
+* Associated with YT NNFS tutorial: https://www.youtube.com/watch?v=dEXPMQXoiLc
+*/
+
+softmax_output = [0.7, 0.1, 0.2]
+target_output = [1, 0, 0]
+
+loss = -(math.log(softmax_output[0]) * target_output[0] +
+         math.log(softmax_output[1]) * target_output[1] +
+         math.log(softmax_output[2]) * target_output[2])
+
+console.log(loss)
+
+console.log(-math.log(0.7))
+console.log(-math.log(0.2))
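
Quick sanity check for the two new pieces above, not part of the diff itself: a minimal plain-Node sketch that uses `Math` instead of the global `math` the PR files assume, with made-up logits. It shows that softmax produces a probability distribution and that, for a one-hot target, categorical cross-entropy collapses to the negative log of the confidence at the correct class, matching the `-math.log(0.7)` printed in p007.

```js
// Softmax over one row of made-up raw outputs, then the one-hot
// cross-entropy shortcut. Plain Node, no mathjs needed.
const logits = [2.0, 1.0, 0.1];                 // example values, not from the PR
const exps = logits.map(Math.exp);
const normBase = exps.reduce((a, b) => a + b, 0);
const probs = exps.map(e => e / normBase);
console.log(probs.reduce((a, b) => a + b, 0));  // ~1: softmax output is a distribution

// Same numbers as p007: the one-hot target [1, 0, 0] picks out index 0,
// so the whole weighted sum reduces to -log(0.7).
const softmaxOutput = [0.7, 0.1, 0.2];
console.log(-Math.log(softmaxOutput[0]));       // ~0.3567, matches -math.log(0.7)
```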