10 changes: 5 additions & 5 deletions Javascript/p005-ReLU-Activation.js
@@ -3,7 +3,7 @@ Creates a dense layer of neurons with a ReLU activation function, and feeds forw
Associated YT tutorial: https://www.youtu.be/gmjzbpSVY1A
*/

const math = require("mathjs");
//const math = require("mathjs");

// Moved this code from spiral-data.js written by @vancegillies
// Updated by @daniel-kukiela
@@ -47,7 +47,7 @@ class Layer_Dense {
this.weights = math.random([n_inputs, n_neurons], -1.0, 1.0);
this.biases = math.zeros(1, n_neurons);
}

forward (inputs) {
var biasesmat = this.biases;
// Since only adding matrices elementwise is supported, you need to make the biases into a matrix and not a vector.
@@ -58,16 +58,16 @@ class Layer_Dense {

class Activation_ReLU {
constructor () {}

forward (inputs) {
this.output = math.matrix(inputs._data.map(layer => layer.map(i => i<0?0:i)));
}
}

var layer1 = new Layer_Dense(4, 5);
var layer1 = new Layer_Dense(2, 5);
var activation1 = new Activation_ReLU();

layer1.forward(X);
//console.log(layer1.output);
activation1.forward(layer1.output);
console.log(activation1.output);
console.log(activation1.output);
102 changes: 102 additions & 0 deletions Javascript/p006-Softmax-Activation.js
@@ -0,0 +1,102 @@
/* This is a JavaScript implementation of the Neural Networks from Scratch in Python series.
 *
 * The part 6 additions, i.e. the Softmax activation class, are declared and defined below the dense layer code.
*
* Link to the series on youtube: https://www.youtube.com/watch?v=Wo5dMEP_BbI&list=PLQVvvaa0QuDcjD5BAw2DxE6OF2tius3V3
*/

// const math = require("mathjs");

// Moved this code from spiral-data.js written by @vancegillies
// Updated by @daniel-kukiela
function spiral_data(points, classes) {
// Using MathJs functions to make matrices with zeros but converting to arrays for simplicity
const X = math.zeros(points * classes, 2).toArray();
const y = math.zeros(points * classes, "dense").toArray();
let ix = 0;
for (let class_number = 0; class_number < classes; class_number++) {
let r = 0;
let t = class_number * 4;

while (r <= 1 && t <= (class_number + 1) * 4) {
// adding some randomness to t
const random_t = t + math.random(points) * 0.008;
// Was `* 0.2` but reduced so you can somewhat see the arms of the spiral in the visualization
// Feel free to change it back

// converting from polar to cartesian coordinates
X[ix][0] = r * math.sin(random_t * 2.5);
X[ix][1] = r * math.cos(random_t * 2.5);
y[ix] = class_number;

// the below two statements achieve linspace-like functionality
r += 1.0 / (points - 1);
t += 4.0 / (points - 1);

ix++; // increment index
}
}
// Returning as MathJs matrices; they could be plain arrays, it doesn't really matter
return [math.matrix(X), math.matrix(y)];
}

let [X, y] = spiral_data(100, 3);
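// X now holds 300 samples with 2 features each; y holds the corresponding class labels (0, 1 or 2)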

class Layer_Dense {
constructor (n_inputs, n_neurons){
this.weights = math.random([n_inputs, n_neurons], -1.0, 1.0);
this.biases = math.zeros(1, n_neurons);
}

forward (inputs) {
var biasesmat = this.biases;
// Since only adding matrices elementwise is supported, you need to make the biases into a matrix and not a vector.
for (let i = 0; i < inputs.size()[0] - 1; i++) { biasesmat = math.concat(biasesmat, this.biases, 0); }
this.output = math.add(math.multiply(inputs, this.weights), biasesmat);
}
}

class Activation_ReLU {
forward (inputs) {
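// ReLU: negative inputs become 0, positive inputs pass through unchanged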
this.output = math.matrix(inputs._data.map(layer => layer.map(i => i<0?0:i)));
}
}

class Activation_Softmax {
    forward (inputs) {
        // Softmax is applied per sample (row by row): exponentiate each value,
        // then divide by that row's sum so every row of probabilities adds up to 1.
        this.output = math.matrix(inputs._data.map((row) => {
            // Subtracting the row maximum before exponentiating avoids overflow
            const row_max = math.max(row);
            const exp_values = row.map((value) => math.exp(value - row_max));
            const norm_base = math.sum(exp_values);
            return exp_values.map((value) => value / norm_base);
        }));
    }
}

var dense1 = new Layer_Dense(2, 3);          // 2 input features (x, y) -> 3 neurons
var activation1 = new Activation_ReLU();

var dense2 = new Layer_Dense(3, 3);          // 3 inputs (from dense1) -> 3 output neurons, one per class
var activation2 = new Activation_Softmax();


dense1.forward(X);
activation1.forward(dense1.output);

dense2.forward(activation1.output);
activation2.forward(dense2.output);
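// Optional sanity check (assumes the row-wise softmax above): each sample's
// probabilities should sum to roughly 1.
// console.log(math.sum(activation2.output._data[0]));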

console.log(activation2.output);
16 changes: 16 additions & 0 deletions Javascript/p007-Categorical-Cross-Entropy-Loss.js
@@ -0,0 +1,16 @@
/*
* Calculating the loss with Categorical Cross Entropy
* Associated with YT NNFS tutorial: https://www.youtube.com/watch?v=dEXPMQXoiLc
*/
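// const math = require("mathjs"); // uncomment when running under Node.js; otherwise `math` is assumed to be a global, as in the other files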

const softmax_output = [0.7, 0.1, 0.2];   // example output of a softmax layer
const target_output = [1, 0, 0];          // one-hot encoded target class

const loss = -(math.log(softmax_output[0]) * target_output[0] +
               math.log(softmax_output[1]) * target_output[1] +
               math.log(softmax_output[2]) * target_output[2]);
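// With a one-hot target vector only the correct class contributes, so
// loss = -log(0.7) ≈ 0.3567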

console.log(loss);

console.log(-math.log(0.7));
console.log(-math.log(0.5));