-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathCNN.m
More file actions
57 lines (53 loc) · 2.43 KB
/
CNN.m
File metadata and controls
57 lines (53 loc) · 2.43 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
classdef CNN < handle
    % CNN  Minimal convolutional network / layer manager, called by Main.
    %
    % Fixed architecture, applied in order by predict():
    %   conv -> ReLU -> 2x2 max-pool -> flatten -> fully connected -> softmax
    %
    % The layers are constructed lazily inside train(); predict() must not
    % be called before train() has run at least once. Because this is a
    % handle class, train() mutates the object in place; it also returns
    % `this` for backward compatibility with callers that reassign it.
    properties
        conLayer      % ConvolutionLayer, built in train()
        reluLayer     % ReLULayer
        poolingLayer  % PoolingLayer, 2x2 'max' (stride assumed 2 -- see "/ 4" note in train)
        flattenLayer  % FlattenLayer
        fcLayer       % FCLayer
        softMaxLayer  % SoftMaxLayer
    end
    methods
        function output = predict(this, input)
            % PREDICT  Forward pass of one sample through every layer in order.
            %   input  : a single 2-D sample (one H x W slice of the data stack)
            %   output : softmax probability column vector (length outputSize)
            output = this.conLayer.forward(input);
            output = this.reluLayer.forward(output);
            output = this.poolingLayer.forward(output);
            output = this.flattenLayer.forward(output);
            output = this.fcLayer.forward(output);
            output = this.softMaxLayer.forward(output);
        end
        function this = train(this, data, labels, kernelSize, depth, outputSize, epoch, learningRate)
            % TRAIN  Build the layer stack, then run per-sample SGD.
            %   data         : H x W x N stack of training samples
            %   labels       : outputSize x N one-hot target columns
            %   kernelSize   : side length of the square convolution kernel
            %   depth        : number of convolution kernels / feature maps
            %   outputSize   : number of classes (FC layer output width)
            %   epoch        : number of full passes over the data
            %   learningRate : step size passed to the trainable layers
            this.conLayer = ConvolutionLayer(size(data, 1), kernelSize, depth);
            this.reluLayer = ReLULayer();
            this.poolingLayer = PoolingLayer(2, 2, depth, 'max');
            this.flattenLayer = FlattenLayer();
            % Conv output is (H-k+1) x (W-k+1) x depth (valid convolution);
            % 2x2 pooling halves each spatial dimension, hence "/ 4".
            this.fcLayer = FCLayer((size(data, 1) - kernelSize + 1) * (size(data, 2) - kernelSize + 1) * depth / 4, outputSize);
            this.softMaxLayer = SoftMaxLayer();
            % Per sample: forward -> cross-entropy loss -> backprop -> update.
            for i = 1:epoch
                disp("Current Epoch: " + i);
                ceLoss = 0;
                for j = 1:size(data, 3)
                    result = this.predict(data(:, :, j));
                    % Gradient of cross-entropy w.r.t. the softmax input
                    % simplifies to (prediction - target), so this single
                    % subtraction covers both the loss and softmax layers.
                    loss = result - labels(:, j);
                    % Cross-entropy accumulated for monitoring only. Clamp
                    % the probabilities away from zero so a zero softmax
                    % output cannot inject -Inf/NaN into the epoch average.
                    ceLoss = ceLoss - sum(labels(:, j) .* log(max(result, eps)));
                    currGradient = this.fcLayer.backward(loss, learningRate);
                    currGradient = this.flattenLayer.backward(currGradient);
                    currGradient = this.poolingLayer.backward(currGradient);
                    currGradient = this.reluLayer.backward(currGradient);
                    currGradient = this.conLayer.backward(currGradient, learningRate);
                end
                ceLoss = ceLoss / size(data, 3);
                disp("Cross Entropy Loss For This Epoch: " + ceLoss);
            end
        end
    end
end