-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathneural_network_potential.cpp
More file actions
79 lines (66 loc) · 2.17 KB
/
neural_network_potential.cpp
File metadata and controls
79 lines (66 loc) · 2.17 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
#include "neural_network_potential.h"
Layer::Layer(int in, int out, vector<double> &w, vector<double> &b, string act) {
    // Build one dense layer from flat parameter buffers: `w` holds the
    // (out x in) weight matrix in Eigen's default column-major order and
    // `b` holds the `out`-sized bias vector. Both are copied into the
    // Eigen members via Map views over the vectors' storage.
    weight = Map<MatrixXd>(w.data(), out, in);
    bias = Map<VectorXd>(b.data(), out);
    // Resolve the activation name to a member-function pointer.
    set_activation(act);
}
Layer::~Layer() {}
void Layer::tanh(VectorXd &input, VectorXd &deriv) {
    // In-place tanh activation:
    //   y = tanh(x),  dy/dx = 1 - tanh(x)^2 = 1 - y^2
    // The output overwrites `input` first so the derivative can be
    // computed cheaply from the already-activated values.
    input = input.array().tanh();
    deriv = VectorXd::Ones(input.size()) - input.cwiseAbs2();
}
void Layer::elu(VectorXd &input, VectorXd &deriv) {
    // In-place ELU activation:
    //   f(x)  = exp(x) - 1  when x < 0,   x  otherwise
    //   f'(x) = exp(x)      when x < 0,   1  otherwise
    // DON'T CHANGE ORDER OF FOLLOWING CALCULATIONS !!!
    // The derivative must be evaluated first: the second statement
    // overwrites `input` with f(x), and f'(x) needs the pre-activation
    // values of x on both the condition and the exp().
    deriv = (input.array() < 0).select(input.array().exp(), VectorXd::Ones(input.size()));
    input = (input.array() < 0).select(input.array().exp() - 1.0, input);
}
void Layer::sigmoid(VectorXd &input, VectorXd &deriv) {
    // In-place logistic sigmoid:
    //   y = 1 / (1 + exp(-x)),  dy/dx = y * (1 - y)
    // `input` is replaced by y first; the derivative reuses it.
    input = (1.0 + (-input.array()).exp()).inverse();
    deriv = input.cwiseProduct(VectorXd::Ones(input.size()) - input);
}
void Layer::identity(VectorXd &input, VectorXd &deriv) {
    // Identity activation: y = x (input left untouched), dy/dx = 1.
    // Only the derivative vector needs to be filled in.
    deriv.setOnes(input.size());
}
void Layer::set_activation(string act) {
    // Select this layer's activation by name: "tanh", "elu", "sigmoid",
    // or "identity". An unrecognized name reports an error and falls
    // back to identity — the original left `activation` unset in that
    // case, so the later call through the member-function pointer in
    // feedforward() was undefined behavior.
    if (act == "tanh") {
        activation = &Layer::tanh;
    } else if (act == "elu") {
        activation = &Layer::elu;
    } else if (act == "sigmoid") {
        activation = &Layer::sigmoid;
    } else if (act == "identity") {
        activation = &Layer::identity;
    } else {
        cout << "ERROR!! not implemented ACTIVATION FUNCTION!!" << endl;
        activation = &Layer::identity;  // safe fallback instead of an unset pointer
    }
}
void Layer::feedforward(VectorXd &input, VectorXd &deriv) {
    // Affine transform followed by the configured activation, in place:
    // `input` becomes the layer output, `deriv` receives the activation
    // derivative. (`weight * input` is a single column, so the plain
    // vector sum is equivalent to the original colwise() broadcast.)
    input = weight * input + bias;
    (this->*activation)(input, deriv);
}
// Construct a network wrapper for `n` layers (layers are added elsewhere).
NNP::NNP(int n) : depth(n) {}
NNP::~NNP() {}
void NNP::feedforward(VectorXd input, VectorXd &dE_dG, int eflag,
                      double &evdwl) {
    // Forward pass through all layers, then backpropagation to obtain
    // dE/dG (gradient of the scalar energy w.r.t. the input descriptors).
    // `input` is taken by value on purpose: each layer overwrites it.
    // If `eflag` is set, the final scalar output is stored in `evdwl`.
    //
    // deriv[i] caches the activation derivative of layer i for the
    // backward pass. (The original used `VectorXd deriv[depth]`, a
    // variable-length array — a compiler extension, not standard C++.)
    vector<VectorXd> deriv(depth);
    for (int i = 0; i < depth; i++) layers[i].feedforward(input, deriv[i]);
    // Backward pass, seeded with dE/d(output) = 1 for the scalar energy:
    //   dE_dG <- W_i^T * (f'_i ∘ dE_dG)   for i = depth-1 .. 0
    dE_dG = VectorXd::Ones(1);
    for (int i = depth - 1; i >= 0; i--) {
        dE_dG = dE_dG.array() * deriv[i].array();
        // W^T * v keeps dE_dG a proper column vector; the original
        // assigned the row-vector expression v^T * W to a VectorXd,
        // which relies on Eigen resize behavior for mismatched shapes.
        dE_dG = layers[i].weight.transpose() * dE_dG;
    }
    if (eflag) evdwl = input.coeff(0);  // read accessor; coeffRef is for writes
}