14 changes: 13 additions & 1 deletion app/Graph/build.cpp
@@ -24,6 +24,7 @@ void build_graph(Tensor& input, Tensor& output, bool comments,
ImplType impl1 = parallel ? kTBB : kDefault;
ImplType impl2 = parallel ? kSTL : kDefault;
std::vector<std::shared_ptr<Layer>> layers;
std::vector<bool> layerpostop;

std::string json_file = MODEL_PATH_H5;
json model_data = read_json(json_file);
@@ -73,12 +74,14 @@ void build_graph(Tensor& input, Tensor& output, bool comments,
1, pads, 1, tmp_values, tmp_bias, impl2);
conv_layer->setName(kConvolution);
layers.push_back(conv_layer);
layerpostop.push_back(0);
if (comments) std::cout << "ConvLayer added to layers." << std::endl;
}
if (layer_type.find("relu") != std::string::npos) {
auto ew_layer = std::make_shared<EWLayer>("relu");
ew_layer->setName(kElementWise);
layers.push_back(ew_layer);
layerpostop.push_back(1);
if (comments)
std::cout << "Element wise (relu) added to layers" << std::endl;
}
@@ -99,6 +102,7 @@ void build_graph(Tensor& input, Tensor& output, bool comments,
auto fc_layer = std::make_shared<FCLayer>(tensor, tmp_bias);
fc_layer->setName(kFullyConnected);
layers.push_back(fc_layer);
layerpostop.push_back(0);
if (comments) std::cout << "DenseLayer added to layers." << std::endl;
}

@@ -116,6 +120,7 @@ void build_graph(Tensor& input, Tensor& output, bool comments,
auto pool_layer = std::make_shared<PoolingLayer>(shape, pooltype, impl1);
pool_layer->setName(kPooling);
layers.push_back(pool_layer);
layerpostop.push_back(0);
if (comments) std::cout << "PoolingLayer added to layers." << std::endl;
}

@@ -124,13 +129,15 @@ void build_graph(Tensor& input, Tensor& output, bool comments,
std::make_shared<FlattenLayer>(std::vector<size_t>({0, 3, 2, 1}));
flatten_layer->setName(kFlatten);
layers.push_back(flatten_layer);
layerpostop.push_back(0);
if (comments) std::cout << "FlattenLayer added to layers." << std::endl;
}

if (layer_type.find("Dropout") != std::string::npos) {
auto dropout_layer = std::make_shared<DropOutLayer>(0.0);
dropout_layer->setName(kDropout);
layers.push_back(dropout_layer);
layerpostop.push_back(0);
if (comments)
std::cout
<< "DropOutLayer added to layers with probability 0.4 (turned "
@@ -155,7 +162,12 @@ void build_graph(Tensor& input, Tensor& output, bool comments,
<< std::endl;

for (size_t i = 0; i < layers.size() - 1; ++i) {
graph.makeConnection(*layers[i], *layers[i + 1]);
if (layerpostop[i]) {
layers[i - 1]->postops.layers.push_back(layers[i].get());
layers[i - 1]->postops.count++;
graph.makeConnection(*layers[i - 1], *layers[i + 1]);
} else if (!layerpostop[i + 1])
graph.makeConnection(*layers[i], *layers[i + 1]);
}

graph.setOutput(*layers.back(), output);
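The connection loop above is the heart of the change: a layer flagged in layerpostop is not wired into the graph as a node of its own. Instead it is appended to its predecessor's postops list and the edge is bridged straight from that predecessor to the successor. Below is a minimal, self-contained sketch of the same rewiring pass; the stub Layer type and the main scaffolding are illustrative, not repository code.

#include <cstddef>
#include <iostream>
#include <memory>
#include <string>
#include <utility>
#include <vector>

// Illustrative stand-in for the repository's Layer type.
struct Layer;

struct PostOperations {
  std::vector<Layer*> layers;  // fused post-ops, non-owning
  unsigned int count = 0;
};

struct Layer {
  std::string name;
  PostOperations postops;
  explicit Layer(std::string n) : name(std::move(n)) {}
};

int main() {
  // conv -> relu -> fc, with the relu flagged as a post-op.
  std::vector<std::shared_ptr<Layer>> layers = {
      std::make_shared<Layer>("conv"), std::make_shared<Layer>("relu"),
      std::make_shared<Layer>("fc")};
  std::vector<bool> layerpostop = {false, true, false};

  for (std::size_t i = 0; i < layers.size() - 1; ++i) {
    if (layerpostop[i]) {
      // Fold the flagged layer into its predecessor and bridge the edge
      // from that predecessor to the layer after the post-op.
      layers[i - 1]->postops.layers.push_back(layers[i].get());
      layers[i - 1]->postops.count++;
      std::cout << "edge: " << layers[i - 1]->name << " -> "
                << layers[i + 1]->name << "\n";
    } else if (!layerpostop[i + 1]) {
      std::cout << "edge: " << layers[i]->name << " -> "
                << layers[i + 1]->name << "\n";
    }
  }
  // Prints a single edge "conv -> fc"; the relu now runs as conv's post-op.
}

Note that the layers[i - 1] access assumes a flagged layer never comes first in the list, which holds here because build_graph always pushes a convolution before pushing the matching relu.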
6 changes: 6 additions & 0 deletions include/graph/graph.hpp
@@ -114,6 +114,12 @@ class Graph {
weights_.push_back(layers_[i]->get_weights());
#endif
inten_ = *outten_;
if (layers_[i]->postops.count > 0) {
for (unsigned int j = 0; j < layers_[i]->postops.count; j++) {
layers_[i]->postops.layers[j]->run(inten_, *outten_);
}
inten_ = *outten_;
}
#ifdef ENABLE_STATISTIC_TIME
auto end = std::chrono::high_resolution_clock::now();
auto elapsed =
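On the execution side, Graph::inference() now runs a layer's fused post-ops immediately after the layer itself: the layer's output is copied into inten_, each post-op consumes it and writes *outten_, and the result is copied back before the next node runs. A condensed, commented view of that block, using the same names as the diff and assuming run(in, out) reads in and writes out:

inten_ = *outten_;  // the main layer's output becomes the working tensor
if (layers_[i]->postops.count > 0) {
  for (unsigned int j = 0; j < layers_[i]->postops.count; j++) {
    // Apply post-op j to the working tensor; the result lands in *outten_.
    layers_[i]->postops.layers[j]->run(inten_, *outten_);
  }
  inten_ = *outten_;  // carry the post-op result forward
}

Because inten_ is refreshed only after the loop, every post-op reads the same input; with more than one post-op attached, each would overwrite the previous result rather than compose with it. The tests below attach a single post-op, which this handles correctly.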
8 changes: 8 additions & 0 deletions include/layers/Layer.hpp
@@ -25,10 +25,18 @@ enum LayerType : uint8_t {

enum ImplType : uint8_t { kDefault, kTBB, kSTL };

class Layer;

struct PostOperations {
std::vector<Layer*> layers;
unsigned int count = 0;
};

class Layer {
public:
Layer() = default;
virtual ~Layer() = default;
PostOperations postops;
int getID() const { return id_; }
void setID(int id) { id_ = id; }
LayerType getName() const { return type_; }
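PostOperations stores raw, non-owning Layer* pointers, so whoever attaches a post-op must keep it alive for as long as the owning layer is used. Attaching one takes two statements; the sketch below uses the repository's types and mirrors the second test further down:

ConvolutionalLayer a2(1, 0, 1, kernel);  // owning layer
EWLayer a4("linear", 2.0F, 3.0F);        // post-op: computes 2x + 3
a2.postops.layers.push_back(&a4);        // non-owning: a4 must outlive a2's use
a2.postops.count++;                      // count is kept in step by hand

Keeping count as a separate field duplicates layers.size(), so callers must update both, as the builder in app/Graph/build.cpp does.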
66 changes: 66 additions & 0 deletions test/inference/test_inference.cpp
@@ -138,3 +138,69 @@ TEST(bfs, check_end_to_end) {
std::vector<float> res(3, 21);
ASSERT_EQ(tmp, res);
}
TEST(bfs, check_struct_layer) {
Graph graph(5);
Shape sh1({1, 5, 5, 3});
std::vector<int> vec;
vec.reserve(75);
for (int i = 0; i < 75; ++i) {
vec.push_back(3);
}
Tensor input = make_tensor(vec, sh1);
Tensor output = make_tensor(vec, sh1);
InputLayer a1(kNhwc, kNchw, 1, 2);
a1.setName(kInput);
std::vector<int> kernelvec = {1, 1, 1, 1, 1, 1, 1, 1, 1};
Shape sh2({3, 3});
Tensor kernel = make_tensor(kernelvec, sh2);
ConvolutionalLayer a2(1, 0, 1, kernel);
ConvolutionalLayer a3(1, 0, 1, kernel);

// EWLayer a4("linear", 2.0F, 3.0F);
// a2.postops.layers.push_back(&a4);
// a2.postops.count++;

a2.setName(kConvolution);
a3.setName(kConvolution);
graph.setInput(a1, input);
graph.makeConnection(a1, a2);
graph.makeConnection(a2, a3);
graph.setOutput(a3, output);
graph.inference();
std::vector<int> tmp = *output.as<int>();
std::vector<int> res = {81, 81, 81};
ASSERT_EQ(tmp, res);
}
TEST(bfs, check_struct_layer_added) {
Graph graph(5);
Shape sh1({1, 5, 5, 3});
std::vector<int> vec;
vec.reserve(75);
for (int i = 0; i < 75; ++i) {
vec.push_back(3);
}
Tensor input = make_tensor(vec, sh1);
Tensor output = make_tensor(vec, sh1);
InputLayer a1(kNhwc, kNchw, 1, 2);
a1.setName(kInput);
std::vector<int> kernelvec = {1, 1, 1, 1, 1, 1, 1, 1, 1};
Shape sh2({3, 3});
Tensor kernel = make_tensor(kernelvec, sh2);
ConvolutionalLayer a2(1, 0, 1, kernel);
ConvolutionalLayer a3(1, 0, 1, kernel);

EWLayer a4("linear", 2.0F, 3.0F);
a2.postops.layers.push_back(&a4);
a2.postops.count++;

a2.setName(kConvolution);
a3.setName(kConvolution);
graph.setInput(a1, input);
graph.makeConnection(a1, a2);
graph.makeConnection(a2, a3);
graph.setOutput(a3, output);
graph.inference();
std::vector<int> tmp = *output.as<int>();
std::vector<int> res = {189, 189, 189};
ASSERT_EQ(tmp, res);
}