Commit 8a3da6f

Update AlexNet example with accessors
1 parent 2aab316 commit 8a3da6f

5 files changed: 317 additions, 23 deletions

arm_compute/graph/Tensor.h

Lines changed: 10 additions & 0 deletions
@@ -67,6 +67,16 @@ class Tensor
      * @param[in] accessor Tensor accessor
      */
     template <typename AccessorType>
+    Tensor(TensorInfo &&info, std::unique_ptr<AccessorType> &&accessor)
+        : _target(TargetHint::DONT_CARE), _info(info), _accessor(std::move(accessor)), _tensor(nullptr)
+    {
+    }
+    /** Constructor
+     *
+     * @param[in] info     Tensor info to use
+     * @param[in] accessor Tensor accessor
+     */
+    template <typename AccessorType>
     Tensor(TensorInfo &&info, AccessorType &&accessor)
         : _target(TargetHint::DONT_CARE), _info(info), _accessor(arm_compute::support::cpp14::make_unique<AccessorType>(std::forward<AccessorType>(accessor))), _tensor(nullptr)
     {

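The new overload lets a Tensor take ownership of an accessor that is already held in a std::unique_ptr (for example, one returned by a factory such as the get_input_accessor() helper added below), while the existing overload keeps wrapping a by-value accessor itself. A minimal usage sketch, assuming the types shown elsewhere in this commit; the variable names are illustrative only:

    // Existing overload: pass the accessor by value; the Tensor wraps it in a unique_ptr internally.
    Tensor a(TensorInfo(TensorShape(227U, 227U, 3U, 1U), 1, DataType::F32), DummyAccessor());

    // New overload: pass an accessor already owned by a unique_ptr, e.g. the result of a factory.
    Tensor b(TensorInfo(TensorShape(227U, 227U, 3U, 1U), 1, DataType::F32),
             get_input_accessor(image, mean_r, mean_g, mean_b));
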
examples/graph_alexnet.cpp

Lines changed: 69 additions & 12 deletions
@@ -63,35 +63,91 @@ std::unique_ptr<ITensorAccessor> get_accessor(const std::string &path, const std
     }
 }
 
+/** Generates appropriate input accessor according to the specified ppm_path
+ *
+ * @note If ppm_path is empty will generate a DummyAccessor else will generate a PPMAccessor
+ *
+ * @param[in] ppm_path Path to PPM file
+ * @param[in] mean_r   Red mean value to be subtracted from red channel
+ * @param[in] mean_g   Green mean value to be subtracted from green channel
+ * @param[in] mean_b   Blue mean value to be subtracted from blue channel
+ *
+ * @return An appropriate tensor accessor
+ */
+std::unique_ptr<ITensorAccessor> get_input_accessor(const std::string &ppm_path, float mean_r, float mean_g, float mean_b)
+{
+    if(ppm_path.empty())
+    {
+        return arm_compute::support::cpp14::make_unique<DummyAccessor>();
+    }
+    else
+    {
+        return arm_compute::support::cpp14::make_unique<PPMAccessor>(ppm_path, true, mean_r, mean_g, mean_b);
+    }
+}
+
+/** Generates appropriate output accessor according to the specified labels_path
+ *
+ * @note If labels_path is empty will generate a DummyAccessor else will generate a TopNPredictionsAccessor
+ *
+ * @param[in]  labels_path   Path to labels text file
+ * @param[in]  top_n         (Optional) Number of output classes to print
+ * @param[out] output_stream (Optional) Output stream
+ *
+ * @return An appropriate tensor accessor
+ */
+std::unique_ptr<ITensorAccessor> get_output_accessor(const std::string &labels_path, size_t top_n = 5, std::ostream &output_stream = std::cout)
+{
+    if(labels_path.empty())
+    {
+        return arm_compute::support::cpp14::make_unique<DummyAccessor>();
+    }
+    else
+    {
+        return arm_compute::support::cpp14::make_unique<TopNPredictionsAccessor>(labels_path, top_n, output_stream);
+    }
+}
+
 /** Example demonstrating how to implement AlexNet's network using the Compute Library's graph API
  *
  * @param[in] argc Number of arguments
- * @param[in] argv Arguments ( [optional] Path to the weights folder, [optional] batches )
+ * @param[in] argv Arguments ( [optional] Path to the weights folder, [optional] image, [optional] labels )
  */
 void main_graph_alexnet(int argc, const char **argv)
 {
-    std::string  data_path; /** Path to the trainable data */
-    unsigned int batches = 4; /** Number of batches */
+    std::string data_path; /* Path to the trainable data */
+    std::string image;     /* Image data */
+    std::string label;     /* Label data */
+
+    constexpr float mean_r = 122.68f; /* Mean value to subtract from red channel */
+    constexpr float mean_g = 116.67f; /* Mean value to subtract from green channel */
+    constexpr float mean_b = 104.01f; /* Mean value to subtract from blue channel */
 
     // Parse arguments
     if(argc < 2)
     {
         // Print help
-        std::cout << "Usage: " << argv[0] << " [path_to_data] [batches]\n\n";
+        std::cout << "Usage: " << argv[0] << " [path_to_data] [image] [labels]\n\n";
         std::cout << "No data folder provided: using random values\n\n";
     }
     else if(argc == 2)
     {
-        //Do something with argv[1]
         data_path = argv[1];
-        std::cout << "Usage: " << argv[0] << " [path_to_data] [batches]\n\n";
-        std::cout << "No number of batches where specified, thus will use the default : " << batches << "\n\n";
+        std::cout << "Usage: " << argv[0] << " " << argv[1] << " [image] [labels]\n\n";
+        std::cout << "No image provided: using random values\n\n";
+    }
+    else if(argc == 3)
+    {
+        data_path = argv[1];
+        image     = argv[2];
+        std::cout << "Usage: " << argv[0] << " " << argv[1] << " " << argv[2] << " [labels]\n\n";
+        std::cout << "No text file with labels provided: skipping output accessor\n\n";
     }
     else
     {
-        //Do something with argv[1] and argv[2]
         data_path = argv[1];
-        batches   = std::strtol(argv[2], nullptr, 0);
+        image     = argv[2];
+        label     = argv[3];
     }
 
     // Check if OpenCL is available and initialize the scheduler
@@ -106,7 +162,8 @@ void main_graph_alexnet(int argc, const char **argv)
     arm_compute::Logger::get().set_logger(std::cout, arm_compute::LoggerVerbosity::INFO);
 
     graph << hint
-          << Tensor(TensorInfo(TensorShape(227U, 227U, 3U, batches), 1, DataType::F32), DummyAccessor())
+          << Tensor(TensorInfo(TensorShape(227U, 227U, 3U, 1U), 1, DataType::F32),
+                    get_input_accessor(image, mean_r, mean_g, mean_b))
           // Layer 1
           << ConvolutionLayer(
               11U, 11U, 96U,
@@ -167,7 +224,7 @@ void main_graph_alexnet(int argc, const char **argv)
               get_accessor(data_path, "/cnn_data/alexnet_model/fc8_b.npy"))
           // Softmax
          << SoftmaxLayer()
-          << Tensor(DummyAccessor());
+          << Tensor(get_output_accessor(label, 5));
 
     // Run graph
     graph.run();
@@ -176,7 +233,7 @@ void main_graph_alexnet(int argc, const char **argv)
 /** Main program for AlexNet
  *
  * @param[in] argc Number of arguments
- * @param[in] argv Arguments ( [optional] Path to the weights folder, [optional] batches )
+ * @param[in] argv Arguments ( [optional] Path to the weights folder, [optional] image, [optional] labels )
  */
 int main(int argc, const char **argv)
 {

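Because get_output_accessor() takes its output stream as a parameter (defaulting to std::cout), the top-N report can be redirected anywhere a std::ostream can go. A hedged sketch, not part of the commit, reusing the graph, label, and SoftmaxLayer names from the example above; the file name is purely illustrative:

    #include <fstream>

    std::ofstream results("predictions.txt"); // hypothetical output file
    graph << SoftmaxLayer()
          << Tensor(get_output_accessor(label, 5, results)); // top-5 report goes to the file instead of std::cout
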
utils/GraphUtils.cpp

Lines changed: 100 additions & 11 deletions
@@ -34,8 +34,10 @@
 #include "arm_compute/core/PixelValue.h"
 #include "libnpy/npy.hpp"
 
+#include <algorithm>
+#include <iomanip>
+#include <ostream>
 #include <random>
-#include <sstream>
 
 using namespace arm_compute::graph_utils;
 
@@ -48,16 +50,8 @@ bool PPMWriter::access_tensor(ITensor &tensor)
 {
     std::stringstream ss;
     ss << _name << _iterator << ".ppm";
-    if(dynamic_cast<Tensor *>(&tensor) != nullptr)
-    {
-        arm_compute::utils::save_to_ppm(dynamic_cast<Tensor &>(tensor), ss.str());
-    }
-#ifdef ARM_COMPUTE_CL
-    else if(dynamic_cast<CLTensor *>(&tensor) != nullptr)
-    {
-        arm_compute::utils::save_to_ppm(dynamic_cast<CLTensor &>(tensor), ss.str());
-    }
-#endif /* ARM_COMPUTE_CL */
+
+    arm_compute::utils::save_to_ppm(tensor, ss.str());
 
     _iterator++;
     if(_maximum == 0)
@@ -87,6 +81,101 @@ bool DummyAccessor::access_tensor(ITensor &tensor)
     return ret;
 }
 
+PPMAccessor::PPMAccessor(const std::string &ppm_path, bool bgr, float mean_r, float mean_g, float mean_b)
+    : _ppm_path(ppm_path), _bgr(bgr), _mean_r(mean_r), _mean_g(mean_g), _mean_b(mean_b)
+{
+}
+
+bool PPMAccessor::access_tensor(ITensor &tensor)
+{
+    utils::PPMLoader ppm;
+    const float      mean[3] =
+    {
+        _bgr ? _mean_b : _mean_r,
+        _mean_g,
+        _bgr ? _mean_r : _mean_b
+    };
+
+    // Open PPM file
+    ppm.open(_ppm_path);
+
+    // Fill the tensor with the PPM content (BGR)
+    ppm.fill_planar_tensor(tensor, _bgr);
+
+    // Subtract the mean value from each channel
+    Window window;
+    window.use_tensor_dimensions(tensor.info()->tensor_shape());
+
+    execute_window_loop(window, [&](const Coordinates & id)
+    {
+        const float value = *reinterpret_cast<float *>(tensor.ptr_to_element(id)) - mean[id.z()];
+        *reinterpret_cast<float *>(tensor.ptr_to_element(id)) = value;
+    });
+
+    return true;
+}
+
+TopNPredictionsAccessor::TopNPredictionsAccessor(const std::string &labels_path, size_t top_n, std::ostream &output_stream)
+    : _labels(), _output_stream(output_stream), _top_n(top_n)
+{
+    _labels.clear();
+
+    std::ifstream ifs;
+
+    try
+    {
+        ifs.exceptions(std::ifstream::badbit);
+        ifs.open(labels_path, std::ios::in | std::ios::binary);
+
+        for(std::string line; !std::getline(ifs, line).fail();)
+        {
+            _labels.emplace_back(line);
+        }
+    }
+    catch(const std::ifstream::failure &e)
+    {
+        ARM_COMPUTE_ERROR("Accessing %s: %s", labels_path.c_str(), e.what());
+    }
+}
+
+bool TopNPredictionsAccessor::access_tensor(ITensor &tensor)
+{
+    ARM_COMPUTE_ERROR_ON_DATA_TYPE_CHANNEL_NOT_IN(&tensor, 1, DataType::F32);
+    ARM_COMPUTE_ERROR_ON(_labels.size() != tensor.info()->dimension(0));
+
+    // Get the predicted class
+    std::vector<float>  classes_prob;
+    std::vector<size_t> index;
+
+    const auto   output_net  = reinterpret_cast<float *>(tensor.buffer() + tensor.info()->offset_first_element_in_bytes());
+    const size_t num_classes = tensor.info()->dimension(0);
+
+    classes_prob.resize(num_classes);
+    index.resize(num_classes);
+
+    std::copy(output_net, output_net + num_classes, classes_prob.begin());
+
+    // Sort results
+    std::iota(std::begin(index), std::end(index), static_cast<size_t>(0));
+    std::sort(std::begin(index), std::end(index),
+              [&](size_t a, size_t b)
+    {
+        return classes_prob[a] > classes_prob[b];
+    });
+
+    _output_stream << "---------- Top " << _top_n << " predictions ----------" << std::endl
+                   << std::endl;
+    for(size_t i = 0; i < _top_n; ++i)
+    {
+        _output_stream << std::fixed << std::setprecision(4)
+                       << classes_prob[index.at(i)]
+                       << " - [id = " << index.at(i) << "]"
+                       << ", " << _labels[index.at(i)] << std::endl;
+    }
+
+    return false;
+}
+
 RandomAccessor::RandomAccessor(PixelValue lower, PixelValue upper, std::random_device::result_type seed)
     : _lower(lower), _upper(upper), _seed(seed)
 {

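TopNPredictionsAccessor sorts class indices rather than the probabilities themselves, so each score stays paired with its class id and label. A minimal, self-contained sketch of that pattern with made-up probabilities (plain C++, no library types; the values are illustrative only):

    #include <algorithm>
    #include <cstddef>
    #include <iostream>
    #include <numeric>
    #include <vector>

    int main()
    {
        // Hypothetical softmax output for a 5-class network
        const std::vector<float> classes_prob = { 0.02f, 0.71f, 0.05f, 0.20f, 0.02f };

        // Sort indices by descending probability so each score keeps its class id
        std::vector<size_t> index(classes_prob.size());
        std::iota(index.begin(), index.end(), static_cast<size_t>(0));
        std::sort(index.begin(), index.end(),
                  [&](size_t a, size_t b) { return classes_prob[a] > classes_prob[b]; });

        const size_t top_n = 3;
        for(size_t i = 0; i < top_n; ++i)
        {
            std::cout << classes_prob[index[i]] << " - [id = " << index[i] << "]\n";
        }
        return 0;
    }
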
utils/GraphUtils.h

Lines changed: 56 additions & 0 deletions
@@ -29,6 +29,8 @@
 #include "arm_compute/graph/Types.h"
 
 #include <random>
+#include <string>
+#include <vector>
 
 namespace arm_compute
 {
@@ -76,6 +78,60 @@ class DummyAccessor final : public graph::ITensorAccessor
     unsigned int _maximum;
 };
 
+/** PPM accessor class */
+class PPMAccessor final : public graph::ITensorAccessor
+{
+public:
+    /** Constructor
+     *
+     * @param[in] ppm_path Path to PPM file
+     * @param[in] bgr      (Optional) Fill the first plane with blue channel (default = false)
+     * @param[in] mean_r   (Optional) Red mean value to be subtracted from red channel
+     * @param[in] mean_g   (Optional) Green mean value to be subtracted from green channel
+     * @param[in] mean_b   (Optional) Blue mean value to be subtracted from blue channel
+     */
+    PPMAccessor(const std::string &ppm_path, bool bgr = true, float mean_r = 0.0f, float mean_g = 0.0f, float mean_b = 0.0f);
+    /** Allow instances of this class to be move constructed */
+    PPMAccessor(PPMAccessor &&) = default;
+
+    // Inherited methods overriden:
+    bool access_tensor(ITensor &tensor) override;
+
+private:
+    const std::string &_ppm_path;
+    const bool         _bgr;
+    const float        _mean_r;
+    const float        _mean_g;
+    const float        _mean_b;
+};
+
+/** Result accessor class */
+class TopNPredictionsAccessor final : public graph::ITensorAccessor
+{
+public:
+    /** Constructor
+     *
+     * @param[in]  labels_path   Path to labels text file.
+     * @param[in]  top_n         (Optional) Number of output classes to print
+     * @param[out] output_stream (Optional) Output stream
+     */
+    TopNPredictionsAccessor(const std::string &labels_path, size_t top_n = 5, std::ostream &output_stream = std::cout);
+    /** Allow instances of this class to be move constructed */
+    TopNPredictionsAccessor(TopNPredictionsAccessor &&) = default;
+    /** Prevent instances of this class from being copied (As this class contains pointers) */
+    TopNPredictionsAccessor(const TopNPredictionsAccessor &) = delete;
+    /** Prevent instances of this class from being copied (As this class contains pointers) */
+    TopNPredictionsAccessor &operator=(const TopNPredictionsAccessor &) = delete;
+
+    // Inherited methods overriden:
+    bool access_tensor(ITensor &tensor) override;
+
+private:
+    std::vector<std::string> _labels;
+    std::ostream            &_output_stream;
+    size_t                   _top_n;
+};
+
 /** Random accessor class */
 class RandomAccessor final : public graph::ITensorAccessor
 {

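The header shows the pattern every accessor in this commit follows: derive from graph::ITensorAccessor and override access_tensor(). A hypothetical sketch of a custom accessor written against that same interface; the class name and behaviour are illustrative only, not part of the commit, and it assumes the library headers used by GraphUtils.h plus <iostream>:

    /** Hypothetical accessor that just prints the tensor's first dimension */
    class ShapePrinterAccessor final : public graph::ITensorAccessor
    {
    public:
        // Inherited methods overriden:
        bool access_tensor(ITensor &tensor) override
        {
            std::cout << "Output size: " << tensor.info()->dimension(0) << std::endl;
            return true; // mirror PPMAccessor's return value
        }
    };
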