28
28
#include " utils/args_helper.hpp"
29
29
30
30
#ifndef UNUSED
31
- #ifdef _WIN32
32
- #define UNUSED
33
- #else
34
- #define UNUSED __attribute__ ((unused))
35
- #endif
31
+ #ifdef _WIN32
32
+ #define UNUSED
33
+ #else
34
+ #define UNUSED __attribute__ ((unused))
35
+ #endif
36
36
#endif
37
37
38
38
template <typename T, std::size_t N>
@@ -92,8 +92,8 @@ class Color {
92
92
* @param b - value for blue channel
93
93
*/
94
94
Color (unsigned char r,
95
- unsigned char g,
96
- unsigned char b) : _r(r), _g(g), _b(b) {}
95
+ unsigned char g,
96
+ unsigned char b) : _r(r), _g(g), _b(b) {}
97
97
98
98
inline unsigned char red () const {
99
99
return _r;
@@ -138,16 +138,17 @@ inline std::size_t getTensorWidth(const InferenceEngine::TensorDesc& desc) {
138
138
const auto & dims = desc.getDims ();
139
139
const auto & size = dims.size ();
140
140
if ((size >= 2 ) &&
141
- (layout == InferenceEngine::Layout::NCHW ||
142
- layout == InferenceEngine::Layout::NHWC ||
143
- layout == InferenceEngine::Layout::NCDHW ||
144
- layout == InferenceEngine::Layout::NDHWC ||
145
- layout == InferenceEngine::Layout::OIHW ||
146
- layout == InferenceEngine::Layout::CHW ||
147
- layout == InferenceEngine::Layout::HW)) {
141
+ (layout == InferenceEngine::Layout::NCHW ||
142
+ layout == InferenceEngine::Layout::NHWC ||
143
+ layout == InferenceEngine::Layout::NCDHW ||
144
+ layout == InferenceEngine::Layout::NDHWC ||
145
+ layout == InferenceEngine::Layout::OIHW ||
146
+ layout == InferenceEngine::Layout::CHW ||
147
+ layout == InferenceEngine::Layout::HW)) {
148
148
// Regardless of layout, dimensions are stored in fixed order
149
149
return dims.back ();
150
- } else {
150
+ }
151
+ else {
151
152
throw std::runtime_error (" Tensor does not have width dimension" );
152
153
}
153
154
return 0 ;
@@ -158,72 +159,75 @@ inline std::size_t getTensorHeight(const InferenceEngine::TensorDesc& desc) {
158
159
const auto & dims = desc.getDims ();
159
160
const auto & size = dims.size ();
160
161
if ((size >= 2 ) &&
161
- (layout == InferenceEngine::Layout::NCHW ||
162
- layout == InferenceEngine::Layout::NHWC ||
163
- layout == InferenceEngine::Layout::NCDHW ||
164
- layout == InferenceEngine::Layout::NDHWC ||
165
- layout == InferenceEngine::Layout::OIHW ||
166
- layout == InferenceEngine::Layout::CHW ||
167
- layout == InferenceEngine::Layout::HW)) {
162
+ (layout == InferenceEngine::Layout::NCHW ||
163
+ layout == InferenceEngine::Layout::NHWC ||
164
+ layout == InferenceEngine::Layout::NCDHW ||
165
+ layout == InferenceEngine::Layout::NDHWC ||
166
+ layout == InferenceEngine::Layout::OIHW ||
167
+ layout == InferenceEngine::Layout::CHW ||
168
+ layout == InferenceEngine::Layout::HW)) {
168
169
// Regardless of layout, dimensions are stored in fixed order
169
170
return dims.at (size - 2 );
170
- } else {
171
+ }
172
+ else {
171
173
throw std::runtime_error (" Tensor does not have height dimension" );
172
174
}
173
175
return 0 ;
174
176
}
175
177
176
178
/**
 * @brief Extracts the channel (C) dimension from a tensor descriptor.
 * @param desc tensor descriptor to query
 * @return the channel count for the descriptor's layout
 * @throws std::runtime_error when the layout defines no channel dimension
 */
inline std::size_t getTensorChannels(const InferenceEngine::TensorDesc& desc) {
    const auto& layout = desc.getLayout();
    // Layouts that carry a channel axis.
    const bool layoutHasChannels =
        layout == InferenceEngine::Layout::NCHW  ||
        layout == InferenceEngine::Layout::NHWC  ||
        layout == InferenceEngine::Layout::NCDHW ||
        layout == InferenceEngine::Layout::NDHWC ||
        layout == InferenceEngine::Layout::C     ||
        layout == InferenceEngine::Layout::CHW   ||
        layout == InferenceEngine::Layout::NC    ||
        layout == InferenceEngine::Layout::CN;
    if (!layoutHasChannels) {
        throw std::runtime_error("Tensor does not have channels dimension");
    }
    // Regardless of layout, dimensions are stored in fixed order.
    const auto& dims = desc.getDims();
    switch (desc.getLayoutByDims(dims)) {
        case InferenceEngine::Layout::C:     return dims.at(0);
        case InferenceEngine::Layout::NC:    return dims.at(1);
        case InferenceEngine::Layout::CHW:   return dims.at(0);
        case InferenceEngine::Layout::NCHW:  return dims.at(1);
        case InferenceEngine::Layout::NCDHW: return dims.at(1);
        case InferenceEngine::Layout::SCALAR:   // [[fallthrough]]
        case InferenceEngine::Layout::BLOCKED:  // [[fallthrough]]
        default:
            throw std::runtime_error("Tensor does not have channels dimension");
    }
    return 0;  // unreachable: every switch path returns or throws
}
204
207
205
208
/**
 * @brief Extracts the batch (N) dimension from a tensor descriptor.
 * @param desc tensor descriptor to query
 * @return the batch size for the descriptor's layout
 * @throws std::runtime_error when the layout defines no batch dimension
 */
inline std::size_t getTensorBatch(const InferenceEngine::TensorDesc& desc) {
    const auto& layout = desc.getLayout();
    if (layout == InferenceEngine::Layout::NCHW ||
        layout == InferenceEngine::Layout::NHWC ||
        layout == InferenceEngine::Layout::NCDHW ||
        layout == InferenceEngine::Layout::NDHWC ||
        layout == InferenceEngine::Layout::NC ||
        layout == InferenceEngine::Layout::CN) {
        // Regardless of layout, dimensions are stored in fixed order.
        const auto& dims = desc.getDims();
        switch (desc.getLayoutByDims(dims)) {
            case InferenceEngine::Layout::NC:    return dims.at(0);
            case InferenceEngine::Layout::NCHW:  return dims.at(0);
            case InferenceEngine::Layout::NCDHW: return dims.at(0);
            case InferenceEngine::Layout::CHW:     // [[fallthrough]]
            case InferenceEngine::Layout::C:       // [[fallthrough]]
            case InferenceEngine::Layout::SCALAR:  // [[fallthrough]]
            case InferenceEngine::Layout::BLOCKED: // [[fallthrough]]
            default:
                // Fixed: this message previously said "channels dimension",
                // copy-pasted from getTensorChannels; this function queries batch.
                throw std::runtime_error("Tensor does not have batch dimension");
        }
    } else {
        throw std::runtime_error("Tensor does not have batch dimension");
    }
    return 0;  // unreachable: every path above returns or throws
}
0 commit comments