Skip to content

Commit fe7fc7c

Browse files
author
Pashchenkov Maxim
committed
Applying comments, added dropping batch info, fixed bug, reduced memory usage
1 parent bb5407a commit fe7fc7c

File tree

7 files changed

+98
-85
lines changed

7 files changed

+98
-85
lines changed

demos/gesture_recognition_demo/cpp_gapi/gesture_recognition_demo_gapi.hpp

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -52,10 +52,10 @@ static void showUsage() {
5252
std::cout << " -o \"<path>\" " << output_message << std::endl;
5353
std::cout << " -limit \"<num>\" " << limit_message << std::endl;
5454
std::cout << " -res \"<WxH>\" " << camera_resolution_message << std::endl;
55-
std::cout << " -m_d \"<path>\" " << person_detection_model_message << std::endl;
56-
std::cout << " -m_a \"<path>\" " << action_recognition_model_message << std::endl;
57-
std::cout << " -d_d \"<device>\" " << target_device_message_d << std::endl;
58-
std::cout << " -d_a \"<device>\" " << target_device_message_a << std::endl;
55+
std::cout << " -m_d \"<path>\" " << person_detection_model_message << std::endl;
56+
std::cout << " -m_a \"<path>\" " << action_recognition_model_message << std::endl;
57+
std::cout << " -d_d \"<device>\" " << target_device_message_d << std::endl;
58+
std::cout << " -d_a \"<device>\" " << target_device_message_a << std::endl;
5959
std::cout << " -no_show " << no_show_message << std::endl;
6060
std::cout << " -c " << class_map_message << std::endl;
6161
std::cout << " -s " << samples_dir_message << std::endl;

demos/gesture_recognition_demo/cpp_gapi/include/custom_kernels.hpp

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,8 +11,6 @@
1111

1212
#include "tracker.hpp"
1313

14-
static std::atomic<size_t> current_person_id{ 0 };
15-
1614
namespace custom {
1715
G_API_OP(GetFastFrame,
1816
<cv::GMat(cv::GArray<cv::GMat>, cv::Size)>, "custom.get_fast_frame") {
@@ -48,12 +46,14 @@ G_API_OP(ConstructClip,
4846
<cv::GArray<cv::GMat>(const cv::GArray<cv::GMat>,
4947
const cv::GArray<TrackedObject>,
5048
const cv::Scalar,
51-
const cv::Size)>,
49+
const cv::Size,
50+
const cv::GOpaque<std::shared_ptr<size_t>>)>,
5251
"custom.construct_clip") {
5352
static cv::GArrayDesc outMeta(const cv::GArrayDesc&,
5453
const cv::GArrayDesc&,
5554
const cv::Scalar&,
56-
const cv::Size&) {
55+
const cv::Size&,
56+
const cv::GOpaqueDesc&) {
5757
return cv::empty_array_desc();
5858
}
5959
};

demos/gesture_recognition_demo/cpp_gapi/include/stream_source.hpp

Lines changed: 52 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -11,41 +11,43 @@
1111
#include <thread>
1212
#include <mutex>
1313

14-
std::mutex batch_lock; // batch frames filling will be locked
15-
1614
namespace custom {
1715
class BatchProducer {
1816
public:
19-
BatchProducer(const int batch_size, const float batch_fps)
20-
: batch_fps(batch_fps) {
17+
BatchProducer(const int batch_size, const float batch_fps, const std::shared_ptr<bool>& drop_batch)
18+
: batch_fps(batch_fps), drop_batch(drop_batch) {
2119
/** Create batch memory space for batch_size + 2 size
2220
* first additional element is fast image
2321
 * second additional Mat is a sacrifice of memory for data about the first element
2422
**/
2523
batch = std::vector<cv::Mat>(batch_size + 1 + 1); // 16(8) 15FPS-batch images + one fast image + batch description
26-
batch[batch_size + 1].create(cv::Size{1, 2}, CV_8U); // 1x2 Mat for first element position and is_filled batch state
24+
batch[batch_size + 1].create(cv::Size{ 1, 2 }, CV_8U); // 1x2 Mat for first element position and is_filled batch state
2725
auto ptr = batch[batch.size() - 1].ptr<uint8_t>();
2826
ptr[1] = 0; // set is_filled to NO
2927
}
3028
std::vector<cv::Mat> getBatch() {
31-
return batch;
29+
batch_lock.lock();
30+
std::vector<cv::Mat> temp_batch = batch;
31+
batch_lock.unlock();
32+
return temp_batch;
3233
}
3334

3435
void fillFastFrame(const cv::Mat& frame) {
35-
/** Copy fast frame from VideoCapture to batch memory as 17th (9) image **/
36-
batch[batch.size() - 2] = frame.clone(); // 16th (from 0)
36+
/** Copy fast frame from VideoCapture to batch memory as 17th (9) image **/
37+
frame.copyTo(batch[batch.size() - 2]); // 16th (from 0)
3738
}
3839

3940
void fillBatch(const cv::Mat& frame, std::chrono::steady_clock::time_point time) {
41+
if (*drop_batch > 0) {
42+
DropBatchInfo();
43+
}
4044
/** Place of new frame in batch **/
4145
const int step = updateStep(batch.size() - 2);
42-
batch_lock.lock();
4346
/** Adding of new image to batch. **/
44-
batch[step] = frame.clone();
47+
frame.copyTo(batch[step]);
4548
/** Putting of info about batch to additional element **/
4649
auto ptr = batch[batch.size() - 1].ptr<uint8_t>();
4750
ptr[0] = first_el; // position of start of batch in cyclic buffer
48-
batch_lock.unlock();
4951
const auto cur_step = std::chrono::steady_clock::now() - time;
5052
const auto gap = std::chrono::duration_cast<std::chrono::milliseconds>(cur_step);
5153
const auto time_step = std::chrono::milliseconds(int(1000.f / batch_fps)); // 1/15 sec
@@ -55,46 +57,59 @@ class BatchProducer {
5557
}
5658
private:
5759
float batch_fps = 0; // constant FPS for batch
60+
const std::shared_ptr<bool>& drop_batch;
5861
std::vector<cv::Mat> batch; // pack of images for graph
5962
size_t first_el = 0; // place of first image in batch
6063
size_t images_in_batch_count = 0; // number of images in batch
6164
bool is_filled = false; // is batch filled
65+
std::mutex batch_lock; // batch frames filling will be locked
6266

6367
int updateStep(const size_t batch_size) {
6468
if (images_in_batch_count < batch_size) {
6569
/** case when batch isn't filled **/
6670
return images_in_batch_count++;
67-
} else {
71+
}
72+
else {
6873
if (!is_filled) {
69-
batch_lock.lock();
7074
auto ptr = batch[batch.size() - 1].ptr<uint8_t>();
7175
ptr[1] = 1;
72-
batch_lock.unlock();
7376
is_filled = true;
7477
}
7578
/** Cyclic buffer is filled. Counting of step for next image **/
7679
first_el = (first_el + 1) % batch_size;
7780
return first_el;
7881
}
7982
}
83+
84+
void DropBatchInfo() {
85+
/** Drop batch information.
86+
* Processing will continue once the batch is filled again
87+
* Data of the batch will be overwritten */
88+
auto ptr = batch[batch.size() - 1].ptr<uint8_t>();
89+
ptr[0] = 0; // first position
90+
ptr[1] = 0; // reset the 'batch is filled' flag
91+
first_el = 0;
92+
images_in_batch_count = 0;
93+
is_filled = false;
94+
}
8095
};
8196

8297
void runBatchFill(const cv::Mat& frame,
83-
BatchProducer& producer,
84-
std::chrono::steady_clock::time_point& time) {
85-
while(!frame.empty()) {
98+
BatchProducer& producer,
99+
std::chrono::steady_clock::time_point& time) {
100+
while (!frame.empty()) {
86101
producer.fillBatch(frame, time);
87102
}
88103
}
89104

90-
class CustomCapSource : public cv::gapi::wip::IStreamSource
91-
{
105+
class CustomCapSource : public cv::gapi::wip::IStreamSource {
92106
public:
93107
explicit CustomCapSource(const std::shared_ptr<ImagesCapture>& cap,
94108
const cv::Size& frame_size,
95109
const int batch_size,
96-
const float batch_fps)
97-
: cap(cap), producer(batch_size, batch_fps), source_fps(cap->fps()) {
110+
const float batch_fps,
111+
const std::shared_ptr<bool>& drop_batch)
112+
: cap(cap), producer(batch_size, batch_fps, drop_batch), source_fps(cap->fps()) {
98113
if (source_fps <= 0.) {
99114
source_fps = 30.;
100115
wait_gap = true;
@@ -106,19 +121,21 @@ class CustomCapSource : public cv::gapi::wip::IStreamSource
106121
GAPI_Assert(false && "Batch must contain more than one image");
107122
}
108123

109-
fast_frame.create(frame_size, CV_8UC3);
110-
111124
/** Reading of frame with ImagesCapture class **/
112125
read_time = std::chrono::steady_clock::now();
113-
fast_frame = cap->read();
126+
cv::Mat fast_frame = cap->read();
114127
if (!fast_frame.data) {
115128
GAPI_Assert(false && "Couldn't grab the frame");
116129
}
117130

131+
producer.fillFastFrame(fast_frame);
132+
fast_frame.copyTo(thread_frame);
118133
/** Batch filling with constant time step **/
134+
std::thread fill_bath_thr(runBatchFill,
135+
std::cref(thread_frame),
136+
std::ref(producer),
137+
std::ref(read_time));
119138
fill_bath_thr.detach();
120-
121-
producer.fillFastFrame(fast_frame);
122139
first_batch = producer.getBatch();
123140
}
124141

@@ -129,14 +146,11 @@ class CustomCapSource : public cv::gapi::wip::IStreamSource
129146
bool wait_gap = false; // waiting for fast frame reading (stop main thread when got a non-positive FPS value)
130147
bool first_pulled = false; // is first already pulled
131148
std::vector<cv::Mat> first_batch; // batch from constructor
132-
cv::Mat fast_frame; // frame from cv::VideoCapture
149+
cv::Mat thread_frame; // frame for batch constant filling
150+
std::mutex thread_frame_lock;
133151
std::chrono::steady_clock::time_point read_time; // timepoint from cv::read()
134-
std::thread fill_bath_thr = std::thread(runBatchFill,
135-
std::ref(fast_frame),
136-
std::ref(producer),
137-
std::ref(read_time));
138152

139-
virtual bool pull(cv::gapi::wip::Data &data) override {
153+
virtual bool pull(cv::gapi::wip::Data& data) override {
140154
/** Is first already pulled **/
141155
if (!first_pulled) {
142156
GAPI_Assert(!first_batch.empty());
@@ -148,11 +162,16 @@ class CustomCapSource : public cv::gapi::wip::IStreamSource
148162

149163
/** Frame reading with ImagesCapture class **/
150164
read_time = std::chrono::steady_clock::now();
151-
fast_frame = cap->read();
165+
cv::Mat fast_frame = cap->read();
166+
152167
if (!fast_frame.data) {
153168
return false;
154169
}
155170

171+
thread_frame_lock.lock();
172+
fast_frame.copyTo(thread_frame);
173+
thread_frame_lock.unlock();
174+
156175
/** Put fast frame to the batch **/
157176
producer.fillFastFrame(fast_frame);
158177
if (wait_gap) {
@@ -175,5 +194,4 @@ class CustomCapSource : public cv::gapi::wip::IStreamSource
175194
return cv::GMetaArg{ cv::empty_array_desc() };
176195
}
177196
};
178-
179197
} // namespace custom

demos/gesture_recognition_demo/cpp_gapi/include/visualizer.hpp

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ class Visualizer {
1414
private:
1515
cv::Mat storageFrame;
1616
bool no_show_;
17-
std::string main_window_name_;
1817
std::string storage_window_name_;
1918
std::vector<std::string> labels_;
2019
std::string storage_path_;
@@ -43,18 +42,15 @@ class Visualizer {
4342
const std::string& storage_window_name,
4443
const std::vector<std::string>& labels,
4544
const std::string& storage_path) :
46-
no_show_(no_show), main_window_name_(main_window_name),
47-
storage_window_name_(storage_window_name), labels_(labels),
48-
storage_path_(storage_path) {
45+
no_show_(no_show), storage_window_name_(storage_window_name),
46+
labels_(labels), storage_path_(storage_path) {
4947
if (storage_path_.size() > 0) {
5048
getStorageElements();
5149
}
5250
if (no_show) {
5351
return;
5452
}
5553

56-
cv::namedWindow(main_window_name_);
57-
5854
if (storage_path_.size() > 0) {
5955
cv::namedWindow(storage_window_name_);
6056
gesture_cap_.open(storage_elements_.front().second);
@@ -66,6 +62,4 @@ class Visualizer {
6662
const int out_label_number,
6763
const size_t current_id,
6864
const int key);
69-
70-
void finalize();
7165
};

demos/gesture_recognition_demo/cpp_gapi/main.cpp

Lines changed: 18 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -65,9 +65,13 @@ int main(int argc, char *argv[]) {
6565
cap = openImagesCapture(FLAGS_i, FLAGS_loop, 0,
6666
std::numeric_limits<size_t>::max(), stringToSize(FLAGS_res));
6767

68+
/** Share runtime id with graph **/
69+
auto current_person_id_m = std::make_shared<size_t>(0);
70+
6871
/** ---------------- Main graph of demo ---------------- **/
6972
/** Graph inputs **/
7073
cv::GArray<cv::GMat> batch;
74+
cv::GOpaque<std::shared_ptr<size_t>> current_person_id;
7175

7276
cv::GMat fast_frame = custom::GetFastFrame::on(batch, frame_size);
7377

@@ -81,7 +85,7 @@ int main(int argc, char *argv[]) {
8185
cv::GArray<TrackedObject> tracked = custom::TrackPerson::on(fast_frame, objects);
8286

8387
/** Create clip for AR net **/
84-
cv::GArray<cv::GMat> clip = custom::ConstructClip::on(batch, tracked, ar_net_shape, frame_size);
88+
cv::GArray<cv::GMat> clip = custom::ConstructClip::on(batch, tracked, ar_net_shape, frame_size, current_person_id);
8589

8690
/** Action recognition **/
8791
cv::GArray<cv::GMat> actions = cv::gapi::infer2<nets::ActionRecognition>(fast_frame, clip);
@@ -90,7 +94,7 @@ int main(int argc, char *argv[]) {
9094
cv::GOpaque<int> label = custom::GestureRecognitionPostprocessing::on(actions, float(FLAGS_t));
9195

9296
/** Inputs and outputs of graph **/
93-
auto graph = cv::GComputation(cv::GIn(batch), cv::GOut(fast_frame, tracked, label));
97+
auto graph = cv::GComputation(cv::GIn(batch, current_person_id), cv::GOut(fast_frame, tracked, label));
9498
/** ---------------- End of graph ---------------- **/
9599
/** Configure networks **/
96100
auto person_detection = cv::gapi::ie::Params<nets::PersonDetection> {
@@ -118,17 +122,16 @@ int main(int argc, char *argv[]) {
118122
TrackedObjects out_detections;
119123
int out_label_number;
120124

121-
auto out_vector = cv::gout(out_frame, out_detections, out_label_number);
122-
123125
/** ---------------- The execution part ---------------- **/
124126
const float batch_constant_FPS = 15;
125-
127+
auto drop_batch = std::make_shared<bool>(false);
126128
pipeline.setSource(cv::gin(cv::gapi::wip::make_src<custom::CustomCapSource>(cap,
127129
frame_size,
128130
int(ar_net_shape[1]),
129-
batch_constant_FPS)));
131+
batch_constant_FPS,
132+
drop_batch),
133+
current_person_id_m));
130134

131-
std::string mainWindowName = "Gesture Recognition demo G-API";
132135
std::string gestureWindowName = "Gesture";
133136

134137
cv::Size graphSize{static_cast<int>(frame_size.width / 4), 60};
@@ -144,16 +147,16 @@ int main(int argc, char *argv[]) {
144147
/** Fill labels container from file with classes **/
145148
const auto labels = fill_labels(FLAGS_c);
146149
size_t current_id = 0;
147-
size_t id = 0;
150+
size_t last_id = current_id;
148151
int gesture = 0;
149152

150153
/** Configure drawing utilities **/
151-
Visualizer visualizer(FLAGS_no_show, mainWindowName, gestureWindowName, labels, FLAGS_s);
154+
Visualizer visualizer(FLAGS_no_show, gestureWindowName, labels, FLAGS_s);
152155

153156
bool isStart = true;
154157
const auto startTime = std::chrono::steady_clock::now();
155158
pipeline.start();
156-
while (pipeline.pull(std::move(out_vector))) {
159+
while (pipeline.pull(std::move(cv::gout(out_frame, out_detections, out_label_number)))) {
157160
/** Put FPS to frame**/
158161
if (isStart) {
159162
metrics.update(startTime, out_frame, { 10, 22 }, cv::FONT_HERSHEY_COMPLEX,
@@ -177,21 +180,20 @@ int main(int argc, char *argv[]) {
177180
/** Controls **/
178181
int key = cv::waitKey(1);
179182
if (key == 0x1B) break; // (esc button) exit
180-
else if (key >= 48 && key <= 57) id = key - 48; // buttons for person id
183+
else if (key >= 48 && key <= 57) current_id = key - 48; // buttons for person id
181184
else if (key == 0x0D) out_label_number = -1; // (Enter) reset last gesture
182185
else if (key == 'f') gesture = 1; // next gesture
183186
else if (key == 'b') gesture = -1; // prev gesture
184187
else
185188
presenter.handleKey(key);
186189

187190
/** Share id with graph **/
188-
if (id < out_detections.size()) {
189-
current_person_id = id;
190-
current_id = id;
191+
if (current_id < out_detections.size()) {
192+
*drop_batch = !(last_id != current_id);
193+
*current_person_id_m = current_id;
194+
last_id = current_id;
191195
}
192196
}
193-
/** Destroy windows if exist **/
194-
visualizer.finalize();
195197
slog::info << "Metrics report:" << slog::endl;
196198
slog::info << "\tFPS: " << std::fixed << std::setprecision(1) << metrics.getTotal().fps << slog::endl;
197199
slog::info << presenter.reportMeans() << slog::endl;

0 commit comments

Comments
 (0)