#include <iomanip>
#include <iostream>
#include <sstream>

#include <opencv2/opencv.hpp>

// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"
5+
6+ int main (){
7+ dai::Pipeline pipeline;
8+
9+ auto cam = pipeline.create <dai::node::ColorCamera>();
10+ cam->setBoardSocket (dai::CameraBoardSocket::RGB);
11+ cam->setInterleaved (false );
12+ cam->setIspScale (2 , 3 );
13+ cam->setVideoSize (720 , 720 );
14+ cam->setPreviewSize (300 , 300 );
15+
16+ auto xoutRgb = pipeline.create <dai::node::XLinkOut>();
17+ xoutRgb->setStreamName (" rgb" );
18+ cam->video .link (xoutRgb->input );
19+
20+ auto script = pipeline.create <dai::node::Script>();
21+
22+ auto xin = pipeline.create <dai::node::XLinkIn>();
23+ xin->setStreamName (" in" );
24+ xin->out .link (script->inputs [" toggle" ]);
25+
26+ cam->preview .link (script->inputs [" rgb" ]);
27+ script->setScript (R"(
28+ toggle = False
29+ while True:
30+ msg = node.io['toggle'].tryGet()
31+ if msg is not None:
32+ toggle = msg.getData()[0]
33+ node.warn('Toggle! Perform NN inferencing: ' + str(toggle))
34+ frame = node.io['rgb'].get()
35+ if toggle:
36+ node.io['nn'].send(frame)
37+ )" );
38+
39+ auto nn = pipeline.create <dai::node::MobileNetDetectionNetwork>();
40+ nn->setBlobPath (BLOB_PATH);
41+ script->outputs [" nn" ].link (nn->input );
42+
43+ auto xoutNn = pipeline.create <dai::node::XLinkOut>();
44+ xoutNn->setStreamName (" nn" );
45+ nn->out .link (xoutNn->input );
46+
47+ // Connect to device with pipeline
48+ dai::Device device (pipeline);
49+ auto inQ = device.getInputQueue (" in" );
50+ auto qRgb = device.getOutputQueue (" rgb" );
51+ auto qNn = device.getOutputQueue (" nn" );
52+
53+ bool runNn = false ;
54+
55+ auto color = cv::Scalar (255 , 127 , 0 );
56+
57+ auto drawDetections = [color](cv::Mat frame, std::vector<dai::ImgDetection>& detections){
58+ for (auto & detection : detections) {
59+ int x1 = detection.xmin * frame.cols ;
60+ int y1 = detection.ymin * frame.rows ;
61+ int x2 = detection.xmax * frame.cols ;
62+ int y2 = detection.ymax * frame.rows ;
63+
64+ std::stringstream confStr;
65+ confStr << std::fixed << std::setprecision (2 ) << detection.confidence * 100 ;
66+ cv::putText (frame, confStr.str (), cv::Point (x1 + 10 , y1 + 20 ), cv::FONT_HERSHEY_TRIPLEX, 0.5 , color);
67+ cv::rectangle (frame, cv::Rect (cv::Point (x1, y1), cv::Point (x2, y2)), color, cv::FONT_HERSHEY_SIMPLEX);
68+ }
69+ };
70+
71+ while (true ) {
72+ auto frame = qRgb->get <dai::ImgFrame>()->getCvFrame ();
73+ auto imgDetections = qNn->tryGet <dai::ImgDetections>();
74+ if (imgDetections != nullptr ){
75+ auto detections = imgDetections->detections ;
76+ drawDetections (frame, detections);
77+ }
78+ std::string frameText = " NN inferencing: " ;
79+ if (runNn){
80+ frameText += " On" ;
81+ } else {
82+ frameText += " Off" ;
83+ }
84+ cv::putText (frame, frameText, cv::Point (20 , 20 ), cv::FONT_HERSHEY_TRIPLEX, 0.7 , color);
85+ cv::imshow (" Color frame" , frame);
86+
87+ int key = cv::waitKey (1 );
88+ if (key == ' q' ) {
89+ return 0 ;
90+ } else if (key == ' t' ) {
91+ if (runNn){
92+ std::cout << " Disabling\n " ;
93+ } else {
94+ std::cout << " Enabling\n " ;
95+ }
96+ runNn = !runNn;
97+ auto buf = dai::Buffer ();
98+ std::vector<uint8_t > messageData;
99+ messageData.push_back (runNn);
100+ buf.setData (messageData);
101+ inQ->send (buf);
102+ }
103+ }
104+ }