// Includes common necessary includes for development using depthai library
#include "depthai/depthai.hpp"

-static constexpr auto FPS = 15;
+static constexpr auto FPS = 30;

int main() {
    dai::Pipeline pipeline;

    // Define a source - color camera
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
-    camRgb->setInterleaved(true);
-    camRgb->setPreviewSize(640, 360);
+    // Since we are saving RGB frames in the Script node we need to make the
+    // video pool size larger, otherwise the pipeline will freeze because
+    // the ColorCamera won't be able to produce new video frames.
+    camRgb->setVideoNumFramesPool(10);
    camRgb->setFps(FPS);

    auto left = pipeline.create<dai::node::MonoCamera>();
@@ -37,17 +39,20 @@ int main() {
    // Script node will sync high-res frames
    auto script = pipeline.create<dai::node::Script>();

-    // Send all streams to the Script node so we can sync them
+    // Send both streams to the Script node so we can sync them
    stereo->disparity.link(script->inputs["disp_in"]);
-    camRgb->preview.link(script->inputs["rgb_in"]);
+    camRgb->video.link(script->inputs["rgb_in"]);
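+    // The "video" output produces larger NV12 frames, whereas the "preview" output
+    // used previously produced smaller 640x360 interleaved frames.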

    script->setScript(R"(
-        FPS=15
+        FPS=30
        import time
        from datetime import timedelta
        import math

-        MS_THRESHOL=math.ceil(500 / FPS) # Timestamp threshold (in miliseconds) under which frames will be considered synced
+        # Timestamp threshold (in milliseconds) under which frames will be considered synced.
+        # A lower number means frames will have less delay between them, which can potentially
+        # lead to dropped frames.
+        MS_THRESHOL=math.ceil(500 / FPS)
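+        # For example, at FPS=30 this evaluates to math.ceil(500 / 30) = 17 ms,
+        # which is roughly half of the ~33 ms frame interval.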

        def check_sync(queues, timestamp):
            matching_frames = []
@@ -93,15 +98,17 @@ int main() {
    )");

    std::vector<std::string> scriptOut{"disp", "rgb"};
+    // Create XLinkOut for disp/rgb streams
    for(auto& name : scriptOut) {
        auto xout = pipeline.create<dai::node::XLinkOut>();
        xout->setStreamName(name);
        script->outputs[name + "_out"].link(xout->input);
    }

    dai::Device device(pipeline);
-    // Rgb should be the first - as we will first.get() that frame, as it will arrive the latest to the host
-    // because it first needs to be converted to NV12 and then encoded to H264.
+
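+    // Raise the device log level to INFO so device-side log messages
+    // (including prints from the Script node) are shown on the host console.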
+    device.setLogOutputLevel(dai::LogLevel::INFO);
+    device.setLogLevel(dai::LogLevel::INFO);
    std::vector<std::string> names{"rgb", "disp"};
    std::map<std::string, std::shared_ptr<dai::DataOutputQueue>> streams;
    for(auto& name : names) {
@@ -112,6 +119,7 @@ int main() {
        auto name = iter.first;
        auto queue = iter.second;
        auto img = queue->get<dai::ImgFrame>();
+        // Display timestamp/sequence number of two synced frames
        std::cout << "Stream " << name << ", timestamp: " << img->getTimestamp().time_since_epoch().count()
                  << ", sequence number: " << img->getSequenceNum() << std::endl;
    }