Skip to content

Commit 6c6b0ea

Browse files
authored
Merge pull request #1667 from luxonis/rvc4_hfr
Rvc4 hfr
2 parents 6451ee2 + 7c08892 commit 6c6b0ea

File tree

13 files changed

+459
-19
lines changed

13 files changed

+459
-19
lines changed

cmake/Depthai/DepthaiDeviceRVC4Config.cmake

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,4 +3,4 @@
33
set(DEPTHAI_DEVICE_RVC4_MATURITY "snapshot")
44

55
# "version if applicable"
6-
set(DEPTHAI_DEVICE_RVC4_VERSION "0.0.1+77bc43dab4950b360fe3f15e1b6df454a52e35f6")
6+
set(DEPTHAI_DEVICE_RVC4_VERSION "0.0.1+bb2705f681f80507098035bc33c1a27c1b0863fc")

examples/cpp/CMakeLists.txt

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -140,6 +140,7 @@ add_subdirectory(Events)
140140
add_subdirectory(FeatureTracker)
141141
add_subdirectory(ObjectTracker)
142142
add_subdirectory(HostNodes)
143+
add_subdirectory(HFR)
143144
add_subdirectory(ImageManip)
144145
add_subdirectory(IMU)
145146
add_subdirectory(Misc)

examples/cpp/HFR/CMakeLists.txt

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
# cmake_minimum_required() must be called before project() so that version
# policies are in effect when the project is configured (CMake convention;
# calling it after project() is flagged by CMake itself).
cmake_minimum_required(VERSION 3.10)
project(hfr_examples)

## function: dai_add_example(example_name example_src enable_test use_pcl)
## function: dai_set_example_test_labels(example_name ...)

dai_add_example(hfr_nn "hfr_nn.cpp" ON OFF)
dai_set_example_test_labels(hfr_nn ondevice rvc4 rvc4rgb ci)

dai_add_example(hfr_save_encoded "hfr_save_encoded.cpp" ON OFF)
dai_set_example_test_labels(hfr_save_encoded ondevice rvc4 rvc4rgb ci)

dai_add_example(hfr_small_preview "hfr_small_preview.cpp" ON OFF)
dai_set_example_test_labels(hfr_small_preview ondevice rvc4 rvc4rgb ci)

examples/cpp/HFR/README.md

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
# HFR (High Frame Rate) examples on IMX586 on the RVC4 platform
2+
This directory contains examples demonstrating high frame rate (HFR) capabilities on the IMX586 sensor using the RVC4 platform.
3+
The examples showcase an early preview of the capabilities.
4+
5+
6+
## HFR resolutions
7+
The HFR mode introduces two new resolutions that can run at a high frame-rate.
8+
At the current time, the resolutions cannot be scaled arbitrarily nor can the FPS be varied arbitrarily — it must be one of the two supported values.
9+
We plan to add more flexibility in the future and it's possible to use `ImageManip` in the meantime.
10+
11+
The current supported resolutions are:
12+
* 1920x1080 @ 240 FPS
13+
* 1280x720 @ 480 FPS
14+
15+
16+
## Example descriptions
17+
### Object Detection
18+
The object detection [example](hfr_nn.cpp) demonstrates how to use the HFR mode with a YOLOv6 model for real-time object detection at **480 FPS**.
19+
20+
### Small live preview
21+
The small preview [example](hfr_small_preview.cpp) demonstrates how to use the HFR mode for a small live preview at **240 FPS** or **480 FPS**.
22+
23+
### Video encoding
24+
The video encoding [example](hfr_save_encoded.cpp) demonstrates how to use the HFR mode for video encoding at **240 FPS** or **480 FPS**.
25+
26+
All three examples have a `BenchmarkIn` node included that prints the framerate and the latency. This is the expected output:
27+
```
28+
[2025-08-14 23:31:49.487] [ThreadedNode] [warning] FPS: 474.3766
29+
[2025-08-14 23:31:49.487] [ThreadedNode] [warning] Messages took 1.0118543 s
30+
[2025-08-14 23:31:49.487] [ThreadedNode] [warning] Average latency: 0.05904912 s
31+
```

examples/cpp/HFR/hfr_nn.cpp

Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
#include <optional>
2+
3+
#include "depthai/depthai.hpp"
4+
5+
constexpr int FPS = 480;
6+
7+
int main() {
8+
dai::Pipeline pipeline;
9+
10+
auto platform = pipeline.getDefaultDevice()->getPlatform();
11+
if(platform != dai::Platform::RVC4) {
12+
std::cerr << "This example is only supported on RVC4 devices\n" << std::flush;
13+
return -1;
14+
}
15+
16+
dai::NNModelDescription modelDescription;
17+
modelDescription.model = "yolov6-nano";
18+
modelDescription.platform = "RVC4";
19+
20+
auto nnArchivePath = getModelFromZoo(modelDescription);
21+
dai::NNArchive nnArchive(nnArchivePath);
22+
auto inputSize = nnArchive.getInputSize().value();
23+
24+
auto cameraNode = pipeline.create<dai::node::Camera>()->build();
25+
26+
// Configure the ImageManip as in HFR mode requesting arbitrary outputs is not yet supported
27+
auto* cameraOutput = cameraNode->requestOutput(std::make_pair(1280, 720), std::nullopt, dai::ImgResizeMode::CROP, static_cast<float>(FPS));
28+
29+
auto imageManip = pipeline.create<dai::node::ImageManip>();
30+
imageManip->initialConfig->setOutputSize(std::get<0>(inputSize), std::get<1>(inputSize));
31+
imageManip->setMaxOutputFrameSize(static_cast<int>(std::get<0>(inputSize) * std::get<1>(inputSize) * 3));
32+
imageManip->initialConfig->setFrameType(dai::ImgFrame::Type::BGR888i);
33+
imageManip->inputImage.setMaxSize(12);
34+
cameraOutput->link(imageManip->inputImage);
35+
36+
auto detectionNetwork = pipeline.create<dai::node::DetectionNetwork>();
37+
detectionNetwork->setNNArchive(nnArchive);
38+
imageManip->out.link(detectionNetwork->input);
39+
40+
auto benchmarkIn = pipeline.create<dai::node::BenchmarkIn>();
41+
benchmarkIn->setRunOnHost(true);
42+
benchmarkIn->sendReportEveryNMessages(FPS);
43+
detectionNetwork->out.link(benchmarkIn->input);
44+
45+
auto qDet = detectionNetwork->out.createOutputQueue();
46+
pipeline.start();
47+
48+
while(pipeline.isRunning()) {
49+
auto inDet = qDet->get<dai::ImgDetections>();
50+
if(inDet == nullptr) continue;
51+
(void)inDet;
52+
}
53+
54+
return 0;
55+
}
Lines changed: 106 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,106 @@
1+
#include <atomic>
2+
#include <chrono>
3+
#include <csignal>
4+
#include <fstream>
5+
#include <iostream>
6+
#include <optional>
7+
#include <thread>
8+
9+
#include "depthai/depthai.hpp"
10+
#include "depthai/pipeline/datatype/MessageGroup.hpp"
11+
12+
constexpr std::pair<int, int> SIZE = {1280, 720};
13+
constexpr int FPS = 480;
14+
15+
// Set asynchronously from the SIGINT/SIGTERM handler; polled by the main loop.
std::atomic<bool> quitEvent(false);

// Signal handler requesting a clean shutdown of the pipeline loop.
// Only performs an atomic store, which is async-signal-safe.
void signalHandler(int signum) {
    (void)signum;  // unused — same handler is registered for SIGINT and SIGTERM
    quitEvent = true;
}
20+
21+
// Host-side custom node that appends every received encoded frame to the raw
// bitstream file "video_hfr.encoded". The file can later be wrapped into a
// container (e.g. .mp4) with ffmpeg — see the instructions printed by main().
class VideoSaver : public dai::node::CustomNode<VideoSaver> {
   public:
    // Opens the output file up front; throws if it cannot be created so the
    // pipeline fails fast instead of silently dropping data.
    VideoSaver() : fileHandle("video_hfr.encoded", std::ios::binary) {
        if(!fileHandle.is_open()) {
            throw std::runtime_error("Could not open video_hfr.encoded for writing");
        }
    }

    // Rule of Zero: no user-declared destructor — std::ofstream flushes and
    // closes the file automatically on destruction.

    // Called for every message group; writes the encoded bitstream of the
    // "data" frame to disk. Returns nullptr as this node emits no messages.
    std::shared_ptr<dai::Buffer> processGroup(std::shared_ptr<dai::MessageGroup> message) override {
        if(!fileHandle.is_open()) return nullptr;

        auto frame = message->get<dai::EncodedFrame>("data");
        // Bind the payload once so .data() and .size() refer to the same
        // object; the original called getData() twice, which could yield
        // mismatched/dangling temporaries if getData() returns by value —
        // TODO(review): confirm dai::EncodedFrame::getData() return semantics.
        const auto& payload = frame->getData();
        fileHandle.write(reinterpret_cast<const char*>(payload.data()), static_cast<std::streamsize>(payload.size()));

        return nullptr;
    }

   private:
    std::ofstream fileHandle;  // owns the output file (RAII)
};
49+
50+
int main() {
51+
signal(SIGTERM, signalHandler);
52+
signal(SIGINT, signalHandler);
53+
54+
dai::Pipeline pipeline;
55+
56+
auto platform = pipeline.getDefaultDevice()->getPlatform();
57+
if(platform != dai::Platform::RVC4) {
58+
std::cerr << "This example is only supported on RVC4 devices\n" << std::flush;
59+
return -1;
60+
}
61+
62+
auto camRgb = pipeline.create<dai::node::Camera>()->build(dai::CameraBoardSocket::CAM_A);
63+
auto* output = camRgb->requestOutput(SIZE, std::nullopt, dai::ImgResizeMode::CROP, static_cast<float>(FPS));
64+
65+
// ImageManip is added to workaround a limitation with VideoEncoder with native resolutions
66+
// This limitation will be lifted in the future
67+
auto imageManip = pipeline.create<dai::node::ImageManip>();
68+
imageManip->initialConfig->setOutputSize(SIZE.first, SIZE.second + 10);
69+
imageManip->setMaxOutputFrameSize(static_cast<int>(SIZE.first * (SIZE.second + 10) * 1.6));
70+
imageManip->inputImage.setMaxSize(12);
71+
output->link(imageManip->inputImage);
72+
auto encodedInput = imageManip->out;
73+
74+
auto benchmarkIn = pipeline.create<dai::node::BenchmarkIn>();
75+
benchmarkIn->setRunOnHost(true);
76+
77+
auto encoded = pipeline.create<dai::node::VideoEncoder>();
78+
encoded->setDefaultProfilePreset(static_cast<float>(FPS), dai::VideoEncoderProperties::Profile::H264_MAIN);
79+
encodedInput.link(encoded->input);
80+
encoded->out.link(benchmarkIn->input);
81+
82+
auto saver = pipeline.create<VideoSaver>();
83+
encoded->out.link(saver->inputs["data"]);
84+
85+
pipeline.start();
86+
87+
std::cout << "Started to save video to video_hfr.encoded" << std::endl;
88+
std::cout << "Press Ctrl+C to stop" << std::endl;
89+
90+
while(pipeline.isRunning() && !quitEvent) {
91+
std::this_thread::sleep_for(std::chrono::seconds(1));
92+
}
93+
94+
pipeline.stop();
95+
pipeline.wait();
96+
97+
std::cout << "To view the encoded data, convert the stream file (.encoded) into a video file (.mp4) using a command below:" << std::endl;
98+
std::cout << "ffmpeg -framerate " << FPS << " -i video_hfr.encoded -c copy video_hfr.mp4" << std::endl;
99+
100+
std::cout << "If the FPS is not set correctly, you can ask ffmpeg to generate it with the command below" << std::endl;
101+
102+
std::cout << "\nffmpeg -fflags +genpts -r " << FPS << " -i video_hfr.encoded \\\n -vsync cfr -fps_mode cfr \\\n -video_track_timescale " << FPS
103+
<< "00 \\\n -c:v copy \\\n video_hfr.mp4\n";
104+
105+
return 0;
106+
}
Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
#include <opencv2/opencv.hpp>
2+
#include <optional>
3+
4+
#include "depthai/depthai.hpp"
5+
6+
constexpr std::pair<int, int> SIZE = {1280, 720};
7+
constexpr int FPS = 480;
8+
9+
int main() {
10+
dai::Pipeline pipeline;
11+
12+
auto platform = pipeline.getDefaultDevice()->getPlatform();
13+
if(platform != dai::Platform::RVC4) {
14+
std::cerr << "This example is only supported on RVC4 devices\n" << std::flush;
15+
return -1;
16+
}
17+
18+
auto cam = pipeline.create<dai::node::Camera>()->build();
19+
auto benchmarkIn = pipeline.create<dai::node::BenchmarkIn>();
20+
benchmarkIn->setRunOnHost(true);
21+
benchmarkIn->sendReportEveryNMessages(FPS);
22+
23+
auto imageManip = pipeline.create<dai::node::ImageManip>();
24+
imageManip->initialConfig->setOutputSize(250, 250);
25+
imageManip->setMaxOutputFrameSize(static_cast<int>(250 * 250 * 1.6));
26+
27+
// One of the two modes can be selected
28+
// NOTE: Generic resolutions are not yet supported through camera node when using HFR mode
29+
auto* output = cam->requestOutput(SIZE, std::nullopt, dai::ImgResizeMode::CROP, static_cast<float>(FPS));
30+
31+
output->link(imageManip->inputImage);
32+
imageManip->out.link(benchmarkIn->input);
33+
34+
auto outputQueue = imageManip->out.createOutputQueue();
35+
36+
pipeline.start();
37+
38+
while(pipeline.isRunning()) {
39+
auto imgFrame = outputQueue->get<dai::ImgFrame>();
40+
if(imgFrame == nullptr) continue;
41+
cv::imshow("frame", imgFrame->getCvFrame());
42+
if(cv::waitKey(1) == 'q') {
43+
break;
44+
}
45+
}
46+
47+
return 0;
48+
}

examples/python/HFR/.gitignore

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
*.encoded
2+
*.mp4

examples/python/HFR/README.md

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
# HFR (High Frame Rate) examples on IMX586 on the RVC4 platform
2+
This directory contains examples demonstrating high frame rate (HFR) capabilities on the IMX586 sensor using the RVC4 platform.
3+
The examples showcase an early preview of the capabilities.
4+
5+
6+
## HFR resolutions
7+
The HFR mode introduces two new resolutions that can run at a high frame-rate.
8+
At the current time, the resolutions cannot be scaled arbitrarily nor can the FPS be varied arbitrarily — it must be one of the two supported values.
9+
We plan to add more flexibility in the future and it's possible to use `ImageManip` in the meantime.
10+
11+
The current supported resolutions are:
12+
* 1920x1080 @ 240 FPS
13+
* 1280x720 @ 480 FPS
14+
15+
16+
## Example descriptions
17+
### Object Detection
18+
The object detection [example](hfr_nn.py) demonstrates how to use the HFR mode with a YOLOv6 model for real-time object detection at **480 FPS**.
19+
20+
### Small live preview
21+
The small preview [example](hfr_small_preview.py) demonstrates how to use the HFR mode for a small live preview at **240 FPS** or **480 FPS**.
22+
23+
### Video encoding
24+
The video encoding [example](hfr_save_encoded.py) demonstrates how to use the HFR mode for video encoding at **240 FPS** or **480 FPS**.
25+
26+
All three examples have a `BenchmarkIn` node included that prints the framerate and the latency. This is the expected output:
27+
```
28+
[2025-08-14 23:31:49.487] [ThreadedNode] [warning] FPS: 474.3766
29+
[2025-08-14 23:31:49.487] [ThreadedNode] [warning] Messages took 1.0118543 s
30+
[2025-08-14 23:31:49.487] [ThreadedNode] [warning] Average latency: 0.05904912 s
31+
```

examples/python/HFR/hfr_nn.py

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,42 @@
1+
#!/usr/bin/env python3
# HFR object detection example: runs YOLOv6-nano on a 1280x720 @ 480 FPS
# camera stream and reports FPS/latency via a BenchmarkIn node. RVC4 only.
import depthai as dai

FPS = 480

with dai.Pipeline() as pipeline:
    platform = pipeline.getDefaultDevice().getPlatform()
    if platform != dai.Platform.RVC4:
        raise RuntimeError("This example is only supported on RVC4 devices")

    # Download the model archive for the RVC4 platform from the model zoo
    archivePath = dai.getModelFromZoo(dai.NNModelDescription("yolov6-nano", platform="RVC4"))
    archive = dai.NNArchive(archivePath)
    nnInputSize = archive.getInputSize()
    camera = pipeline.create(dai.node.Camera).build()

    # Configure the ImageManip as in HFR mode requesting arbitrary outputs is not yet supported
    cameraStream = camera.requestOutput((1280, 720), fps=FPS)
    manip = pipeline.create(dai.node.ImageManip)
    manip.initialConfig.setOutputSize(nnInputSize[0], nnInputSize[1])
    manip.setMaxOutputFrameSize(int(nnInputSize[0] * nnInputSize[1] * 3))  # BGR888i: 3 bytes per pixel
    manip.initialConfig.setFrameType(dai.ImgFrame.Type.BGR888i)
    manip.inputImage.setMaxSize(12)  # allow up to 12 frames to queue at the 480 FPS input rate
    cameraStream.link(manip.inputImage)

    # Configure the DetectionNetwork
    network = pipeline.create(dai.node.DetectionNetwork)
    network.setNNArchive(archive)
    manip.out.link(network.input)

    # Host-side benchmark node prints FPS and latency once every FPS messages
    benchmark = pipeline.create(dai.node.BenchmarkIn)
    benchmark.setRunOnHost(True)
    benchmark.sendReportEveryNMessages(FPS)
    network.out.link(benchmark.input)

    detectionQueue = network.out.createOutputQueue()
    pipeline.start()

    while pipeline.isRunning():
        detections: dai.ImgDetections = detectionQueue.get()
        # print(f"Got {len(detections.detections)} nn detections ")

0 commit comments

Comments
 (0)