
Commit a0275bc

Author: Jakob Socan (committed)
Merge remote-tracking branch 'origin/develop' into fw_asan_fix
2 parents: 9b32e73 + 39f5f29

26 files changed: +655 additions, -223 deletions

.github/workflows/bom.yml

Lines changed: 8 additions & 2 deletions
@@ -24,6 +24,9 @@ on:
         type: string
         default: "stereo"
         description: "Which device to test - stereo or rgb"
+      additional_options:
+        required: false
+        type: string
 
 jobs:
   id:
@@ -95,11 +98,14 @@ jobs:
            HOLD_RESERVATION="--hold-reservation"
          fi
 
+         if [[ -n "${{ github.event.inputs.additional_options }}" ]]; then
+           ADDITIONAL_OPTIONS="${{ github.event.inputs.additional_options }}"
+         fi
+
          if [[ "${{ github.event.inputs.device }}" == 'stereo' ]]; then
            MODELS="oak4_pro or oak4_d"
          else
            MODELS="oak4_s"
            RGB="rgb"
          fi
-
-         exec hil $HOLD_RESERVATION --models "$MODELS" $RESERVATION_OPTION --wait --docker-image ${{ secrets.CONTAINER_REGISTRY }}/depthai-core-hil:${{ needs.build_docker_container.outputs.tag }} --commands "./tests/run_tests_entrypoint.sh rvc4$RGB"
+         exec hil $HOLD_RESERVATION --models "$MODELS" $RESERVATION_OPTION --wait $ADDITIONAL_OPTIONS --docker-image ${{ secrets.CONTAINER_REGISTRY }}/depthai-core-hil:${{ needs.build_docker_container.outputs.tag }} --commands "./tests/run_tests_entrypoint.sh rvc4$RGB"
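For reference, the new additional_options input is forwarded verbatim into the hil command line as $ADDITIONAL_OPTIONS. A minimal sketch of dispatching the workflow with it set, via the GitHub REST API — assuming the inputs block belongs to a workflow_dispatch trigger, which the github.event.inputs references suggest; the requests client, GITHUB_TOKEN handling, and OWNER/REPO placeholders are illustrative, and --hold-reservation is used as the example value only because the workflow itself shows it as a valid hil flag:

import os

import requests

# Hypothetical repository coordinates; bom.yml is the workflow changed above.
url = "https://api.github.com/repos/OWNER/REPO/actions/workflows/bom.yml/dispatches"

resp = requests.post(
    url,
    headers={
        "Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}",
        "Accept": "application/vnd.github+json",
    },
    json={
        "ref": "develop",
        "inputs": {
            "device": "stereo",
            # Forwarded as $ADDITIONAL_OPTIONS into the hil command line.
            "additional_options": "--hold-reservation",
        },
    },
)
resp.raise_for_status()  # GitHub returns 204 No Content on success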

.github/workflows/python-main.yml

Lines changed: 122 additions & 21 deletions
@@ -387,17 +387,40 @@ jobs:
           cd bindings/python && mv wheelhouse/audited wheelhouse/audited_pre && mkdir -p wheelhouse/audited
           echo "Combining repaired wheels into one master wheel"
           python3 ci/combine_wheels.py --input_folder=wheelhouse/audited_pre --output_folder=wheelhouse/audited
+      - name: Upload combined wheels as an artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: audited-wheels-combined-macos-${{ matrix.os }}
+          path: bindings/python/wheelhouse/audited/*
+      - name: Append build hash if not a tagged commit
+        if: startsWith(github.ref, 'refs/tags/v') != true
+        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV
+      - name: Install combined wheel and run a smoke-test
+        if: startsWith(github.ref, 'refs/tags/v') != true
+        env:
+          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
+        run: |
+          set -euo pipefail
+
+          # Resolve the exact dev version (includes commit hash)
+          ver=$(python -c "import os,sys,pathlib; sys.path.insert(0, str(pathlib.Path('bindings/python').resolve())); import find_version as v; print(v.get_package_dev_version(os.environ['BUILD_COMMIT_HASH']))")
+          echo "Installing depthai==$ver using $(python -V)"
+
+          # Get the name of the one file in bindings/python/wheelhouse/audited
+          wheel=$(ls bindings/python/wheelhouse/audited/*.whl)
+
+          # Install wheel
+          python -m pip install -U pip
+          python -m pip install --force-reinstall $wheel
+
+          EXPECTED_VERSION="$ver" python bindings/python/ci/smoke_depthai.py
       - name: Upload combined wheels to artifactory
+        if: success()
         run: cd bindings/python && bash ./ci/upload-artifactory.sh
         env:
           ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
           ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
           ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
-      - name: Upload the combined wheels as an artifact
-        uses: actions/upload-artifact@v4
-        with:
-          name: audited-wheels-combined-macos-${{ matrix.os }}
-          path: bindings/python/wheelhouse/audited/*
 
   # This job builds wheels for x86_64 arch
   build-linux-x86_64:
@@ -487,17 +510,43 @@ jobs:
           cd bindings/python && mv wheelhouse/audited wheelhouse/audited_pre && mkdir -p wheelhouse/audited
           echo "Combining repaired wheels into one master wheel"
           python3 ci/combine_wheels.py --input_folder=wheelhouse/audited_pre --output_folder=wheelhouse/audited
+      - name: Upload combined wheels as an artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: audited-wheels-combined-linux-x86_64
+          path: bindings/python/wheelhouse/audited/*
+      - name: Append build hash if not a tagged commit
+        if: startsWith(github.ref, 'refs/tags/v') != true
+        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV
+      - name: Install combined wheel and run a smoke-test
+        if: startsWith(github.ref, 'refs/tags/v') != true
+        env:
+          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
+        run: |
+          set -euo pipefail
+
+          PYBIN="/opt/python/cp310-cp310/bin/python"
+
+          # Resolve the exact dev version (includes commit hash)
+          ver=$("$PYBIN" -c "import os,sys,pathlib; sys.path.insert(0, str(pathlib.Path('bindings/python').resolve())); import find_version as v; print(v.get_package_dev_version(os.environ['BUILD_COMMIT_HASH']))")
+          echo "Installing depthai==$ver using $($PYBIN -V)"
+
+          # Get the name of the one file in bindings/python/wheelhouse/audited
+          wheel=$(ls bindings/python/wheelhouse/audited/*.whl)
+
+          "$PYBIN" -m ensurepip --upgrade || true
+          "$PYBIN" -m pip install -U pip
+          "$PYBIN" -m pip install --force-reinstall $wheel
+
+          # Smoke test: fail hard on any exception or version mismatch
+          EXPECTED_VERSION="$ver" "$PYBIN" bindings/python/ci/smoke_depthai.py
       - name: Upload combined wheels to artifactory
+        if: success()
         run: cd bindings/python && bash ./ci/upload-artifactory.sh
         env:
           ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
           ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
           ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
-      - name: Upload the combined wheels as an artifact
-        uses: actions/upload-artifact@v4
-        with:
-          name: audited-wheels-combined-linux-x86_64
-          path: bindings/python/wheelhouse/audited/*
 
   # This job builds wheels for ARM64 arch
   build-linux-arm64:
@@ -592,17 +641,43 @@ jobs:
           cd bindings/python && mv wheelhouse/audited wheelhouse/audited_pre && mkdir -p wheelhouse/audited
           echo "Combining repaired wheels into one master wheel"
           python3 ci/combine_wheels.py --input_folder=wheelhouse/audited_pre --output_folder=wheelhouse/audited
+      - name: Upload combined wheels as an artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: audited-wheels-combined-linux-arm64
+          path: bindings/python/wheelhouse/audited/*
+      - name: Append build hash if not a tagged commit
+        if: startsWith(github.ref, 'refs/tags/v') != true
+        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" >> $GITHUB_ENV
+      - name: Install combined wheel and run a smoke-test
+        if: startsWith(github.ref, 'refs/tags/v') != true
+        env:
+          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
+        run: |
+          set -euo pipefail
+
+          PYBIN="/opt/python/cp310-cp310/bin/python"
+
+          # Resolve the exact dev version (includes commit hash)
+          ver=$("$PYBIN" -c "import os,sys,pathlib; sys.path.insert(0, str(pathlib.Path('bindings/python').resolve())); import find_version as v; print(v.get_package_dev_version(os.environ['BUILD_COMMIT_HASH']))")
+          echo "Installing depthai==$ver using $($PYBIN -V)"
+
+          # Get the name of the one file in bindings/python/wheelhouse/audited
+          wheel=$(ls bindings/python/wheelhouse/audited/*.whl)
+
+          # Install combined wheel
+          "$PYBIN" -m ensurepip --upgrade || true
+          "$PYBIN" -m pip install -U pip
+          "$PYBIN" -m pip install --force-reinstall $wheel
+
+          EXPECTED_VERSION="$ver" "$PYBIN" bindings/python/ci/smoke_depthai.py
       - name: Upload combined wheels to artifactory
+        if: success()
         run: cd bindings/python && bash ./ci/upload-artifactory.sh
         env:
           ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
           ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
           ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
-      - name: Upload the combined wheels as an artifact
-        uses: actions/upload-artifact@v4
-        with:
-          name: audited-wheels-combined-linux-arm64
-          path: bindings/python/wheelhouse/audited/*
 
   combine-windows-x86_64-wheels:
     needs: build-windows-x86_64
@@ -627,18 +702,44 @@ jobs:
           mkdir -p wheelhouse/audited
           echo "Combining repaired wheels into one master wheel"
           python ci/combine_wheels.py --input_folder=wheelhouse/audited_pre --output_folder=wheelhouse/audited --log_level=debug
+      - name: Upload combined wheels as an artifact
+        uses: actions/upload-artifact@v4
+        with:
+          name: audited-wheels-combined-windows-x86_64
+          path: bindings/python/wheelhouse/audited/*
+      - name: Append build hash if not a tagged commit
+        if: startsWith(github.ref, 'refs/tags/v') != true
+        run: echo "BUILD_COMMIT_HASH=${{github.sha}}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
+      - name: Install combined wheel and run a smoke-test
+        if: startsWith(github.ref, 'refs/tags/v') != true
+        shell: pwsh
+        env:
+          ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
+        run: |
+          $env:PYTHONIOENCODING = 'utf-8'
+          $ErrorActionPreference = "Stop" # Fail on any error
+
+          # Resolve the exact dev version (includes commit hash)
+          $ver = python -c "import os,sys,pathlib; sys.path.insert(0,str(pathlib.Path('bindings/python').resolve())); import find_version as v; print(v.get_package_dev_version(os.environ['BUILD_COMMIT_HASH']))"
+          Write-Host "Installing depthai==$ver using:"; python -VV
+
+          # Get the name of the one file in bindings/python/wheelhouse/audited
+          $wheel_name = (Get-ChildItem bindings/python/wheelhouse/audited/*.whl).Name
+
+          # Install combined wheel
+          python -m pip install -U pip
+          python -m pip install --force-reinstall bindings/python/wheelhouse/audited/$wheel_name
+
+          # Smoke test (no heredoc; YAML-safe). Fail on import error or version mismatch.
+          $env:EXPECTED_VERSION = $ver
+          python bindings/python/ci/smoke_depthai.py
       - name: Upload combined wheels to artifactory
+        if: success()
         run: cd bindings/python && bash ./ci/upload-artifactory.sh
         env:
           ARTIFACTORY_URL: ${{ secrets.ARTIFACTORY_URL }}
           ARTIFACTORY_USER: ${{ secrets.ARTIFACTORY_USER }}
           ARTIFACTORY_PASS: ${{ secrets.ARTIFACTORY_PASS }}
-      - name: Upload the combined wheels as an artifact
-        uses: actions/upload-artifact@v4
-        with:
-          name: audited-wheels-combined-windows-x86_64
-          path: bindings/python/wheelhouse/audited/*
-
   release:
     if: startsWith(github.ref, 'refs/tags/v')
     # needs: [pytest, build-linux-armhf, build-windows-x86_64, build-macos-x86_64, build-macos-arm64, build-linux-x86_64, build-linux-arm64]
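All four jobs add the same pattern: resolve the dev version from the commit hash, force-reinstall the combined wheel, then run the smoke script with EXPECTED_VERSION set, gating the artifactory upload on its success. A rough local equivalent in Python, assuming the depthai repo layout shown in the workflow (this mirrors the CI steps; it is not an official script):

import os
import pathlib
import subprocess
import sys

# Make the repo's find_version module importable, as the workflow does.
sys.path.insert(0, str(pathlib.Path("bindings/python").resolve()))
import find_version as v  # provided by the depthai repository

# The workflow exports the commit SHA as BUILD_COMMIT_HASH beforehand.
ver = v.get_package_dev_version(os.environ["BUILD_COMMIT_HASH"])
print(f"Installing depthai=={ver}")

# There should be exactly one combined wheel after combine_wheels.py ran.
wheel = next(pathlib.Path("bindings/python/wheelhouse/audited").glob("*.whl"))
subprocess.check_call([sys.executable, "-m", "pip", "install", "--force-reinstall", str(wheel)])

# The smoke script exits non-zero on import failure or version mismatch,
# which fails the job before anything reaches artifactory.
env = dict(os.environ, EXPECTED_VERSION=ver)
subprocess.check_call([sys.executable, "bindings/python/ci/smoke_depthai.py"], env=env)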

CMakeLists.txt

Lines changed: 1 addition & 1 deletion
@@ -66,7 +66,7 @@ endif()
 
 
 # Create depthai project
-project(depthai VERSION "3.0.0" LANGUAGES CXX C)
+project(depthai VERSION "3.1.0" LANGUAGES CXX C)
 set(DEPTHAI_PRE_RELEASE_TYPE "") # Valid options are "alpha", "beta", "rc", ""
 set(DEPTHAI_PRE_RELEASE_VERSION "0") # Valid options are "0", "1", "2", ...

bindings/python/ci/smoke_depthai.py

Lines changed: 16 additions & 0 deletions

@@ -0,0 +1,16 @@
+import os, sys, traceback, platform
+
+try:
+    import depthai as dai
+except Exception:
+    traceback.print_exc()
+    sys.exit(1)
+
+expected = os.environ.get("EXPECTED_VERSION") or os.environ.get("ver")
+installed = getattr(dai, "__version__", "<unknown>")
+if installed != expected:
+    print(f"Version mismatch: installed {installed} vs expected {expected}", file=sys.stderr)
+    sys.exit(1)
+
+print("depthai:", installed)
+print("python:", platform.python_version(), "ABI:", sys.implementation.cache_tag)

bindings/python/setup.py

Lines changed: 1 addition & 1 deletion
@@ -280,7 +280,7 @@ def build_extension(self, ext):
     ],
     python_requires='>=3.7',
     install_requires=[
-        "numpy>=2.0.0",
+        "numpy<3.0.0",
     ],
     entry_points={
         "console_scripts": [

bindings/python/src/DatatypeBindings.cpp

Lines changed: 7 additions & 1 deletion
@@ -131,5 +131,11 @@ void DatatypeBindings::bind(pybind11::module& m, void* pCallstack) {
         .value("PointCloudData", DatatypeEnum::PointCloudData)
         .value("ImageAlignConfig", DatatypeEnum::ImageAlignConfig)
         .value("ImgAnnotations", DatatypeEnum::ImgAnnotations)
-        .value("RGBDData", DatatypeEnum::RGBDData);
+        .value("RGBDData", DatatypeEnum::RGBDData)
+        .value("ImageFiltersConfig", DatatypeEnum::ImageFiltersConfig)
+        .value("ToFDepthConfidenceFilterConfig", DatatypeEnum::ToFDepthConfidenceFilterConfig)
+        .value("DynamicCalibrationControl", DatatypeEnum::DynamicCalibrationControl)
+        .value("DynamicCalibrationResult", DatatypeEnum::DynamicCalibrationResult)
+        .value("CalibrationQuality", DatatypeEnum::CalibrationQuality)
+        .value("CoverageData", DatatypeEnum::CoverageData);
 }
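With these bindings, the six new datatypes become addressable from Python as members of the existing dai.DatatypeEnum. A minimal sketch (assumes a depthai build that contains this commit; older builds raise AttributeError):

import depthai as dai

# Members registered by this commit.
new_members = (
    "ImageFiltersConfig",
    "ToFDepthConfidenceFilterConfig",
    "DynamicCalibrationControl",
    "DynamicCalibrationResult",
    "CalibrationQuality",
    "CoverageData",
)
for name in new_members:
    print(name, "->", getattr(dai.DatatypeEnum, name))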

bindings/python/src/DeviceBindings.cpp

Lines changed: 7 additions & 0 deletions
@@ -607,6 +607,13 @@ void DeviceBindings::bind(pybind11::module& m, void* pCallstack) {
                 return d.getLeonMssCpuUsage();
             },
             DOC(dai, DeviceBase, getLeonMssCpuUsage))
+        .def(
+            "getProcessMemoryUsage",
+            [](DeviceBase& d) {
+                py::gil_scoped_release release;
+                return d.getProcessMemoryUsage();
+            },
+            DOC(dai, DeviceBase, getProcessMemoryUsage))
         .def(
             "addLogCallback",
             [](DeviceBase& d, std::function<void(LogMessage)> callback) {
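The lambda releases the GIL before calling into the device, matching the neighboring CPU-usage getters so a slow device query cannot stall other Python threads. A hedged usage sketch (requires a connected device; the diff does not show the return type of DeviceBase::getProcessMemoryUsage):

import depthai as dai

# Open the first available device; the context manager closes it on exit.
with dai.Device() as device:
    # New in this commit, alongside getLeonCssCpuUsage()/getLeonMssCpuUsage().
    print("process memory usage:", device.getProcessMemoryUsage())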

bindings/python/src/pipeline/datatype/PointCloudDataBindings.cpp

Lines changed: 35 additions & 33 deletions
@@ -117,39 +117,41 @@ void bind_pointclouddata(pybind11::module& m, void* pCallstack) {
         .def("getTimestamp", &PointCloudData::Buffer::getTimestamp, DOC(dai, Buffer, getTimestamp))
         .def("getTimestampDevice", &PointCloudData::Buffer::getTimestampDevice, DOC(dai, Buffer, getTimestampDevice))
         .def("getSequenceNum", &PointCloudData::Buffer::getSequenceNum, DOC(dai, Buffer, getSequenceNum))
-        .def("setPoints", [](py::object& obj, py::array_t<float>& arr) {
-            if (arr.ndim() != 2 || arr.shape(1) != 3) {
-                throw std::runtime_error("Input must be a 2D numpy array of points with the shape of (N, 3)");
-            }
-            dai::PointCloudData& data = obj.cast<dai::PointCloudData&>();
-            std::vector<Point3f> points;
-            points.reserve(arr.shape(0));
-            auto ra = arr.unchecked();
-            for(int i = 0; i < arr.shape(0); i++) {
-                points.emplace_back(ra(i, 0), ra(i, 1), ra(i, 2));
-            }
-            data.setPoints(points);
-        })
-        .def("setPointsRGB", [](py::object& obj, py::array_t<float>& points, py::array_t<uint8_t>& colors) {
-            if (points.ndim() != 2 || points.shape(1) != 3) {
-                throw std::runtime_error("Points input must be a 2D numpy array of points with the shape of (N, 3)");
-            }
-            if (colors.ndim() != 2 || colors.shape(1) != 4) {
-                throw std::runtime_error("Colors input must be a 2D numpy array of colors with the shape of (N, 4)");
-            }
-            if (points.shape(0) != colors.shape(0)) {
-                throw std::runtime_error("Points and Colors must have the same number of rows");
-            }
-            dai::PointCloudData& data = obj.cast<dai::PointCloudData&>();
-            std::vector<Point3fRGBA> pointsRGBA;
-            pointsRGBA.reserve(points.shape(0));
-            auto ra1 = points.unchecked();
-            auto ra2 = colors.unchecked();
-            for(int i = 0; i < points.shape(0); i++) {
-                pointsRGBA.emplace_back(ra1(i, 0), ra1(i, 1), ra1(i, 2), ra2(i, 0), ra2(i, 1), ra2(i, 2), ra2(i, 3));
-            }
-            data.setPointsRGB(pointsRGBA);
-        })
+        .def("setPoints",
+             [](py::object& obj, py::array_t<float>& arr) {
+                 if(arr.ndim() != 2 || arr.shape(1) != 3) {
+                     throw std::runtime_error("Input must be a 2D numpy array of points with the shape of (N, 3)");
+                 }
+                 dai::PointCloudData& data = obj.cast<dai::PointCloudData&>();
+                 std::vector<Point3f> points;
+                 points.reserve(arr.shape(0));
+                 auto ra = arr.unchecked();
+                 for(int i = 0; i < arr.shape(0); i++) {
+                     points.emplace_back(ra(i, 0), ra(i, 1), ra(i, 2));
+                 }
+                 data.setPoints(points);
+             })
+        .def("setPointsRGB",
+             [](py::object& obj, py::array_t<float>& points, py::array_t<uint8_t>& colors) {
+                 if(points.ndim() != 2 || points.shape(1) != 3) {
+                     throw std::runtime_error("Points input must be a 2D numpy array of points with the shape of (N, 3)");
+                 }
+                 if(colors.ndim() != 2 || colors.shape(1) != 4) {
+                     throw std::runtime_error("Colors input must be a 2D numpy array of colors with the shape of (N, 4)");
+                 }
+                 if(points.shape(0) != colors.shape(0)) {
+                     throw std::runtime_error("Points and Colors must have the same number of rows");
+                 }
+                 dai::PointCloudData& data = obj.cast<dai::PointCloudData&>();
+                 std::vector<Point3fRGBA> pointsRGBA;
+                 pointsRGBA.reserve(points.shape(0));
+                 auto ra1 = points.unchecked();
+                 auto ra2 = colors.unchecked();
+                 for(int i = 0; i < points.shape(0); i++) {
+                     pointsRGBA.emplace_back(ra1(i, 0), ra1(i, 1), ra1(i, 2), ra2(i, 0), ra2(i, 1), ra2(i, 2), ra2(i, 3));
+                 }
+                 data.setPointsRGB(pointsRGBA);
+             })
         .def("setWidth", &PointCloudData::setWidth, DOC(dai, PointCloudData, setWidth))
         .def("setHeight", &PointCloudData::setHeight, DOC(dai, PointCloudData, setHeight))
         .def("setSize",

bindings/python/src/pipeline/datatype/StereoDepthConfigBindings.cpp

Lines changed: 8 additions & 1 deletion
@@ -50,7 +50,7 @@ void bind_stereodepthconfig(pybind11::module& m, void* pCallstack) {
     py::class_<StereoDepthConfig::CensusTransform> censusTransform(stereoDepthConfig, "CensusTransform", DOC(dai, StereoDepthConfig, CensusTransform));
     py::enum_<StereoDepthConfig::CensusTransform::KernelSize> censusTransformKernelSize(
         censusTransform, "KernelSize", DOC(dai, StereoDepthConfig, CensusTransform, KernelSize));
-
+    py::enum_<StereoDepthConfig::PostProcessing::Filter> filterEnum(postProcessing, "Filter", DOC(dai, StereoDepthConfig, PostProcessing, Filter));
     ///////////////////////////////////////////////////////////////////////
     ///////////////////////////////////////////////////////////////////////
     ///////////////////////////////////////////////////////////////////////
@@ -82,6 +82,13 @@ void bind_stereodepthconfig(pybind11::module& m, void* pCallstack) {
         .value("FOOT", StereoDepthConfig::AlgorithmControl::DepthUnit::FOOT, DOC(dai, StereoDepthConfig, AlgorithmControl, DepthUnit, FOOT))
         .value("CUSTOM", StereoDepthConfig::AlgorithmControl::DepthUnit::CUSTOM, DOC(dai, StereoDepthConfig, AlgorithmControl, DepthUnit, CUSTOM));
 
+    filterEnum.value("NONE", StereoDepthConfig::PostProcessing::Filter::NONE, DOC(dai, StereoDepthConfig, PostProcessing, Filter, NONE))
+        .value("DECIMATION", StereoDepthConfig::PostProcessing::Filter::DECIMATION, DOC(dai, StereoDepthConfig, PostProcessing, Filter, DECIMATION))
+        .value("SPECKLE", StereoDepthConfig::PostProcessing::Filter::SPECKLE, DOC(dai, StereoDepthConfig, PostProcessing, Filter, SPECKLE))
+        .value("MEDIAN", StereoDepthConfig::PostProcessing::Filter::MEDIAN, DOC(dai, StereoDepthConfig, PostProcessing, Filter, MEDIAN))
+        .value("SPATIAL", StereoDepthConfig::PostProcessing::Filter::SPATIAL, DOC(dai, StereoDepthConfig, PostProcessing, Filter, SPATIAL))
+        .value("TEMPORAL", StereoDepthConfig::PostProcessing::Filter::TEMPORAL, DOC(dai, StereoDepthConfig, PostProcessing, Filter, TEMPORAL));
+
     algorithmControl.def(py::init<>())
         .def_readwrite("depthAlign", &StereoDepthConfig::AlgorithmControl::depthAlign, DOC(dai, StereoDepthConfig, AlgorithmControl, depthAlign))
        .def_readwrite("depthUnit", &StereoDepthConfig::AlgorithmControl::depthUnit, DOC(dai, StereoDepthConfig, AlgorithmControl, depthUnit))
