Commit 6001e78

Merge branch 'main' into docs

2 parents: b33fdb3 + f5d13f3

File tree

7 files changed: +420 −253 lines


docs/source/components/nodes/image_manip.rst

Lines changed: 2 additions & 0 deletions
@@ -3,6 +3,8 @@ ImageManip
 
 The ImageManip node can be used to crop or rotate a rectangular area, or to perform various image transforms: rotate, mirror, flip, perspective transform.
 
+For downscaling, ImageManip uses bilinear/bicubic interpolation.
+
 How to place it
 ###############
 
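To illustrate the crop-then-downscale behaviour described above, here is a minimal Python sketch (not part of this commit; the crop rectangle, output size and stream name are illustrative, using the standard depthai ImageManip API):

    import depthai as dai

    pipeline = dai.Pipeline()

    cam = pipeline.create(dai.node.ColorCamera)
    cam.setPreviewSize(1000, 1000)
    cam.setInterleaved(False)

    # Crop a normalized rectangle, then downscale (bilinear/bicubic) to 300x300
    manip = pipeline.create(dai.node.ImageManip)
    manip.initialConfig.setCropRect(0.1, 0.1, 0.9, 0.9)  # xmin, ymin, xmax, ymax (0..1)
    manip.initialConfig.setResize(300, 300)
    cam.preview.link(manip.inputImage)

    xout = pipeline.create(dai.node.XLinkOut)
    xout.setStreamName('manip')
    manip.out.link(xout.input)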

Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
Script change pipeline flow
===========================

This example shows how you can change the flow of data inside your pipeline at runtime using the :ref:`Script` node. In this example, we send a message from
the host to choose whether we want the Script node to forward the color frame to the :ref:`MobileNetDetectionNetwork`.

Demo
####

.. image:: https://user-images.githubusercontent.com/18037362/187734814-df3b46c9-5e04-4a9d-bf6f-d738b40b4421.gif

Pipeline Graph
##############

.. image:: https://user-images.githubusercontent.com/18037362/187736249-db7ff175-fcea-4d4e-b567-f99087bd82ee.png

Setup
#####

.. include:: /includes/install_from_pypi.rst

Source code
###########

.. tabs::

    .. tab:: Python

        Also `available on GitHub <https://github.com/luxonis/depthai-python/blob/main/examples/Script/script_change_pipeline_flow.py>`__

        .. literalinclude:: ../../../../examples/Script/script_change_pipeline_flow.py
           :language: python
           :linenos:

    .. tab:: C++

        Also `available on GitHub <https://github.com/luxonis/depthai-core/blob/main/examples/Script/script_change_pipeline_flow.cpp>`__

        .. literalinclude:: ../../../../depthai-core/examples/Script/script_change_pipeline_flow.cpp
           :language: cpp
           :linenos:

.. include:: /includes/footer-short.rst
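The core of the runtime flow switch on this page is a few lines of on-device Script code plus a one-byte host message. A condensed sketch (variable names `script` and `inQ` as in the example script added by this commit; the data value sent here is illustrative):

    # On-device: read the toggle message (non-blocking) and forward frames conditionally
    script.setScript("""
        toggle = False
        while True:
            msg = node.io['toggle'].tryGet()   # host message, if any
            if msg is not None:
                toggle = msg.getData()[0]      # first byte carries the on/off flag
            frame = node.io['rgb'].get()       # always consume the camera frame
            if toggle:
                node.io['nn'].send(frame)      # forward to the detection network only when enabled
    """)

    # Host side: flip the toggle by sending a one-byte Buffer
    buf = dai.Buffer()
    buf.setData([1])   # [0] disables forwarding
    inQ.send(buf)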

docs/source/tutorials/code_samples.rst

Lines changed: 1 addition & 0 deletions
@@ -113,6 +113,7 @@ are presented with code.
 .. rubric:: Script
 
 - :ref:`Script camera control` - Controlling the camera with the Script node
+- :ref:`Script change pipeline flow` - Change the flow of data inside your pipeline at runtime with the :ref:`Script` node
 - :ref:`Script forward frames` - Forward incoming image stream to two different output streams (demuxing)
 - :ref:`Script get local IP` - Get local IP of the device (only OAK-POE devices)
 - :ref:`Script HTTP client` - Send HTTP request to a server (only OAK-POE devices)
Lines changed: 91 additions & 0 deletions
@@ -0,0 +1,91 @@
#!/usr/bin/env python3
import depthai as dai
import cv2
from pathlib import Path
import numpy as np

parentDir = Path(__file__).parent
nnPath = str((parentDir / Path('../models/mobilenet-ssd_openvino_2021.4_5shave.blob')).resolve().absolute())

pipeline = dai.Pipeline()

cam = pipeline.createColorCamera()
cam.setBoardSocket(dai.CameraBoardSocket.RGB)
cam.setInterleaved(False)
cam.setIspScale(2,3)
cam.setVideoSize(720,720)
cam.setPreviewSize(300,300)

xoutRgb = pipeline.create(dai.node.XLinkOut)
xoutRgb.setStreamName('rgb')
cam.video.link(xoutRgb.input)

# Script node decides at runtime whether preview frames are forwarded to the NN
script = pipeline.createScript()

# Host -> device control stream used to toggle inferencing on/off
xin = pipeline.create(dai.node.XLinkIn)
xin.setStreamName('in')
xin.out.link(script.inputs['toggle'])

cam.preview.link(script.inputs['rgb'])
script.setScript("""
    toggle = False
    while True:
        msg = node.io['toggle'].tryGet()
        if msg is not None:
            toggle = msg.getData()[0]
            node.warn('Toggle! Perform NN inferencing: ' + str(toggle))

        frame = node.io['rgb'].get()

        if toggle:
            node.io['nn'].send(frame)
""")

nn = pipeline.create(dai.node.MobileNetDetectionNetwork)
nn.setBlobPath(nnPath)
script.outputs['nn'].link(nn.input)

xoutNn = pipeline.create(dai.node.XLinkOut)
xoutNn.setStreamName('nn')
nn.out.link(xoutNn.input)

# Connect to device with pipeline
with dai.Device(pipeline) as device:
    inQ = device.getInputQueue("in")
    qRgb = device.getOutputQueue("rgb")
    qNn = device.getOutputQueue("nn")

    runNn = False

    def frameNorm(frame, bbox):
        # Map normalized (0..1) bbox coordinates to pixel coordinates
        normVals = np.full(len(bbox), frame.shape[0])
        normVals[::2] = frame.shape[1]
        return (np.clip(np.array(bbox), 0, 1) * normVals).astype(int)

    color = (255, 127, 0)
    def drawDetections(frame, detections):
        for detection in detections:
            bbox = frameNorm(frame, (detection.xmin, detection.ymin, detection.xmax, detection.ymax))
            cv2.putText(frame, f"{int(detection.confidence * 100)}%", (bbox[0] + 10, bbox[1] + 20), cv2.FONT_HERSHEY_TRIPLEX, 0.5, color)
            cv2.rectangle(frame, (bbox[0], bbox[1]), (bbox[2], bbox[3]), color, 2)

    while True:
        frame = qRgb.get().getCvFrame()

        if qNn.has():
            detections = qNn.get().detections
            drawDetections(frame, detections)

        cv2.putText(frame, f"NN inferencing: {runNn}", (20,20), cv2.FONT_HERSHEY_TRIPLEX, 0.7, color)
        cv2.imshow('Color frame', frame)

        key = cv2.waitKey(1)
        if key == ord('q'):
            break
        elif key == ord('t'):
            # Notify the Script node (via the 'in' XLink stream) to toggle frame forwarding
            runNn = not runNn
            print(f"{'Enabling' if runNn else 'Disabling'} NN inferencing")
            buf = dai.Buffer()
            buf.setData(runNn)
            inQ.send(buf)
