Skip to content

Commit ea52b01

Browse files
authored
Merge pull request #284 from KKalem/alars_cams
Alars robot description
2 parents f5374e6 + 6f8c480 commit ea52b01

File tree

17 files changed

+275
-103
lines changed

17 files changed

+275
-103
lines changed

messages/dji_msgs/msg/Links.msg

Lines changed: 13 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -13,19 +13,26 @@ string MAP = 'map'
1313
string ODOM = 'odom'
1414
string HOME_POINT = 'home_point'
1515

16-
# The drone's body
16+
# The drone's body, defined as the point between the thick parts
17+
# of the legs.
1718
string BASE_LINK = 'base_link'
1819
# Same BASE_LINK, but only has the yaw component of the orientation.
1920
string BASE_FLAT = 'base_flat_link'
2021

2122

22-
# parts of the drone
23-
string GIMBAL_CAMERA_LINK = 'gimbal_camera_link'
24-
string WINCH_LINK = 'winch_link' # should be right-leg T junction
23+
# parts of the alars module that are relevant. in-between parts
24+
# like the roll/pitch/yaw arms of the gimbal are not included here
25+
# even though they are published by the z1_pro_driver node
26+
string ROPE_BASE_LINK = 'rope_base_link'
27+
28+
string GIMBAL_CAMERA_LINK = 'z1_camera_link'
29+
string FISHEYE_CAMERA_LINK = 'fisheye_camera_link'
30+
31+
# optical frames: z "depth", x "right", y "down" with top-left origin
32+
string GIMBAL_OPTICAL_FRAME = 'z1_optical_frame'
33+
string FISHEYE_OPTICAL_FRAME = 'fisheye_optical_frame'
2534

2635

27-
# Used by hook_estimator
28-
string HOOK_LINK = 'hook_link'
2936

3037
# Used by projection/filter
3138
string ESTIMATED_AUV = 'estimated_auv'

messages/dji_msgs/msg/Topics.msg

Lines changed: 15 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -5,14 +5,18 @@
55
string MOVE_TO_SETPOINT_TOPIC = 'move_to_setpoint' # PoseStamped
66
string LABELED_UTM_TOPIC = 'labeled_utm_topic' # String, formatted: "utm_{zone}_{band}"
77

8-
# user feedback
9-
string CONTROLLER_INPUT_TOPIC = 'joy' # Joy
10-
string CONTROLLER_VIBRATION_TOPIC = 'joy/set_feedback' # JoyFeedback
11-
string TTS_TOPIC = 'speak' # String
128

139
# cameras
1410
string GIMBAL_CAMERA_RAW_TOPIC = 'gimbal_camera/camera/image_raw' # Image
11+
string GIMBAL_CAMERA_INFO_TOPIC = 'gimbal_camera/camera/camera_info' #CameraInfo
1512
string FISHEYE_CAMERA_RAW_TOPIC = 'fisheye_camera/camera/image_raw' # Image
13+
string FISHEYE_CAMERA_INFO_TOPIC = 'fisheye_camera/camera/camera_info' #CameraInfo
14+
15+
16+
# sensors that are not coming from PSDK
17+
string LOAD_CELL_RAW_TOPIC = 'sensor/load_cell_raw' # Int32
18+
string LOAD_CELL_WEIGHT_TOPIC = 'sensor/load_cell_weight' # Float32
19+
1620

1721
# object detectors in normalized image coordinates [-1,1]
1822
# (0,0) is the center of the image, +x is right, +y is up!
@@ -44,6 +48,10 @@ string DISABLE_ALARS_DETECTOR_SERVICE_TOPIC = 'disable_alars_detector' # Trigger
4448
string ENABLE_HOOK_ESTIMATOR_SERVICE_TOPIC = 'enable_hook_estimator' # Trigger
4549
string DISABLE_HOOK_ESTIMATOR_SERVICE_TOPIC = 'disable_hook_estimator' # Trigger
4650

47-
# sensors that are not coming from PSDK
48-
string LOAD_CELL_RAW_TOPIC = 'sensor/load_cell_raw' # Int32
49-
string LOAD_CELL_WEIGHT_TOPIC = 'sensor/load_cell_weight' # Float32
51+
52+
53+
54+
# user feedback
55+
string CONTROLLER_INPUT_TOPIC = 'joy' # Joy
56+
string CONTROLLER_VIBRATION_TOPIC = 'joy/set_feedback' # JoyFeedback
57+
string TTS_TOPIC = 'speak' # String

perception/alars/auv_state_estimation/auv_state_estimation/projection.py

Lines changed: 22 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import rclpy
2+
from rclpy.time import Time, Duration
23
import numpy as np
34
from rclpy.node import Node
45
from sensor_msgs.msg import CameraInfo
@@ -8,7 +9,7 @@
89
from scipy.spatial.transform import Rotation as R
910
from geometry_msgs.msg import PolygonStamped, Point32
1011

11-
from dji_msgs.msg import Links
12+
from dji_msgs.msg import Links, Topics
1213

1314

1415
class ProjectionNode(Node):
@@ -19,12 +20,12 @@ def __init__(self, name = 'projection_node'):
1920
namespace="",
2021
parameters=[
2122
("z_water", 0.0),
22-
("topics.camera_info", "gimbal_camera/camera/cam_info"),
23-
("topics.input_polygon", "alars_detection/auv_obb"),
24-
("topics.output_projected_polygon", "projection/auv_obb"),
23+
("topics.camera_info", Topics.GIMBAL_CAMERA_INFO_TOPIC),
24+
("topics.input_polygon", Topics.ESTIMATED_AUV_OBB_TOPIC),
25+
("topics.output_projected_polygon", Topics.PROJECTED_AUV_OBB_TOPIC),
2526
("topics.rviz.camera_rays", "rviz/projection_rays"),
2627
("frames.map", Links.MAP),
27-
("frames.camera", Links.GIMBAL_CAMERA_LINK),
28+
("frames.camera", Links.GIMBAL_OPTICAL_FRAME),
2829
])
2930

3031
self.marker_ns = self.get_name() # ns for rviz ray markers
@@ -50,7 +51,6 @@ def __init__(self, name = 'projection_node'):
5051
self.K_inv = None
5152
self.width = None
5253
self.height = None
53-
self.R_im_cam = np.array([[ 0, -1, 0], [-1, 0, 0], [ 0, 0, -1]]) # Optical to cam link frame
5454

5555
# tf
5656
self.tf_buffer = tf2_ros.Buffer()
@@ -83,11 +83,11 @@ def project(self, msg: PolygonStamped):
8383
self.get_logger().warning("No CameraInfo received yet")
8484
return
8585

86-
stamp = msg.header.stamp
87-
points_im = np.array([(p.x, p.y) for p in msg.polygon.points])
88-
86+
# use zero time to get the latest available instead of the exact stamp of the image
87+
# image might be coming faster than TF updates for reasons, and using stamp of image
88+
# to get TF causes "lookup in the future" errors.
8989
try:
90-
transform = self.tf_buffer.lookup_transform(self.map_frame, self.cam_frame, stamp, timeout=rclpy.duration.Duration(seconds=0.5))
90+
transform = self.tf_buffer.lookup_transform(self.map_frame, self.cam_frame, Time(seconds=0), timeout=Duration(seconds=1))
9191
except Exception as e:
9292
self.get_logger().info(f"TF lookup failed ({self.cam_frame} -> {self.map_frame}): {e}")
9393
return
@@ -99,30 +99,31 @@ def project(self, msg: PolygonStamped):
9999
rot = R.from_quat([q.x, q.y, q.z, q.w]).as_matrix()
100100

101101
# pixelcoords from normalized image coords
102+
points_im = np.array([(p.x, p.y) for p in msg.polygon.points])
102103
u = (points_im[:, 0] + 1.0) * 0.5 * self.width
103-
v = (1.0 - (points_im[:, 1] + 1.0) * 0.5) * self.height # y up -> v down (optical frame)
104+
v = (points_im[:, 1] + 1.0) * 0.5 * self.height
104105

105106
ray_im = self.K_inv @ np.vstack((u, v, np.ones_like(u))) # projection from pixel to 3D ray in image frame (pc = K_inv @ xs)
106-
ray_cam = self.R_im_cam @ ray_im # Convert from image frame to cam link frame
107-
108-
ray_map = rot @ ray_cam # rotate ray to map frame
107+
ray_map = rot @ ray_im # rotate ray to map frame
108+
109109
dz = ray_map[2, :]
110110
if np.any(dz > -1e-2): # if the z component of the norm ray is positive or close to zero, it means the ray is parallel to or pointing away from the water plane
111111
self.get_logger().info(f"Projection doesn't intersect with water surface")
112112
return
113113

114114
t_intersect = (self.z_water - cam_pos_map[2]) / dz # translation along ray to intersect with water
115115
intersection_points_map = cam_pos_map[:, None] + ray_map * t_intersect # intersection points in map frame
116-
intersection_points_cam = ray_cam * t_intersect # intersection points in cam frame
116+
intersection_points_cam = ray_im * t_intersect # intersection points in cam frame
117117

118-
self.publish_poly(stamp, intersection_points_map.T)
119-
self.publish_ray_marker(stamp, np.mean(intersection_points_cam, axis=1))
118+
now: Time = self.get_clock().now()
119+
self.publish_poly(now, intersection_points_map.T)
120+
self.publish_ray_marker(now, np.mean(intersection_points_cam, axis=1))
120121

121-
def publish_ray_marker(self, stamp, intersection_point):
122+
def publish_ray_marker(self, stamp:Time, intersection_point):
122123
# publish projected line (ray) for rviz
123124
marker = Marker()
124125
marker.header.frame_id = self.cam_frame
125-
marker.header.stamp = stamp
126+
marker.header.stamp = stamp.to_msg()
126127
marker.ns = self.marker_ns
127128
marker.id = 0
128129
marker.type = Marker.LINE_STRIP
@@ -136,11 +137,11 @@ def publish_ray_marker(self, stamp, intersection_point):
136137
marker.points.append(p1)
137138
self.ray_pub.publish(marker)
138139

139-
def publish_poly(self, stamp, points):
140+
def publish_poly(self, stamp:Time, points):
140141
# Publish head pose and obb corners (may be used for filter)
141142
obb_marker = PolygonStamped()
142143
obb_marker.header.frame_id = self.map_frame
143-
obb_marker.header.stamp = stamp
144+
obb_marker.header.stamp = stamp.to_msg()
144145
for px, py, pz in points:
145146
point32 = Point32()
146147
point32.x = px

perception/alars/auv_state_estimation/config/params.yaml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
z_water: 0.0
66

77
topics:
8-
camera_info: "gimbal_camera/camera/cam_info"
8+
camera_info: "gimbal_camera/camera/camera_info"
99
input_polygon: "alars_detection/auv_obb"
1010
output_projected_polygon: "alars_projection/auv_obb"
1111
rviz:

perception/alars/auv_yolo_detector/auv_yolo_detector/auv_yolo_detector.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -323,7 +323,7 @@ def filter_detections(self, obb: OBB) -> Tuple[Results, torch.Tensor, Union[list
323323
def published_normalized_position(self, p: tuple, wh: tuple, label: str) -> tuple:
324324
"""
325325
Normalize pixels coordinates so the origin is the center of the image, the x axis is horizontal
326-
from left to right and y axis is vertical from bottom to top
326+
from left to right and y axis is vertical from top to bottom
327327
Publishes normalized pixel position in corresponding topics, depending on label
328328
Args:
329329
p: (horizontal position, vertical position) (both in pixels)
@@ -335,7 +335,7 @@ def published_normalized_position(self, p: tuple, wh: tuple, label: str) -> tupl
335335
point.header.stamp = self.get_clock().now().to_msg()
336336
point.header.frame_id = self.model_params["frames.camera"]
337337
point.point.x = float((p[0]-wh[0]/2)/(wh[0]/2))
338-
point.point.y = float(-(p[1]-wh[1]/2)/(wh[1]/2))
338+
point.point.y = float((p[1]-wh[1]/2)/(wh[1]/2))
339339
if label == 'sam': self.sam_position_pub.publish(point)
340340
elif label == 'buoy': self.buoy_position_pub.publish(point)
341341
else: self.get_logger().error('Position not published, label argument should be "sam" or "buoy"')
@@ -351,7 +351,7 @@ def publish_normalized_points(self, points: np.ndarray, wh: tuple, label=None):
351351
w, h = wh
352352
for px, py in points:
353353
xn = float((px - w/2) / (w/2))
354-
yn = float(-(py - h/2) / (h/2))
354+
yn = float((py - h/2) / (h/2))
355355
p = Point32()
356356
p.x = xn
357357
p.y = yn
@@ -425,7 +425,7 @@ def get_params(self):
425425

426426
"frames.map": namespace.removeprefix("/") + "/" + Links.MAP,
427427
"frames.quadrotor_odom": namespace.removeprefix("/") + "/" + Links.ODOM,
428-
"frames.camera": namespace.removeprefix("/") + "/" + Links.GIMBAL_CAMERA_LINK,
428+
"frames.camera": namespace.removeprefix("/") + "/" + Links.GIMBAL_OPTICAL_FRAME,
429429

430430
}
431431
self.model_params = {

scripts/smarc_bringups/scripts/dji_bringup.sh

Lines changed: 45 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,8 @@ if [[ $USE_SIM_TIME = "False" ]]; then
8787
tmux send-keys "ros2 launch psdk_wrapper wrapper.launch.py namespace:=/$ROBOT_NAME/wrapper" C-m
8888

8989
tmux select-pane -t $SESSION:0.1
90-
tmux send-keys "ros2 run dji_captain dji_captain --ros-args -p use_sim_time:=$USE_SIM_TIME -p home_altitude_above_water:=$HOME_ABOVE_WATER -r __ns:=/$ROBOT_NAME " C-m
90+
# tmux send-keys "ros2 run dji_captain dji_captain --ros-args -p use_sim_time:=$USE_SIM_TIME -p home_altitude_above_water:=$HOME_ABOVE_WATER -r __ns:=/$ROBOT_NAME " C-m
91+
tmux send-keys "ros2 launch dji_captain alars_captain.launch robot_name:=$ROBOT_NAME use_sim_time:=$USE_SIM_TIME home_altitude_above_water:=$HOME_ABOVE_WATER" C-m
9192

9293
tmux select-pane -t $SESSION:0.2
9394
tmux send-keys "fast-discovery-server -i 0" C-m
@@ -99,7 +100,7 @@ else
99100
tmux select-window -t $SESSION:0
100101
tmux split-window -h -t $SESSION:0.0
101102
tmux select-pane -t $SESSION:0.0
102-
tmux send-keys "ros2 run dji_captain dji_captain --ros-args -p use_sim_time:=$USE_SIM_TIME -p home_altitude_above_water:=$HOME_ABOVE_WATER -r __ns:=/$ROBOT_NAME " C-m
103+
tmux send-keys "ros2 launch dji_captain alars_captain.launch robot_name:=$ROBOT_NAME use_sim_time:=$USE_SIM_TIME home_altitude_above_water:=$HOME_ABOVE_WATER" C-m
103104

104105
tmux select-pane -t $SESSION:0.1
105106
tmux send-keys "ros2 topic echo /$ROBOT_NAME/captain_status std_msgs/msg/String --field data" C-m
@@ -188,13 +189,15 @@ tmux send-keys "ros2 topic echo ${ROBOT_NAME}/alars_bt/status std_msgs/msg/Strin
188189
# 4 Camera and detection
189190
############
190191
# camera and detection node
191-
tmux new-window -t $SESSION:4 -n 'Camera'
192-
tmux rename-window "Cam"
192+
tmux new-window -t $SESSION:4 -n 'CamProc'
193+
tmux rename-window "CamProc"
193194
tmux select-window -t $SESSION:4
195+
194196
tmux split-window -h -t $SESSION:4.0
197+
tmux select-layout -t $SESSION:4 even-vertical
195198

196-
tmux select-pane -t $SESSION:4.0
197199
# Fransisco's YOLO detector
200+
tmux select-pane -t $SESSION:4.0
198201
YOLO_DEVICE=0
199202
if [[ $USE_SIM_TIME = "True" ]]; then
200203
YOLO_DEVICE=cpu
@@ -204,33 +207,11 @@ namespace:=$ROBOT_NAME \
204207
device:=$YOLO_DEVICE \
205208
use_sim_time:=$USE_SIM_TIME" C-m
206209

207-
# the cam driver is needed just for the real thing
208-
# for basic usb webcam
209-
#tmux send-keys "ros2 run usb_cam usb_cam_node_exe --ros-args -r __ns:=/$ROBOT_NAME/gimbal_camera" C-m
210-
if [[ $USE_SIM_TIME = "False" ]]; then
211-
tmux split-window -v -t $SESSION:4.1
212-
tmux select-pane -t $SESSION:4.1
213-
# for the dji gimbal cam
214-
# requires ros-humble-gscam gstreamer1.0-tools gstreamer1.0-plugins-good
215-
GSCAM_CONFIG_DJI="v4l2src device=/dev/djipocket3 ! image/jpeg,width=1920,height=1080,framerate=30/1 ! jpegdec ! videoconvert ! video/x-raw,format=BGR"
216-
tmux send-keys "ros2 run gscam gscam_node --ros-args \
217-
-p gscam_config:=\"$GSCAM_CONFIG_DJI\" \
218-
-p frame_id:=osmo3_optical_frame \
219-
-p image_encoding:=rgb8 \
220-
-p sync_sink:=false \
221-
-r __ns:=/$ROBOT_NAME/gimbal_camera" C-m
210+
# Sebastian's projection node
211+
tmux select-pane -t $SESSION:4.1
212+
tmux send-keys "ros2 launch auv_state_estimation projection_launch.py namespace:=$ROBOT_NAME use_sim_time:=$USE_SIM_TIME" C-m
213+
222214

223-
# for the 360 cam
224-
tmux select-pane -t $SESSION:4.2
225-
# for the dji gimbal cam
226-
GSCAM_CONFIG_FISH="v4l2src device=/dev/insta360x4 ! image/jpeg,width=1920,height=1080,framerate=30/1 ! jpegdec ! videoconvert ! video/x-raw,format=BGR"
227-
tmux send-keys "ros2 run gscam gscam_node --ros-args \
228-
-p gscam_config:=\"$GSCAM_CONFIG_FISH\" \
229-
-p frame_id:=fisheye_optical_frame \
230-
-p image_encoding:=rgb8 \
231-
-p sync_sink:=false \
232-
-r __ns:=/$ROBOT_NAME/fisheye_camera" C-m
233-
fi
234215

235216
############
236217
# 5 AUX Nodes like geofence etc.
@@ -246,10 +227,41 @@ tmux send-keys "ros2 run actionable_geofence geofence_node --ros-args -r __ns:=/
246227
############
247228
if [[ $USE_SIM_TIME = "False" ]]; then
248229
# new window for load_cell_driver
249-
tmux new-window -t $SESSION:7 -n 'LoadCell'
250-
tmux rename-window "LoadCell"
230+
tmux new-window -t $SESSION:7 -n 'Drivers'
231+
tmux rename-window "Drivers"
251232
tmux select-window -t $SESSION:7
233+
234+
tmux split-window -v -t $SESSION:7.0
235+
tmux split-window -v -t $SESSION:7.1
236+
tmux select-layout -t $SESSION:7 even-vertical
237+
238+
tmux select-pane -t $SESSION:7.0
252239
tmux send-keys "ros2 run nau7802_ros2_driver nau7802_ros2_driver --ros-args -r __ns:=/$ROBOT_NAME" C-m
240+
241+
tmux select-pane -t $SESSION:7.1
242+
# for basic usb webcam
243+
#tmux send-keys "ros2 run usb_cam usb_cam_node_exe --ros-args -r __ns:=/$ROBOT_NAME/gimbal_camera" C-m
244+
# TODO: replace with the z1 pro driver
245+
# for the dji gimbal cam
246+
# requires ros-humble-gscam gstreamer1.0-tools gstreamer1.0-plugins-good
247+
# GSCAM_CONFIG_DJI="v4l2src device=/dev/djipocket3 ! image/jpeg,width=1920,height=1080,framerate=30/1 ! jpegdec ! videoconvert ! video/x-raw,format=BGR"
248+
# tmux send-keys "ros2 run gscam gscam_node --ros-args \
249+
# -p gscam_config:=\"$GSCAM_CONFIG_DJI\" \
250+
# -p frame_id:=osmo3_optical_frame \
251+
# -p image_encoding:=rgb8 \
252+
# -p sync_sink:=false \
253+
# -r __ns:=/$ROBOT_NAME/gimbal_camera" C-m
254+
255+
# for the 360 cam
256+
tmux select-pane -t $SESSION:7.2
257+
# for the dji gimbal cam
258+
GSCAM_CONFIG_FISH="v4l2src device=/dev/insta360x4 ! image/jpeg,width=1920,height=1080,framerate=30/1 ! jpegdec ! videoconvert ! video/x-raw,format=BGR"
259+
tmux send-keys "ros2 run gscam gscam_node --ros-args \
260+
-p gscam_config:=\"$GSCAM_CONFIG_FISH\" \
261+
-p frame_id:=fisheye_optical_frame \
262+
-p image_encoding:=rgb8 \
263+
-p sync_sink:=false \
264+
-r __ns:=/$ROBOT_NAME/fisheye_camera" C-m
253265
fi
254266

255267

Lines changed: 52 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
cmake_minimum_required(VERSION 3.8)
2+
project(alars_description)
3+
4+
if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang")
5+
add_compile_options(-Wall -Wextra -Wpedantic)
6+
endif()
7+
8+
# find dependencies
9+
find_package(ament_cmake REQUIRED)
10+
# uncomment the following section in order to fill in
11+
# further dependencies manually.
12+
# find_package(<dependency> REQUIRED)
13+
14+
install(
15+
DIRECTORY launch/
16+
DESTINATION share/${PROJECT_NAME}/launch/
17+
)
18+
19+
install(
20+
DIRECTORY mesh/
21+
DESTINATION share/${PROJECT_NAME}/mesh/
22+
)
23+
24+
#install(
25+
# DIRECTORY robots/
26+
# DESTINATION share/${PROJECT_NAME}/robots/
27+
#)
28+
29+
install(
30+
DIRECTORY urdf/
31+
DESTINATION share/${PROJECT_NAME}/urdf/
32+
)
33+
34+
install(
35+
PROGRAMS
36+
# scripts/joint_state_converter.py
37+
DESTINATION lib/${PROJECT_NAME}
38+
)
39+
40+
if(BUILD_TESTING)
41+
find_package(ament_lint_auto REQUIRED)
42+
# the following line skips the linter which checks for copyrights
43+
# comment the line when a copyright and license is added to all source files
44+
set(ament_cmake_copyright_FOUND TRUE)
45+
# the following line skips cpplint (only works in a git repo)
46+
# comment the line when this package is in a git repo and when
47+
# a copyright and license is added to all source files
48+
set(ament_cmake_cpplint_FOUND TRUE)
49+
ament_lint_auto_find_test_dependencies()
50+
endif()
51+
52+
ament_package()

vehicles/descriptions/alars_description/README.md

Whitespace-only changes.

0 commit comments

Comments
 (0)