From 8506300aabffdabba5f918c90e31e8d24a83140b Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Tue, 14 Oct 2025 08:12:45 +0000 Subject: [PATCH 01/17] added logs and /robotic_platform prefix for topics --- .../src/extrinsic_calibrator_class.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py index 6427a9b..f7563b6 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py @@ -665,7 +665,7 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c self.node = node self.camera_name = camera_name self.camera_id = camera_id - self.image_topic = image_topic + self.image_topic = "/robotic_platform/" + image_topic self.camera_info_topic = camera_info_topic self.bridge = bridge self.tf_broadcaster = broadcaster @@ -686,11 +686,15 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c self.camera_info_sub = self.node.create_subscription(CameraInfo, camera_info_topic, self.camera_info_callback, 1) self.cv2_image_publisher = self.node.create_publisher(Image, f"{image_topic}/detected_markers", 10) + self.node.get_logger().info(f"Camera {self.camera_name} subscribed to {image_topic} and {camera_info_topic} topics.") + #self.node.get_logger().info(f"Camera matrix and {self.camera_matrix}, dist coeffs {self.dist_coeffs}") + self.marker_transforms = {} self.reliable_marker_transforms = {} def camera_info_callback(self, msg): + #self.node.get_logger().info(f"Camera {self.camera_name} camera info received: {msg.k}, {msg.d}.") if self.camera_matrix is None: self.camera_matrix = np.array(msg.k).reshape((3, 3)) self.dist_coeffs = np.array(msg.d) @@ -706,6 +710,9 @@ def image_callback(self, msg): # For 
ArUco detection, you can use the filtered_image directly corners, ids, rejected_img_points = self.detector.detectMarkers(cv_image) + + self.node.get_logger().info(f"Camera {self.camera_name}: Detected {len(corners)} markers.") + detected_ids = set() if ids is not None: for i, id in enumerate(ids): @@ -747,6 +754,7 @@ def image_callback(self, msg): pass # iterate through each marker of the marker_transforms dictionary + self.node.get_logger().info(f"Camera {self.camera_name}: Maker transforms collected: " + ", ".join([f"Marker {mid}: {len(transforms)}" for mid, transforms in self.marker_transforms.items()])) for marker_id, transforms in self.marker_transforms.items(): if len(transforms) == 30: self.check_precision(marker_id, transforms) @@ -779,6 +787,8 @@ def is_precise(self, transforms): angles.append(angle) rotation_range = np.max(angles) + self.node.get_logger().info(f"Camera {self.camera_name}: Position range: {position_range}, Rotation range (radians): {rotation_range}") + return np.all(position_range < 0.01) and np.all(rotation_range < np.radians(1)) else: return False From eef7bd8e190e7aaf725586d5b534eca513322241 Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Tue, 21 Oct 2025 13:29:01 +0200 Subject: [PATCH 02/17] initial --- .../CMakeLists.txt | 0 .../package.xml | 3 +- .../config/aruco_parameters.yaml | 23 ++- .../python_aruco_parameters.py | 48 +++++ .../python_camera_topics_parameters.py | 20 ++ .../src/aruco_generator_class.py | 57 ++++-- .../src/extrinsic_calibrator_class.py | 179 ++++++++++------ extrinsic_calibrator_examples/CMakeLists.txt | 38 ---- extrinsic_calibrator_examples/README.md | 191 ------------------ .../config/d435.yaml | 23 --- .../config/d435_intrinsics.yaml | 20 -- .../config/l515.yaml | 23 --- .../config/l515_intrinsics.yaml | 20 -- .../extrinsic_calibrator_examples/__init__.py | 0 .../launch_extrinsic_calibrator.launch.py | 80 -------- .../launch/launch_rviz.launch.py | 67 ------ .../launch/launch_usb_cameras.launch.py | 76 
------- extrinsic_calibrator_examples/license.md | 140 ------------- extrinsic_calibrator_examples/package.xml | 38 ---- .../rviz/extrinsic.rviz | 188 ----------------- .../scripts/__init__.py | 0 21 files changed, 250 insertions(+), 984 deletions(-) rename {extrinsic_calibrator => extrinsic_calibrator_board}/CMakeLists.txt (100%) rename {extrinsic_calibrator => extrinsic_calibrator_board}/package.xml (98%) create mode 100644 extrinsic_calibrator_core/extrinsic_calibrator_core/python_aruco_parameters.py create mode 100644 extrinsic_calibrator_core/extrinsic_calibrator_core/python_camera_topics_parameters.py delete mode 100644 extrinsic_calibrator_examples/CMakeLists.txt delete mode 100644 extrinsic_calibrator_examples/README.md delete mode 100644 extrinsic_calibrator_examples/config/d435.yaml delete mode 100644 extrinsic_calibrator_examples/config/d435_intrinsics.yaml delete mode 100644 extrinsic_calibrator_examples/config/l515.yaml delete mode 100644 extrinsic_calibrator_examples/config/l515_intrinsics.yaml delete mode 100644 extrinsic_calibrator_examples/extrinsic_calibrator_examples/__init__.py delete mode 100644 extrinsic_calibrator_examples/launch/launch_extrinsic_calibrator.launch.py delete mode 100644 extrinsic_calibrator_examples/launch/launch_rviz.launch.py delete mode 100644 extrinsic_calibrator_examples/launch/launch_usb_cameras.launch.py delete mode 100644 extrinsic_calibrator_examples/license.md delete mode 100644 extrinsic_calibrator_examples/package.xml delete mode 100644 extrinsic_calibrator_examples/rviz/extrinsic.rviz delete mode 100644 extrinsic_calibrator_examples/scripts/__init__.py diff --git a/extrinsic_calibrator/CMakeLists.txt b/extrinsic_calibrator_board/CMakeLists.txt similarity index 100% rename from extrinsic_calibrator/CMakeLists.txt rename to extrinsic_calibrator_board/CMakeLists.txt diff --git a/extrinsic_calibrator/package.xml b/extrinsic_calibrator_board/package.xml similarity index 98% rename from extrinsic_calibrator/package.xml 
rename to extrinsic_calibrator_board/package.xml index 6263224..26e3d4a 100644 --- a/extrinsic_calibrator/package.xml +++ b/extrinsic_calibrator_board/package.xml @@ -25,5 +25,4 @@ ament_cmake - - \ No newline at end of file + \ No newline at end of file diff --git a/extrinsic_calibrator_core/config/aruco_parameters.yaml b/extrinsic_calibrator_core/config/aruco_parameters.yaml index fef39ca..71c605c 100644 --- a/extrinsic_calibrator_core/config/aruco_parameters.yaml +++ b/extrinsic_calibrator_core/config/aruco_parameters.yaml @@ -4,4 +4,25 @@ aruco_params: default_value: "DICT_6X6_250" marker_length: type: double - default_value: 0.26 \ No newline at end of file + default_value: 0.26 + board_mode: # "single_markers" or "grid_board" + type: string + default_value: "grid_board" + grid_rows: + type: int + default_value: 5 + grid_cols: + type: int + default_value: 5 + grid_marker_separation: # gap between markers (meters) + type: double + default_value: 0.04 + grid_first_marker_id: + type: int + default_value: 0 + grid_image_size_px: # square output image side length (pixels) for generator + type: int + default_value: 1200 + generate_individual_markers: # if true also dumps each marker image when in grid mode + type: bool + default_value: false \ No newline at end of file diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/python_aruco_parameters.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/python_aruco_parameters.py new file mode 100644 index 0000000..26f992a --- /dev/null +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/python_aruco_parameters.py @@ -0,0 +1,48 @@ +# Auto-generated parameter listener for ArUco parameters including grid board support. 
+from dataclasses import dataclass +import rclpy + +@dataclass +class ArucoParamsData: + aruco_dict: str + marker_length: float + board_mode: str = 'single_markers' + grid_rows: int = 0 + grid_cols: int = 0 + grid_marker_separation: float = 0.0 + grid_first_marker_id: int = 0 + grid_image_size_px: int = 1200 + generate_individual_markers: bool = False + +class ParamListener: + def __init__(self, node): + self._node = node + + def get_params(self) -> ArucoParamsData: + # Fetch parameters using node.get_parameter if declared automatically + def _get(name, default=None): + try: + p = self._node.get_parameter(f"aruco_params.{name}") + if p.type_ == 2: # string + return p.get_parameter_value().string_value + elif p.type_ == 3: # double + return p.get_parameter_value().double_value + elif p.type_ == 1: # integer + return p.get_parameter_value().integer_value + elif p.type_ == 4: # bool + return p.get_parameter_value().bool_value + else: + return default + except Exception: + return default + return ArucoParamsData( + aruco_dict=_get('aruco_dict', 'DICT_6X6_250'), + marker_length=_get('marker_length', 0.26), + board_mode=_get('board_mode', 'single_markers'), + grid_rows=_get('grid_rows', 0), + grid_cols=_get('grid_cols', 0), + grid_marker_separation=_get('grid_marker_separation', 0.0), + grid_first_marker_id=_get('grid_first_marker_id', 0), + grid_image_size_px=_get('grid_image_size_px', 1200), + generate_individual_markers=_get('generate_individual_markers', False), + ) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/python_camera_topics_parameters.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/python_camera_topics_parameters.py new file mode 100644 index 0000000..753d56a --- /dev/null +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/python_camera_topics_parameters.py @@ -0,0 +1,20 @@ +# Auto-generated simple camera topics parameter listener to satisfy imports. 
+from dataclasses import dataclass + +@dataclass +class CamerasParamsData: + camera_names: list + +class ParamListener: + def __init__(self, node): + self._node = node + def get_params(self) -> CamerasParamsData: + # Expect a parameter cameras_params.camera_names + try: + p = self._node.get_parameter('cameras_params.camera_names') + names = list(p.get_parameter_value().string_array_value) + except Exception: + names = [] + return CamerasParamsData(camera_names=names) + +cameras_params = type('cameras_params', (), {'ParamListener': ParamListener}) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/aruco_generator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/aruco_generator_class.py index 20086a9..2daeb6e 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/aruco_generator_class.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/aruco_generator_class.py @@ -54,7 +54,7 @@ from rclpy.node import Node # Custom parameters -from extrinsic_calibrator_core.python_aruco_parameters import aruco_params +from extrinsic_calibrator_core.python_aruco_parameters import ParamListener as ArucoParamListener from extrinsic_calibrator_core.src.extrinsic_calibrator_class import ArucoParams @@ -64,32 +64,52 @@ def __init__(self): super().__init__('aruco_marker_generator') self.get_logger().info("Aruco Marker Generator Node has started.") - aruco_params_listener = aruco_params.ParamListener(self) + aruco_params_listener = ArucoParamListener(self) imported_aruco_params = aruco_params_listener.get_params() self.real_aruco_params = ArucoParams(self,imported_aruco_params) - # Parameters for marker generation + # Parameters for generation (pixel size of individual marker when single mode) self.declare_parameter('marker_size', 200) self.declare_parameter('output_directory', '~/markers') marker_size = self.get_parameter('marker_size').value output_directory = os.path.expanduser(self.get_parameter('output_directory').value) - # 
Check if output directory exists, create if it doesn't + # Check or create output directory if not os.path.exists(output_directory): os.makedirs(output_directory) self.get_logger().info(f"Created output directory: {output_directory}") else: self.get_logger().info(f"Output directory: {output_directory} already exists") - - # Generate all markers from aruco_dict - num_markers = len(self.real_aruco_params.aruco_dict.bytesList) - for marker_id in range(0, num_markers): - if not self.generate_marker(marker_id, marker_size, output_directory, self.real_aruco_params): - self.get_logger().info("ArUco generation failed") - return False - - self.get_logger().info("ArUco generation finished successfully. Hit Ctrl+C to exit") + + if self.real_aruco_params.board_mode == 'grid_board' and self.real_aruco_params.board is not None: + # Generate a single grid board image + board_px = getattr(self.real_aruco_params, 'grid_image_size_px', 1200) + margin_px = int(board_px * 0.02) + self.get_logger().info(f"Generating ArUco Grid Board ({self.real_aruco_params.grid_cols}x{self.real_aruco_params.grid_rows}) -> {board_px}px") + board_image = self.real_aruco_params.board.generateImage((board_px, board_px), margin_px, 1) + rotated_board = cv2.rotate(board_image, cv2.ROTATE_180) + output_path = os.path.join(output_directory, 'aruco_grid_board.png') + cv2.imwrite(output_path, rotated_board) + self.get_logger().info(f"Grid board image saved to {output_path}") + + # Optionally also dump individual marker images + if getattr(self.real_aruco_params, 'generate_individual_markers', False): + num_markers = self.real_aruco_params.grid_rows * self.real_aruco_params.grid_cols + first_id = self.real_aruco_params.grid_first_marker_id + for local_idx in range(num_markers): + marker_id = first_id + local_idx + if not self.generate_marker(marker_id, marker_size, output_directory, self.real_aruco_params): + self.get_logger().warn(f"Failed to generate individual marker {marker_id}") + 
self.get_logger().info("ArUco grid generation finished successfully. Hit Ctrl+C to exit") + else: + # Single marker mode: generate all markers from dictionary + num_markers = len(self.real_aruco_params.aruco_dict.bytesList) + for marker_id in range(0, num_markers): + if not self.generate_marker(marker_id, marker_size, output_directory, self.real_aruco_params): + self.get_logger().info("ArUco generation failed") + return False + self.get_logger().info("ArUco marker set generation finished successfully. Hit Ctrl+C to exit") while(1): pass @@ -99,16 +119,15 @@ def __init__(self): def generate_marker(self, marker_id, marker_size, output_directory, aruco_params:ArucoParams): - # Generate the marker image - marker_image = cv2.aruco.generateImageMarker(aruco_params.aruco_dict, marker_id, marker_size) + try: + marker_image = cv2.aruco.generateImageMarker(aruco_params.aruco_dict, marker_id, marker_size) + except Exception as e: + self.get_logger().error(f"Error generating marker {marker_id}: {e}") + return False - # Rotate the image 180 degrees rotated_marker = cv2.rotate(marker_image, cv2.ROTATE_180) - - # Save the rotated marker image to the specified directory output_path = os.path.join(output_directory, f'aruco_marker_{marker_id}.png') cv2.imwrite(output_path, rotated_marker) - # self.get_logger().info(f'Marker with ID {marker_id} generated and saved to {output_path}') return True diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py index f7563b6..50860ce 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py @@ -67,6 +67,42 @@ from extrinsic_calibrator_core.python_camera_topics_parameters import cameras_params +class ArucoParams(): + def __init__(self, node:Node, aruco_params): + if hasattr(cv2.aruco, 
aruco_params.aruco_dict): + self.aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_params.aruco_dict)) + else: + node.get_logger().error(f"cv2.aruco doesn't have a dictionary with the name '{aruco_params.aruco_dict}'") + self.marker_length = aruco_params.marker_length + # Grid board optional parameters (fallbacks ensure backward compatibility) + self.board_mode = getattr(aruco_params, 'board_mode', 'single_markers') + self.grid_rows = getattr(aruco_params, 'grid_rows', 0) + self.grid_cols = getattr(aruco_params, 'grid_cols', 0) + self.grid_marker_separation = getattr(aruco_params, 'grid_marker_separation', 0.0) + self.grid_first_marker_id = getattr(aruco_params, 'grid_first_marker_id', 0) + self.grid_image_size_px = getattr(aruco_params, 'grid_image_size_px', 1200) + self.generate_individual_markers = getattr(aruco_params, 'generate_individual_markers', False) + + self.board = None + if self.board_mode == 'grid_board': + if self.grid_rows > 0 and self.grid_cols > 0: + try: + self.board = cv2.aruco.GridBoard( + self.grid_cols, + self.grid_rows, + self.marker_length, + self.grid_marker_separation, + self.aruco_dict, + self.grid_first_marker_id + ) + node.get_logger().info(f"Created ArUco GridBoard {self.grid_cols}x{self.grid_rows} starting at ID {self.grid_first_marker_id}") + except Exception as e: + node.get_logger().error(f"Failed to create grid board: {e}") + else: + node.get_logger().error("Grid board mode selected but rows/cols are invalid (<=0). 
Reverting to single markers.") + self.board_mode = 'single_markers' + + class ExtrinsicCalibrator(Node): def __init__(self): super().__init__('detector_aruco_node', automatically_declare_parameters_from_overrides=True) @@ -79,12 +115,14 @@ def __init__(self): aruco_params_listener = aruco_params.ParamListener(self) imported_aruco_params = aruco_params_listener.get_params() - self.real_aruco_params = ArucoParams(self,imported_aruco_params) + self.real_aruco_params = ArucoParams(self, imported_aruco_params) + self.get_logger().info(f"Using ArUco dictionary: {imported_aruco_params.aruco_dict}, with marker length: {self.real_aruco_params.marker_length} meters") cameras_param_listener = cameras_params.ParamListener(self) self.imported_cameras_params = cameras_param_listener.get_params() camera_names = list(self.imported_cameras_params.camera_names) + self.get_logger().info(f"Detected camera names from parameters: {camera_names}") # construct the cameras self.array_of_cameras = [] @@ -649,13 +687,6 @@ def display_marker_to_marker_table(self, title, table_data): -class ArucoParams(): - def __init__(self, node:Node, aruco_params): - if hasattr(cv2.aruco, aruco_params.aruco_dict): - self.aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_params.aruco_dict)) - else: - node.get_logger().error(f"cv2.aruco doesn't have a dictionary with the name '{aruco_params.aruco_dict}'") - self.marker_length = aruco_params.marker_length @@ -679,13 +710,16 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c self.aruco_dict = aruco_params.aruco_dict self.parameters = cv2.aruco.DetectorParameters() self.detector = cv2.aruco.ArucoDetector(self.aruco_dict, self.parameters) - self.marker_length = aruco_params.marker_length # length of the marker side in meters (adjust as needed) + self.marker_length = aruco_params.marker_length + self.board_mode = aruco_params.board_mode + self.board = aruco_params.board # May be None if single marker mode + 
self.grid_first_marker_id = getattr(aruco_params, 'grid_first_marker_id', 0) # Subscribe to the camera image topic and camera info self.image_sub = self.node.create_subscription(Image, image_topic, self.image_callback, 1) self.camera_info_sub = self.node.create_subscription(CameraInfo, camera_info_topic, self.camera_info_callback, 1) self.cv2_image_publisher = self.node.create_publisher(Image, f"{image_topic}/detected_markers", 10) - + self.node.get_logger().info(f"Camera {self.camera_name} subscribed to {image_topic} and {camera_info_topic} topics.") #self.node.get_logger().info(f"Camera matrix and {self.camera_matrix}, dist coeffs {self.dist_coeffs}") @@ -709,60 +743,89 @@ def image_callback(self, msg): cv_image = self.bridge.imgmsg_to_cv2(msg, "bgr8") # For ArUco detection, you can use the filtered_image directly - corners, ids, rejected_img_points = self.detector.detectMarkers(cv_image) + corners, ids, _ = self.detector.detectMarkers(cv_image) self.node.get_logger().info(f"Camera {self.camera_name}: Detected {len(corners)} markers.") - detected_ids = set() - if ids is not None: - for i, id in enumerate(ids): - marker_id = id[0] - detected_ids.add(marker_id) - - if marker_id not in self.marker_transforms and marker_id not in self.reliable_marker_transforms: - self.marker_transforms[marker_id] = deque(maxlen=30) + # Draw detected markers always + if len(corners) > 0: + cv2.aruco.drawDetectedMarkers(cv_image, corners, ids) - objPoints = np.array([ [-self.marker_length/2, self.marker_length/2, 0], - [self.marker_length/2, self.marker_length/2, 0], - [self.marker_length/2, -self.marker_length/2, 0], - [-self.marker_length/2,-self.marker_length/2, 0]], dtype=np.float32) - - success, rvec, tvec = cv2.solvePnP(objPoints, corners[i], self.camera_matrix, self.dist_coeffs) - if success: + detected_ids = set() + if ids is not None and len(corners) > 0: + if self.board_mode == 'grid_board' and self.board is not None: + # Estimate board pose (uses subset of visible markers) 
+ retval, rvec, tvec = cv2.aruco.estimatePoseBoard(corners, ids, self.board, self.camera_matrix, self.dist_coeffs, None, None) + if retval > 0: rot_matrix, _ = cv2.Rodrigues(rvec) - translation_matrix = np.eye(4) - translation_matrix[:3, :3] = rot_matrix - translation_matrix[:3, 3] = tvec.flatten() - - # Draw the transform - cv2.aruco.drawDetectedMarkers(cv_image, corners, ids) + board_transform = np.eye(4) + board_transform[:3, :3] = rot_matrix + board_transform[:3, 3] = tvec.flatten() + # Publish axes for board origin (use half marker length for axis size) cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.marker_length/2) - ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8") - self.cv2_image_publisher.publish(ros_image) - - # Filter out the already reliable markers - if marker_id in self.reliable_marker_transforms: - continue - else: - self.marker_transforms[marker_id].append(translation_matrix) - - # Add None for markers not detected in this frame - for marker_id in self.marker_transforms: - if marker_id not in detected_ids: - # Restart the precision of the marker if not seen - # self.marker_transforms[marker_id].append(None) - pass - - # iterate through each marker of the marker_transforms dictionary - self.node.get_logger().info(f"Camera {self.camera_name}: Maker transforms collected: " + ", ".join([f"Marker {mid}: {len(transforms)}" for mid, transforms in self.marker_transforms.items()])) - for marker_id, transforms in self.marker_transforms.items(): - if len(transforms) == 30: - self.check_precision(marker_id, transforms) - - # delete all the transforms from the marker_transforms dictionary - for marker_id, transform in self.reliable_marker_transforms.items(): - if marker_id in self.marker_transforms: - del self.marker_transforms[marker_id] + + # Collect transforms for each visible marker id + for i, id_val in enumerate(ids): + marker_id = int(id_val[0]) + detected_ids.add(marker_id) + if marker_id not in 
self.marker_transforms and marker_id not in self.reliable_marker_transforms: + self.marker_transforms[marker_id] = deque(maxlen=30) + # local center of marker: average of its object points in board coordinates + local_index = marker_id - self.grid_first_marker_id + if 0 <= local_index < len(self.board.objPoints): + obj_pts = np.array(self.board.objPoints[local_index], dtype=np.float32).reshape(-1, 3) + local_center = obj_pts.mean(axis=0) + # world center = R * local_center + t + world_center = rot_matrix @ local_center + tvec.flatten() + marker_transform = np.eye(4) + marker_transform[:3, :3] = rot_matrix + marker_transform[:3, 3] = world_center + if marker_id not in self.reliable_marker_transforms: + self.marker_transforms[marker_id].append(marker_transform) + else: + self.node.get_logger().warn(f"Camera {self.camera_name}: Board pose not estimated (retval={retval}).") + else: + # Single marker mode identical to previous logic + for i, id_val in enumerate(ids): + marker_id = int(id_val[0]) + detected_ids.add(marker_id) + if marker_id not in self.marker_transforms and marker_id not in self.reliable_marker_transforms: + self.marker_transforms[marker_id] = deque(maxlen=30) + objPoints = np.array([ + [-self.marker_length/2, self.marker_length/2, 0], + [ self.marker_length/2, self.marker_length/2, 0], + [ self.marker_length/2,-self.marker_length/2, 0], + [-self.marker_length/2,-self.marker_length/2, 0] + ], dtype=np.float32) + success, rvec, tvec = cv2.solvePnP(objPoints, corners[i], self.camera_matrix, self.dist_coeffs) + if success: + rot_matrix, _ = cv2.Rodrigues(rvec) + translation_matrix = np.eye(4) + translation_matrix[:3, :3] = rot_matrix + translation_matrix[:3, 3] = tvec.flatten() + cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.marker_length/2) + if marker_id not in self.reliable_marker_transforms: + self.marker_transforms[marker_id].append(translation_matrix) + + # Publish annotated image + ros_image = 
self.bridge.cv2_to_imgmsg(cv_image, "bgr8") + self.cv2_image_publisher.publish(ros_image) + + # Iterate for precision checks + if len(self.marker_transforms) > 0: + self.node.get_logger().info( + f"Camera {self.camera_name}: Marker transforms collected: " + ", ".join([ + f"Marker {mid}: {len(transforms)}" for mid, transforms in self.marker_transforms.items() + ]) + ) + for marker_id, transforms in list(self.marker_transforms.items()): + if len(transforms) == transforms.maxlen: + self.check_precision(marker_id, transforms) + + # Remove reliable markers from accumulation dict + for marker_id in list(self.reliable_marker_transforms.keys()): + if marker_id in self.marker_transforms: + del self.marker_transforms[marker_id] def check_precision(self, marker_id, transform): diff --git a/extrinsic_calibrator_examples/CMakeLists.txt b/extrinsic_calibrator_examples/CMakeLists.txt deleted file mode 100644 index 68afc9e..0000000 --- a/extrinsic_calibrator_examples/CMakeLists.txt +++ /dev/null @@ -1,38 +0,0 @@ -cmake_minimum_required(VERSION 3.8) -project(extrinsic_calibrator_examples) - -if(NOT CMAKE_CXX_STANDARD) - set(CMAKE_CXX_STANDARD 14) -endif() - -# DEPENDENCIES - -# Find packages -find_package(ament_cmake REQUIRED) -find_package(ament_cmake_python REQUIRED) -find_package(rclcpp REQUIRED) -find_package(rclpy REQUIRED) -find_package(cv_bridge REQUIRED) -find_package(extrinsic_calibrator_core REQUIRED) -find_package(rviz2 REQUIRED) -find_package(sensor_msgs REQUIRED) -find_package(std_msgs REQUIRED) -find_package(tf2_ros REQUIRED) - -find_package(ament_cmake_auto REQUIRED) -ament_auto_find_build_dependencies() - -# COPY A PARTICULAR FOLDER TO THE INSTALL DIRECTORY -# Install config dependencies -install( - DIRECTORY - config - rviz - DESTINATION - share/${PROJECT_NAME} -) - - -# LAUNCH -# Install launchfile -ament_auto_package(INSTALL_TO_SHARE launch) \ No newline at end of file diff --git a/extrinsic_calibrator_examples/README.md b/extrinsic_calibrator_examples/README.md 
deleted file mode 100644 index 07ba2a5..0000000 --- a/extrinsic_calibrator_examples/README.md +++ /dev/null @@ -1,191 +0,0 @@ -# extrinsic_calibrator_examples - -## Overview - -`extrinsic_calibrator_examples` is a ROS2 package designed to provide examples on how to use the `extrinsic_calibrator_core` package as well as useful ros2 launch files to launch the cameras, the calibrator, as well as a demonstration rviz file. - -## Features - -- Launch file to launch a set of cameras using the `usb_camera` package as well as the corresponding set of config files to configure the cameras. -- Laucnh file to launch the rviz file to visualize the markers and the camera frames as well as the rviz file to configure it. -- Launch file to launch all the previous, as well as the calibrator, them being the set of cameras, the rviz visualizer and the calibrator itself. - -## Configuration - -The package provides configuration options through YAML files. - -### Camera configuration - -Here you have an example configuration file `l515.yaml` file to configure the camera according to the `usb_camera` package, as well as the corresponding intrinsic calibration file. - -```yaml -/**: - ros__parameters: - video_device: "/dev/video12" # "ffplay /dev/video12" to test - framerate: 6.0 - io_method: "mmap" - frame_id: "cam2_frame" - pixel_format: "yuyv" # see usb_cam/supported_formats for list of supported formats - av_device_format: "YUV422P" - image_width: 640 - image_height: 480 - camera_name: "cam2" - camera_info_url: "package://extrinsic_calibrator_examples/config/l515_intrinsics.yaml" - brightness: -1 - contrast: -1 - saturation: -1 - sharpness: -1 - gain: -1 - auto_white_balance: true - white_balance: 4000 - autoexposure: true - exposure: 100 - autofocus: false - focus: -1 -``` - -Don't forget to modify the parameter `camera_info_url` to properly link the camera configuration to the intrinsic calibration file. 
- -```yaml -image_width: 640 -image_height: 480 -camera_name: "cam2" -camera_matrix: - rows: 3 - cols: 3 - data: [607.4058837890625, 0.0, 325.59991455078125, 0.0, 607.5341186523438, 247.25904846191406, 0.0, 0.0, 1.0] -distortion_model: "plumb_bob" -distortion_coefficients: - rows: 1 - cols: 5 - data: [0.19551624357700348, -0.5865326523780823, -0.002620677463710308, 0.0008374004391953349, 0.5133219957351685] -rectification_matrix: - rows: 3 - cols: 3 - data: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0] -projection_matrix: - rows: 3 - cols: 4 - data: [607.4058837890625, 0.0, 325.59991455078125, 0.0, 0.0, 607.5341186523438, 247.25904846191406, 0.0, 0.0, 0.0, 1.0, 0.0] - -``` - -In case you want to launch more cameras with the same launch file, simply add them as additional nodes in the launch file `launch_usb_cameras.launch.py`: - -```py -d435_config = os.path.join(config_dir, 'd435.yaml') -l515_config = os.path.join(config_dir, 'l515.yaml') -# d457_config = os.path.join(config_dir, 'd457.yaml') - -return LaunchDescription([ - Node( - package='usb_cam', - executable='usb_cam_node_exe', - name='d435_camera', - namespace='camera_1', - parameters=[d435_config], - output='screen' - ), - Node( - package='usb_cam', - executable='usb_cam_node_exe', - name='l515_camera', - namespace='camera_2', - parameters=[l515_config], - output='screen' - ), - # Node( - # package='usb_cam', - # executable='usb_cam_node_exe', - # name='d457_camera', - # namespace='camera_3', - # parameters=[d457_config], - # output='screen' - # ), -]) - -``` - -## Usage - -### Launching the Cameras - -Using the `usb_cam` package for your camera streams,you can launch the set of all cameras using: -```sh -ros2 launch extrinsic_calibrator_examples launch_usb_cameras.launch.py -``` - -### Launching the rviz visualizer - -An example rviz config file is provided which includes displays for the `/camera_1/image_raw/detected_markers` topic and the `/camera_2/image_raw/detected_markers` topic as well as the tf2 
display of the found markers and cameras. To launch it, use: -```sh -ros2 launch extrinsic_calibrator_examples launch_rviz.launch.py -``` - -### Launching Both Cameras and the Calibrator - -To simultaneously launch the cameras, the rviz visualizer and the extrinsic calibrator, use: -```sh -ros2 launch extrinsic_calibrator_examples launch_extrinsic_calibrator.launch.py -``` - - -## Dependencies - -The package relies on the following libraries and ROS2 packages: - -- `extrinsic_calibrator_core` for the core functionality -- `usb_cam` package for camera streaming -- `rviz2` for visualization - - -To install the necessary dependencies, ensure you run: -```sh -# update libraries -sudo apt-get update -# install ros dependencies -rosdep update -``` - -## Author Information - -**Authors:** -- [Josep Rueda Collell](mailto:rueda_999@hotmail.com) -- [Ander Gonzalez](mailto:ander.gonzalez@ikelan.es) - -**Created:** October 2024 - -**Affiliation:** [IKERLAN](https://www.ikerlan.es) - -setup_paint - -### Citation -If you use this code, please cite: -**Josep Rueda Collell**. "ROS2 Extrinsic Camera Calibrator using ArUco Markers". (2024). - ---- - -Developed as part of **AI-PRISM** project. - - - - - -*AI Powered human-centred Robot Interactions for Smart Manufacturing* - - - - - -Horizon Europe – Grant Agreement number [101058589](https://cordis.europa.eu/project/id/101058589) - -*Funded by the European Union. Views and opinions expressed are however those of the author(s) only and do not necessarily reflect those of the European Union. The European Union cannot be held responsible for them. Neither the European Union nor the granting authority can be held responsible for them.* - -## License - -This software is provided under a dual license system. You may choose between: - -- **GNU Affero General Public License v3**: For open-source development, subject to the conditions of this license. -- **Commercial License**: For proprietary use. 
For more details on the commercial license, please contact us at [info@ikerlan.es](mailto:info@ikerlan.es). - -Please see the [LICENSE](./license.md) file for the complete terms and conditions of each license option. \ No newline at end of file diff --git a/extrinsic_calibrator_examples/config/d435.yaml b/extrinsic_calibrator_examples/config/d435.yaml deleted file mode 100644 index babec8f..0000000 --- a/extrinsic_calibrator_examples/config/d435.yaml +++ /dev/null @@ -1,23 +0,0 @@ -/**: - ros__parameters: - video_device: "/dev/video4" # "ffplay /dev/video4" to test - framerate: 6.0 - io_method: "mmap" - frame_id: "cam1_frame" - pixel_format: "yuyv" # see usb_cam/supported_formats for list of supported formats - av_device_format: "YUV422P" - image_width: 640 - image_height: 480 - camera_name: "cam1" - camera_info_url: "package://extrinsic_calibrator_examples/config/d435_intrinsics.yaml" - brightness: -1 - contrast: -1 - saturation: -1 - sharpness: -1 - gain: -1 - auto_white_balance: true - white_balance: 4000 - autoexposure: true - exposure: 100 - autofocus: false - focus: -1 \ No newline at end of file diff --git a/extrinsic_calibrator_examples/config/d435_intrinsics.yaml b/extrinsic_calibrator_examples/config/d435_intrinsics.yaml deleted file mode 100644 index 2504d46..0000000 --- a/extrinsic_calibrator_examples/config/d435_intrinsics.yaml +++ /dev/null @@ -1,20 +0,0 @@ -image_width: 640 -image_height: 480 -camera_name: "cam1" -camera_matrix: - rows: 3 - cols: 3 - data: [604.122802734375, 0.0, 327.40875244140625, 0.0, 603.847900390625, 244.95323181152344, 0.0, 0.0, 1.0] -distortion_model: "plumb_bob" -distortion_coefficients: - rows: 1 - cols: 5 - data: [0.0, 0.0, 0.0, 0.0, 0.0] -rectification_matrix: - rows: 3 - cols: 3 - data: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0] -projection_matrix: - rows: 3 - cols: 4 - data: [604.122802734375, 0.0, 327.40875244140625, 0.0, 0.0, 603.847900390625, 244.95323181152344, 0.0, 0.0, 0.0, 1.0, 0.0] diff --git 
a/extrinsic_calibrator_examples/config/l515.yaml b/extrinsic_calibrator_examples/config/l515.yaml deleted file mode 100644 index f9950b8..0000000 --- a/extrinsic_calibrator_examples/config/l515.yaml +++ /dev/null @@ -1,23 +0,0 @@ -/**: - ros__parameters: - video_device: "/dev/video12" # "ffplay /dev/video12" to test - framerate: 6.0 - io_method: "mmap" - frame_id: "cam2_frame" - pixel_format: "yuyv" # see usb_cam/supported_formats for list of supported formats - av_device_format: "YUV422P" - image_width: 640 - image_height: 480 - camera_name: "cam2" - camera_info_url: "package://extrinsic_calibrator_examples/config/l515_intrinsics.yaml" - brightness: -1 - contrast: -1 - saturation: -1 - sharpness: -1 - gain: -1 - auto_white_balance: true - white_balance: 4000 - autoexposure: true - exposure: 100 - autofocus: false - focus: -1 \ No newline at end of file diff --git a/extrinsic_calibrator_examples/config/l515_intrinsics.yaml b/extrinsic_calibrator_examples/config/l515_intrinsics.yaml deleted file mode 100644 index f4ab8f4..0000000 --- a/extrinsic_calibrator_examples/config/l515_intrinsics.yaml +++ /dev/null @@ -1,20 +0,0 @@ -image_width: 640 -image_height: 480 -camera_name: "cam2" -camera_matrix: - rows: 3 - cols: 3 - data: [607.4058837890625, 0.0, 325.59991455078125, 0.0, 607.5341186523438, 247.25904846191406, 0.0, 0.0, 1.0] -distortion_model: "plumb_bob" -distortion_coefficients: - rows: 1 - cols: 5 - data: [0.19551624357700348, -0.5865326523780823, -0.002620677463710308, 0.0008374004391953349, 0.5133219957351685] -rectification_matrix: - rows: 3 - cols: 3 - data: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0] -projection_matrix: - rows: 3 - cols: 4 - data: [607.4058837890625, 0.0, 325.59991455078125, 0.0, 0.0, 607.5341186523438, 247.25904846191406, 0.0, 0.0, 0.0, 1.0, 0.0] diff --git a/extrinsic_calibrator_examples/extrinsic_calibrator_examples/__init__.py b/extrinsic_calibrator_examples/extrinsic_calibrator_examples/__init__.py deleted file mode 100644 index 
e69de29..0000000 diff --git a/extrinsic_calibrator_examples/launch/launch_extrinsic_calibrator.launch.py b/extrinsic_calibrator_examples/launch/launch_extrinsic_calibrator.launch.py deleted file mode 100644 index 986cee1..0000000 --- a/extrinsic_calibrator_examples/launch/launch_extrinsic_calibrator.launch.py +++ /dev/null @@ -1,80 +0,0 @@ -# ------------------------------------------------------------------------------ -# This file is part of **extrinsic-calibrator: -# October 2024 -# Copyright 2024 IKERLAN. All Rights Reserved. -# -# -# LICENSE NOTICE -# -# This software is available under a dual license system. Choose between: -# - GNU Affero General Public License v3.0 for open-source usage, or -# - A commercial license for proprietary development. -# For commercial license details, contact us at info@ikerlan.es. -# -# GNU Affero General Public License v3.0 -# Version 3, 19 November 2007 -# © 2007 Free Software Foundation, Inc. -# -# Licensed under a dual license system: -# 1. Open-source usage under the GNU Affero General Public License v3.0 -# (AGPL-3.0), allowing you to freely use, modify, and distribute the -# software for open-source projects. You can find a copy of the AGPL-3.0 -# license at https://www.gnu.org/licenses/agpl-3.0.html. -# 2. For commercial/proprietary use, a separate commercial license is required. -# Please contact us at info@ikerlan.es for inquiries about our commercial -# licensing options. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. 
-# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . -# -# Author Information: -# Author: Josep Rueda Collell -# Created: October 2024 -# Affiliation: IKERLAN (https://www.ikerlan.es) -# ------------------------------------------------------------------------------ - -from launch import LaunchDescription -from launch.actions import IncludeLaunchDescription -from launch.launch_description_sources import PythonLaunchDescriptionSource -from launch_ros.actions import Node -from launch_ros.substitutions import FindPackageShare -from launch.substitutions import PathJoinSubstitution -from launch.substitutions import PathJoinSubstitution - - -def generate_launch_description(): - return LaunchDescription([ - # Launch the extrinsic calibrator node. The config file is in the config folder and is passed to the node using the generate_parameter_library - Node( - package='extrinsic_calibrator_core', - executable='extrinsic_calibrator_node.py', - name='extrinsic_calibrator_node', - output='screen', - ), - - # Laucnh the set of usb-cameras with their own config_files - IncludeLaunchDescription( - PythonLaunchDescriptionSource(PathJoinSubstitution([ - FindPackageShare("extrinsic_calibrator_examples"), - "launch", - "launch_usb_cameras.launch.py"])) - ), - - # Laucnh the rviz visualizer with the TF of the map and the cameras - IncludeLaunchDescription( - PythonLaunchDescriptionSource(PathJoinSubstitution([ - FindPackageShare("extrinsic_calibrator_examples"), - "launch", - "launch_rviz.launch.py"])) - ), - ]) diff --git a/extrinsic_calibrator_examples/launch/launch_rviz.launch.py b/extrinsic_calibrator_examples/launch/launch_rviz.launch.py deleted file mode 100644 index fcc63b8..0000000 --- a/extrinsic_calibrator_examples/launch/launch_rviz.launch.py +++ /dev/null @@ -1,67 +0,0 @@ -# ------------------------------------------------------------------------------ -# This file is part of **extrinsic-calibrator: -# 
October 2024 -# Copyright 2024 IKERLAN. All Rights Reserved. -# -# -# LICENSE NOTICE -# -# This software is available under a dual license system. Choose between: -# - GNU Affero General Public License v3.0 for open-source usage, or -# - A commercial license for proprietary development. -# For commercial license details, contact us at info@ikerlan.es. -# -# GNU Affero General Public License v3.0 -# Version 3, 19 November 2007 -# © 2007 Free Software Foundation, Inc. -# -# Licensed under a dual license system: -# 1. Open-source usage under the GNU Affero General Public License v3.0 -# (AGPL-3.0), allowing you to freely use, modify, and distribute the -# software for open-source projects. You can find a copy of the AGPL-3.0 -# license at https://www.gnu.org/licenses/agpl-3.0.html. -# 2. For commercial/proprietary use, a separate commercial license is required. -# Please contact us at info@ikerlan.es for inquiries about our commercial -# licensing options. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . 
-# -# Author Information: -# Author: Josep Rueda Collell -# Created: October 2024 -# Affiliation: IKERLAN (https://www.ikerlan.es) -# ------------------------------------------------------------------------------ - -import os -from ament_index_python.packages import get_package_share_directory -from launch import LaunchDescription -from launch_ros.actions import Node - - -def generate_launch_description(): - package_name = 'extrinsic_calibrator_examples' - rviz_dir = os.path.join(get_package_share_directory(package_name), 'rviz') - - rviz_config = os.path.join(rviz_dir, 'extrinsic.rviz') - - return LaunchDescription([ - Node( - package='rviz2', - executable='rviz2', - name='rviz2', - arguments=['-d', rviz_config], - output='screen' - ), - - ]) diff --git a/extrinsic_calibrator_examples/launch/launch_usb_cameras.launch.py b/extrinsic_calibrator_examples/launch/launch_usb_cameras.launch.py deleted file mode 100644 index 6d2105c..0000000 --- a/extrinsic_calibrator_examples/launch/launch_usb_cameras.launch.py +++ /dev/null @@ -1,76 +0,0 @@ -# ------------------------------------------------------------------------------ -# This file is part of **extrinsic-calibrator: -# October 2024 -# Copyright 2024 IKERLAN. All Rights Reserved. -# -# -# LICENSE NOTICE -# -# This software is available under a dual license system. Choose between: -# - GNU Affero General Public License v3.0 for open-source usage, or -# - A commercial license for proprietary development. -# For commercial license details, contact us at info@ikerlan.es. -# -# GNU Affero General Public License v3.0 -# Version 3, 19 November 2007 -# © 2007 Free Software Foundation, Inc. -# -# Licensed under a dual license system: -# 1. Open-source usage under the GNU Affero General Public License v3.0 -# (AGPL-3.0), allowing you to freely use, modify, and distribute the -# software for open-source projects. You can find a copy of the AGPL-3.0 -# license at https://www.gnu.org/licenses/agpl-3.0.html. -# 2. 
For commercial/proprietary use, a separate commercial license is required. -# Please contact us at info@ikerlan.es for inquiries about our commercial -# licensing options. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU Affero General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU Affero General Public License for more details. -# -# You should have received a copy of the GNU Affero General Public License -# along with this program. If not, see . -# -# Author Information: -# Author: Josep Rueda Collell -# Created: October 2024 -# Affiliation: IKERLAN (https://www.ikerlan.es) -# ------------------------------------------------------------------------------ - -import os -from ament_index_python.packages import get_package_share_directory -from launch import LaunchDescription -from launch_ros.actions import Node - - -def generate_launch_description(): - package_name = 'extrinsic_calibrator_examples' - config_dir = os.path.join(get_package_share_directory(package_name), 'config') - - d435_config = os.path.join(config_dir, 'd435.yaml') - l515_config = os.path.join(config_dir, 'l515.yaml') - - return LaunchDescription([ - Node( - package='usb_cam', - executable='usb_cam_node_exe', - name='d435_camera', - namespace='camera_1', - parameters=[d435_config], - output='screen' - ), - Node( - package='usb_cam', - executable='usb_cam_node_exe', - name='l515_camera', - namespace='camera_2', - parameters=[l515_config], - output='screen' - ) - ]) diff --git a/extrinsic_calibrator_examples/license.md b/extrinsic_calibrator_examples/license.md deleted file mode 100644 index aff2e2f..0000000 --- 
a/extrinsic_calibrator_examples/license.md +++ /dev/null @@ -1,140 +0,0 @@ -# IKERLAN, S. COOP -This file is part of **extrinsic-calibrator: humble** -October 2024 - -# LICENSE NOTICE - -This software has been liberated under a dual license system, on which you can choose between the open-source GNU Affero General Public License v3 in the case you are interested in using this software for an open-source development, or a commercial license in the case you are interested in using this software for a privative development. If you want to know more about our commercial license, please contact us in info@ikerlan.es. - -# GNU AFFERO GENERAL PUBLIC LICENSE -Version 3, 19 November 2007 -Copyright © 2007 Free Software Foundation, Inc. -Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. -Preamble -The GNU Affero General Public License is a free, copyleft license for software and other kinds of works, specifically designed to ensure cooperation with the community in the case of network server software. -The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, our General Public Licenses are intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. -When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. 
-Developers that use our General Public Licenses protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License which gives you legal permission to copy, distribute and/or modify the software. -A secondary benefit of defending all users' freedom is that improvements made in alternate versions of the program, if they receive widespread use, become available for other developers to incorporate. Many developers of free software are heartened and encouraged by the resulting cooperation. However, in the case of software used on network servers, this result may fail to come about. The GNU General Public License permits making a modified version and letting the public access it on a server without ever releasing its source code to the public. -The GNU Affero General Public License is designed specifically to ensure that, in such cases, the modified source code becomes available to the community. It requires the operator of a network server to provide the source code of the modified version running there to the users of that server. Therefore, public use of a modified version, on a publicly accessible server, gives the public access to the source code of the modified version. -An older license, called the Affero General Public License and published by Affero, was designed to accomplish similar goals. This is a different license, not a version of the Affero GPL, but Affero has released a new version of the Affero GPL which permits relicensing under this license. -The precise terms and conditions for copying, distribution and modification follow. -TERMS AND CONDITIONS -0. Definitions. -"This License" refers to version 3 of the GNU Affero General Public License. -"Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. -"The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". 
"Licensees" and "recipients" may be individuals or organizations. -To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. -A "covered work" means either the unmodified Program or a work based on the Program. -To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. -To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. -An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. -1. Source Code. -The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. -A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. 
-The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. -The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. -The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. -The Corresponding Source for a work in source code form is that same work. -2. Basic Permissions. -All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. 
The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. -You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. -Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. -3. Protecting Users' Legal Rights From Anti-Circumvention Law. -No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. -When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. -4. Conveying Verbatim Copies. 
-You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. -You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. -5. Conveying Modified Source Versions. -You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: -• a) The work must carry prominent notices stating that you modified it, and giving a relevant date. -• b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". -• c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. -• d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. 
-A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. -6. Conveying Non-Source Forms. -You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: -• a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. -• b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. -• c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. 
This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. -• d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. -• e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. -A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. -A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. 
A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. -"Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. -If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). -The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. 
-Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. -7. Additional Terms. -"Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. -When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
-Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: -• a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or -• b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or -• c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or -• d) Limiting the use for publicity purposes of names of licensors or authors of the material; or -• e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or -• f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. -All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. 
-If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. -Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. -8. Termination. -You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). -However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. -Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. -Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. -9. Acceptance Not Required for Having Copies. -You are not required to accept this License in order to receive or run a copy of the Program. 
Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. -10. Automatic Licensing of Downstream Recipients. -Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. -An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. -You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. -11. Patents. -A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. 
The work thus licensed is called the contributor's "contributor version". -A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. -Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. -In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. -If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. 
"Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. -If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. -A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. -Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. -12. No Surrender of Others' Freedom. 
-If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. -13. Remote Network Interaction; Use with the GNU General Public License. -Notwithstanding any other provision of this License, if you modify the Program, your modified version must prominently offer all users interacting with it remotely through a computer network (if your version supports such interaction) an opportunity to receive the Corresponding Source of your version by providing access to the Corresponding Source from a network server at no charge, through some standard or customary means of facilitating copying of software. This Corresponding Source shall include the Corresponding Source for any work covered by version 3 of the GNU General Public License that is incorporated pursuant to the following paragraph. -Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the work with which it is combined will remain governed by version 3 of the GNU General Public License. -14. Revised Versions of this License. -The Free Software Foundation may publish revised and/or new versions of the GNU Affero General Public License from time to time. 
Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. -Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU Affero General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU Affero General Public License, you may choose any version ever published by the Free Software Foundation. -If the Program specifies that a proxy can decide which future versions of the GNU Affero General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. -Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. -15. Disclaimer of Warranty. -THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. -16. Limitation of Liability. 
-IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. -17. Interpretation of Sections 15 and 16. -If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. -END OF TERMS AND CONDITIONS -How to Apply These Terms to Your New Programs -If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. -To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU Affero General Public License as - published by the Free Software Foundation, either version 3 of the - License, or (at your option) any later version. 
- - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU Affero General Public License for more details. - - You should have received a copy of the GNU Affero General Public License - along with this program. If not, see . -Also add information on how to contact you by electronic and paper mail. -If your software can interact with users remotely through a computer network, you should also make sure that it provides a way for users to get its source. For example, if your program is a web application, its interface could display a "Source" link that leads users to an archive of the code. There are many ways you could offer source, and different solutions will be better for different programs; see section 13 for the specific requirements. -You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU AGPL, see . - diff --git a/extrinsic_calibrator_examples/package.xml b/extrinsic_calibrator_examples/package.xml deleted file mode 100644 index 90f40bd..0000000 --- a/extrinsic_calibrator_examples/package.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - - extrinsic_calibrator_examples - 0.1.0 - ROS2 package designed to provide examples on how to use the extrinsic_calibrator_core package as well as useful ros2 launch files to launch the cameras, the calibrator, as well as a demonstration rviz file. 
- Josep Rueda Collell - Ander Gonzalez - AGPL-3.0-only - - ament_cmake - ament_cmake_python - - rclcpp - rclpy - cv_bridge - extrinsic_calibrator_core - rviz2 - sensor_msgs - std_msgs - tf2_ros - tf_transformations - usb_cam - - rosidl_default_generators - - rosidl_default_runtime - - rosidl_interface_packages - - ament_lint_auto - ament_lint_common - - - ament_cmake - - - \ No newline at end of file diff --git a/extrinsic_calibrator_examples/rviz/extrinsic.rviz b/extrinsic_calibrator_examples/rviz/extrinsic.rviz deleted file mode 100644 index 6a66681..0000000 --- a/extrinsic_calibrator_examples/rviz/extrinsic.rviz +++ /dev/null @@ -1,188 +0,0 @@ -Panels: - - Class: rviz_common/Displays - Help Height: 78 - Name: Displays - Property Tree Widget: - Expanded: - - /Global Options1 - - /Status1 - - /Image1 - - /Image2 - - /TF1 - - /TF1/Frames1 - - /TF1/Tree1 - Splitter Ratio: 0.6382352709770203 - Tree Height: 88 - - Class: rviz_common/Selection - Name: Selection - - Class: rviz_common/Tool Properties - Expanded: - - /2D Goal Pose1 - - /Publish Point1 - Name: Tool Properties - Splitter Ratio: 0.5886790156364441 - - Class: rviz_common/Views - Expanded: - - /Current View1 - Name: Views - Splitter Ratio: 0.5 - - Class: rviz_common/Time - Experimental: false - Name: Time - SyncMode: 0 - SyncSource: "" -Visualization Manager: - Class: "" - Displays: - - Alpha: 0.5 - Cell Size: 1 - Class: rviz_default_plugins/Grid - Color: 160; 160; 164 - Enabled: true - Line Style: - Line Width: 0.029999999329447746 - Value: Lines - Name: Grid - Normal Cell Count: 0 - Offset: - X: 0 - Y: 0 - Z: 0 - Plane: XY - Plane Cell Count: 10 - Reference Frame: - Value: true - - Class: rviz_default_plugins/Image - Enabled: true - Max Value: 1 - Median window: 5 - Min Value: 0 - Name: Image - Normalize Range: true - Topic: - Depth: 5 - Durability Policy: Volatile - History Policy: Keep Last - Reliability Policy: Reliable - Value: /camera_1/image_raw/detected_markers - Value: true - - Class: 
rviz_default_plugins/Image - Enabled: true - Max Value: 1 - Median window: 5 - Min Value: 0 - Name: Image - Normalize Range: true - Topic: - Depth: 5 - Durability Policy: Volatile - History Policy: Keep Last - Reliability Policy: Reliable - Value: /camera_2/image_raw/detected_markers - Value: true - - Class: rviz_default_plugins/TF - Enabled: true - Frame Timeout: 15 - Frames: - All Enabled: false - cam2: - Value: true - map: - Value: true - Marker Scale: 1 - Name: TF - Show Arrows: true - Show Axes: true - Show Names: true - Tree: - map: - cam2: - {} - Update Interval: 0 - Value: true - Enabled: true - Global Options: - Background Color: 48; 48; 48 - Fixed Frame: map - Frame Rate: 30 - Name: root - Tools: - - Class: rviz_default_plugins/Interact - Hide Inactive Objects: true - - Class: rviz_default_plugins/MoveCamera - - Class: rviz_default_plugins/Select - - Class: rviz_default_plugins/FocusCamera - - Class: rviz_default_plugins/Measure - Line color: 128; 128; 0 - - Class: rviz_default_plugins/SetInitialPose - Covariance x: 0.25 - Covariance y: 0.25 - Covariance yaw: 0.06853891909122467 - Topic: - Depth: 5 - Durability Policy: Volatile - History Policy: Keep Last - Reliability Policy: Reliable - Value: /initialpose - - Class: rviz_default_plugins/SetGoal - Topic: - Depth: 5 - Durability Policy: Volatile - History Policy: Keep Last - Reliability Policy: Reliable - Value: /goal_pose - - Class: rviz_default_plugins/PublishPoint - Single click: true - Topic: - Depth: 5 - Durability Policy: Volatile - History Policy: Keep Last - Reliability Policy: Reliable - Value: /clicked_point - Transformation: - Current: - Class: rviz_default_plugins/TF - Value: true - Views: - Current: - Class: rviz_default_plugins/Orbit - Distance: 4.191425800323486 - Enable Stereo Rendering: - Stereo Eye Separation: 0.05999999865889549 - Stereo Focal Distance: 1 - Swap Stereo Eyes: false - Value: false - Focal Point: - X: 0 - Y: 0 - Z: 0 - Focal Shape Fixed Size: true - Focal Shape Size: 
0.05000000074505806 - Invert Z Axis: false - Name: Current View - Near Clip Distance: 0.009999999776482582 - Pitch: 0.7203978896141052 - Target Frame: - Value: Orbit (rviz) - Yaw: 0.8403979539871216 - Saved: ~ -Window Geometry: - Displays: - collapsed: false - Height: 1007 - Hide Left Dock: false - Hide Right Dock: false - Image: - collapsed: false - QMainWindow State: 000000ff00000000fd00000004000000000000018300000355fc020000000cfb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003b000000e1000000c700fffffffb0000000a0049006d00610067006501000001080000003b0000000000000000fb0000000a0049006d00610067006501000001490000001a0000000000000000fb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d0061006700650100000122000001330000002800fffffffb0000000a0049006d006100670065010000025b000001350000002800ffffff000000010000010f00000355fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000003b00000355000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000007740000003efc0100000002fb0000000800540069006d00650100000000000007740000025300fffffffb0000000800540069006d00650100000000000004500000000000000000000004d60000035500000004000000040000000800000008fc000000010000
0002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 - Selection: - collapsed: false - Time: - collapsed: false - Tool Properties: - collapsed: false - Views: - collapsed: false - Width: 1908 - X: -32 - Y: -28 diff --git a/extrinsic_calibrator_examples/scripts/__init__.py b/extrinsic_calibrator_examples/scripts/__init__.py deleted file mode 100644 index e69de29..0000000 From 87c3588101e8b2fc95dabe2bc83ed144bd425aef Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Tue, 21 Oct 2025 16:04:01 +0200 Subject: [PATCH 03/17] parameters descriptions --- .../config/aruco_parameters.yaml | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/extrinsic_calibrator_core/config/aruco_parameters.yaml b/extrinsic_calibrator_core/config/aruco_parameters.yaml index 71c605c..89dc93f 100644 --- a/extrinsic_calibrator_core/config/aruco_parameters.yaml +++ b/extrinsic_calibrator_core/config/aruco_parameters.yaml @@ -2,27 +2,34 @@ aruco_params: aruco_dict: type: string default_value: "DICT_6X6_250" + description: "Aruco dictionary to use. nXn_value. n - how many squares per marker. VALUE - how many pixels per marker. See - https://docs.opencv.org/4.x/d5/dae/tutorial_aruco_detection.html" marker_length: type: double default_value: 0.26 - board_mode: # "single_markers" or "grid_board" + description: "Length of one side of the marker in meters." + board_mode: type: string default_value: "grid_board" + description: "Board generation mode. Options: single_marker, grid_board" grid_rows: type: int default_value: 5 + description: "Number of marker rows in the grid board." grid_cols: type: int default_value: 5 - grid_marker_separation: # gap between markers (meters) + description: "Number of marker columns in the grid board." + grid_marker_separation: type: double default_value: 0.04 + description: "Separation between markers in the grid board (meters). 
See 'b' on the image there - https://github.com/ethz-asl/kalibr/wiki/calibration-targets" grid_first_marker_id: type: int default_value: 0 - grid_image_size_px: # square output image side length (pixels) for generator + description: "ID of the first marker in the grid board. Every marker has an unique ID." + grid_image_size_px: # TODO - check it type: int default_value: 1200 - generate_individual_markers: # if true also dumps each marker image when in grid mode + generate_individual_markers: # TODO - check it type: bool default_value: false \ No newline at end of file From 6894a6633372c2d4c8201379b9938f890b695f08 Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Tue, 21 Oct 2025 16:16:10 +0200 Subject: [PATCH 04/17] revert to main status --- .../python_aruco_parameters.py | 48 ----- .../python_camera_topics_parameters.py | 20 -- .../src/extrinsic_calibrator_class.py | 193 ++++++------------ 3 files changed, 60 insertions(+), 201 deletions(-) delete mode 100644 extrinsic_calibrator_core/extrinsic_calibrator_core/python_aruco_parameters.py delete mode 100644 extrinsic_calibrator_core/extrinsic_calibrator_core/python_camera_topics_parameters.py diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/python_aruco_parameters.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/python_aruco_parameters.py deleted file mode 100644 index 26f992a..0000000 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/python_aruco_parameters.py +++ /dev/null @@ -1,48 +0,0 @@ -# Auto-generated parameter listener for ArUco parameters including grid board support. 
-from dataclasses import dataclass -import rclpy - -@dataclass -class ArucoParamsData: - aruco_dict: str - marker_length: float - board_mode: str = 'single_markers' - grid_rows: int = 0 - grid_cols: int = 0 - grid_marker_separation: float = 0.0 - grid_first_marker_id: int = 0 - grid_image_size_px: int = 1200 - generate_individual_markers: bool = False - -class ParamListener: - def __init__(self, node): - self._node = node - - def get_params(self) -> ArucoParamsData: - # Fetch parameters using node.get_parameter if declared automatically - def _get(name, default=None): - try: - p = self._node.get_parameter(f"aruco_params.{name}") - if p.type_ == 2: # string - return p.get_parameter_value().string_value - elif p.type_ == 3: # double - return p.get_parameter_value().double_value - elif p.type_ == 1: # integer - return p.get_parameter_value().integer_value - elif p.type_ == 4: # bool - return p.get_parameter_value().bool_value - else: - return default - except Exception: - return default - return ArucoParamsData( - aruco_dict=_get('aruco_dict', 'DICT_6X6_250'), - marker_length=_get('marker_length', 0.26), - board_mode=_get('board_mode', 'single_markers'), - grid_rows=_get('grid_rows', 0), - grid_cols=_get('grid_cols', 0), - grid_marker_separation=_get('grid_marker_separation', 0.0), - grid_first_marker_id=_get('grid_first_marker_id', 0), - grid_image_size_px=_get('grid_image_size_px', 1200), - generate_individual_markers=_get('generate_individual_markers', False), - ) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/python_camera_topics_parameters.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/python_camera_topics_parameters.py deleted file mode 100644 index 753d56a..0000000 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/python_camera_topics_parameters.py +++ /dev/null @@ -1,20 +0,0 @@ -# Auto-generated simple camera topics parameter listener to satisfy imports. 
-from dataclasses import dataclass - -@dataclass -class CamerasParamsData: - camera_names: list - -class ParamListener: - def __init__(self, node): - self._node = node - def get_params(self) -> CamerasParamsData: - # Expect a parameter cameras_params.camera_names - try: - p = self._node.get_parameter('cameras_params.camera_names') - names = list(p.get_parameter_value().string_array_value) - except Exception: - names = [] - return CamerasParamsData(camera_names=names) - -cameras_params = type('cameras_params', (), {'ParamListener': ParamListener}) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py index 50860ce..c3ab44c 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py @@ -67,42 +67,6 @@ from extrinsic_calibrator_core.python_camera_topics_parameters import cameras_params -class ArucoParams(): - def __init__(self, node:Node, aruco_params): - if hasattr(cv2.aruco, aruco_params.aruco_dict): - self.aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_params.aruco_dict)) - else: - node.get_logger().error(f"cv2.aruco doesn't have a dictionary with the name '{aruco_params.aruco_dict}'") - self.marker_length = aruco_params.marker_length - # Grid board optional parameters (fallbacks ensure backward compatibility) - self.board_mode = getattr(aruco_params, 'board_mode', 'single_markers') - self.grid_rows = getattr(aruco_params, 'grid_rows', 0) - self.grid_cols = getattr(aruco_params, 'grid_cols', 0) - self.grid_marker_separation = getattr(aruco_params, 'grid_marker_separation', 0.0) - self.grid_first_marker_id = getattr(aruco_params, 'grid_first_marker_id', 0) - self.grid_image_size_px = getattr(aruco_params, 'grid_image_size_px', 1200) - self.generate_individual_markers = 
getattr(aruco_params, 'generate_individual_markers', False) - - self.board = None - if self.board_mode == 'grid_board': - if self.grid_rows > 0 and self.grid_cols > 0: - try: - self.board = cv2.aruco.GridBoard( - self.grid_cols, - self.grid_rows, - self.marker_length, - self.grid_marker_separation, - self.aruco_dict, - self.grid_first_marker_id - ) - node.get_logger().info(f"Created ArUco GridBoard {self.grid_cols}x{self.grid_rows} starting at ID {self.grid_first_marker_id}") - except Exception as e: - node.get_logger().error(f"Failed to create grid board: {e}") - else: - node.get_logger().error("Grid board mode selected but rows/cols are invalid (<=0). Reverting to single markers.") - self.board_mode = 'single_markers' - - class ExtrinsicCalibrator(Node): def __init__(self): super().__init__('detector_aruco_node', automatically_declare_parameters_from_overrides=True) @@ -115,14 +79,12 @@ def __init__(self): aruco_params_listener = aruco_params.ParamListener(self) imported_aruco_params = aruco_params_listener.get_params() - self.real_aruco_params = ArucoParams(self, imported_aruco_params) - self.get_logger().info(f"Using ArUco dictionary: {imported_aruco_params.aruco_dict}, with marker length: {self.real_aruco_params.marker_length} meters") + self.real_aruco_params = ArucoParams(self,imported_aruco_params) cameras_param_listener = cameras_params.ParamListener(self) self.imported_cameras_params = cameras_param_listener.get_params() camera_names = list(self.imported_cameras_params.camera_names) - self.get_logger().info(f"Detected camera names from parameters: {camera_names}") # construct the cameras self.array_of_cameras = [] @@ -593,7 +555,7 @@ def broadcast_cameras_and_markers_to_world(self): t.header.stamp = self.get_clock().now().to_msg() t.header.frame_id = "marker_0" - t.child_frame_id = "world" + t.child_frame_id = "map" translation = tf_transformations.translation_from_matrix(origin_transform) quaternion = 
tf_transformations.quaternion_from_matrix(origin_transform) @@ -639,7 +601,7 @@ def broadcast_cameras_and_markers_to_world(self): if self.map_to_cameras_transform_table[camera.camera_id] is not None: t = TransformStamped() t.header.stamp = self.get_clock().now().to_msg() - t.header.frame_id = "world" + t.header.frame_id = "map" t.child_frame_id = camera.camera_name transform = self.map_to_cameras_transform_table[camera.camera_id] @@ -687,6 +649,13 @@ def display_marker_to_marker_table(self, title, table_data): +class ArucoParams(): + def __init__(self, node:Node, aruco_params): + if hasattr(cv2.aruco, aruco_params.aruco_dict): + self.aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_params.aruco_dict)) + else: + node.get_logger().error(f"cv2.aruco doesn't have a dictionary with the name '{aruco_params.aruco_dict}'") + self.marker_length = aruco_params.marker_length @@ -696,7 +665,7 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c self.node = node self.camera_name = camera_name self.camera_id = camera_id - self.image_topic = "/robotic_platform/" + image_topic + self.image_topic = image_topic self.camera_info_topic = camera_info_topic self.bridge = bridge self.tf_broadcaster = broadcaster @@ -710,25 +679,18 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c self.aruco_dict = aruco_params.aruco_dict self.parameters = cv2.aruco.DetectorParameters() self.detector = cv2.aruco.ArucoDetector(self.aruco_dict, self.parameters) - self.marker_length = aruco_params.marker_length - self.board_mode = aruco_params.board_mode - self.board = aruco_params.board # May be None if single marker mode - self.grid_first_marker_id = getattr(aruco_params, 'grid_first_marker_id', 0) + self.marker_length = aruco_params.marker_length # length of the marker side in meters (adjust as needed) # Subscribe to the camera image topic and camera info self.image_sub = self.node.create_subscription(Image, 
image_topic, self.image_callback, 1) self.camera_info_sub = self.node.create_subscription(CameraInfo, camera_info_topic, self.camera_info_callback, 1) self.cv2_image_publisher = self.node.create_publisher(Image, f"{image_topic}/detected_markers", 10) - - self.node.get_logger().info(f"Camera {self.camera_name} subscribed to {image_topic} and {camera_info_topic} topics.") - #self.node.get_logger().info(f"Camera matrix and {self.camera_matrix}, dist coeffs {self.dist_coeffs}") - + self.marker_transforms = {} self.reliable_marker_transforms = {} def camera_info_callback(self, msg): - #self.node.get_logger().info(f"Camera {self.camera_name} camera info received: {msg.k}, {msg.d}.") if self.camera_matrix is None: self.camera_matrix = np.array(msg.k).reshape((3, 3)) self.dist_coeffs = np.array(msg.d) @@ -743,89 +705,56 @@ def image_callback(self, msg): cv_image = self.bridge.imgmsg_to_cv2(msg, "bgr8") # For ArUco detection, you can use the filtered_image directly - corners, ids, _ = self.detector.detectMarkers(cv_image) - - self.node.get_logger().info(f"Camera {self.camera_name}: Detected {len(corners)} markers.") - - # Draw detected markers always - if len(corners) > 0: - cv2.aruco.drawDetectedMarkers(cv_image, corners, ids) - + corners, ids, rejected_img_points = self.detector.detectMarkers(cv_image) detected_ids = set() - if ids is not None and len(corners) > 0: - if self.board_mode == 'grid_board' and self.board is not None: - # Estimate board pose (uses subset of visible markers) - retval, rvec, tvec = cv2.aruco.estimatePoseBoard(corners, ids, self.board, self.camera_matrix, self.dist_coeffs, None, None) - if retval > 0: + if ids is not None: + for i, id in enumerate(ids): + marker_id = id[0] + detected_ids.add(marker_id) + + if marker_id not in self.marker_transforms and marker_id not in self.reliable_marker_transforms: + self.marker_transforms[marker_id] = deque(maxlen=30) + + objPoints = np.array([ [-self.marker_length/2, self.marker_length/2, 0], + 
[self.marker_length/2, self.marker_length/2, 0], + [self.marker_length/2, -self.marker_length/2, 0], + [-self.marker_length/2,-self.marker_length/2, 0]], dtype=np.float32) + + success, rvec, tvec = cv2.solvePnP(objPoints, corners[i], self.camera_matrix, self.dist_coeffs) + if success: rot_matrix, _ = cv2.Rodrigues(rvec) - board_transform = np.eye(4) - board_transform[:3, :3] = rot_matrix - board_transform[:3, 3] = tvec.flatten() - # Publish axes for board origin (use half marker length for axis size) + translation_matrix = np.eye(4) + translation_matrix[:3, :3] = rot_matrix + translation_matrix[:3, 3] = tvec.flatten() + + # Draw the transform + cv2.aruco.drawDetectedMarkers(cv_image, corners, ids) cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.marker_length/2) - - # Collect transforms for each visible marker id - for i, id_val in enumerate(ids): - marker_id = int(id_val[0]) - detected_ids.add(marker_id) - if marker_id not in self.marker_transforms and marker_id not in self.reliable_marker_transforms: - self.marker_transforms[marker_id] = deque(maxlen=30) - # local center of marker: average of its object points in board coordinates - local_index = marker_id - self.grid_first_marker_id - if 0 <= local_index < len(self.board.objPoints): - obj_pts = np.array(self.board.objPoints[local_index], dtype=np.float32).reshape(-1, 3) - local_center = obj_pts.mean(axis=0) - # world center = R * local_center + t - world_center = rot_matrix @ local_center + tvec.flatten() - marker_transform = np.eye(4) - marker_transform[:3, :3] = rot_matrix - marker_transform[:3, 3] = world_center - if marker_id not in self.reliable_marker_transforms: - self.marker_transforms[marker_id].append(marker_transform) - else: - self.node.get_logger().warn(f"Camera {self.camera_name}: Board pose not estimated (retval={retval}).") - else: - # Single marker mode identical to previous logic - for i, id_val in enumerate(ids): - marker_id = int(id_val[0]) - 
detected_ids.add(marker_id) - if marker_id not in self.marker_transforms and marker_id not in self.reliable_marker_transforms: - self.marker_transforms[marker_id] = deque(maxlen=30) - objPoints = np.array([ - [-self.marker_length/2, self.marker_length/2, 0], - [ self.marker_length/2, self.marker_length/2, 0], - [ self.marker_length/2,-self.marker_length/2, 0], - [-self.marker_length/2,-self.marker_length/2, 0] - ], dtype=np.float32) - success, rvec, tvec = cv2.solvePnP(objPoints, corners[i], self.camera_matrix, self.dist_coeffs) - if success: - rot_matrix, _ = cv2.Rodrigues(rvec) - translation_matrix = np.eye(4) - translation_matrix[:3, :3] = rot_matrix - translation_matrix[:3, 3] = tvec.flatten() - cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.marker_length/2) - if marker_id not in self.reliable_marker_transforms: - self.marker_transforms[marker_id].append(translation_matrix) - - # Publish annotated image - ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8") - self.cv2_image_publisher.publish(ros_image) - - # Iterate for precision checks - if len(self.marker_transforms) > 0: - self.node.get_logger().info( - f"Camera {self.camera_name}: Marker transforms collected: " + ", ".join([ - f"Marker {mid}: {len(transforms)}" for mid, transforms in self.marker_transforms.items() - ]) - ) - for marker_id, transforms in list(self.marker_transforms.items()): - if len(transforms) == transforms.maxlen: - self.check_precision(marker_id, transforms) - - # Remove reliable markers from accumulation dict - for marker_id in list(self.reliable_marker_transforms.keys()): - if marker_id in self.marker_transforms: - del self.marker_transforms[marker_id] + ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8") + self.cv2_image_publisher.publish(ros_image) + + # Filter out the already reliable markers + if marker_id in self.reliable_marker_transforms: + continue + else: + self.marker_transforms[marker_id].append(translation_matrix) + + # Add None 
for markers not detected in this frame + for marker_id in self.marker_transforms: + if marker_id not in detected_ids: + # Restart the precision of the marker if not seen + # self.marker_transforms[marker_id].append(None) + pass + + # iterate through each marker of the marker_transforms dictionary + for marker_id, transforms in self.marker_transforms.items(): + if len(transforms) == 30: + self.check_precision(marker_id, transforms) + + # delete all the transforms from the marker_transforms dictionary + for marker_id, transform in self.reliable_marker_transforms.items(): + if marker_id in self.marker_transforms: + del self.marker_transforms[marker_id] def check_precision(self, marker_id, transform): @@ -850,8 +779,6 @@ def is_precise(self, transforms): angles.append(angle) rotation_range = np.max(angles) - self.node.get_logger().info(f"Camera {self.camera_name}: Position range: {position_range}, Rotation range (radians): {rotation_range}") - return np.all(position_range < 0.01) and np.all(rotation_range < np.radians(1)) else: return False From 17f76a4f28608abf24e00776725b1ef049bdd527 Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Wed, 22 Oct 2025 11:14:44 +0200 Subject: [PATCH 05/17] parameter print --- .../src/extrinsic_calibrator_class.py | 22 ++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py index c3ab44c..decbea1 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py @@ -655,8 +655,28 @@ def __init__(self, node:Node, aruco_params): self.aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_params.aruco_dict)) else: node.get_logger().error(f"cv2.aruco doesn't have a dictionary with the name 
'{aruco_params.aruco_dict}'") + self.marker_length = aruco_params.marker_length - + self.board_mode = aruco_params.board_mode + self.grid_rows = aruco_params.grid_rows + self.grid_cols = aruco_params.grid_cols + self.grid_marker_separation = aruco_params.grid_marker_separation + self.grid_first_marker_id = aruco_params.grid_first_marker_id + self.grid_image_size_px = aruco_params.grid_image_size_px + self.generate_individual_markers = aruco_params.generate_individual_markers + + node.get_logger().info( + f"Aruco parameters set:\n" + f" Dictionary: {aruco_params.aruco_dict}\n" + f" Marker Length: {self.marker_length}\n" + f" Board Mode: {self.board_mode}\n" + f" Grid Rows: {self.grid_rows}\n" + f" Grid Cols: {self.grid_cols}\n" + f" Marker Separation: {self.grid_marker_separation}\n" + f" Grid First Marker ID: {self.grid_first_marker_id}\n" + f" Grid Image Size (px): {self.grid_image_size_px}\n" + f" Generate Individual Markers: {self.generate_individual_markers}" + ) class Camera(): From 8962bdaae8a2b4806b8c1158bb5367ba719de339 Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Thu, 23 Oct 2025 10:24:08 +0200 Subject: [PATCH 06/17] Board pose estimation --- .../src/extrinsic_calibrator_class.py | 132 +++++++++++------- extrinsic_calibrator_core/requirments.txt | 2 +- 2 files changed, 85 insertions(+), 49 deletions(-) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py index decbea1..d356282 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py @@ -85,7 +85,8 @@ def __init__(self): self.imported_cameras_params = cameras_param_listener.get_params() camera_names = list(self.imported_cameras_params.camera_names) - + self.get_logger().info(f"Detected camera names from parameters: {camera_names}") + # 
construct the cameras self.array_of_cameras = [] for idx, cam_name in enumerate(camera_names): @@ -677,6 +678,28 @@ def __init__(self, node:Node, aruco_params): f" Grid Image Size (px): {self.grid_image_size_px}\n" f" Generate Individual Markers: {self.generate_individual_markers}" ) + + self.board = None + if self.board_mode == "grid_board": + self.board = cv2.aruco.GridBoard( + (self.grid_cols, self.grid_rows), + self.marker_length, + self.grid_marker_separation, + self.aruco_dict, + ) + node.get_logger().info( + f"Calibrator running in grid board mode:\n" + f" Dictionary: {self.board.getDictionary().markerSize}X{self.board.getDictionary().markerSize}\n" + f" Marker Length: {self.board.getMarkerLength()}\n" + f" Grid Size: {self.board.getGridSize()}\n" + f" Ids: {self.board.getIds()}\n" + f" Marker Separation: {self.board.getMarkerSeparation()}\n" + ) + else: + node.get_logger().error(f"Calibrator runing in single mode") + + #TODO delete unused parameters + class Camera(): @@ -685,7 +708,7 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c self.node = node self.camera_name = camera_name self.camera_id = camera_id - self.image_topic = image_topic + self.image_topic = "/robotic_platform/" + image_topic self.camera_info_topic = camera_info_topic self.bridge = bridge self.tf_broadcaster = broadcaster @@ -700,12 +723,17 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c self.parameters = cv2.aruco.DetectorParameters() self.detector = cv2.aruco.ArucoDetector(self.aruco_dict, self.parameters) self.marker_length = aruco_params.marker_length # length of the marker side in meters (adjust as needed) + self.board_mode = aruco_params.board_mode + self.board = aruco_params.board # May be None if single marker mode + self.grid_first_marker_id = aruco_params.grid_first_marker_id # Subscribe to the camera image topic and camera info self.image_sub = self.node.create_subscription(Image, image_topic, 
self.image_callback, 1) self.camera_info_sub = self.node.create_subscription(CameraInfo, camera_info_topic, self.camera_info_callback, 1) self.cv2_image_publisher = self.node.create_publisher(Image, f"{image_topic}/detected_markers", 10) + self.node.get_logger().info(f"Camera {self.camera_name} subscribed to {image_topic} and {camera_info_topic} topics.") + self.marker_transforms = {} self.reliable_marker_transforms = {} @@ -725,56 +753,65 @@ def image_callback(self, msg): cv_image = self.bridge.imgmsg_to_cv2(msg, "bgr8") # For ArUco detection, you can use the filtered_image directly - corners, ids, rejected_img_points = self.detector.detectMarkers(cv_image) + corners, ids, rejected = self.detector.detectMarkers(cv_image) + + # Visualize marker detection + if ids is not None and len(ids) > 0: + cv2.aruco.drawDetectedMarkers(cv_image, corners, ids) + detected_ids = set() - if ids is not None: - for i, id in enumerate(ids): - marker_id = id[0] - detected_ids.add(marker_id) - - if marker_id not in self.marker_transforms and marker_id not in self.reliable_marker_transforms: - self.marker_transforms[marker_id] = deque(maxlen=30) - objPoints = np.array([ [-self.marker_length/2, self.marker_length/2, 0], - [self.marker_length/2, self.marker_length/2, 0], - [self.marker_length/2, -self.marker_length/2, 0], - [-self.marker_length/2,-self.marker_length/2, 0]], dtype=np.float32) + if ids is not None and len(corners) > 0: + if self.board is not None: # board mode + success, rvec, tvec = cv2.aruco.estimatePoseBoard( + corners, ids, self.board, self.camera_matrix, self.dist_coeffs, None, None + ) + + if self.camera_name == "camera_centre_calibration_frame": + self.node.get_logger().info(f"Camera {self.camera_name}: rvec: {rvec}, tvec: {tvec}") - success, rvec, tvec = cv2.solvePnP(objPoints, corners[i], self.camera_matrix, self.dist_coeffs) - if success: - rot_matrix, _ = cv2.Rodrigues(rvec) - translation_matrix = np.eye(4) - translation_matrix[:3, :3] = rot_matrix - 
translation_matrix[:3, 3] = tvec.flatten() + # visualize the board pose + if success > 0: + cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.board.getMarkerLength()/2) + + + else: # Single marker mode + for i, id in enumerate(ids): + marker_id = id[0] + detected_ids.add(marker_id) - # Draw the transform - cv2.aruco.drawDetectedMarkers(cv_image, corners, ids) - cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.marker_length/2) - ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8") - self.cv2_image_publisher.publish(ros_image) + if marker_id not in self.marker_transforms and marker_id not in self.reliable_marker_transforms: + self.marker_transforms[marker_id] = deque(maxlen=30) + + objPoints = np.array([ [-self.marker_length/2, self.marker_length/2, 0], + [self.marker_length/2, self.marker_length/2, 0], + [self.marker_length/2, -self.marker_length/2, 0], + [-self.marker_length/2,-self.marker_length/2, 0]], dtype=np.float32) + + success, rvec, tvec = cv2.solvePnP(objPoints, corners[i], self.camera_matrix, self.dist_coeffs) + if success: + rot_matrix, _ = cv2.Rodrigues(rvec) + translation_matrix = np.eye(4) + translation_matrix[:3, :3] = rot_matrix + translation_matrix[:3, 3] = tvec.flatten() + + cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.marker_length/2) + if marker_id not in self.reliable_marker_transforms: + self.marker_transforms[marker_id].append(translation_matrix) + + ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8") + self.cv2_image_publisher.publish(ros_image) - # Filter out the already reliable markers - if marker_id in self.reliable_marker_transforms: - continue - else: - self.marker_transforms[marker_id].append(translation_matrix) - - # Add None for markers not detected in this frame - for marker_id in self.marker_transforms: - if marker_id not in detected_ids: - # Restart the precision of the marker if not seen - # 
self.marker_transforms[marker_id].append(None) - pass - # iterate through each marker of the marker_transforms dictionary - for marker_id, transforms in self.marker_transforms.items(): - if len(transforms) == 30: - self.check_precision(marker_id, transforms) + # iterate through each marker of the marker_transforms dictionary + for marker_id, transforms in self.marker_transforms.items(): + if len(transforms) == 30: + self.check_precision(marker_id, transforms) - # delete all the transforms from the marker_transforms dictionary - for marker_id, transform in self.reliable_marker_transforms.items(): - if marker_id in self.marker_transforms: - del self.marker_transforms[marker_id] + # delete all the transforms from the marker_transforms dictionary + for marker_id, transform in self.reliable_marker_transforms.items(): + if marker_id in self.marker_transforms: + del self.marker_transforms[marker_id] def check_precision(self, marker_id, transform): @@ -805,11 +842,10 @@ def is_precise(self, transforms): def are_all_transforms_precise(self): + self.node.get_logger().info(f"Camera {self.camera_name}: Reliable markers: {list(self.reliable_marker_transforms.keys())}") + self.node.get_logger().info(f"Camera {self.camera_name}: Not Reliable markers: {list(self.marker_transforms.keys())}") if len(self.reliable_marker_transforms) > 0 and len(self.marker_transforms) == 0: self.node.get_logger().info(f"Camera {self.camera_name}: All markers are reliable") return True else: - for marker_id, transform in self.marker_transforms.items(): - if marker_id not in self.reliable_marker_transforms.keys(): - self.node.get_logger().warn(f"Camera {self.camera_name}: Marker {marker_id} is not reliable, yet") return False \ No newline at end of file diff --git a/extrinsic_calibrator_core/requirments.txt b/extrinsic_calibrator_core/requirments.txt index facc113..71435c8 100644 --- a/extrinsic_calibrator_core/requirments.txt +++ b/extrinsic_calibrator_core/requirments.txt @@ -1,6 +1,6 @@ 
joblib==1.5.2 numpy==2.2.6 -opencv-python==4.12.0.88 +opencv-contrib-python==4.12.0.88 prettytable==3.16.0 scikit-learn==1.7.1 scipy==1.15.3 From edab9d791b63444237d7975fad950d0694f46abd Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Thu, 23 Oct 2025 12:08:01 +0200 Subject: [PATCH 07/17] remove usused parameters --- .../config/aruco_parameters.yaml | 12 +----------- .../src/extrinsic_calibrator_class.py | 7 ------- 2 files changed, 1 insertion(+), 18 deletions(-) diff --git a/extrinsic_calibrator_core/config/aruco_parameters.yaml b/extrinsic_calibrator_core/config/aruco_parameters.yaml index 89dc93f..3327e9c 100644 --- a/extrinsic_calibrator_core/config/aruco_parameters.yaml +++ b/extrinsic_calibrator_core/config/aruco_parameters.yaml @@ -22,14 +22,4 @@ aruco_params: grid_marker_separation: type: double default_value: 0.04 - description: "Separation between markers in the grid board (meters). See 'b' on the image there - https://github.com/ethz-asl/kalibr/wiki/calibration-targets" - grid_first_marker_id: - type: int - default_value: 0 - description: "ID of the first marker in the grid board. Every marker has an unique ID." - grid_image_size_px: # TODO - check it - type: int - default_value: 1200 - generate_individual_markers: # TODO - check it - type: bool - default_value: false \ No newline at end of file + description: "Separation between markers in the grid board (meters). 
See 'b' on the image there - https://github.com/ethz-asl/kalibr/wiki/calibration-targets" \ No newline at end of file diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py index d356282..919aea6 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py @@ -662,9 +662,6 @@ def __init__(self, node:Node, aruco_params): self.grid_rows = aruco_params.grid_rows self.grid_cols = aruco_params.grid_cols self.grid_marker_separation = aruco_params.grid_marker_separation - self.grid_first_marker_id = aruco_params.grid_first_marker_id - self.grid_image_size_px = aruco_params.grid_image_size_px - self.generate_individual_markers = aruco_params.generate_individual_markers node.get_logger().info( f"Aruco parameters set:\n" @@ -674,9 +671,6 @@ def __init__(self, node:Node, aruco_params): f" Grid Rows: {self.grid_rows}\n" f" Grid Cols: {self.grid_cols}\n" f" Marker Separation: {self.grid_marker_separation}\n" - f" Grid First Marker ID: {self.grid_first_marker_id}\n" - f" Grid Image Size (px): {self.grid_image_size_px}\n" - f" Generate Individual Markers: {self.generate_individual_markers}" ) self.board = None @@ -725,7 +719,6 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c self.marker_length = aruco_params.marker_length # length of the marker side in meters (adjust as needed) self.board_mode = aruco_params.board_mode self.board = aruco_params.board # May be None if single marker mode - self.grid_first_marker_id = aruco_params.grid_first_marker_id # Subscribe to the camera image topic and camera info self.image_sub = self.node.create_subscription(Image, image_topic, self.image_callback, 1) From d66705e3daf8fa3af352aad4574de819c8dd7e65 Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: 
Thu, 23 Oct 2025 12:33:55 +0000 Subject: [PATCH 08/17] Created script to generate aruco grids, working grid based calibration --- .../CMakeLists.txt | 0 .../package.xml | 0 .../src/extrinsic_calibrator_class.py | 41 +++++++++-------- generate_aruco_grid.py | 45 +++++++++++++++++++ 4 files changed, 67 insertions(+), 19 deletions(-) rename {extrinsic_calibrator_board => extrinsic_calibrator}/CMakeLists.txt (100%) rename {extrinsic_calibrator_board => extrinsic_calibrator}/package.xml (100%) create mode 100644 generate_aruco_grid.py diff --git a/extrinsic_calibrator_board/CMakeLists.txt b/extrinsic_calibrator/CMakeLists.txt similarity index 100% rename from extrinsic_calibrator_board/CMakeLists.txt rename to extrinsic_calibrator/CMakeLists.txt diff --git a/extrinsic_calibrator_board/package.xml b/extrinsic_calibrator/package.xml similarity index 100% rename from extrinsic_calibrator_board/package.xml rename to extrinsic_calibrator/package.xml diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py index 919aea6..92ec2ac 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py @@ -690,9 +690,7 @@ def __init__(self, node:Node, aruco_params): f" Marker Separation: {self.board.getMarkerSeparation()}\n" ) else: - node.get_logger().error(f"Calibrator runing in single mode") - - #TODO delete unused parameters + node.get_logger().error(f"Calibrator running in single markers mode") @@ -746,7 +744,7 @@ def image_callback(self, msg): cv_image = self.bridge.imgmsg_to_cv2(msg, "bgr8") # For ArUco detection, you can use the filtered_image directly - corners, ids, rejected = self.detector.detectMarkers(cv_image) + corners, ids, _ = self.detector.detectMarkers(cv_image) # Visualize marker detection if ids is not None and 
len(ids) > 0: @@ -759,14 +757,20 @@ def image_callback(self, msg): success, rvec, tvec = cv2.aruco.estimatePoseBoard( corners, ids, self.board, self.camera_matrix, self.dist_coeffs, None, None ) - - if self.camera_name == "camera_centre_calibration_frame": - self.node.get_logger().info(f"Camera {self.camera_name}: rvec: {rvec}, tvec: {tvec}") # visualize the board pose if success > 0: - cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.board.getMarkerLength()/2) + cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.board.getMarkerLength() * 2) + if 0 not in self.marker_transforms and 0 not in self.reliable_marker_transforms: + self.marker_transforms[0] = deque(maxlen=30) + + if 0 not in self.reliable_marker_transforms: + rot_matrix, _ = cv2.Rodrigues(rvec) + translation_matrix = np.eye(4) + translation_matrix[:3, :3] = rot_matrix + translation_matrix[:3, 3] = tvec.flatten() + self.marker_transforms[0].append(translation_matrix) else: # Single marker mode for i, id in enumerate(ids): @@ -791,20 +795,19 @@ def image_callback(self, msg): cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.marker_length/2) if marker_id not in self.reliable_marker_transforms: self.marker_transforms[marker_id].append(translation_matrix) + + # iterate through each marker of the marker_transforms dictionary + for marker_id, transforms in self.marker_transforms.items(): + if len(transforms) == 30: + self.check_precision(marker_id, transforms) + + # delete all the transforms from the marker_transforms dictionary + for marker_id, _ in self.reliable_marker_transforms.items(): + if marker_id in self.marker_transforms: + del self.marker_transforms[marker_id] ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8") self.cv2_image_publisher.publish(ros_image) - - - # iterate through each marker of the marker_transforms dictionary - for marker_id, transforms in self.marker_transforms.items(): - if 
len(transforms) == 30: - self.check_precision(marker_id, transforms) - - # delete all the transforms from the marker_transforms dictionary - for marker_id, transform in self.reliable_marker_transforms.items(): - if marker_id in self.marker_transforms: - del self.marker_transforms[marker_id] def check_precision(self, marker_id, transform): diff --git a/generate_aruco_grid.py b/generate_aruco_grid.py new file mode 100644 index 0000000..ca30c0e --- /dev/null +++ b/generate_aruco_grid.py @@ -0,0 +1,45 @@ +import cv2 +import argparse + +def generate_aruco_grid(output_path, grid_cols, grid_rows, grid_marker_separation, marker_length, aruco_dict, pixels_per_meter): + + aruco_dictonary = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_dict)) + + board = cv2.aruco.GridBoard( + (grid_cols, grid_rows), + marker_length, + grid_marker_separation, + aruco_dictonary, + ) + + board_width = grid_cols * marker_length + (grid_cols - 1) * grid_marker_separation + board_height = grid_rows * marker_length + (grid_rows - 1) * grid_marker_separation + + width_px = int(board_width * pixels_per_meter) + height_px = int(board_height * pixels_per_meter) + + img = board.generateImage((width_px, height_px)) + + cv2.imwrite(output_path, img) + print(f"Aruco grid saved to {output_path}") + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate an ArUco grid board image.") + parser.add_argument("--output", type=str, default="aruco_grid.png", help="Output image file path") + parser.add_argument("--grid_cols", type=int, default=5, help="Number of squares in X direction") + parser.add_argument("--grid_rows", type=int, default=5, help="Number of squares in Y direction") + parser.add_argument("--grid_marker_separation", type=float, default=0.025, help="Square length in meters") + parser.add_argument("--marker_length", type=float, default=0.1, help="Marker length in meters") + parser.add_argument("--aruco_dict", type=str, default="DICT_6X6_1000", help="ArUco 
dictionary name") + parser.add_argument("--ppm", type=int, default=10000, help="Pixels per meter") + args = parser.parse_args() + + generate_aruco_grid( + args.output, + args.grid_cols, + args.grid_rows, + args.grid_marker_separation, + args.marker_length, + args.aruco_dict, + args.ppm + ) \ No newline at end of file From 53544a1829c82fdfaba32da81f633a0cd555e998 Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Thu, 23 Oct 2025 14:59:34 +0200 Subject: [PATCH 09/17] revert not needed changes --- .../src/aruco_generator_class.py | 59 +++++++------------ 1 file changed, 20 insertions(+), 39 deletions(-) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/aruco_generator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/aruco_generator_class.py index 2daeb6e..1516d87 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/aruco_generator_class.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/aruco_generator_class.py @@ -54,7 +54,7 @@ from rclpy.node import Node # Custom parameters -from extrinsic_calibrator_core.python_aruco_parameters import ParamListener as ArucoParamListener +from extrinsic_calibrator_core.python_aruco_parameters import aruco_params from extrinsic_calibrator_core.src.extrinsic_calibrator_class import ArucoParams @@ -64,52 +64,32 @@ def __init__(self): super().__init__('aruco_marker_generator') self.get_logger().info("Aruco Marker Generator Node has started.") - aruco_params_listener = ArucoParamListener(self) + aruco_params_listener = aruco_params.ParamListener(self) imported_aruco_params = aruco_params_listener.get_params() self.real_aruco_params = ArucoParams(self,imported_aruco_params) - # Parameters for generation (pixel size of individual marker when single mode) + # Parameters for marker generation self.declare_parameter('marker_size', 200) self.declare_parameter('output_directory', '~/markers') marker_size = self.get_parameter('marker_size').value output_directory = 
os.path.expanduser(self.get_parameter('output_directory').value) - # Check or create output directory + # Check if output directory exists, create if it doesn't if not os.path.exists(output_directory): os.makedirs(output_directory) self.get_logger().info(f"Created output directory: {output_directory}") else: self.get_logger().info(f"Output directory: {output_directory} already exists") - - if self.real_aruco_params.board_mode == 'grid_board' and self.real_aruco_params.board is not None: - # Generate a single grid board image - board_px = getattr(self.real_aruco_params, 'grid_image_size_px', 1200) - margin_px = int(board_px * 0.02) - self.get_logger().info(f"Generating ArUco Grid Board ({self.real_aruco_params.grid_cols}x{self.real_aruco_params.grid_rows}) -> {board_px}px") - board_image = self.real_aruco_params.board.generateImage((board_px, board_px), margin_px, 1) - rotated_board = cv2.rotate(board_image, cv2.ROTATE_180) - output_path = os.path.join(output_directory, 'aruco_grid_board.png') - cv2.imwrite(output_path, rotated_board) - self.get_logger().info(f"Grid board image saved to {output_path}") - - # Optionally also dump individual marker images - if getattr(self.real_aruco_params, 'generate_individual_markers', False): - num_markers = self.real_aruco_params.grid_rows * self.real_aruco_params.grid_cols - first_id = self.real_aruco_params.grid_first_marker_id - for local_idx in range(num_markers): - marker_id = first_id + local_idx - if not self.generate_marker(marker_id, marker_size, output_directory, self.real_aruco_params): - self.get_logger().warn(f"Failed to generate individual marker {marker_id}") - self.get_logger().info("ArUco grid generation finished successfully. 
Hit Ctrl+C to exit") - else: - # Single marker mode: generate all markers from dictionary - num_markers = len(self.real_aruco_params.aruco_dict.bytesList) - for marker_id in range(0, num_markers): - if not self.generate_marker(marker_id, marker_size, output_directory, self.real_aruco_params): - self.get_logger().info("ArUco generation failed") - return False - self.get_logger().info("ArUco marker set generation finished successfully. Hit Ctrl+C to exit") + + # Generate all markers from aruco_dict + num_markers = len(self.real_aruco_params.aruco_dict.bytesList) + for marker_id in range(0, num_markers): + if not self.generate_marker(marker_id, marker_size, output_directory, self.real_aruco_params): + self.get_logger().info("ArUco generation failed") + return False + + self.get_logger().info("ArUco generation finished successfully. Hit Ctrl+C to exit") while(1): pass @@ -119,15 +99,16 @@ def __init__(self): def generate_marker(self, marker_id, marker_size, output_directory, aruco_params:ArucoParams): - try: - marker_image = cv2.aruco.generateImageMarker(aruco_params.aruco_dict, marker_id, marker_size) - except Exception as e: - self.get_logger().error(f"Error generating marker {marker_id}: {e}") - return False + # Generate the marker image + marker_image = cv2.aruco.generateImageMarker(aruco_params.aruco_dict, marker_id, marker_size) + # Rotate the image 180 degrees rotated_marker = cv2.rotate(marker_image, cv2.ROTATE_180) + + # Save the rotated marker image to the specified directory output_path = os.path.join(output_directory, f'aruco_marker_{marker_id}.png') cv2.imwrite(output_path, rotated_marker) + # self.get_logger().info(f'Marker with ID {marker_id} generated and saved to {output_path}') return True - + \ No newline at end of file From ac449259d4d30ad10d52104eb469719c0c5b9efd Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Thu, 23 Oct 2025 15:01:02 +0200 Subject: [PATCH 10/17] revert not needed changes --- extrinsic_calibrator_examples/CMakeLists.txt | 38 
++++ extrinsic_calibrator_examples/README.md | 191 ++++++++++++++++++ .../config/d435.yaml | 23 +++ .../config/d435_intrinsics.yaml | 20 ++ .../config/l515.yaml | 23 +++ .../config/l515_intrinsics.yaml | 20 ++ .../extrinsic_calibrator_examples/__init__.py | 0 .../launch_extrinsic_calibrator.launch.py | 80 ++++++++ .../launch/launch_rviz.launch.py | 67 ++++++ .../launch/launch_usb_cameras.launch.py | 76 +++++++ extrinsic_calibrator_examples/license.md | 140 +++++++++++++ extrinsic_calibrator_examples/package.xml | 38 ++++ .../rviz/extrinsic.rviz | 188 +++++++++++++++++ .../scripts/__init__.py | 0 14 files changed, 904 insertions(+) create mode 100644 extrinsic_calibrator_examples/CMakeLists.txt create mode 100644 extrinsic_calibrator_examples/README.md create mode 100644 extrinsic_calibrator_examples/config/d435.yaml create mode 100644 extrinsic_calibrator_examples/config/d435_intrinsics.yaml create mode 100644 extrinsic_calibrator_examples/config/l515.yaml create mode 100644 extrinsic_calibrator_examples/config/l515_intrinsics.yaml create mode 100644 extrinsic_calibrator_examples/extrinsic_calibrator_examples/__init__.py create mode 100644 extrinsic_calibrator_examples/launch/launch_extrinsic_calibrator.launch.py create mode 100644 extrinsic_calibrator_examples/launch/launch_rviz.launch.py create mode 100644 extrinsic_calibrator_examples/launch/launch_usb_cameras.launch.py create mode 100644 extrinsic_calibrator_examples/license.md create mode 100644 extrinsic_calibrator_examples/package.xml create mode 100644 extrinsic_calibrator_examples/rviz/extrinsic.rviz create mode 100644 extrinsic_calibrator_examples/scripts/__init__.py diff --git a/extrinsic_calibrator_examples/CMakeLists.txt b/extrinsic_calibrator_examples/CMakeLists.txt new file mode 100644 index 0000000..68afc9e --- /dev/null +++ b/extrinsic_calibrator_examples/CMakeLists.txt @@ -0,0 +1,38 @@ +cmake_minimum_required(VERSION 3.8) +project(extrinsic_calibrator_examples) + +if(NOT CMAKE_CXX_STANDARD) + 
  set(CMAKE_CXX_STANDARD 14)
+endif()
+
+# DEPENDENCIES
+
+# Find packages
+find_package(ament_cmake REQUIRED)
+find_package(ament_cmake_python REQUIRED)
+find_package(rclcpp REQUIRED)
+find_package(rclpy REQUIRED)
+find_package(cv_bridge REQUIRED)
+find_package(extrinsic_calibrator_core REQUIRED)
+find_package(rviz2 REQUIRED)
+find_package(sensor_msgs REQUIRED)
+find_package(std_msgs REQUIRED)
+find_package(tf2_ros REQUIRED)
+
+find_package(ament_cmake_auto REQUIRED)
+ament_auto_find_build_dependencies()
+
+# COPY A PARTICULAR FOLDER TO THE INSTALL DIRECTORY
+# Install config dependencies
+install(
+  DIRECTORY
+    config
+    rviz
+  DESTINATION
+    share/${PROJECT_NAME}
+)
+
+
+# LAUNCH
+# Install launchfile
+ament_auto_package(INSTALL_TO_SHARE launch)
\ No newline at end of file
diff --git a/extrinsic_calibrator_examples/README.md b/extrinsic_calibrator_examples/README.md
new file mode 100644
index 0000000..07ba2a5
--- /dev/null
+++ b/extrinsic_calibrator_examples/README.md
@@ -0,0 +1,191 @@
+# extrinsic_calibrator_examples
+
+## Overview
+
+`extrinsic_calibrator_examples` is a ROS2 package designed to provide examples on how to use the `extrinsic_calibrator_core` package as well as useful ros2 launch files to launch the cameras, the calibrator, as well as a demonstration rviz file.
+
+## Features
+
+- Launch file to launch a set of cameras using the `usb_camera` package as well as the corresponding set of config files to configure the cameras.
+- Launch file to launch the rviz file to visualize the markers and the camera frames as well as the rviz file to configure it.
+- Launch file to launch all the previous, as well as the calibrator, these being the set of cameras, the rviz visualizer and the calibrator itself.
+
+## Configuration
+
+The package provides configuration options through YAML files.
+ +### Camera configuration + +Here you have an example configuration file `l515.yaml` file to configure the camera according to the `usb_camera` package, as well as the corresponding intrinsic calibration file. + +```yaml +/**: + ros__parameters: + video_device: "/dev/video12" # "ffplay /dev/video12" to test + framerate: 6.0 + io_method: "mmap" + frame_id: "cam2_frame" + pixel_format: "yuyv" # see usb_cam/supported_formats for list of supported formats + av_device_format: "YUV422P" + image_width: 640 + image_height: 480 + camera_name: "cam2" + camera_info_url: "package://extrinsic_calibrator_examples/config/l515_intrinsics.yaml" + brightness: -1 + contrast: -1 + saturation: -1 + sharpness: -1 + gain: -1 + auto_white_balance: true + white_balance: 4000 + autoexposure: true + exposure: 100 + autofocus: false + focus: -1 +``` + +Don't forget to modify the parameter `camera_info_url` to properly link the camera configuration to the intrinsic calibration file. + +```yaml +image_width: 640 +image_height: 480 +camera_name: "cam2" +camera_matrix: + rows: 3 + cols: 3 + data: [607.4058837890625, 0.0, 325.59991455078125, 0.0, 607.5341186523438, 247.25904846191406, 0.0, 0.0, 1.0] +distortion_model: "plumb_bob" +distortion_coefficients: + rows: 1 + cols: 5 + data: [0.19551624357700348, -0.5865326523780823, -0.002620677463710308, 0.0008374004391953349, 0.5133219957351685] +rectification_matrix: + rows: 3 + cols: 3 + data: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0] +projection_matrix: + rows: 3 + cols: 4 + data: [607.4058837890625, 0.0, 325.59991455078125, 0.0, 0.0, 607.5341186523438, 247.25904846191406, 0.0, 0.0, 0.0, 1.0, 0.0] + +``` + +In case you want to launch more cameras with the same launch file, simply add them as additional nodes in the launch file `launch_usb_cameras.launch.py`: + +```py +d435_config = os.path.join(config_dir, 'd435.yaml') +l515_config = os.path.join(config_dir, 'l515.yaml') +# d457_config = os.path.join(config_dir, 'd457.yaml') + +return 
LaunchDescription([
+    Node(
+        package='usb_cam',
+        executable='usb_cam_node_exe',
+        name='d435_camera',
+        namespace='camera_1',
+        parameters=[d435_config],
+        output='screen'
+    ),
+    Node(
+        package='usb_cam',
+        executable='usb_cam_node_exe',
+        name='l515_camera',
+        namespace='camera_2',
+        parameters=[l515_config],
+        output='screen'
+    ),
+    # Node(
+    #     package='usb_cam',
+    #     executable='usb_cam_node_exe',
+    #     name='d457_camera',
+    #     namespace='camera_3',
+    #     parameters=[d457_config],
+    #     output='screen'
+    # ),
+])
+
+```
+
+## Usage
+
+### Launching the Cameras
+
+Using the `usb_cam` package for your camera streams, you can launch the set of all cameras using:
+```sh
+ros2 launch extrinsic_calibrator_examples launch_usb_cameras.launch.py
+```
+
+### Launching the rviz visualizer
+
+An example rviz config file is provided which includes displays for the `/camera_1/image_raw/detected_markers` topic and the `/camera_2/image_raw/detected_markers` topic as well as the tf2 display of the found markers and cameras. 
To launch it, use:
+```sh
+ros2 launch extrinsic_calibrator_examples launch_rviz.launch.py
+```
+
+### Launching Both Cameras and the Calibrator
+
+To simultaneously launch the cameras, the rviz visualizer and the extrinsic calibrator, use:
+```sh
+ros2 launch extrinsic_calibrator_examples launch_extrinsic_calibrator.launch.py
+```
+
+
+## Dependencies
+
+The package relies on the following libraries and ROS2 packages:
+
+- `extrinsic_calibrator_core` for the core functionality
+- `usb_cam` package for camera streaming
+- `rviz2` for visualization
+
+
+To install the necessary dependencies, ensure you run:
+```sh
+# update libraries
+sudo apt-get update
+# install ros dependencies
+rosdep update
+```
+
+## Author Information
+
+**Authors:**
+- [Josep Rueda Collell](mailto:rueda_999@hotmail.com)
+- [Ander Gonzalez](mailto:ander.gonzalez@ikerlan.es)
+
+**Created:** October 2024
+
+**Affiliation:** [IKERLAN](https://www.ikerlan.es)
+
+### Citation
+If you use this code, please cite:
+**Josep Rueda Collell**. "ROS2 Extrinsic Camera Calibrator using ArUco Markers". (2024).
+
+---
+
+Developed as part of **AI-PRISM** project.
+
+
+
+
+
+*AI Powered human-centred Robot Interactions for Smart Manufacturing*
+
+
+
+
+
+Horizon Europe – Grant Agreement number [101058589](https://cordis.europa.eu/project/id/101058589)
+
+*Funded by the European Union. Views and opinions expressed are however those of the author(s) only and do not necessarily reflect those of the European Union. The European Union cannot be held responsible for them. Neither the European Union nor the granting authority can be held responsible for them.*
+
+## License
+
+This software is provided under a dual license system. You may choose between:
+
+- **GNU Affero General Public License v3**: For open-source development, subject to the conditions of this license.
+- **Commercial License**: For proprietary use. 
For more details on the commercial license, please contact us at [info@ikerlan.es](mailto:info@ikerlan.es). + +Please see the [LICENSE](./license.md) file for the complete terms and conditions of each license option. \ No newline at end of file diff --git a/extrinsic_calibrator_examples/config/d435.yaml b/extrinsic_calibrator_examples/config/d435.yaml new file mode 100644 index 0000000..babec8f --- /dev/null +++ b/extrinsic_calibrator_examples/config/d435.yaml @@ -0,0 +1,23 @@ +/**: + ros__parameters: + video_device: "/dev/video4" # "ffplay /dev/video4" to test + framerate: 6.0 + io_method: "mmap" + frame_id: "cam1_frame" + pixel_format: "yuyv" # see usb_cam/supported_formats for list of supported formats + av_device_format: "YUV422P" + image_width: 640 + image_height: 480 + camera_name: "cam1" + camera_info_url: "package://extrinsic_calibrator_examples/config/d435_intrinsics.yaml" + brightness: -1 + contrast: -1 + saturation: -1 + sharpness: -1 + gain: -1 + auto_white_balance: true + white_balance: 4000 + autoexposure: true + exposure: 100 + autofocus: false + focus: -1 \ No newline at end of file diff --git a/extrinsic_calibrator_examples/config/d435_intrinsics.yaml b/extrinsic_calibrator_examples/config/d435_intrinsics.yaml new file mode 100644 index 0000000..2504d46 --- /dev/null +++ b/extrinsic_calibrator_examples/config/d435_intrinsics.yaml @@ -0,0 +1,20 @@ +image_width: 640 +image_height: 480 +camera_name: "cam1" +camera_matrix: + rows: 3 + cols: 3 + data: [604.122802734375, 0.0, 327.40875244140625, 0.0, 603.847900390625, 244.95323181152344, 0.0, 0.0, 1.0] +distortion_model: "plumb_bob" +distortion_coefficients: + rows: 1 + cols: 5 + data: [0.0, 0.0, 0.0, 0.0, 0.0] +rectification_matrix: + rows: 3 + cols: 3 + data: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0] +projection_matrix: + rows: 3 + cols: 4 + data: [604.122802734375, 0.0, 327.40875244140625, 0.0, 0.0, 603.847900390625, 244.95323181152344, 0.0, 0.0, 0.0, 1.0, 0.0] diff --git 
a/extrinsic_calibrator_examples/config/l515.yaml b/extrinsic_calibrator_examples/config/l515.yaml new file mode 100644 index 0000000..f9950b8 --- /dev/null +++ b/extrinsic_calibrator_examples/config/l515.yaml @@ -0,0 +1,23 @@ +/**: + ros__parameters: + video_device: "/dev/video12" # "ffplay /dev/video12" to test + framerate: 6.0 + io_method: "mmap" + frame_id: "cam2_frame" + pixel_format: "yuyv" # see usb_cam/supported_formats for list of supported formats + av_device_format: "YUV422P" + image_width: 640 + image_height: 480 + camera_name: "cam2" + camera_info_url: "package://extrinsic_calibrator_examples/config/l515_intrinsics.yaml" + brightness: -1 + contrast: -1 + saturation: -1 + sharpness: -1 + gain: -1 + auto_white_balance: true + white_balance: 4000 + autoexposure: true + exposure: 100 + autofocus: false + focus: -1 \ No newline at end of file diff --git a/extrinsic_calibrator_examples/config/l515_intrinsics.yaml b/extrinsic_calibrator_examples/config/l515_intrinsics.yaml new file mode 100644 index 0000000..f4ab8f4 --- /dev/null +++ b/extrinsic_calibrator_examples/config/l515_intrinsics.yaml @@ -0,0 +1,20 @@ +image_width: 640 +image_height: 480 +camera_name: "cam2" +camera_matrix: + rows: 3 + cols: 3 + data: [607.4058837890625, 0.0, 325.59991455078125, 0.0, 607.5341186523438, 247.25904846191406, 0.0, 0.0, 1.0] +distortion_model: "plumb_bob" +distortion_coefficients: + rows: 1 + cols: 5 + data: [0.19551624357700348, -0.5865326523780823, -0.002620677463710308, 0.0008374004391953349, 0.5133219957351685] +rectification_matrix: + rows: 3 + cols: 3 + data: [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0] +projection_matrix: + rows: 3 + cols: 4 + data: [607.4058837890625, 0.0, 325.59991455078125, 0.0, 0.0, 607.5341186523438, 247.25904846191406, 0.0, 0.0, 0.0, 1.0, 0.0] diff --git a/extrinsic_calibrator_examples/extrinsic_calibrator_examples/__init__.py b/extrinsic_calibrator_examples/extrinsic_calibrator_examples/__init__.py new file mode 100644 index 0000000..e69de29 
diff --git a/extrinsic_calibrator_examples/launch/launch_extrinsic_calibrator.launch.py b/extrinsic_calibrator_examples/launch/launch_extrinsic_calibrator.launch.py new file mode 100644 index 0000000..986cee1 --- /dev/null +++ b/extrinsic_calibrator_examples/launch/launch_extrinsic_calibrator.launch.py @@ -0,0 +1,80 @@ +# ------------------------------------------------------------------------------ +# This file is part of **extrinsic-calibrator: +# October 2024 +# Copyright 2024 IKERLAN. All Rights Reserved. +# +# +# LICENSE NOTICE +# +# This software is available under a dual license system. Choose between: +# - GNU Affero General Public License v3.0 for open-source usage, or +# - A commercial license for proprietary development. +# For commercial license details, contact us at info@ikerlan.es. +# +# GNU Affero General Public License v3.0 +# Version 3, 19 November 2007 +# © 2007 Free Software Foundation, Inc. +# +# Licensed under a dual license system: +# 1. Open-source usage under the GNU Affero General Public License v3.0 +# (AGPL-3.0), allowing you to freely use, modify, and distribute the +# software for open-source projects. You can find a copy of the AGPL-3.0 +# license at https://www.gnu.org/licenses/agpl-3.0.html. +# 2. For commercial/proprietary use, a separate commercial license is required. +# Please contact us at info@ikerlan.es for inquiries about our commercial +# licensing options. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. 
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see .
+#
+# Author Information:
+# Author: Josep Rueda Collell
+# Created: October 2024
+# Affiliation: IKERLAN (https://www.ikerlan.es)
+# ------------------------------------------------------------------------------
+
+from launch import LaunchDescription
+from launch.actions import IncludeLaunchDescription
+from launch.launch_description_sources import PythonLaunchDescriptionSource
+from launch_ros.actions import Node
+from launch_ros.substitutions import FindPackageShare
+from launch.substitutions import PathJoinSubstitution
+from launch.substitutions import PathJoinSubstitution
+
+
+def generate_launch_description():
+    return LaunchDescription([
+        # Launch the extrinsic calibrator node. The config file is in the config folder and is passed to the node using the generate_parameter_library
+        Node(
+            package='extrinsic_calibrator_core',
+            executable='extrinsic_calibrator_node.py',
+            name='extrinsic_calibrator_node',
+            output='screen',
+        ),
+
+        # Launch the set of usb-cameras with their own config_files
+        IncludeLaunchDescription(
+            PythonLaunchDescriptionSource(PathJoinSubstitution([
+                FindPackageShare("extrinsic_calibrator_examples"),
+                "launch",
+                "launch_usb_cameras.launch.py"]))
+        ),
+
+        # Launch the rviz visualizer with the TF of the map and the cameras
+        IncludeLaunchDescription(
+            PythonLaunchDescriptionSource(PathJoinSubstitution([
+                FindPackageShare("extrinsic_calibrator_examples"),
+                "launch",
+                "launch_rviz.launch.py"]))
+        ),
+    ])
diff --git a/extrinsic_calibrator_examples/launch/launch_rviz.launch.py b/extrinsic_calibrator_examples/launch/launch_rviz.launch.py
new file mode 100644
index 0000000..fcc63b8
--- /dev/null
+++ b/extrinsic_calibrator_examples/launch/launch_rviz.launch.py
@@ -0,0 +1,67 @@
+# ------------------------------------------------------------------------------
+# This file is part of **extrinsic-calibrator:
+# October 
2024 +# Copyright 2024 IKERLAN. All Rights Reserved. +# +# +# LICENSE NOTICE +# +# This software is available under a dual license system. Choose between: +# - GNU Affero General Public License v3.0 for open-source usage, or +# - A commercial license for proprietary development. +# For commercial license details, contact us at info@ikerlan.es. +# +# GNU Affero General Public License v3.0 +# Version 3, 19 November 2007 +# © 2007 Free Software Foundation, Inc. +# +# Licensed under a dual license system: +# 1. Open-source usage under the GNU Affero General Public License v3.0 +# (AGPL-3.0), allowing you to freely use, modify, and distribute the +# software for open-source projects. You can find a copy of the AGPL-3.0 +# license at https://www.gnu.org/licenses/agpl-3.0.html. +# 2. For commercial/proprietary use, a separate commercial license is required. +# Please contact us at info@ikerlan.es for inquiries about our commercial +# licensing options. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . 
+# +# Author Information: +# Author: Josep Rueda Collell +# Created: October 2024 +# Affiliation: IKERLAN (https://www.ikerlan.es) +# ------------------------------------------------------------------------------ + +import os +from ament_index_python.packages import get_package_share_directory +from launch import LaunchDescription +from launch_ros.actions import Node + + +def generate_launch_description(): + package_name = 'extrinsic_calibrator_examples' + rviz_dir = os.path.join(get_package_share_directory(package_name), 'rviz') + + rviz_config = os.path.join(rviz_dir, 'extrinsic.rviz') + + return LaunchDescription([ + Node( + package='rviz2', + executable='rviz2', + name='rviz2', + arguments=['-d', rviz_config], + output='screen' + ), + + ]) diff --git a/extrinsic_calibrator_examples/launch/launch_usb_cameras.launch.py b/extrinsic_calibrator_examples/launch/launch_usb_cameras.launch.py new file mode 100644 index 0000000..6d2105c --- /dev/null +++ b/extrinsic_calibrator_examples/launch/launch_usb_cameras.launch.py @@ -0,0 +1,76 @@ +# ------------------------------------------------------------------------------ +# This file is part of **extrinsic-calibrator: +# October 2024 +# Copyright 2024 IKERLAN. All Rights Reserved. +# +# +# LICENSE NOTICE +# +# This software is available under a dual license system. Choose between: +# - GNU Affero General Public License v3.0 for open-source usage, or +# - A commercial license for proprietary development. +# For commercial license details, contact us at info@ikerlan.es. +# +# GNU Affero General Public License v3.0 +# Version 3, 19 November 2007 +# © 2007 Free Software Foundation, Inc. +# +# Licensed under a dual license system: +# 1. Open-source usage under the GNU Affero General Public License v3.0 +# (AGPL-3.0), allowing you to freely use, modify, and distribute the +# software for open-source projects. You can find a copy of the AGPL-3.0 +# license at https://www.gnu.org/licenses/agpl-3.0.html. +# 2. 
For commercial/proprietary use, a separate commercial license is required. +# Please contact us at info@ikerlan.es for inquiries about our commercial +# licensing options. +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +# +# Author Information: +# Author: Josep Rueda Collell +# Created: October 2024 +# Affiliation: IKERLAN (https://www.ikerlan.es) +# ------------------------------------------------------------------------------ + +import os +from ament_index_python.packages import get_package_share_directory +from launch import LaunchDescription +from launch_ros.actions import Node + + +def generate_launch_description(): + package_name = 'extrinsic_calibrator_examples' + config_dir = os.path.join(get_package_share_directory(package_name), 'config') + + d435_config = os.path.join(config_dir, 'd435.yaml') + l515_config = os.path.join(config_dir, 'l515.yaml') + + return LaunchDescription([ + Node( + package='usb_cam', + executable='usb_cam_node_exe', + name='d435_camera', + namespace='camera_1', + parameters=[d435_config], + output='screen' + ), + Node( + package='usb_cam', + executable='usb_cam_node_exe', + name='l515_camera', + namespace='camera_2', + parameters=[l515_config], + output='screen' + ) + ]) diff --git a/extrinsic_calibrator_examples/license.md b/extrinsic_calibrator_examples/license.md new file mode 100644 index 0000000..aff2e2f --- /dev/null +++ 
b/extrinsic_calibrator_examples/license.md @@ -0,0 +1,140 @@ +# IKERLAN, S. COOP +This file is part of **extrinsic-calibrator: humble** +October 2024 + +# LICENSE NOTICE + +This software has been liberated under a dual license system, on which you can choose between the open-source GNU Affero General Public License v3 in the case you are interested in using this software for an open-source development, or a commercial license in the case you are interested in using this software for a privative development. If you want to know more about our commercial license, please contact us in info@ikerlan.es. + +# GNU AFFERO GENERAL PUBLIC LICENSE +Version 3, 19 November 2007 +Copyright © 2007 Free Software Foundation, Inc. +Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. +Preamble +The GNU Affero General Public License is a free, copyleft license for software and other kinds of works, specifically designed to ensure cooperation with the community in the case of network server software. +The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, our General Public Licenses are intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. +When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. 
+Developers that use our General Public Licenses protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License which gives you legal permission to copy, distribute and/or modify the software. +A secondary benefit of defending all users' freedom is that improvements made in alternate versions of the program, if they receive widespread use, become available for other developers to incorporate. Many developers of free software are heartened and encouraged by the resulting cooperation. However, in the case of software used on network servers, this result may fail to come about. The GNU General Public License permits making a modified version and letting the public access it on a server without ever releasing its source code to the public. +The GNU Affero General Public License is designed specifically to ensure that, in such cases, the modified source code becomes available to the community. It requires the operator of a network server to provide the source code of the modified version running there to the users of that server. Therefore, public use of a modified version, on a publicly accessible server, gives the public access to the source code of the modified version. +An older license, called the Affero General Public License and published by Affero, was designed to accomplish similar goals. This is a different license, not a version of the Affero GPL, but Affero has released a new version of the Affero GPL which permits relicensing under this license. +The precise terms and conditions for copying, distribution and modification follow. +TERMS AND CONDITIONS +0. Definitions. +"This License" refers to version 3 of the GNU Affero General Public License. +"Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. +"The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". 
"Licensees" and "recipients" may be individuals or organizations. +To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. +A "covered work" means either the unmodified Program or a work based on the Program. +To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. +To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. +An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. +1. Source Code. +The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. +A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. 
+The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. +The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. +The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. +The Corresponding Source for a work in source code form is that same work. +2. Basic Permissions. +All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. 
The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. +You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. +Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. +3. Protecting Users' Legal Rights From Anti-Circumvention Law. +No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. +When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. +4. Conveying Verbatim Copies. 
+You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. +You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. +5. Conveying Modified Source Versions. +You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: +• a) The work must carry prominent notices stating that you modified it, and giving a relevant date. +• b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". +• c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. +• d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. 
+A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. +6. Conveying Non-Source Forms. +You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: +• a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. +• b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. +• c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. 
This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. +• d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. +• e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. +A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. +A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. 
A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. +"Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. +If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). +The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. 
+Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. +7. Additional Terms. +"Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. +When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
+Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: +• a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or +• b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or +• c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or +• d) Limiting the use for publicity purposes of names of licensors or authors of the material; or +• e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or +• f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. +All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. 
+If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. +Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. +8. Termination. +You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). +However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. +Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. +Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. +9. Acceptance Not Required for Having Copies. +You are not required to accept this License in order to receive or run a copy of the Program. 
Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. +10. Automatic Licensing of Downstream Recipients. +Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. +An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. +You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. +11. Patents. +A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. 
The work thus licensed is called the contributor's "contributor version". +A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. +Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. +In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. +If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. 
"Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. +If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. +A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. +Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. +12. No Surrender of Others' Freedom. 
+If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. +13. Remote Network Interaction; Use with the GNU General Public License. +Notwithstanding any other provision of this License, if you modify the Program, your modified version must prominently offer all users interacting with it remotely through a computer network (if your version supports such interaction) an opportunity to receive the Corresponding Source of your version by providing access to the Corresponding Source from a network server at no charge, through some standard or customary means of facilitating copying of software. This Corresponding Source shall include the Corresponding Source for any work covered by version 3 of the GNU General Public License that is incorporated pursuant to the following paragraph. +Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the work with which it is combined will remain governed by version 3 of the GNU General Public License. +14. Revised Versions of this License. +The Free Software Foundation may publish revised and/or new versions of the GNU Affero General Public License from time to time. 
Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. +Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU Affero General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU Affero General Public License, you may choose any version ever published by the Free Software Foundation. +If the Program specifies that a proxy can decide which future versions of the GNU Affero General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. +Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. +15. Disclaimer of Warranty. +THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. +16. Limitation of Liability. 
+IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. +17. Interpretation of Sections 15 and 16. +If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. +END OF TERMS AND CONDITIONS +How to Apply These Terms to Your New Programs +If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. +To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as + published by the Free Software Foundation, either version 3 of the + License, or (at your option) any later version. 
+ + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . +Also add information on how to contact you by electronic and paper mail. +If your software can interact with users remotely through a computer network, you should also make sure that it provides a way for users to get its source. For example, if your program is a web application, its interface could display a "Source" link that leads users to an archive of the code. There are many ways you could offer source, and different solutions will be better for different programs; see section 13 for the specific requirements. +You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU AGPL, see . + diff --git a/extrinsic_calibrator_examples/package.xml b/extrinsic_calibrator_examples/package.xml new file mode 100644 index 0000000..90f40bd --- /dev/null +++ b/extrinsic_calibrator_examples/package.xml @@ -0,0 +1,38 @@ + + + + extrinsic_calibrator_examples + 0.1.0 + ROS2 package designed to provide examples on how to use the extrinsic_calibrator_core package as well as useful ros2 launch files to launch the cameras, the calibrator, as well as a demonstration rviz file. 
+ Josep Rueda Collell + Ander Gonzalez + AGPL-3.0-only + + ament_cmake + ament_cmake_python + + rclcpp + rclpy + cv_bridge + extrinsic_calibrator_core + rviz2 + sensor_msgs + std_msgs + tf2_ros + tf_transformations + usb_cam + + rosidl_default_generators + + rosidl_default_runtime + + rosidl_interface_packages + + ament_lint_auto + ament_lint_common + + + ament_cmake + + + \ No newline at end of file diff --git a/extrinsic_calibrator_examples/rviz/extrinsic.rviz b/extrinsic_calibrator_examples/rviz/extrinsic.rviz new file mode 100644 index 0000000..6a66681 --- /dev/null +++ b/extrinsic_calibrator_examples/rviz/extrinsic.rviz @@ -0,0 +1,188 @@ +Panels: + - Class: rviz_common/Displays + Help Height: 78 + Name: Displays + Property Tree Widget: + Expanded: + - /Global Options1 + - /Status1 + - /Image1 + - /Image2 + - /TF1 + - /TF1/Frames1 + - /TF1/Tree1 + Splitter Ratio: 0.6382352709770203 + Tree Height: 88 + - Class: rviz_common/Selection + Name: Selection + - Class: rviz_common/Tool Properties + Expanded: + - /2D Goal Pose1 + - /Publish Point1 + Name: Tool Properties + Splitter Ratio: 0.5886790156364441 + - Class: rviz_common/Views + Expanded: + - /Current View1 + Name: Views + Splitter Ratio: 0.5 + - Class: rviz_common/Time + Experimental: false + Name: Time + SyncMode: 0 + SyncSource: "" +Visualization Manager: + Class: "" + Displays: + - Alpha: 0.5 + Cell Size: 1 + Class: rviz_default_plugins/Grid + Color: 160; 160; 164 + Enabled: true + Line Style: + Line Width: 0.029999999329447746 + Value: Lines + Name: Grid + Normal Cell Count: 0 + Offset: + X: 0 + Y: 0 + Z: 0 + Plane: XY + Plane Cell Count: 10 + Reference Frame: + Value: true + - Class: rviz_default_plugins/Image + Enabled: true + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Image + Normalize Range: true + Topic: + Depth: 5 + Durability Policy: Volatile + History Policy: Keep Last + Reliability Policy: Reliable + Value: /camera_1/image_raw/detected_markers + Value: true + - Class: 
rviz_default_plugins/Image + Enabled: true + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Image + Normalize Range: true + Topic: + Depth: 5 + Durability Policy: Volatile + History Policy: Keep Last + Reliability Policy: Reliable + Value: /camera_2/image_raw/detected_markers + Value: true + - Class: rviz_default_plugins/TF + Enabled: true + Frame Timeout: 15 + Frames: + All Enabled: false + cam2: + Value: true + map: + Value: true + Marker Scale: 1 + Name: TF + Show Arrows: true + Show Axes: true + Show Names: true + Tree: + map: + cam2: + {} + Update Interval: 0 + Value: true + Enabled: true + Global Options: + Background Color: 48; 48; 48 + Fixed Frame: map + Frame Rate: 30 + Name: root + Tools: + - Class: rviz_default_plugins/Interact + Hide Inactive Objects: true + - Class: rviz_default_plugins/MoveCamera + - Class: rviz_default_plugins/Select + - Class: rviz_default_plugins/FocusCamera + - Class: rviz_default_plugins/Measure + Line color: 128; 128; 0 + - Class: rviz_default_plugins/SetInitialPose + Covariance x: 0.25 + Covariance y: 0.25 + Covariance yaw: 0.06853891909122467 + Topic: + Depth: 5 + Durability Policy: Volatile + History Policy: Keep Last + Reliability Policy: Reliable + Value: /initialpose + - Class: rviz_default_plugins/SetGoal + Topic: + Depth: 5 + Durability Policy: Volatile + History Policy: Keep Last + Reliability Policy: Reliable + Value: /goal_pose + - Class: rviz_default_plugins/PublishPoint + Single click: true + Topic: + Depth: 5 + Durability Policy: Volatile + History Policy: Keep Last + Reliability Policy: Reliable + Value: /clicked_point + Transformation: + Current: + Class: rviz_default_plugins/TF + Value: true + Views: + Current: + Class: rviz_default_plugins/Orbit + Distance: 4.191425800323486 + Enable Stereo Rendering: + Stereo Eye Separation: 0.05999999865889549 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Focal Point: + X: 0 + Y: 0 + Z: 0 + Focal Shape Fixed Size: true + Focal Shape Size: 
0.05000000074505806 + Invert Z Axis: false + Name: Current View + Near Clip Distance: 0.009999999776482582 + Pitch: 0.7203978896141052 + Target Frame: + Value: Orbit (rviz) + Yaw: 0.8403979539871216 + Saved: ~ +Window Geometry: + Displays: + collapsed: false + Height: 1007 + Hide Left Dock: false + Hide Right Dock: false + Image: + collapsed: false + QMainWindow State: 000000ff00000000fd00000004000000000000018300000355fc020000000cfb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003b000000e1000000c700fffffffb0000000a0049006d00610067006501000001080000003b0000000000000000fb0000000a0049006d00610067006501000001490000001a0000000000000000fb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d0061006700650100000122000001330000002800fffffffb0000000a0049006d006100670065010000025b000001350000002800ffffff000000010000010f00000355fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000003b00000355000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000007740000003efc0100000002fb0000000800540069006d00650100000000000007740000025300fffffffb0000000800540069006d00650100000000000004500000000000000000000004d60000035500000004000000040000000800000008fc000000010000
0002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + Selection: + collapsed: false + Time: + collapsed: false + Tool Properties: + collapsed: false + Views: + collapsed: false + Width: 1908 + X: -32 + Y: -28 diff --git a/extrinsic_calibrator_examples/scripts/__init__.py b/extrinsic_calibrator_examples/scripts/__init__.py new file mode 100644 index 0000000..e69de29 From 65a7e855f5b0f8447fb28b959311fb46724c619a Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Mon, 27 Oct 2025 12:21:22 +0100 Subject: [PATCH 11/17] added generating boards in .svg format, generated a few example aruco boards for A4, A3 and A2 formats --- ...x4_marl_len_005_sep_00125_DICT6X6_1000.svg | 710 ++++++++++++++++++ ...x6_marl_len_005_sep_00125_DICT6X6_1000.svg | 352 +++++++++ ...4x3_mar_len_005_sep_00125_DICT6X6_1000.svg | 174 +++++ generate_aruco_grid.py | 92 ++- 4 files changed, 1315 insertions(+), 13 deletions(-) create mode 100644 example_boards/aruco_A2_portrait_6x4_marl_len_005_sep_00125_DICT6X6_1000.svg create mode 100644 example_boards/aruco_A3_portrait_8x6_marl_len_005_sep_00125_DICT6X6_1000.svg create mode 100644 example_boards/aruco_A4_portrait_4x3_mar_len_005_sep_00125_DICT6X6_1000.svg diff --git a/example_boards/aruco_A2_portrait_6x4_marl_len_005_sep_00125_DICT6X6_1000.svg b/example_boards/aruco_A2_portrait_6x4_marl_len_005_sep_00125_DICT6X6_1000.svg new file mode 100644 index 0000000..d827be7 --- /dev/null +++ b/example_boards/aruco_A2_portrait_6x4_marl_len_005_sep_00125_DICT6X6_1000.svg @@ -0,0 +1,710 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/example_boards/aruco_A3_portrait_8x6_marl_len_005_sep_00125_DICT6X6_1000.svg b/example_boards/aruco_A3_portrait_8x6_marl_len_005_sep_00125_DICT6X6_1000.svg new file mode 100644 index 0000000..7ae2baf --- /dev/null +++ b/example_boards/aruco_A3_portrait_8x6_marl_len_005_sep_00125_DICT6X6_1000.svg @@ -0,0 +1,352 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + \ No newline at end of file diff --git a/example_boards/aruco_A4_portrait_4x3_mar_len_005_sep_00125_DICT6X6_1000.svg b/example_boards/aruco_A4_portrait_4x3_mar_len_005_sep_00125_DICT6X6_1000.svg new file mode 100644 index 0000000..08b6a10 --- /dev/null +++ b/example_boards/aruco_A4_portrait_4x3_mar_len_005_sep_00125_DICT6X6_1000.svg @@ -0,0 +1,174 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/generate_aruco_grid.py b/generate_aruco_grid.py index ca30c0e..96254fc 100644 --- a/generate_aruco_grid.py +++ b/generate_aruco_grid.py @@ -1,31 +1,97 @@ import cv2 import argparse +from pathlib import Path -def generate_aruco_grid(output_path, grid_cols, grid_rows, grid_marker_separation, marker_length, aruco_dict, pixels_per_meter): +def _save_svg(img, output_path: str, board_width_m: float, board_height_m: float) -> None: + if len(img.shape) == 3: + gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) + else: + gray = img + + _, bw = cv2.threshold(gray, 127, 255, cv2.THRESH_BINARY) + h, w = bw.shape + + width_mm = board_width_m * 1000.0 + height_mm = board_height_m * 1000.0 + + active = {} + rectangles = [] + + for y in range(h): + runs = [] + row = bw[y] + x = 0 + while x < w: + if row[x] == 0: + start = x + while x < w and row[x] == 0: + x += 1 + runs.append((start, x - start)) + else: + x += 1 + + current_keys = set(runs) + to_remove = [] + for key, (sy, height) in active.items(): + if key not in current_keys: + rx, rw = key + rectangles.append((rx, sy, rw, height)) + to_remove.append(key) + for key in to_remove: + active.pop(key) + + for run in runs: + if run in active: + sy, height = active[run] + active[run] = (sy, 
height + 1) + else: + active[run] = (y, 1) + for key, (sy, height) in active.items(): + rx, rw = key + rectangles.append((rx, sy, rw, height)) + + lines = [ + f'', + '' + ] + for (rx, ry, rw, rh) in rectangles: + lines.append(f'') + + lines.append('') + Path(output_path).write_text("\n".join(lines), encoding="utf-8") + +def generate_aruco_grid(output_path, grid_cols, grid_rows, grid_marker_separation, marker_length, aruco_dict, pixels_per_meter): aruco_dictonary = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_dict)) + marker_px = int(round(marker_length * pixels_per_meter)) + sep_px = int(round(grid_marker_separation * pixels_per_meter)) + board = cv2.aruco.GridBoard( - (grid_cols, grid_rows), - marker_length, - grid_marker_separation, - aruco_dictonary, - ) + (grid_cols, grid_rows), + marker_length, + grid_marker_separation, + aruco_dictonary, + ) + + board_width_m = grid_cols * marker_length + (grid_cols - 1) * grid_marker_separation + board_height_m = grid_rows * marker_length + (grid_rows - 1) * grid_marker_separation - board_width = grid_cols * marker_length + (grid_cols - 1) * grid_marker_separation - board_height = grid_rows * marker_length + (grid_rows - 1) * grid_marker_separation + width_px = grid_cols * marker_px + (grid_cols - 1) * sep_px + height_px = grid_rows * marker_px + (grid_rows - 1) * sep_px - width_px = int(board_width * pixels_per_meter) - height_px = int(board_height * pixels_per_meter) + img = cv2.aruco.drawPlanarBoard(board, (width_px, height_px), marginSize=0, borderBits=1) - img = board.generateImage((width_px, height_px)) + if output_path.lower().endswith('.svg'): + _save_svg(img, output_path, board_width_m, board_height_m) + else: + cv2.imwrite(output_path, img) - cv2.imwrite(output_path, img) print(f"Aruco grid saved to {output_path}") if __name__ == "__main__": parser = argparse.ArgumentParser(description="Generate an ArUco grid board image.") - parser.add_argument("--output", type=str, default="aruco_grid.png", 
help="Output image file path") + parser.add_argument("--output", type=str, default="aruco_grid.svg", help="Output image file path (.svg or raster like .png)") parser.add_argument("--grid_cols", type=int, default=5, help="Number of squares in X direction") parser.add_argument("--grid_rows", type=int, default=5, help="Number of squares in Y direction") parser.add_argument("--grid_marker_separation", type=float, default=0.025, help="Square length in meters") From 9597267ee119382f6309af14a3fc20ed9d59707a Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Tue, 28 Oct 2025 15:38:36 +0100 Subject: [PATCH 12/17] Charuco board generation: - added new param (square_lenght) - corrected unequal vertical and horizontal margin - generated example of boards for A4, A3 and A2 format - redefined some variables names to be more clear --- ...x4_marl_len_005_sep_00125_DICT6X6_1000.svg | 710 ------------------ ...x6_marl_len_005_sep_00125_DICT6X6_1000.svg | 352 --------- ...4x3_mar_len_005_sep_00125_DICT6X6_1000.svg | 174 ----- ...6_mar_len_05_sep_len_0125_DICT6X6_1000.svg | 710 ++++++++++++++++++ ...4_mar_len_05_sep_len_0125_DICT6X6_1000.svg | 352 +++++++++ ...3_mar_len_05_sep_len_0125_DICT6X6_1000.svg | 174 +++++ ...x6_mar_len_003_sq_len_006_DICT6X6_1000.svg | 376 ++++++++++ ...x4_mar_len_003_sq_len_006_DICT6X6_1000.svg | 186 +++++ ...x3_mar_len_004_sq_len_008_DICT6X6_1000.svg | 97 +++ generate_aruco_grid.py | 111 --- generate_board.py | 149 ++++ 11 files changed, 2044 insertions(+), 1347 deletions(-) delete mode 100644 example_boards/aruco_A2_portrait_6x4_marl_len_005_sep_00125_DICT6X6_1000.svg delete mode 100644 example_boards/aruco_A3_portrait_8x6_marl_len_005_sep_00125_DICT6X6_1000.svg delete mode 100644 example_boards/aruco_A4_portrait_4x3_mar_len_005_sep_00125_DICT6X6_1000.svg create mode 100644 example_boards/aruco_boards/A2_aruco_8x6_mar_len_05_sep_len_0125_DICT6X6_1000.svg create mode 100644 example_boards/aruco_boards/A3_aruco_6x4_mar_len_05_sep_len_0125_DICT6X6_1000.svg 
create mode 100644 example_boards/aruco_boards/A4_aruco_4x3_mar_len_05_sep_len_0125_DICT6X6_1000.svg create mode 100644 example_boards/charuco_boards/A2_charuco_8x6_mar_len_003_sq_len_006_DICT6X6_1000.svg create mode 100644 example_boards/charuco_boards/A3_charuco_6x4_mar_len_003_sq_len_006_DICT6X6_1000.svg create mode 100644 example_boards/charuco_boards/A4_charuco_4x3_mar_len_004_sq_len_008_DICT6X6_1000.svg delete mode 100644 generate_aruco_grid.py create mode 100644 generate_board.py diff --git a/example_boards/aruco_A2_portrait_6x4_marl_len_005_sep_00125_DICT6X6_1000.svg b/example_boards/aruco_A2_portrait_6x4_marl_len_005_sep_00125_DICT6X6_1000.svg deleted file mode 100644 index d827be7..0000000 --- a/example_boards/aruco_A2_portrait_6x4_marl_len_005_sep_00125_DICT6X6_1000.svg +++ /dev/null @@ -1,710 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/example_boards/aruco_A3_portrait_8x6_marl_len_005_sep_00125_DICT6X6_1000.svg b/example_boards/aruco_A3_portrait_8x6_marl_len_005_sep_00125_DICT6X6_1000.svg deleted file mode 100644 index 7ae2baf..0000000 --- a/example_boards/aruco_A3_portrait_8x6_marl_len_005_sep_00125_DICT6X6_1000.svg +++ /dev/null @@ -1,352 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/example_boards/aruco_A4_portrait_4x3_mar_len_005_sep_00125_DICT6X6_1000.svg b/example_boards/aruco_A4_portrait_4x3_mar_len_005_sep_00125_DICT6X6_1000.svg deleted file mode 100644 index 08b6a10..0000000 --- a/example_boards/aruco_A4_portrait_4x3_mar_len_005_sep_00125_DICT6X6_1000.svg +++ /dev/null @@ -1,174 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 
- - - - - - - \ No newline at end of file diff --git a/example_boards/aruco_boards/A2_aruco_8x6_mar_len_05_sep_len_0125_DICT6X6_1000.svg b/example_boards/aruco_boards/A2_aruco_8x6_mar_len_05_sep_len_0125_DICT6X6_1000.svg new file mode 100644 index 0000000..c320196 --- /dev/null +++ b/example_boards/aruco_boards/A2_aruco_8x6_mar_len_05_sep_len_0125_DICT6X6_1000.svg @@ -0,0 +1,710 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/example_boards/aruco_boards/A3_aruco_6x4_mar_len_05_sep_len_0125_DICT6X6_1000.svg 
b/example_boards/aruco_boards/A3_aruco_6x4_mar_len_05_sep_len_0125_DICT6X6_1000.svg new file mode 100644 index 0000000..307b47d --- /dev/null +++ b/example_boards/aruco_boards/A3_aruco_6x4_mar_len_05_sep_len_0125_DICT6X6_1000.svg @@ -0,0 +1,352 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/example_boards/aruco_boards/A4_aruco_4x3_mar_len_05_sep_len_0125_DICT6X6_1000.svg b/example_boards/aruco_boards/A4_aruco_4x3_mar_len_05_sep_len_0125_DICT6X6_1000.svg new file mode 100644 index 0000000..ee7b923 --- /dev/null +++ b/example_boards/aruco_boards/A4_aruco_4x3_mar_len_05_sep_len_0125_DICT6X6_1000.svg @@ -0,0 +1,174 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/example_boards/charuco_boards/A2_charuco_8x6_mar_len_003_sq_len_006_DICT6X6_1000.svg b/example_boards/charuco_boards/A2_charuco_8x6_mar_len_003_sq_len_006_DICT6X6_1000.svg new file mode 100644 index 0000000..8eb5c9c --- /dev/null +++ 
b/example_boards/charuco_boards/A2_charuco_8x6_mar_len_003_sq_len_006_DICT6X6_1000.svg @@ -0,0 +1,376 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/example_boards/charuco_boards/A3_charuco_6x4_mar_len_003_sq_len_006_DICT6X6_1000.svg b/example_boards/charuco_boards/A3_charuco_6x4_mar_len_003_sq_len_006_DICT6X6_1000.svg new file mode 100644 index 0000000..cec0fbe --- /dev/null +++ b/example_boards/charuco_boards/A3_charuco_6x4_mar_len_003_sq_len_006_DICT6X6_1000.svg @@ -0,0 +1,186 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/example_boards/charuco_boards/A4_charuco_4x3_mar_len_004_sq_len_008_DICT6X6_1000.svg b/example_boards/charuco_boards/A4_charuco_4x3_mar_len_004_sq_len_008_DICT6X6_1000.svg new file mode 100644 index 0000000..75559a3 --- /dev/null +++ b/example_boards/charuco_boards/A4_charuco_4x3_mar_len_004_sq_len_008_DICT6X6_1000.svg @@ -0,0 +1,97 @@ + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/generate_aruco_grid.py b/generate_aruco_grid.py deleted file mode 100644 index 96254fc..0000000 --- a/generate_aruco_grid.py +++ /dev/null @@ -1,111 +0,0 @@ -import cv2 -import argparse -from pathlib import Path - -def _save_svg(img, output_path: str, board_width_m: float, board_height_m: float) -> None: - if len(img.shape) == 3: - gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) - else: - gray = img - - _, bw = cv2.threshold(gray, 127, 255, cv2.THRESH_BINARY) - h, w = bw.shape - - width_mm = board_width_m * 1000.0 - height_mm = board_height_m * 1000.0 - - active = {} - rectangles = [] - - for y in range(h): - runs = [] - row = bw[y] - x = 0 - while x < w: - if row[x] == 0: - start = x - while x < w and row[x] == 0: - x += 1 - runs.append((start, x - start)) - else: - x += 1 - - current_keys = set(runs) - to_remove = [] - for key, (sy, height) in active.items(): - if key not in current_keys: - rx, rw = key - rectangles.append((rx, sy, rw, height)) - to_remove.append(key) - for key in to_remove: - active.pop(key) - - for run in runs: - if run in active: - sy, height = active[run] - active[run] = (sy, height + 1) - else: - active[run] = (y, 1) - - for key, (sy, height) in active.items(): - rx, rw = key - rectangles.append((rx, sy, rw, height)) - - lines = [ - f'', - '' - ] - for (rx, ry, rw, rh) in rectangles: - lines.append(f'') - - lines.append('') - Path(output_path).write_text("\n".join(lines), encoding="utf-8") - -def generate_aruco_grid(output_path, grid_cols, grid_rows, grid_marker_separation, marker_length, aruco_dict, pixels_per_meter): - aruco_dictonary = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_dict)) - - marker_px = int(round(marker_length * pixels_per_meter)) - sep_px = int(round(grid_marker_separation * 
pixels_per_meter)) - - board = cv2.aruco.GridBoard( - (grid_cols, grid_rows), - marker_length, - grid_marker_separation, - aruco_dictonary, - ) - - board_width_m = grid_cols * marker_length + (grid_cols - 1) * grid_marker_separation - board_height_m = grid_rows * marker_length + (grid_rows - 1) * grid_marker_separation - - width_px = grid_cols * marker_px + (grid_cols - 1) * sep_px - height_px = grid_rows * marker_px + (grid_rows - 1) * sep_px - - img = cv2.aruco.drawPlanarBoard(board, (width_px, height_px), marginSize=0, borderBits=1) - - if output_path.lower().endswith('.svg'): - _save_svg(img, output_path, board_width_m, board_height_m) - else: - cv2.imwrite(output_path, img) - - print(f"Aruco grid saved to {output_path}") - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Generate an ArUco grid board image.") - parser.add_argument("--output", type=str, default="aruco_grid.svg", help="Output image file path (.svg or raster like .png)") - parser.add_argument("--grid_cols", type=int, default=5, help="Number of squares in X direction") - parser.add_argument("--grid_rows", type=int, default=5, help="Number of squares in Y direction") - parser.add_argument("--grid_marker_separation", type=float, default=0.025, help="Square length in meters") - parser.add_argument("--marker_length", type=float, default=0.1, help="Marker length in meters") - parser.add_argument("--aruco_dict", type=str, default="DICT_6X6_1000", help="ArUco dictionary name") - parser.add_argument("--ppm", type=int, default=10000, help="Pixels per meter") - args = parser.parse_args() - - generate_aruco_grid( - args.output, - args.grid_cols, - args.grid_rows, - args.grid_marker_separation, - args.marker_length, - args.aruco_dict, - args.ppm - ) \ No newline at end of file diff --git a/generate_board.py b/generate_board.py new file mode 100644 index 0000000..ecea93f --- /dev/null +++ b/generate_board.py @@ -0,0 +1,149 @@ +import cv2 +import numpy as np +import argparse +from 
pathlib import Path + +def save_svg(img, output_path: str, board_width_m: float, board_height_m: float) -> None: + if len(img.shape) == 3: + gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) + else: + gray = img + + _, bw = cv2.threshold(gray, 127, 255, cv2.THRESH_BINARY) + h, w = bw.shape + + width_mm = board_width_m * 1000.0 + height_mm = board_height_m * 1000.0 + + active = {} + rectangles = [] + + for y in range(h): + runs = [] + row = bw[y] + x = 0 + while x < w: + if row[x] == 0: + start = x + while x < w and row[x] == 0: + x += 1 + runs.append((start, x - start)) + else: + x += 1 + + current_keys = set(runs) + to_remove = [] + for key, (sy, height) in active.items(): + if key not in current_keys: + rx, rw = key + rectangles.append((rx, sy, rw, height)) + to_remove.append(key) + for key in to_remove: + active.pop(key) + + for run in runs: + if run in active: + sy, height = active[run] + active[run] = (sy, height + 1) + else: + active[run] = (y, 1) + + for key, (sy, height) in active.items(): + rx, rw = key + rectangles.append((rx, sy, rw, height)) + + lines = [ + f'', + '' + ] + for (rx, ry, rw, rh) in rectangles: + lines.append(f'') + + lines.append('') + Path(output_path).write_text("\n".join(lines), encoding="utf-8") + +def generate_aruco_board(board_type, output_path, board_rows, board_cols, square_length, marker_separation_length, marker_length, aruco_dict, pixels_per_meter): + aruco_dictonary = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_dict)) + + marker_px = int(round(marker_length * pixels_per_meter)) + sep_px = int(round(marker_separation_length * pixels_per_meter)) + + if board_type == "aruco_board": + board = cv2.aruco.GridBoard( + (board_cols, board_rows), + marker_length, + marker_separation_length, + aruco_dictonary, + ) + + board_width_m = board_cols * marker_length + (board_cols - 1) * marker_separation_length + board_height_m = board_rows * marker_length + (board_rows - 1) * marker_separation_length + + marker_px = 
int(round(marker_length * pixels_per_meter)) + sep_px = int(round(marker_separation_length * pixels_per_meter)) + inner_w_px = board_cols * marker_px + (board_cols - 1) * sep_px + inner_h_px = board_rows * marker_px + (board_rows - 1) * sep_px + + elif board_type == "charuco_board": + board = cv2.aruco.CharucoBoard( + (board_cols, board_rows), + square_length, + marker_length, + aruco_dictonary, + ) + + board_width_m = board_cols * square_length + board_height_m = board_rows * square_length + + square_px = int(round(square_length * pixels_per_meter)) + inner_w_px = board_cols * square_px + inner_h_px = board_rows * square_px + + else: + raise ValueError(f"Wrong board type: {board_type}. Options: aruco_board, charuco_board") + + margin_m = 0.2 * marker_length + margin_px = int(round(margin_m * pixels_per_meter)) + + img_inner = board.generateImage((inner_w_px, inner_h_px), marginSize=0, borderBits=1) + + # Add marigns. Done mannualy because maringSize in board.generateImage addd vertically smaller margins than horizontally + final_h = inner_h_px + 2 * margin_px + final_w = inner_w_px + 2 * margin_px + canvas = np.full((final_h, final_w), 255, dtype=img_inner.dtype) + canvas[margin_px:margin_px + inner_h_px, margin_px:margin_px + inner_w_px] = img_inner + + total_width_m = board_width_m + 2 * margin_m + total_height_m = board_height_m + 2 * margin_m + + if output_path.lower().endswith('.svg'): + save_svg(canvas, output_path, total_width_m, total_height_m) + else: + cv2.imwrite(output_path, canvas) + + print(f"Aruco board saved to {output_path}") + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Generate an ArUco board board image.") + parser.add_argument("--board_type", type=str,default="charuco_board", help="Board type. 
Options: aruco_board, charuco_board") + parser.add_argument("--output", type=str, default="board.svg", help="Output image file path (.svg or raster like .png)") + parser.add_argument("--board_rows", type=int, default=5, help="Number of squares in Y direction") + parser.add_argument("--board_cols", type=int, default=5, help="Number of squares in X direction") + parser.add_argument("--square_length", type=float, default=0.2, help="Square length in meters") + parser.add_argument("--marker_separation_length", type=float, default=0.0125, help="Distance between markers in meters") + parser.add_argument("--marker_length", type=float, default=0.1, help="Marker length in meters") + parser.add_argument("--aruco_dict", type=str, default="DICT_6X6_1000", help="ArUco dictionary name") + parser.add_argument("--ppm", type=int, default=11811, help="Pixels per meter") + args = parser.parse_args() + + generate_aruco_board( + args.board_type, + args.output, + args.board_rows, + args.board_cols, + args.square_length, + args.marker_separation_length, + args.marker_length, + args.aruco_dict, + args.ppm + ) \ No newline at end of file From d456e4106c25500ad64885bb316563bf4990f35b Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Fri, 31 Oct 2025 16:27:36 +0100 Subject: [PATCH 13/17] Refactor camera classes for handling 3 methods of calibration in a more elegant way - Updated aruco_parameters.yaml with ChArUco parameters - Enhanced extrinsic_calibrator_class.py to handle different calibration methods dynamically. - Introduced new classes for ArUco parameters to avoid mishmash - Introduced new camera classes which preform calibration. 
Each class inherits from base camera class which keeps some shared stuff - each of the class has its own calibration procedure and then the process of validating the calibration quality is common for each method --- .../config/aruco_parameters.yaml | 10 +- .../src/aruco_generator_class.py | 6 +- .../src/extrinsic_calibrator_class.py | 251 ++----------- .../src/models/__init__.py | 0 .../src/models/aruco_params.py | 63 ++++ .../src/models/camera.py | 332 ++++++++++++++++++ 6 files changed, 443 insertions(+), 219 deletions(-) create mode 100644 extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/__init__.py create mode 100644 extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/aruco_params.py create mode 100644 extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py diff --git a/extrinsic_calibrator_core/config/aruco_parameters.yaml b/extrinsic_calibrator_core/config/aruco_parameters.yaml index 3327e9c..18a2020 100644 --- a/extrinsic_calibrator_core/config/aruco_parameters.yaml +++ b/extrinsic_calibrator_core/config/aruco_parameters.yaml @@ -9,8 +9,8 @@ aruco_params: description: "Length of one side of the marker in meters." board_mode: type: string - default_value: "grid_board" - description: "Board generation mode. Options: single_marker, grid_board" + default_value: "charuco_board" + description: "Board generation mode. Options: single_marker, aruco_board, charuco_board" grid_rows: type: int default_value: 5 @@ -22,4 +22,8 @@ aruco_params: grid_marker_separation: type: double default_value: 0.04 - description: "Separation between markers in the grid board (meters). See 'b' on the image there - https://github.com/ethz-asl/kalibr/wiki/calibration-targets" \ No newline at end of file + description: "Separation between markers in the grid board (meters). 
See 'b' on the image there - https://github.com/ethz-asl/kalibr/wiki/calibration-targets" + square_length: + type: double + default_value: 0.2 + description: "Length of one side of the chess square in the charuco board (meters)." \ No newline at end of file diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/aruco_generator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/aruco_generator_class.py index 1516d87..bf4ad0e 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/aruco_generator_class.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/aruco_generator_class.py @@ -55,7 +55,7 @@ # Custom parameters from extrinsic_calibrator_core.python_aruco_parameters import aruco_params -from extrinsic_calibrator_core.src.extrinsic_calibrator_class import ArucoParams +from extrinsic_calibrator_core.src.models.aruco_params import ArucoMarkerParams @@ -66,7 +66,7 @@ def __init__(self): aruco_params_listener = aruco_params.ParamListener(self) imported_aruco_params = aruco_params_listener.get_params() - self.real_aruco_params = ArucoParams(self,imported_aruco_params) + self.real_aruco_params = ArucoMarkerParams(self,imported_aruco_params) # Parameters for marker generation self.declare_parameter('marker_size', 200) @@ -98,7 +98,7 @@ def __init__(self): rclpy.shutdown() - def generate_marker(self, marker_id, marker_size, output_directory, aruco_params:ArucoParams): + def generate_marker(self, marker_id, marker_size, output_directory, aruco_params:ArucoMarkerParams): # Generate the marker image marker_image = cv2.aruco.generateImageMarker(aruco_params.aruco_dict, marker_id, marker_size) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py index 92ec2ac..8d8598e 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py +++ 
b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py @@ -59,6 +59,7 @@ from geometry_msgs.msg import TransformStamped from rclpy.node import Node from sensor_msgs.msg import CameraInfo, Image +import rclpy import tf2_ros import tf_transformations @@ -66,6 +67,11 @@ from extrinsic_calibrator_core.python_aruco_parameters import aruco_params from extrinsic_calibrator_core.python_camera_topics_parameters import cameras_params +# Camera-detectors +from extrinsic_calibrator_core.src.models.camera import CameraAruco, CameraArucoBoard, CameraChArUcoBoard, CameraBase + +# aruco params +from extrinsic_calibrator_core.src.models.aruco_params import ArucoMarkerParams, ArucoBoardParams, ChArUcoBoardParams class ExtrinsicCalibrator(Node): def __init__(self): @@ -77,10 +83,18 @@ def __init__(self): # OpenCV bridge for converting ROS Image to OpenCV image self.bridge = CvBridge() + # Import ArUco parameters aruco_params_listener = aruco_params.ParamListener(self) imported_aruco_params = aruco_params_listener.get_params() - self.real_aruco_params = ArucoParams(self,imported_aruco_params) + if imported_aruco_params.board_mode == "single_marker": + self.real_aruco_params = ArucoMarkerParams(self, imported_aruco_params) + elif imported_aruco_params.board_mode == "aruco_board": + self.real_aruco_params = ArucoBoardParams(self, imported_aruco_params) + elif imported_aruco_params.board_mode == "charuco_board": + self.real_aruco_params = ChArUcoBoardParams(self, imported_aruco_params) + + # Import Cameras parameters cameras_param_listener = cameras_params.ParamListener(self) self.imported_cameras_params = cameras_param_listener.get_params() @@ -99,7 +113,17 @@ def __init__(self): if not image_topic or not camera_info_topic: self.get_logger().error(f"Skipping camera '{cam_name}' due to missing 'image_topic' or 'camera_info_topic' parameter.") else: - self.array_of_cameras.append(Camera(self, cam_name, idx, image_topic, camera_info_topic, self.bridge, 
self.tf_broadcaster, self.real_aruco_params)) + mode = imported_aruco_params.board_mode + if mode == "single_marker": + camera = CameraAruco(self, cam_name, idx, image_topic, camera_info_topic, self.bridge, self.tf_broadcaster, self.real_aruco_params) + elif mode == "aruco_board": + camera = CameraArucoBoard(self, cam_name, idx, image_topic, camera_info_topic, self.bridge, self.tf_broadcaster, self.real_aruco_params) + elif mode == "charuco_board": + camera = CameraChArUcoBoard(self, cam_name, idx, image_topic, camera_info_topic, self.bridge, self.tf_broadcaster, self.real_aruco_params) + else: + raise ValueError(f"Unknown board mode: {mode}. Supported modes are 'single_marker', 'aruco_board', and 'charuco_board'.") + + self.array_of_cameras.append(camera) # periodically check if all cameras are calibrated self.timer = self.create_timer(2.0, self.check_camera_transforms_callback) @@ -109,7 +133,6 @@ def check_camera_transforms_callback(self): if all([camera.are_all_transforms_precise() for camera in self.array_of_cameras]): self.get_logger().info(f"All marker transforms gathered successfully") for camera in self.array_of_cameras: - camera:Camera self.get_logger().info(f"Camera {camera.camera_name} has received all marker transforms") camera.image_sub.destroy() camera.camera_info_sub.destroy() @@ -118,7 +141,7 @@ def check_camera_transforms_callback(self): return True else: for camera in self.array_of_cameras: - camera:Camera + camera:CameraBase if camera.camera_matrix is None or camera.dist_coeffs is None: self.get_logger().warn(f"Camera {camera.camera_name} parameters not yet received. 
Is the camera_info topic correct?") self.get_logger().warn(f"Not all marker transforms gathered successfully") @@ -210,7 +233,7 @@ def find_central_marker(self): # For each cell in the table, provide two tables, one indicating how many other markers are seen byt the same camera, and another indicating how many cameras sees one individual camera for camera in self.array_of_cameras: - camera:Camera + camera:CameraBase for marker_id in range(self.largest_marker + 1): camera_counter = 0 marker_counter = 0 @@ -226,7 +249,7 @@ def find_central_marker(self): # The total result will be the addition of the two tables for camera in self.array_of_cameras: - camera:Camera + camera:CameraBase for marker_id in range(self.largest_marker + 1): if self.is_marker_visible_from_camera_table[marker_id][camera.camera_id]: self.scores_table[marker_id][camera.camera_id] = cameras_table[marker_id][camera.camera_id] + markers_table[marker_id][camera.camera_id] @@ -250,7 +273,7 @@ def find_random_max_index(self, scores_table): # Loop over the table to find the max value and its indices for marker_id in range(self.largest_marker + 1): for camera in self.array_of_cameras: - camera:Camera + camera:CameraBase if scores_table[marker_id][camera.camera_id] is not None: value = scores_table[marker_id][camera.camera_id] if value > max_value: @@ -271,7 +294,7 @@ def generate_transform_between_markers_table(self): # Create one table connected markers by a single camera for camera in self.array_of_cameras: - camera:Camera + camera:CameraBase camera.can_camera_connect_two_markers_table = [[False for _ in range(self.largest_marker + 1)] for _ in range(self.largest_marker + 1)] for origin_marker_id in range(self.largest_marker + 1): for destination_marker_id in range(self.largest_marker + 1): @@ -282,7 +305,7 @@ def generate_transform_between_markers_table(self): # Merge the previous table into a huge table to check which markers can be connected at all self.does_transform_exist_between_markers_table = 
[[False for _ in range(self.largest_marker + 1)] for _ in range(self.largest_marker + 1)] for camera in self.array_of_cameras: - camera:Camera + camera:CameraBase for origin_marker_id in range(self.largest_marker + 1): for destination_marker_id in range(self.largest_marker + 1): if camera.can_camera_connect_two_markers_table[origin_marker_id][destination_marker_id] == True: @@ -417,7 +440,7 @@ def return_the_cameras_between_markers_in_path(self,path): for marker_id_index in range(len(path_being_evaluated)-1): if type(path_being_evaluated[marker_id_index]) is int and type(path_being_evaluated[marker_id_index + 1]) is not str: for camera in self.array_of_cameras: - camera:Camera + camera:CameraBase if self.is_marker_visible_from_camera_table[path_being_evaluated[marker_id_index]][camera.camera_id] and self.is_marker_visible_from_camera_table[path_being_evaluated[marker_id_index + 1]][camera.camera_id]: new_path = copy.deepcopy(path_being_evaluated) new_path.insert(marker_id_index + 1, camera.camera_name) @@ -466,7 +489,7 @@ def compose_marker_to_marker_transform(self, paths): destination_marker_id = path[element_index + 2] for camera in self.array_of_cameras: if camera.camera_name == camera_name: - camera:Camera + camera:CameraBase if camera.can_camera_connect_two_markers_table[origin_marker_id][destination_marker_id]: transform = np.dot(np.linalg.inv(camera.reliable_marker_transforms[origin_marker_id]), camera.reliable_marker_transforms[destination_marker_id]) path_transform = np.dot(path_transform, transform) @@ -491,7 +514,7 @@ def generate_camera_to_marker_transform_table(self): # Create a table with the transforms between the cameras and the markers self.camera_to_marker_transform_table = [[None for _ in range(len(self.array_of_cameras))] for _ in range(self.largest_marker + 1)] for camera in self.array_of_cameras: - camera:Camera + camera:CameraBase for marker_id in range(self.largest_marker + 1): if 
self.is_marker_visible_from_camera_table[marker_id][camera.camera_id]: self.camera_to_marker_transform_table[marker_id][camera.camera_id] = camera.reliable_marker_transforms[marker_id] @@ -505,7 +528,7 @@ def generate_world_to_cameras_transform_table(self): # Create a table with the transforms between "map" and the cameras self.map_to_cameras_transform_table = [None for _ in range(len(self.array_of_cameras))] for camera in self.array_of_cameras: - camera:Camera + camera:CameraBase # check if the self.does_transform_exist_between_markers_table[camera_id]: has any True marker for marker_id in range(self.largest_marker + 1): if self.is_marker_visible_from_camera_table[marker_id][camera.camera_id]: @@ -598,7 +621,7 @@ def broadcast_cameras_and_markers_to_world(self): # Add all the camera transforms for camera in self.array_of_cameras: - camera:Camera + camera:CameraBase if self.map_to_cameras_transform_table[camera.camera_id] is not None: t = TransformStamped() t.header.stamp = self.get_clock().now().to_msg() @@ -646,202 +669,4 @@ def display_marker_to_marker_table(self, title, table_data): else: for marker_id, row in enumerate(table_data): table.add_row([marker_id] + [cell for cell in row]) - self.get_logger().info(f"{title}\n" + table.get_string()) - - - -class ArucoParams(): - def __init__(self, node:Node, aruco_params): - if hasattr(cv2.aruco, aruco_params.aruco_dict): - self.aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_params.aruco_dict)) - else: - node.get_logger().error(f"cv2.aruco doesn't have a dictionary with the name '{aruco_params.aruco_dict}'") - - self.marker_length = aruco_params.marker_length - self.board_mode = aruco_params.board_mode - self.grid_rows = aruco_params.grid_rows - self.grid_cols = aruco_params.grid_cols - self.grid_marker_separation = aruco_params.grid_marker_separation - - node.get_logger().info( - f"Aruco parameters set:\n" - f" Dictionary: {aruco_params.aruco_dict}\n" - f" Marker Length: 
{self.marker_length}\n" - f" Board Mode: {self.board_mode}\n" - f" Grid Rows: {self.grid_rows}\n" - f" Grid Cols: {self.grid_cols}\n" - f" Marker Separation: {self.grid_marker_separation}\n" - ) - - self.board = None - if self.board_mode == "grid_board": - self.board = cv2.aruco.GridBoard( - (self.grid_cols, self.grid_rows), - self.marker_length, - self.grid_marker_separation, - self.aruco_dict, - ) - node.get_logger().info( - f"Calibrator running in grid board mode:\n" - f" Dictionary: {self.board.getDictionary().markerSize}X{self.board.getDictionary().markerSize}\n" - f" Marker Length: {self.board.getMarkerLength()}\n" - f" Grid Size: {self.board.getGridSize()}\n" - f" Ids: {self.board.getIds()}\n" - f" Marker Separation: {self.board.getMarkerSeparation()}\n" - ) - else: - node.get_logger().error(f"Calibrator running in single markers mode") - - - -class Camera(): - def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, camera_info_topic:str, bridge:CvBridge, broadcaster:tf2_ros.TransformBroadcaster, aruco_params:ArucoParams): - - self.node = node - self.camera_name = camera_name - self.camera_id = camera_id - self.image_topic = "/robotic_platform/" + image_topic - self.camera_info_topic = camera_info_topic - self.bridge = bridge - self.tf_broadcaster = broadcaster - - self.node.get_logger().info(f"Camera {self.camera_name} created.") - - self.camera_matrix = None - self.dist_coeffs = None - - # Define Aruco marker properties - self.aruco_dict = aruco_params.aruco_dict - self.parameters = cv2.aruco.DetectorParameters() - self.detector = cv2.aruco.ArucoDetector(self.aruco_dict, self.parameters) - self.marker_length = aruco_params.marker_length # length of the marker side in meters (adjust as needed) - self.board_mode = aruco_params.board_mode - self.board = aruco_params.board # May be None if single marker mode - - # Subscribe to the camera image topic and camera info - self.image_sub = self.node.create_subscription(Image, image_topic, 
self.image_callback, 1) - self.camera_info_sub = self.node.create_subscription(CameraInfo, camera_info_topic, self.camera_info_callback, 1) - self.cv2_image_publisher = self.node.create_publisher(Image, f"{image_topic}/detected_markers", 10) - - self.node.get_logger().info(f"Camera {self.camera_name} subscribed to {image_topic} and {camera_info_topic} topics.") - - self.marker_transforms = {} - self.reliable_marker_transforms = {} - - - def camera_info_callback(self, msg): - if self.camera_matrix is None: - self.camera_matrix = np.array(msg.k).reshape((3, 3)) - self.dist_coeffs = np.array(msg.d) - self.node.get_logger().info(f"Camera {self.camera_name} parameters received.") - - - def image_callback(self, msg): - if self.camera_matrix is None or self.dist_coeffs is None: - self.node.get_logger().warn(f"Camera {self.camera_name} parameters not yet received.") - return - - cv_image = self.bridge.imgmsg_to_cv2(msg, "bgr8") - - # For ArUco detection, you can use the filtered_image directly - corners, ids, _ = self.detector.detectMarkers(cv_image) - - # Visualize marker detection - if ids is not None and len(ids) > 0: - cv2.aruco.drawDetectedMarkers(cv_image, corners, ids) - - detected_ids = set() - - if ids is not None and len(corners) > 0: - if self.board is not None: # board mode - success, rvec, tvec = cv2.aruco.estimatePoseBoard( - corners, ids, self.board, self.camera_matrix, self.dist_coeffs, None, None - ) - - # visualize the board pose - if success > 0: - cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.board.getMarkerLength() * 2) - - if 0 not in self.marker_transforms and 0 not in self.reliable_marker_transforms: - self.marker_transforms[0] = deque(maxlen=30) - - if 0 not in self.reliable_marker_transforms: - rot_matrix, _ = cv2.Rodrigues(rvec) - translation_matrix = np.eye(4) - translation_matrix[:3, :3] = rot_matrix - translation_matrix[:3, 3] = tvec.flatten() - self.marker_transforms[0].append(translation_matrix) - - else: 
# Single marker mode - for i, id in enumerate(ids): - marker_id = id[0] - detected_ids.add(marker_id) - - if marker_id not in self.marker_transforms and marker_id not in self.reliable_marker_transforms: - self.marker_transforms[marker_id] = deque(maxlen=30) - - objPoints = np.array([ [-self.marker_length/2, self.marker_length/2, 0], - [self.marker_length/2, self.marker_length/2, 0], - [self.marker_length/2, -self.marker_length/2, 0], - [-self.marker_length/2,-self.marker_length/2, 0]], dtype=np.float32) - - success, rvec, tvec = cv2.solvePnP(objPoints, corners[i], self.camera_matrix, self.dist_coeffs) - if success: - rot_matrix, _ = cv2.Rodrigues(rvec) - translation_matrix = np.eye(4) - translation_matrix[:3, :3] = rot_matrix - translation_matrix[:3, 3] = tvec.flatten() - - cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.marker_length/2) - if marker_id not in self.reliable_marker_transforms: - self.marker_transforms[marker_id].append(translation_matrix) - - # iterate through each marker of the marker_transforms dictionary - for marker_id, transforms in self.marker_transforms.items(): - if len(transforms) == 30: - self.check_precision(marker_id, transforms) - - # delete all the transforms from the marker_transforms dictionary - for marker_id, _ in self.reliable_marker_transforms.items(): - if marker_id in self.marker_transforms: - del self.marker_transforms[marker_id] - - ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8") - self.cv2_image_publisher.publish(ros_image) - - - def check_precision(self, marker_id, transform): - if self.is_precise(transform): - self.node.get_logger().info(f"Camera {self.camera_name}: Marker {marker_id} is reliable") - # add the last transform of the array in the dictionary as reliable marker transform - self.reliable_marker_transforms[marker_id] = transform[-1] - - - def is_precise(self, transforms): - if all(transform is not None for transform in transforms): - positions = np.array([t[:3, 3] for t 
in transforms]) - position_range = np.ptp(positions, axis=0) - - rotations = [R.from_matrix(t[:3, :3]) for t in transforms] - quat = np.array([r.as_quat() for r in rotations]) - angles = [] - for i in range(len(quat)): - for j in range(i+1, len(quat)): - dot = np.abs(np.dot(quat[i], quat[j])) - angle = 2 * np.arccos(np.clip(dot, -1.0, 1.0)) - angles.append(angle) - rotation_range = np.max(angles) - - return np.all(position_range < 0.01) and np.all(rotation_range < np.radians(1)) - else: - return False - - - def are_all_transforms_precise(self): - self.node.get_logger().info(f"Camera {self.camera_name}: Reliable markers: {list(self.reliable_marker_transforms.keys())}") - self.node.get_logger().info(f"Camera {self.camera_name}: Not Reliable markers: {list(self.marker_transforms.keys())}") - if len(self.reliable_marker_transforms) > 0 and len(self.marker_transforms) == 0: - self.node.get_logger().info(f"Camera {self.camera_name}: All markers are reliable") - return True - else: - return False \ No newline at end of file + self.get_logger().info(f"{title}\n" + table.get_string()) \ No newline at end of file diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/__init__.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/aruco_params.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/aruco_params.py new file mode 100644 index 0000000..85ddbef --- /dev/null +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/aruco_params.py @@ -0,0 +1,63 @@ +import cv2 + +from rclpy.node import Node + +class ArucoMarkerParams(): + def __init__(self, node:Node, aruco_params): + + if hasattr(cv2.aruco, aruco_params.aruco_dict): + self.aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_params.aruco_dict)) + else: + node.get_logger().error(f"cv2.aruco doesn't have a 
dictionary with the name '{aruco_params.aruco_dict}'") + + self.marker_length = aruco_params.marker_length + + node.get_logger().info( + f"{self.__class__.__name__} set:\n" + f" Dictionary: {aruco_params.aruco_dict}\n" + f" Marker Length: {self.marker_length}\n" + ) + +class ArucoBoardParams(): + def __init__(self, node:Node, aruco_params): + + if hasattr(cv2.aruco, aruco_params.aruco_dict): + self.aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_params.aruco_dict)) + else: + node.get_logger().error(f"cv2.aruco doesn't have a dictionary with the name '{aruco_params.aruco_dict}'") + + self.marker_length = aruco_params.marker_length + self.grid_rows = aruco_params.grid_rows + self.grid_cols = aruco_params.grid_cols + self.grid_marker_separation = aruco_params.grid_marker_separation + + node.get_logger().info( + f"{self.__class__.__name__} set:\n" + f" Dictionary: {aruco_params.aruco_dict}\n" + f" Marker Length: {self.marker_length}\n" + f" Grid Rows: {self.grid_rows}\n" + f" Grid Cols: {self.grid_cols}\n" + f" Marker Separation: {self.grid_marker_separation}\n" + ) + + +class ChArUcoBoardParams(): + def __init__(self, node:Node, aruco_params): + + if hasattr(cv2.aruco, aruco_params.aruco_dict): + self.aruco_dict = cv2.aruco.getPredefinedDictionary(getattr(cv2.aruco, aruco_params.aruco_dict)) + else: + node.get_logger().error(f"cv2.aruco doesn't have a dictionary with the name '{aruco_params.aruco_dict}'") + + self.marker_length = aruco_params.marker_length + self.square_length = aruco_params.square_length + self.grid_rows = aruco_params.grid_rows + self.grid_cols = aruco_params.grid_cols + + node.get_logger().info( + f"[{self.__class__.__name__}] set:\n" + f" Dictionary: {aruco_params.aruco_dict}\n" + f" Marker Length: {self.marker_length}\n" + f" Grid Rows: {self.grid_rows}\n" + f" Grid Cols: {self.grid_cols}\n" + ) \ No newline at end of file diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py 
b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py new file mode 100644 index 0000000..0b795a6 --- /dev/null +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py @@ -0,0 +1,332 @@ +import cv2 +import numpy as np +from cv_bridge import CvBridge +from scipy.spatial.transform import Rotation as R +from abc import ABC, abstractmethod + +from collections import deque + +import tf2_ros +from rclpy.node import Node +from sensor_msgs.msg import Image, CameraInfo + +from extrinsic_calibrator_core.src.models.aruco_params import ArucoMarkerParams, ArucoBoardParams, ChArUcoBoardParams + +class CameraBase(ABC): + def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, bridge:CvBridge, broadcaster:tf2_ros.TransformBroadcaster): + + self.node = node + self.camera_name = camera_name + self.camera_id = camera_id + self.image_topic = "/robotic_platform/" + image_topic + self.bridge = bridge + self.tf_broadcaster = broadcaster + + self.camera_matrix = None + self.dist_coeffs = None + + self.image_sub = self.node.create_subscription(Image, image_topic, self.image_callback, 1) + self.cv2_image_publisher = self.node.create_publisher(Image, f"{image_topic}/detected_markers", 10) + + self.marker_transforms = {} + self.reliable_marker_transforms = {} + + @abstractmethod + def image_callback(self, msg): + pass + + def check_precision(self, marker_id, transform): + if self.is_precise(transform): + self.node.get_logger().info(f"Camera {self.camera_name}: Marker {marker_id} is reliable") + # add the last transform of the array in the dictionary as reliable marker transform + self.reliable_marker_transforms[marker_id] = transform[-1] + + + def is_precise(self, transforms): + if all(transform is not None for transform in transforms): + positions = np.array([t[:3, 3] for t in transforms]) + position_range = np.ptp(positions, axis=0) + + rotations = [R.from_matrix(t[:3, :3]) for t in transforms] + quat = np.array([r.as_quat() 
for r in rotations]) + angles = [] + for i in range(len(quat)): + for j in range(i+1, len(quat)): + dot = np.abs(np.dot(quat[i], quat[j])) + angle = 2 * np.arccos(np.clip(dot, -1.0, 1.0)) + angles.append(angle) + rotation_range = np.max(angles) + + self.node.get_logger().info(f"position range: {np.mean(position_range)}, rotation range (degrees): {np.degrees(rotation_range)}") + + return np.all(position_range < 0.01) and np.all(rotation_range < np.radians(1)) + else: + return False + + + def are_all_transforms_precise(self): + self.node.get_logger().info(f"Camera {self.camera_name}: Reliable markers: {list(self.reliable_marker_transforms.keys())}") + self.node.get_logger().info(f"Camera {self.camera_name}: Not Reliable markers: {list(self.marker_transforms.keys())}") + if len(self.reliable_marker_transforms) > 0 and len(self.marker_transforms) == 0: + self.node.get_logger().info(f"Camera {self.camera_name}: All markers are reliable") + return True + else: + return False + + +class CameraAruco(CameraBase): + def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, camera_info_topic:str, bridge:CvBridge, broadcaster:tf2_ros.TransformBroadcaster, aruco_params : ArucoMarkerParams): + super().__init__(node, camera_name, camera_id, image_topic, bridge, broadcaster) + + self.aruco_dict = aruco_params.aruco_dict + self.marker_length = aruco_params.marker_length + + self.parameters = cv2.aruco.DetectorParameters() + self.detector = cv2.aruco.ArucoDetector(self.aruco_dict, self.parameters) + + self.camera_info_sub = self.node.create_subscription(CameraInfo, camera_info_topic, self.camera_info_callback, 1) + + self.node.get_logger().info(f"Camera {self.camera_name} created.") + self.node.get_logger().info(f"Camera {self.camera_name} subscribed to image topic: {image_topic}.") + self.node.get_logger().info(f"Camera {self.camera_name} subscribed to camera_info topic: {camera_info_topic}.") + + def camera_info_callback(self, msg): + if self.camera_matrix is 
None: + self.camera_matrix = np.array(msg.k).reshape((3, 3)) + self.dist_coeffs = np.array(msg.d) + self.node.get_logger().info(f"Camera {self.camera_name} parameters received.") + + def image_callback(self, msg): + if self.camera_matrix is None or self.dist_coeffs is None: + self.node.get_logger().warn(f"Camera {self.camera_name} parameters not yet received.") + return + + cv_image = self.bridge.imgmsg_to_cv2(msg, "bgr8") + + # For ArUco detection, you can use the filtered_image directly + corners, ids, _ = self.detector.detectMarkers(cv_image) + detected_ids = set() + if ids is not None: + for i, id in enumerate(ids): + marker_id = id[0] + detected_ids.add(marker_id) + + if marker_id not in self.marker_transforms and marker_id not in self.reliable_marker_transforms: + self.marker_transforms[marker_id] = deque(maxlen=30) + + objPoints = np.array([ [-self.marker_length/2, self.marker_length/2, 0], + [self.marker_length/2, self.marker_length/2, 0], + [self.marker_length/2, -self.marker_length/2, 0], + [-self.marker_length/2,-self.marker_length/2, 0]], dtype=np.float32) + + success, rvec, tvec = cv2.solvePnP(objPoints, corners[i], self.camera_matrix, self.dist_coeffs) + if success: + rot_matrix, _ = cv2.Rodrigues(rvec) + translation_matrix = np.eye(4) + translation_matrix[:3, :3] = rot_matrix + translation_matrix[:3, 3] = tvec.flatten() + + # Draw the transform + cv2.aruco.drawDetectedMarkers(cv_image, corners, ids) + cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.marker_length/2) + ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8") + self.cv2_image_publisher.publish(ros_image) + + # Filter out the already reliable markers + if marker_id in self.reliable_marker_transforms: + continue + else: + self.marker_transforms[marker_id].append(translation_matrix) + + # Add None for markers not detected in this frame + for marker_id in self.marker_transforms: + if marker_id not in detected_ids: + # Restart the precision of the marker if 
not seen + # self.marker_transforms[marker_id].append(None) + pass + + # iterate through each marker of the marker_transforms dictionary + for marker_id, transforms in self.marker_transforms.items(): + if len(transforms) == 30: + self.check_precision(marker_id, transforms) + + # delete all the transforms from the marker_transforms dictionary + for marker_id, transform in self.reliable_marker_transforms.items(): + if marker_id in self.marker_transforms: + del self.marker_transforms[marker_id] + +class CameraArucoBoard(CameraBase): + def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, camera_info_topic:str, bridge:CvBridge, broadcaster:tf2_ros.TransformBroadcaster, aruco_params : ArucoBoardParams): + super().__init__(node, camera_name, camera_id, image_topic, bridge, broadcaster) # removed extra aruco_params + + self.detector_parameters = cv2.aruco.DetectorParameters() + self.detector = cv2.aruco.ArucoDetector(aruco_params.aruco_dict, self.detector_parameters) + + self.board = cv2.aruco.GridBoard( + (aruco_params.grid_cols, aruco_params.grid_rows), + aruco_params.marker_length, + aruco_params.grid_marker_separation, + aruco_params.aruco_dict, + ) + + self.camera_info_sub = self.node.create_subscription(CameraInfo, camera_info_topic, self.camera_info_callback, 1) + + self.node.get_logger().info(f"Camera {self.camera_name} created.") + self.node.get_logger().info(f"Camera {self.camera_name} subscribed to image topic: {image_topic}.") + self.node.get_logger().info(f"Camera {self.camera_name} subscribed to camera_info topic: {camera_info_topic}.") + + node.get_logger().info( + f" Camera {camera_name} Created board:\n" + f" Dictionary: {self.board.getDictionary().markerSize}X{self.board.getDictionary().markerSize}\n" + f" Marker Length: {self.board.getMarkerLength()}\n" + f" Grid Size: {self.board.getGridSize()}\n" + f" Ids: {self.board.getIds()}\n" + f" Marker Separation: {self.board.getMarkerSeparation()}\n" + ) + + def 
camera_info_callback(self, msg): + if self.camera_matrix is None: + self.camera_matrix = np.array(msg.k).reshape((3, 3)) + self.dist_coeffs = np.array(msg.d) + self.node.get_logger().info(f"Camera {self.camera_name} parameters received.") + + def image_callback(self, msg): + if self.camera_matrix is None or self.dist_coeffs is None: + self.node.get_logger().warn(f"Camera {self.camera_name} parameters not yet received.") + return + + cv_image = self.bridge.imgmsg_to_cv2(msg, "bgr8") + + # For ArUco detection, you can use the filtered_image directly + corners, ids, _ = self.detector.detectMarkers(cv_image) + + # Visualize marker detection + if ids is not None and len(ids) > 0: + cv2.aruco.drawDetectedMarkers(cv_image, corners, ids) + + if ids is not None and len(corners) > 0: + + success, rvec, tvec = cv2.aruco.estimatePoseBoard( + corners, ids, self.board, self.camera_matrix, self.dist_coeffs, None, None + ) + + # If pose estimation is successful + if success > 0: + cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.board.getMarkerLength() * 2) + + if 0 not in self.marker_transforms and 0 not in self.reliable_marker_transforms: + self.marker_transforms[0] = deque(maxlen=30) + + if 0 not in self.reliable_marker_transforms: + rot_matrix, _ = cv2.Rodrigues(rvec) + translation_matrix = np.eye(4) + translation_matrix[:3, :3] = rot_matrix + translation_matrix[:3, 3] = tvec.flatten() + self.marker_transforms[0].append(translation_matrix) + + # iterate through each marker of the marker_transforms dictionary + for marker_id, transforms in self.marker_transforms.items(): + if len(transforms) == 30: + self.check_precision(marker_id, transforms) + + # delete all the transforms from the marker_transforms dictionary + for marker_id, _ in self.reliable_marker_transforms.items(): + if marker_id in self.marker_transforms: + del self.marker_transforms[marker_id] + + ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8") + 
self.cv2_image_publisher.publish(ros_image) + +class CameraChArUcoBoard(CameraBase): + def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, camera_info_topic:str, bridge:CvBridge, broadcaster:tf2_ros.TransformBroadcaster, aruco_params : ChArUcoBoardParams): + super().__init__(node, camera_name, camera_id, image_topic, bridge, broadcaster) + + self.board = cv2.aruco.CharucoBoard( + (aruco_params.grid_cols, aruco_params.grid_rows), + aruco_params.square_length, + aruco_params.marker_length, + aruco_params.aruco_dict, + ) + + self.charuco_parameters = cv2.aruco.CharucoParameters() + self.detector_parameters = cv2.aruco.DetectorParameters() + self.detector = cv2.aruco.CharucoDetector(self.board, self.charuco_parameters, self.detector_parameters) # aruco_params.aruco_dict + + # placing camera info subscription here is temporary - just to enable extrinsic calibration without preforming intrinsic calibration first + self.camera_info_sub = self.node.create_subscription(CameraInfo, camera_info_topic, self.camera_info_callback, 1) + + self.node.get_logger().info(f"Camera {self.camera_name} created.") + self.node.get_logger().info(f"Camera {self.camera_name} subscribed to image topic: {image_topic}.") + self.node.get_logger().info(f"Camera {self.camera_name} subscribed to camera_info topic: {camera_info_topic}.") + + node.get_logger().info( + f" Camera {camera_name} Created board:\n" + f" Dictionary: {self.board.getDictionary().markerSize}X{self.board.getDictionary().markerSize}\n" + f" Marker Length: {self.board.getMarkerLength()}\n" + f" Square Length: {self.board.getSquareLength()}\n" + f" Chessboard Size: {self.board.getChessboardSize()}\n" + f" Ids: {self.board.getIds()}\n" + ) + + def camera_info_callback(self, msg): + if self.camera_matrix is None: + self.camera_matrix = np.array(msg.k).reshape((3, 3)) + self.dist_coeffs = np.array(msg.d) + self.node.get_logger().info(f"Camera {self.camera_name} parameters received.") + + #TODO: rewrite for 
ChArUco boards + def image_callback(self, msg): + + if self.camera_matrix is None or self.dist_coeffs is None: + self.node.get_logger().warn(f"Camera {self.camera_name} parameters not yet received.") + return + + cv_image = self.bridge.imgmsg_to_cv2(msg, "bgr8") + + charuco_corners, charuco_ids, marker_corners, marker_ids = self.detector.detectBoard(cv_image) + + + # If detection is successful + if (marker_ids is not None and len(marker_ids) > 0) and (charuco_ids is not None and len(charuco_ids) > 0): + + # visualize the detected markers, chess square corners and board pose + cv2.aruco.drawDetectedMarkers(cv_image, marker_corners, marker_ids) + cv2.aruco.drawDetectedCornersCharuco(cv_image, charuco_corners, charuco_ids, (255, 0, 0)) + + # Estimate the board pose + success, rvec, tvec = cv2.aruco.estimatePoseCharucoBoard( + charuco_corners, charuco_ids, self.board, self.camera_matrix, self.dist_coeffs, None, None + ) + + # If pose estimation is successful + if success > 0: + + cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.board.getSquareLength() * 2) + + if 0 not in self.marker_transforms and 0 not in self.reliable_marker_transforms: + self.marker_transforms[0] = deque(maxlen=30) + + if 0 not in self.reliable_marker_transforms: + rot_matrix, _ = cv2.Rodrigues(rvec) + translation_matrix = np.eye(4) + translation_matrix[:3, :3] = rot_matrix + translation_matrix[:3, 3] = tvec.flatten() + self.marker_transforms[0].append(translation_matrix) + + # iterate through each marker of the marker_transforms dictionary + for marker_id, transforms in self.marker_transforms.items(): + if len(transforms) == 30: + self.check_precision(marker_id, transforms) + + # delete all the transforms from the marker_transforms dictionary + for marker_id, _ in self.reliable_marker_transforms.items(): + if marker_id in self.marker_transforms: + del self.marker_transforms[marker_id] + + ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8") + 
self.cv2_image_publisher.publish(ros_image) + + # TODO: implement this functionality + def write_camera_intrinsics(self, file_path:str): + return 1 + \ No newline at end of file From 7c839a0472e7a7de50d4a37b713a69d5058817ba Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Mon, 3 Nov 2025 11:32:44 +0000 Subject: [PATCH 14/17] revert changes from previous logic which inquired idea od preforming intrinsic calibration, code cleanups --- .../src/models/camera.py | 107 +++++++----------- 1 file changed, 43 insertions(+), 64 deletions(-) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py index 0b795a6..4306c96 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py @@ -13,36 +13,49 @@ from extrinsic_calibrator_core.src.models.aruco_params import ArucoMarkerParams, ArucoBoardParams, ChArUcoBoardParams class CameraBase(ABC): - def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, bridge:CvBridge, broadcaster:tf2_ros.TransformBroadcaster): + def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, camera_info_topic:str, bridge:CvBridge, broadcaster:tf2_ros.TransformBroadcaster): self.node = node self.camera_name = camera_name self.camera_id = camera_id self.image_topic = "/robotic_platform/" + image_topic + self.camera_info_topic = camera_info_topic self.bridge = bridge self.tf_broadcaster = broadcaster self.camera_matrix = None self.dist_coeffs = None + self.camera_info_sub = self.node.create_subscription(CameraInfo, camera_info_topic, self.camera_info_callback, 1) self.image_sub = self.node.create_subscription(Image, image_topic, self.image_callback, 1) self.cv2_image_publisher = self.node.create_publisher(Image, f"{image_topic}/detected_markers", 10) self.marker_transforms = {} self.reliable_marker_transforms 
= {} + def camera_info_callback(self, msg : CameraInfo): + if self.camera_matrix is None: + self.camera_matrix = np.array(msg.k).reshape((3, 3)) + self.dist_coeffs = np.array(msg.d) + self.node.get_logger().info(f"Camera {self.camera_name} parameters received.") + @abstractmethod - def image_callback(self, msg): + def image_callback(self, msg : Image): pass - def check_precision(self, marker_id, transform): + def log_initialization(self): + self.node.get_logger().info(f"Camera {self.camera_name} created.") + self.node.get_logger().info(f"Camera {self.camera_name} subscribed to image topic: {self.image_topic}.") + self.node.get_logger().info(f"Camera {self.camera_name} subscribed to camera_info topic: {self.camera_info_topic}.") + + def check_precision(self, marker_id : int, transform : np.ndarray): if self.is_precise(transform): self.node.get_logger().info(f"Camera {self.camera_name}: Marker {marker_id} is reliable") # add the last transform of the array in the dictionary as reliable marker transform self.reliable_marker_transforms[marker_id] = transform[-1] - def is_precise(self, transforms): + def is_precise(self, transforms : np.ndarray): if all(transform is not None for transform in transforms): positions = np.array([t[:3, 3] for t in transforms]) position_range = np.ptp(positions, axis=0) @@ -76,7 +89,7 @@ def are_all_transforms_precise(self): class CameraAruco(CameraBase): def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, camera_info_topic:str, bridge:CvBridge, broadcaster:tf2_ros.TransformBroadcaster, aruco_params : ArucoMarkerParams): - super().__init__(node, camera_name, camera_id, image_topic, bridge, broadcaster) + super().__init__(node, camera_name, camera_id, image_topic, camera_info_topic, bridge, broadcaster) self.aruco_dict = aruco_params.aruco_dict self.marker_length = aruco_params.marker_length @@ -84,19 +97,10 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c self.parameters = 
cv2.aruco.DetectorParameters() self.detector = cv2.aruco.ArucoDetector(self.aruco_dict, self.parameters) - self.camera_info_sub = self.node.create_subscription(CameraInfo, camera_info_topic, self.camera_info_callback, 1) - - self.node.get_logger().info(f"Camera {self.camera_name} created.") - self.node.get_logger().info(f"Camera {self.camera_name} subscribed to image topic: {image_topic}.") - self.node.get_logger().info(f"Camera {self.camera_name} subscribed to camera_info topic: {camera_info_topic}.") + super().log_initialization() - def camera_info_callback(self, msg): - if self.camera_matrix is None: - self.camera_matrix = np.array(msg.k).reshape((3, 3)) - self.dist_coeffs = np.array(msg.d) - self.node.get_logger().info(f"Camera {self.camera_name} parameters received.") - - def image_callback(self, msg): + # This method is unchanged from the original extrinsic_calibrator + def image_callback(self, msg : Image): if self.camera_matrix is None or self.dist_coeffs is None: self.node.get_logger().warn(f"Camera {self.camera_name} parameters not yet received.") return @@ -157,10 +161,7 @@ def image_callback(self, msg): class CameraArucoBoard(CameraBase): def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, camera_info_topic:str, bridge:CvBridge, broadcaster:tf2_ros.TransformBroadcaster, aruco_params : ArucoBoardParams): - super().__init__(node, camera_name, camera_id, image_topic, bridge, broadcaster) # removed extra aruco_params - - self.detector_parameters = cv2.aruco.DetectorParameters() - self.detector = cv2.aruco.ArucoDetector(aruco_params.aruco_dict, self.detector_parameters) + super().__init__(node, camera_name, camera_id, image_topic, camera_info_topic, bridge, broadcaster) self.board = cv2.aruco.GridBoard( (aruco_params.grid_cols, aruco_params.grid_rows), @@ -168,12 +169,9 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c aruco_params.grid_marker_separation, aruco_params.aruco_dict, ) - - 
self.camera_info_sub = self.node.create_subscription(CameraInfo, camera_info_topic, self.camera_info_callback, 1) - self.node.get_logger().info(f"Camera {self.camera_name} created.") - self.node.get_logger().info(f"Camera {self.camera_name} subscribed to image topic: {image_topic}.") - self.node.get_logger().info(f"Camera {self.camera_name} subscribed to camera_info topic: {camera_info_topic}.") + self.detector_parameters = cv2.aruco.DetectorParameters() + self.detector = cv2.aruco.ArucoDetector(aruco_params.aruco_dict, self.detector_parameters) node.get_logger().info( f" Camera {camera_name} Created board:\n" @@ -184,34 +182,32 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c f" Marker Separation: {self.board.getMarkerSeparation()}\n" ) - def camera_info_callback(self, msg): - if self.camera_matrix is None: - self.camera_matrix = np.array(msg.k).reshape((3, 3)) - self.dist_coeffs = np.array(msg.d) - self.node.get_logger().info(f"Camera {self.camera_name} parameters received.") + super().log_initialization() + + def image_callback(self, msg : Image): - def image_callback(self, msg): if self.camera_matrix is None or self.dist_coeffs is None: self.node.get_logger().warn(f"Camera {self.camera_name} parameters not yet received.") return cv_image = self.bridge.imgmsg_to_cv2(msg, "bgr8") - # For ArUco detection, you can use the filtered_image directly corners, ids, _ = self.detector.detectMarkers(cv_image) - # Visualize marker detection - if ids is not None and len(ids) > 0: + # If detection is successful + if ids is not None and len(corners) > 0 and len(ids) > 0: + + # Visualize detected markers cv2.aruco.drawDetectedMarkers(cv_image, corners, ids) - if ids is not None and len(corners) > 0: - + # Estimate the board pose success, rvec, tvec = cv2.aruco.estimatePoseBoard( corners, ids, self.board, self.camera_matrix, self.dist_coeffs, None, None ) # If pose estimation is successful if success > 0: + # Visualize board pose 
cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.board.getMarkerLength() * 2) if 0 not in self.marker_transforms and 0 not in self.reliable_marker_transforms: @@ -239,7 +235,7 @@ def image_callback(self, msg): class CameraChArUcoBoard(CameraBase): def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, camera_info_topic:str, bridge:CvBridge, broadcaster:tf2_ros.TransformBroadcaster, aruco_params : ChArUcoBoardParams): - super().__init__(node, camera_name, camera_id, image_topic, bridge, broadcaster) + super().__init__(node, camera_name, camera_id, image_topic, camera_info_topic, bridge, broadcaster) self.board = cv2.aruco.CharucoBoard( (aruco_params.grid_cols, aruco_params.grid_rows), @@ -250,14 +246,7 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c self.charuco_parameters = cv2.aruco.CharucoParameters() self.detector_parameters = cv2.aruco.DetectorParameters() - self.detector = cv2.aruco.CharucoDetector(self.board, self.charuco_parameters, self.detector_parameters) # aruco_params.aruco_dict - - # placing camera info subscription here is temporary - just to enable extrinsic calibration without preforming intrinsic calibration first - self.camera_info_sub = self.node.create_subscription(CameraInfo, camera_info_topic, self.camera_info_callback, 1) - - self.node.get_logger().info(f"Camera {self.camera_name} created.") - self.node.get_logger().info(f"Camera {self.camera_name} subscribed to image topic: {image_topic}.") - self.node.get_logger().info(f"Camera {self.camera_name} subscribed to camera_info topic: {camera_info_topic}.") + self.detector = cv2.aruco.CharucoDetector(self.board, self.charuco_parameters, self.detector_parameters) node.get_logger().info( f" Camera {camera_name} Created board:\n" @@ -268,14 +257,9 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c f" Ids: {self.board.getIds()}\n" ) - def camera_info_callback(self, msg): - if 
self.camera_matrix is None: - self.camera_matrix = np.array(msg.k).reshape((3, 3)) - self.dist_coeffs = np.array(msg.d) - self.node.get_logger().info(f"Camera {self.camera_name} parameters received.") + super().log_initialization() - #TODO: rewrite for ChArUco boards - def image_callback(self, msg): + def image_callback(self, msg : Image): if self.camera_matrix is None or self.dist_coeffs is None: self.node.get_logger().warn(f"Camera {self.camera_name} parameters not yet received.") @@ -285,11 +269,10 @@ def image_callback(self, msg): charuco_corners, charuco_ids, marker_corners, marker_ids = self.detector.detectBoard(cv_image) - # If detection is successful if (marker_ids is not None and len(marker_ids) > 0) and (charuco_ids is not None and len(charuco_ids) > 0): - # visualize the detected markers, chess square corners and board pose + # Visualize detected markers and chess square corners cv2.aruco.drawDetectedMarkers(cv_image, marker_corners, marker_ids) cv2.aruco.drawDetectedCornersCharuco(cv_image, charuco_corners, charuco_ids, (255, 0, 0)) @@ -300,9 +283,10 @@ def image_callback(self, msg): # If pose estimation is successful if success > 0: - + # Visualize board pose cv2.drawFrameAxes(cv_image, self.camera_matrix, self.dist_coeffs, rvec, tvec, self.board.getSquareLength() * 2) + # '0' marker == board pose if 0 not in self.marker_transforms and 0 not in self.reliable_marker_transforms: self.marker_transforms[0] = deque(maxlen=30) @@ -317,16 +301,11 @@ def image_callback(self, msg): for marker_id, transforms in self.marker_transforms.items(): if len(transforms) == 30: self.check_precision(marker_id, transforms) - - # delete all the transforms from the marker_transforms dictionary + + # delete all the transforms from the marker_transforms dictionary for marker_id, _ in self.reliable_marker_transforms.items(): if marker_id in self.marker_transforms: del self.marker_transforms[marker_id] ros_image = self.bridge.cv2_to_imgmsg(cv_image, "bgr8") - 
self.cv2_image_publisher.publish(ros_image) - - # TODO: implement this functionality - def write_camera_intrinsics(self, file_path:str): - return 1 - \ No newline at end of file + self.cv2_image_publisher.publish(ros_image) \ No newline at end of file From b2314793a7f127e78756c49469e15ff16da1a9e1 Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Tue, 4 Nov 2025 11:19:38 +0000 Subject: [PATCH 15/17] unification of variable names, solving dependency issue --- .../config/aruco_parameters.yaml | 14 ++++++------- .../src/extrinsic_calibrator_class.py | 6 +++--- .../src/models/aruco_params.py | 20 +++++++++---------- .../src/models/camera.py | 12 +++++++---- extrinsic_calibrator_core/requirments.txt | 2 +- 5 files changed, 29 insertions(+), 25 deletions(-) diff --git a/extrinsic_calibrator_core/config/aruco_parameters.yaml b/extrinsic_calibrator_core/config/aruco_parameters.yaml index 18a2020..c03ea54 100644 --- a/extrinsic_calibrator_core/config/aruco_parameters.yaml +++ b/extrinsic_calibrator_core/config/aruco_parameters.yaml @@ -10,19 +10,19 @@ aruco_params: board_mode: type: string default_value: "charuco_board" - description: "Board generation mode. Options: single_marker, aruco_board, charuco_board" - grid_rows: + description: "Board generation mode. Options: single_markers, aruco_board, charuco_board" + board_rows: type: int default_value: 5 - description: "Number of marker rows in the grid board." - grid_cols: + description: "Number of marker rows in the aruco board." + board_cols: type: int default_value: 5 - description: "Number of marker columns in the grid board." - grid_marker_separation: + description: "Number of marker columns in the aruco board." + marker_separation_length: type: double default_value: 0.04 - description: "Separation between markers in the grid board (meters). See 'b' on the image there - https://github.com/ethz-asl/kalibr/wiki/calibration-targets" + description: "Separation between markers in the aruco board (meters). 
See 'b' on the image there - https://github.com/ethz-asl/kalibr/wiki/calibration-targets" square_length: type: double default_value: 0.2 diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py index 8d8598e..3d1626b 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py @@ -87,7 +87,7 @@ def __init__(self): aruco_params_listener = aruco_params.ParamListener(self) imported_aruco_params = aruco_params_listener.get_params() - if imported_aruco_params.board_mode == "single_marker": + if imported_aruco_params.board_mode == "single_markers": self.real_aruco_params = ArucoMarkerParams(self, imported_aruco_params) elif imported_aruco_params.board_mode == "aruco_board": self.real_aruco_params = ArucoBoardParams(self, imported_aruco_params) @@ -114,14 +114,14 @@ def __init__(self): self.get_logger().error(f"Skipping camera '{cam_name}' due to missing 'image_topic' or 'camera_info_topic' parameter.") else: mode = imported_aruco_params.board_mode - if mode == "single_marker": + if mode == "single_markers": camera = CameraAruco(self, cam_name, idx, image_topic, camera_info_topic, self.bridge, self.tf_broadcaster, self.real_aruco_params) elif mode == "aruco_board": camera = CameraArucoBoard(self, cam_name, idx, image_topic, camera_info_topic, self.bridge, self.tf_broadcaster, self.real_aruco_params) elif mode == "charuco_board": camera = CameraChArUcoBoard(self, cam_name, idx, image_topic, camera_info_topic, self.bridge, self.tf_broadcaster, self.real_aruco_params) else: - raise ValueError(f"Unknown board mode: {mode}. Supported modes are 'single_marker', 'aruco_board', and 'charuco_board'.") + raise ValueError(f"Unknown board mode: {mode}. 
Supported modes are 'single_markers', 'aruco_board', and 'charuco_board'.") self.array_of_cameras.append(camera) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/aruco_params.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/aruco_params.py index 85ddbef..cd3e174 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/aruco_params.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/aruco_params.py @@ -27,17 +27,17 @@ def __init__(self, node:Node, aruco_params): node.get_logger().error(f"cv2.aruco doesn't have a dictionary with the name '{aruco_params.aruco_dict}'") self.marker_length = aruco_params.marker_length - self.grid_rows = aruco_params.grid_rows - self.grid_cols = aruco_params.grid_cols - self.grid_marker_separation = aruco_params.grid_marker_separation + self.board_rows = aruco_params.board_rows + self.board_cols = aruco_params.board_cols + self.marker_separation_length = aruco_params.marker_separation_length node.get_logger().info( f"{self.__class__.__name__} set:\n" f" Dictionary: {aruco_params.aruco_dict}\n" f" Marker Length: {self.marker_length}\n" - f" Grid Rows: {self.grid_rows}\n" - f" Grid Cols: {self.grid_cols}\n" - f" Marker Separation: {self.grid_marker_separation}\n" + f" Board Rows: {self.board_rows}\n" + f" Board Cols: {self.board_cols}\n" + f" Marker Separation: {self.marker_separation_length}\n" ) @@ -51,13 +51,13 @@ def __init__(self, node:Node, aruco_params): self.marker_length = aruco_params.marker_length self.square_length = aruco_params.square_length - self.grid_rows = aruco_params.grid_rows - self.grid_cols = aruco_params.grid_cols + self.board_rows = aruco_params.board_rows + self.board_cols = aruco_params.board_cols node.get_logger().info( f"[{self.__class__.__name__}] set:\n" f" Dictionary: {aruco_params.aruco_dict}\n" f" Marker Length: {self.marker_length}\n" - f" Grid Rows: {self.grid_rows}\n" - f" Grid Cols: {self.grid_cols}\n" + f" Board Rows: 
{self.board_rows}\n" + f" Board Cols: {self.board_cols}\n" ) \ No newline at end of file diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py index 4306c96..f5294d2 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py @@ -164,9 +164,9 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c super().__init__(node, camera_name, camera_id, image_topic, camera_info_topic, bridge, broadcaster) self.board = cv2.aruco.GridBoard( - (aruco_params.grid_cols, aruco_params.grid_rows), + (aruco_params.board_cols, aruco_params.board_rows), aruco_params.marker_length, - aruco_params.grid_marker_separation, + aruco_params.marker_separation_length, aruco_params.aruco_dict, ) @@ -177,7 +177,7 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c f" Camera {camera_name} Created board:\n" f" Dictionary: {self.board.getDictionary().markerSize}X{self.board.getDictionary().markerSize}\n" f" Marker Length: {self.board.getMarkerLength()}\n" - f" Grid Size: {self.board.getGridSize()}\n" + f" Board Size: {self.board.getGridSize()}\n" f" Ids: {self.board.getIds()}\n" f" Marker Separation: {self.board.getMarkerSeparation()}\n" ) @@ -238,7 +238,7 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c super().__init__(node, camera_name, camera_id, image_topic, camera_info_topic, bridge, broadcaster) self.board = cv2.aruco.CharucoBoard( - (aruco_params.grid_cols, aruco_params.grid_rows), + (aruco_params.board_cols, aruco_params.board_rows), aruco_params.square_length, aruco_params.marker_length, aruco_params.aruco_dict, @@ -277,6 +277,10 @@ def image_callback(self, msg : Image): cv2.aruco.drawDetectedCornersCharuco(cv_image, charuco_corners, charuco_ids, (255, 0, 0)) # Estimate the board pose + # 
print which python is used there + import sys + print(sys.executable) + print(cv2.__version__) success, rvec, tvec = cv2.aruco.estimatePoseCharucoBoard( charuco_corners, charuco_ids, self.board, self.camera_matrix, self.dist_coeffs, None, None ) diff --git a/extrinsic_calibrator_core/requirments.txt b/extrinsic_calibrator_core/requirments.txt index 71435c8..c9b072c 100644 --- a/extrinsic_calibrator_core/requirments.txt +++ b/extrinsic_calibrator_core/requirments.txt @@ -1,5 +1,5 @@ joblib==1.5.2 -numpy==2.2.6 +numpy==1.26.4 opencv-contrib-python==4.12.0.88 prettytable==3.16.0 scikit-learn==1.7.1 From a5d766a2fa72095fd8a1e4b46f78fd695448114c Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Tue, 4 Nov 2025 12:39:52 +0000 Subject: [PATCH 16/17] add pose_variation metric --- .../src/{models => detectors}/__init__.py | 0 .../src/{models => detectors}/aruco_params.py | 0 .../src/{models => detectors}/camera.py | 55 ++++++++++++------- .../src/extrinsic_calibrator_class.py | 9 +-- 4 files changed, 39 insertions(+), 25 deletions(-) rename extrinsic_calibrator_core/extrinsic_calibrator_core/src/{models => detectors}/__init__.py (100%) rename extrinsic_calibrator_core/extrinsic_calibrator_core/src/{models => detectors}/aruco_params.py (100%) rename extrinsic_calibrator_core/extrinsic_calibrator_core/src/{models => detectors}/camera.py (87%) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/__init__.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/detectors/__init__.py similarity index 100% rename from extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/__init__.py rename to extrinsic_calibrator_core/extrinsic_calibrator_core/src/detectors/__init__.py diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/aruco_params.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/detectors/aruco_params.py similarity index 100% rename from 
extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/aruco_params.py rename to extrinsic_calibrator_core/extrinsic_calibrator_core/src/detectors/aruco_params.py diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/detectors/camera.py similarity index 87% rename from extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py rename to extrinsic_calibrator_core/extrinsic_calibrator_core/src/detectors/camera.py index f5294d2..9c17680 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/models/camera.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/detectors/camera.py @@ -9,6 +9,7 @@ import tf2_ros from rclpy.node import Node from sensor_msgs.msg import Image, CameraInfo +from std_msgs.msg import Float32 from extrinsic_calibrator_core.src.models.aruco_params import ArucoMarkerParams, ArucoBoardParams, ChArUcoBoardParams @@ -32,12 +33,22 @@ def __init__(self, node:Node, camera_name:str, camera_id:int, image_topic:str, c self.marker_transforms = {} self.reliable_marker_transforms = {} + self.marker_pose_variations = {} + + self.pose_variation_timer = self.node.create_timer(0.5, self.pose_variation_timer_callback) + self.pose_variation_pub = self.node.create_publisher(Float32, f'{self.node.get_name()}/{self.camera_name}/pose_variation', 10) + + def pose_variation_timer_callback(self): + if self.marker_pose_variations: + msg = Float32() + msg.data = np.mean(list(self.marker_pose_variations.values())) + self.pose_variation_pub.publish(msg) def camera_info_callback(self, msg : CameraInfo): if self.camera_matrix is None: self.camera_matrix = np.array(msg.k).reshape((3, 3)) self.dist_coeffs = np.array(msg.d) - self.node.get_logger().info(f"Camera {self.camera_name} parameters received.") + self.node.get_logger().info(f"[Camera {self.camera_name}] Intrinsic parameters received.") @abstractmethod def image_callback(self, msg : Image): 
@@ -50,27 +61,37 @@ def log_initialization(self): def check_precision(self, marker_id : int, transform : np.ndarray): if self.is_precise(transform): - self.node.get_logger().info(f"Camera {self.camera_name}: Marker {marker_id} is reliable") + self.node.get_logger().info(f"[Camera {self.camera_name}] Marker {marker_id} is reliable") # add the last transform of the array in the dictionary as reliable marker transform self.reliable_marker_transforms[marker_id] = transform[-1] + self.marker_pose_variations[marker_id] = self.caluclate_pose_variation(*self.caluclate_position_rotation_range(transform)) + + def caluclate_position_rotation_range(self, transforms : np.ndarray): + positions = np.array([t[:3, 3] for t in transforms]) + position_range = np.ptp(positions, axis=0) + + rotations = [R.from_matrix(t[:3, :3]) for t in transforms] + quat = np.array([r.as_quat() for r in rotations]) + angles = [] + for i in range(len(quat)): + for j in range(i+1, len(quat)): + dot = np.abs(np.dot(quat[i], quat[j])) + angle = 2 * np.arccos(np.clip(dot, -1.0, 1.0)) + angles.append(angle) + rotation_range = np.max(angles) + return position_range, rotation_range + + def caluclate_pose_variation(self, position_range : np.ndarray, rotation_range : float) -> float: + pose_variation = np.mean(position_range) + np.degrees(rotation_range) + + return pose_variation def is_precise(self, transforms : np.ndarray): if all(transform is not None for transform in transforms): - positions = np.array([t[:3, 3] for t in transforms]) - position_range = np.ptp(positions, axis=0) - - rotations = [R.from_matrix(t[:3, :3]) for t in transforms] - quat = np.array([r.as_quat() for r in rotations]) - angles = [] - for i in range(len(quat)): - for j in range(i+1, len(quat)): - dot = np.abs(np.dot(quat[i], quat[j])) - angle = 2 * np.arccos(np.clip(dot, -1.0, 1.0)) - angles.append(angle) - rotation_range = np.max(angles) + position_range, rotation_range = self.caluclate_position_rotation_range(transforms) - 
self.node.get_logger().info(f"position range: {np.mean(position_range)}, rotation range (degrees): {np.degrees(rotation_range)}") + self.node.get_logger().debug(f"[{self.camera_name}] Position range: {np.mean(position_range)}, Rotation range (degrees): {np.degrees(rotation_range)}") return np.all(position_range < 0.01) and np.all(rotation_range < np.radians(1)) else: @@ -277,10 +298,6 @@ def image_callback(self, msg : Image): cv2.aruco.drawDetectedCornersCharuco(cv_image, charuco_corners, charuco_ids, (255, 0, 0)) # Estimate the board pose - # print which python is used there - import sys - print(sys.executable) - print(cv2.__version__) success, rvec, tvec = cv2.aruco.estimatePoseCharucoBoard( charuco_corners, charuco_ids, self.board, self.camera_matrix, self.dist_coeffs, None, None ) diff --git a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py index 3d1626b..e228f21 100644 --- a/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py +++ b/extrinsic_calibrator_core/extrinsic_calibrator_core/src/extrinsic_calibrator_class.py @@ -49,7 +49,6 @@ from collections import deque # Well-known libraries -import cv2 import numpy as np from prettytable import PrettyTable from scipy.spatial.transform import Rotation as R @@ -58,8 +57,6 @@ from cv_bridge import CvBridge from geometry_msgs.msg import TransformStamped from rclpy.node import Node -from sensor_msgs.msg import CameraInfo, Image -import rclpy import tf2_ros import tf_transformations @@ -68,10 +65,10 @@ from extrinsic_calibrator_core.python_camera_topics_parameters import cameras_params # Camera-detectors -from extrinsic_calibrator_core.src.models.camera import CameraAruco, CameraArucoBoard, CameraChArUcoBoard, CameraBase +from extrinsic_calibrator_core.src.detectors.camera import CameraAruco, CameraArucoBoard, CameraChArUcoBoard, CameraBase # aruco params -from 
extrinsic_calibrator_core.src.models.aruco_params import ArucoMarkerParams, ArucoBoardParams, ChArUcoBoardParams +from extrinsic_calibrator_core.src.detectors.aruco_params import ArucoMarkerParams, ArucoBoardParams, ChArUcoBoardParams class ExtrinsicCalibrator(Node): def __init__(self): @@ -99,7 +96,7 @@ def __init__(self): self.imported_cameras_params = cameras_param_listener.get_params() camera_names = list(self.imported_cameras_params.camera_names) - self.get_logger().info(f"Detected camera names from parameters: {camera_names}") + self.get_logger().info(f"Received camera names from rp_camera: {camera_names}") # construct the cameras self.array_of_cameras = [] From c17f58a3603e4467b447fe84c437b49c49da3b63 Mon Sep 17 00:00:00 2001 From: Szymon Nowak Date: Tue, 4 Nov 2025 15:29:56 +0000 Subject: [PATCH 17/17] A2 boards for lab --- ...x3_mar_len_006_sq_len_012_DICT6X6_1000.svg | 97 +++++++++ ...x4_mar_len_004_sq_len_008_DICT6X6_1000.svg | 186 ++++++++++++++++++ ...x6_mar_len_003_sq_len_006_DICT6X6_1000.svg | 0 ...x4_mar_len_003_sq_len_006_DICT6X6_1000.svg | 0 ...x3_mar_len_004_sq_len_008_DICT6X6_1000.svg | 0 generate_board.py | 2 +- 6 files changed, 284 insertions(+), 1 deletion(-) create mode 100644 example_boards/charuco_boards/A2/A2_charuco_4x3_mar_len_006_sq_len_012_DICT6X6_1000.svg create mode 100644 example_boards/charuco_boards/A2/A2_charuco_6x4_mar_len_004_sq_len_008_DICT6X6_1000.svg rename example_boards/charuco_boards/{ => A2}/A2_charuco_8x6_mar_len_003_sq_len_006_DICT6X6_1000.svg (100%) rename example_boards/charuco_boards/{ => A3}/A3_charuco_6x4_mar_len_003_sq_len_006_DICT6X6_1000.svg (100%) rename example_boards/charuco_boards/{ => A4}/A4_charuco_4x3_mar_len_004_sq_len_008_DICT6X6_1000.svg (100%) diff --git a/example_boards/charuco_boards/A2/A2_charuco_4x3_mar_len_006_sq_len_012_DICT6X6_1000.svg b/example_boards/charuco_boards/A2/A2_charuco_4x3_mar_len_006_sq_len_012_DICT6X6_1000.svg new file mode 100644 index 0000000..4805bc1 --- /dev/null +++ 
b/example_boards/charuco_boards/A2/A2_charuco_4x3_mar_len_006_sq_len_012_DICT6X6_1000.svg @@ -0,0 +1,97 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/example_boards/charuco_boards/A2/A2_charuco_6x4_mar_len_004_sq_len_008_DICT6X6_1000.svg b/example_boards/charuco_boards/A2/A2_charuco_6x4_mar_len_004_sq_len_008_DICT6X6_1000.svg new file mode 100644 index 0000000..9a6d118 --- /dev/null +++ b/example_boards/charuco_boards/A2/A2_charuco_6x4_mar_len_004_sq_len_008_DICT6X6_1000.svg @@ -0,0 +1,186 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/example_boards/charuco_boards/A2_charuco_8x6_mar_len_003_sq_len_006_DICT6X6_1000.svg b/example_boards/charuco_boards/A2/A2_charuco_8x6_mar_len_003_sq_len_006_DICT6X6_1000.svg similarity index 100% rename from example_boards/charuco_boards/A2_charuco_8x6_mar_len_003_sq_len_006_DICT6X6_1000.svg rename to example_boards/charuco_boards/A2/A2_charuco_8x6_mar_len_003_sq_len_006_DICT6X6_1000.svg diff --git a/example_boards/charuco_boards/A3_charuco_6x4_mar_len_003_sq_len_006_DICT6X6_1000.svg b/example_boards/charuco_boards/A3/A3_charuco_6x4_mar_len_003_sq_len_006_DICT6X6_1000.svg similarity index 100% rename from example_boards/charuco_boards/A3_charuco_6x4_mar_len_003_sq_len_006_DICT6X6_1000.svg rename to example_boards/charuco_boards/A3/A3_charuco_6x4_mar_len_003_sq_len_006_DICT6X6_1000.svg diff --git a/example_boards/charuco_boards/A4_charuco_4x3_mar_len_004_sq_len_008_DICT6X6_1000.svg 
b/example_boards/charuco_boards/A4/A4_charuco_4x3_mar_len_004_sq_len_008_DICT6X6_1000.svg similarity index 100% rename from example_boards/charuco_boards/A4_charuco_4x3_mar_len_004_sq_len_008_DICT6X6_1000.svg rename to example_boards/charuco_boards/A4/A4_charuco_4x3_mar_len_004_sq_len_008_DICT6X6_1000.svg diff --git a/generate_board.py b/generate_board.py index ecea93f..a91408a 100644 --- a/generate_board.py +++ b/generate_board.py @@ -107,7 +107,7 @@ def generate_aruco_board(board_type, output_path, board_rows, board_cols, square img_inner = board.generateImage((inner_w_px, inner_h_px), marginSize=0, borderBits=1) - # Add marigns. Done mannualy because maringSize in board.generateImage addd vertically smaller margins than horizontally + # Add marigns. Done mannualy because maringSize in board.generateImage adds vertically smaller margins than horizontally final_h = inner_h_px + 2 * margin_px final_w = inner_w_px + 2 * margin_px canvas = np.full((final_h, final_w), 255, dtype=img_inner.dtype)