diff --git a/README.md b/README.md
index 50e05a7..296f8a4 100644
--- a/README.md
+++ b/README.md
@@ -6,32 +6,251 @@
 Convert between [itk](https://itk.org) and [napari](https://napari.org) data
 structures.

-Installation
-------------
+## Installation

-```
+```sh
 pip install itk-napari-conversion
 ```

-Usage
------
+## Usage

-Convert an `itk.Image` to an `napari.layers.Image`:
+### Image Conversion
+
+#### Convert ITK Image to napari Image Layer
+
+Convert an `itk.Image` to a `napari.layers.Image`:
+
-```
+```python
 from itk_napari_conversion import image_layer_from_image

 image_layer = image_layer_from_image(image)
 ```

-Convert to an `napari.layers.Image` to an `itk.Image`:
+**Features:**
+- Automatically detects and handles RGB/RGBA images
+- Preserves image metadata (spacing, origin, direction, custom metadata)
+- Converts ITK's physical space information to napari's layer transformations:
+  - `spacing` → `scale`
+  - `origin` → `translate`
+  - `direction` → `rotate`
+
+**Example:**
+```python
+import itk
+import napari
+from itk_napari_conversion import image_layer_from_image
+
+# Read an image
+image = itk.imread('path/to/image.nrrd')
+
+# Add custom metadata
+image['units'] = 'mm'
+image['patient_id'] = '12345'
+
+# Convert to napari layer
+image_layer = image_layer_from_image(image)
+
+# The layer will have:
+# - image_layer.scale = image spacing (reversed into NumPy axis order)
+# - image_layer.translate = image origin (reversed into NumPy axis order)
+# - image_layer.rotate = image direction matrix (converted to NumPy axis order)
+# - image_layer.metadata = all custom metadata
 ```
+
+#### Convert napari Image Layer to ITK Image
+
+Convert a `napari.layers.Image` to an `itk.Image`:
+
+```python
 from itk_napari_conversion import image_from_image_layer

 image = image_from_image_layer(image_layer)
 ```

-Hacking
+**Features:**
+- Automatically handles RGB/RGBA layers
+- Converts napari layer transformations back to ITK physical space:
+  - `scale` → `spacing`
+  - `translate` → `origin`
+  - `rotate` → `direction`
+- Preserves all metadata from the layer
+
+**Example:**
+```python
+import numpy as np
+import napari
+from itk_napari_conversion import image_from_image_layer
+
+# Create a napari image layer with transformations
+viewer = napari.Viewer()
+data = np.random.rand(100, 100, 100)
+
+# 45 degree rotation around z-axis
+angle = np.radians(45)
+rotate = np.array([
+    [np.cos(angle), -np.sin(angle), 0],
+    [np.sin(angle), np.cos(angle), 0],
+    [0, 0, 1]
+], dtype=np.float64)
+
+layer = viewer.add_image(
+    data,
+    scale=[2.0, 1.5, 1.5],  # anisotropic spacing
+    rotate=rotate,
+    translate=[10.0, 20.0, 30.0],
+    metadata={'description': 'My volume'}
+)
+
+# Convert to ITK
+image = image_from_image_layer(layer)
+
+# The ITK image will have:
+# - spacing: in ITK (x, y, z) order, i.e. napari `scale` reversed: [1.5, 1.5, 2.0]
+# - origin: in ITK (x, y, z) order, i.e. napari `translate` reversed: [30.0, 20.0, 10.0]
+# - direction: the transpose of the napari `rotate` matrix
+
+# Access the direction matrix via dictionary access (recommended):
+direction = image["direction"]
+print(direction)
+# [[ 0.70710678  0.70710678  0.        ]
+#  [-0.70710678  0.70710678  0.        ]
+#  [ 0.          0.          1.        ]]
+
+# This is the transpose of napari's rotate matrix:
+# [[ cos(45°), sin(45°), 0],
+#  [-sin(45°), cos(45°), 0],
+#  [ 0,        0,        1]]
+
+# Note: image.GetDirection() may return a different matrix because it accesses
+# the underlying ITK image metadata differently. Use dictionary access for
+# consistency with how the direction was set.
+```
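+
+#### Round-Trip Example
+
+The two image conversion functions are intended to be inverses of each other.
+The following is a minimal round-trip sketch; the array shape, the spacing and
+origin values, and the `units` metadata key are arbitrary illustrations, not
+part of the API:
+
+```python
+import itk
+import numpy as np
+from itk_napari_conversion import image_from_image_layer, image_layer_from_image
+
+# Build a small ITK image with non-trivial physical space information
+data = np.random.rand(8, 16, 32).astype(np.float32)
+image = itk.image_from_array(data)
+image.SetSpacing([1.5, 1.5, 2.0])    # ITK (x, y, z) order
+image.SetOrigin([30.0, 20.0, 10.0])
+image['units'] = 'mm'                # custom metadata
+
+# ITK -> napari: physical space shows up as layer transformations,
+# reversed into NumPy/napari (z, y, x) axis order
+layer = image_layer_from_image(image)
+assert np.allclose(layer.scale, [2.0, 1.5, 1.5])
+assert np.allclose(layer.translate, [10.0, 20.0, 30.0])
+assert layer.metadata['units'] == 'mm'
+
+# napari -> ITK: the round trip restores the pixel data and the
+# physical space information
+round_trip = image_from_image_layer(layer)
+assert np.array_equal(itk.array_view_from_image(round_trip), data)
+assert np.allclose(np.asarray(round_trip["spacing"]), layer.scale)
+assert np.allclose(np.asarray(round_trip["origin"]), layer.translate)
+```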
+
+### Point Set Conversion
+
+#### Convert ITK PointSet to napari Points Layer
+
+Convert an `itk.PointSet` to a `napari.layers.Points`:
+
+```python
+from itk_napari_conversion import points_layer_from_point_set
+
+points_layer = points_layer_from_point_set(point_set)
+```
+
+**Features:**
+- Extracts point coordinates from the ITK PointSet using `itk.array_from_vector_container()`
+- Converts point data (if present) to the napari features dictionary
+  - Uses the first component as the 'feature' key
+- Returns a `napari.layers.Points` object
+
+**Example:**
+```python
+import itk
+import numpy as np
+from itk_napari_conversion import points_layer_from_point_set
+
+# Create ITK PointSet
+PointSetType = itk.PointSet[itk.F, 3]
+point_set = PointSetType.New()
+
+# Add points
+points_data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]], dtype=np.float32)
+points = itk.vector_container_from_array(points_data.flatten())
+point_set.SetPoints(points)
+
+# Add point data (features)
+feature_data = np.array([10.0, 20.0], dtype=np.float32)
+point_data = itk.vector_container_from_array(feature_data)
+point_set.SetPointData(point_data)
+
+# Convert to napari
+points_layer = points_layer_from_point_set(point_set)
+# points_layer.features['feature'] will contain [10.0, 20.0]
+```
+
+#### Convert napari Points Layer to ITK PointSet
+
+Convert a `napari.layers.Points` to an `itk.PointSet`:
+
+```python
+from itk_napari_conversion import point_set_from_points_layer
+
+point_set = point_set_from_points_layer(points_layer)
+```
+
+**Features:**
+- Applies napari transformations (scale, rotate, translate) to the point coordinates before conversion
+- Extracts points from the napari layer data
+- Converts the first feature column (if present) to ITK point data
+- Returns an `itk.PointSet` object with its dimension determined from the data
+
+**Example:**
+```python
+import napari
+import numpy as np
+from itk_napari_conversion import point_set_from_points_layer
+
+# Create a napari Points layer with transformations
+data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
+features = {'intensity': np.array([10.0, 20.0])}
+scale = np.array([2.0, 2.0, 2.0])
+translate = np.array([100.0, 200.0, 300.0])
+
+# Optional: add rotation (90 degrees around z-axis)
+angle = np.radians(90)
+rotate = np.array([
+    [np.cos(angle), -np.sin(angle), 0],
+    [np.sin(angle), np.cos(angle), 0],
+    [0, 0, 1]
+])
+
+points_layer = napari.layers.Points(
+    data,
+    features=features,
+    scale=scale,
+    rotate=rotate,
+    translate=translate
+)
+
+# Convert to ITK PointSet
+point_set = point_set_from_points_layer(points_layer)
+
+# The points in the ITK PointSet will be in world coordinates:
+# ((data * scale) @ rotate.T) + translate
+# Point data will be stored from the 'intensity' feature
+```
+
+### Transformation Handling
+
+#### Images
+- **ITK → napari**: Physical space metadata (spacing, origin, direction) is converted to napari layer transformations
+- **napari → ITK**: Layer transformations are converted back to ITK physical space metadata
+
+#### Points
+- **ITK → napari**: Points are copied as-is, without applying transformations (an identity transform is assumed)
+- **napari → ITK**: Layer transformations (scale, rotate, translate) are **applied to points** to convert them to world coordinates
+  - Transformations are applied in order: scale → rotate → translate, as illustrated in the sketch below
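+
+A quick way to check the point convention is to apply the same transform by
+hand with NumPy and compare it with the converted coordinates. A small sketch
+(the point, scale, rotation, and translation values are arbitrary):
+
+```python
+import itk
+import napari
+import numpy as np
+from itk_napari_conversion import point_set_from_points_layer
+
+data = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
+scale = np.array([2.0, 3.0, 4.0])
+translate = np.array([10.0, 20.0, 30.0])
+angle = np.radians(90)
+rotate = np.array([
+    [np.cos(angle), -np.sin(angle), 0],
+    [np.sin(angle), np.cos(angle), 0],
+    [0, 0, 1]
+])
+
+layer = napari.layers.Points(data, scale=scale, rotate=rotate, translate=translate)
+point_set = point_set_from_points_layer(layer)
+
+# Scale, then rotate, then translate -- the same order used by the conversion
+expected = (data * scale) @ rotate.T + translate
+converted = itk.array_from_vector_container(point_set.GetPoints())
+assert np.allclose(expected, converted)
+```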
+
+### Notes
+
+**Images:**
+- Supports 2D, 3D, and multi-dimensional images
+- RGB and RGBA images are automatically detected and handled
+- Axis order is automatically reversed between ITK (x, y, z) and NumPy/napari (z, y, x)
+- Metadata is preserved bidirectionally
+
+**Points:**
+- Point data in ITK is stored as float32
+- Only the first feature column from napari is used for ITK point data (see the sketch below)
+- Empty point sets are handled gracefully
+- Dimension is automatically determined from the point data shape
+- Metadata conversion is not currently supported for point sets
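+
+The point-set notes above can be seen in a short round trip. This is a sketch
+with arbitrary values; `intensity` and `label` are made-up feature names:
+
+```python
+import itk
+import napari
+import numpy as np
+from itk_napari_conversion import (
+    point_set_from_points_layer,
+    points_layer_from_point_set,
+)
+
+data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
+features = {
+    'intensity': np.array([10.0, 20.0]),  # first feature column: converted
+    'label': np.array([1.0, 2.0]),        # additional columns are dropped
+}
+layer = napari.layers.Points(data, features=features)
+
+point_set = point_set_from_points_layer(layer)
+point_data = itk.array_from_vector_container(point_set.GetPointData())
+# Per the notes above, the point data comes back as float32 and only the
+# first feature column ('intensity' here) is carried over
+assert np.allclose(point_data, [10.0, 20.0])
+
+# Converting back yields a layer whose single feature is named 'feature'
+layer_again = points_layer_from_point_set(point_set)
+assert np.allclose(layer_again.data, data)
+assert np.allclose(layer_again.features['feature'], [10.0, 20.0])
+```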
+
+## Hacking
--------
 
 Contributions are welcome!
 
diff --git a/itk_napari_conversion.py b/itk_napari_conversion.py
index aeb3544..83e9775 100644
--- a/itk_napari_conversion.py
+++ b/itk_napari_conversion.py
@@ -1,8 +1,13 @@
 """Convert between itk and napari data structures."""
 
-__version__ = "0.3.1"
+__version__ = "0.4.0"
 
-__all__ = ["image_layer_from_image", "image_from_image_layer"]
+__all__ = [
+    "image_layer_from_image",
+    "image_from_image_layer",
+    "points_layer_from_point_set",
+    "point_set_from_points_layer",
+]
 
 import napari
 import itk
@@ -55,3 +60,90 @@ def image_from_image_layer(image_layer):
     image["direction"] = np.ascontiguousarray(np.transpose(image_layer.rotate)).astype(np.float64)
 
     return image
+
+
+def points_layer_from_point_set(point_set):
+    """Convert an itk.PointSet to a napari.layers.Points."""
+    # Get the points as a NumPy array
+    number_of_points = point_set.GetNumberOfPoints()
+
+    if number_of_points == 0:
+        data = np.array([]).reshape(0, 3)  # Default to a 3D empty array
+    else:
+        data = itk.array_from_vector_container(point_set.GetPoints())
+
+    # Get point data (features) if available
+    point_data = point_set.GetPointData()
+    features = None
+    if point_data.Size() > 0:
+        point_data_array = itk.array_from_vector_container(point_data)
+        if point_data_array.ndim == 1:
+            # Scalar point data maps directly to a single feature column
+            features = {'feature': point_data_array}
+        else:
+            # Multi-component point data: use the first component
+            features = {'feature': point_data_array[:, 0]}
+
+    points_layer = napari.layers.Points(
+        data,
+        features=features,
+    )
+    return points_layer
+
+
+def point_set_from_points_layer(points_layer):
+    """Convert a napari.layers.Points to an itk.PointSet."""
+    # Work on a copy so the layer data is not modified
+    data = points_layer.data.copy()
+
+    # Apply the layer transformations to obtain world coordinates,
+    # in order: scale, then rotate, then translate
+    if points_layer.scale is not None:
+        data = data * points_layer.scale
+    if points_layer.rotate is not None:
+        # Apply the rotation matrix to each point
+        rotate_matrix = np.asarray(points_layer.rotate)
+        data = data @ rotate_matrix.T
+    if points_layer.translate is not None:
+        data = data + points_layer.translate
+
+    # Determine the dimension from the data
+    if len(data) == 0:
+        dimension = 3  # Default to 3D
+    else:
+        dimension = data.shape[1]
+
+    # Create the ITK PointSet; use a float pixel type for point data by default
+    PointSetType = itk.PointSet[itk.F, dimension]
+    point_set = PointSetType.New()
+
+    # Set the points
+    if len(data) > 0:
+        points = itk.vector_container_from_array(data.astype(np.float32).flatten())
+        point_set.SetPoints(points)
+
+    # Set point data from the first feature column, if any
+    features = points_layer.features
+    feature_keys = list(features.keys()) if features is not None else []
+    if len(feature_keys) > 0:
+        feature_name = feature_keys[0]
+
+        # features may be a pandas DataFrame or a plain dict
+        if hasattr(features, 'iloc'):
+            feature_data = features[feature_name].values
+        else:
+            feature_data = features[feature_name]
+
+        if isinstance(feature_data, (list, np.ndarray)):
+            feature_array = np.array(feature_data).astype(np.float32)
+            if len(feature_array) > 0:
+                point_data = itk.vector_container_from_array(feature_array)
+                point_set.SetPointData(point_data)
+
+    return point_set
diff --git a/tests.py b/tests.py
index 7e5e9e6..a6bb731 100644
--- a/tests.py
+++ b/tests.py
@@ -133,3 +133,167 @@ def check_angle(angle):
 
     for angle in [0, 30, 45, 60, 90, 120, 150, 180]:
         check_angle(angle)
+
+
+def test_image_from_image_layer_rotate_3d():
+    """Test 3D rotation matrix conversion from napari to ITK."""
+    data = np.random.randint(256, size=(10, 10, 10), dtype=np.uint8)
+
+    # 45 degree rotation around z-axis
+    angle = np.radians(45)
+    rotate = np.array([
+        [np.cos(angle), -np.sin(angle), 0],
+        [np.sin(angle), np.cos(angle), 0],
+        [0, 0, 1]
+    ], dtype=np.float64)
+
+    scale = [2.0, 1.5, 1.5]
+    translate = [10.0, 20.0, 30.0]
+
+    image_layer = napari.layers.Image(data, scale=scale, rotate=rotate, translate=translate)
+    image = itk_napari_conversion.image_from_image_layer(image_layer)
+
+    assert np.array_equal(data, itk.array_view_from_image(image))
+
+    # napari rotate is transposed compared to ITK direction
+    assert np.allclose(rotate, np.transpose(np.array(image["direction"])))
+
+    # Verify we can access direction via dictionary
+    direction_array = image["direction"]  # Returns numpy array
+    assert direction_array.shape == (3, 3)
+
+    # Verify the actual values in the direction matrix (transpose of rotate)
+    expected_direction = np.transpose(rotate)
+    assert np.allclose(direction_array, expected_direction)
+
+    # Verify other transformations
+    assert np.allclose(scale, np.array(image["spacing"]))
+    assert np.allclose(translate, np.array(image["origin"]))
+
+
+def test_points_layer_from_point_set():
+    # Create a simple 3D point set
+    PointSetType = itk.PointSet[itk.F, 3]
+    point_set = PointSetType.New()
+
+    # Add some points
+    points_data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]], dtype=np.float32)
+    points = itk.vector_container_from_array(points_data.flatten())
+    point_set.SetPoints(points)
+
+    # Convert to napari points layer
+    points_layer = itk_napari_conversion.points_layer_from_point_set(point_set)
+
+    # Check that data matches
+    assert np.allclose(points_data, points_layer.data)
+
+
+def test_points_layer_from_point_set_with_features():
+    # Create a 3D point set with point data
+    PointSetType = itk.PointSet[itk.F, 3]
+    point_set = PointSetType.New()
+
+    # Add points
+    points_data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]], dtype=np.float32)
+    points = itk.vector_container_from_array(points_data.flatten())
+    point_set.SetPoints(points)
+
+    # Add point data (features)
+    feature_data = np.array([10.0, 20.0, 30.0], dtype=np.float32)
+    point_data = itk.vector_container_from_array(feature_data)
+    point_set.SetPointData(point_data)
+
+    # Convert to napari points layer
+    points_layer = itk_napari_conversion.points_layer_from_point_set(point_set)
+
+    # Check that data matches
+    assert np.allclose(points_data, points_layer.data)
+    assert points_layer.features is not None
+    assert 'feature' in points_layer.features
+    assert np.allclose(feature_data, points_layer.features['feature'])
+
+
+def test_point_set_from_points_layer():
+    # Create napari points layer
+    data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]])
+    points_layer = napari.layers.Points(data)
+
+    # Convert to ITK point set
+    point_set = itk_napari_conversion.point_set_from_points_layer(points_layer)
+
+    # Check points
+    points_array = itk.array_from_vector_container(point_set.GetPoints())
+    assert np.allclose(data, points_array)
+
+
+def test_point_set_from_points_layer_with_features():
+    # Create napari points layer with features
+    data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]])
+    features = {'feature': np.array([10.0, 20.0, 30.0])}
+    points_layer = napari.layers.Points(data, features=features)
+
+    # Convert to ITK point set
+    point_set = itk_napari_conversion.point_set_from_points_layer(points_layer)
+
+    # Check points
+    points_array = itk.array_from_vector_container(point_set.GetPoints())
+    assert np.allclose(data, points_array)
+
+    # Check point data - verify it was set
+    point_data = point_set.GetPointData()
+    assert point_data.Size() > 0, "Point data should be set"
+    point_data_array = itk.array_from_vector_container(point_data)
+    assert np.allclose(features['feature'], point_data_array)
+
+
+def test_point_set_from_points_layer_with_scale():
+    # Create napari points layer with scale
+    data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
+    scale = np.array([2.0, 3.0, 4.0])
+    points_layer = napari.layers.Points(data, scale=scale)
+
+    # Convert to ITK point set
+    point_set = itk_napari_conversion.point_set_from_points_layer(points_layer)
+
+    # Check that points are scaled
+    points_array = itk.array_from_vector_container(point_set.GetPoints())
+    expected = data * scale
+    assert np.allclose(expected, points_array)
+
+
+def test_point_set_from_points_layer_with_translate():
+    # Create napari points layer with translation
+    data = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
+    translate = np.array([10.0, 20.0, 30.0])
+    points_layer = napari.layers.Points(data, translate=translate)
+
+    # Convert to ITK point set
+    point_set = itk_napari_conversion.point_set_from_points_layer(points_layer)
+
+    # Check that points are translated
+    points_array = itk.array_from_vector_container(point_set.GetPoints())
+    expected = data + translate
+    assert np.allclose(expected, points_array)
+
+
+def test_point_set_from_points_layer_with_rotate():
+    # Create napari points layer with rotation
+    data = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]])
+
+    # 90 degree rotation around z-axis
+    angle = np.radians(90)
+    rotate = np.array([
+        [np.cos(angle), -np.sin(angle), 0],
+        [np.sin(angle), np.cos(angle), 0],
+        [0, 0, 1]
+    ], dtype=np.float64)
+
+    points_layer = napari.layers.Points(data, rotate=rotate)
+
+    # Convert to ITK point set
+    point_set = itk_napari_conversion.point_set_from_points_layer(points_layer)
+
+    # Check that points are rotated
+    points_array = itk.array_from_vector_container(point_set.GetPoints())
+    expected = data @ rotate.T
+    assert np.allclose(expected, points_array, atol=1e-10)