Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
# v0.9.4 (Upcoming)

## Removals, Deprecations and Changes
* Removed the deprecated `iterator_type='v1'` option from `_imaging_frames_to_hdmf_iterator`. Use `iterator_type='v2'` (default) instead. [PR #1679](https://github.com/catalystneuro/neuroconv/pull/1679)
* Removed support for passing `rate` in trace metadata for fluorescence traces. The rate is now always calculated automatically from the segmentation extractor's timestamps or sampling frequency. [PR #1679](https://github.com/catalystneuro/neuroconv/pull/1679)
* Removed the deprecated `stub_frames` parameter from ophys interfaces (`BaseImagingExtractorInterface`, `BaseSegmentationExtractorInterface`, `BrukerTiffMultiPlaneConverter`, `BrukerTiffSinglePlaneConverter`, `MiniscopeConverter`, `Suite2pSegmentationInterface`, `MinianSegmentationInterface`, `MiniscopeImagingDataInterface`). Use `stub_samples` instead. [PR #1676](https://github.com/catalystneuro/neuroconv/pull/1676)
* Removed deprecated wrapper functions from `roiextractors_pending_deprecation.py` (March 2026 deadline): `add_imaging_plane_to_nwbfile`, `add_image_segmentation_to_nwbfile`, `add_photon_series_to_nwbfile`, `add_plane_segmentation_to_nwbfile`, `add_background_plane_segmentation_to_nwbfile`, `add_background_fluorescence_traces_to_nwbfile`, `add_summary_images_to_nwbfile`. [PR #1680](https://github.com/catalystneuro/neuroconv/pull/1680)
* Added `*args` positional argument deprecation to `add_imaging_to_nwbfile` to enforce keyword-only arguments. Will be enforced on or after September 2026. [PR #1680](https://github.com/catalystneuro/neuroconv/pull/1680)
Expand Down
37 changes: 4 additions & 33 deletions src/neuroconv/tools/roiextractors/roiextractors.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@
import psutil

# from hdmf.common import VectorData
from hdmf.data_utils import DataChunkIterator
from pydantic import FilePath
from pynwb import NWBFile
from pynwb.base import Images
Expand Down Expand Up @@ -895,7 +894,6 @@ def _imaging_frames_to_hdmf_iterator(
The type of iterator for chunked data writing.
'v2': Uses iterative write with control over chunking and progress bars.
None: Loads all data into memory before writing (not recommended for large datasets).
Note: 'v1' is deprecated and will be removed on or after March 2026.
iterator_options : dict, optional
Options for controlling the iterative write process. See the
`pynwb tutorial on iterative write <https://pynwb.readthedocs.io/en/stable/tutorials/advanced_io/plot_iterative_write.html#sphx-glr-tutorials-advanced-io-plot-iterative-write-py>`_
Expand All @@ -907,29 +905,13 @@ def _imaging_frames_to_hdmf_iterator(
The frames of the imaging extractor wrapped in an iterator for chunked writing.
"""

def data_generator(imaging):
    """Yield one sample (frame) at a time from the imaging extractor.

    Each yielded array is a single frame, squeezed to drop the length-1
    sample axis and transposed — NWB stores image data as
    num_columns x num_rows, the transpose of the extractor's layout.
    Used to feed a DataChunkIterator for the legacy 'v1' iterator path.
    """
    num_samples = imaging.get_num_samples()
    for i in range(num_samples):
        # end_sample is exclusive, so (i, i + 1) selects exactly frame i.
        yield imaging.get_series(start_sample=i, end_sample=i + 1).squeeze().T

assert iterator_type in ["v1", "v2", None], "'iterator_type' must be either 'v2' (recommended) or None."
assert iterator_type in ["v2", None], "'iterator_type' must be either 'v2' (recommended) or None."
iterator_options = dict() if iterator_options is None else iterator_options

if iterator_type is None:
_check_if_imaging_fits_into_memory(imaging=imaging)
return imaging.get_series().transpose((0, 2, 1))

if iterator_type == "v1":
warnings.warn(
"iterator_type='v1' is deprecated and will be removed on or after March 2026. "
"Use iterator_type='v2' for better chunking control and progress bar support.",
FutureWarning,
stacklevel=2,
)
if "buffer_size" not in iterator_options:
iterator_options.update(buffer_size=10)
return DataChunkIterator(data=data_generator(imaging), **iterator_options)

return ImagingExtractorDataChunkIterator(imaging_extractor=imaging, **iterator_options)


Expand Down Expand Up @@ -1101,7 +1083,6 @@ def write_imaging_to_nwbfile(
The type of iterator for chunked data writing.
'v2': Uses iterative write with control over chunking and progress bars.
None: Loads all data into memory before writing (not recommended for large datasets).
Note: 'v1' is deprecated and will be removed on or after March 2026.
iterator_options : dict, optional
Options for controlling the iterative write process. See the
`pynwb tutorial on iterative write <https://pynwb.readthedocs.io/en/stable/tutorials/advanced_io/plot_iterative_write.html#sphx-glr-tutorials-advanced-io-plot-iterative-write-py>`_
Expand Down Expand Up @@ -1706,14 +1687,8 @@ def _add_fluorescence_traces_to_nwbfile(
roi_response_series_kwargs["data"] = SliceableDataChunkIterator(trace, **iterator_options)
roi_response_series_kwargs["rois"] = roi_table_region

# Deprecation warning for user-provided rate in metadata
if user_trace_metadata is not None and "rate" in user_trace_metadata:
warnings.warn(
f"Passing 'rate' in metadata for trace '{trace_name}' is deprecated and will be removed on or after March 2026. "
f"The rate will be automatically calculated from the segmentation extractor's timestamps or sampling frequency.",
FutureWarning,
stacklevel=2,
)
# Remove user-provided rate from metadata (rate is always calculated automatically)
roi_response_series_kwargs.pop("rate", None)

# Resolve timestamps: user-set > native hardware > none
timestamps_were_set = segmentation_extractor.has_time_vector()
Expand All @@ -1735,12 +1710,8 @@ def _add_fluorescence_traces_to_nwbfile(

if timestamps_are_regular:
roi_response_series_kwargs["starting_time"] = starting_time
# Use metadata rate if provided, otherwise use estimated/sampled rate
if "rate" not in roi_response_series_kwargs:
roi_response_series_kwargs["rate"] = rate
roi_response_series_kwargs["rate"] = rate
else:
# Irregular timestamps - remove rate from metadata if present (can't specify both)
roi_response_series_kwargs.pop("rate", None)
roi_response_series_kwargs["timestamps"] = timestamps

# Build the roi response series
Expand Down
25 changes: 0 additions & 25 deletions tests/test_modalities/test_ophys/test_tools_roiextractors.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
import numpy as np
import psutil
import pytest
from hdmf.data_utils import DataChunkIterator
from hdmf.testing import TestCase
from numpy.testing import assert_array_equal, assert_raises
from numpy.typing import ArrayLike
Expand Down Expand Up @@ -1537,30 +1536,6 @@ def test_non_iterative_two_photon(self):
expected_two_photon_series_data = self.imaging_extractor.get_series().transpose((0, 2, 1))
assert_array_equal(two_photon_series_extracted, expected_two_photon_series_data)

def test_deprecated_v1_iterator_two_photon(self):
    """Test adding two photon series with deprecated v1 iterator type.

    Verifies that iterator_type='v1' (1) emits a FutureWarning, (2) wraps
    the photon series data in an hdmf DataChunkIterator with the default
    buffer_size of 10, and (3) round-trips the imaging data unchanged
    (modulo the row/column transpose NWB uses for image storage).
    """
    # The deprecated v1 path must warn before doing any work.
    with self.assertWarns(FutureWarning):
        _add_photon_series_to_nwbfile_old_list_format(
            imaging=self.imaging_extractor,
            nwbfile=self.nwbfile,
            metadata=self.two_photon_series_metadata,
            iterator_type="v1",
        )

    # Check data
    acquisition_modules = self.nwbfile.acquisition
    assert self.two_photon_series_name in acquisition_modules
    data_chunk_iterator = acquisition_modules[self.two_photon_series_name].data
    # v1 wraps the generator in hdmf's DataChunkIterator; buffer_size=10 is
    # the default injected when the caller supplies no iterator_options.
    assert isinstance(data_chunk_iterator, DataChunkIterator)
    self.assertEqual(data_chunk_iterator.buffer_size, 10)

    # Drain the iterator and compare against the source extractor's data.
    two_photon_series_extracted = np.concatenate([data_chunk.data for data_chunk in data_chunk_iterator])
    # NWB stores images as num_columns x num_rows
    expected_two_photon_series_shape = (self.num_samples, self.num_columns, self.num_rows)
    assert two_photon_series_extracted.shape == expected_two_photon_series_shape
    expected_two_photon_series_data = self.imaging_extractor.get_series().transpose((0, 2, 1))
    assert_array_equal(two_photon_series_extracted, expected_two_photon_series_data)

def test_iterator_options_propagation(self):
"""Test that iterator options are propagated to the data chunk iterator."""
buffer_shape = (20, 5, 5)
Expand Down
Loading