Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
# v0.9.4 (Upcoming)

## Removals, Deprecations and Changes
* Removed the deprecated `staging` parameter from `automatic_dandi_upload`. Use `sandbox` instead. [PR #1678](https://github.com/catalystneuro/neuroconv/pull/1678)
* Removed the deprecated `container_name` parameter from `ImageInterface.add_to_nwbfile` and `DeepLabCutInterface.add_to_nwbfile`. Use `metadata_key` in `__init__` instead. [PR #1678](https://github.com/catalystneuro/neuroconv/pull/1678)
* Removed the deprecated `time_series_name` parameter from `add_recording_as_time_series_to_nwbfile`. Use `metadata_key` instead. [PR #1678](https://github.com/catalystneuro/neuroconv/pull/1678)

## Bug Fixes
* Fixed `get_json_schema_from_method_signature` to resolve PEP 563 string annotations (from `from __future__ import annotations`) before passing them to pydantic. This affected any interface defined in a module with deferred annotations (e.g. `MiniscopeConverter`, or external subclasses from SpikeInterface). [PR #1670](https://github.com/catalystneuro/neuroconv/pull/1670)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -559,8 +559,6 @@ def add_to_nwbfile(
self,
nwbfile: NWBFile,
metadata: dict | None = None,
*args, # TODO: change to * (keyword only) on or after August 2026
container_name: str | None = None,
):
"""
Conversion from DLC output files to nwb. Derived from dlc2nwb library.
Expand All @@ -571,53 +569,13 @@ def add_to_nwbfile(
nwb file to which the recording information is to be added
metadata: dict
metadata info for constructing the nwb file (optional).
container_name: str, default: None
name of the PoseEstimation container in the nwb. If None, uses the container_name from the interface.
This parameter is deprecated and will be removed on or after October 2025.
Use the pose_estimation_metadata_key parameter when initializing the interface instead to specify
the content of the metadata.

"""
# Handle deprecated positional arguments
if args:
parameter_names = [
"container_name",
]
num_positional_args_before_args = 2 # nwbfile, metadata
if len(args) > len(parameter_names):
raise TypeError(
f"add_to_nwbfile() takes at most {len(parameter_names) + num_positional_args_before_args} positional arguments but "
f"{len(args) + num_positional_args_before_args} were given. "
"Note: Positional arguments are deprecated and will be removed on or after August 2026. "
"Please use keyword arguments."
)
positional_values = dict(zip(parameter_names, args))
passed_as_positional = list(positional_values.keys())
warnings.warn(
f"Passing arguments positionally to DeepLabCutInterface.add_to_nwbfile() is deprecated "
f"and will be removed on or after August 2026. "
f"The following arguments were passed positionally: {passed_as_positional}. "
"Please use keyword arguments instead.",
FutureWarning,
stacklevel=2,
)
container_name = positional_values.get("container_name", container_name)
from ._dlc_utils import (
_add_pose_estimation_to_nwbfile,
_ensure_individuals_in_header,
)

# Use the pose_estimation_metadata_key from the instance if container_name not provided
if container_name is not None:
warnings.warn(
"The container_name parameter in add_to_nwbfile is deprecated and will be removed on or after October 2025. "
"Use the pose_estimation_metadata_key parameter when initializing the interface instead.",
DeprecationWarning,
stacklevel=2,
)
pose_estimation_metadata_key = container_name
else:
pose_estimation_metadata_key = self.pose_estimation_metadata_key
pose_estimation_metadata_key = self.pose_estimation_metadata_key

# Get default metadata
default_metadata = DeepDict(self.get_metadata())
Expand All @@ -626,22 +584,6 @@ def add_to_nwbfile(
if metadata is not None:
default_metadata.deep_update(metadata)

# Set the container name in the metadata, remove this once container_name is deprecated
if container_name is not None:
if (
"PoseEstimation" in default_metadata
and "PoseEstimationContainers" in default_metadata["PoseEstimation"]
):
if container_name in default_metadata["PoseEstimation"]["PoseEstimationContainers"]:
default_metadata["PoseEstimation"]["PoseEstimationContainers"][container_name][
"name"
] = container_name
else:
# If the container doesn't exist in the metadata, create it with the name
default_metadata["PoseEstimation"]["PoseEstimationContainers"][container_name] = {
"name": container_name
}

file_path = Path(self.source_data["file_path"])

# Read the data
Expand Down
44 changes: 1 addition & 43 deletions src/neuroconv/datainterfaces/image/imageinterface.py
Original file line number Diff line number Diff line change
Expand Up @@ -332,8 +332,6 @@ def add_to_nwbfile(
self,
nwbfile: NWBFile,
metadata: DeepDict | None = None,
*args, # TODO: change to * (keyword only) on or after August 2026
container_name: str | None = None,
) -> None:
"""
Add the image data to an NWB file.
Expand All @@ -344,43 +342,7 @@ def add_to_nwbfile(
The NWB file to add the images to
metadata : dict, optional
Metadata for the images
container_name : str, optional, deprecated
Name of the Images container. This parameter is deprecated and will be removed
on or after February 2026. Use metadata_key in __init__ instead.
If provided, it overrides the name from metadata.
"""
# Handle deprecated positional arguments
if args:
parameter_names = [
"container_name",
]
num_positional_args_before_args = 2 # nwbfile, metadata
if len(args) > len(parameter_names):
raise TypeError(
f"add_to_nwbfile() takes at most {len(parameter_names) + num_positional_args_before_args} positional arguments but "
f"{len(args) + num_positional_args_before_args} were given. "
"Note: Positional arguments are deprecated and will be removed on or after August 2026. "
"Please use keyword arguments."
)
positional_values = dict(zip(parameter_names, args))
passed_as_positional = list(positional_values.keys())
warnings.warn(
f"Passing arguments positionally to ImageInterface.add_to_nwbfile() is deprecated "
f"and will be removed on or after August 2026. "
f"The following arguments were passed positionally: {passed_as_positional}. "
"Please use keyword arguments instead.",
FutureWarning,
stacklevel=2,
)
container_name = positional_values.get("container_name", container_name)

if container_name is not None:
warnings.warn(
"The 'container_name' parameter is deprecated and will be removed on or after February 2026. "
"Use 'metadata_key' in the __init__ method instead.",
FutureWarning,
stacklevel=2,
)

if metadata is None:
metadata = self.get_metadata()
Expand All @@ -389,11 +351,7 @@ def add_to_nwbfile(
images_metadata = metadata.get("Images", {})
container_metadata = images_metadata.get(self.metadata_key, {})

# Use container_name only if explicitly provided (deprecated), otherwise use metadata
if container_name is not None:
name = container_name
else:
name = container_metadata.get("name", self.metadata_key)
name = container_metadata.get("name", self.metadata_key)

description = container_metadata.get("description", "Images loaded through ImageInterface")

Expand Down
25 changes: 2 additions & 23 deletions src/neuroconv/tools/data_transfers/_dandi.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,7 @@ def automatic_dandi_upload(
nwb_folder_path: DirectoryPath,
dandiset_folder_path: DirectoryPath | None = None,
version: str = "draft",
sandbox: bool | None = None,
staging: bool | None = None,
sandbox: bool = False,
cleanup: bool = False,
number_of_jobs: int | None = None,
number_of_threads: int | None = None,
Expand Down Expand Up @@ -52,13 +51,8 @@ def automatic_dandi_upload(
version : str, default="draft"
The version of the Dandiset to download. Even if no data has been uploaded yet, this step downloads an essential
Dandiset metadata yaml file. Default is "draft", which is the latest state.
sandbox : bool, optional
sandbox : bool, default: False
Is the Dandiset hosted on the sandbox server? This is mostly for testing purposes.
Defaults to False.
staging : bool, optional
.. deprecated:: 0.6.0
The 'staging' parameter is deprecated and will be removed in February 2026.
Use 'sandbox' instead.
cleanup : bool, default: False
Whether to remove the Dandiset folder path and nwb_folder_path.
number_of_jobs : int, optional
Expand All @@ -77,21 +71,6 @@ def automatic_dandi_upload(
message = "The 'instance' parameter must be either 'dandi', 'ember', or a full URL starting with 'https://'."
raise ValueError(message)

# Handle deprecated 'staging' parameter and set defaults
if staging is not None and sandbox is not None:
raise ValueError("Cannot specify both 'staging' and 'sandbox' parameters. Use 'sandbox' only.")

if staging is not None:
warn(
"The 'staging' parameter is deprecated and will be removed in February 2026. " "Use 'sandbox' instead.",
DeprecationWarning,
stacklevel=2,
)
sandbox = staging

if sandbox is None:
sandbox = False

# Determine the actual dandi_instance name and URL based on instance and sandbox parameters
if instance == "dandi" and sandbox:
url_base = "https://sandbox.dandiarchive.org"
Expand Down
15 changes: 2 additions & 13 deletions src/neuroconv/tools/spikeinterface/spikeinterface.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import warnings
from collections import defaultdict
from typing import Any, Literal, Optional
from typing import Any, Literal

import numpy as np
import psutil
Expand Down Expand Up @@ -1235,7 +1235,6 @@ def add_recording_as_time_series_to_nwbfile(
iterator_options: dict | None = None,
iterator_opts: dict | None = None,
always_write_timestamps: bool = False,
time_series_name: Optional[str] = None,
metadata_key: str = "TimeSeries",
):
"""
Expand Down Expand Up @@ -1294,16 +1293,6 @@ def add_recording_as_time_series_to_nwbfile(
raise ValueError("Cannot specify both 'iterator_opts' and 'iterator_options'. Use 'iterator_options'.")
iterator_options = iterator_opts

# Handle backward compatibility for time_series_name
if time_series_name is not None:
warnings.warn(
"The 'time_series_name' parameter is deprecated and will be removed in or after February 2026. "
"Use 'metadata_key' to specify the metadata entry instead.",
DeprecationWarning,
stacklevel=2,
)
metadata_key = time_series_name

num_segments = recording.get_num_segments()
for segment_index in range(num_segments):
_add_time_series_segment_to_nwbfile(
Expand Down Expand Up @@ -1363,7 +1352,7 @@ def _add_time_series_segment_to_nwbfile(
"The recording extractor has heterogeneous units or is lacking scaling factors. "
"The time series will be saved with unit 'n.a.' and the conversion factors will not be set. "
"To fix this issue, either: "
"1) Set the unit in the metadata['TimeSeries'][time_series_name]['unit'] field, or "
"1) Set the unit in the metadata['TimeSeries'][metadata_key]['unit'] field, or "
"2) Set the `physical_unit`, `gain_to_physical_unit`, and `offset_to_physical_unit` properties "
"on the recording object with consistent units across all channels. "
f"Channel units: {units if units is not None else 'None'}, "
Expand Down
44 changes: 0 additions & 44 deletions tests/remote_transfer_services/dandi_transfer_tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,47 +84,3 @@ def test_automatic_dandi_upload_non_parallel_non_threaded(tmp_path):
number_of_jobs=1,
number_of_threads=1,
)


@pytest.mark.skipif(
not HAVE_DANDI_KEY,
reason="You must set your DANDI_SANDBOX_API_KEY to run this test!",
)
def test_staging_sandbox_conflict(tmp_path):
"""Test that providing both 'staging' and 'sandbox' parameters raises ValueError."""

nwb_folder_path = tmp_path / "test_nwb"
nwb_folder_path.mkdir()

with pytest.raises(ValueError, match="Cannot specify both 'staging' and 'sandbox' parameters"):
automatic_dandi_upload(dandiset_id="200000", nwb_folder_path=nwb_folder_path, sandbox=True, staging=True)


@pytest.mark.skipif(
not HAVE_DANDI_KEY,
reason="You must set your DANDI_SANDBOX_API_KEY to run this test!",
)
def test_staging_backward_compatibility(tmp_path):
"""Test that staging=True works the same as sandbox=True with deprecation warning."""
import warnings

nwb_folder_path = tmp_path / "test_nwb"
nwb_folder_path.mkdir()
metadata = get_default_nwbfile_metadata()
metadata["NWBFile"].update(
session_start_time=datetime.now().astimezone(),
session_id=f"test-staging-compat-{sys.platform}-{get_python_version().replace('.', '-')}",
)
metadata.update(Subject=dict(subject_id="foo", species="Mus musculus", age="P1D", sex="U"))
with NWBHDF5IO(path=nwb_folder_path / "test_nwb_staging.nwb", mode="w") as io:
io.write(make_nwbfile_from_metadata(metadata=metadata))

with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")

# This should work with deprecation warning
automatic_dandi_upload(dandiset_id="200560", nwb_folder_path=nwb_folder_path, staging=True)

# Check that deprecation warning was issued
deprecation_warnings = [warning for warning in w if issubclass(warning.category, DeprecationWarning)]
assert len(deprecation_warnings) == 1, f"Expected 1 deprecation warning, got {len(deprecation_warnings)}"
14 changes: 7 additions & 7 deletions tests/test_modalities/test_ecephys/test_tools_spikeinterface.py
Original file line number Diff line number Diff line change
Expand Up @@ -1158,8 +1158,8 @@ def test_default_values(self):
expected_data = recording.get_traces(segment_index=0)
np.testing.assert_array_almost_equal(expected_data, extracted_data)

def test_time_series_name(self):
"""Test that time_series_name is used to look up metadata."""
def test_metadata_key(self):
"""Test that metadata_key is used to look up metadata."""
# Create a recording object for testing
num_channels = 3
sampling_frequency = 1.0
Expand All @@ -1185,7 +1185,7 @@ def test_time_series_name(self):
recording=recording,
nwbfile=nwbfile,
metadata=metadata,
time_series_name="CustomTimeSeries",
metadata_key="CustomTimeSeries",
iterator_type=None,
)

Expand All @@ -1194,8 +1194,8 @@ def test_time_series_name(self):
assert time_series.unit == "custom_unit"
assert time_series.description == "Custom description"

def test_custom_metadata_with_time_series_name(self):
"""Test that custom metadata is applied when time_series_name is provided."""
def test_custom_metadata_with_metadata_key(self):
"""Test that custom metadata is applied when metadata_key is provided."""
# Create a recording object for testing
num_channels = 3
sampling_frequency = 1.0
Expand Down Expand Up @@ -1223,7 +1223,7 @@ def test_custom_metadata_with_time_series_name(self):
recording=recording,
nwbfile=nwbfile,
metadata=metadata,
time_series_name="MyCustomSeries",
metadata_key="MyCustomSeries",
iterator_type=None,
)

Expand Down Expand Up @@ -1342,7 +1342,7 @@ def test_metadata_priority(self):
recording=recording,
nwbfile=nwbfile,
metadata=metadata,
time_series_name="TimeSeriesRaw",
metadata_key="TimeSeriesRaw",
)

time_series = nwbfile.acquisition["TimeSeriesRaw"]
Expand Down
Loading