Skip to content

Commit 5ab71e3

Browse files
authored
Remove February 2026 deprecated parameters (#1678)
1 parent 462e704 commit 5ab71e3

File tree

7 files changed

+16
-189
lines changed

7 files changed

+16
-189
lines changed

CHANGELOG.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,9 @@
11
# v0.9.4 (Upcoming)
22

33
## Removals, Deprecations and Changes
4+
* Removed the deprecated `staging` parameter from `automatic_dandi_upload`. Use `sandbox` instead. [PR #1678](https://github.com/catalystneuro/neuroconv/pull/1678)
5+
* Removed the deprecated `container_name` parameter from `ImageInterface.add_to_nwbfile` and `DeepLabCutInterface.add_to_nwbfile`. Use `metadata_key` in `__init__` instead. [PR #1678](https://github.com/catalystneuro/neuroconv/pull/1678)
6+
* Removed the deprecated `time_series_name` parameter from `add_recording_as_time_series_to_nwbfile`. Use `metadata_key` instead. [PR #1678](https://github.com/catalystneuro/neuroconv/pull/1678)
47

58
## Bug Fixes
69
* Fixed `get_json_schema_from_method_signature` to resolve PEP 563 string annotations (from `from __future__ import annotations`) before passing them to pydantic. This affected any interface defined in a module with deferred annotations (e.g. `MiniscopeConverter`, or external subclasses from SpikeInterface). [PR #1670](https://github.com/catalystneuro/neuroconv/pull/1670)

src/neuroconv/datainterfaces/behavior/deeplabcut/deeplabcutdatainterface.py

Lines changed: 1 addition & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -559,8 +559,6 @@ def add_to_nwbfile(
559559
self,
560560
nwbfile: NWBFile,
561561
metadata: dict | None = None,
562-
*args, # TODO: change to * (keyword only) on or after August 2026
563-
container_name: str | None = None,
564562
):
565563
"""
566564
Conversion from DLC output files to nwb. Derived from dlc2nwb library.
@@ -571,53 +569,13 @@ def add_to_nwbfile(
571569
nwb file to which the recording information is to be added
572570
metadata: dict
573571
metadata info for constructing the nwb file (optional).
574-
container_name: str, default: None
575-
name of the PoseEstimation container in the nwb. If None, uses the container_name from the interface.
576-
This parameter is deprecated and will be removed on or after October 2025.
577-
Use the pose_estimation_metadata_key parameter when initializing the interface instead to specify
578-
the content of the metadata.
579-
580572
"""
581-
# Handle deprecated positional arguments
582-
if args:
583-
parameter_names = [
584-
"container_name",
585-
]
586-
num_positional_args_before_args = 2 # nwbfile, metadata
587-
if len(args) > len(parameter_names):
588-
raise TypeError(
589-
f"add_to_nwbfile() takes at most {len(parameter_names) + num_positional_args_before_args} positional arguments but "
590-
f"{len(args) + num_positional_args_before_args} were given. "
591-
"Note: Positional arguments are deprecated and will be removed on or after August 2026. "
592-
"Please use keyword arguments."
593-
)
594-
positional_values = dict(zip(parameter_names, args))
595-
passed_as_positional = list(positional_values.keys())
596-
warnings.warn(
597-
f"Passing arguments positionally to DeepLabCutInterface.add_to_nwbfile() is deprecated "
598-
f"and will be removed on or after August 2026. "
599-
f"The following arguments were passed positionally: {passed_as_positional}. "
600-
"Please use keyword arguments instead.",
601-
FutureWarning,
602-
stacklevel=2,
603-
)
604-
container_name = positional_values.get("container_name", container_name)
605573
from ._dlc_utils import (
606574
_add_pose_estimation_to_nwbfile,
607575
_ensure_individuals_in_header,
608576
)
609577

610-
# Use the pose_estimation_metadata_key from the instance if container_name not provided
611-
if container_name is not None:
612-
warnings.warn(
613-
"The container_name parameter in add_to_nwbfile is deprecated and will be removed on or after October 2025. "
614-
"Use the pose_estimation_metadata_key parameter when initializing the interface instead.",
615-
DeprecationWarning,
616-
stacklevel=2,
617-
)
618-
pose_estimation_metadata_key = container_name
619-
else:
620-
pose_estimation_metadata_key = self.pose_estimation_metadata_key
578+
pose_estimation_metadata_key = self.pose_estimation_metadata_key
621579

622580
# Get default metadata
623581
default_metadata = DeepDict(self.get_metadata())
@@ -626,22 +584,6 @@ def add_to_nwbfile(
626584
if metadata is not None:
627585
default_metadata.deep_update(metadata)
628586

629-
# Set the container name in the metadata, remove this once container_name is deprecated
630-
if container_name is not None:
631-
if (
632-
"PoseEstimation" in default_metadata
633-
and "PoseEstimationContainers" in default_metadata["PoseEstimation"]
634-
):
635-
if container_name in default_metadata["PoseEstimation"]["PoseEstimationContainers"]:
636-
default_metadata["PoseEstimation"]["PoseEstimationContainers"][container_name][
637-
"name"
638-
] = container_name
639-
else:
640-
# If the container doesn't exist in the metadata, create it with the name
641-
default_metadata["PoseEstimation"]["PoseEstimationContainers"][container_name] = {
642-
"name": container_name
643-
}
644-
645587
file_path = Path(self.source_data["file_path"])
646588

647589
# Read the data

src/neuroconv/datainterfaces/image/imageinterface.py

Lines changed: 1 addition & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -332,8 +332,6 @@ def add_to_nwbfile(
332332
self,
333333
nwbfile: NWBFile,
334334
metadata: DeepDict | None = None,
335-
*args, # TODO: change to * (keyword only) on or after August 2026
336-
container_name: str | None = None,
337335
) -> None:
338336
"""
339337
Add the image data to an NWB file.
@@ -344,43 +342,7 @@ def add_to_nwbfile(
344342
The NWB file to add the images to
345343
metadata : dict, optional
346344
Metadata for the images
347-
container_name : str, optional, deprecated
348-
Name of the Images container. This parameter is deprecated and will be removed
349-
on or after February 2026. Use metadata_key in __init__ instead.
350-
If provided, it overrides the name from metadata.
351345
"""
352-
# Handle deprecated positional arguments
353-
if args:
354-
parameter_names = [
355-
"container_name",
356-
]
357-
num_positional_args_before_args = 2 # nwbfile, metadata
358-
if len(args) > len(parameter_names):
359-
raise TypeError(
360-
f"add_to_nwbfile() takes at most {len(parameter_names) + num_positional_args_before_args} positional arguments but "
361-
f"{len(args) + num_positional_args_before_args} were given. "
362-
"Note: Positional arguments are deprecated and will be removed on or after August 2026. "
363-
"Please use keyword arguments."
364-
)
365-
positional_values = dict(zip(parameter_names, args))
366-
passed_as_positional = list(positional_values.keys())
367-
warnings.warn(
368-
f"Passing arguments positionally to ImageInterface.add_to_nwbfile() is deprecated "
369-
f"and will be removed on or after August 2026. "
370-
f"The following arguments were passed positionally: {passed_as_positional}. "
371-
"Please use keyword arguments instead.",
372-
FutureWarning,
373-
stacklevel=2,
374-
)
375-
container_name = positional_values.get("container_name", container_name)
376-
377-
if container_name is not None:
378-
warnings.warn(
379-
"The 'container_name' parameter is deprecated and will be removed on or after February 2026. "
380-
"Use 'metadata_key' in the __init__ method instead.",
381-
FutureWarning,
382-
stacklevel=2,
383-
)
384346

385347
if metadata is None:
386348
metadata = self.get_metadata()
@@ -389,11 +351,7 @@ def add_to_nwbfile(
389351
images_metadata = metadata.get("Images", {})
390352
container_metadata = images_metadata.get(self.metadata_key, {})
391353

392-
# Use container_name only if explicitly provided (deprecated), otherwise use metadata
393-
if container_name is not None:
394-
name = container_name
395-
else:
396-
name = container_metadata.get("name", self.metadata_key)
354+
name = container_metadata.get("name", self.metadata_key)
397355

398356
description = container_metadata.get("description", "Images loaded through ImageInterface")
399357

src/neuroconv/tools/data_transfers/_dandi.py

Lines changed: 2 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,7 @@ def automatic_dandi_upload(
1616
nwb_folder_path: DirectoryPath,
1717
dandiset_folder_path: DirectoryPath | None = None,
1818
version: str = "draft",
19-
sandbox: bool | None = None,
20-
staging: bool | None = None,
19+
sandbox: bool = False,
2120
cleanup: bool = False,
2221
number_of_jobs: int | None = None,
2322
number_of_threads: int | None = None,
@@ -52,13 +51,8 @@ def automatic_dandi_upload(
5251
version : str, default="draft"
5352
The version of the Dandiset to download. Even if no data has been uploaded yet, this step downloads an essential
5453
Dandiset metadata yaml file. Default is "draft", which is the latest state.
55-
sandbox : bool, optional
54+
sandbox : bool, default: False
5655
Is the Dandiset hosted on the sandbox server? This is mostly for testing purposes.
57-
Defaults to False.
58-
staging : bool, optional
59-
.. deprecated:: 0.6.0
60-
The 'staging' parameter is deprecated and will be removed in February 2026.
61-
Use 'sandbox' instead.
6256
cleanup : bool, default: False
6357
Whether to remove the Dandiset folder path and nwb_folder_path.
6458
number_of_jobs : int, optional
@@ -77,21 +71,6 @@ def automatic_dandi_upload(
7771
message = "The 'instance' parameter must be either 'dandi', 'ember', or a full URL starting with 'https://'."
7872
raise ValueError(message)
7973

80-
# Handle deprecated 'staging' parameter and set defaults
81-
if staging is not None and sandbox is not None:
82-
raise ValueError("Cannot specify both 'staging' and 'sandbox' parameters. Use 'sandbox' only.")
83-
84-
if staging is not None:
85-
warn(
86-
"The 'staging' parameter is deprecated and will be removed in February 2026. " "Use 'sandbox' instead.",
87-
DeprecationWarning,
88-
stacklevel=2,
89-
)
90-
sandbox = staging
91-
92-
if sandbox is None:
93-
sandbox = False
94-
9574
# Determine the actual dandi_instance name and URL based on instance and sandbox parameters
9675
if instance == "dandi" and sandbox:
9776
url_base = "https://sandbox.dandiarchive.org"

src/neuroconv/tools/spikeinterface/spikeinterface.py

Lines changed: 2 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import warnings
22
from collections import defaultdict
3-
from typing import Any, Literal, Optional
3+
from typing import Any, Literal
44

55
import numpy as np
66
import psutil
@@ -1235,7 +1235,6 @@ def add_recording_as_time_series_to_nwbfile(
12351235
iterator_options: dict | None = None,
12361236
iterator_opts: dict | None = None,
12371237
always_write_timestamps: bool = False,
1238-
time_series_name: Optional[str] = None,
12391238
metadata_key: str = "TimeSeries",
12401239
):
12411240
"""
@@ -1294,16 +1293,6 @@ def add_recording_as_time_series_to_nwbfile(
12941293
raise ValueError("Cannot specify both 'iterator_opts' and 'iterator_options'. Use 'iterator_options'.")
12951294
iterator_options = iterator_opts
12961295

1297-
# Handle backward compatibility for time_series_name
1298-
if time_series_name is not None:
1299-
warnings.warn(
1300-
"The 'time_series_name' parameter is deprecated and will be removed in or after February 2026. "
1301-
"Use 'metadata_key' to specify the metadata entry instead.",
1302-
DeprecationWarning,
1303-
stacklevel=2,
1304-
)
1305-
metadata_key = time_series_name
1306-
13071296
num_segments = recording.get_num_segments()
13081297
for segment_index in range(num_segments):
13091298
_add_time_series_segment_to_nwbfile(
@@ -1363,7 +1352,7 @@ def _add_time_series_segment_to_nwbfile(
13631352
"The recording extractor has heterogeneous units or is lacking scaling factors. "
13641353
"The time series will be saved with unit 'n.a.' and the conversion factors will not be set. "
13651354
"To fix this issue, either: "
1366-
"1) Set the unit in the metadata['TimeSeries'][time_series_name]['unit'] field, or "
1355+
"1) Set the unit in the metadata['TimeSeries'][metadata_key]['unit'] field, or "
13671356
"2) Set the `physical_unit`, `gain_to_physical_unit`, and `offset_to_physical_unit` properties "
13681357
"on the recording object with consistent units across all channels. "
13691358
f"Channel units: {units if units is not None else 'None'}, "

tests/remote_transfer_services/dandi_transfer_tools.py

Lines changed: 0 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -84,47 +84,3 @@ def test_automatic_dandi_upload_non_parallel_non_threaded(tmp_path):
8484
number_of_jobs=1,
8585
number_of_threads=1,
8686
)
87-
88-
89-
@pytest.mark.skipif(
90-
not HAVE_DANDI_KEY,
91-
reason="You must set your DANDI_SANDBOX_API_KEY to run this test!",
92-
)
93-
def test_staging_sandbox_conflict(tmp_path):
94-
"""Test that providing both 'staging' and 'sandbox' parameters raises ValueError."""
95-
96-
nwb_folder_path = tmp_path / "test_nwb"
97-
nwb_folder_path.mkdir()
98-
99-
with pytest.raises(ValueError, match="Cannot specify both 'staging' and 'sandbox' parameters"):
100-
automatic_dandi_upload(dandiset_id="200000", nwb_folder_path=nwb_folder_path, sandbox=True, staging=True)
101-
102-
103-
@pytest.mark.skipif(
104-
not HAVE_DANDI_KEY,
105-
reason="You must set your DANDI_SANDBOX_API_KEY to run this test!",
106-
)
107-
def test_staging_backward_compatibility(tmp_path):
108-
"""Test that staging=True works the same as sandbox=True with deprecation warning."""
109-
import warnings
110-
111-
nwb_folder_path = tmp_path / "test_nwb"
112-
nwb_folder_path.mkdir()
113-
metadata = get_default_nwbfile_metadata()
114-
metadata["NWBFile"].update(
115-
session_start_time=datetime.now().astimezone(),
116-
session_id=f"test-staging-compat-{sys.platform}-{get_python_version().replace('.', '-')}",
117-
)
118-
metadata.update(Subject=dict(subject_id="foo", species="Mus musculus", age="P1D", sex="U"))
119-
with NWBHDF5IO(path=nwb_folder_path / "test_nwb_staging.nwb", mode="w") as io:
120-
io.write(make_nwbfile_from_metadata(metadata=metadata))
121-
122-
with warnings.catch_warnings(record=True) as w:
123-
warnings.simplefilter("always")
124-
125-
# This should work with deprecation warning
126-
automatic_dandi_upload(dandiset_id="200560", nwb_folder_path=nwb_folder_path, staging=True)
127-
128-
# Check that deprecation warning was issued
129-
deprecation_warnings = [warning for warning in w if issubclass(warning.category, DeprecationWarning)]
130-
assert len(deprecation_warnings) == 1, f"Expected 1 deprecation warning, got {len(deprecation_warnings)}"

tests/test_modalities/test_ecephys/test_tools_spikeinterface.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1158,8 +1158,8 @@ def test_default_values(self):
11581158
expected_data = recording.get_traces(segment_index=0)
11591159
np.testing.assert_array_almost_equal(expected_data, extracted_data)
11601160

1161-
def test_time_series_name(self):
1162-
"""Test that time_series_name is used to look up metadata."""
1161+
def test_metadata_key(self):
1162+
"""Test that metadata_key is used to look up metadata."""
11631163
# Create a recording object for testing
11641164
num_channels = 3
11651165
sampling_frequency = 1.0
@@ -1185,7 +1185,7 @@ def test_time_series_name(self):
11851185
recording=recording,
11861186
nwbfile=nwbfile,
11871187
metadata=metadata,
1188-
time_series_name="CustomTimeSeries",
1188+
metadata_key="CustomTimeSeries",
11891189
iterator_type=None,
11901190
)
11911191

@@ -1194,8 +1194,8 @@ def test_time_series_name(self):
11941194
assert time_series.unit == "custom_unit"
11951195
assert time_series.description == "Custom description"
11961196

1197-
def test_custom_metadata_with_time_series_name(self):
1198-
"""Test that custom metadata is applied when time_series_name is provided."""
1197+
def test_custom_metadata_with_metadata_key(self):
1198+
"""Test that custom metadata is applied when metadata_key is provided."""
11991199
# Create a recording object for testing
12001200
num_channels = 3
12011201
sampling_frequency = 1.0
@@ -1223,7 +1223,7 @@ def test_custom_metadata_with_time_series_name(self):
12231223
recording=recording,
12241224
nwbfile=nwbfile,
12251225
metadata=metadata,
1226-
time_series_name="MyCustomSeries",
1226+
metadata_key="MyCustomSeries",
12271227
iterator_type=None,
12281228
)
12291229

@@ -1342,7 +1342,7 @@ def test_metadata_priority(self):
13421342
recording=recording,
13431343
nwbfile=nwbfile,
13441344
metadata=metadata,
1345-
time_series_name="TimeSeriesRaw",
1345+
metadata_key="TimeSeriesRaw",
13461346
)
13471347

13481348
time_series = nwbfile.acquisition["TimeSeriesRaw"]

0 commit comments

Comments
 (0)