@@ -388,7 +388,7 @@ def load_channel_locations(eid, probe=None, one=None, aligned=False, brain_atlas
388388
389389
390390def load_spike_sorting_fast (eid , one = None , probe = None , dataset_types = None , spike_sorter = None , revision = None ,
391- brain_regions = None , nested = True ):
391+ brain_regions = None , nested = True , collection = None ):
392392 """
393393 From an eid, loads spikes and clusters for all probes
394394 The following set of dataset types are loaded:
@@ -403,12 +403,14 @@ def load_spike_sorting_fast(eid, one=None, probe=None, dataset_types=None, spike
403403 :param probe: name of probe to load in, if not given all probes for session will be loaded
404404 :param dataset_types: additional spikes/clusters objects to add to the standard default list
405405 :param spike_sorter: name of the spike sorting you want to load (None for default)
406+ :param collection: name of the spike sorting collection to load — mutually exclusive with the spike_sorter argument, e.g. "alf/probe00"
406407 :param return_channels: (bool) defaults to False otherwise tries and load channels from disk
407408 :param brain_regions: ibllib.atlas.regions.BrainRegions object - will label acronyms if provided
408409 :param nested: if a single probe is required, do not output a dictionary with the probe name as key
409410 :return: spikes, clusters (dict of bunch, 1 bunch per probe)
410411 """
411- collection = _collection_filter_from_args (probe , spike_sorter )
412+ if collection is None :
413+ collection = _collection_filter_from_args (probe , spike_sorter )
412414 _logger .debug (f"load spike sorting with collection filter { collection } " )
413415 kwargs = dict (eid = eid , one = one , collection = collection , revision = revision , dataset_types = dataset_types ,
414416 brain_regions = brain_regions )
0 commit comments