@@ -101,7 +101,7 @@ def __init__(self, one=None):
101101 assert one
102102 self .one = one
103103
104- def _patch_dataset (self , path , dset_id = None , dry = False , ftp = False ):
104+ def _patch_dataset (self , path , dset_id = None , revision = None , dry = False , ftp = False ):
105105 """
106106         This private method gets the dataset information from alyx, computes the local
107107 and remote paths and initiates the file copy
@@ -113,6 +113,10 @@ def _patch_dataset(self, path, dset_id=None, dry=False, ftp=False):
113113 dset_id = None
114114 assert dset_id
115115 assert is_uuid_string (dset_id )
116+         # If the revision is not None then we need to add the revision into the path. Note that moving the file
117+         # is handled by the ONE registration client
118+ if revision is not None :
119+ path = path .parent .joinpath (f'#{ revision } #' , path .name )
116120 assert path .exists ()
117121 dset = self .one .alyx .rest ('datasets' , 'read' , id = dset_id )
118122 fr = next (fr for fr in dset ['file_records' ] if 'flatiron' in fr ['data_repository' ])
@@ -185,7 +189,7 @@ def patch_dataset(self, file_list, dry=False, ftp=False, **kwargs):
185189 return
186190 # from the dataset info, set flatIron flag to exists=True
187191 for p , d in zip (file_list , response ):
188- self ._patch_dataset (p , dset_id = d ['id' ], dry = dry , ftp = ftp )
192+ self ._patch_dataset (p , dset_id = d ['id' ], revision = d [ 'revision' ], dry = dry , ftp = ftp )
189193 return response
190194
191195 def patch_datasets (self , file_list , ** kwargs ):
0 commit comments