1919import one .alf .exceptions as alferr
2020from one .alf .cache import QC_TYPE
2121import iblutil .io .params as iopar
22+ from iblutil .util import Bunch
2223
2324from ibllib .oneibl import patcher , registration , data_handlers as handlers
2425import ibllib .io .extractors .base
@@ -84,12 +85,13 @@ def mock_input(prompt):
8485 return FTP_pars [next (k for k in FTP_pars .keys () if k in prompt .replace (',' , '' ).split ())]
8586
8687
87- class TestGlobusPatcher (unittest .TestCase ):
88- """Tests for the ibllib.oneibl.patcher.GlobusPatcher class."""
89-
88+ class _GlobusPatcherTest (unittest .TestCase ):
9089 globus_sdk_mock = None
9190 """unittest.mock._patch: Mock object for globus_sdk package."""
9291
92+ patcher_class = None
93+ """object: The patcher class to instantiate during setup."""
94+
9395 @mock .patch ('one.remote.globus._setup' )
9496 def setUp (self , _ ) -> None :
9597 # Create a temp dir for writing datasets to
@@ -107,12 +109,21 @@ def setUp(self, _) -> None:
107109 'expires_at_seconds' : datetime .datetime .now ().timestamp () + 60 ** 2
108110 })
109111 # Mock the globus SDK so that no actual tasks are submitted
110- self . globus_sdk_mock = mock .patch ('one.remote.globus.globus_sdk' )
111- self .globus_sdk_mock .start ()
112- self .addCleanup (self . globus_sdk_mock .stop )
112+ globus_sdk_mock = mock .patch ('one.remote.globus.globus_sdk' )
113+ self .globus_sdk_mock = globus_sdk_mock .start ()
114+ self .addCleanup (globus_sdk_mock .stop )
113115 self .one = ONE (** TEST_DB )
116+
117+
class TestGlobusPatcher(_GlobusPatcherTest):
    """Tests for the ibllib.oneibl.patcher.GlobusPatcher class."""

    patcher_class = patcher.GlobusPatcher

    def setUp(self) -> None:
        """Instantiate the patcher under test with Globus client parameters mocked out."""
        super().setUp()
        # Avoid touching real Globus client parameter files on disk
        params_patch = mock.patch('one.remote.globus.load_client_params', return_value=self.pars)
        with params_patch:
            self.globus_patcher = self.patcher_class(one=self.one)
116127
117128 def test_patch_datasets (self ):
118129 """Tests for GlobusPatcher.patch_datasets and GlobusPatcher.launch_transfers methods."""
@@ -163,6 +174,98 @@ def test_patch_datasets(self):
163174 self .globus_patcher .client .submit_transfer .assert_called ()
164175
165176
class TestIBLGlobusPatcher(_GlobusPatcherTest):
    """Tests for the ibllib.oneibl.patcher.IBLGlobusPatcher class."""

    patcher_class = patcher.IBLGlobusPatcher

    def setUp(self) -> None:
        """Instantiate an IBLGlobusPatcher with the Globus client parameters mocked out.

        Unlike GlobusPatcher, this class is constructed from an AlyxClient instance.
        """
        super().setUp()
        with mock.patch('one.remote.globus.load_client_params', return_value=self.pars):
            self.globus_patcher = self.patcher_class(alyx=self.one.alyx)

    def test_delete_datasets(self):
        """Tests for the IBLGlobusPatcher.delete_dataset method."""
        # The following dataset should have two file records, a flatiron one that exists and an SR one that doesn't
        did = '80fabd30-9dc8-4778-b349-d175af63e1bd'
        self.dset = self.one.alyx.rest('datasets', 'read', id=did)
        assert len(self.dset['file_records']) == 2, 'expected two file records for this test dataset'

        # Some Globus endpoint IDs to return with Alyx REST mock
        self.endpoint_ids = {name: str(uuid4()) for name in ('mainen_lab_SR', 'flatiron_mainenlab')}

        # The mocked Globus SDK client returns this task ID for any submitted delete
        task_id = uuid4()
        self.globus_patcher.client.submit_delete.return_value = Bunch(data={'task_id': str(task_id)})

        # TEST 1: Test delete of flatiron dataset with UUID
        with mock.patch.object(self.one.alyx, 'rest', side_effect=self._alyx_patch) as alyx_mock:
            task_ids, deleted = self.globus_patcher.delete_dataset(did)
            # The Alyx record itself should have been deleted after the Globus delete was submitted
            alyx_mock.assert_called_with('datasets', 'delete', id=did)
            self.globus_sdk_mock.DeleteData.assert_called_once()
            self.assertEqual(task_ids, [task_id])
            # Only the flatiron file record exists, so only that repository is touched
            self.assertCountEqual(deleted, ['flatiron_mainenlab'])
            expected = [PurePosixPath(f'ZFM-01935/2021-02-05/001/alf/_ibl_wheelMoves.intervals.{did}.npy')]
            self.assertEqual(expected, deleted['flatiron_mainenlab'])

        # TEST 2: Test deleting with dataset record dict, an existing SR and AWS file record, and missing globus ID for flatiron
        for fr in self.dset['file_records']:
            if fr['data_repository'] == 'mainen_lab_SR':
                fr['exists'] = True  # False -> True
            elif fr['data_repository'] == 'flatiron_mainenlab':
                # Add an AWS file record
                s3_fr = fr.copy()
                s3_fr['data_repository'] = 'aws_mainenlab'
                s3_fr['data_repository_path'] = 'data' + s3_fr['data_repository_path']
                relative_path = '/'.join(Path(s3_fr['data_url']).parts[4:])
                s3_fr['data_url'] = (
                    'https://bucket.s3.amazonaws.com/' + s3_fr['data_repository_path'] + relative_path)
                self.dset['file_records'].append(s3_fr)
        # Also make the flatiron endpoint ID None
        del self.endpoint_ids['flatiron_mainenlab']
        # Reset mock calls
        self.globus_patcher.client.reset_mock()
        self.globus_sdk_mock.reset_mock()
        with mock.patch.object(self.one.alyx, 'rest', side_effect=self._alyx_patch) as alyx_mock, \
                mock.patch('ibllib.oneibl.patcher.Popen') as proc_mock:
            # Fake the aws CLI subprocess: one line of output, then a clean exit code
            line = mock.MagicMock()
            line.decode.return_value = '...'
            proc_mock().wait.return_value = 0
            proc_mock().stdout.readline.side_effect = (line,)
            proc_mock.reset_mock()  # reset call count
            # Test dry + with debug log level (should add no-progress flag to aws command)
            with self.assertLogs(patcher.__name__, level='DEBUG') as log:
                task_ids, deleted = self.globus_patcher.delete_dataset(self.dset, dry=True)
            # Dry run: no Globus tasks submitted and the Alyx record must NOT be deleted
            self.assertEqual([], task_ids)
            self.assertCountEqual(['mainen_lab_SR', 'aws_mainenlab'], deleted)
            self.assertFalse(any(args == ('datasets', 'delete') for args, _ in alyx_mock.call_args_list))
            self.globus_sdk_mock.DeleteData.assert_not_called()
            expected = [
                'aws', 's3', 'rm', 's3://bucket' + s3_fr['data_url'][31:], '--profile', 'ibladmin', '--dryrun', '--no-progress'
            ]
            # -1/-2 are subprocess.PIPE and subprocess.STDOUT respectively
            proc_mock.assert_called_once_with(expected, stdout=-1, stderr=-2)

            # Test not dry + with higher log level (should add only-show-errors flag to aws command)
            with self.assertLogs(patcher.__name__, level='ERROR') as log:
                task_ids, deleted = self.globus_patcher.delete_dataset(self.dset, dry=False)
            # Should log failure due to missing endpoint ID in Alyx
            self.assertEqual('Unable to delete from flatiron_mainenlab', log.records[-1].getMessage())
            alyx_mock.assert_called_with('datasets', 'delete', id=did)
            self.globus_sdk_mock.DeleteData.assert_called_once()
            # Same aws command minus the dry-run flags, plus --only-show-errors
            expected = [*expected[:-2], '--only-show-errors']
            proc_mock.assert_called_with(expected, stdout=-1, stderr=-2)

    def _alyx_patch(self, endpoint, action, **kwargs):
        """Patch the AlyxClient to return the given dataset.

        Serves the fixture dataset for 'datasets' reads and synthesizes a
        data-repository record (with our fake Globus endpoint IDs) for
        'data-repository' reads; any other endpoint/action returns None.
        """
        if endpoint == 'datasets' and action == 'read':
            self.assertEqual(kwargs['id'], self.dset['url'][-36:])
            return self.dset
        if endpoint == 'data-repository' and action == 'read':
            fr = next(fr for fr in self.dset['file_records'] if fr['data_repository'] == kwargs['id'])
            # globus_is_personal is inferred from the lack of a data URL; the
            # endpoint ID lookup returns None for repos we deleted from the map
            return {'name': fr['data_repository'], 'globus_path': fr['data_repository_path'],
                    'repository_type': 'Fileserver', 'globus_is_personal': fr['data_url'] is None,
                    'globus_endpoint_id': self.endpoint_ids.get(fr['data_repository'])}
267+
268+
166269class TestAlyx2Path (unittest .TestCase ):
167270 dset = {
168271 'url' : 'https://alyx.internationalbrainlab.org/'
0 commit comments