     MultiLevelDatasetDescriptor,
     MutableDataStore,
 )
-from xcube.core.store.fs.registry import new_fs_data_store
+from xcube.core.store.fs.registry import get_filename_extensions, new_fs_data_store
 from xcube.core.store.fs.store import FsDataStore
 from xcube.core.zarrstore import GenericZarrStore
 from xcube.util.temp import new_temp_dir
@@ -112,7 +112,7 @@ def test_open_packed(self):
 # noinspection PyUnresolvedReferences,PyPep8Naming
 class FsDataStoresTestMixin(ABC):
     @abstractmethod
-    def create_data_store(self) -> FsDataStore:
+    def create_data_store(self, read_only=False) -> FsDataStore:
         pass
 
     @classmethod
@@ -137,6 +137,28 @@ def prepare_fs(cls, fs: fsspec.AbstractFileSystem, root: str):
             with fs.open(file_path, "w") as fp:
                 fp.write("\n")
 
+    def test_no_write_to_read_only(self):
+        data_store = self.create_data_store(read_only=True)
+        data = new_cube_data()
+        with self.assertRaises(DataStoreError) as dse:
+            data_store.write_data(data)
+        self.assertEqual("Data store is read-only.", f"{dse.exception}")
+
+    def test_no_delete_on_read_only(self):
+        data_store = self.create_data_store(read_only=True)
+        with self.assertRaises(DataStoreError) as dse:
+            data_store.delete_data("the_data_id_does_not_even_matter.nc")
+        self.assertEqual("Data store is read-only.", f"{dse.exception}")
+
+    def test_cannot_open_unknown_format(self):
+        data_store = self.create_data_store()
+        with self.assertRaises(DataStoreError) as dse:
+            data_store.open_data("unknown.format")
+        self.assertEqual(
+            "Cannot determine data type for data resource 'unknown.format'",
+            f"{dse.exception}",
+        )
+
     def test_mldataset_levels(self):
         data_store = self.create_data_store()
         self._assert_multi_level_dataset_format_supported(data_store)
@@ -491,21 +513,21 @@ def _assert_dataset_supported(
 
 
 class FileFsDataStoresTest(FsDataStoresTestMixin, unittest.TestCase):
-    def create_data_store(self) -> FsDataStore:
+    def create_data_store(self, read_only=False) -> FsDataStore:
         root = os.path.join(new_temp_dir(prefix="xcube"), ROOT_DIR)
         self.prepare_fs(fsspec.filesystem("file"), root)
-        return new_fs_data_store("file", root=root, max_depth=3)
+        return new_fs_data_store("file", root=root, max_depth=3, read_only=read_only)
 
 
 class MemoryFsDataStoresTest(FsDataStoresTestMixin, unittest.TestCase):
-    def create_data_store(self) -> FsDataStore:
+    def create_data_store(self, read_only=False) -> FsDataStore:
         root = ROOT_DIR
         self.prepare_fs(fsspec.filesystem("memory"), root)
-        return new_fs_data_store("memory", root=root, max_depth=3)
+        return new_fs_data_store("memory", root=root, max_depth=3, read_only=read_only)
 
 
 class S3FsDataStoresTest(FsDataStoresTestMixin, S3Test):
-    def create_data_store(self) -> FsDataStore:
+    def create_data_store(self, read_only=False) -> FsDataStore:
         root = ROOT_DIR
         storage_options = dict(
             anon=False,
@@ -515,5 +537,47 @@ def create_data_store(self) -> FsDataStore:
         )
         self.prepare_fs(fsspec.filesystem("s3", **storage_options), root)
         return new_fs_data_store(
-            "s3", root=root, max_depth=3, storage_options=storage_options
+            "s3",
+            root=root,
+            max_depth=3,
+            storage_options=storage_options,
+            read_only=read_only,
         )
+
+
+class GetFilenameExtensionsTest(unittest.TestCase):
+    def test_get_filename_extensions_openers(self):
+        opener_extensions = get_filename_extensions("openers")
+        self.assertIn(".nc", list(opener_extensions.keys()))
+        self.assertIn(".zarr", list(opener_extensions.keys()))
+        self.assertIn(".levels", list(opener_extensions.keys()))
+        self.assertIn(".shp", list(opener_extensions.keys()))
+        self.assertIn(".geojson", list(opener_extensions.keys()))
+        self.assertIn(".tif", list(opener_extensions.keys()))
+        self.assertIn(".tiff", list(opener_extensions.keys()))
+        self.assertIn(".geotiff", list(opener_extensions.keys()))
+        self.assertTrue(len(opener_extensions[".nc"]) >= 6)
+        self.assertTrue(len(opener_extensions[".zarr"]) >= 6)
+        self.assertTrue(len(opener_extensions[".levels"]) >= 12)
+        self.assertTrue(len(opener_extensions[".shp"]) >= 6)
+        self.assertTrue(len(opener_extensions[".geojson"]) >= 6)
+        self.assertTrue(len(opener_extensions[".tif"]) >= 12)
+        self.assertTrue(len(opener_extensions[".tiff"]) >= 12)
+        self.assertTrue(len(opener_extensions[".geotiff"]) >= 12)
+
+    def test_get_filename_extensions_writers(self):
+        writer_extensions = get_filename_extensions("writers")
+        self.assertIn(".nc", list(writer_extensions.keys()))
+        self.assertIn(".zarr", list(writer_extensions.keys()))
+        self.assertIn(".levels", list(writer_extensions.keys()))
+        self.assertIn(".shp", list(writer_extensions.keys()))
+        self.assertIn(".geojson", list(writer_extensions.keys()))
+        self.assertTrue(len(writer_extensions[".nc"]) >= 6)
+        self.assertTrue(len(writer_extensions[".zarr"]) >= 6)
+        self.assertTrue(len(writer_extensions[".levels"]) >= 12)
+        self.assertTrue(len(writer_extensions[".shp"]) >= 6)
+        self.assertTrue(len(writer_extensions[".geojson"]) >= 6)
+
+    def test_get_filename_extensions_invalid(self):
+        with self.assertRaises(DataStoreError):
+            get_filename_extensions("rgth")
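A minimal usage sketch of the two additions exercised above, not part of the diff: it assumes get_filename_extensions returns a mapping from filename extension to the opener/writer identifiers registered for it, that DataStoreError is importable from xcube.core.store, and that constructing a "memory" store with a non-existent root is permitted.

# Sketch only; names outside the diff (the DataStoreError import path, the
# "xcube-example" root) are assumptions, not part of the change itself.
from xcube.core.store import DataStoreError
from xcube.core.store.fs.registry import get_filename_extensions, new_fs_data_store

# Extensions mapped to the openers registered for them.
opener_extensions = get_filename_extensions("openers")
print(sorted(opener_extensions))           # e.g. ['.geojson', '.geotiff', '.levels', ...]
print(len(opener_extensions[".zarr"]))     # number of openers that can handle ".zarr"

# A read-only store rejects mutating calls before touching the file system.
store = new_fs_data_store("memory", root="xcube-example", read_only=True)
try:
    store.delete_data("does_not_matter.zarr")
except DataStoreError as error:
    print(error)  # "Data store is read-only."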