4646import xarray as xr
4747
4848import xcube .core .normalize
49+ from xcube .core .store import DATASET_TYPE
4950from xcube .core .store import DataDescriptor
5051from xcube .core .store import DataOpener
5152from xcube .core .store import DataStore
5253from xcube .core .store import DataStoreError
54+ from xcube .core .store import DataTypeLike
5355from xcube .core .store import DatasetDescriptor
5456from xcube .core .store import DefaultSearchMixin
55- from xcube .core .store import TYPE_SPECIFIER_CUBE
56- from xcube .core .store import TypeSpecifier
5757from xcube .util .jsonschema import JsonArraySchema
5858from xcube .util .jsonschema import JsonBooleanSchema
5959from xcube .util .jsonschema import JsonDateSchema
@@ -753,18 +753,18 @@ def get_data_store_params_schema(cls) -> JsonObjectSchema:
753753 )
754754
755755 @classmethod
756- def get_type_specifiers (cls ) -> Tuple [str , ...]:
757- return str ( TYPE_SPECIFIER_CUBE ) ,
756+ def get_data_types (cls ) -> Tuple [str , ...]:
757+ return DATASET_TYPE . alias ,
758758
759- def get_type_specifiers_for_data (self , data_id : str ) -> Tuple [str , ...]:
759+ def get_data_types_for_data (self , data_id : str ) -> Tuple [str , ...]:
760760 self ._validate_data_id (data_id )
761- return str ( TYPE_SPECIFIER_CUBE ) ,
761+ return DATASET_TYPE . alias ,
762762
763- def get_data_ids (self , type_specifier : Optional [str ] = None ,
763+ def get_data_ids (self ,
764+ data_type : DataTypeLike = None ,
764765 include_attrs : Container [str ] = None ) -> \
765766 Union [Iterator [str ], Iterator [Tuple [str , Dict [str , Any ]]]]:
766-
767- if self ._is_type_specifier_satisfied (type_specifier ):
767+ if self ._is_data_type_satisfied (data_type ):
768768                # Proceed only when the requested data type is compatible
769769 return_tuples = include_attrs is not None
770770 # TODO: respect names other than "title" in include_attrs
@@ -773,38 +773,38 @@ def get_data_ids(self, type_specifier: Optional[str] = None,
773773 for data_id , handler in self ._handler_registry .items ():
774774 if return_tuples :
775775 if include_titles :
776- yield data_id ,\
776+ yield data_id , \
777777 {'title' :
778- handler .get_human_readable_data_id (data_id )}
778+ handler .get_human_readable_data_id (data_id )}
779779 else :
780780 yield data_id , {}
781781 else :
782782 yield data_id
783783
784- def has_data (self , data_id : str , type_specifier : Optional [str ] = None ) \
784+ def has_data (self , data_id : str , data_type : Optional [str ] = None ) \
785785 -> bool :
786- return self ._is_type_specifier_satisfied ( type_specifier ) and \
786+ return self ._is_data_type_satisfied ( data_type ) and \
787787 data_id in self ._handler_registry
788788
789789 def describe_data (self , data_id : str ,
790- type_specifier : Optional [str ] = None ) \
790+ data_type : Optional [str ] = None ) \
791791 -> DatasetDescriptor :
792792 self ._validate_data_id (data_id )
793- self ._validate_type_specifier ( type_specifier )
793+ self ._validate_data_type ( data_type )
794794 return self ._handler_registry [data_id ].describe_data (data_id )
795795
796796 # noinspection PyTypeChecker
797- def search_data (self , type_specifier : Optional [str ] = None ,
797+ def search_data (self , data_type : Optional [DataTypeLike ] = None ,
798798 ** search_params ) \
799799 -> Iterator [DataDescriptor ]:
800- self ._validate_type_specifier ( type_specifier )
801- return super ().search_data (type_specifier = type_specifier ,
800+ self ._validate_data_type ( data_type )
801+ return super ().search_data (data_type = data_type ,
802802 ** search_params )
803803
804804 def get_data_opener_ids (self , data_id : Optional [str ] = None ,
805- type_specifier : Optional [str ] = None ) \
805+ data_type : Optional [str ] = None ) \
806806 -> Tuple [str , ...]:
807- self ._validate_type_specifier ( type_specifier )
807+ self ._validate_data_type ( data_type )
808808 self ._validate_data_id (data_id , allow_none = True )
809809 return CDS_DATA_OPENER_ID ,
810810
@@ -827,23 +827,23 @@ def open_data(self, data_id: str, opener_id: Optional[str] = None,
827827 # Implementation helpers
828828
829829 @staticmethod
830- def _validate_type_specifier ( type_specifier : Union [ str , TypeSpecifier ] ):
831- if not CDSDataStore ._is_type_specifier_satisfied ( type_specifier ):
830+ def _validate_data_type ( data_type : DataTypeLike ):
831+ if not CDSDataStore ._is_data_type_satisfied ( data_type ):
832832 raise DataStoreError (
833- f'Supplied type specifier " { type_specifier } " is not compatible '
834- f'with "{ TYPE_SPECIFIER_CUBE } ."'
833+ f'Supplied data type { data_type !r } is not compatible'
834+ f' with "{ DATASET_TYPE !r } ."'
835835 )
836836
837837 @staticmethod
838- def _is_type_specifier_satisfied (
839- type_specifier : Union [ str , TypeSpecifier ] ) -> bool :
838+ def _is_data_type_satisfied (
839+ data_type : DataTypeLike ) -> bool :
840840 # At present, all datasets are available as cubes, so we simply check
841841 # against TYPE_SPECIFIER_CUBE. If more (non-cube) datasets are added,
842842 # the logic will have to be delegated to CDSDatasetHandler
843843 # implementations.
844- if type_specifier is None :
844+ if data_type is None :
845845 return True
846- return TYPE_SPECIFIER_CUBE . satisfies ( type_specifier )
846+ return DATASET_TYPE . is_super_type_of ( data_type )
847847
848848 @staticmethod
849849 def _assert_valid_opener_id (opener_id ):
0 commit comments