Skip to content

Commit 7d9b2d7

Browse files
authored
Merge pull request #53 from dcs4cop/forman-509-simpler_data_type
Replace TypeSpecifier by simpler DataType
2 parents cd1ec25 + 5e18d58 commit 7d9b2d7

File tree

4 files changed

+63
-66
lines changed

4 files changed

+63
-66
lines changed

test/test_era5.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -27,13 +27,13 @@
2727

2828
import unittest
2929

30-
import xcube
31-
import xcube.core
3230
from jsonschema import ValidationError
33-
from xcube.core.store import TYPE_SPECIFIER_CUBE
34-
from xcube.core.store import VariableDescriptor
3531

32+
import xcube
33+
import xcube.core
3634
from test.mocks import CDSClientMock
35+
from xcube.core.store import DATASET_TYPE
36+
from xcube.core.store import VariableDescriptor
3737
from xcube_cds.store import CDSDataOpener
3838
from xcube_cds.store import CDSDataStore
3939

@@ -179,8 +179,8 @@ def test_open_data_null_variables_list(self):
179179
store = CDSDataStore(client_class=CDSClientMock,
180180
endpoint_url=_CDS_API_URL,
181181
cds_api_key=_CDS_API_KEY)
182-
data_id = 'reanalysis-era5-single-levels-monthly-means:'\
183-
'monthly_averaged_reanalysis'
182+
data_id = 'reanalysis-era5-single-levels-monthly-means:' \
183+
'monthly_averaged_reanalysis'
184184
schema = store.get_open_data_params_schema(data_id)
185185
n_vars = len(schema.properties['variable_names'].items.enum)
186186
dataset = store.open_data(
@@ -214,11 +214,11 @@ def test_era5_describe_data(self):
214214
self.assertTupleEqual(('time', 'latitude', 'longitude'),
215215
vd.dims)
216216

217-
def test_get_type_specifiers_for_data(self):
217+
def test_get_data_types_for_data(self):
218218
store = CDSDataStore()
219219
self.assertEqual(
220-
(TYPE_SPECIFIER_CUBE, ),
221-
store.get_type_specifiers_for_data('reanalysis-era5-land')
220+
(DATASET_TYPE.alias,),
221+
store.get_data_types_for_data('reanalysis-era5-land')
222222
)
223223

224224
def test_has_data_true(self):

test/test_store.py

Lines changed: 25 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -49,12 +49,9 @@
4949

5050
import xcube
5151
import xcube.core
52-
from xcube.core.store import DataDescriptor
53-
from xcube.core.store import DataStoreError
54-
from xcube.core.store import TYPE_SPECIFIER_CUBE
55-
from xcube.core.store import TYPE_SPECIFIER_DATASET
56-
5752
from test.mocks import CDSClientMock
53+
from xcube.core.store import DATASET_TYPE
54+
from xcube.core.store import DataDescriptor
5855
from xcube_cds.constants import CDS_DATA_OPENER_ID
5956
from xcube_cds.datasets.reanalysis_era5 import ERA5DatasetHandler
6057
from xcube_cds.store import CDSDataOpener
@@ -111,13 +108,13 @@ def test_get_open_params_schema_without_data_id(self):
111108
actual['properties'].keys()
112109
)
113110

114-
def test_search_data_invalid_id(self):
111+
def test_search_data_invalid_data_type(self):
115112
store = CDSDataStore(endpoint_url=_CDS_API_URL,
116113
cds_api_key=_CDS_API_KEY)
117-
with self.assertRaises(DataStoreError):
118-
store.search_data('This is an invalid ID.')
114+
with self.assertRaises(ValueError):
115+
store.search_data(data_type='This is an invalid data type.')
119116

120-
def test_search_data_valid_id(self):
117+
def test_search_data_valid_data_type(self):
121118
store = CDSDataStore(endpoint_url=_CDS_API_URL,
122119
cds_api_key=_CDS_API_KEY)
123120
result = list(store.search_data('dataset'))
@@ -133,27 +130,31 @@ def test_get_data_store_params_schema(self):
133130
'additionalProperties': False
134131
}, CDSDataStore.get_data_store_params_schema().to_dict())
135132

136-
def test_get_type_specifiers(self):
137-
type_specifiers = CDSDataStore.get_type_specifiers()
138-
self.assertEqual(1, len(type_specifiers))
139-
self.assertIsInstance(type_specifiers[0], str)
140-
self.assertTupleEqual(('dataset[cube]',), type_specifiers)
133+
def test_get_data_types(self):
134+
data_types = CDSDataStore.get_data_types()
135+
self.assertEqual(1, len(data_types))
136+
self.assertIsInstance(data_types[0], str)
137+
self.assertTupleEqual((DATASET_TYPE.alias,), data_types)
141138

142139
def test_has_data_false(self):
143140
self.assertFalse(CDSDataStore().has_data('nonexistent data ID'))
144141

145-
def test_get_data_opener_ids_invalid_type_id(self):
146-
with self.assertRaises(DataStoreError):
147-
CDSDataStore().get_data_opener_ids(CDS_DATA_OPENER_ID,
148-
'this is an invalid ID')
142+
def test_get_data_opener_ids_invalid_data_type(self):
143+
with self.assertRaises(ValueError):
144+
CDSDataStore().get_data_opener_ids(
145+
data_id=CDS_DATA_OPENER_ID,
146+
data_type='this is an invalid data type'
147+
)
149148

150-
def test_get_data_opener_ids_invalid_opener_id(self):
149+
def test_get_data_opener_ids_invalid_data_id(self):
151150
with self.assertRaises(ValueError):
152-
CDSDataStore().get_data_opener_ids('this is an invalid ID',
153-
TYPE_SPECIFIER_DATASET)
151+
CDSDataStore().get_data_opener_ids(
152+
data_id='this is an invalid data ID',
153+
data_type=DATASET_TYPE
154+
)
154155

155156
def test_get_data_opener_ids_with_default_arguments(self):
156-
self.assertTupleEqual((CDS_DATA_OPENER_ID, ),
157+
self.assertTupleEqual((CDS_DATA_OPENER_ID,),
157158
CDSDataStore().get_data_opener_ids())
158159

159160
def test_get_store_open_params_schema_without_data_id(self):
@@ -166,9 +167,8 @@ def test_get_data_ids(self):
166167
store = CDSDataStore(client_class=CDSClientMock,
167168
endpoint_url=_CDS_API_URL,
168169
cds_api_key=_CDS_API_KEY)
169-
self.assertEqual([], list(store.get_data_ids('unsupported_type_spec')))
170-
self.assertEqual([],
171-
list(store.get_data_ids('dataset[unsupported_flag]')))
170+
with self.assertRaises(ValueError):
171+
list(store.get_data_ids(data_type='unsupported_data_type'))
172172

173173
# The number of available datasets is expected to increase over time,
174174
# so to avoid overfitting the test we just check that more than a few
@@ -177,8 +177,6 @@ def test_get_data_ids(self):
177177
minimum_expected_datasets = 5
178178
self.assertGreater(len(list(store.get_data_ids('dataset'))),
179179
minimum_expected_datasets)
180-
self.assertGreater(len(list(store.get_data_ids('dataset[cube]'))),
181-
minimum_expected_datasets)
182180

183181
def test_era5_transform_params_empty_variable_list(self):
184182
handler = ERA5DatasetHandler()
@@ -190,7 +188,6 @@ def test_era5_transform_params_empty_variable_list(self):
190188

191189

192190
class ClientUrlTest(unittest.TestCase):
193-
194191
"""Tests connected with passing CDS API URL and key to opener or store."""
195192

196193
def setUp(self):

xcube_cds/store.py

Lines changed: 28 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -46,14 +46,14 @@
4646
import xarray as xr
4747

4848
import xcube.core.normalize
49+
from xcube.core.store import DATASET_TYPE
4950
from xcube.core.store import DataDescriptor
5051
from xcube.core.store import DataOpener
5152
from xcube.core.store import DataStore
5253
from xcube.core.store import DataStoreError
54+
from xcube.core.store import DataTypeLike
5355
from xcube.core.store import DatasetDescriptor
5456
from xcube.core.store import DefaultSearchMixin
55-
from xcube.core.store import TYPE_SPECIFIER_CUBE
56-
from xcube.core.store import TypeSpecifier
5757
from xcube.util.jsonschema import JsonArraySchema
5858
from xcube.util.jsonschema import JsonBooleanSchema
5959
from xcube.util.jsonschema import JsonDateSchema
@@ -753,18 +753,18 @@ def get_data_store_params_schema(cls) -> JsonObjectSchema:
753753
)
754754

755755
@classmethod
756-
def get_type_specifiers(cls) -> Tuple[str, ...]:
757-
return str(TYPE_SPECIFIER_CUBE),
756+
def get_data_types(cls) -> Tuple[str, ...]:
757+
return DATASET_TYPE.alias,
758758

759-
def get_type_specifiers_for_data(self, data_id: str) -> Tuple[str, ...]:
759+
def get_data_types_for_data(self, data_id: str) -> Tuple[str, ...]:
760760
self._validate_data_id(data_id)
761-
return str(TYPE_SPECIFIER_CUBE),
761+
return DATASET_TYPE.alias,
762762

763-
def get_data_ids(self, type_specifier: Optional[str] = None,
763+
def get_data_ids(self,
764+
data_type: DataTypeLike = None,
764765
include_attrs: Container[str] = None) -> \
765766
Union[Iterator[str], Iterator[Tuple[str, Dict[str, Any]]]]:
766-
767-
if self._is_type_specifier_satisfied(type_specifier):
767+
if self._is_data_type_satisfied(data_type):
768768
# Only if the type specifier isn't compatible
769769
return_tuples = include_attrs is not None
770770
# TODO: respect names other than "title" in include_attrs
@@ -773,38 +773,38 @@ def get_data_ids(self, type_specifier: Optional[str] = None,
773773
for data_id, handler in self._handler_registry.items():
774774
if return_tuples:
775775
if include_titles:
776-
yield data_id,\
776+
yield data_id, \
777777
{'title':
778-
handler.get_human_readable_data_id(data_id)}
778+
handler.get_human_readable_data_id(data_id)}
779779
else:
780780
yield data_id, {}
781781
else:
782782
yield data_id
783783

784-
def has_data(self, data_id: str, type_specifier: Optional[str] = None) \
784+
def has_data(self, data_id: str, data_type: Optional[str] = None) \
785785
-> bool:
786-
return self._is_type_specifier_satisfied(type_specifier) and \
786+
return self._is_data_type_satisfied(data_type) and \
787787
data_id in self._handler_registry
788788

789789
def describe_data(self, data_id: str,
790-
type_specifier: Optional[str] = None) \
790+
data_type: Optional[str] = None) \
791791
-> DatasetDescriptor:
792792
self._validate_data_id(data_id)
793-
self._validate_type_specifier(type_specifier)
793+
self._validate_data_type(data_type)
794794
return self._handler_registry[data_id].describe_data(data_id)
795795

796796
# noinspection PyTypeChecker
797-
def search_data(self, type_specifier: Optional[str] = None,
797+
def search_data(self, data_type: Optional[DataTypeLike] = None,
798798
**search_params) \
799799
-> Iterator[DataDescriptor]:
800-
self._validate_type_specifier(type_specifier)
801-
return super().search_data(type_specifier=type_specifier,
800+
self._validate_data_type(data_type)
801+
return super().search_data(data_type=data_type,
802802
**search_params)
803803

804804
def get_data_opener_ids(self, data_id: Optional[str] = None,
805-
type_specifier: Optional[str] = None) \
805+
data_type: Optional[str] = None) \
806806
-> Tuple[str, ...]:
807-
self._validate_type_specifier(type_specifier)
807+
self._validate_data_type(data_type)
808808
self._validate_data_id(data_id, allow_none=True)
809809
return CDS_DATA_OPENER_ID,
810810

@@ -827,23 +827,23 @@ def open_data(self, data_id: str, opener_id: Optional[str] = None,
827827
# Implementation helpers
828828

829829
@staticmethod
830-
def _validate_type_specifier(type_specifier: Union[str, TypeSpecifier]):
831-
if not CDSDataStore._is_type_specifier_satisfied(type_specifier):
830+
def _validate_data_type(data_type: DataTypeLike):
831+
if not CDSDataStore._is_data_type_satisfied(data_type):
832832
raise DataStoreError(
833-
f'Supplied type specifier "{type_specifier}" is not compatible '
834-
f'with "{TYPE_SPECIFIER_CUBE}."'
833+
f'Supplied data type {data_type!r} is not compatible'
834+
f' with "{DATASET_TYPE!r}."'
835835
)
836836

837837
@staticmethod
838-
def _is_type_specifier_satisfied(
839-
type_specifier: Union[str, TypeSpecifier]) -> bool:
838+
def _is_data_type_satisfied(
839+
data_type: DataTypeLike) -> bool:
840840
# At present, all datasets are available as cubes, so we simply check
841841
# against TYPE_SPECIFIER_CUBE. If more (non-cube) datasets are added,
842842
# the logic will have to be delegated to CDSDatasetHandler
843843
# implementations.
844-
if type_specifier is None:
844+
if data_type is None:
845845
return True
846-
return TYPE_SPECIFIER_CUBE.satisfies(type_specifier)
846+
return DATASET_TYPE.is_super_type_of(data_type)
847847

848848
@staticmethod
849849
def _assert_valid_opener_id(opener_id):

xcube_cds/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,4 +20,4 @@
2020
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
2121
# SOFTWARE.
2222

23-
version = '0.8.2.dev0'
23+
version = '0.9.0.dev0'

0 commit comments

Comments (0)