Skip to content

Commit 9eb80b4

Browse files
committed
Merge branch 'main' into yogesh-xxx-demo-todos
2 parents 210d01c + 4596adf commit 9eb80b4

File tree

15 files changed

+486
-378
lines changed

15 files changed

+486
-378
lines changed

CHANGES.md

Lines changed: 29 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,41 @@
1-
## Changes in 1.9.1 (in development)
1+
## Changes in 1.10.1 (in development)
2+
3+
4+
## Changes in 1.10.0
25

36
### Enhancements
47

8+
* Bundled [xcube Viewer 1.6.0](https://github.com/xcube-dev/xcube-viewer/releases/tag/v1.6.0)
9+
that comes with enhanced layer management.
10+
11+
### Other changes
12+
13+
* Make test suite compatible with click >=8.2.0 (#1155)
14+
15+
* Pinned `tornado >=6.0,<6.5` due to an incompatibility with current
16+
xcube server implementation.
17+
18+
## Changes in 1.9.1
19+
20+
### Enhancements
21+
22+
* Bundled [xcube Viewer 1.5.1](https://github.com/xcube-dev/xcube-viewer/releases/tag/v1.5.1)
23+
with many fixes.
24+
525
* Introduced a server-side configuration attribute `EntrypointDatasetId` to specify
626
the initial dataset that should be displayed in the viewer application upon loading. (#1135)
727

828
* Added support for `SortValue` in the server configuration to define dataset sorting
929
within groups displayed in the viewer app's dataset selection dropdown. (#1135)
1030

31+
* Added a new server-side configuration attribute `DatasetGroups` to allow users to define
32+
the display order of dataset groups in the viewer application.
33+
See: https://github.com/xcube-dev/xcube-viewer/issues/521
34+
35+
* Introduced support for a `Description` field under `DatasetGroups`, shown as a tooltip
36+
when hovering over group titles in the viewer.
37+
See: https://github.com/xcube-dev/xcube-viewer/issues/521
38+
1139
### Other changes
1240

1341
* Improved the filesystem data stores (`"file"`, `"s3"`, ...):

environment.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ dependencies:
88
- affine >=2.2
99
- botocore >=1.34.51
1010
- cftime >=1.6.3
11-
- click >=8.0
11+
- click >=8.2.0
1212
- cmocean >=2.0
1313
- dask >=2021.6
1414
- dask-image >=0.6
@@ -39,7 +39,7 @@ dependencies:
3939
- setuptools >=41.0
4040
- shapely >=1.6
4141
- tabulate >=0.9
42-
- tornado >=6.0
42+
- tornado >=6.0,<6.5
4343
- urllib3 >=2.0
4444
- xarray >=2024.7
4545
- zarr >=2.11,<3 # until we can ensure zarr 3 compatibility; see Issue #1102

examples/serve/demo/config.yml

Lines changed: 24 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,16 @@ DatasetAttribution:
2525
#Viewer:
2626
# Configuration:
2727
# Path: s3://<bucket>/<path-to-your-viewer>/<resources>
28+
# Persistence:
29+
# Path: memory://states
30+
31+
DatasetGroups:
32+
- Identifier: first_local
33+
Title: Zarr
34+
Description: This is a zarr dataset group description!
35+
36+
- Identifier: second_local
37+
Title: GeoTIFF
2838

2939
Datasets:
3040
# The first dataset "./cube-1-250-250.levels" is a tile-optimized
@@ -35,7 +45,7 @@ Datasets:
3545
#
3646
- Identifier: local
3747
Title: Local OLCI L2C cube for region SNS
38-
GroupTitle: Zarr
48+
GroupId: first_local
3949
BoundingBox: [0.0, 50, 5.0, 52.5]
4050
FileSystem: file
4151
Path: cube-1-250-250.levels
@@ -64,7 +74,7 @@ Datasets:
6474
# Will not appear at all, because it is a "hidden" resource
6575
- Identifier: local_ts
6676
Title: "'local' optimized for time-series"
67-
GroupTitle: Zarr
77+
GroupId: first_local
6878
BoundingBox: [0.0, 50, 5.0, 52.5]
6979
FileSystem: file
7080
Path: cube-5-100-200.zarr
@@ -93,7 +103,7 @@ Datasets:
93103
# Will only appear for unauthorized clients
94104
- Identifier: local_1w
95105
Title: OLCI weekly L3 cube for region SNS computed from local L2C cube
96-
GroupTitle: Zarr
106+
GroupId: first_local
97107
BoundingBox: [0.0, 50, 5.0, 52.5]
98108
FileSystem: memory
99109
Path: resample_in_time.py
@@ -130,15 +140,24 @@ Datasets:
130140

131141
- Identifier: cog_local
132142
Title: COG example
133-
GroupTitle: GeoTIFF
143+
# If you do not provide a GroupId, this group will then be displayed after the
144+
# groups order in DatasetGroups. Try changing the order
145+
# of the groups in DatasetGroups to see the changes in viewer.
146+
# If you comment out GroupId here, you will get the
147+
# same behaviour because currently it is in second position.
148+
GroupId: second_local
149+
# GroupTitle: GeoTIFF - This should not be there if GroupId is used. Use Title from
150+
# DatasetGroups instead. If you do not want to use DatasetGroups for this group, you
151+
# can use GroupTitle to add a title to your group, but keep in mind, it will be
152+
# sorted after the groups in DatasetGroups. This allows backward compatibility.
134153
FileSystem: file
135154
Path: sample-cog.tif
136155
Style: tif_style
137156
SortValue: 2
138157

139158
- Identifier: geotiff_local
140159
Title: GeoTIFF example
141-
GroupTitle: GeoTIFF
160+
GroupId: second_local
142161
FileSystem: file
143162
Path: sample-geotiff.tif
144163
Style: tif_style

test/cli/helpers.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919

2020
class CliTest(unittest.TestCase, metaclass=ABCMeta):
2121
def invoke_cli(self, args: list[str]):
22-
self.runner = click.testing.CliRunner(mix_stderr=False)
22+
self.runner = click.testing.CliRunner()
2323
# noinspection PyTypeChecker
2424
return self.runner.invoke(cli, args, catch_exceptions=False)
2525

test/util/test_progress.py

Lines changed: 7 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -261,18 +261,15 @@ def test_dask_progress(self):
261261
res = dask.array.random.normal(size=(100, 200), chunks=(25, 50))
262262
with observe_dask_progress("computing", 100):
263263
res.compute()
264-
265264
self.assertEqual(4, len(res.chunks[0]))
266265
self.assertTrue(len(observer.calls) >= 3)
267-
self.assertEqual(
268-
("begin", [("computing", 0.0, False, None)]), observer.calls[0]
269-
)
270-
self.assertEqual(
271-
("update", [("computing", 5 / 16, False, None)]), observer.calls[5]
272-
)
273-
self.assertEqual(
274-
("end", [("computing", 15 / 16, True, None)]), observer.calls[-1]
275-
)
266+
267+
event_types = [call[0] for call in observer.calls]
268+
update_count = event_types.count("update")
269+
270+
self.assertEqual("begin", event_types[0])
271+
self.assertEqual("end", event_types[-1])
272+
self.assertGreater(update_count, 2)
276273

277274

278275
class ProgressStateTest(unittest.TestCase):

xcube/core/resampling/spatial.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,10 @@ def resample_in_space(
9898
the resampling is a direct rectification.
9999
100100
Args:
101-
source_ds: The source dataset.
101+
source_ds: The source dataset. Data variables must have
102+
dimensions in the following order: optional `time` followed
103+
by the y-dimension (e.g., `y` or `lat`) followed by the
104+
x-dimension (e.g., `x` or `lon`).
102105
source_gm: The source grid mapping.
103106
target_gm: The target grid mapping. Must be regular.
104107
ref_ds: An optional dataset that provides the

xcube/core/store/store.py

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
from abc import ABC, abstractmethod
66
from collections.abc import Container, Iterator
7-
from typing import Any, Optional, Union
7+
from typing import Any, Optional, Union, TypeAlias
88

99
from xcube.constants import EXTENSION_POINT_DATA_STORES
1010
from xcube.util.extension import Extension, ExtensionPredicate, ExtensionRegistry
@@ -691,8 +691,19 @@ def deregister_data(self, data_id: str):
691691
"""
692692

693693

694-
class PreloadedDataStore(DataStore):
694+
class Preloaded(DataStore):
695695
"""A preload data store is a multable data store which contains the preload handle.
696+
This class solely acts as a protocol description or marker interface for `DataStore`
697+
instances returned from another data store's `preload_data` method.
698+
699+
The data stores returned from `preload_data` are not required to directly implement this interface.
700+
However, their instances must provide an attribute `preload_handle` of type `PreloadHandle`
701+
that is used to represent the pre-loading process and interact with it.
702+
703+
This approach helps make the source code more understandable even though instances
704+
of this class are not used at runtime. Instead, a different subclass of
705+
`DataStore` or `MutableDataStore` that includes this additional property is
706+
returned.
696707
697708
Instances of this class are returned by the ``DataStore.preload_data()`` method.
698709
"""
@@ -704,3 +715,6 @@ def preload_handle(self) -> PreloadHandle:
704715
Implementors of this interface may use a `ExecutorPreloadHandle` or consider
705716
returning a `NullPreloadHandle` if the progress is not observable.
706717
"""
718+
719+
720+
PreloadedDataStore: TypeAlias = DataStore | Preloaded

xcube/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,4 +2,4 @@
22
# Permissions are hereby granted under the terms of the MIT License:
33
# https://opensource.org/licenses/MIT.
44

5-
version = "1.9.1.dev0"
5+
version = "1.10.1.dev0"

xcube/webapi/datasets/config.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,7 @@
6767
Title=STRING_SCHEMA,
6868
Description=STRING_SCHEMA,
6969
GroupTitle=STRING_SCHEMA,
70+
GroupId=STRING_SCHEMA,
7071
SortValue=NUMBER_SCHEMA,
7172
Tags=JsonArraySchema(items=STRING_SCHEMA),
7273
Variables=VARIABLES_SCHEMA,
@@ -248,11 +249,20 @@
248249
additional_properties=False,
249250
)
250251

252+
DATASET_GROUPS_SCHEMA = JsonObjectSchema(
253+
properties=dict(
254+
Identifier=IDENTIFIER_SCHEMA, Title=STRING_SCHEMA, Description=STRING_SCHEMA
255+
),
256+
required=["Identifier", "Title"],
257+
additional_properties=False,
258+
)
259+
251260
CONFIG_SCHEMA = JsonObjectSchema(
252261
properties=dict(
253262
DatasetAttribution=ATTRIBUTION_SCHEMA,
254263
AccessControl=ACCESS_CONTROL_SCHEMA,
255264
DatasetChunkCacheSize=CHUNK_SIZE_SCHEMA,
265+
DatasetGroups=JsonArraySchema(items=DATASET_GROUPS_SCHEMA),
256266
Datasets=JsonArraySchema(items=DATASET_CONFIG_SCHEMA),
257267
DataStores=JsonArraySchema(items=DATA_STORE_SCHEMA),
258268
Styles=JsonArraySchema(items=STYLE_SCHEMA),

xcube/webapi/datasets/context.py

Lines changed: 13 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -88,6 +88,7 @@ def __init__(self, server_ctx: Context, ml_dataset_openers=None):
8888
self._data_store_pool,
8989
self._dataset_configs,
9090
self.entrypoint_dataset_id,
91+
self._dataset_groups_config,
9192
) = self._process_dataset_configs(self.config, self.base_dir)
9293
self._cm_styles, self._colormap_registry = self._get_cm_styles()
9394

@@ -469,6 +470,10 @@ def get_dataset_configs(self) -> list[DatasetConfig]:
469470
assert self._dataset_configs is not None
470471
return self._dataset_configs
471472

473+
def get_dataset_groups_configs(self) -> list[DatasetConfig]:
474+
assert self._dataset_groups_config is not None
475+
return self._dataset_groups_config
476+
472477
def get_entrypoint_dataset_id(self) -> str | None:
473478
if self.entrypoint_dataset_id:
474479
return self.entrypoint_dataset_id
@@ -481,10 +486,11 @@ def get_data_store_pool(self) -> DataStorePool:
481486
@classmethod
482487
def _process_dataset_configs(
483488
cls, config: ServerConfig, base_dir: str
484-
) -> tuple[DataStorePool, list[dict[str, Any]], str]:
489+
) -> tuple[DataStorePool, list[dict[str, Any]], str, list[dict[str, Any]]]:
485490
data_store_configs = config.get("DataStores", [])
486491
dataset_configs = config.get("Datasets", [])
487492
entrypoint_dataset_id = config.get("EntrypointDatasetId", "")
493+
dataset_groups_configs = config.get("DatasetGroups", [])
488494

489495
data_store_pool = DataStorePool()
490496
for data_store_config_dict in data_store_configs:
@@ -504,7 +510,12 @@ def _process_dataset_configs(
504510
# entries:
505511
dataset_configs = [dict(c) for c in dataset_configs]
506512
cls._maybe_assign_store_instance_ids(dataset_configs, data_store_pool, base_dir)
507-
return data_store_pool, dataset_configs, entrypoint_dataset_id
513+
return (
514+
data_store_pool,
515+
dataset_configs,
516+
entrypoint_dataset_id,
517+
dataset_groups_configs,
518+
)
508519

509520
def get_rgb_color_mapping(
510521
self, ds_id: str, norm_range: tuple[float, float] = (0.0, 1.0)

0 commit comments

Comments
 (0)