Commit 1eee52c

Merge branch 'master' into forman-xxx-add_spatial_ref

2 parents: 955a2d3 + ff02034

11 files changed: +405 −166 lines

.github/workflows/xcube_workflow.yaml

Lines changed: 40 additions & 20 deletions

```diff
@@ -5,6 +5,11 @@ on:
   release:
     types: [published]
 
+env:
+  APP_NAME: xcube
+  ORG_NAME: bcdev
+  IMG_REG_NAME: quay.io
+
 jobs:
   unittest:
     runs-on: ubuntu-latest
@@ -38,17 +43,14 @@ jobs:
           verbose: true # optional (default = false)
   build-docker-image:
     runs-on: ubuntu-latest
-    # Only run if unittests succeed
-    needs: unittest
     # Build the docker image and push to quay.io
     name: build-docker-image
-    env:
-      APP_NAME: xcube
-      ORG_NAME: bcdev
+    # Only run if unittests succeed
+    needs: unittest
     steps:
       - name: git-checkout
         uses: actions/checkout@v2
-      # Strip the release tag from refs
+      # Determine release tag from git ref
      - name: get-release-tag
        id: release
        run: echo ::set-output name=tag::${GITHUB_REF#refs/*/}
@@ -65,26 +67,23 @@ jobs:
         with:
           image: ${{ env.ORG_NAME }}/${{ env.APP_NAME }}
           tags: master, latest
-          registry: quay.io
-          username: ${{ secrets.QUAY_DOCKER_REPO_USERNAME }}
-          password: ${{ secrets.QUAY_DOCKER_REPO_PASSWORD }}
+          registry: ${{ env.IMG_REG_NAME }}
+          username: ${{ secrets.IMG_REG_USERNAME }}
+          password: ${{ secrets.IMG_REG_PASSWORD }}
       # Build and push docker release to quay.io when the event is a 'release'
       - uses: mr-smithers-excellent/docker-build-push@v5
         name: build-push-docker-image-release
         if: ${{ github.event_name == 'release' }}
         with:
           image: ${{ env.ORG_NAME }}/${{ env.APP_NAME }}
           tags: ${{ steps.release.outputs.tag }}
-          registry: quay.io
-          username: ${{ secrets.QUAY_DOCKER_REPO_USERNAME }}
-          password: ${{ secrets.QUAY_DOCKER_REPO_PASSWORD }}
+          registry: ${{ env.IMG_REG_NAME }}
+          username: ${{ secrets.IMG_REG_USERNAME }}
+          password: ${{ secrets.IMG_REG_PASSWORD }}
   update-version:
-    env:
-      PUSH: 0
-      APP_NAME: xcube
     runs-on: ubuntu-latest
     needs: build-docker-image
-    name: update-tag
+    name: update-xcube-tag
     steps:
       - name: git-checkout
         uses: actions/checkout@v2
@@ -105,7 +104,7 @@ jobs:
       - name: get-hash
         id: get-hash
         run: |
-          HASH=$(skopeo inspect docker://quay.io/bcdev/${{ env.APP_NAME }}:${{ steps.release.outputs.tag }} | jq '.Digest')
+          HASH=$(skopeo inspect docker://${{ env.IMG_REG_NAME }}/${{ env.ORG_NAME }}/${{ env.APP_NAME }}:${{ steps.release.outputs.tag }} | jq '.Digest')
           if [[ "$HASH" == *"sha256"* ]]; then
             echo ::set-output name=hash::$HASH
           else
@@ -128,13 +127,34 @@ jobs:
           delimiter: ' '
           tag: ${{ steps.deployment-phase.outputs.tag }}
           hash: ${{ steps.get-hash.outputs.hash }}
-          working-directory: "./k8s/xcube-gen/helm"
+          working-directory: ./k8s/xcube-gen/helm
       - name: cat-result
-        working-directory: "./k8s/xcube-gen/helm"
+        working-directory: ./k8s/xcube-gen/helm
         run: |
           head values-dev.yaml
           head values-stage.yaml
           head values-prod.yaml
+      # Update the xcube viewer app version tag in k8s-configs/xcube-viewer-app
+      - name: set-version-tag-xcube-viewer-app
+        uses: bc-org/gha-update-application-version-tags@main
+        with:
+          app: xcube
+          phase: ${{ steps.deployment-phase.outputs.phase }}
+          delimiter: ' '
+          tag: ${{ steps.deployment-phase.outputs.tag }}
+          hash: ${{ steps.get-hash.outputs.hash }}
+          working-directory: ./k8s/xcube-viewer-app/xcube-api
+          prefix: values-*
+      # Check results
+      - name: cat-result
+        working-directory: ./k8s/xcube-viewer-app/xcube-api
+        run: |
+          echo "----------------DEV-------------------"
+          head values-*-dev.yaml
+          echo "----------------STAGE-----------------"
+          head values-*-stage.yaml
+          echo "----------------PROD------------------"
+          head values-*-prod.yaml
       - name: Pushes to another repository
         # Don't run if run locally and should be ignored
         if: ${{ steps.deployment-phase.outputs.phase != 'ignore' && !env.ACT }}
@@ -147,4 +167,4 @@ jobs:
           destination-repository-name: 'k8s-configs'
           user-email: [email protected]
           target-branch: main
-          commit-message: ${{ github.event.release }}. Set version to ${{ steps.release.outputs.tag }}
+          commit-message: ${{ github.event.release }}. Set version to ${{ steps.release.outputs.tag }} and ${{ steps.release.outputs.hash }}
```
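The `get-hash` step above pins the deployed image to an immutable content digest rather than a mutable tag. A minimal Python sketch of the same logic (the function name is hypothetical; it assumes `skopeo` is installed, whose `inspect` output is JSON with a `Digest` field):

```python
import json
import subprocess
from typing import Optional


def get_image_digest(registry: str, org: str, app: str,
                     tag: str) -> Optional[str]:
    """Resolve an image tag to its content digest, as the get-hash
    workflow step does with `skopeo inspect ... | jq '.Digest'`."""
    ref = f'docker://{registry}/{org}/{app}:{tag}'
    result = subprocess.run(['skopeo', 'inspect', ref],
                            capture_output=True, text=True, check=True)
    digest = json.loads(result.stdout)['Digest']
    # Mirror the workflow's sanity check: accept only sha256 digests.
    return digest if 'sha256' in digest else None
```

Pinning by digest means later re-tagging in the registry cannot silently change what the k8s configs deploy.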

CHANGES.md

Lines changed: 14 additions & 0 deletions

```diff
@@ -2,8 +2,22 @@
 
 ### Enhancements
 
+* Introduced parameter `base_dataset_id` for writing multi-level
+  datasets with the "file", "s3", and "memory" data stores.
+  If given, the base dataset will be linked only with the
+  value of `base_dataset_id`, instead of being copied as-is.
+  This can save large amounts of storage space. (#617)
+
 ### Fixes
 
+* Fixed `FsDataAccessor.write_data()` implementations,
+  which now always return the passed in `data_id`. (#623)
+
+* Fixes an issue where some datasets seemed to be shifted in the
+  y-(latitude-) direction and were misplaced on maps whose tiles
+  are served by `xcube serve`. Images with ascending y-values are
+  now tiled correctly. (#626)
+
 ### Other
 
 * Replace the dependency on the rfc3339-validator PyPI package with a
```
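A hedged usage sketch of the enhancement and the first fix above (#617, #623). The store protocol and paths are illustrative; `new_cube` and `new_data_store` are the existing xcube factories:

```python
from xcube.core.new import new_cube
from xcube.core.store import new_data_store

cube = new_cube(variables=dict(A=8.5, B=9.5))
store = new_data_store('memory', root='cubes')

# Per #623, write_data() now returns the data_id that was passed in.
base_id = store.write_data(cube, 'base-ds.zarr')
assert base_id == 'base-ds.zarr'

# Per #617, the written multi-level dataset links its base level to
# the existing Zarr dataset instead of copying it, which can save
# large amounts of storage.
store.write_data(cube, 'ds.levels', base_dataset_id=base_id)
```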

test/core/store/fs/test_registry.py

Lines changed: 74 additions & 13 deletions

```diff
@@ -1,7 +1,8 @@
 import os.path
 import unittest
+import warnings
 from abc import ABC, abstractmethod
-from typing import Type, Union
+from typing import Any, Dict, Optional, Type, Union
 
 import fsspec
 import xarray as xr
@@ -38,7 +39,7 @@ def prepare_fs(cls, fs: fsspec.AbstractFileSystem, root: str):
         # print(f'{fs.protocol}: making root {root}')
         fs.mkdirs(root)
 
-        # Write a text file into each subdirectory so
+        # Write a text file into each subdirectory, so
         # we also test that store.get_data_ids() scans
         # recursively.
         dir_path = root
@@ -54,6 +55,7 @@ def prepare_fs(cls, fs: fsspec.AbstractFileSystem, root: str):
     def test_mldataset_levels(self):
         data_store = self.create_data_store()
         self.assertMultiLevelDatasetFormatSupported(data_store)
+        self.assertMultiLevelDatasetFormatWithLinkSupported(data_store)
 
     def test_dataset_zarr(self):
         data_store = self.create_data_store()
@@ -73,6 +75,47 @@ def assertMultiLevelDatasetFormatSupported(self,
                                     MultiLevelDataset,
                                     MultiLevelDatasetDescriptor)
 
+        # Test that use_saved_levels works
+        self.assertDatasetSupported(data_store,
+                                    '.levels',
+                                    'mldataset',
+                                    MultiLevelDataset,
+                                    MultiLevelDatasetDescriptor,
+                                    write_params=dict(
+                                        use_saved_levels=True,
+                                    ))
+
+    def assertMultiLevelDatasetFormatWithLinkSupported(
+            self,
+            data_store: MutableDataStore
+    ):
+        base_dataset = self.new_cube_data()
+        base_dataset_id = f'{DATA_PATH}/base-ds.zarr'
+        data_store.write_data(base_dataset, base_dataset_id)
+
+        # Test that base_dataset_id works
+        self.assertDatasetSupported(data_store,
+                                    '.levels',
+                                    'mldataset',
+                                    MultiLevelDataset,
+                                    MultiLevelDatasetDescriptor,
+                                    write_params=dict(
+                                        base_dataset_id=base_dataset_id,
+                                    ))
+
+        # Test that base_dataset_id + use_saved_levels works
+        self.assertDatasetSupported(data_store,
+                                    '.levels',
+                                    'mldataset',
+                                    MultiLevelDataset,
+                                    MultiLevelDatasetDescriptor,
+                                    write_params=dict(
+                                        base_dataset_id=base_dataset_id,
+                                        use_saved_levels=True,
+                                    ))
+
+        data_store.delete_data(base_dataset_id)
+
     def assertDatasetFormatSupported(self,
                                      data_store: MutableDataStore,
                                      filename_ext: str):
@@ -89,8 +132,12 @@ def assertDatasetSupported(
             expected_data_type_alias: str,
             expected_type: Union[Type[xr.Dataset],
                                  Type[MultiLevelDataset]],
-            expected_descriptor_type: Union[Type[DatasetDescriptor],
-                                            Type[MultiLevelDatasetDescriptor]]
+            expected_descriptor_type: Union[
+                Type[DatasetDescriptor],
+                Type[MultiLevelDatasetDescriptor]
+            ],
+            write_params: Optional[Dict[str, Any]] = None,
+            open_params: Optional[Dict[str, Any]] = None,
     ):
         """
         Call all DataStore operations to ensure data of type
@@ -102,10 +149,15 @@ def assertDatasetSupported(
         :param expected_data_type_alias: The expected data type alias.
         :param expected_type: The expected data type.
         :param expected_descriptor_type: The expected data descriptor type.
+        :param write_params: Optional write parameters
+        :param open_params: Optional open parameters
         """
 
         data_id = f'{DATA_PATH}/ds{filename_ext}'
 
+        write_params = write_params or {}
+        open_params = open_params or {}
+
         self.assertIsInstance(data_store, MutableDataStore)
 
         self.assertEqual({'dataset', 'mldataset', 'geodataframe'},
@@ -114,35 +166,44 @@ def assertDatasetSupported(
         with self.assertRaises(DataStoreError):
             data_store.get_data_types_for_data(data_id)
         self.assertEqual(False, data_store.has_data(data_id))
-        self.assertEqual([], list(data_store.get_data_ids()))
+        self.assertNotIn(data_id, set(data_store.get_data_ids()))
+
+        data = self.new_cube_data()
+        written_data_id = data_store.write_data(data, data_id, **write_params)
+        self.assertEqual(data_id, written_data_id)
 
-        data = new_cube(variables=dict(A=8, B=9))
-        data_store.write_data(data, data_id)
         self.assertEqual({expected_data_type_alias},
                          set(data_store.get_data_types_for_data(data_id)))
         self.assertEqual(True, data_store.has_data(data_id))
-        self.assertEqual([data_id], list(data_store.get_data_ids()))
+        self.assertIn(data_id, set(data_store.get_data_ids()))
 
-        data_descriptors = list(data_store.search_data())
+        data_descriptors = list(data_store.search_data(
+            data_type=expected_type)
+        )
         self.assertEqual(1, len(data_descriptors))
         self.assertIsInstance(data_descriptors[0], DataDescriptor)
         self.assertIsInstance(data_descriptors[0], expected_descriptor_type)
 
-        data = data_store.open_data(data_id)
+        data = data_store.open_data(data_id, **open_params)
         self.assertIsInstance(data, expected_type)
 
         try:
             data_store.delete_data(data_id)
-        except PermissionError:  # Typically occurs on win32 due to fsspec
+        except PermissionError as e:  # May occur on win32 due to fsspec
+            warnings.warn(f'{e}')
             return
         with self.assertRaises(DataStoreError):
             data_store.get_data_types_for_data(data_id)
         self.assertEqual(False, data_store.has_data(data_id))
-        self.assertEqual([], list(data_store.get_data_ids()))
+        self.assertNotIn(data_id, set(data_store.get_data_ids()))
 
+    @staticmethod
+    def new_cube_data():
+        cube = new_cube(variables=dict(A=8.5, B=9.5))
+        return cube.chunk(dict(time=1, lat=90, lon=180))
 
-class FileFsDataStoresTest(FsDataStoresTestMixin, unittest.TestCase):
 
+class FileFsDataStoresTest(FsDataStoresTestMixin, unittest.TestCase):
     def create_data_store(self) -> FsDataStore:
         root = os.path.join(new_temp_dir(prefix='xcube'), ROOT_DIR)
         self.prepare_fs(fsspec.filesystem('file'), root)
```
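`FsDataStoresTestMixin` keeps the store-specific setup in `create_data_store()`, so covering another fsspec protocol only needs a small subclass. A sketch, assuming `new_fs_data_store` is the registry factory under test in this module (the "memory" variant needs no temp directory):

```python
class MemoryFsDataStoresTest(FsDataStoresTestMixin, unittest.TestCase):

    def create_data_store(self) -> FsDataStore:
        # Assumption: new_fs_data_store is the factory this registry
        # module provides; 'memory' maps to fsspec's in-memory
        # filesystem, so no local temp directory is required.
        root = ROOT_DIR
        self.prepare_fs(fsspec.filesystem('memory'), root)
        return new_fs_data_store('memory', root=root)
```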
