Skip to content

Commit 4f393dc

Browse files
Adding multi-arch support for the GraceHoppers
1 parent 83d6013 commit 4f393dc

File tree

2 files changed

+192
-57
lines changed

2 files changed

+192
-57
lines changed

.github/workflows/dockerhub.yml

Lines changed: 150 additions & 38 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,72 @@
1-
name: QEFM to Docker Hub
1+
name: CI to Dockerhub - Multi-arch
22

33
on:
4-
workflow_dispatch:
5-
push:
6-
branches:
7-
# - 'deployment-operations'
8-
- 'main'
4+
release:
5+
types: [published] # Trigger only when a release is published (the `created` activity type is different: it also fires for drafts)
6+
workflow_dispatch: # Keep only manual trigger
7+
98
jobs:
10-
docker:
9+
docker-amd:
10+
runs-on: ubuntu-latest
11+
steps:
12+
- name: Checkout
13+
uses: actions/checkout@v3
14+
15+
- name: Set up QEMU
16+
uses: docker/setup-qemu-action@v2
17+
18+
- name: Set up Docker Buildx
19+
uses: docker/setup-buildx-action@v2
20+
21+
- name: Login to Docker Hub
22+
uses: docker/login-action@v2
23+
with:
24+
username: ${{ secrets.DOCKERHUB_USERNAME }}
25+
password: ${{ secrets.DOCKERHUB_TOKEN }}
26+
27+
- name: Free up disk space
28+
uses: jlumbroso/free-disk-space@main
29+
with:
30+
tool-cache: true
31+
android: true
32+
dotnet: true
33+
haskell: true
34+
large-packages: true
35+
docker-images: true
36+
swap-storage: true
37+
38+
- name: Prune docker images and swap
39+
run: |
40+
sudo swapoff -a
41+
sudo rm -f /swapfile
42+
sudo apt clean
43+
docker image prune -a -f
44+
df -h
45+
46+
- name: Set build versions
47+
id: set_versions
48+
env:
49+
DEP_REPO: nasa-nccs-hpda/GenCast_FP
50+
run: |
51+
DEP_TAG=$(curl -s https://api.github.com/repos/${DEP_REPO}/releases/latest | jq -r .tag_name)
52+
echo "GenCastFP_TAG=$DEP_TAG" >> $GITHUB_ENV
53+
echo "VERSION_TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
54+
echo "Using ${DEP_REPO} release tag: $DEP_TAG"
55+
echo "Workflow release version: ${GITHUB_REF#refs/tags/}"
56+
57+
- name: Build and push
58+
uses: docker/build-push-action@v6
59+
with:
60+
context: .
61+
platforms: linux/amd64
62+
file: ./requirements/Dockerfile
63+
push: true
64+
tags: nasanccs/gencast-fp:${{ env.GenCastFP_TAG }}-amd64
65+
build-args: |
66+
GenCastFP_TAG=${{ env.GenCastFP_TAG }}
67+
68+
69+
docker-arm:
1170
runs-on: ubuntu-latest
1271
steps:
1372
-
@@ -25,6 +84,7 @@ jobs:
2584
with:
2685
username: ${{ secrets.DOCKERHUB_USERNAME }}
2786
password: ${{ secrets.DOCKERHUB_TOKEN }}
87+
2888
-
2989
name: Lower github-runner storage
3090
run: |
@@ -42,41 +102,93 @@ jobs:
42102
/usr/local/share/chromium \
43103
/usr/local/share/powershell \
44104
/usr/share/dotnet \
45-
/usr/share/swift
105+
/usr/share/swift \
106+
/opt/ghc \
107+
/usr/local/share/boost
46108
df -h /
47-
48-
#-
49-
# name: Clean up tools (https://github.com/marketplace/actions/free-disk-space-ubuntu)
50-
# uses: jlumbroso/free-disk-space@main
51-
# with:
52-
# # this might remove tools that are actually needed,
53-
# # if set to "true" but frees about 6 GB
54-
# tool-cache: true
55-
#
56-
# # all of these default to true, but feel free to set to
57-
# # "false" if necessary for your workflow
58-
# android: true
59-
# dotnet: true
60-
# haskell: true
61-
# large-packages: true
62-
# docker-images: true
63-
# swap-storage: true
109+
-
110+
name: Clean up tools
111+
uses: jlumbroso/free-disk-space@main
112+
with:
113+
# this might remove tools that are actually needed,
114+
# if set to "true" but frees about 6 GB
115+
tool-cache: true
64116

65-
#-
66-
# name: Prune docker images (https://github.com/jens-maus/RaspberryMatic/blob/d5044bef3307bc61166377c162569de1a61cf332/.github/workflows/ci.yml#L34-L40)
67-
# run: |
68-
# sudo swapoff -a
69-
# sudo rm -f /swapfile
70-
# sudo apt clean
71-
# # docker rmi $(docker image ls -aq)
72-
# docker image prune -a
73-
# df -h
74-
75-
-
117+
# all of these default to true, but feel free to set to
118+
# "false" if necessary for your workflow
119+
android: true
120+
dotnet: true
121+
haskell: true
122+
large-packages: true
123+
docker-images: true
124+
swap-storage: true
125+
126+
-
127+
name: Prune docker images (https://github.com/jens-maus/RaspberryMatic/blob/d5044bef3307bc61166377c162569de1a61cf332/.github/workflows/ci.yml#L34-L40)
128+
run: |
129+
sudo swapoff -a
130+
sudo rm -f /swapfile
131+
sudo apt clean
132+
# docker rmi $(docker image ls -aq)
133+
docker image prune -a
134+
df -h
135+
- name: Set build versions
136+
id: set_versions
137+
env:
138+
DEP_REPO: nasa-nccs-hpda/GenCast_FP
139+
run: |
140+
DEP_TAG=$(curl -s https://api.github.com/repos/${DEP_REPO}/releases/latest | jq -r .tag_name)
141+
echo "GenCastFP_TAG=$DEP_TAG" >> $GITHUB_ENV
142+
echo "VERSION_TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
143+
echo "Using ${DEP_REPO} release tag: $DEP_TAG"
144+
echo "Workflow release version: ${GITHUB_REF#refs/tags/}"
145+
-
76146
name: Build and push
77-
uses: docker/build-push-action@v4
147+
uses: docker/build-push-action@v6
78148
with:
79149
context: .
150+
platforms: linux/arm64
80151
file: ./requirements/Dockerfile
81152
push: true
82-
tags: nasanccs/gencast-fp:latest
153+
tags: nasanccs/gencast-fp:${{ env.GenCastFP_TAG }}-arm64
154+
build-args: |
155+
GenCastFP_TAG=${{ env.GenCastFP_TAG }}
156+
157+
manifest:
158+
needs: [docker-amd, docker-arm]
159+
runs-on: ubuntu-latest
160+
steps:
161+
- name: Login to Docker Hub
162+
uses: docker/login-action@v2
163+
with:
164+
username: ${{ secrets.DOCKERHUB_USERNAME }}
165+
password: ${{ secrets.DOCKERHUB_TOKEN }}
166+
- name: Set build versions
167+
id: set_versions
168+
env:
169+
DEP_REPO: nasa-nccs-hpda/GenCast_FP
170+
run: |
171+
DEP_TAG=$(curl -s https://api.github.com/repos/${DEP_REPO}/releases/latest | jq -r .tag_name)
172+
echo "GenCastFP_TAG=$DEP_TAG" >> $GITHUB_ENV
173+
echo "VERSION_TAG=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV
174+
echo "Using ${DEP_REPO} release tag: $DEP_TAG"
175+
echo "Workflow release version: ${GITHUB_REF#refs/tags/}"
176+
- name: Extract per-platform digests
177+
run: |
178+
AMD_DIGEST=$(docker buildx imagetools inspect nasanccs/gencast-fp:${{ env.GenCastFP_TAG }}-amd64 \
179+
--format '{{range .Manifest.Manifests}}{{if eq .Platform.Architecture "amd64"}}{{.Digest}}{{end}}{{end}}')
180+
ARM_DIGEST=$(docker buildx imagetools inspect nasanccs/gencast-fp:${{ env.GenCastFP_TAG }}-arm64 \
181+
--format '{{range .Manifest.Manifests}}{{if eq .Platform.Architecture "arm64"}}{{.Digest}}{{end}}{{end}}')
182+
echo "AMD_DIGEST=$AMD_DIGEST" >> $GITHUB_ENV
183+
echo "ARM_DIGEST=$ARM_DIGEST" >> $GITHUB_ENV
184+
- name: Create and push manifest
185+
run: |
186+
docker manifest create nasanccs/gencast-fp:latest \
187+
nasanccs/gencast-fp@${AMD_DIGEST} \
188+
nasanccs/gencast-fp@${ARM_DIGEST}
189+
docker manifest push nasanccs/gencast-fp:latest
190+
191+
docker manifest create nasanccs/gencast-fp:${{ env.GenCastFP_TAG }} \
192+
nasanccs/gencast-fp@${AMD_DIGEST} \
193+
nasanccs/gencast-fp@${ARM_DIGEST}
194+
docker manifest push nasanccs/gencast-fp:${{ env.GenCastFP_TAG }}

gencast_fp/postprocess/gencast_cf.py

Lines changed: 42 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -57,15 +57,19 @@ def _resolve_dt(ctime, ref_date):
5757
return pd.to_datetime(ref_ns) + pd.to_timedelta(float(c), unit='h')
5858

5959

60-
def proc_time_step(ds_org, ctime, ref_date, output_dir: Path, case="init", ens_mean=True):
60+
def proc_time_step(
61+
ds_org, ctime, ref_date,
62+
output_dir: Path, case="init", ens_mean=True
63+
):
6164

6265
FILL_VALUE = np.float32(1.0e15)
6366

6467
ds = ds_org.sel(time=ctime).expand_dims("time")
6568

6669
# Time
6770
# dt = pd.to_datetime(ref_date + ctime)
68-
dt = _resolve_dt(ctime, ref_date) # instead of: pd.to_datetime(ref_date + ctime)
71+
# instead of: pd.to_datetime(ref_date + ctime)
72+
dt = _resolve_dt(ctime, ref_date)
6973
HH = dt.strftime("%H")
7074
YYYY = dt.strftime("%Y")
7175
MM = dt.strftime("%m")
@@ -139,8 +143,10 @@ def proc_time_step(ds_org, ctime, ref_date, output_dir: Path, case="init", ens_m
139143
"PRECTOT": {"long_name": "total_precipitation", "units": "m"},
140144
"U": {"long_name": "eastward_wind", "units": "m s-1"},
141145
"V": {"long_name": "northward_wind", "units": "m s-1"},
142-
"OMEGA": {"long_name": "vertical_pressure_velocity", "units": "Pa s-1"},
143-
"PHIS": {"long_name": "surface_geopotential_height", "units": "m+2 s-2"},
146+
"OMEGA": {
147+
"long_name": "vertical_pressure_velocity", "units": "Pa s-1"},
148+
"PHIS": {
149+
"long_name": "surface_geopotential_height", "units": "m+2 s-2"},
144150
}
145151

146152
valid_rename = {k: v for k, v in rename_dict.items() if k in ds.variables}
@@ -174,7 +180,9 @@ def proc_time_step(ds_org, ctime, ref_date, output_dir: Path, case="init", ens_m
174180
fname = f"FMGenCast-initial-geos_date-{tstamp}_res-1.0_levels-13.nc"
175181
else:
176182
suffix = "_ens-mean.nc" if ens_mean else ".nc"
177-
fname = f"FMGenCast-prediction-geos_date-{tstamp}_res-1.0_levels-13{suffix}"
183+
fname = \
184+
"FMGenCast-prediction-geos_date-" + \
185+
f"{tstamp}_res-1.0_levels-13{suffix}"
178186

179187
output_dir.mkdir(parents=True, exist_ok=True)
180188
ds.to_netcdf(output_dir / fname, encoding=encoding, engine="netcdf4")
@@ -210,7 +218,8 @@ def run_postprocess_day(
210218
geos_dir.glob(f"*source-geos*{Y:04d}-{M:02d}-{D:02d}T{H:02d}_*.nc"))
211219

212220
if init_files:
213-
# ds_init = xr.open_dataset(init_files[0]).drop_vars("land_sea_mask", errors="ignore")
221+
# ds_init = xr.open_dataset(
222+
# init_files[0]).drop_vars("land_sea_mask", errors="ignore")
214223
ds_init = _open_xr_cf_safe(init_files[0]).drop_vars(
215224
"land_sea_mask", errors="ignore")
216225
# ref_init = np.datetime64(f"{Y}-{M}-{D}T00:00:00")
@@ -229,11 +238,13 @@ def run_postprocess_day(
229238
pred_files = sorted(
230239
pred_dir.glob(f"*geos_date-{Y:04d}-{M:02d}-{D:02d}T{H:02d}_*.nc"))
231240
if pred_files:
232-
# ds_pred = xr.open_dataset(pred_files[0]).drop_vars("land_sea_mask", errors="ignore")
241+
# ds_pred = xr.open_dataset(
242+
# pred_files[0]).drop_vars("land_sea_mask", errors="ignore")
233243
ds_pred = _open_xr_cf_safe(
234244
pred_files[0]).drop_vars("land_sea_mask", errors="ignore")
235245
# ref_pred = np.datetime64(f"{Y}-{M}-{D}T12:00:00")
236-
ref_pred = date + pd.Timedelta(hours=12) # pd.Timestamp(f"{Y}-{M}-{D}T12:00:00") # TODO: Modify to be +12
246+
# pd.Timestamp(f"{Y}-{M}-{D}T12:00:00") # TODO: Modify to be +12?
247+
ref_pred = date + pd.Timedelta(hours=12)
237248
for ctime in ds_pred.time.values:
238249
proc_time_step(
239250
ds_pred, ctime, ref_pred,
@@ -255,12 +266,16 @@ def run_postprocess_multiday(
255266
post_out_dir: str,
256267
ens_mean: bool = True,
257268
):
258-
"""Postprocess multiple days (inclusive) of GenCast outputs into CF-compliant NetCDFs.
269+
"""
270+
Postprocess multiple days (inclusive) of
271+
GenCast outputs into CF-compliant NetCDFs.
259272
Calls run_postprocess_day for each day in [start_date, end_date].
260273
"""
261274
# start_date = np.datetime64(start_date)
262275
# end_date = np.datetime64(end_date)
263-
# date_range = np.arange(start_date, end_date + np.timedelta64(1, "D"), dtype="datetime64[D]")
276+
# date_range = np.arange(
277+
# start_date, end_date + np.timedelta64(1, "D"),
278+
# dtype="datetime64[D]")
264279
fmt = "%Y-%m-%d:%H"
265280

266281
# Parse exact hour from input
@@ -293,17 +308,25 @@ def run_postprocess_multiday(
293308
level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
294309
)
295310

296-
parser = argparse.ArgumentParser(description="Convert GenCast outputs to CF-compliant NetCDFs")
297-
parser.add_argument("--start_date", type=str, required=True, help="Start date (YYYY-MM-DD)")
298-
parser.add_argument("--end_date", type=str, required=True, help="End date (YYYY-MM-DD)")
299-
parser.add_argument("--geos_dir", type=str, required=True,
300-
help="Directory with GEOS inputs (for initial conditions)")
311+
parser = argparse.ArgumentParser(
312+
description="Convert GenCast outputs to CF-compliant NetCDFs")
313+
parser.add_argument(
314+
"--start_date", type=str, required=True,
315+
help="Start date (YYYY-MM-DD:HH)")
316+
parser.add_argument(
317+
"--end_date", type=str, required=True,
318+
help="End date (YYYY-MM-DD:HH)")
319+
parser.add_argument(
320+
"--geos_dir", type=str, required=True,
321+
help="Directory with GEOS inputs (for initial conditions)")
301322
parser.add_argument("--pred_dir", type=str, required=True,
302323
help="Directory with GenCast predictions")
303-
parser.add_argument("--post_out_dir", type=str, default="./output/postprocess",
304-
help="Directory for CF-compliant NetCDF outputs")
305-
parser.add_argument("--no_ens_mean", action="store_true",
306-
help="Disable ensemble mean (keep all ensemble members)")
324+
parser.add_argument(
325+
"--post_out_dir", type=str, default="./output/postprocess",
326+
help="Directory for CF-compliant NetCDF outputs")
327+
parser.add_argument(
328+
"--no_ens_mean", action="store_true",
329+
help="Disable ensemble mean (keep all ensemble members)")
307330

308331
args = parser.parse_args()
309332

0 commit comments

Comments
 (0)