Skip to content

Commit b0f2b42

Browse files
committed
Add e2e test for create_iceflow_parquet
1 parent 9cf2915 commit b0f2b42

File tree

1 file changed

+34
-1
lines changed

1 file changed

+34
-1
lines changed

tests/integration/test_e2e.py

Lines changed: 34 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,9 +12,10 @@
1212

1313
import datetime as dt
1414

15+
import dask.dataframe as dd
1516
import pandas as pd
1617

17-
from nsidc.iceflow.api import fetch_iceflow_df
18+
from nsidc.iceflow.api import create_iceflow_parquet, fetch_iceflow_df
1819
from nsidc.iceflow.data.models import (
1920
BLATM1BDataset,
2021
BoundingBox,
@@ -144,3 +145,35 @@ def test_glah06(tmp_path):
144145
)
145146

146147
assert (results.ITRF == "ITRF2008").all()
148+
149+
150+
def test_create_iceflow_parquet(tmp_path):
    """End-to-end test for `create_iceflow_parquet`.

    Fetches ILATM1B v1 and v2 granules over a small bounding box, writes them
    to parquet transformed to a common ITRF, then reads the parquet back with
    dask and checks the expected columns and dataset labels are present.

    Requires network access to search/download data (integration test).
    """
    target_itrf = "ITRF2014"
    common_bounding_box = BoundingBox(
        lower_left_lon=-49.149,
        lower_left_lat=69.186,
        upper_right_lon=-48.949,
        upper_right_lat=69.238,
    )

    # This should find 4 results for ILATM1B v1 and 3 results for v2.
    parquet_path = create_iceflow_parquet(
        dataset_search_params=DatasetSearchParameters(
            datasets=[ILATM1BDataset(version="1"), ILATM1BDataset(version="2")],
            bounding_box=common_bounding_box,
            # Note: a plain tuple, not double-wrapped in parentheses.
            temporal=(dt.date(2007, 1, 1), dt.date(2014, 10, 28)),
        ),
        output_dir=tmp_path,
        target_itrf=target_itrf,
    )

    df = dd.read_parquet(parquet_path)  # type: ignore[attr-defined]

    # Assert that the parquet data has the expected columns.
    expected_columns = sorted(["latitude", "longitude", "elevation", "dataset"])
    assert expected_columns == sorted(df.columns)

    # Assert that the two datasets we expect are present.
    assert sorted(["ILATM1Bv1", "ILATM1Bv2"]) == sorted(
        df.dataset.unique().compute().values
    )

0 commit comments

Comments
 (0)