4 changes: 3 additions & 1 deletion docs/getting_started/explanation_concepts.md
@@ -36,7 +36,9 @@ For several common input datasets, such as the Copernicus Marine Service analysi

```python
dataset = xr.open_mfdataset("insert_copernicus_data_files.nc")
fieldset = parcels.FieldSet.from_copernicusmarine(dataset)
fields = {"U": ds_fields["uo"], "V": ds_fields["vo"]}
ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)
fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)
```

In some cases, we might want to combine `parcels.Field`s from different sources in the same `parcels.FieldSet`, such as ocean currents from one dataset and Stokes drift from another. This is possible in Parcels by adding each `parcels.Field` separately:
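For illustration only — the following sketch is not part of this diff and is not the example from the docs. It assumes hypothetical file names, the Copernicus wave variable names `VSDX`/`VSDY`, and a `FieldSet.add_field` method as in earlier Parcels versions; none of these are confirmed by this changeset.

```python
# Hedged sketch, not the actual docs example. Assumptions: file names, the wave
# variable names ("VSDX"/"VSDY"), and FieldSet.add_field (as in earlier Parcels
# versions) - none of these are confirmed by this changeset.
import xarray as xr
import parcels

ds_currents = xr.open_mfdataset("copernicus_currents_*.nc")
ds_waves = xr.open_mfdataset("copernicus_waves_*.nc")

# Ocean currents from one dataset ...
ds_uv = parcels.convert.copernicusmarine_to_sgrid(
    fields={"U": ds_currents["uo"], "V": ds_currents["vo"]}
)
fieldset = parcels.FieldSet.from_sgrid_conventions(ds_uv)

# ... and Stokes drift from another, added as separate Fields
ds_stokes = parcels.convert.copernicusmarine_to_sgrid(
    fields={"U": ds_waves["VSDX"], "V": ds_waves["VSDY"]}
)
stokes_fieldset = parcels.FieldSet.from_sgrid_conventions(ds_stokes)
fieldset.add_field(stokes_fieldset.U, name="Stokes_U")  # assumes add_field is still available
fieldset.add_field(stokes_fieldset.V, name="Stokes_V")
```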
9 changes: 6 additions & 3 deletions docs/getting_started/tutorial_output.ipynb
@@ -59,7 +59,10 @@
"ds_fields = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n",
"ds_fields.load() # load the dataset into memory\n",
"\n",
"fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)"
"# Convert to SGRID-compliant dataset and create FieldSet\n",
"fields = {\"U\": ds_fields[\"uo\"], \"V\": ds_fields[\"vo\"]}\n",
"ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n",
"fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)"
]
},
{
@@ -557,7 +560,7 @@
"metadata": {
"celltoolbar": "Metagegevens bewerken",
"kernelspec": {
"display_name": "test-notebooks",
"display_name": "docs",
"language": "python",
"name": "python3"
},
@@ -571,7 +574,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.0"
"version": "3.14.2"
}
},
"nbformat": 4,
11 changes: 8 additions & 3 deletions docs/getting_started/tutorial_quickstart.md
@@ -42,11 +42,16 @@ As we can see, the reanalysis dataset contains eastward velocity `uo`, northward
(`thetao`) and salinity (`so`) fields.

These hydrodynamic fields need to be stored in a {py:obj}`parcels.FieldSet` object. Parcels provides tooling to parse many types
of models or observations into such a `parcels.FieldSet` object. Here, we use {py:func}`parcels.FieldSet.from_copernicusmarine()`, which
recognizes the standard names of a velocity field:
of models or observations into such a `parcels.FieldSet` object. This is done in two steps.

First, we convert the dataset into an SGRID-compliant dataset, for example using one of the `parcels.convert.<MODEL>_to_sgrid()` functions. Then, we create the `parcels.FieldSet` from that SGRID-compliant dataset using `parcels.FieldSet.from_sgrid_conventions()`.

Below, we use a combination of {py:func}`parcels.convert.copernicusmarine_to_sgrid()` and {py:func}`parcels.FieldSet.from_sgrid_conventions()`, passing the velocity variables of the dataset via the dictionary `fields`:

```{code-cell}
fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)
fields = {"U": ds_fields["uo"], "V": ds_fields["vo"]}
ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)
fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)
```

The subset contains a region of the Agulhas current along the southeastern coast of Africa:
9 changes: 8 additions & 1 deletion docs/user_guide/examples/explanation_kernelloop.md
@@ -68,7 +68,14 @@ ds_fields["VWind"] = xr.DataArray(
data=np.zeros((tdim, ydim, xdim)),
coords=[ds_fields.time, ds_fields.latitude, ds_fields.longitude])

fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)
fields = {
"U": ds_fields["uo"],
"V": ds_fields["vo"],
"UWind": ds_fields["UWind"],
"VWind": ds_fields["VWind"],
}
ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)
fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)

# Create a VectorField for the wind
windvector = parcels.VectorField(
11 changes: 10 additions & 1 deletion docs/user_guide/examples/tutorial_Argofloats.ipynb
@@ -121,7 +121,16 @@
"# TODO check how we can get good performance without loading full dataset in memory\n",
"ds_fields.load() # load the dataset into memory\n",
"\n",
"fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)\n",
"# Select fields\n",
"fields = {\n",
" \"U\": ds_fields[\"uo\"],\n",
" \"V\": ds_fields[\"vo\"],\n",
" \"thetao\": ds_fields[\"thetao\"],\n",
"}\n",
"\n",
"# Convert to SGRID-compliant dataset and create FieldSet\n",
"ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n",
"fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)\n",
"fieldset.add_constant(\"mindepth\", 1.0)\n",
"\n",
"# Define a new Particle type including extra Variables\n",
9 changes: 6 additions & 3 deletions docs/user_guide/examples/tutorial_delaystart.ipynb
@@ -57,7 +57,10 @@
"ds_fields = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n",
"ds_fields.load() # load the dataset into memory\n",
"\n",
"fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)"
"# Convert to SGRID-compliant dataset and create FieldSet\n",
"fields = {\"U\": ds_fields[\"uo\"], \"V\": ds_fields[\"vo\"]}\n",
"ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n",
"fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)"
]
},
{
@@ -433,7 +436,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "test-notebooks",
"display_name": "docs",
"language": "python",
"name": "python3"
},
@@ -447,7 +450,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.0"
"version": "3.14.2"
}
},
"nbformat": 4,
4 changes: 3 additions & 1 deletion docs/user_guide/examples/tutorial_diffusion.ipynb
@@ -491,7 +491,9 @@
"metadata": {},
"outputs": [],
"source": [
"fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)\n",
"fields = {\"U\": ds_fields[\"uo\"], \"V\": ds_fields[\"vo\"]}\n",
"ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n",
"fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)\n",
"\n",
"\n",
"def degree_lat_to_meter(d):\n",
5 changes: 4 additions & 1 deletion docs/user_guide/examples/tutorial_dt_integrators.ipynb
@@ -68,7 +68,10 @@
"ds_fields = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n",
"ds_fields.load() # load the dataset into memory\n",
"\n",
"fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)\n",
"# Convert to SGRID-compliant dataset and create FieldSet\n",
"fields = {\"U\": ds_fields[\"uo\"], \"V\": ds_fields[\"vo\"]}\n",
"ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n",
"fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)\n",
"\n",
"# Check field resolution in time and space\n",
"print(\n",
14 changes: 11 additions & 3 deletions docs/user_guide/examples/tutorial_gsw_density.ipynb
@@ -47,7 +47,15 @@
"# TODO check how we can get good performance without loading full dataset in memory\n",
"ds_fields.load() # load the dataset into memory\n",
"\n",
"fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)"
"# Convert to SGRID-compliant dataset and create FieldSet\n",
"fields = {\n",
" \"U\": ds_fields[\"uo\"],\n",
" \"V\": ds_fields[\"vo\"],\n",
" \"thetao\": ds_fields[\"thetao\"],\n",
" \"so\": ds_fields[\"so\"],\n",
"}\n",
"ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n",
"fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)"
]
},
{
@@ -137,7 +145,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "test-notebooks",
"display_name": "docs",
"language": "python",
"name": "python3"
},
@@ -151,7 +159,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.0"
"version": "3.14.2"
}
},
"nbformat": 4,
9 changes: 6 additions & 3 deletions docs/user_guide/examples/tutorial_sampling.ipynb
@@ -66,7 +66,10 @@
"ds_fields = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n",
"ds_fields.load() # load the dataset into memory\n",
"\n",
"fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)"
"# Convert to SGRID-compliant dataset and create FieldSet\n",
"fields = {\"U\": ds_fields[\"uo\"], \"V\": ds_fields[\"vo\"], \"thetao\": ds_fields[\"thetao\"]}\n",
"ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n",
"fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)"
]
},
{
@@ -391,7 +394,7 @@
"metadata": {
"celltoolbar": "Raw-celnotatie",
"kernelspec": {
"display_name": "test-notebooks",
"display_name": "docs",
"language": "python",
"name": "python3"
},
@@ -405,7 +408,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.0"
"version": "3.14.2"
},
"pycharm": {
"stem_cell": {
15 changes: 9 additions & 6 deletions docs/user_guide/examples_v3/tutorial_splitparticles.ipynb
@@ -48,10 +48,13 @@
" \"CopernicusMarine_data_for_Argo_tutorial\"\n",
")\n",
"\n",
"ds = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n",
"ds.load() # load the dataset into memory\n",
"ds_fields = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n",
"ds_fields.load() # load the dataset into memory\n",
"\n",
"fieldset = parcels.FieldSet.from_copernicusmarine(ds)"
"# Convert to SGRID-compliant dataset and create FieldSet\n",
"fields = {\"U\": ds_fields[\"uo\"], \"V\": ds_fields[\"vo\"]}\n",
"ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n",
"fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)"
]
},
{
@@ -117,7 +120,7 @@
"source": [
"ds_out = xr.open_zarr(\"growingparticles.zarr\")\n",
"plt.plot(\n",
" (ds_out.time.values[:].T - ds.time.values[0]).astype(\"timedelta64[h]\"),\n",
" (ds_out.time.values[:].T - ds_fields.time.values[0]).astype(\"timedelta64[h]\"),\n",
" ds_out.mass.T,\n",
")\n",
"plt.grid()\n",
@@ -136,7 +139,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "test-notebooks",
"display_name": "docs",
"language": "python",
"name": "python3"
},
@@ -150,7 +153,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.0"
"version": "3.14.2"
}
},
"nbformat": 4,
77 changes: 9 additions & 68 deletions src/parcels/_core/fieldset.py
@@ -20,7 +20,7 @@
from parcels._logger import logger
from parcels._reprs import fieldset_repr
from parcels._typing import Mesh
from parcels.convert import _discover_U_and_V, _ds_rename_using_standard_names, _maybe_rename_coords
from parcels.convert import _ds_rename_using_standard_names
from parcels.interpolators import (
CGrid_Velocity,
Ux_Velocity,
@@ -186,64 +186,6 @@ def gridset(self) -> list[BaseGrid]:
grids.append(field.grid)
return grids

@classmethod
def from_copernicusmarine(cls, ds: xr.Dataset):
"""Create a FieldSet from a Copernicus Marine Service xarray.Dataset.

Parameters
----------
ds : xarray.Dataset
xarray.Dataset as obtained from the copernicusmarine toolbox.

Returns
-------
FieldSet
FieldSet object containing the fields from the dataset that can be used for a Parcels simulation.

Notes
-----
See https://help.marine.copernicus.eu/en/collections/9080063-copernicus-marine-toolbox for more information on the copernicusmarine toolbox.
The toolbox to ingest data from most of the products on the Copernicus Marine Service (https://data.marine.copernicus.eu/products) into an xarray.Dataset.
You can use indexing and slicing to select a subset of the data before passing it to this function.
Note that most Parcels uses will require both U and V fields to be present in the dataset. This function will try to find out which variables in the dataset correspond to U and V.
To override the automatic detection, rename the appropriate variables in your dataset to 'U' and 'V' before passing it to this function.

"""
ds = ds.copy()
ds = _discover_U_and_V(ds, _COPERNICUS_MARINE_CF_STANDARD_NAME_FALLBACKS)
expected_axes = set("XYZT") # TODO: Update after we have support for 2D spatial fields
if missing_axes := (expected_axes - set(ds.cf.axes)):
raise ValueError(
f"Dataset missing CF compliant metadata for axes "
f"{missing_axes}. Expected 'axis' attribute to be set "
f"on all dimension axes {expected_axes}. "
"HINT: Add xarray metadata attribute 'axis' to dimension - e.g., ds['lat'].attrs['axis'] = 'Y'"
)

ds = _maybe_rename_coords(ds, _COPERNICUS_MARINE_AXIS_VARNAMES)
if "W" in ds.data_vars:
# Negate W to convert from up positive to down positive (as that's the direction of positive z)
ds["W"].data *= -1

if "grid" in ds.cf.cf_roles:
raise ValueError(
"Dataset already has a 'grid' variable (according to cf_roles). Didn't expect there to be grid metadata on copernicusmarine datasets - please open an issue with more information about your dataset."
)
ds["grid"] = xr.DataArray(
0,
attrs=sgrid.Grid2DMetadata( # use dummy *_center dimensions - this is A grid data (all defined on nodes)
cf_role="grid_topology",
topology_dimension=2,
node_dimensions=("lon", "lat"),
face_dimensions=(
sgrid.DimDimPadding("x_center", "lon", sgrid.Padding.LOW),
sgrid.DimDimPadding("y_center", "lat", sgrid.Padding.LOW),
),
vertical_dimensions=(sgrid.DimDimPadding("z_center", "depth", sgrid.Padding.LOW),),
).to_attrs(),
)
return cls.from_sgrid_conventions(ds, mesh="spherical")

@classmethod
def from_fesom2(cls, ds: ux.UxDataset):
"""Create a FieldSet from a FESOM2 uxarray.UxDataset.
@@ -555,12 +497,11 @@ def _is_agrid(ds: xr.Dataset) -> bool:


def _is_coordinate_in_degrees(da: xr.DataArray) -> bool:
match da.attrs.get("units"):
case None:
raise ValueError(
f"Coordinate {da.name!r} of your dataset has no 'units' attribute - we don't know what the spatial units are."
)
case "degrees":
return True
case _:
return False
units = da.attrs.get("units")
if units is None:
raise ValueError(
f"Coordinate {da.name!r} of your dataset has no 'units' attribute - we don't know what the spatial units are."
)
if isinstance(units, str) and "degree" in units.lower():
return True
return False
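For illustration (not part of the diff): a minimal sketch of the behavioural change in the relaxed units check, calling the private helper directly purely for demonstration. Units strings such as "degrees_north" or "degrees_east" now count as degrees, whereas the old `match` statement only accepted the exact string "degrees".

```python
import xarray as xr

# Demonstration only: _is_coordinate_in_degrees is the private helper defined above.
lat = xr.DataArray(
    [0.0, 1.0], dims="lat", name="lat", attrs={"units": "degrees_north"}
)
print(_is_coordinate_in_degrees(lat))  # True with the new check; the old exact match gave False

x_m = xr.DataArray([0.0, 100.0], dims="x", name="x", attrs={"units": "m"})
print(_is_coordinate_in_degrees(x_m))  # False

no_units = xr.DataArray([0.0], dims="y", name="y")
# _is_coordinate_in_degrees(no_units) raises ValueError: missing 'units' (unchanged behaviour)
```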
1 change: 0 additions & 1 deletion src/parcels/_reprs.py
@@ -39,7 +39,6 @@ def field_repr(field: Field, level: int = 0) -> str:
name : {field.name!r}
interp_method : {field.interp_method!r}
time_interval : {field.time_interval!r}
units : {field.units!r}
igrid : {field.igrid!r}
DataArray:
{textwrap.indent(repr(field.data), 8 * " ")}