diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 1e73e65b4f..7de5dd6358 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -59,3 +59,4 @@ bd535c710db78420b8e8b9d71d88d8339e899c59 cf433215b58ba8776ec5edfb0b0d80c0836ed3a0 16d57ff37859b34dab005693e3085d64e2bcd95a e8fc526e0d7818d45f171488c78392c4ff63902a +cdf40d265cc82775607a1bf25f5f527bacc97405 diff --git a/cime_config/testdefs/ExpectedTestFails.xml b/cime_config/testdefs/ExpectedTestFails.xml index 30394ff65d..390db0feea 100644 --- a/cime_config/testdefs/ExpectedTestFails.xml +++ b/cime_config/testdefs/ExpectedTestFails.xml @@ -178,13 +178,6 @@ - - - FAIL - #2453 - - - FAIL @@ -271,6 +264,10 @@ FAIL #2310 + + FAIL + #3038 + diff --git a/doc/ChangeLog b/doc/ChangeLog index 7b1fcdc110..4b9933ddc3 100644 --- a/doc/ChangeLog +++ b/doc/ChangeLog @@ -1,4 +1,69 @@ =============================================================== +Tag name: ctsm5.3.035 +Originator(s): samrabin (Sam Rabin, UCAR/TSS) +Date: Fri Mar 28 14:31:29 MDT 2025 +One-line Summary: Merge b4b-dev + +Purpose and description of changes +---------------------------------- + +Merging b4b-dev and ctsm5.3.034. + + +Significant changes to scientifically-supported configurations +-------------------------------------------------------------- + +Does this tag change answers significantly for any of the following physics configurations? +(Details of any changes will be given in the "Answer changes" section below.) + + [Put an [X] in the box for any configuration with significant answer changes.] + +[ ] clm6_0 + +[ ] clm5_0 + +[ ] ctsm5_0-nwp + +[ ] clm4_5 + + +Bugs fixed +---------- + +List of CTSM issues fixed (include CTSM Issue # and description): +- [Issue #1565: Documentation need for generic single point with nuopc](https://github.com/ESCOMP/CTSM/issues/1565) +- [Issue #2453: FUNIT test not working on izumi](https://github.com/ESCOMP/CTSM/issues/2453) +- [Issue #2847: Pitfall using mksurdata_esmf options --model-mesh-nx NX --model-mesh-ny NY when using unstructured meshes](https://github.com/ESCOMP/CTSM/issues/2847) +- [Issue #2892: Convert "How do I create a single-point run with the NUOPC (default) coupler?" to real docs](https://github.com/ESCOMP/CTSM/issues/2892) +- [Issue #2999: Throw errors in subset_data for known buggy setups](https://github.com/ESCOMP/CTSM/issues/2999) + + +Testing summary: +---------------- + + [PASS means all tests PASS; OK means tests PASS other than expected fails.] + + python testing (if python code has changed; see instructions in python/README.md; document testing done): + + derecho - PASS + + regular tests (aux_clm: https://github.com/ESCOMP/CTSM/wiki/System-Testing-Guide#pre-merge-system-testing): + + derecho ----- OK + izumi ------- OK + + +Other details +------------- + +Pull Requests that document the changes: +- [Pull Request #3002: Update single point docs (and improve subset_data etc. 
erroring) by samsrabin](https://github.com/ESCOMP/CTSM/pull/3002) +- [Pull Request #3023: Get pFUnit-based unit tests working on my Mac by billsacks](https://github.com/ESCOMP/CTSM/pull/3023) +- [Pull Request #3030: Avoid mksurfdata_esmf pitfall with --model-mesh-nx, ny by slevis-lmwg](https://github.com/ESCOMP/CTSM/pull/3030) +- [Pull Request #3037: ctsm5.3.035: b4b-dev merge 2025-03-27 by samsrabin](https://github.com/ESCOMP/CTSM/pull/3037) + +=============================================================== +=============================================================== Tag name: ctsm5.3.034 Originator(s): rgknox (Ryan Knox) Date: Thu Mar 27 16:03:56 MDT 2025 diff --git a/doc/ChangeSum b/doc/ChangeSum index af6a9cf010..f265f924f7 100644 --- a/doc/ChangeSum +++ b/doc/ChangeSum @@ -1,5 +1,6 @@ Tag Who Date Summary ============================================================================================================================ + ctsm5.3.035 samrabin 03/28/2025 Merge b4b-dev ctsm5.3.034 rgknox 03/27/2025 Fixes to enable passing FATES two-stream ERS and ERI restart tests. ctsm5.3.033 multiple 03/19/2025 Add new mimics_fi param = frac of litter inputs bypassing litter pools ctsm5.3.032 slevis 03/17/2025 Add option to use CRUJRA2024 with clm6 and clm5 diff --git a/doc/source/users_guide/overview/introduction.rst b/doc/source/users_guide/overview/introduction.rst index 8056e6ab16..f8fe858c00 100644 --- a/doc/source/users_guide/overview/introduction.rst +++ b/doc/source/users_guide/overview/introduction.rst @@ -60,7 +60,9 @@ As a followup to the tools chapter, :ref:`adding-new-resolutions-section` tells In :ref:`running-special-cases-section`, again for the expert user, we give details on how to do some particularly difficult special cases. For example, we give the protocol for spinning up the |version|-BGC and CLMCN models as well as CLM with dynamic vegetation active (CNDV). We give instructions to do a spinup case from a previous case with Coupler history output for atmospheric forcing. We also give instructions on running both the prognostic crop and irrigation models. Lastly we tell the user how to use the DATM model to send historical CO2 data to CLM. -:ref:`running-single-points` outlines how to do single-point or regional simulations using |version|. This is useful to either compare |version| simulations with point observational stations, such as tower sites (which might include your own atmospheric forcing), or to do quick simulations with CLM for example to test a new parameterization. There are several different ways given on how to perform single-point simulations which range from simple PTS_MODE to more complex where you create all your own datasets, tying into :ref:`using-clm-tools-section` and also :ref:`adding-new-resolutions-section` to add the files into the build-namelist XML database. +:ref:`running-single-points` outlines how to do single-point or regional simulations using |version|. This is useful to either compare |version| simulations with point observational stations, such as tower sites (which might include your own atmospheric forcing), or to do quick simulations with CLM, for example to test a new parameterization. Several different ways to perform single-point simulations are described, ranging from simple sampling of existing inputs to more complex approaches where you create all your own datasets, tying into :ref:`using-clm-tools-section` and also :ref:`adding-new-resolutions-section` to add the files into the build-namelist XML database. 
+ +There is also :ref:`pts_mode`, which is useful for running single points as part of the Single Column Atmospheric Model (SCAM). :ref:`troubleshooting-index` gives some guidance on trouble-shooting problems when using |version|. It doesn't cover all possible problems with CLM, but gives you some guidelines for things that can be done for some common problems. diff --git a/doc/source/users_guide/running-single-points/index.rst b/doc/source/users_guide/running-single-points/index.rst index ba342d0ba9..f65e9c5bf3 100644 --- a/doc/source/users_guide/running-single-points/index.rst +++ b/doc/source/users_guide/running-single-points/index.rst @@ -15,6 +15,7 @@ Running Single Point Regional Cases :maxdepth: 2 single-point-and-regional-grid-configurations.rst - running-pts_mode-configurations.rst + running-single-point-subset-data.rst running-single-point-configurations.rst + running-pts_mode-configurations.rst diff --git a/doc/source/users_guide/running-single-points/running-pts_mode-configurations.rst b/doc/source/users_guide/running-single-points/running-pts_mode-configurations.rst index 53cae1bdf4..fb61397321 100644 --- a/doc/source/users_guide/running-single-points/running-pts_mode-configurations.rst +++ b/doc/source/users_guide/running-single-points/running-pts_mode-configurations.rst @@ -6,6 +6,9 @@ Running a single point using global data - PTS_MODE **************************************************** +.. warning:: + ``PTS_MODE`` has been mostly deprecated in favor of ``subset_data`` (Sect. :numref:`single_point_subset_data`). You should only consider using it if you are using the Single Column Atmospheric Model (SCAM). + ``PTS_MODE`` enables you to run the model using global datasets, but just picking a single point from those datasets and operating on it. It can be a very quick way to do fast simulations and get a quick turnaround. To set up a ``PTS_MODE`` simulation you use the ``-pts_lat`` and ``-pts_lon`` arguments to ``cime/scripts/create_newcase`` to give the latitude and longitude of the point you want to simulate for (the code will pick the point on the global grid nearest to the point you give). Here's an example to set up a simulation for the nearest point at 2-degree resolution to Boulder, Colorado. diff --git a/doc/source/users_guide/running-single-points/running-single-point-configurations.rst b/doc/source/users_guide/running-single-points/running-single-point-configurations.rst index 0e7f1262e2..56cad6a11e 100644 --- a/doc/source/users_guide/running-single-points/running-single-point-configurations.rst +++ b/doc/source/users_guide/running-single-points/running-single-point-configurations.rst @@ -6,7 +6,7 @@ Running Single Point Configurations ****************************************** -In addition to ``PTS_MODE`` (Sect. :numref:`pts_mode`), CLM supports running using single-point or regional datasets that are customized to a particular region. CLM supports a a small number of out-of-the-box single-point and regional datasets. However, users can create their own dataset. +In addition to running with the outputs of ``subset_data`` (Sect. :numref:`single_point_subset_data`), CLM supports running using single-point or regional datasets that are customized to a particular region. CLM supports a small number of out-of-the-box single-point and regional datasets. However, users can create their own dataset. To get the list of supported dataset resolutions do this: :: The resolution names that have an underscore in them ("_") are all single-point or regional grids.
.. note:: When running a single point, the number of processors is automatically set to one, which is the only value allowed. .. warning:: - Just like ``PTS_MODE`` (Sect. :numref:`pts_mode`), by default these setups sometimes run with ``MPILIB=mpi-serial`` (in the ``env_build.xml`` file) turned on, which allows you to run the model interactively. On some machines this mode is NOT supported and you may need to change it to FALSE before you are able to build. + Just like running with the outputs from ``subset_data`` (Sect. :numref:`single_point_subset_data`), by default these setups sometimes run with ``MPILIB=mpi-serial`` (in the ``env_build.xml`` file) turned on, which allows you to run the model interactively. On some machines this mode is NOT supported and you may need to change it to FALSE before you are able to build. .. _single-point-global-climate: diff --git a/doc/source/users_guide/running-single-points/running-single-point-subset-data.rst b/doc/source/users_guide/running-single-points/running-single-point-subset-data.rst new file mode 100644 index 0000000000..f829f2c624 --- /dev/null +++ b/doc/source/users_guide/running-single-points/running-single-point-subset-data.rst @@ -0,0 +1,60 @@ +.. include:: ../substitutions.rst + +.. _single_point_subset_data: + +**************************************** +Running a single point using global data +**************************************** + +``subset_data`` enables you to run the model using global datasets, but just picking a single point from those datasets and operating on it. It can be a very quick way to do fast simulations and get a quick turnaround. + +Subset the data +------------------ + +For single-point cases, you need to subset a surface dataset and (optionally) DATM data. The Python script to subset this data can be found in the CTSM repository at ``tools/site_and_regional/subset_data``. + +Note that you will need to have a python environment set up that includes the packages ``scipy``, ``xarray``, and ``numpy``. If you have conda or miniconda installed, you can create a conda environment for this and other CTSM python tools using the script ``py_env_create`` at the top level of your CTSM checkout. + +To subset surface data and climate forcings (DATM) for a single point, use the command: + +.. code:: shell + + tools/site_and_regional/subset_data point \ + --lat $my_lat --lon $my_lon --site $my_site_name \ + --create-surface --create-datm \ + --datm-syr $my_start_year --datm-eyr $my_end_year \ + --create-user-mods --outdir $my_output_dir + +- ``$my_lat``: latitude of point, *must be between -90 and 90 degrees*. E.g., Boulder, CO, USA: 40. +- ``$my_lon``: longitude of point, *must be between 0 and 360 degrees*. E.g., Boulder, CO, USA: 255. +- ``$my_site_name``: name of site, *used for file naming* +- ``$my_start_year``: start year for DATM data to subset, *default between 1901 and 2014* +- ``$my_end_year``: end year for DATM data to subset, *default between 1901 and 2014; the default CRUJRA2024 DATM data ends in 2023, while the old default GSWP3 ends in 2015; see note below about switching the default DATM data* +- ``$my_output_dir``: output directory to place the subset data and user_mods directory. This should be something specific to *just* your data for ``$my_site_name``. + +You can also have the script subset land-use data. See the help (``tools/site_and_regional/subset_data --help``) for all argument options.
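+
+For example, a hypothetical invocation for a point near Boulder, CO, using the example latitude and longitude above (the site name, years, and output directory here are illustrative placeholders, not prescribed values):
+
+.. code:: shell
+
+   tools/site_and_regional/subset_data point \
+       --lat 40 --lon 255 --site my_boulder_site \
+       --create-surface --create-datm \
+       --datm-syr 2001 --datm-eyr 2010 \
+       --create-user-mods --outdir /path/to/my_subset_output
+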
+ +**Note that this script defaults to subsetting specific surface, domain, and land-use files and the CRUJRA2024 DATM data, and can currently only be run as-is on Derecho. If you're not on Derecho, use the ``--inputdata-dir`` option to specify where the top level of your CESM input data is. Also, to subset GSWP3 instead of CRUJRA2024 DATM data, you currently need to hardwire datm_type = "datm_gswp3" (instead of the default "datm_crujra") in python/ctsm/subset_data.py.** + +The ``--create-user-mods`` option tells the script to set up a user mods directory in your specified ``$my_output_dir`` and to specify the required ``PTS_LAT`` and ``PTS_LON`` settings. You can then use this user mods directory to set up your CTSM case, as described below. + +Create the case +------------------ + +You can use the user mods directory set up in the previous subset data step to tell CIME/CTSM where your subset files are located. + +.. code:: shell + + cime/scripts/create_newcase --case $my_case_name --res CLM_USRDAT \ + --compset $compset --run-unsupported \ + --user-mods-dirs $my_output_dir/user_mods + +- ``$my_case_name``: the path of the case directory you want to create +- ``$compset``: the compset you would like to use (for example, ``I2000Clm60Bgc``) +- Note the use of ``$my_output_dir/user_mods`` which is the ``user_mods/`` directory that the subset data script set up within your specified ``$my_output_dir``. + +Note that ``./case.setup`` on Derecho will automatically set the queue to ``develop`` and the walltime to one hour. You might need a longer walltime, but the maximum walltime for ``develop`` is one hour. To switch to the ``main`` queue with a two-hour walltime on Derecho: + +.. code:: shell + + ./xmlchange --subgroup case.run JOB_QUEUE=main,JOB_WALLCLOCK_TIME=2:00:00 diff --git a/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst b/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst index 61e1f25de8..d16dfa6f5e 100644 --- a/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst +++ b/doc/source/users_guide/running-single-points/single-point-and-regional-grid-configurations.rst @@ -10,7 +10,7 @@ CLM allows you to set up and run cases with a single-point or a local region as There are two different ways to do this for normal-supported site -``PTS_MODE`` +``subset_data`` runs for a single point using global datasets. ``CLM_USRDAT_NAME`` @@ -24,9 +24,9 @@ There are two different ways to do this for normal-supported site Running for a *normal supported site* is a great solution, if one of the supported single-point/regional datasets is your region of interest (see :ref:`running-single-point-datasets`). All the datasets are created for you, and you can easily select one and run, out of the box with it using a supported resolution from the top level of the CESM scripts. The problem is that there is a very limited set of supported datasets. You can also use this method for your own datasets, but you have to create the datasets, and add them to the XML database in scripts, CLM and to the DATM. This is worthwhile if you want to repeat many multiple cases for a given point or region. -In general :ref:`pts_mode` is the quick and dirty method that gets you started without having to create datasets -- but has limitations. It's good for an initial attempt at seeing results for a point of interest, but since you can NOT restart with it, it's usage is limited. 
It is the quickest method as you can create a case for it directly from ``cime/scripts/create_newcase``. Although you can't restart, running a single point is very fast, and you can run for long simulation times even without restarts. +In general :ref:`single_point_subset_data` is the quick and dirty method that gets you started, but it has limitations. It's good for an initial attempt at seeing results for a point of interest, but since you can NOT restart with it, its usage is limited. It is the quickest method as you can create a case for it directly from ``cime/scripts/create_newcase``. Although you can't restart, running a single point is very fast, and you can run for long simulation times even without restarts. -Next, ``CLM_USRDAT_NAME`` is the best way to setup cases quickly where you have to create your own datasets (see :ref:`running-single-point-datasets`). With this method you don't have to change DATM or add files to the XML database -- but you have to follow a strict naming convention for files. However, once the files are named and in the proper location, you can easily setup new cases that use these datasets. This is good for treating all the required datasets as a "group" and for a particular model version. For advanced CLM developers who need to track dataset changes with different model versions you would be best off adding these datasets as supported datasets with the "normal supported datasets" method. +Next, ``CLM_USRDAT_NAME`` using ``subset_data`` is the best way to set up cases quickly, as you have a simple tool to create your own datasets (see :ref:`single_point_subset_data`). With this method you don't have to change DATM or add files to the XML database. ``subset_data`` will create a usermod directory where you can store your files and the files needed to directly run a case. Finally, if you also have meteorology data that you want to force your CLM simulations with, you'll need to set up cases as described in :ref:`creating-your-own-singlepoint-dataset`. You'll need to create CLM datasets according to ``CLM_USRDAT_NAME``. You may also need to modify DATM to use your forcing data. And you'll need to change your forcing data to be in a format that DATM can use. diff --git a/python/ctsm/config_utils.py b/python/ctsm/config_utils.py index bd53825f14..872dbe646f 100644 --- a/python/ctsm/config_utils.py +++ b/python/ctsm/config_utils.py @@ -25,17 +25,15 @@ def lon_range_0_to_360(lon_in): Restrict longitude to 0 to 360 when given as -180 to 180. """ if -180 <= lon_in < 0: - lon_out = lon_in % 360 - logger.info( - "Resetting longitude from %s to %s to keep in the range " " 0 to 360", - str(lon_in), - str(lon_out), + raise NotImplementedError( + "A negative longitude suggests you input longitudes in the range [-180, 0)---" + "i.e., centered around the Prime Meridian. This code requires longitudes in the " + "range [0, 360)---i.e., starting at the International Date Line." 
) - elif 0 <= lon_in <= 360 or lon_in is None: - lon_out = lon_in - else: + if not (0 <= lon_in <= 360 or lon_in is None): errmsg = "lon_in needs to be in the range 0 to 360" abort(errmsg) + lon_out = lon_in return lon_out diff --git a/python/ctsm/site_and_regional/single_point_case.py b/python/ctsm/site_and_regional/single_point_case.py index 2c2aebad52..ad64234391 100644 --- a/python/ctsm/site_and_regional/single_point_case.py +++ b/python/ctsm/site_and_regional/single_point_case.py @@ -608,7 +608,7 @@ def extract_datm_at(self, file_in, file_out): f_in.close() f_out.close() - def write_shell_commands(self, file): + def write_shell_commands(self, file, datm_syr, datm_eyr): """ writes out xml commands to a file (i.e. shell_commands) for single-point runs """ @@ -619,6 +619,10 @@ def write_shell_commands(self, file): self.write_to_file("./xmlchange PTS_LON={}".format(str(self.plon)), nl_file) self.write_to_file("./xmlchange PTS_LAT={}".format(str(self.plat)), nl_file) self.write_to_file("./xmlchange MPILIB=mpi-serial", nl_file) + if self.create_datm: + self.write_to_file(f"./xmlchange DATM_YR_ALIGN={datm_syr}", nl_file) + self.write_to_file(f"./xmlchange DATM_YR_START={datm_syr}", nl_file) + self.write_to_file(f"./xmlchange DATM_YR_END={datm_eyr}", nl_file) def write_datm_streams_lines(self, streamname, datmfiles, file): """ diff --git a/python/ctsm/subset_data.py b/python/ctsm/subset_data.py index ba4212eff1..ea0d6744c9 100644 --- a/python/ctsm/subset_data.py +++ b/python/ctsm/subset_data.py @@ -215,7 +215,11 @@ def get_parser(): ) rg_parser.add_argument( "--lon1", - help="Region westernmost longitude. [default: %(default)s]", + help=( + "Region westernmost longitude. Must be in [0, 360) format: i.e., starting at the" + " International Date Line rather than centered on the Prime Meridian. [default:" + " %(default)s]" + ), action="store", dest="lon1", required=False, @@ -224,7 +228,11 @@ def get_parser(): ) rg_parser.add_argument( "--lon2", - help="Region easternmost longitude. [default: %(default)s]", + help=( + "Region easternmost longitude. Must be in [0, 360) format: i.e., starting at the" + " International Date Line rather than centered on the Prime Meridian. 
[default:" + " %(default)s]" + ), action="store", dest="lon2", required=False, @@ -456,7 +464,8 @@ def check_args(args): """\ \n ------------------------------------ \n --surf-year option is NOT set to 1850 and the --create-landuse option - \n is selected which requires it to be 1850 + \n is selected which requires it to be 1850 (see + https://github.com/ESCOMP/CTSM/issues/2018) """ ) raise argparse.ArgumentError(None, err_msg) @@ -505,6 +514,17 @@ def check_args(args): ) raise argparse.ArgumentError(None, err_msg) + if args.run_type == "region" and args.create_datm: + err_msg = textwrap.dedent( + """\ + \n ------------------------------------ + \nERROR: For regional cases, you can not subset datm data + \n (see https://github.com/ESCOMP/CTSM/issues/2110) + \n but you can just use the global data instead + """ + ) + raise NotImplementedError(None, err_msg) + def setup_user_mods(user_mods_dir, cesmroot): """ @@ -691,7 +711,8 @@ def subset_point(args, file_dict: dict): # -- Write shell commands if single_point.create_user_mods: - single_point.write_shell_commands(os.path.join(args.user_mods_dir, "shell_commands")) + shell_commands_file = os.path.join(args.user_mods_dir, "shell_commands") + single_point.write_shell_commands(shell_commands_file, args.datm_syr, args.datm_eyr) logger.info("Successfully ran script for single point.") diff --git a/python/ctsm/test/test_unit_args_utils.py b/python/ctsm/test/test_unit_args_utils.py index 3a31b25224..2328e17d91 100755 --- a/python/ctsm/test/test_unit_args_utils.py +++ b/python/ctsm/test/test_unit_args_utils.py @@ -18,6 +18,7 @@ # pylint: disable=wrong-import-position from ctsm.args_utils import plon_type, plat_type from ctsm import unit_testing +from ctsm.test.test_unit_utils import wrong_lon_type_error_regex # pylint: disable=invalid-name @@ -40,8 +41,13 @@ def test_plonType_negative(self): """ Test of negative plon between -180 and 0 """ - result = plon_type(-30) - self.assertEqual(result, 330.0) + # When CTSM Issue #3001 is resolved, this assertRaisesRegex block should be deleted and the + # rest of this test uncommented + with self.assertRaisesRegex(NotImplementedError, wrong_lon_type_error_regex): + plon_type(-30) + + # result = plon_type(-30) + # self.assertEqual(result, 330.0) # -- > 360 def test_plonType_outOfBounds_positive(self): @@ -64,8 +70,13 @@ def test_plonType_negative_180(self): """ Test for when plon values are -180 """ - result = plon_type(-180) - self.assertEqual(result, 180.0) + # When CTSM Issue #3001 is resolved, this assertRaisesRegex block should be deleted and the + # rest of this test uncommented + with self.assertRaisesRegex(NotImplementedError, wrong_lon_type_error_regex): + plon_type(-180) + + # result = plon_type(-180) + # self.assertEqual(result, 180.0) # -- = 0 def test_plonType_zero(self): diff --git a/python/ctsm/test/test_unit_config_utils.py b/python/ctsm/test/test_unit_config_utils.py index c9ee23bac3..e45443d952 100644 --- a/python/ctsm/test/test_unit_config_utils.py +++ b/python/ctsm/test/test_unit_config_utils.py @@ -9,6 +9,7 @@ from ctsm import unit_testing from ctsm.config_utils import lon_range_0_to_360, get_config_value_or_array +from ctsm.test.test_unit_utils import wrong_lon_type_error_regex # Allow test names that pylint doesn't like; otherwise hard to make them # readable @@ -32,14 +33,26 @@ def setUp(self): def test_negative_lon(self): """Test lon_range_0_to_360 for a negative longitude""" lon = -180.0 - lon_new = lon_range_0_to_360(lon) - self.assertEqual(lon_new, 180.0, "lon not as expected") + + # 
When CTSM Issue #3001 is resolved, this assertRaisesRegex block should be deleted and the + # rest of this test uncommented + with self.assertRaisesRegex(NotImplementedError, wrong_lon_type_error_regex): + lon_range_0_to_360(lon) + + # lon_new = lon_range_0_to_360(lon) + # self.assertEqual(lon_new, 180.0, "lon not as expected") def test_negative2_lon(self): """Test lon_range_0_to_360 for a negative longitude""" lon = -5.0 - lon_new = lon_range_0_to_360(lon) - self.assertEqual(lon_new, 355.0, "lon not as expected") + + # When CTSM Issue #3001 is resolved, this assertRaisesRegex block should be deleted and the + # rest of this test uncommented + with self.assertRaisesRegex(NotImplementedError, wrong_lon_type_error_regex): + lon_range_0_to_360(lon) + + # lon_new = lon_range_0_to_360(lon) + # self.assertEqual(lon_new, 355.0, "lon not as expected") def test_regular_lon(self): """Test lon_range_0_to_360 for a regular longitude""" diff --git a/python/ctsm/test/test_unit_modify_fsurdat.py b/python/ctsm/test/test_unit_modify_fsurdat.py index b796cd940d..3220ba2569 100755 --- a/python/ctsm/test/test_unit_modify_fsurdat.py +++ b/python/ctsm/test/test_unit_modify_fsurdat.py @@ -12,6 +12,7 @@ from ctsm import unit_testing from ctsm.config_utils import lon_range_0_to_360 from ctsm.modify_input_files.modify_fsurdat import ModifyFsurdat +from ctsm.test.test_unit_utils import wrong_lon_type_error_regex # Allow test names that pylint doesn't like; otherwise hard to make them # readable @@ -171,36 +172,42 @@ def test_getNotRectangle_lon1leLon2Lat1gtLat2(self): # get cols, rows also min_lon = -3 # expects min_lon < max_lon min_lat = -2 # expects min_lat < max_lat - longxy, latixy, cols, rows = self._get_longxy_latixy( - _min_lon=min_lon, _max_lon=6, _min_lat=min_lat, _max_lat=5 - ) - - # get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2 - # I have chosen the lon/lat ranges to match their corresponding index - # values to keep this simple (see usage below) - lon_1 = 0 - lon_2 = 4 # lon_1 < lon_2 - lat_1 = 4 - lat_2 = 0 # lat_1 > lat_2 - rectangle = ModifyFsurdat._get_rectangle( - lon_1=lon_1, - lon_2=lon_2, - lat_1=lat_1, - lat_2=lat_2, - longxy=longxy, - latixy=latixy, - ) - not_rectangle = np.logical_not(rectangle) - compare = np.ones((rows, cols)) - # assert this to confirm intuitive understanding of these matrices - self.assertEqual(np.size(not_rectangle), np.size(compare)) - # Hardwire where I expect not_rectangle to be False (0) - # I have chosen the lon/lat ranges to match their corresponding index - # values to keep this simple - compare[: lat_2 - min_lat + 1, lon_1 - min_lon : lon_2 - min_lon + 1] = 0 - compare[lat_1 - min_lat :, lon_1 - min_lon : lon_2 - min_lon + 1] = 0 - np.testing.assert_array_equal(not_rectangle, compare) + # When CTSM Issue #3001 is resolved, this assertRaisesRegex block should be deleted and the + # rest of this test uncommented + with self.assertRaisesRegex(NotImplementedError, wrong_lon_type_error_regex): + self._get_longxy_latixy(_min_lon=min_lon, _max_lon=6, _min_lat=min_lat, _max_lat=5) + + # longxy, latixy, cols, rows = self._get_longxy_latixy( + # _min_lon=min_lon, _max_lon=6, _min_lat=min_lat, _max_lat=5 + # ) + + # # get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2 + # # I have chosen the lon/lat ranges to match their corresponding index + # # values to keep this simple (see usage below) + # lon_1 = 0 + # lon_2 = 4 # lon_1 < lon_2 + # lat_1 = 4 + # lat_2 = 0 # lat_1 > lat_2 + # rectangle = ModifyFsurdat._get_rectangle( + # lon_1=lon_1, + # 
lon_2=lon_2, + # lat_1=lat_1, + # lat_2=lat_2, + # longxy=longxy, + # latixy=latixy, + # ) + # not_rectangle = np.logical_not(rectangle) + # compare = np.ones((rows, cols)) + # # assert this to confirm intuitive understanding of these matrices + # self.assertEqual(np.size(not_rectangle), np.size(compare)) + + # # Hardwire where I expect not_rectangle to be False (0) + # # I have chosen the lon/lat ranges to match their corresponding index + # # values to keep this simple + # compare[: lat_2 - min_lat + 1, lon_1 - min_lon : lon_2 - min_lon + 1] = 0 + # compare[lat_1 - min_lat :, lon_1 - min_lon : lon_2 - min_lon + 1] = 0 + # np.testing.assert_array_equal(not_rectangle, compare) def test_getNotRectangle_lon1gtLon2Lat1leLat2(self): """ @@ -261,38 +268,43 @@ def test_getNotRectangle_lon1gtLon2Lat1gtLat2(self): # get cols, rows also min_lon = -8 # expects min_lon < max_lon min_lat = -9 # expects min_lat < max_lat - longxy, latixy, cols, rows = self._get_longxy_latixy( - _min_lon=min_lon, _max_lon=5, _min_lat=min_lat, _max_lat=6 - ) - # get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2 - # I have chosen the lon/lat ranges to match their corresponding index - # values to keep this simple (see usage below) - lon_1 = -1 - lon_2 = -6 # lon_1 > lon_2 - lat_1 = 0 - lat_2 = -3 # lat_1 > lat_2 - rectangle = ModifyFsurdat._get_rectangle( - lon_1=lon_1, - lon_2=lon_2, - lat_1=lat_1, - lat_2=lat_2, - longxy=longxy, - latixy=latixy, - ) - not_rectangle = np.logical_not(rectangle) - compare = np.ones((rows, cols)) - # assert this to confirm intuitive understanding of these matrices - self.assertEqual(np.size(not_rectangle), np.size(compare)) - - # Hardwire where I expect not_rectangle to be False (0) - # I have chosen the lon/lat ranges to match their corresponding index - # values to keep this simple - compare[: lat_2 - min_lat + 1, : lon_2 - min_lon + 1] = 0 - compare[: lat_2 - min_lat + 1, lon_1 - min_lon :] = 0 - compare[lat_1 - min_lat :, : lon_2 - min_lon + 1] = 0 - compare[lat_1 - min_lat :, lon_1 - min_lon :] = 0 - np.testing.assert_array_equal(not_rectangle, compare) + # When CTSM Issue #3001 is resolved, this assertRaisesRegex block should be deleted and the + # rest of this test uncommented + with self.assertRaisesRegex(NotImplementedError, wrong_lon_type_error_regex): + self._get_longxy_latixy(_min_lon=min_lon, _max_lon=5, _min_lat=min_lat, _max_lat=6) + + # longxy, latixy, cols, rows = self._get_longxy_latixy( + # _min_lon=min_lon, _max_lon=5, _min_lat=min_lat, _max_lat=6 + # ) + # # get not_rectangle from user-defined lon_1, lon_2, lat_1, lat_2 + # # I have chosen the lon/lat ranges to match their corresponding index + # # values to keep this simple (see usage below) + # lon_1 = -1 + # lon_2 = -6 # lon_1 > lon_2 + # lat_1 = 0 + # lat_2 = -3 # lat_1 > lat_2 + # rectangle = ModifyFsurdat._get_rectangle( + # lon_1=lon_1, + # lon_2=lon_2, + # lat_1=lat_1, + # lat_2=lat_2, + # longxy=longxy, + # latixy=latixy, + # ) + # not_rectangle = np.logical_not(rectangle) + # compare = np.ones((rows, cols)) + # # assert this to confirm intuitive understanding of these matrices + # self.assertEqual(np.size(not_rectangle), np.size(compare)) + + # # Hardwire where I expect not_rectangle to be False (0) + # # I have chosen the lon/lat ranges to match their corresponding index + # # values to keep this simple + # compare[: lat_2 - min_lat + 1, : lon_2 - min_lon + 1] = 0 + # compare[: lat_2 - min_lat + 1, lon_1 - min_lon :] = 0 + # compare[lat_1 - min_lat :, : lon_2 - min_lon + 1] = 0 + # compare[lat_1 
- min_lat :, lon_1 - min_lon :] = 0 + # np.testing.assert_array_equal(not_rectangle, compare) def test_getNotRectangle_lonsStraddle0deg(self): """ diff --git a/python/ctsm/test/test_unit_subset_data.py b/python/ctsm/test/test_unit_subset_data.py index a918fb35f0..a089a11a90 100755 --- a/python/ctsm/test/test_unit_subset_data.py +++ b/python/ctsm/test/test_unit_subset_data.py @@ -20,6 +20,7 @@ from ctsm import unit_testing from ctsm.subset_data import get_parser, setup_files, check_args from ctsm.path_utils import path_to_ctsm_root +from ctsm.test.test_unit_utils import wrong_lon_type_error_regex # pylint: disable=invalid-name @@ -235,6 +236,22 @@ def test_create_mesh_without_domain(self): ): check_args(self.args) + # When CTSM issue #2110 is resolved, this test should be removed. + def test_subset_region_errors_if_datm(self): + """ + Test that you can't run subset_data for a region with --create-datm + """ + sys.argv = [ + "subset_data", + "region", + "--create-datm", + ] + self.args = self.parser.parse_args() + with self.assertRaisesRegex( + NotImplementedError, "For regional cases, you can not subset datm data" + ): + check_args(self.args) + def test_complex_option_works(self): """ Test that check_args won't flag a set of complex options that is valid @@ -252,13 +269,36 @@ def test_complex_option_works(self): "1850", "--create-mesh", "--create-domain", - "--create-datm", + # "--create-datm", # Uncomment this when CTSM issue #2110 is resolved "--verbose", "--crop", ] self.args = self.parser.parse_args() check_args(self.args) + # When CTSM issue #3001 is fixed, this test should be replaced with one that checks for correct + # conversion of longitudes specified in the [-180, 180) format. + def test_negative_lon_errors(self): + """ + Test that a negative longitude results in a descriptive error + """ + sys.argv = [ + "subset_data", + "region", + "--create-domain", + "--verbose", + "--lat1", + "0", + "--lat2", + "40", + "--lon1", + "-20", + "--lon2", + "40", + ] + with self.assertRaisesRegex(NotImplementedError, wrong_lon_type_error_regex): + self.args = self.parser.parse_args() + if __name__ == "__main__": unit_testing.setup_for_tests() diff --git a/python/ctsm/test/test_unit_utils.py b/python/ctsm/test/test_unit_utils.py index aed43cfede..4ed8019792 100755 --- a/python/ctsm/test/test_unit_utils.py +++ b/python/ctsm/test/test_unit_utils.py @@ -16,6 +16,9 @@ # to make readable unit test names # pylint: disable=invalid-name +# When CTSM Issue #3001 is resolved, this should be deleted +wrong_lon_type_error_regex = r"\[-180, 0\).*\[0, 360\)" + class TestUtilsFillTemplateFile(unittest.TestCase): """Tests of utils: fill_template_file""" @@ -64,16 +67,28 @@ def test_lonRange0To360_lonIsNeg180(self): Tests that negative inputs to lon_range_0_to_360 get 360 added to them """ inval = -180 - result = lon_range_0_to_360(inval) - self.assertEqual(result, inval + 360) + + # When CTSM Issue #3001 is resolved, this assertRaisesRegex block should be deleted and the + # rest of this test uncommented + with self.assertRaisesRegex(NotImplementedError, wrong_lon_type_error_regex): + lon_range_0_to_360(inval) + + # result = lon_range_0_to_360(inval) + # self.assertEqual(result, inval + 360) def test_lonRange0To360_lonIsNegGreaterThan1(self): """ Tests that negative inputs to lon_range_0_to_360 get 360 added to them """ inval = -0.001 - result = lon_range_0_to_360(inval) - self.assertEqual(result, inval + 360) + + # When CTSM Issue #3001 is resolved, this assertRaisesRegex block should be deleted and the + # rest 
of this test uncommented + with self.assertRaisesRegex(NotImplementedError, wrong_lon_type_error_regex): + lon_range_0_to_360(inval) + + # result = lon_range_0_to_360(inval) + # self.assertEqual(result, inval + 360) def test_lonRange0To360_lonIs0(self): """ diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 7af324f918..9707af4f0b 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -5,7 +5,7 @@ include(CIME_initial_setup) #list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../tools/mksurfdata_esmf/cmake") list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../share/cmake") -list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../component/cmeps/cmake") +list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_SOURCE_DIR}/../components/cmeps/cmake") project(clm_tests Fortran C) @@ -14,10 +14,26 @@ include(CIME_utils) set(CLM_ROOT "..") # find needed external packages -# This is where ESMF could be asked for, but it's already included in the share build brought in below # NetCDF is required -- because PIO and NetCDF are required by the standard default ESMF libraries find_package(NetCDF 4.7.4 REQUIRED Fortran) +# The following - for finding ESMF - is copied from the share CMakeLists.txt +if (DEFINED ENV{ESMF_ROOT}) + list(APPEND CMAKE_MODULE_PATH $ENV{ESMF_ROOT}/cmake) +endif() +find_package(ESMF REQUIRED) +# This adds include directories needed for ESMF +set(CMAKE_Fortran_FLAGS "${CMAKE_Fortran_FLAGS} ${ESMF_F90COMPILEPATHS} ") +# This (which is *not* done in the share CMakeLists.txt) adds all directories and +# libraries needed when linking ESMF, including any dependencies of ESMF. (But note that +# this does *not* include the "-lesmf" itself). In particular, note that this includes any +# link flags needed to link against PIO, which is needed on some systems (including +# derecho); bringing in these PIO-related link flags via this ESMF mechanism allows us to +# avoid explicitly including PIO as a link library, which wouldn't work on systems where +# there is no separate PIO library and instead ESMF is built with its internal PIO +# library. +link_libraries(${ESMF_INTERFACE_LINK_LIBRARIES}) + # Add source directories from other share code (csm_share, etc.). This should be # done first, so that in case of name collisions, the CLM versions take # precedence (when there are two files with the same name, the one added later @@ -45,29 +61,32 @@ add_subdirectory(${CLM_ROOT}/src/self_tests clm_self_tests) add_subdirectory(unit_test_stubs) add_subdirectory(unit_test_shr) -# Remove shr_mpi_mod from share_sources. -# This is needed because we want to use the mock shr_mpi_mod in place of the real one +# Remove some things from share_sources # # TODO: this should be moved into a general-purpose function in Sourcelist_utils. -# Then this block of code could be replaced with a single call, like: +# Then each removal could be replaced with a single call, like: # remove_source_file(${share_sources} "shr_mpi_mod.F90") foreach (sourcefile ${share_sources}) + # Remove shr_mpi_mod from share_sources. + # This is needed because we want to use the mock shr_mpi_mod in place of the real one string(REGEX MATCH "shr_mpi_mod.F90" match_found ${sourcefile}) if(match_found) list(REMOVE_ITEM share_sources ${sourcefile}) endif() -endforeach() -# Bring in PIO, jsut because it's needed for the default ESMF library and included in other submodules like share and cmeps -if (DEFINED PIO) - set(PIO_PATH ${PIO}) -else() - set(PIO_PATH $ENV{PIO}) -endif() + # Remove shr_pio_mod from share_sources. 
This is needed to avoid an explicit dependency + # on PIO. This removal is needed on some systems but not on others: the unit test build + # works without this removal on a Mac with a pre-built PIO library, but failed (with + # error message, "Cannot open module file 'pio.mod'") on a Mac without a pre-built PIO + # (where ESMF was built with its internal PIO). + string(REGEX MATCH "shr_pio_mod.F90" match_found ${sourcefile}) + if(match_found) + list(REMOVE_ITEM share_sources ${sourcefile}) + endif() +endforeach() # Build libraries containing stuff needed for the unit tests. # Eventually, these add_library calls should probably be distributed into the correct location, rather than being in this top-level CMakeLists.txt file. -# This line of bringing in the share library also brings in ESMF and PIO add_library(csm_share ${share_sources} ${drv_sources_needed}) declare_generated_dependencies(csm_share "${share_genf90_sources}") add_library(clm ${clm_sources}) @@ -77,24 +96,13 @@ add_dependencies(clm csm_share esmf) # We need to look for header files here, in order to pick up shr_assert.h include_directories(${CLM_ROOT}/share/include) - -# PIO2 library to the include and the linking step -add_compile_definitions(PIO2) - -add_library(pioc STATIC IMPORTED) -add_library(piof STATIC IMPORTED) -set_property(TARGET pioc PROPERTY IMPORTED_LOCATION $ENV{PIO}/lib/libpioc.so) -set_property(TARGET piof PROPERTY IMPORTED_LOCATION $ENV{PIO}/lib/libpiof.so) - # Tell cmake to look for libraries & mod files here, because this is where we built libraries include_directories(${CMAKE_CURRENT_BINARY_DIR}) -include_directories (${ESMF_F90COMPILEPATHS}) -include_directories ($ENV{PIO}/include) include_directories (${NETCDF}/include) # Directories and libraries to include in the link step link_directories(${CMAKE_CURRENT_BINARY_DIR}) -link_libraries( pioc piof netcdf esmf ) +link_libraries( netcdf esmf ) # Add the test directories # Note: it's possible that these could be added by each source directory that diff --git a/tools/mksurfdata_esmf/src/mkinputMod.F90 b/tools/mksurfdata_esmf/src/mkinputMod.F90 index 9c8564df03..3523588ea9 100644 --- a/tools/mksurfdata_esmf/src/mkinputMod.F90 +++ b/tools/mksurfdata_esmf/src/mkinputMod.F90 @@ -342,6 +342,18 @@ subroutine check_namelist_input() call shr_sys_abort('nglcec must be at least 1') end if + ! Reorder the user's mksrf_fgrid_mesh_nx and mksrf_fgrid_mesh_ny for 1D + ! cases if they set model-mesh-nx = 1 instead of model-mesh-ny = 1. + ! This way the follow-up if-statement works. + if (mksrf_fgrid_mesh_nx == 1) then + mksrf_fgrid_mesh_nx = mksrf_fgrid_mesh_ny + mksrf_fgrid_mesh_ny = 1 + if (root_task) then + write(ndiag,'(a)') 'WARNING: The code reversed your mksrf_fgrid_mesh_nx and mksrf_fgrid_mesh_ny to ' + write(ndiag,*) 'the order expected by the code: ', mksrf_fgrid_mesh_nx, mksrf_fgrid_mesh_ny + end if + end if + if (mksrf_fgrid_mesh_ny == 1) then outnc_1d = .true. outnc_dims = 1
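
Regarding the longitude handling in ``python/ctsm/config_utils.py`` above: the commented-out unit tests earlier in this diff encode the conversion behavior expected once CTSM Issue #3001 is resolved (e.g., -180 maps to 180, -30 to 330, -5 to 355). A minimal sketch of that eventual conversion, consistent with those test expectations but not part of this changeset (the function name is hypothetical):

.. code:: python

   def lon_180_to_360(lon_in):
       """Convert a longitude in [-180, 180] to the [0, 360] range.

       Sketch of the behavior the commented-out tests expect once CTSM
       Issue #3001 is resolved; not the current CTSM implementation.
       """
       if lon_in is None:
           # lon_range_0_to_360 passes None through unchanged
           return lon_in
       if -180 <= lon_in < 0:
           # Matches the test expectations: -180 -> 180, -30 -> 330, -5 -> 355
           return lon_in + 360
       if 0 <= lon_in <= 360:
           return lon_in
       raise ValueError("lon_in needs to be in the range -180 to 360")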