diff --git a/.gitignore b/.gitignore
index d4b7d998..22189f42 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,6 +7,11 @@ __pycache__/
*.so
*.nc
*.xml
+
+# CMEMS credential file
+CMEMS_cred.py
+motu_config.ini
+
# Distribution / packaging
.Python
build/
@@ -28,6 +33,8 @@ wheels/
MANIFEST
outputs
.DS_Store
+DATA
+SSW_FVCOM
# PyInstaller
# Usually these files are written by a python script from a template
diff --git a/README.rst b/README.rst
index af36ede7..92c4b43f 100644
--- a/README.rst
+++ b/README.rst
@@ -1,10 +1,11 @@
PyNEMO
======
-To be updated soon. This work springboards from the `PyNEMO Project `_.
+Full documentation here.
What is this repository for?
----------------------------
+PyNEMO boundary generation toolbox for NEMO ocean model.
How do I get set up?
--------------------
@@ -20,16 +21,16 @@ Steps to take to install PyNEMO, creating a specific conda virtual environment i
- Install Conda, either Anaconda or Miniconda (outside scope of this readme)
- Create conda environment for PyNEMO::
- $ cd to/PyNEMO/directory
- $ conda env create -f environment_pynemo.yml
+ $ cd PyNEMO
+ $ conda env create -f pynemo_37.yml
- Activate the new virtual environment::
- $ source activate pynemo_env
+ $ source activate pynemo3
-- Install Jave JRE (outside scope of this readme) and link libjvm.dylib to LD_LIBRARY_PATH variable::
+- Install Java JDK (outside scope of this readme) and link Java Home to conda environment::
- $ export LD_LIBRARY_PATH=/path/to/java/library/folder/containing/libjvm.dylib:$LD_LIBARY_PATH # see notes below
+ $ export JAVA_HOME=/Library/Java/JavaVirtualMachines/jdk-13.0.2.jdk/Contents/Home # see notes below
- Install PyNEMO::
@@ -39,45 +40,54 @@ Steps to take to install PyNEMO, creating a specific conda virtual environment i
This should result in PyNEMO being installed in the virtual environment, and can be checked by entering::
- $ pynemo -v
+ $ pynemo -h
Resulting in a help usage prompt::
- $ usage: pynemo -g -s
+ $ usage: pynemo [-g] -s -d
+ -g (optional) will open settings editor before extracting the data
+ -s file to use
+ -d (optional) will download CMEMS data using provided bdy file
The virtual environment can be deactivated to return you to the normal prompt by typing::
-$ conda deactivate
-To reactivate, the following needs to be typed::
+ $ conda deactivate
- $ source activate pynemo_env
+To reactivate, the following needs to be typed::
+ $ source activate pynemo3
To use PyNEMO, the following command is entered: (the example will run an benchmarking test)::
$ pynemo -s /path/to/namelist/file (e.g. PyNEMO/inputs/namelist_remote.bdy)
-**Additional NOTES**
+Other commands include -d which downloads the specified CMEMS data in the namelist bdy file.::
-For Macbook Pro 2015, macOS Mojave and Java SDK 13 and JRE 8 the following path for the libjvm library should be correct::
+ $ pynemo -d /PyNEMO/inputs/namelist_cmems.bdy
- /Library/Java/JavaVirtualMachines/jdk-13.0.1.jdk/Contents/Home/lib/server
+To use the CMEMS download service an account needs to be created at http://marine.copernicus.eu/services-portfolio/access-to-products/
+Once created the user name and password need to be added to PyNEMO. To do this a file with the name CMEMS_cred.py in the utils folder
+needs to be created with two defined strings one called user and the other called pwd to define the user name and password.::
-Resulting in the following command: (this will be different for different java versions and operating systems)::
+ $ touch pynemo/utils/CMEMS_cred.py
+ $ vim pynemo/utils/CMEMS_cred.py
+ press i
+ user='username'
+ pwd='password'
+ press esc and then :q
- $ export LD_LIBRARY_PATH=/Library/Java/JavaVirtualMachines/jdk-13.0.1.jdk/Contents/Home/lib/server:$LD_LIBRARY_PATH
+**IMPORTANT** This will create a py file in the right place with the parameters required to download CMEMS, the password is stored as plain text so please
+do not reuse any existing password!
-For an iMac 2013, macOS Catalina and JRE 8 only the followinng path was found to be correct::
-
- /Library/Internet\ Plug-Ins/JavaAppletPlugin.plugin/Contents/Home/lib/server
-
-With the following command being required to set the environment variable::
+PyNEMO creates a log file by default; this provides info, warning and error messages. By default this is called nrct.log and is saved in the directory where pynemo is run from. (usually /PyNEMO)
+New runs are appended onto the end of the log file so it will periodically need to be deleted to reduce the size of the log.
- $ export LD_LIBRARY_PATH=/Library/Internet\ Plug-Ins/JavaAppletPlugin.plugin/Contents/Home/lib/server:$LD_LIBRARY_PATH
+**Additional NOTES**
-The conda environment creation command has not yet been tested. The yml document (can be opened using text editor) gives a list of all the modules and their versions that are required for PyNEMO so a environment can be constructed using this document as reference (or if you use pip!)
+The above path for Java Home was valid for a Macbook Pro 2015 with macOS Catalina and Java SDK 13.0.2
+however for different java versions, operating systems etc this may be different
-**Update** conda environment yaml file has been tested (and works!) on a Macbook Pro 2015 and iMac 2013 running Anaconda 3.7 and Miniconda 3.7 respectively.
+The conda environment yaml file has been tested with miniconda 3.7 and found to install the environment correctly.
Contribution guidelines
-----------------------
@@ -101,6 +111,19 @@ The PyNEMO module can be tested using the bench marking namelist bdy file in the
:width: 800
:alt: Example BDY coords output
+Unit Tests
+-------------------
+
+To test operation of the PyNEMO module, running the PyTest script in the unit tests folder will perform a range of tests on different child grids,
+e.g. checking the interpolation of the source data on to the child grid. To do this the following command is required::
+
+ $ pytest -v pynemo/pynemo_unit_test.py
+
+The results of the test will show if all tests pass or the errors that result from failed tests.
+
+Currently **(26/03/2020)** there are 7 tests that cover checking the interpolation results of different child grids. The input data is generated as part of the
+test and is removed afterwards. The number of tests will be increased in the future to cover more PyNEMO functionality.
+
Who do I talk to?
-----------------
@@ -110,5 +133,6 @@ Who do I talk to?
* Other community or team contact
+ thopri
For more information regarding the use and development of PyNEMO see: [PyNEMO Wiki](https://github.com/jdha/PyNEMO/wiki)
diff --git a/README_markdown.md b/README_markdown.md
deleted file mode 100644
index 3ad308b1..00000000
--- a/README_markdown.md
+++ /dev/null
@@ -1,104 +0,0 @@
-# PyNEMO
-
-To be udated soon. This work springboards from the [PyNEMO](http://pynemo.readthedocs.io/en/latest/index.html) Project.
-
-## What is this repository for? ##
-
-## How do I get set up? ##
-
-Steps to take to install PyNEMO, creating a specific conda virtual environment is highly recommended. [click here for more about virtual enviroments](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html)
-
-- Install Conda, either Anaconda or Miniconda (outside of this readme)
-- Create conda environment for PyNEMO
-```
-$ cd to/PyNEMO/directory
-```
-```
-$ conda env create -f environment_pynemo.yml
-```
-- Activate the new virtual environment
-```
-$ source activate pynemo_env
-```
-- Install Jave JRE (outside this readme) and link libjvm.dylib to LD_LIBRARY_PATH variable
-```
-$ export LD_LIBRARY_PATH=/path/to/java/library/folder/containing/libjvm.dylib:$LD_LIBARY_PATH # see notes below
-```
-- Install Git (outside this readme)
-```
-$ git clone https://github.com/NOC-MSM/PyNEMO.git
-```
-- Install PyNEMO:
-```
-$ cd /location/of/pynemo/repo
-```
-```
-$ python setup.py build
-```
-```
-$ python setup.py install
-```
-
-This should result in PyNEMO being installed in the virtual environment, and can be checked by entering:
-```
-$ pynemo -v
-```
-
-Resulting in a help usage prompt:
-```
-$ usage: pynemo -g -s
-```
-
-The virtual environment can be deactivated to return you to the normal prompt by typing:
-```
-$ conda deactivate
-```
-
-To reactivate, the following needs to be typed
-```
-$ source activate pynemo_env
-```
-
-To use PyNEMO, the following command is entered: (the example will run an benchmarking test)
-```
-$ pynemo -s /path/to/namelist/file (e.g. PyNEMO/inputs/namelist_remote.bdy)
-```
-
-**Additional NOTES**
-
-for MacOs and Java SDK 13 and JRE 8 the following path should be correct - /Library/Java/JavaVirtualMachines/jdk-13.0.1.jdk/Contents/Home/lib/server
-
-Resulting in the following command: (this will be different for different java versions and operating systems)
-```
-$ export LD_LIBRARY_PATH=/Library/Java/JavaVirtualMachines/jdk-13.0.1.jdk/Contents/Home/lib/server:$LD_LIBRARY_PATH
-```
-The conda environment creation command has not yet been tested. The yml document (can be opened using text editor) gives a list of all the modules and their versions that are required for PyNEMO so a environment can be constructed using this document as reference (or if you use pip!)
-
-## Contribution guidelines ##
-
-## Bench Marking Tests ##
-
-The PyNEMO module can be tested using the bench marking namelist bdy file in the inputs folder. To check the outputs of the benchmark test, these can be visualised using the plotting script within the test_scripts folder. The following steps are required,
-
-- Run PyNEMO using the namelist file in the inputs folder (namelist_remote.bdy) e.g.
-
- - $ pynemo -s /path/to/namelist/file
-
-- This will create two output files coordinates.bdy.nc and NNA_R12_bdyT_y1979)m11.nc in an outputs folder
-
-- To check the coordinates.bdy.nc has the correct boundary points, the script bdy_coords_plot.py will plot the domain boundaries and shown the different locations of the rim width (increasing number should go inwards) This script is located in the test_scripts folder.
-
-- The result should look like this (if using the current benchmark data)
-
-
-
-## Who do I talk to? ##
-
-* Repo owner or admin
-
-jdha
-
-* Other community or team contact
-
-
-For more information regarding the use and development of PyNEMO see: [PyNEMO Wiki](https://github.com/jdha/PyNEMO/wiki)
diff --git a/docs/source/CMEMS_downloader_usage.rst b/docs/source/CMEMS_downloader_usage.rst
new file mode 100644
index 00000000..02c6fdac
--- /dev/null
+++ b/docs/source/CMEMS_downloader_usage.rst
@@ -0,0 +1,140 @@
+CMEMS downloader usage
+=======================
+
+**IMPORTANT** The CMEMS downloader has only been tested with the GLOBAL_ANALYSIS_FORECAST_PHY_001_024 model and specifically
+the hourly SSH and U V product. This also has temperature stored within it, but not salinity. Other models and products should work but are
+currently likely to need some changes to the code to cope with different variable names within the data. This will be fixed
+in a later release of PyNEMO that is able to handle different variable and tracer names.
+
+PyNEMO has a CMEMS downloading function incorporated within it, this will download a section of the CMEMS global model (more models to be added)
+'GLOBAL_ANALYSIS_FORECAST_PHY_001_024-TDS' for the defined time period in the namelist file
+
+To use the downloading function, the following command is used::
+
+ $ pynemo -d namelist.bdy
+
+Where the -d flag tells PyNEMO to use the CMEMS downloader and download data as specified in the namelist file. The log file
+that PyNEMO produces provides a log of what the downloader does. The CMEMS MOTU system is prone to disconnects and failure
+so there is download retry and error handling built in. Most of the options required should not need editing and are there for
+future use in case URL's and filenames on CMEMS change.
+
+The options that can be configured are described in further detail below::
+
+ !------------------------------------------------------------------------------
+ ! I/O
+ !------------------------------------------------------------------------------
+ sn_src_dir = '/Users/thopri/Projects/PyNEMO/inputs/CMEMS.ncml' ! src_files/'
+ sn_dst_dir = '/Users/thopri/Projects/PyNEMO/outputs'
+
+ sn_fn = 'NNA_R12' ! prefix for output files
+ nn_fv = -1e20 ! set fill value for output files
+ nn_src_time_adj = 0 ! src time adjustment
+ sn_dst_metainfo = 'CMEMS example'
+
+ !------------------------------------------------------------------------------
+ ! CMEMS Data Source Configuration
+ !------------------------------------------------------------------------------
+ ln_use_cmems = .true.
+ ln_download_cmems = .true.
+ sn_cmems_dir = '/Users/thopri/Projects/PyNEMO/inputs/' ! where to download CMEMS input files (static and variable)
+ ln_download_static = .true.
+ ln_subset_static = .true.
+ nn_num_retry = 4 ! how many times to retry CMEMS download after non critical errors?
+ !------------------------------------------------------------------------------
+ ! CMEMS MOTU Configuration (for Boundary Data)
+ !------------------------------------------------------------------------------
+ sn_motu_server = 'http://nrt.cmems-du.eu/motu-web/Motu'
+ sn_cmems_config_template = '/Users/thopri/Projects/PyNEMO/pynemo/config/motu_config_template.ini'
+ sn_cmems_config = '/Users/thopri/Projects/PyNEMO/pynemo/config/motu_config.ini'
+ sn_cmems_model = 'GLOBAL_ANALYSIS_FORECAST_PHY_001_024-TDS'
+ sn_cmems_product = 'global-analysis-forecast-phy-001-024'
+ sn_dl_prefix = 'subset'
+ !------------------------------------------------------------------------------
+ ! CMEMS FTP Configuration (for Static Files)
+ !------------------------------------------------------------------------------
+ sn_ftp_server = 'nrt.cmems-du.eu'
+ sn_static_dir = '/Core/GLOBAL_ANALYSIS_FORECAST_PHY_001_024/global-analysis-forecast-phy-001-024-statics'
+ sn_static_filenames = 'GLO-MFC_001_024_coordinates.nc GLO-MFC_001_024_mask_bathy.nc GLO-MFC_001_024_mdt.nc'
+ sn_cdo_loc = '/opt/local/bin/cdo' ! location of cdo executable can be found by running "where cdo"
+ !------------------------------------------------------------------------------
+ ! CMEMS Extent Configuration
+ !------------------------------------------------------------------------------
+ nn_latitude_min = 40
+ nn_latitude_max = 66
+ nn_longitude_min = -22
+ nn_longitude_max = 16
+ nn_depth_min = 0.493
+ nn_depth_max = 5727.918000000001
+
+Some of the options define the behaviour of the downloader, others define locations to save files and others detail models
+and grid files to download. Finally the spatial extent to download is also required.
+
+I/O and NCML file
+-------------------------
+
+The location of the NCML file is listed a string defining the source directory or "sn_src_dir". The output folder is also
+defined here as "sn_dst_dir", **NOTE** if this directory does not exist it will need to be created and given the correct permissions
+for PyNEMO to run properly. The NCML file details the input files to aggregate and what the variable names are. This file
+can be generated using the ncml_generator, with variable names found using the CMEMS catalogue. https://resources.marine.copernicus.eu/?option=com_csw&task=results
+For more information please read the ncml generator page.
+
+**NOTE** A NCML file must be used and it also must use a regular expression. The CMEMS downloader uses this regular expression to determine what grid a
+given variable is part of e.g. temperature and salinity on the T grid. The example CMEMS.ncml file includes: an implementation of how to define
+temperature, SSH and U and V components of ocean currents.
+
+Firstly, the string "sn_fn" defines the prefix for the output files. The number "nn_fv" defines the fill value, and the number
+"nn_src_time_adj" defines the source time adjustment. The rest of the boxes are CMEMS specific.
+
+Data Source Configuration
+--------------------------
+
+The first section defines the CMEMS data source configuration. The boolean "ln_use_cmems" when set to true will use the
+CMEMS downloader function to download the requested data, this is defined in the ncml file which can be generated using the
+NCML generator. Among other things this file defines what data variables to download. This term also changes the variable
+names to CMEMS specific ones e.g. thetao for temperature and so for salinity. This is in contrast to the NEMO specific ones
+such as Votemper and Vosaline. When set to false no download occurs and variable names are kept to NEMO specific.
+
+MOTU Configuration
+-------------------
+
+In the next section when set to true "ln_download_cmems" will download the boundary tracer data, e.g. time series of temperature and salinity.
+When set to false PyNEMO will skip this download. The string "sn_cmems_dir" defines where to save these downloaded files.
+PyNEMO requires grid data, this isn't possible to download using the same method as the tracer data which uses the MOTU
+python client. To get the grid data, an ftp request is made to download the global grids which are then subset to the relevant
+size. The booleans "ln_download_static" and "ln_subset_static" determine this behavior. Finally there is an int named
+"nn_num_retry" this defines the number of times to retry downloading the CMEMS data. The data connections are prone to failure
+so if a non critical error occurs the function will automatically try to redownload. This int defines how many times it will
+try to do this. Typically this static data and subsetting are only required once so these can be set to true for first download
+and then set to false when more time series data is required.
+
+As mentioned previously, the time series boundary data is downloaded using MOTU, this is an efficient and robust web server that
+handles, extracts and transforms oceanographic data. By populating a configuration file, this can be sent to the MOTU server
+which will return the requested data in the requested format. The section CMEMS MOTU configuration sets this up. Most of these
+options should not need changing. The location of the MOTU server for CMEMS is defined here, and the location of the config
+template file and also the location of the config file to submit. The only options that should require changing are the model,
+product and prefix options. These define which CMEMS model and product to download and the prefix is a user defined string to prefix
+the downloads. A catalogue of the CMEMS model and products can be found at https://resources.marine.copernicus.eu/?option=com_csw&task=results
+Currently PyNEMO has only been tested using the physical global forecast model although the downloader should be able to download
+other models and products, it has not been tested and there are known issues with other products that restrict seamless download.
+e.g. the NorthWest Atlantic model is not currently compatible due to differences in how the model variables are stored.
+
+FTP Configuration for Static and Grid files
+--------------------------------------------
+
+The next section CMEMS FTP configuration, defines which FTP server, remote directory and files to download. This should not require
+modification unless CMEMS changes the file structure or names. Note it is important that the filenames are separated by a space
+as this is what PyNEMO is expecting. Finally the location of CDO executable which should be installed to enable subsetting to occur.
+This can be found by running::
+
+ $ where cdo
+
+
+Extent configuration
+---------------------
+
+Finally the last box, this is where the extent to download is configured, it is up to the user to decide but it is suggested this
+is at least 1 degree wider than the destination or child configuration. The depth range to request is also defined here. This information can
+be extracted from the CMEMS catalogue. Once set for a given configuration this will not need to be edited.
+
+
+
diff --git a/docs/source/_static/comparision_fes.png b/docs/source/_static/comparision_fes.png
new file mode 100644
index 00000000..5616ff3b
Binary files /dev/null and b/docs/source/_static/comparision_fes.png differ
diff --git a/docs/source/_static/eg1.png b/docs/source/_static/eg1.png
new file mode 100644
index 00000000..37a6e29b
Binary files /dev/null and b/docs/source/_static/eg1.png differ
diff --git a/docs/source/_static/example_bdy_coords.png b/docs/source/_static/example_bdy_coords.png
new file mode 100644
index 00000000..cd83e138
Binary files /dev/null and b/docs/source/_static/example_bdy_coords.png differ
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 0d69c13f..fafc2ee8 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -1,4 +1,4 @@
-NRCT User Guide
+PyNEMO User Guide
=================
Contents:
@@ -10,6 +10,8 @@ Contents:
installation
usage
ncml_generator_usage
+ tides
+ CMEMS_downloader_usage
examples
troubleshooting
diff --git a/docs/source/installation.rst b/docs/source/installation.rst
index 0f7a7435..7517deb1 100644
--- a/docs/source/installation.rst
+++ b/docs/source/installation.rst
@@ -1,52 +1,126 @@
Installation
============
-This page provides a guide to installing pyNEMO.
+This page provides a guide to installing PyNEMO.
Dependencies
^^^^^^^^^^^^
+anaconda:
+ - basemap=1.2.0
+ - netcdf4=1.5.3
+ - pyqt=5.9.2
+ - scipy=1.2.1
+ - python=3.7.6
+ - pip=20.0.2
+ - pandas=1.0.1
+ - pytest=5.3.5
+ - xarray=0.15.0
-1. Python 2.7 (Not tested with 3.x)
-2. scipy
-3. netCDF4-python
-4. numpy
-5. matplotlib
-6. basemap
-7. thredds_crawler
-8. seawater
-9. pyjnius (optional)
+pip:
+ - idna==2.9
+ - lxml==4.5.0
+ - pyjnius==1.2.1
+ - seawater==3.3.4
+ - thredds-crawler==1.5.4
+ - motuclient==1.8.4
+ - sphinx==3.0.2
+ - sphinx-rtd-theme==0.4.3
-Anaconda
-^^^^^^^^
+How do I install PyNEMO?
+------------------------
-Using conda: pyNEMO supports Win64, OSX and Linux. for other operating systems please build from source.
+Steps to take to install PyNEMO, creating a specific conda virtual environment is highly recommended.
+`click here for more about virtual environments `_
-.. note:: It is recommended to create a seperate virtual environment for pyNEMO.
- Please follow the instructions on doing this at http://www.continuum.io/blog/conda
+- Install Git (outside scope of this guide)
+- Clone PyNEMO repository::
-::
+ $ git clone https://github.com/NOC-MSM/PyNEMO.git
- conda install -c https://conda.anaconda.org/srikanthnagella pynemo
+- Install Conda, either Anaconda or Miniconda (outside scope of this guide)
+- Create conda environment for PyNEMO::
-This will install pynemo and its dependencies. This build is generally outdated as development and
-bug fixes to the source are a regular occurrence. It may be better to install from source until a beta
-release is available.
+ $ cd PyNEMO
+ $ conda env create -f pynemo_37.yml
-From Source
-^^^^^^^^^^^
+- Activate the new virtual environment::
-Installing pyNEMO using other flavours of software or from source. Install all the dependencies and
-download the source code from svn and install.
+ $ source activate pynemo3
-::
+- Install Java JDK (outside scope of this guide) and link Java Home to conda environment::
- svn checkout http://ccpforge.cse.rl.ac.uk/svn/pynemo/trunk/Python/
- python setup.py install
-
-.. note:: If building from source in the Anaconda environment all dependencies can
- be installed using conda apart from thredds_crawler and pyjnius which can
- be installed using the following Anaconda channel:
+ $ export JAVA_HOME=/Library/Java/JavaVirtualMachines/jdk-13.0.2.jdk/Contents/Home # see notes below
+
+ (Update needed here)
+
+**NOTE** this link has to be set every time the environment is activated. It can be automated using the .bashrc file or by
+setting environment hooks in the conda activate and deactivate files.
+
+- Install PyNEMO::
+
+ $ cd /location/of/pynemo/repo
+ $ python setup.py build
+ $ python setup.py install
+
+This should result in PyNEMO being installed in the virtual environment, and can be checked by entering::
+
+ $ pynemo -h
+
+Resulting in a help usage prompt::
+
+ $ usage: pynemo [-g] -s -d
+ -g (optional) will open settings editor before extracting the data
+ -s file to use
+ -d (optional) will download CMEMS data using provided bdy file
+
+The virtual environment can be deactivated to return you to the normal prompt by typing::
+
+ $ conda deactivate
+
+To reactivate, the following needs to be typed::
+
+ $ source activate pynemo3
+
+
+Java Home Environment path
+--------------------------
+
+The above path for Java Home was valid for a Macbook Pro 2015 with macOS Catalina and Java SDK 13.0.2
+however for different java versions, operating systems etc this may be different
+
+The conda environment yaml file has been tested with miniconda 3.7 and found to install the environment correctly.
+
+Bench Marking Tests
+-------------------
+
+The PyNEMO module can be tested using the bench marking namelist bdy file in the inputs folder. To check the outputs of the benchmark test, these can be visualised using the plotting script within the test_scripts folder. The following steps are required,
+
+- Run PyNEMO using the namelist file in the inputs folder (namelist_remote.bdy) e.g.::
+
+ $ pynemo -s /path/to/namelist/file
+
+- This will create two output files coordinates.bdy.nc and NNA_R12_bdyT_y1979)m11.nc in an outputs folder
+
+- To check the coordinates.bdy.nc has the correct boundary points, the script bdy_coords_plot.py will plot the domain boundaries and shown the different locations of the rim width (increasing number should go inwards) This script is located in the test_scripts folder.
+
+- The result should look like this (if using the current benchmark data)
+
+.. image:: /_static/example_bdy_coords.png
+ :width: 800
+ :alt: Example BDY coords output
+
+Unit Tests
+-----------
+
+To test operation of the PyNEMO module, running the PyTest script in the unit tests folder will perform a range of tests on different child grids,
+e.g. checking the interpolation of the source data on to the child grid. To do this the following command is required::
+
+ $ pytest -v pynemo/pynemo_unit_test.py
+
+The results of the test will show if all tests pass or the errors that result from failed tests.
+
+Currently **(26/03/2020)** there are 7 tests that cover checking the interpolation results of different child grids. The input data is generated as part of the
+test and is removed afterwards. The number of tests will be increased in the future to cover more PyNEMO functionality.
+
+For more information regarding the use and development of PyNEMO see: [PyNEMO Wiki](https://github.com/jdha/PyNEMO/wiki)
-::
- conda install -c https://conda.anaconda.org/srikanthnagella thredds_crawler
- conda install -c https://conda.anaconda.org/srikanthnagella pyjnius
diff --git a/docs/source/intro.rst b/docs/source/intro.rst
index 463a8df2..3b6be276 100644
--- a/docs/source/intro.rst
+++ b/docs/source/intro.rst
@@ -18,4 +18,7 @@ algorithm. The idea behind this targetted method is that it provides a generic
method of interpolation for any flavour of ocean model in order to set up a
regional NEMO model configuration. At present (alpha release) the tools do not
contain many options, but those that exist are accessed either through a NEMO style
-namelist or a convient GUI.
+namelist or a convient GUI.
+
+PyNEMO has been updated to include integration with CMEMS data repository and the ability to produce tidal boundaries
+using TPXO or FES2014 as boundary input.
diff --git a/docs/source/tides.rst b/docs/source/tides.rst
new file mode 100644
index 00000000..3d1ff9e7
--- /dev/null
+++ b/docs/source/tides.rst
@@ -0,0 +1,103 @@
+Tidal Boundary Conditions Generation
+====================================
+
+By providing a global tidal model dataset (TPXO and FES are currently supported) PyNEMO can generate boundary conditions for the
+NEMO configuration supplied using the namelist file.
+
+Namelist options
+----------------
+
+To use the namelist needs to be configured with the required options. These are listed below::
+
+ ln_tide = .true. ! =T : produce bdy tidal conditions
+ sn_tide_model = 'fes' ! Name of tidal model (fes|tpxo)
+ clname(1) = 'M2' ! constituent name
+ clname(2) = 'S2'
+ clname(3) = 'O1'
+ clname(4) = 'K1'
+ clname(5) = 'N2'
+ ln_trans = .false. ! interpolate transport rather than velocities
+ ! TPXO file locations
+ sn_tide_grid = './grid_tpxo7.2.nc'
+ sn_tide_h = './h_tpxo7.2.nc'
+ sn_tide_u = './u_tpxo7.2.nc'
+ ! location of FES data
+ sn_tide_fes = './FES/'
+
+these options define the location of the tidal model datasets, note this differs depending on model as TPXO has all harmonic
+constants in one netcdf file whereas FES has three separate netcdf files (one for amplitude two for currents) for each constant. Extra harmonics can be appended
+to the clname(n) list. FES supports 34 constants and TPXO7.2 has 13 to choose from. Other versions of TPXO should work with PyNEMO
+but have not yet been tested. **NOTE** FES dataset filenames must be in the format of constituent then type. e.g.::
+
+ M2_Z.nc (for amplitude)
+ M2_U.nc (for U component of velocity)
+ M2_V.nc (for V component of velocity)
+
+If this is not done, PyNEMO will not recognise the files. TPXO data files are specified directly so these can be any name, although it is best to stick with the default
+names as shown above. So far the tidal model datasets have been downloaded and used locally but could also be stored on a THREDDS server although this has
+not been tested with the global tide models.
+
+Other options include "ln_tide" a boolean that when set to true will generate tidal boundaries. "sn_tide_model" is a string that defines the model to use, currently only
+"fes" or "tpxo" are supported. "ln_trans" is a boolean that when set to true will interpolate transport rather than velocities.
+
+Harmonic Output Checker
+-----------------------
+
+There is an harmonic output checker that can be utilised to check the output of PyNEMO with a reference tide model. So far
+the only supported reference model is FES but TPXO will be added in the future. Any tidal output from PyNEMO can be checked
+(e.g. FES and TPXO). While using the same model used as input to check output doesn't improve accuracy, it does confirm that the
+output is within acceptable/expected limits of the nearest model reference point.
+
+There are differences as PyNEMO interpolates the harmonics and the tidal checker does not, so there can be some difference
+in the values particularly close to coastlines.
+
+The checker can be enabled by editing the following in the relevant bdy file::
+
+ ln_tide_checker = .true. ! run tide checker on PyNEMO tide output
+ sn_ref_model = 'fes' ! which model to check output against (FES only)
+
+The boolean determines if to run the checker or not, this takes place after creating the interpolated harmonics
+and writing them to disk. The string denotes which tide model to use as reference, so far only FES is supported.
+The string denoting model is not strictly needed, by default fes is used.
+
+The checker will output information regarding the checking to the NRCT log, and also write a spreadsheet to the output folder containing any
+exceedance values, the closest reference model value and their locations. Amplitude and phase are checked independently, so both have latitude and longitude
+associated with them. It is also useful to know the amplitude of an exceeded phase to see how much impact it will have so this
+is also written to the spreadsheet. An example output is shown below, as can be seen the majority of the amplitudes, both
+the two amplitude exceedances and the ones associated with the phase exceedances are low (~0.01), so can most likely be ignored.
+There a few phase exceedances that have higher amplitudes (~0.2) which would potentially require further investigation. A common
+reason for such an exceedance is due to coastlines and the relevant point being further away from an FES data point.
+
+Tide Checker Example Output for M2 U currents
+---------------------------------------------
+
+.. figure:: _static/comparision_fes.png
+ :align: center
+
+The actual thresholds for both amplitude and phase are based on the amplitude of the output or reference, this is due to
+different tolerances based on the amplitude. e.g. high amplitudes should have lower percentage differences to the FES reference,
+than lower ones simply due to the absolute amount of the amplitude itself, e.g. a 0.1 m difference for a 1.0 m amplitude is
+acceptable but not for a 0.01 m amplitude. The smaller amplitudes contribute less to the overall tide height so larger percentage
+differences are acceptable. The same also applies to phases, where large amplitude phases have little room for differences but at
+lower amplitudes this is less critical so a higher threshold is tolerated.
+
+The following power functions are used to determine what threshold to apply based on the reference model amplitude.
+
+Amplitude Threshold
+-------------------
+
+.. important:: Percentage Exceedance = 26.933 * Reference Amplitude ^ -0.396
+
+Phases Threshold
+----------------
+
+.. important:: Phase Exceedance = 5.052 * PyNEMO Amplitude ^ -0.60
+
+
+Future work
+-----------
+
+Create options of harmonic constants to request rather than manually specifying a list. These could be based on common requirements
+and/or based on the optimal harmonics to use for a specified time frame.
+
+
diff --git a/docs/source/troubleshooting.rst b/docs/source/troubleshooting.rst
index 769efdbb..c9a98fee 100644
--- a/docs/source/troubleshooting.rst
+++ b/docs/source/troubleshooting.rst
@@ -1,6 +1,9 @@
Troubleshooting
===============
+**Always** check the PyNEMO log file. This is usually saved in the working directory of PyNEMO as nrct.log. It gives helpful information
+which may help to diagnose issues, e.g. ValueErrors that are a result of a THREDDS server being down and unable to provide data files.
+
1. pyNEMO crashing in MacOSX (Yosemite)?
* Downgrade the scipy package to 0.15
@@ -12,3 +15,6 @@ Troubleshooting
3. Getting this error 'Warning: Please make sure pyjnius is installed and jvm.dll/libjvm.so/libjvm.dylib is in the path' ?
* This error is displayed when the application cannot find the java installation on the local machine. please install a java 7.x runtime from http://www.oracle.com/technetwork/java/javase/downloads/jre7-downloads-1880261.html and append the path to the library in the system path. eg. on windows SET PATH="C:\\Program Files (x86)\\Java\\jre1.7\\bin\\client" on Linux in shell export LD_LIBRARY_PATH=/opt/java/jdk1.7.0_45/jre/lib/amd64/server:$LD_LIBRARY_PATH in osx export DYLD_LIBRARY_PATH=/System/Library/Java/JavaVirtualMachines/jdk1.7.0_09.jdk/Contents/Home/jre/lib/server:$DYLD_LIBRARY_PATH
+
+4. Pyjnius error? Socket Timeout? JVM issues? This sometimes happens when requesting tracer boundaries such as temperature and salinity along with
+tidal boundaries. Re-running PyNEMO usually works.
diff --git a/docs/source/usage.rst b/docs/source/usage.rst
index fd1a6c32..0ce72409 100644
--- a/docs/source/usage.rst
+++ b/docs/source/usage.rst
@@ -1,18 +1,33 @@
Usage
=====
-There are two tools available in pyNEMO. They are described in detail below.
-
-pynemo
-------
-
-This command line tool reads a BDY file, extracts boundary data and prepares
-the data for a NEMO simulation. The bdy file is a plain text file containing
-key value pairs. Please look at the sample `namelist.bdy
-`_
-file, which shares common syntax with the NEMO simulation namelist input file.
-
-.. note:: Directory paths in bdy file can be relative or absolute.
- The application picks the relative path from the current working
+There are four tools available in pyNEMO. These are "boundary file generation" where boundary data files are generated from a
+parent grid along the boundary of a child grid. Boundary data can comprise tracers such as temperature and salinity, or
+tidal data from global tide models. Sea surface height and ocean currents are also supported. PyNEMO now has an integrated
+CMEMS repository downloader. Invoking this option will download data of interest (as specified in the NCML file) for a region of interest
+(as specified in the BDY file). PyNEMO uses NCML files to define variable names and data location. This allows multiple netcdf input files
+to appear as one. This is commonly used on THREDDS servers but is also used locally for CMEMS boundary data input. The GUI allows these NCML
+files to be generated. Finally there is a settings editor that allows you to edit the pynemo configuration file (BDY file).
+
+Boundary file generation
+------------------------
+This command line tool reads a BDY file, extracts boundary data and prepares the data for a NEMO simulation. The bdy file
+is a plain text file containing key value pairs. Please look at the sample namelists in the github repository. They are
+stored in the inputs directory.
+
+PyNEMO now also requires an NCML file (Netcdf markup) that defines the variables and remaps their names so that they are
+compatible with PyNEMO. This is most commonly required with CMEMS runs as the variable names are different. In previous versions
+PyNEMO was able to scan a local directory for netcdf files, this is now no longer supported and an NCML file MUST be referenced
+in the bdy file.
+
+.. note:: PyNEMO now requires an NCML file as well as a BDY file to run, this can be adapted from the examples in the inputs
+ folder or generated using the NCML GUI.
+
+There are three examples of ncml files and they all use the same child grid but utilise different data sources. One uses local data,
+the other uses data hosted on a THREDDS server. The last one is configured to download CMEMS data first and then run using the
+downloaded data. The namelist file shares common syntax with the NEMO simulation namelist input file.
+
+.. note:: Directory paths in bdy file can be relative or absolute.
+ The application picks the relative path from the current working
directory.
Syntax for pynemo command is
@@ -25,10 +40,11 @@ For help
::
- > pynemo -h
- > usage: pynemo [-g] -s
+ > pynemo -h
+ > usage: pynemo [-g] -s -d
> -g (optional) will open settings editor before extracting the data
- > -s file to use
+ > -s namelist file to use to generate boundary data
+ > -d namelist file to use to download CMEMS data
Example comamnd
@@ -36,8 +52,43 @@ Example comamnd
> pynemo -g -s namelist.bdy
+CMEMS data download
+-------------------
+To download CMEMS data, the flag -d needs to be specified when running pynemo. This will use the specified namelist file and
+download the relevant CMEMS data. Once successful the same namelist file can be used to generate the boundary conditions by
+running PyNEMO with the -s flag. Example command::
+
+ $ pynemo -d /PyNEMO/inputs/namelist_cmems.bdy
+
+To use the CMEMS download service an account needs to be created at http://marine.copernicus.eu/services-portfolio/access-to-products/
+Once created the user name and password need to be added to PyNEMO. To do this a file with the name CMEMS_cred.py in the utils folder
+needs to be created with two defined strings, one called user and the other called pwd, to define the user name and password.::
+
+ $ touch pynemo/utils/CMEMS_cred.py
+ $ vim pynemo/utils/CMEMS_cred.py
+ press i
+ user='username goes here'
+ pwd='password goes here'
+ press esc and then :wq
+
+**IMPORTANT** This will create a py file in the right place with the parameters required to download CMEMS data; the password is stored as plain text so please
+do not reuse any existing password!
+
+The CMEMS download usage page has more information about how to configure the namelist file.
-pynemo_settings_editor
+GUI NCML Generator
+------------------
+This GUI tool facilitates the creation of a virtual dataset for input into PyNEMO. The virtual dataset is defined using NetCDF Markup Language (NcML ).
+
+Using NcML, it is possible to:
+
+- modify metadata
+- modify and restructure variables
+- combine or aggregate data from multiple datasets. The datasets may reside in the local file system or in a remote OPeNDAP (http://www.opendap.org/) server.
+
+Please see NcML generator usage page for more information in using the GUI.
+
+pynemo settings editor
----------------------
This tool will open a window where you can edit the mask and change the values of bdy parameters.
@@ -47,7 +98,7 @@ Syntax for pynemo_settings_editor command is
::
> pynemo_settings_editor [-s ]
-
+
.. note:: If no file name is specified then a file dialog box will open to select a file.
For help
@@ -56,9 +107,9 @@ For help
> pynemo_settings_editor -h
> usage: pynemo_settings_editor -s
-
+
Example:
::
-
+
pynemo_settings_editor -s namelist.bdy
diff --git a/environment_pynemo.yml b/environment_pynemo.yml
deleted file mode 100644
index e8df8c86..00000000
--- a/environment_pynemo.yml
+++ /dev/null
@@ -1,181 +0,0 @@
-name: pynemo_env
-channels:
- - conda-forge
- - anaconda
- - srikanthnagella
- - defaults
-dependencies:
- - alabaster=0.7.12=py27_0
- - appnope=0.1.0=py27hb466136_0
- - asn1crypto=1.0.1=py27_0
- - astroid=1.6.5=py27_0
- - attrs=19.2.0=py_0
- - babel=2.7.0=py_0
- - backports=1.0=py_2
- - backports.functools_lru_cache=1.5=py_2
- - backports.shutil_get_terminal_size=1.0.0=py27_2
- - backports_abc=0.5=py27h6972548_0
- - basemap=1.2.0=py27h0acbc05_0
- - blas=1.0=mkl
- - bleach=3.1.0=py27_0
- - bzip2=1.0.8=h1de35cc_0
- - ca-certificates=2019.10.16=0
- - cartopy=0.16.0=py27h9263bd1_0
- - certifi=2019.9.11=py27_0
- - cffi=1.12.3=py27hb5b8e2f_0
- - cftime=1.0.3.4=py27h1d22016_1001
- - chardet=3.0.4=py27_1003
- - cloudpickle=1.2.2=py_0
- - configparser=4.0.2=py27_0
- - cryptography=2.7=py27ha12b0ac_0
- - curl=7.65.3=ha441bb4_0
- - cycler=0.10.0=py27hfc73c78_0
- - cython=0.29.13=py27h0a44026_0
- - dbus=1.13.6=h90a0687_0
- - decorator=4.4.0=py27_1
- - defusedxml=0.6.0=py_0
- - docutils=0.15.2=py27_0
- - entrypoints=0.3=py27_0
- - enum34=1.1.6=py27_1
- - expat=2.2.6=h0a44026_0
- - freetype=2.9.1=hb4e5f40_0
- - functools32=3.2.3.2=py27_1
- - futures=3.3.0=py27_0
- - geos=3.6.2=h5470d99_2
- - gettext=0.19.8.1=h15daf44_3
- - glib=2.56.2=hd9629dc_0
- - hdf4=4.2.13=h39711bb_2
- - hdf5=1.10.1=ha036c08_1
- - icu=58.2=h4b95b61_1
- - idna=2.8=py27_0
- - imagesize=1.1.0=py27_0
- - intel-openmp=2019.4=233
- - ipaddress=1.0.22=py27_0
- - ipykernel=4.10.0=py27_0
- - ipython=5.8.0=py27_0
- - ipython_genutils=0.2.0=py27h8b9a179_0
- - ipywidgets=7.5.1=py_0
- - isort=4.3.21=py27_0
- - jedi=0.15.1=py27_0
- - jinja2=2.10.3=py_0
- - jpeg=9b=he5867d9_2
- - jsonschema=3.0.2=py27_0
- - jupyter=1.0.0=py27_7
- - jupyter_client=5.3.3=py27_1
- - jupyter_console=5.2.0=py27_1
- - jupyter_core=4.5.0=py_0
- - keyring=18.0.0=py27_0
- - kiwisolver=1.1.0=py27h0a44026_0
- - krb5=1.16.1=hddcf347_7
- - lazy-object-proxy=1.4.2=py27h1de35cc_0
- - libcurl=7.65.3=h051b688_0
- - libcxx=4.0.1=hcfea43d_1
- - libcxxabi=4.0.1=hcfea43d_1
- - libedit=3.1.20181209=hb402a30_0
- - libffi=3.2.1=h475c297_4
- - libgfortran=3.0.1=h93005f0_2
- - libiconv=1.15=hdd342a3_7
- - libnetcdf=4.5.0=h42fd751_7
- - libpng=1.6.37=ha441bb4_0
- - libsodium=1.0.16=h3efe00b_0
- - libssh2=1.8.2=ha12b0ac_0
- - libtiff=4.0.10=hcb84e12_2
- - libxml2=2.9.9=hf6e021a_1
- - libxslt=1.1.33=h33a18ac_0
- - lxml=4.4.1=py27hef8c89e_0
- - markupsafe=1.1.1=py27h1de35cc_0
- - matplotlib=2.2.3=py27h54f8f79_0
- - mccabe=0.6.1=py27_1
- - mistune=0.8.4=py27h1de35cc_0
- - mkl=2019.4=233
- - mkl-service=2.3.0=py27hfbe908c_0
- - mkl_fft=1.0.14=py27h5e564d8_0
- - mkl_random=1.1.0=py27ha771720_0
- - nbconvert=5.6.0=py27_1
- - nbformat=4.4.0=py27hddc86d0_0
- - ncurses=6.1=h0a44026_1
- - netcdf4=1.3.1=py27he3ffdca_2
- - notebook=5.7.8=py27_0
- - numpy=1.16.5=py27hacdab7b_0
- - numpy-base=1.16.5=py27h6575580_0
- - numpydoc=0.9.1=py_0
- - olefile=0.46=py27_0
- - openssl=1.1.1d=h1de35cc_3
- - owslib=0.18.0=py_0
- - packaging=19.2=py_0
- - pandoc=2.2.3.2=0
- - pandocfilters=1.4.2=py27_1
- - parso=0.5.1=py_0
- - pathlib2=2.3.5=py27_0
- - pcre=8.43=h0a44026_0
- - pexpect=4.7.0=py27_0
- - pickleshare=0.7.5=py27_0
- - pillow=6.2.0=py27hb68e598_0
- - pip=19.2.3=py27_0
- - proj4=5.0.1=h1de35cc_0
- - prometheus_client=0.7.1=py_0
- - prompt_toolkit=1.0.15=py27h4a7b9c2_0
- - psutil=5.6.3=py27h1de35cc_0
- - ptyprocess=0.6.0=py27_0
- - pycodestyle=2.5.0=py27_0
- - pycparser=2.19=py27_0
- - pyepsg=0.4.0=py27_0
- - pyflakes=2.1.1=py27_0
- - pygments=2.4.2=py_0
- - pyjnius=1.4=py27_0
- - pylint=1.9.2=py27_0
- - pyopenssl=19.0.0=py27_0
- - pyparsing=2.4.2=py_0
- - pyproj=1.9.5.1=py27h833a5d7_1
- - pyqt=4.11.4=py27_4
- - pyrsistent=0.15.4=py27h1de35cc_0
- - pyshp=2.1.0=py_0
- - pysocks=1.7.1=py27_0
- - python=2.7.16=h97142e2_7
- - python-dateutil=2.8.0=py27_0
- - python.app=2=py27_9
- - pytz=2019.3=py_0
- - pyzmq=18.1.0=py27h0a44026_0
- - qt=4.8.7=1
- - qtawesome=0.6.0=py_0
- - qtconsole=4.5.5=py_0
- - qtpy=1.9.0=py_0
- - readline=7.0=h1de35cc_5
- - requests=2.22.0=py27_0
- - rope=0.14.0=py_0
- - scandir=1.10.0=py27h1de35cc_0
- - scipy=1.2.1=py27h1410ff5_0
- - seawater=3.3.4=py_1
- - send2trash=1.5.0=py27_0
- - setuptools=41.4.0=py27_0
- - shapely=1.6.4=py27h20de77a_0
- - simplegeneric=0.8.1=py27_2
- - singledispatch=3.4.0.3=py27he22c18d_0
- - sip=4.18=py27_0
- - six=1.12.0=py27_0
- - snowballstemmer=2.0.0=py_0
- - sphinx=1.8.5=py27_0
- - sphinxcontrib=1.0=py27_1
- - sphinxcontrib-websupport=1.1.2=py_0
- - spyder=3.2.8=py27_0
- - spyder-kernels=1.4.0=py27_0
- - sqlite=3.30.0=ha441bb4_0
- - subprocess32=3.5.4=py27h1de35cc_0
- - terminado=0.8.2=py27_0
- - testpath=0.4.2=py27_0
- - thredds_crawler=1.0.0=py27_0
- - tk=8.6.8=ha441bb4_0
- - tornado=5.1.1=py27h1de35cc_0
- - traitlets=4.3.3=py27_0
- - typing=3.7.4.1=py27_0
- - urllib3=1.24.2=py27_0
- - wcwidth=0.1.7=py27h817c265_0
- - webencodings=0.5.1=py27_1
- - wheel=0.33.6=py27_0
- - widgetsnbextension=3.5.1=py27_0
- - wrapt=1.11.2=py27h1de35cc_0
- - wurlitzer=1.0.3=py27_0
- - xz=5.2.4=h1de35cc_4
- - zeromq=4.3.1=h0a44026_3
- - zlib=1.2.11=h1de35cc_3
- - zstd=1.3.7=h5bba6e5_0
diff --git a/inputs/FVCOM.ncml b/inputs/FVCOM.ncml
new file mode 100644
index 00000000..9856acc8
--- /dev/null
+++ b/inputs/FVCOM.ncml
@@ -0,0 +1,190 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/inputs/GLO-MFC_001_024.ncml b/inputs/GLO-MFC_001_024.ncml
new file mode 100644
index 00000000..7aa31fd2
--- /dev/null
+++ b/inputs/GLO-MFC_001_024.ncml
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/inputs/namelist_cmems.bdy b/inputs/namelist_cmems.bdy
new file mode 100755
index 00000000..509206cc
--- /dev/null
+++ b/inputs/namelist_cmems.bdy
@@ -0,0 +1,157 @@
+!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+!! NEMO/OPA : namelist for BDY generation tool
+!!
+!! User inputs for generating open boundary conditions
+!! employed by the BDY module in NEMO. Boundary data
+!! can be set up for v3.2 NEMO and above.
+!!
+!! More info here.....
+!!
+!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+
+!------------------------------------------------------------------------------
+! vertical coordinate
+!------------------------------------------------------------------------------
+ ln_zco = .false. ! z-coordinate - full steps (T/F)
+ ln_zps = .true. ! z-coordinate - partial steps (T/F)
+ ln_sco = .false. ! s- or hybrid z-s-coordinate (T/F)
+ rn_hmin = -10 ! min depth of the ocean (>0) or
+ ! min number of ocean level (<0)
+
+!------------------------------------------------------------------------------
+! s-coordinate or hybrid z-s-coordinate
+!------------------------------------------------------------------------------
+ rn_sbot_min = 10. ! minimum depth of s-bottom surface (>0) (m)
+ rn_sbot_max = 7000. ! maximum depth of s-bottom surface
+ ! (= ocean depth) (>0) (m)
+ ln_s_sigma = .false. ! hybrid s-sigma coordinates
+ rn_hc = 150.0 ! critical depth with s-sigma
+
+!------------------------------------------------------------------------------
+! grid information
+!------------------------------------------------------------------------------
+ sn_src_hgr = '/Users/thopri/Projects/PyNEMO/inputs/subset_coordinates.nc'
+ sn_src_zgr = '/Users/thopri/Projects/PyNEMO/inputs/subset_coordinates.nc'
+ sn_dst_hgr = 'http://opendap4gws.jasmin.ac.uk/thredds/noc_msm/dodsC/pynemo_grid_C/mesh_hgr_zps.nc'
+ sn_dst_zgr = 'http://opendap4gws.jasmin.ac.uk/thredds/noc_msm/dodsC/pynemo_grid_C/mesh_zgr_zps.nc'
+ sn_src_msk = '/Users/thopri/Projects/PyNEMO/inputs/subset_bathy.nc'
+ sn_bathy = 'http://opendap4gws.jasmin.ac.uk/thredds/noc_msm/dodsC/pynemo_grid_C/NNA_R12_bathy_meter_bench.nc'
+
+!------------------------------------------------------------------------------
+! I/O
+!------------------------------------------------------------------------------
+ sn_src_dir = '/Users/thopri/Projects/PyNEMO/inputs/GLO-MFC_001_024.ncml' ! src_files/'
+ sn_dst_dir = '/Users/thopri/Projects/PyNEMO/outputs'
+ sn_ncml_out = '/Users/thopri/Projects/PyNEMO/pynemo/output_NCML'
+ sn_model_prefix = 'NEMO'
+
+ sn_fn = 'NNA_R12' ! prefix for output files
+ nn_fv = -1e20 ! set fill value for output files
+ nn_src_time_adj = 0 ! src time adjustment
+ sn_dst_metainfo = 'CMEMS example'
+
+!------------------------------------------------------------------------------
+! CMEMS Data Source Configuration
+!------------------------------------------------------------------------------
+ ln_download_cmems = .false.
+ sn_cmems_dir = '/Users/thopri/Projects/PyNEMO/inputs/' ! where to download CMEMS input files (static and variable)
+ ln_download_static = .false.
+ ln_subset_static = .false.
+ nn_num_retry = 4 ! how many times to retry CMEMS download after non critical errors?
+!------------------------------------------------------------------------------
+! CMEMS MOTU Configuration (for Boundary Data)
+!------------------------------------------------------------------------------
+ sn_motu_server = 'http://nrt.cmems-du.eu/motu-web/Motu'
+ sn_cmems_config_template = '/Users/thopri/Projects/PyNEMO/pynemo/config/motu_config_template.ini'
+ sn_cmems_config = '/Users/thopri/Projects/PyNEMO/pynemo/config/motu_config.ini'
+ sn_cmems_model = 'GLOBAL_ANALYSIS_FORECAST_PHY_001_024-TDS'
+ sn_cmems_product = 'global-analysis-forecast-phy-001-024'
+ sn_dl_prefix = 'subset'
+!------------------------------------------------------------------------------
+! CMEMS FTP Configuration (for Static Files)
+!------------------------------------------------------------------------------
+ sn_ftp_server = 'nrt.cmems-du.eu'
+ sn_static_dir = '/Core/GLOBAL_ANALYSIS_FORECAST_PHY_001_024/global-analysis-forecast-phy-001-024-statics'
+ sn_static_filenames = 'GLO-MFC_001_024_coordinates.nc GLO-MFC_001_024_mask_bathy.nc GLO-MFC_001_024_mdt.nc'
+ sn_cdo_loc = '/opt/local/bin/cdo' ! location of cdo executable can be found by running "where cdo"
+!------------------------------------------------------------------------------
+! CMEMS Extent Configuration
+!------------------------------------------------------------------------------
+ nn_latitude_min = 40
+ nn_latitude_max = 66
+ nn_longitude_min = -22
+ nn_longitude_max = 16
+ nn_depth_min = 0.493
+ nn_depth_max = 5727.918000000001
+
+!------------------------------------------------------------------------------
+! unstructured open boundaries
+!------------------------------------------------------------------------------
+ ln_coords_file = .true. ! =T : produce bdy coordinates files
+ cn_coords_file = 'coordinates.bdy.nc' ! name of bdy coordinates files
+ ! (if ln_coords_file=.TRUE.)
+ ln_mask_file = .false. ! =T : read mask from file
+ cn_mask_file = 'mask.nc' ! name of mask file
+ ! (if ln_mask_file=.TRUE.)
+ ln_dyn2d = .false. ! boundary conditions for
+ ! barotropic fields
+ ln_dyn3d = .false. ! boundary conditions for
+ ! baroclinic velocities
+ ln_tra = .true. ! boundary conditions for T and S
+ ln_ice = .false. ! ice boundary condition
+ nn_rimwidth = 9 ! width of the relaxation zone
+
+!------------------------------------------------------------------------------
+! unstructured open boundaries tidal parameters
+!------------------------------------------------------------------------------
+ ln_tide = .false. ! =T : produce bdy tidal conditions
+ sn_tide_model = 'fes' ! Name of tidal model (fes|tpxo)
+ clname(1) = 'M2' ! constituent name
+ clname(2) = 'S2'
+ !clname(3) = 'N2'
+ !clname(4) = 'O1'
+ !clname(5) = 'K1'
+ !clname(6) = 'K2'
+ !clname(7) = 'L2'
+ !clname(8) = 'NU2'
+ !clname(9) = 'M4'
+ !clname(10) = 'MS4'
+ !clname(11) = 'Q1'
+ !clname(12) = 'P1'
+ !clname(13) = 'S1'
+ !clname(14) = '2N2'
+ !clname(15) = 'MU2'
+ ln_trans = .false. ! interpolate transport rather than velocities
+ ln_tide_checker = .false. ! run tide checker on PyNEMO tide output
+ sn_ref_model = 'fes' ! which model to check output against (FES only)
+!------------------------------------------------------------------------------
+! Time information
+!------------------------------------------------------------------------------
+ nn_year_000 = 2017 ! year start
+ nn_year_end = 2017 ! year end
+ nn_month_000 = 01 ! month start (default = 1 is years>1)
+ nn_month_end = 03 ! month end (default = 12 is years>1)
+ sn_dst_calendar = 'gregorian' ! output calendar format
+ nn_base_year = 1960 ! base year for time counter
+ ! TPXO file locations
+ sn_tide_grid = '/Users/thopri/Projects/PyNEMO/DATA/TPXO/grid_tpxo7.2.nc'
+ sn_tide_h = '/Users/thopri/Projects/PyNEMO/DATA/TPXO/h_tpxo7.2.nc'
+ sn_tide_u = '/Users/thopri/Projects/PyNEMO/DATA/TPXO/u_tpxo7.2.nc'
+ ! location of FES data
+ sn_tide_fes = '/Users/thopri/Projects/PyNEMO/DATA/FES/'
+
+!------------------------------------------------------------------------------
+! Additional parameters
+!------------------------------------------------------------------------------
+ nn_wei = 1 ! smoothing filter weights
+ rn_r0 = 0.041666666 ! decorrelation distance use in gauss
+ ! smoothing onto dst points. Need to
+ ! make this a funct. of dlon
+ sn_history = 'CMEMS test case'
+ ! history for netcdf file
+ ln_nemo3p4 = .true. ! else presume v3.2 or v3.3
+ nn_alpha = 0 ! Euler rotation angle
+ nn_beta = 0 ! Euler rotation angle
+ nn_gamma = 0 ! Euler rotation angle
+ rn_mask_max_depth = 100.0 ! Maximum depth to be ignored for the mask
+ rn_mask_shelfbreak_dist = 20000.0 ! Distance from the shelf break
\ No newline at end of file
diff --git a/inputs/namelist_remote.bdy b/inputs/namelist_remote.bdy
index 72b58c84..d0c66b7c 100644
--- a/inputs/namelist_remote.bdy
+++ b/inputs/namelist_remote.bdy
@@ -56,24 +56,26 @@
ln_mask_file = .false. ! =T : read mask from file
cn_mask_file = 'mask.nc' ! name of mask file
! (if ln_mask_file=.TRUE.)
- ln_dyn2d = .false. ! boundary conditions for
+ ln_dyn2d = .true. ! boundary conditions for
! barotropic fields
- ln_dyn3d = .false. ! boundary conditions for
+ ln_dyn3d = .true. ! boundary conditions for
! baroclinic velocities
ln_tra = .true. ! boundary conditions for T and S
- ln_ice = .false. ! ice boundary condition
+ ln_ice = .false. ! ice boundary condition
nn_rimwidth = 9 ! width of the relaxation zone
!------------------------------------------------------------------------------
! unstructured open boundaries tidal parameters
!------------------------------------------------------------------------------
ln_tide = .false. ! =T : produce bdy tidal conditions
- sn_tide_model = 'FES' ! Name of tidal model (FES|TPXO)
+ sn_tide_model = 'tpxo' ! Name of tidal model (fes|tpxo)
clname(1) = 'M2' ! constituent name
- clname(2) = 'S2'
- clname(3) = 'K2'
- ln_trans = .true. ! interpolate transport rather than
- ! velocities
+ clname(2) = 'S2'
+ clname(3) = 'O1'
+ clname(4) = 'K1'
+ clname(5) = 'N2'
+ ln_trans = .false. ! interpolate transport rather than
+ ! velocities
!------------------------------------------------------------------------------
! Time information
!------------------------------------------------------------------------------
@@ -83,9 +85,12 @@
nn_month_end = 11 ! month end (default = 12 is years>1)
sn_dst_calendar = 'gregorian' ! output calendar format
nn_base_year = 1960 ! base year for time counter
- sn_tide_grid = './src_data/tide/grid_tpxo7.2.nc'
- sn_tide_h = './src_data/tide/h_tpxo7.2.nc'
- sn_tide_u = './src_data/tide/u_tpxo7.2.nc'
+ ! TPXO file locations
+ sn_tide_grid = '/Users/thopri/Projects/PyNEMO/DATA/TPXO/grid_tpxo7.2.nc'
+ sn_tide_h = '/Users/thopri/Projects/PyNEMO/DATA/TPXO/h_tpxo7.2.nc'
+ sn_tide_u = '/Users/thopri/Projects/PyNEMO/DATA/TPXO/u_tpxo7.2.nc'
+ ! location of FES data
+ sn_tide_fes = '/Users/thopri/Projects/PyNEMO/DATA/FES/'
!------------------------------------------------------------------------------
! Additional parameters
diff --git a/pynemo/config/motu_config_template.ini b/pynemo/config/motu_config_template.ini
new file mode 100644
index 00000000..4c279d05
--- /dev/null
+++ b/pynemo/config/motu_config_template.ini
@@ -0,0 +1,36 @@
+[Main]
+# Motu credentials
+user=J90TBS4Q1UCT4CM7
+pwd=Z8UKFNXA5OIZRXCK
+
+motu=DSGJJGWODV2F8TFU
+service_id=S7L40ACQHANTAC6Y
+product_id=4LC8ALR9T96XN08U
+date_min=M49OAWI14XESWY1K
+date_max=DBT3J4GH2O19Q75P
+latitude_min=3M2FJJE5JW1EN4C1
+latitude_max=OXI2PXSTJG5PV6OW
+longitude_min=DWUJ65Y233FQFW3F
+longitude_max=K0UQJJDJOKX14DPS
+depth_min=FNO0GZ1INQDATAXA
+depth_max=EI6GB1FHTMCIPOZC
+# Empty or non set means all variables
+# 1 or more variables separated by a coma and identified by their standard name
+variable=4Y4LMQLAKP10YFUE
+# Accept relative or absolute path. The dot character "." is the current folder
+out_dir=QFCN2P56ZQSA7YNK
+out_name=YSLTB459ZW0P84GE
+
+# Logging
+# https://docs.python.org/3/library/logging.html#logging-levels
+# log_level=X {CRITICAL:50, ERROR:40, WARNING:30, INFO:20, DEBUG:10, TRACE:0}
+log_level=20
+
+# block_size block used to download file (integer expressing bytes) default=65535
+# block_size=65535
+socket_timeout=120000
+
+# Http proxy to connect to Motu server
+# proxy_server=proxy.domain.net:8080
+# proxy_user=john
+# proxy_pwd=secret
diff --git a/pynemo/gui/nemo_bdy_mask.py b/pynemo/gui/nemo_bdy_mask.py
index ea062abc..39793706 100644
--- a/pynemo/gui/nemo_bdy_mask.py
+++ b/pynemo/gui/nemo_bdy_mask.py
@@ -68,19 +68,37 @@ def set_bathymetry_file(self, bathy_file):
self.bathymetry_file = str(bathy_file)
#open the bathymetry file
self.bathy_nc = Dataset(self.bathymetry_file)
- self.lon = np.asarray(self.bathy_nc.variables['nav_lon'])
- self.lat = np.asarray(self.bathy_nc.variables['nav_lat'])
- self.bathy_data = self.bathy_nc.variables['Bathymetry'][:,:]
- try: #check if units exists otherwise unknown. TODO
+ try:
+ self.lon = np.asarray(self.bathy_nc.variables['nav_lon'])
+ self.lat = np.asarray(self.bathy_nc.variables['nav_lat'])
+ except:
+ self.lon = np.asarray(self.bathy_nc.variables['longitude'])
+ self.lat = np.asarray(self.bathy_nc.variables['latitude'])
+ # expand lat and lon 1D arrays into 2D array matching nav_lat nav_lon
+ self.lon = np.tile(self.lon, (np.shape(self.lat)[0], 1))
+ self.lat = np.tile(self.lat, (np.shape(self.lon)[1], 1))
+ self.lat = np.rot90(self.lat)
+
+ try:
+ self.bathy_data = self.bathy_nc.variables['Bathymetry'][:,:]
self.data_units = self.bathy_nc.variables['Bathymetry'].units
+ except KeyError:
+ self.bathy_data = self.bathy_nc.variables['deptho'][:,:]
+ self.data_units = self.bathy_nc.variables['deptho'].units
except AttributeError:
+ self.logger.warning('Bathymetry Units unknown....')
self.data_units = "unknown"
if self.data is None:
- self.data = self.bathy_nc.variables['Bathymetry']
+ try:
+ self.data = self.bathy_nc.variables['Bathymetry']
+ except:
+ self.data = self.bathy_nc.variables['deptho']
+ if sum(self.data[:, 0]) + sum(self.data[0, :]) + sum(self.data[:, -1]) + sum(self.data[-1, :]) == 0:
+ raise BaseException('zeros around boundaries of bathymetry is not supported by PyNEMO mask generator')
self.data = np.asarray(self.data[:, :])
self.data = np.around((self.data + .5).clip(0, 1))
#apply default 1px border
- self.apply_border_mask(1)
+ self.apply_border_mask(1)
except KeyError:
self.logger.error('Bathymetry file does not have Bathymetry variable')
raise
@@ -89,6 +107,7 @@ def set_bathymetry_file(self, bathy_file):
raise
+
def save_mask(self, mask_file):
"""Reads the mask data from the mask file"""
if mask_file == None:
diff --git a/pynemo/nemo_bdy_dl_cmems.py b/pynemo/nemo_bdy_dl_cmems.py
new file mode 100644
index 00000000..5685384e
--- /dev/null
+++ b/pynemo/nemo_bdy_dl_cmems.py
@@ -0,0 +1,354 @@
+# -*- coding: utf-8 -*-
+"""
+Set of functions to download CMEMS files using FTP (for static mask data) and MOTU (for subsetted variable data).
+
+"""
+# import modules
+from subprocess import Popen, PIPE, CalledProcessError
+import xml.etree.ElementTree as ET
+import logging
+import ftplib
+import re
+import pandas as pd
+from datetime import datetime
+from pathlib import Path
+import glob
+import os
+#local imports
+from pynemo.utils import cmems_errors as errors
+from pynemo.reader import factory
+
+logger = logging.getLogger(__name__)
+# TODO: Fix double spacing issue on CMEMS download log entries.
def chk_motu():
    """Check that the MOTU client is installed in the PyNEMO python environment.

    Returns:
        str: the installed client version (e.g. ``v1.8.4``) on success, or
            the stderr text if the command wrote anything to stderr.
        int: 1 if the command output does not look like the motu client.
    """
    stdout, stderr = Popen(['motuclient', '--version'],
                           stdout=PIPE, stderr=PIPE,
                           universal_newlines=True).communicate()
    stdout = stdout.strip()
    stderr = stderr.strip()

    if len(stderr) > 0:
        return stderr

    if 'motuclient-python' not in stdout:
        return 1
    # stdout is already stripped, so there is no trailing newline left to
    # trim: slicing to -1 here used to chop the last character of the
    # version string.
    idx = stdout.find('v')
    return stdout[idx:]
+
+
def get_static(args):
    """Download CMEMS static (mask) files over FTP.

    CMEMS holds mask data for the model grids as an FTP download only, i.e.
    it can't be used with the MOTU subsetter. This logs on to the FTP server
    and downloads the requested files. The bdy config file provides the
    server, directory and filenames; the credentials for the FTP connection
    (and MOTU client) live in CMEMS_cred.py in the utils folder.

    Returns:
        0 on success, otherwise an error message (string or ftplib error).
    """
    try:
        from pynemo.utils import CMEMS_cred
    except ImportError:
        logger.error('Unable to import CMEMS credentials, see Readme for instructions on adding to PyNEMO')
        return 'Unable to import credential file, have you created one?'
    try:
        logger.info('connecting to FTP host......')
        ftp = ftplib.FTP(host=args['ftp_server'], user=CMEMS_cred.user, passwd=CMEMS_cred.pwd)
    except ftplib.error_temp:
        return 'temporary error in FTP connection, please try running PyNEMO again........'
    except (ftplib.error_perm, ftplib.error_reply, ftplib.error_proto) as err:
        return err
    # TODO: provide better returns for the various FTP errors
    # TODO: add try excepts to handle issues with files being missing etc.
    # TODO: Check there is enough space to download as well.....
    # TODO: Handle timeouts etc as well......
    logger.info('navigating to download directory.......')
    ftp.cwd(args['static_dir'])
    logger.info('generating download filename list......')
    filenames = args['static_filenames'].split(' ')
    for f in filenames:
        try:
            logger.info('downloading ' + f + ' now......')
            # use a context manager so the local file is always closed,
            # even if the transfer raises
            with open(args['cmems_dir'] + f, 'wb') as local_file:
                ftp.retrbinary("RETR " + f, local_file.write)
        except ftplib.error_temp:
            ftp.quit()
            return 'temporary error in FTP download, please try running PyNEMO again........'
        except (ftplib.error_perm, ftplib.error_reply, ftplib.error_proto) as err:
            ftp.quit()
            return err
    ftp.quit()

    return 0
+
def subset_static(args):
    """Subset the FTP-downloaded static files to the requested extent.

    The FTP download covers the whole product grid, so each static file is
    cut down with the CDO ``sellonlatbox`` operator to match the data
    downloads. CDO must be installed on the operating system.

    Returns:
        0 on success; the CDO output string if it contains 'Abort' or if
        CDO produced any stdout (CDO reports via stderr, so non-empty
        stdout is treated as a failure report).
    """
    logger.info('subsetting static files now......')
    for fname in args['static_filenames'].split(' '):
        suffix = fname.split('_')[-1]
        out_name = args['dl_prefix'] + '_' + suffix
        box = ('sellonlatbox,' + str(args['longitude_min']) + ','
               + str(args['longitude_max']) + ','
               + str(args['latitude_min']) + ','
               + str(args['latitude_max']))
        command = [args['cdo_loc'], box,
                   args['cmems_dir'] + fname,
                   args['cmems_dir'] + out_name]
        proc = Popen(command, stdout=PIPE, stderr=PIPE, universal_newlines=True)
        out, err = proc.communicate()
        out = out.strip()
        err = err.strip()

        # For some reason CDO seems to pipe output to stderr, so check
        # stderr for results and pass stdout through if it is non-empty.
        if 'Abort' in err:
            return err
        if len(out) > 0:
            return out
    return 0
+
def MWD_request_cmems(args, date_min, date_max, F):
    """Request CMEMS data in monthly ('M'), weekly ('W') or daily ('D') chunks.

    Splits the requested period into the relevant intervals and passes each
    interval to request_cmems. If request_cmems returns 0 the next interval
    is downloaded; an error string is propagated back; request_cmems returns
    1 when the interval is still too large for the server.

    Returns:
        0 on success, 1 if a request is too big, an error string otherwise.
    """
    if F == 'M':
        month_start = pd.date_range(date_min, date_max,
                                    freq='MS').strftime("%Y-%m-%d").tolist()
        month_end = pd.date_range(date_min, date_max,
                                  freq='M').strftime("%Y-%m-%d").tolist()
        for m in range(len(month_end)):
            mnth_dl = request_cmems(args, month_start[m], month_end[m])
            if mnth_dl == 0:
                logger.info('CMEMS month request ' + str(m + 1) + 'of' + str(len(month_end)) + ' successful')
            if type(mnth_dl) == str:
                logger.error(
                    'CMEMS month request ' + str(m + 1) + 'of' + str(len(month_end)) + ' unsuccessful: Error Msg below')
                logger.error(mnth_dl)
                return mnth_dl
            if mnth_dl == 1:
                return 1

    if F == 'W':
        week_start = pd.date_range(date_min, date_max,
                                   freq='W').strftime("%Y-%m-%d").tolist()
        week_end = []
        for w in range(len(week_start)):
            # BUG FIX: 'datetime' is the class imported from the datetime
            # module, so 'datetime.timedelta' raised AttributeError; pandas
            # is already imported, so use pd.Timedelta for the 6-day offset.
            week_end.append((datetime.strptime(week_start[w], '%Y-%m-%d')
                             + pd.Timedelta(days=6)).strftime('%Y-%m-%d'))
        for w in range(len(week_end)):
            wk_dl = request_cmems(args, week_start[w], week_end[w])
            if wk_dl == 0:
                logger.info('CMEMS week request ' + str(w + 1) + 'of' + str(len(week_end)) + ' successful')
            if type(wk_dl) == str:
                # BUG FIX: this branch referenced the undefined name 'm'
                # (copy/paste from the month branch); use the week index.
                logger.error(
                    'CMEMS week request ' + str(w + 1) + 'of' + str(len(week_end)) + ' unsuccessful: Error Msg below')
                logger.error(wk_dl)
                return wk_dl
            if wk_dl == 1:
                return 1

    if F == 'D':
        days = pd.date_range(date_min, date_max,
                             freq='D').strftime("%Y-%m-%d").tolist()
        for d in range(len(days)):
            dy_dl = request_cmems(args, days[d], days[d])
            if dy_dl == 0:
                # BUG FIX: referenced the undefined name 'week_end' here;
                # the total number of day requests is len(days).
                logger.info('CMEMS day request ' + str(d + 1) + 'of' + str(len(days)) + ' successful')
            if dy_dl == 1:
                logger.error('CMEMS day request still too big, please make domain smaller, or use less variables')
                # NOTE(review): bare return (None) kept from the original —
                # callers may want 1 here instead; confirm against caller.
                return
            if type(dy_dl) == str:
                logger.error('CMEMS day request ' + str(d + 1) + 'of' + str(len(days)) + ' unsuccessful: Error Msg below')
                logger.error(dy_dl)
                return dy_dl

    # BUG FIX: "F not in ('MWD')" was a substring test against the string
    # 'MWD', so e.g. 'MW' or '' slipped through silently; test the tuple.
    if F not in ('M', 'W', 'D'):
        time_int_err = 'incorrect string used to define time download interval please use M, W or D'
        logger.error(time_int_err)
        return time_int_err

    return 0
+
def request_cmems(args, date_min, date_max):
    """Main CMEMS download function driven by the MOTU client.

    Reads the NCML file named in ``args['src_dir']`` to discover the
    requested variables and the grid each one lives on. For each grid a
    MOTU configuration file is populated from the template and motuclient
    is run twice: once with ``--size`` to validate the request (writing an
    XML report), and, if the report's msg field says OK, once more to
    download the netcdf file.

    Returns:
        0 when all requests succeed, 1 when the request is too big for the
        server, otherwise an error string.
    """
    try:
        from pynemo.utils import CMEMS_cred
    except ImportError:
        logger.error('Unable to import CMEMS credentials, see Readme for instructions on adding to PyNEMO')
        return 'Unable to import credentials file'

    xml = args['src_dir']
    root = ET.parse(xml).getroot()
    # BUG FIX: Element.getchildren() was removed in Python 3.9; plain
    # indexing is the supported way to reach sub-elements.
    num_var = len(root[0])
    logger.info('number of variables requested is ' + str(num_var))
    grids = {}
    locs = {}

    for n in range(num_var):
        F = root[0][n][0][0].attrib
        var_name = root[n + 1].attrib['orgName']
        Type = root[0][n][0].attrib
        logger.info('Variable ' + str(n + 1) + ' is ' + Type['name'] + ' (Variable name: ' + var_name + ')')
        r = ''.join(re.findall('([A-Z])', F['regExp']))
        logger.info('It is on the ' + str(r) + ' grid')

        if r in grids:
            grids[r].append(var_name)
        else:
            grids[r] = [var_name]
        if r not in locs:
            # drop the first six characters of the location attribute
            # (presumably a "file:/" prefix — TODO confirm)
            locs[r] = F['location'][6:]

    for key in grids:
        with open(args['cmems_config_template'], 'r') as fh:
            filedata = fh.read()

        # Swap the placeholder tokens in the template for the real request
        # parameters.
        filedata = filedata.replace('J90TBS4Q1UCT4CM7', CMEMS_cred.user)
        filedata = filedata.replace('Z8UKFNXA5OIZRXCK', CMEMS_cred.pwd)
        filedata = filedata.replace('DSGJJGWODV2F8TFU', args['motu_server'])
        filedata = filedata.replace('S7L40ACQHANTAC6Y', args['cmems_model'])
        filedata = filedata.replace('4LC8ALR9T96XN08U', args['cmems_product'])
        filedata = filedata.replace('M49OAWI14XESWY1K', date_min)
        filedata = filedata.replace('DBT3J4GH2O19Q75P', date_max)
        filedata = filedata.replace('3M2FJJE5JW1EN4C1', str(args['latitude_min']))
        filedata = filedata.replace('OXI2PXSTJG5PV6OW', str(args['latitude_max']))
        filedata = filedata.replace('DWUJ65Y233FQFW3F', str(args['longitude_min']))
        filedata = filedata.replace('K0UQJJDJOKX14DPS', str(args['longitude_max']))
        filedata = filedata.replace('FNO0GZ1INQDATAXA', str(args['depth_min']))
        filedata = filedata.replace('EI6GB1FHTMCIPOZC', str(args['depth_max']))
        filedata = filedata.replace('4Y4LMQLAKP10YFUE', ','.join(grids[key]))
        filedata = filedata.replace('QFCN2P56ZQSA7YNK', locs[key])
        filedata = filedata.replace('YSLTB459ZW0P84GE', args['dl_prefix'] + '_' + str(date_min) + '_' + str(date_max) + '_' + str(key) + '.nc')

        file_chk = Path(locs[key] + args['dl_prefix'] + '_' + str(date_min) + '_' + str(date_max) + '_' + str(key) + '.nc')

        if file_chk.is_file():
            logger.warning('filename of download already exists, please check file is valid, skipping to next item......')
        else:
            with open(args['cmems_config'], 'w') as fh:
                fh.write(filedata)

            with Popen(['motuclient', '--size', '--config-file', args['cmems_config']], stdout=PIPE, bufsize=1, universal_newlines=True) as p:
                for line in p.stdout:
                    line = line.replace("[ INFO]", "")
                    logger.info(line)
                    if 'Error' in line:
                        return 'Error found in CMEMS download report, please check downloaded data'
                    if 'Done' in line:
                        logger.info('download of request xml file for variable ' + ' '.join(grids[key]) + ' successful')
            # BUG FIX: this check used to sit inside the "with" block, where
            # Popen.returncode is still None (it is only set once the process
            # has been waited on, which the context manager does on exit), so
            # even a clean run returned the string 'None'.
            if p.returncode != 0:
                return str(p.returncode)

            logger.info('checking size of request for variables ' + ' '.join(grids[key]))
            xml = locs[key] + args['dl_prefix'] + '_' + str(date_min) + '_' + str(date_max) + '_' + str(key) + '.xml'
            try:
                root = ET.parse(xml).getroot()
            except ET.ParseError:
                return 'Parse Error in XML file, This generally occurs when CMEMS service is down and returns an unexpected XML.'

            logger.info('size of request ' + root.attrib['size'] + 'Kb')

            if 'OK' in root.attrib['msg']:
                logger.info('request valid, downloading now......')

                with Popen(['motuclient', '--config-file', args['cmems_config']], stdout=PIPE, bufsize=1, universal_newlines=True) as p:
                    for line in p.stdout:
                        line = line.replace("[ INFO]", "")
                        logger.info(line)
                        if 'Error' in line:
                            return line
                        if 'Done' in line:
                            logger.info('download of request data file for variable ' + ' '.join(grids[key]) + ' successful')
                # BUG FIX: same returncode-inside-with problem as above.
                if p.returncode != 0:
                    return str(p.returncode)

            elif 'too big' in root.attrib['msg']:
                return 1
            else:
                return 'unable to determine if size request is valid (too big or not)'

    return 0
+
def err_parse(error, err_type):
    """Classify a download error against the known FTP/MOTU error lists.

    Looks the error text up in the dictionaries held in
    pynemo.utils.cmems_errors. A match in the retry list means the download
    should be restarted; a match in the critical list means PyNEMO should
    stop; anything unrecognised is treated as a new, unlogged error.

    Returns:
        0 to retry, 1 for a critical (stop) error, 2 for an unlogged error.
    """
    if err_type == 'FTP':
        if any(known in error for known in errors.FTP_retry):
            logger.info('retrying FTP download....')
            return 0
        if any(known in error for known in errors.FTP_critical):
            logger.info('critical FTP error, stopping')
            return 1
        logger.critical('unlogged FTP error, stopping')
        logger.critical(error)
        return 2

    if err_type == 'MOTU':
        if any(known in error for known in errors.MOTU_retry):
            logger.info('non critical error')
            logger.info('restarting download....')
            return 0
        if any(known in error for known in errors.MOTU_critical):
            logger.critical('critical error found: please see below')
            logger.critical(error)
            return 1
        logger.critical('unlogged error: please see below')
        logger.critical(error)
        return 2
+
def clean_up(settings):
    """Remove the size-check XML files left behind by request_cmems.

    Called before every sys exit and at the end of the download function.
    At the moment it only removes the XML size files that are no longer
    required; other functionality may be added in the future.
    """
    # remove size check XML files that are no longer needed.
    # BUG FIX: glob returns an empty list (it never raises OSError) when no
    # files match, so the old whole-loop except both mis-reported a mid-loop
    # removal failure as 'no xml files found' and logged success when
    # nothing had been removed. Handle each file individually and count.
    removed = 0
    for f in glob.glob(settings['cmems_dir'] + "*.xml"):
        try:
            os.remove(f)
            removed += 1
        except OSError:
            logger.info('unable to remove xml file ' + f)
    if removed == 0:
        logger.info('no xml files found to remove')
        return
    logger.info('removed size check xml files successfully')
    return
diff --git a/pynemo/nemo_bdy_extr_tm3.py b/pynemo/nemo_bdy_extr_tm3.py
index 6bbb35a3..ad9a62bd 100644
--- a/pynemo/nemo_bdy_extr_tm3.py
+++ b/pynemo/nemo_bdy_extr_tm3.py
@@ -37,6 +37,7 @@
import numpy as np
import scipy.spatial as sp
from calendar import monthrange, isleap
+from glob import glob
from scipy.interpolate import interp1d
from cftime import datetime, utime
from pynemo import nemo_bdy_ncgen as ncgen
@@ -74,6 +75,15 @@ def __init__(self, setup, SourceCoord, DstCoord, Grid, var_nam, grd, pair):
self.g_type = grd
self.settings = setup
self.key_vec = False
+ self.t_mask = None
+ self.u_mask = None
+ self.v_mask = None
+ self.dist_wei = None
+ self.dist_fac = None
+ self.tmp_valid = None
+ self.data_ind = None
+ self.nan_ind = None
+ self.isslab = False
# TODO: Why are we deepcopying the coordinates???
@@ -90,8 +100,11 @@ def __init__(self, setup, SourceCoord, DstCoord, Grid, var_nam, grd, pair):
self.jpj, self.jpi = DC.lonlat[grd]['lon'].shape
- self.jpk = DC.depths[grd]['bdy_z'].shape[0]
- # Set some constants
+ try:
+ self.jpk = DC.depths[grd]['bdy_z'].shape[0]
+ except KeyError:
+ self.jpk = 1
+ # Set some constants
# Make function of dst grid resolution (used in 1-2-1 weighting)
# if the weighting can only find one addtional point this implies an
@@ -118,12 +131,9 @@ def __init__(self, setup, SourceCoord, DstCoord, Grid, var_nam, grd, pair):
dst_lon = DC.bdy_lonlat[self.g_type]['lon']
dst_lat = DC.bdy_lonlat[self.g_type]['lat']
- try:
- dst_dep = DC.depths[self.g_type]['bdy_z']
- except KeyError:
- dst_dep = np.zeros([1])
- self.isslab = len(dst_dep) == 1
+ dst_dep = DC.depths[self.g_type]['bdy_z']
if dst_dep.size == len(dst_dep):
+ self.isslab = True
dst_dep = np.ones([1, len(dst_lon)])
# ??? Should this be read from settings?
@@ -267,6 +277,8 @@ def __init__(self, setup, SourceCoord, DstCoord, Grid, var_nam, grd, pair):
dist_merid = diff_lon_rv * np.cos(dst_lat_rep * np.pi / 180)
dist_zonal = sc_lat_rv[ind_rv] - dst_lat_rep
+ # TODO: would a greater circle distance function be better here?
+
dist_tot = np.power((np.power(dist_merid, 2) +
np.power(dist_zonal, 2)), 0.5)
dist_tot = dist_tot.reshape(ind.shape, order='F').T
@@ -395,10 +407,11 @@ def __init__(self, setup, SourceCoord, DstCoord, Grid, var_nam, grd, pair):
self.dst_dep = dst_dep
self.num_bdy = num_bdy
self.id_121 = id_121
- if not self.isslab:
- self.bdy_z = DC.depths[self.g_type]['bdy_H']
- else:
- self.bdy_z = np.zeros([1])
+ self.bdy_z = DC.depths[self.g_type]['bdy_H']
+ # if not self.isslab:
+ # self.bdy_z = DC.depths[self.g_type]['bdy_H']
+ # else:
+ # self.bdy_z = np.zeros([1])
self.dst_z = dst_dep
self.sc_z_len = sc_z_len
@@ -419,11 +432,15 @@ def extract_month(self, year, month):
"""
self.logger.info('extract_month function called')
# Check year entry exists in d_bdy, if not create it.
+ #for v in range(self.nvar):
+ # try:
+ # self.d_bdy[self.var_nam[v]][year]
+ # except KeyError:
+ # self.d_bdy[self.var_nam[v]][year] = {'data': None, 'date': {}}
+
+ # flush previous months data......
for v in range(self.nvar):
- try:
- self.d_bdy[self.var_nam[v]][year]
- except KeyError:
- self.d_bdy[self.var_nam[v]][year] = {'data': None, 'date': {}}
+ self.d_bdy[self.var_nam[v]][year] = {'data': None, 'date': {}}
i_run = np.arange(self.sc_ind['imin'], self.sc_ind['imax'])
j_run = np.arange(self.sc_ind['jmin'], self.sc_ind['jmax'])
@@ -435,7 +452,7 @@ def extract_month(self, year, month):
# define src/dst cals
sf, ed = self.cal_trans(sc_time.calendar, #sc_time[0].calendar
self.settings['dst_calendar'], year, month)
- DstCal = utime('seconds since %d-1-1' %year,
+ DstCal = utime('seconds since %d-1-1' %self.settings['base_year'],
self.settings['dst_calendar'])
dst_start = DstCal.date2num(datetime(year, month, 1))
dst_end = DstCal.date2num(datetime(year, month, ed, 23, 59, 59))
@@ -456,25 +473,46 @@ def extract_month(self, year, month):
rev_seq = list(range(len(sc_time.time_counter)))
rev_seq.reverse()
for date in rev_seq:
- if src_date_seconds[date] < dst_start:
- first_date = date
- break
+ try:
+ if src_date_seconds[date-1] <= dst_start <= src_date_seconds[date]:
+ first_date = date
+ break
+ except IndexError:
+ if src_date_seconds[date] == dst_start:
+ first_date = date
+ else:
+ logging.error('Start date not found in source data')
for date in range(len(sc_time.time_counter)):
- if src_date_seconds[date] > dst_end:
- last_date = date
- break
+ try:
+ if src_date_seconds[date] <= dst_end <= src_date_seconds[date+1]:
+ last_date = date
+ break
+ except IndexError:
+ if src_date_seconds[date] == dst_end:
+ last_date = date
+ else:
+ logging.error('End date not found in source data,')
self.logger.info('first/last dates: %s %s', first_date, last_date)
if self.first:
nc_3 = GetFile(self.settings['src_msk'])
- varid_3 = nc_3['tmask']
- t_mask = varid_3[:1, :sc_z_len, j_run, i_run]
+ # TODO: sort generic mask variable name
+ try:
+ varid_3 = nc_3['tmask']
+ self.t_mask = varid_3[:1, :sc_z_len, j_run, i_run]
+ except:
+ varid_3 = nc_3['mask']
+ varid_3 = np.expand_dims(varid_3, axis=0)
+ self.t_mask = varid_3[:1, :sc_z_len, np.min(j_run):np.max(j_run) + 1, np.min(i_run):np.max(i_run) + 1]
+ # TODO: Sort out issue with j_run and i_run not broadcasting to varid_3
if self.key_vec:
- varid_3 = nc_3['umask']
- u_mask = varid_3[:1, :sc_z_len, j_run, extended_i]
- varid_3 = nc_3['vmask']
- v_mask = varid_3[:1, :sc_z_len, extended_j, i_run]
+ #varid_3 = nc_3['umask']
+ varid_3 = nc_3['mask']
+ self.u_mask = varid_3[:1, :sc_z_len, j_run, extended_i]
+ #varid_3 = nc_3['vmask']
+ varid_3 = nc_3['mask']
+ self.v_mask = varid_3[:1, :sc_z_len, extended_j, i_run]
nc_3.close()
# Identify missing values and scale factors if defined
@@ -499,7 +537,7 @@ def extract_month(self, year, month):
meta_data[n] = self.fnames_2.get_meta_data(self.var_nam[n], meta_data[n])
for vn in range(self.nvar):
- self.d_bdy[self.var_nam[vn]]['date'] = sc_time.date_counter[first_date:last_date + 1]
+ self.d_bdy[self.var_nam[vn]]['date'] = sc_time.date_counter[first_date:last_date + 1]
# Loop over identified files
for f in range(first_date, last_date + 1):
@@ -538,8 +576,8 @@ def extract_month(self, year, month):
# Average vector vars onto T-grid
if self.key_vec:
# First make sure land points have a zero val
- sc_alt_arr[0] *= u_mask
- sc_alt_arr[1] *= v_mask
+ sc_alt_arr[0] *= self.u_mask
+ sc_alt_arr[1] *= self.v_mask
# Average from to T-grid assuming C-grid stagger
sc_array[0] = 0.5 * (sc_alt_arr[0][:,:,:,:-1] +
sc_alt_arr[0][:,:,:,1:])
@@ -551,7 +589,10 @@ def extract_month(self, year, month):
# Note using isnan/sum is relatively fast, but less than
# bottleneck external lib
self.logger.info('SC ARRAY MIN MAX : %s %s', np.nanmin(sc_array[0]), np.nanmax(sc_array[0]))
- sc_array[0][t_mask == 0] = np.NaN
+ if not self.isslab and not self.key_vec:
+ sc_array[0][self.t_mask == 0] = np.NaN
+ if self.isslab and not self.key_vec:
+ sc_array[0][self.t_mask[:,0:1,:,:] == 0] = np.NaN
self.logger.info( 'SC ARRAY MIN MAX : %s %s', np.nanmin(sc_array[0]), np.nanmax(sc_array[0]))
if not np.isnan(np.sum(meta_data[vn]['sf'])):
sc_array[0] *= meta_data[vn]['sf']
@@ -559,7 +600,7 @@ def extract_month(self, year, month):
sc_array[0] += meta_data[vn]['os']
if self.key_vec:
- sc_array[1][t_mask == 0] = np.NaN
+ sc_array[1][self.t_mask == 0] = np.NaN
if not np.isnan(np.sum(meta_data[vn + 1]['sf'])):
sc_array[1] *= meta_data[vn + 1]['sf']
if not np.isnan(np.sum(meta_data[vn + 1]['os'])):
@@ -568,23 +609,39 @@ def extract_month(self, year, month):
# Now collapse the extracted data to an array
# containing only nearest neighbours to dest bdy points
# Loop over the depth axis
- for dep in range(sc_z_len):
+ if self.isslab == False:
+ for dep in range(sc_z_len):
+ tmp_arr = [None, None]
+ # Consider squeezing
+ tmp_arr[0] = sc_array[0][0, dep, :, :].flatten('F') # [:,:,dep]
+ if not self.key_vec:
+ sc_bdy[vn, dep, :, :] = self._flat_ref(tmp_arr[0], ind)
+ else:
+ tmp_arr[1] = sc_array[1][0,dep,:,:].flatten('F') #[:,:,dep]
+ # Include in the collapse the rotation from the
+ # grid to real zonal direction, ie ij -> e
+ sc_bdy[vn, dep, :] = (tmp_arr[0][ind[:]] * self.gcos -
+ tmp_arr[1][ind[:]] * self.gsin)
+ # Include... meridinal direction, ie ij -> n
+ sc_bdy[vn+1, dep, :] = (tmp_arr[1][ind[:]] * self.gcos +
+ tmp_arr[0][ind[:]] * self.gsin)
+ if self.isslab == True:
tmp_arr = [None, None]
# Consider squeezing
- tmp_arr[0] = sc_array[0][0,dep,:,:].flatten('F') #[:,:,dep]
+ tmp_arr[0] = sc_array[0][0, 0, :, :].flatten('F') # [:,:,dep]
if not self.key_vec:
- sc_bdy[vn, dep, :, :] = self._flat_ref(tmp_arr[0], ind)
+ sc_bdy[vn, 0, :, :] = self._flat_ref(tmp_arr[0], ind)
else:
- tmp_arr[1] = sc_array[1][0,dep,:,:].flatten('F') #[:,:,dep]
+ tmp_arr[1] = sc_array[1][0,0,:,:].flatten('F') #[:,:,dep]
# Include in the collapse the rotation from the
# grid to real zonal direction, ie ij -> e
- sc_bdy[vn, dep, :] = (tmp_arr[0][ind[:]] * self.gcos -
- tmp_arr[1][ind[:]] * self.gsin)
+ sc_bdy[vn, 0, :] = (tmp_arr[0][ind[:]] * self.gcos -
+ tmp_arr[1][ind[:]] * self.gsin)
# Include... meridinal direction, ie ij -> n
- sc_bdy[vn+1, dep, :] = (tmp_arr[1][ind[:]] * self.gcos +
- tmp_arr[0][ind[:]] * self.gsin)
+ sc_bdy[vn+1, 0, :] = (tmp_arr[1][ind[:]] * self.gcos +
+ tmp_arr[0][ind[:]] * self.gsin)
- # End depths loop
+ # End depths loop
self.logger.info(' END DEPTHS LOOP ')
# End Looping over vars
self.logger.info(' END VAR LOOP ')
@@ -602,7 +659,7 @@ def extract_month(self, year, month):
# source data to dest bdy pts. Only need do once.
if self.first:
# identify valid pts
- data_ind = np.invert(np.isnan(sc_bdy[0,:,:,:]))
+ self.data_ind = np.invert(np.isnan(sc_bdy[0,:,:,:]))
# dist_tot is currently 2D so extend along depth
# axis to allow single array calc later, also remove
# any invalid pts using our eldritch data_ind
@@ -610,10 +667,10 @@ def extract_month(self, year, month):
self.dist_tot = (np.repeat(self.dist_tot, sc_z_len).reshape(
self.dist_tot.shape[0],
self.dist_tot.shape[1], sc_z_len)).transpose(2,0,1)
- self.dist_tot *= data_ind
+ self.dist_tot *= self.data_ind
self.logger.info('DIST TOT ZEROS %s', np.sum(self.dist_tot == 0))
- self.logger.info('DIST IND ZEROS %s', np.sum(data_ind == 0))
+ self.logger.info('DIST IND ZEROS %s', np.sum(self.data_ind == 0))
# Identify problem pts due to grid discontinuities
# using dists > lat
@@ -625,22 +682,22 @@ def extract_month(self, year, month):
# Calculate guassian weighting with correlation dist
r0 = self.settings['r0']
- dist_wei = (1/(r0 * np.power(2 * np.pi, 0.5)))*(np.exp( -0.5 *np.power(self.dist_tot / r0, 2)))
+ self.dist_wei = (1/(r0 * np.power(2 * np.pi, 0.5)))*(np.exp( -0.5 *np.power(self.dist_tot / r0, 2)))
# Calculate sum of weightings
- dist_fac = np.sum(dist_wei * data_ind, 2)
+ self.dist_fac = np.sum(self.dist_wei * self.data_ind, 2)
# identify loc where all sc pts are land
- nan_ind = np.sum(data_ind, 2) == 0
- self.logger.info('NAN IND : %s ', np.sum(nan_ind))
+ self.nan_ind = np.sum(self.data_ind, 2) == 0
+ self.logger.info('NAN IND : %s ', np.sum(self.nan_ind))
# Calc max zlevel to which data available on sc grid
- data_ind = np.sum(nan_ind == 0, 0) - 1
+ self.data_ind = np.sum(self.nan_ind == 0, 0) - 1
# set land val to level 1 otherwise indexing problems
# may occur- should not affect later results because
# land is masked in weightings array
- data_ind[data_ind == -1] = 0
+ self.data_ind[self.data_ind == -1] = 0
# transform depth levels at each bdy pt to vector
# index that can be used to speed up calcs
- data_ind += np.arange(0, sc_z_len * self.num_bdy, sc_z_len)
+ self.data_ind += np.arange(0, sc_z_len * self.num_bdy, sc_z_len)
# ? Attribute only used on first run so clear.
del self.dist_tot
@@ -648,8 +705,8 @@ def extract_month(self, year, month):
# weighted averaged onto new horizontal grid
for vn in range(self.nvar):
self.logger.info(' sc_bdy %s %s', np.nanmin(sc_bdy), np.nanmax(sc_bdy))
- dst_bdy = (np.nansum(sc_bdy[vn,:,:,:] * dist_wei, 2) /
- dist_fac)
+ dst_bdy = (np.nansum(sc_bdy[vn,:,:,:] * self.dist_wei, 2) /
+ self.dist_fac)
self.logger.info(' dst_bdy %s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
# Quick check to see we have not got bad values
if np.sum(dst_bdy == np.inf) > 0:
@@ -658,8 +715,8 @@ def extract_month(self, year, month):
# weight vector array and rotate onto dest grid
if self.key_vec:
# [:,:,:,vn+1]
- dst_bdy_2 = (np.nansum(sc_bdy[vn+1,:,:,:] * dist_wei, 2) /
- dist_fac)
+ dst_bdy_2 = (np.nansum(sc_bdy[vn+1,:,:,:] * self.dist_wei, 2) /
+ self.dist_fac)
self.logger.info('time to to rot and rep ')
self.logger.info('%s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
self.logger.info( '%s en to %s %s' , self.rot_str,self.rot_dir, dst_bdy.shape)
@@ -668,18 +725,18 @@ def extract_month(self, year, month):
self.logger.info('%s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
# Apply 1-2-1 filter along bdy pts using NN ind self.id_121
if self.first:
- tmp_valid = np.invert(np.isnan(
+ self.tmp_valid = np.invert(np.isnan(
dst_bdy.flatten('F')[self.id_121]))
# Finished first run operations
self.first = False
dst_bdy = (np.nansum(dst_bdy.flatten('F')[self.id_121] *
self.tmp_filt, 2) / np.sum(self.tmp_filt *
- tmp_valid, 2))
+ self.tmp_valid, 2))
# Set land pts to zero
- self.logger.info(' pre dst_bdy[nan_ind] %s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
- dst_bdy[nan_ind] = 0
+ self.logger.info(' pre dst_bdy[self.nan_ind] %s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
+ dst_bdy[self.nan_ind] = 0
self.logger.info(' post dst_bdy %s %s', np.nanmin(dst_bdy), np.nanmax(dst_bdy))
# Remove any data on dst grid that is in land
dst_bdy[:,np.isnan(self.bdy_z)] = 0
@@ -690,7 +747,7 @@ def extract_month(self, year, month):
# If all else fails fill down using deepest pt
dst_bdy = dst_bdy.flatten('F')
dst_bdy += ((dst_bdy == 0) *
- dst_bdy[data_ind].repeat(sc_z_len))
+ dst_bdy[self.data_ind].repeat(sc_z_len))
# Weighted averaged on new vertical grid
dst_bdy = (dst_bdy[self.z_ind[:,0]] * self.z_dist[:,0] +
dst_bdy[self.z_ind[:,1]] * self.z_dist[:,1])
@@ -792,7 +849,7 @@ def time_interp(self, year, month):
# we're grouping variables then they must all have the same date stamps
nt = len(self.d_bdy[self.var_nam[0]]['date'])
time_counter = np.zeros([nt])
- tmp_cal = utime('seconds since %d-1-1' %year,
+ tmp_cal = utime('seconds since %d-1-1' %self.settings['base_year'],
self.settings['dst_calendar'].lower())
for t in range(nt):
@@ -810,7 +867,7 @@ def time_interp(self, year, month):
del_t = time_counter[1] - time_counter[0]
dstep = 86400 / np.int(del_t)
-
+ dstep = int(dstep)
# TODO: put in a test to check all deltaT are the same otherwise throw
# an exception
@@ -819,16 +876,24 @@ def time_interp(self, year, month):
# multiple of 86400 | data are annual means
if del_t >= 86400.:
for v in self.var_nam:
- intfn = interp1d(time_counter, self.d_bdy[v][1979]['data'][:,:,:], axis=0,
+ intfn = interp1d(time_counter, self.d_bdy[v][year]['data'][:,:,:], axis=0,
bounds_error=True)
- self.d_bdy[v][1979]['data'] = intfn(np.arange(time_000, time_end, 86400))
+ try:
+ self.d_bdy[v][year]['data'] = intfn(np.arange(time_000, time_end, 86400))
+ except ValueError as e:
+ self.logger.error('Value error in time_counter, does time horizon in data and bdy file match?')
+ raise ValueError('Value error in time_counter, does time horizon in data and bdy file match?') from e
else:
for v in self.var_nam:
for t in range(dstep):
intfn = interp1d(time_counter[t::dstep],
self.d_bdy[v].data[t::dstep,:,:], axis=0, bounds_error=True)
- self.d_bdy[v].data[t::dstep,:,:] = intfn(np.arange(time_000,
- time_end, 86400))
+ try:
+ self.d_bdy[v].data[t::dstep, :, :] = intfn(np.arange(time_000,time_end, 86400))
+ except ValueError as e:
+ self.logger.error('Value error in time_counter, does time horizon in data and bdy file match?')
+ raise ValueError('Value error in time_counter, does time horizon in data and bdy file match?') from e
+
self.time_counter = time_counter
def write_out(self, year, month, ind, unit_origin):
@@ -855,6 +920,11 @@ def write_out(self, year, month, ind, unit_origin):
f_out = self.settings['dst_dir']+self.settings['fn']+ \
'_bdy'+self.g_type.upper()+ '_y'+str(year)+'m'+'%02d' % month+'.nc'
+
+ ncml_out = glob(self.settings['ncml_out']+'/*'+'output_'+str(self.g_type.upper())+'.ncml')
+ if len(ncml_out) == 0:
+ raise RuntimeError('NCML out file for grid '+str(self.g_type.upper())+' missing, please add into NCML directory')
+ ncml_out = ncml_out[0]
ncgen.CreateBDYNetcdfFile(f_out, self.num_bdy,
self.jpi, self.jpj, self.jpk,
@@ -863,7 +933,7 @@ def write_out(self, year, month, ind, unit_origin):
unit_origin,
self.settings['fv'],
self.settings['dst_calendar'],
- self.g_type.upper())
+ self.g_type.upper(),self.var_nam,ncml_out)
self.logger.info('Writing out BDY data to: %s', f_out)
@@ -871,28 +941,30 @@ def write_out(self, year, month, ind, unit_origin):
# for v in self.variables:
for v in self.var_nam:
- if self.settings['dyn2d']: # Calculate depth averaged velocity
- tile_dz = np.tile(self.bdy_dz, [len(self.time_counter), 1, 1, 1])
- tmp_var = np.reshape(self.d_bdy[v][1979]['data'][:,:,:], tile_dz.shape)
- tmp_var = np.nansum(tmp_var * tile_dz, 2) /np.nansum(tile_dz, 2)
- else: # Replace NaNs with specified fill value
- tmp_var = np.where(np.isnan(self.d_bdy[v][1979]['data'][:,:,:]),
+ if self.isslab == True:
+ tmp_var = np.where(np.isnan(self.d_bdy[v][year]['data'][:, :, :]),
+ self.settings['fv'],
+ self.d_bdy[v][year]['data'][:, :, :])
+            # remove depths from variable, leaving only the surface layer
+ tmp_var = tmp_var[:,0:1,:]
+ else:
+ tmp_var = np.where(np.isnan(self.d_bdy[v][year]['data'][:,:,:]),
self.settings['fv'],
- self.d_bdy[v][1979]['data'][:,:,:])
+ self.d_bdy[v][year]['data'][:,:,:])
# Write variable to file
- ncpop.write_data_to_file(f_out, v, tmp_var)
+ ncpop.write_data_to_file(f_out, v, tmp_var,ncml_out)
# Write remaining data to file (indices are in Python notation
# therefore we must add 1 to i,j and r)
- ncpop.write_data_to_file(f_out, 'nav_lon', self.nav_lon)
- ncpop.write_data_to_file(f_out, 'nav_lat', self.nav_lat)
- ncpop.write_data_to_file(f_out, 'depth'+self.g_type, self.dst_dep)
- ncpop.write_data_to_file(f_out, 'nbidta', ind.bdy_i[:, 0] + 1)
- ncpop.write_data_to_file(f_out, 'nbjdta', ind.bdy_i[:, 1] + 1)
- ncpop.write_data_to_file(f_out, 'nbrdta', ind.bdy_r[: ] + 1)
- ncpop.write_data_to_file(f_out, 'time_counter', self.time_counter)
+ ncpop.write_data_to_file(f_out, 'nav_lon', self.nav_lon,ncml_out)
+ ncpop.write_data_to_file(f_out, 'nav_lat', self.nav_lat,ncml_out)
+ ncpop.write_data_to_file(f_out, 'depth'+self.g_type, self.dst_dep,ncml_out)
+ ncpop.write_data_to_file(f_out, 'nbidta', ind.bdy_i[:, 0] + 1,ncml_out)
+ ncpop.write_data_to_file(f_out, 'nbjdta', ind.bdy_i[:, 1] + 1,ncml_out)
+ ncpop.write_data_to_file(f_out, 'nbrdta', ind.bdy_r[: ] + 1,ncml_out)
+ ncpop.write_data_to_file(f_out, 'time_counter', self.time_counter,ncml_out)
diff --git a/pynemo/nemo_bdy_gen_c.py b/pynemo/nemo_bdy_gen_c.py
index 4f1a2958..da5cc90c 100644
--- a/pynemo/nemo_bdy_gen_c.py
+++ b/pynemo/nemo_bdy_gen_c.py
@@ -11,6 +11,7 @@
#External Imports
import numpy as np
import logging
+import sys
#Local Imports
from .utils.nemo_bdy_lib import sub2ind
@@ -74,7 +75,11 @@ def __init__(self, boundary_mask, settings, grid):
bdy_msk[grid_ind] = fval
# Create padded array for overlays
- msk = np.pad(bdy_msk,((1,1),(1,1)), 'constant', constant_values=(-1))
+ try:
+ msk = np.pad(bdy_msk,((1,1),(1,1)), 'constant', constant_values=(-1))
+ except ValueError:
+        raise Exception('the ValueError above is commonly due to PyNEMO not finding the bathymetry/mask file, or to it '
+                        'being in an incompatible format, e.g. there are zeros along all borders of the bathy file...')
# create index arrays of I and J coords
igrid, jgrid = np.meshgrid(np.arange(bdy_msk.shape[1]), np.arange(bdy_msk.shape[0]))
diff --git a/pynemo/nemo_bdy_ncgen.py b/pynemo/nemo_bdy_ncgen.py
index af21d1d3..3aa301a2 100644
--- a/pynemo/nemo_bdy_ncgen.py
+++ b/pynemo/nemo_bdy_ncgen.py
@@ -9,14 +9,13 @@
from netCDF4 import Dataset
import datetime
import logging
+from pynemo import nemo_ncml_parse as ncml_parse
-def CreateBDYNetcdfFile(filename, N, I, J, K, rw, h, orig, fv, calendar, grd):
+def CreateBDYNetcdfFile(filename, N, I, J, K, rw, h, orig, fv, calendar, grd, var_nam,ncml_out):
""" This method creates a template of bdy netcdf files. A common for
T, I, U, V, E grid types.
"""
-
gridNames = ['T', 'I', 'U', 'V', 'E', 'Z'] # All possible grids
-
# Dimension Lengths
xb_len = N
yb_len = 1
@@ -28,213 +27,255 @@ def CreateBDYNetcdfFile(filename, N, I, J, K, rw, h, orig, fv, calendar, grd):
ncid = Dataset(filename, 'w', clobber=True, format='NETCDF4')
#define dimensions
- if grd in gridNames and grd != 'Z': # i.e grid NOT barotropic (Z)
- dimztID = ncid.createDimension('z', depth_len)
+    if grd in gridNames:  # all known grids, including barotropic (Z), now get a depth dimension
+ z = ncml_parse.dst_dims(ncml_out,'z')
+ dimztID = ncid.createDimension(z, depth_len)
else:
- logging.error('Grid tpye not known')
- dimxbID = ncid.createDimension('xb', xb_len)
- dimybID = ncid.createDimension('yb', yb_len)
- dimxID = ncid.createDimension('x', x_len)
- dimyID = ncid.createDimension('y', y_len)
- dimtcID = ncid.createDimension('time_counter', None)
+ logging.error('Grid type not known')
+ xb = ncml_parse.dst_dims(ncml_out, 'xb')
+ dimxbID = ncid.createDimension(xb, xb_len)
+ yb = ncml_parse.dst_dims(ncml_out,'yb')
+ dimybID = ncid.createDimension(yb, yb_len)
+ x = ncml_parse.dst_dims(ncml_out,'x')
+ dimxID = ncid.createDimension(x, x_len)
+ y = ncml_parse.dst_dims(ncml_out,'y')
+ dimyID = ncid.createDimension(y, y_len)
+ time_counter = ncml_parse.dst_dims(ncml_out,'time_counter')
+ dimtcID = ncid.createDimension(time_counter, None)
#define variable
- vartcID = ncid.createVariable('time_counter', 'f4', ('time_counter', ))
- varlonID = ncid.createVariable('nav_lon', 'f4', ('y', 'x', ))
- varlatID = ncid.createVariable('nav_lat', 'f4', ('y', 'x', ))
+ time_var = ncml_parse.dst_var(ncml_out,'time_counter')
+ vartcID = ncid.createVariable(time_var['name'], time_var['type'], (time_var['shape'][0], ))
+ lon_var = ncml_parse.dst_var(ncml_out,'nav_lon')
+ varlonID = ncid.createVariable(lon_var['name'], lon_var['type'], (lon_var['shape'][0], lon_var['shape'][1], ))
+ lat_var = ncml_parse.dst_var(ncml_out,'nav_lat')
+ varlatID = ncid.createVariable(lat_var['name'], lat_var['type'], (lat_var['shape'][0], lat_var['shape'][1], ))
if grd in ['E']:
- varztID = ncid.createVariable('deptht', 'f4', ('z', 'yb', 'xb', ))
- varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
- varN1pID = ncid.createVariable('N1p', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+ deptht = ncml_parse.dst_var(ncml_out,'deptht')
+ varztID = ncid.createVariable(deptht['name'], deptht['type'], (deptht['shape'][0], deptht['shape'][1], deptht['shape'][2], ))
+ bdy_msk = ncml_parse.dst_var(ncml_out,'bdy_msk')
+ varmskID = ncid.createVariable(bdy_msk['name'], bdy_msk['type'], (bdy_msk['shape'][0], bdy_msk['shape'][1], ), fill_value=fv)
+ N1p = ncml_parse.dst_var(ncml_out,'N1p')
+ varN1pID = ncid.createVariable(N1p['name'], N1p['type'], (N1p['shape'][0], N1p['shape'][1], N1p['shape'][2], N1p['shape'][3], ),
fill_value=fv)
- varN3nID = ncid.createVariable('N3n', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+ N3n = ncml_parse.dst_var(ncml_out,'N3n')
+ varN3nID = ncid.createVariable(N3n['name'], N3n['type'], (N3n['shape'][0], N3n['shape'][1], N3n['shape'][2], N3n['shape'][3], ),
fill_value=fv)
- varN5sID = ncid.createVariable('N5s', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+ N5s = ncml_parse.dst_var(ncml_out,'N5s')
+        varN5sID = ncid.createVariable(N5s['name'], N5s['type'], (N5s['shape'][0], N5s['shape'][1], N5s['shape'][2], N5s['shape'][3], ),
fill_value=fv)
elif grd in ['T', 'I']:
- varztID = ncid.createVariable('deptht', 'f4', ('z', 'yb', 'xb', ))
- varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
- vartmpID = ncid.createVariable('votemper', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
- fill_value=fv)
- varsalID = ncid.createVariable('vosaline', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
- fill_value=fv)
+ deptht = ncml_parse.dst_var(ncml_out, 'deptht')
+ varztID = ncid.createVariable(deptht['name'], deptht['type'],(deptht['shape'][0], deptht['shape'][1], deptht['shape'][2],))
+ bdy_msk = ncml_parse.dst_var(ncml_out, 'bdy_msk')
+ varmskID = ncid.createVariable(bdy_msk['name'], bdy_msk['type'], (bdy_msk['shape'][0], bdy_msk['shape'][1],),fill_value=fv)
+
+ temp_var = ncml_parse.dst_var(ncml_out,'votemper')
+ vartmpID = ncid.createVariable(temp_var['name'], temp_var['type'],
+ (temp_var['shape'][0], temp_var['shape'][1], temp_var['shape'][2], temp_var['shape'][3], ), fill_value=fv)
+ sal_var = ncml_parse.dst_var(ncml_out,'vosaline')
+ varsalID = ncid.createVariable(sal_var['name'], sal_var['type'],
+ (sal_var['shape'][0], sal_var['shape'][1], sal_var['shape'][2], sal_var['shape'][3], ), fill_value=fv)
+
if grd == 'I':
- varildID = ncid.createVariable('ileadfra', 'f4', ('time_counter', 'yb', 'xb',),
+ ileadfra = ncml_parse.dst_var(ncml_out,'ileadfra')
+ varildID = ncid.createVariable(ileadfra['name'], ileadfra['type'], (ileadfra['shape'][0], ileadfra['shape'][1], ileadfra['shape'][2],),
fill_value=fv)
- variicID = ncid.createVariable('iicethic', 'f4', ('time_counter', 'yb', 'xb',),
+ iicethic = ncml_parse.dst_var(ncml_out,'iicethic')
+ variicID = ncid.createVariable(iicethic['name'], iicethic['type'], (iicethic['shape'][0], iicethic['shape'][1], iicethic['shape'][2],),
fill_value=fv)
- varisnID = ncid.createVariable('isnowthi', 'f4', ('time_counter', 'yb', 'xb',),
+ isnowthi = ncml_parse.dst_var(ncml_out,'isnowthi')
+ varisnID = ncid.createVariable(isnowthi['name'], isnowthi['type'], (isnowthi['shape'][0], isnowthi['shape'][1], isnowthi['shape'][2],),
fill_value=fv)
elif grd == 'U':
- varztID = ncid.createVariable('depthu', 'f4', ('z', 'yb', 'xb', ), fill_value=fv)
- varbtuID = ncid.createVariable('vobtcrtx', 'f4', ('time_counter', 'yb', 'xb', ),
+ depthu = ncml_parse.dst_var(ncml_out,'depthu')
+ varztID = ncid.createVariable(depthu['name'], depthu['type'], (depthu['shape'][0], depthu['shape'][1], depthu['shape'][2], ), fill_value=fv)
+ vobtcrx = ncml_parse.dst_var(ncml_out,'vobtcrtx')
+ varbtuID = ncid.createVariable(vobtcrx['name'], vobtcrx['type'], (vobtcrx['shape'][0], vobtcrx['shape'][1], vobtcrx['shape'][2], ),
fill_value=fv)
- vartouID = ncid.createVariable('vozocrtx', 'f4', ('time_counter', 'z', 'yb', 'xb', ),
+ vozocrtx = ncml_parse.dst_var(ncml_out,'vozocrtx')
+ vartouID = ncid.createVariable(vozocrtx['name'], vozocrtx['type'],
+ (vozocrtx['shape'][0], vozocrtx['shape'][1], vozocrtx['shape'][2], vozocrtx['shape'][3], ),
fill_value=fv)
elif grd == 'V':
- varztID = ncid.createVariable('depthv', 'f4', ('z', 'yb', 'xb', ))
- varbtvID = ncid.createVariable('vobtcrty', 'f4', ('time_counter', 'yb', 'xb', ),
+ depthv = ncml_parse.dst_var(ncml_out, 'depthv')
+ varztID = ncid.createVariable(depthv['name'], depthv['type'], (depthv['shape'][0], depthv['shape'][1], depthv['shape'][2], ))
+ vobtcrty = ncml_parse.dst_var(ncml_out,'vobtcrty')
+ varbtvID = ncid.createVariable(vobtcrty['name'], vobtcrty['type'], (vobtcrty['shape'][0], vobtcrty['shape'][1], vobtcrty['shape'][2], ),
fill_value=fv)
- vartovID = ncid.createVariable('vomecrty', 'f4', ('time_counter', 'z', 'yb', 'xb',),
+ vomecrty = ncml_parse.dst_var(ncml_out,'vomecrty')
+ vartovID = ncid.createVariable(vomecrty['name'], vomecrty['type'],
+ (vomecrty['shape'][0], vomecrty['shape'][1], vomecrty['shape'][2], vomecrty['shape'][3],),
fill_value=fv)
elif grd == 'Z':
- varsshID = ncid.createVariable('sossheig', 'f4', ('time_counter', 'yb', 'xb', ),
+ depthz = ncml_parse.dst_var(ncml_out, 'depthz')
+ varztID = ncid.createVariable(depthz['name'], depthz['type'], (depthz['shape'][0], depthz['shape'][1], depthz['shape'][2], ))
+ sossheig = ncml_parse.dst_var(ncml_out,'sossheig')
+ varsshID = ncid.createVariable(sossheig['name'], sossheig['type'], (sossheig['shape'][0], sossheig['shape'][1], sossheig['shape'][2] ),
fill_value=fv)
- varmskID = ncid.createVariable('bdy_msk', 'f4', ('y', 'x', ), fill_value=fv)
+ bdy_msk = ncml_parse.dst_var(ncml_out,'bdy_msk')
+ varmskID = ncid.createVariable(bdy_msk['name'], bdy_msk['type'], (bdy_msk['shape'][0], bdy_msk['shape'][1], ), fill_value=fv)
else:
logging.error("Unknow Grid input")
+ nbidta = ncml_parse.dst_var(ncml_out,'nbidta')
+ varnbiID = ncid.createVariable(nbidta['name'], nbidta['type'], (nbidta['shape'][0], nbidta['shape'][1], ))
+ nbjdta = ncml_parse.dst_var(ncml_out,'nbjdta')
+ varnbjID = ncid.createVariable(nbjdta['name'], nbjdta['type'], (nbjdta['shape'][0], nbjdta['shape'][1], ))
+ nbrdta = ncml_parse.dst_var(ncml_out, 'nbrdta')
+ varnbrID = ncid.createVariable(nbrdta['name'], nbrdta['type'], (nbrdta['shape'][0], nbrdta['shape'][1], ))
- varnbiID = ncid.createVariable('nbidta', 'i4', ('yb', 'xb', ))
- varnbjID = ncid.createVariable('nbjdta', 'i4', ('yb', 'xb', ))
- varnbrID = ncid.createVariable('nbrdta', 'i4', ('yb', 'xb', ))
#Global Attributes
ncid.file_name = filename
ncid.creation_date = str(datetime.datetime.now())
ncid.rim_width = rw
ncid.history = h
- ncid.institution = 'National Oceanography Centre, Livepool, U.K.'
+ ncid.institution = ncml_parse.dst_glob_attrib(ncml_out,'institution')
#Time axis attributes
- vartcID.axis = 'T'
- vartcID.standard_name = 'time'
+ vartcID.axis = ncml_parse.dst_var_attrib(ncml_out,time_var['name'],'axis')
+ vartcID.standard_name = ncml_parse.dst_var_attrib(ncml_out,time_var['name'],'standard_name')
vartcID.units = 'seconds since '+orig
- vartcID.title = 'Time'
- vartcID.long_name = 'Time axis'
+ vartcID.title = ncml_parse.dst_var_attrib(ncml_out,time_var['name'],'title')
+ vartcID.long_name = ncml_parse.dst_var_attrib(ncml_out,time_var['name'],'long_name')
+ # TODO: should the bdy file or NCML file define what origin or calendar to use?
vartcID.time_origin = orig
vartcID.calendar = calendar
#Longitude axis attributes
- varlonID.axis = 'Longitude'
- varlonID.short_name = 'nav_lon'
- varlonID.units = 'degrees_east'
- varlonID.long_name = 'Longitude'
+ varlonID.axis = ncml_parse.dst_var_attrib(ncml_out,lon_var['name'],'axis')
+ varlonID.short_name = ncml_parse.dst_var_attrib(ncml_out,lon_var['name'],'short_name')
+ varlonID.units = ncml_parse.dst_var_attrib(ncml_out,lon_var['name'],'units')
+ varlonID.long_name = ncml_parse.dst_var_attrib(ncml_out,lon_var['name'],'long_name')
#Latitude axis attributes
- varlatID.axis = 'Latitude'
- varlatID.short_name = 'nav_lat'
- varlatID.units = 'degrees_east'
- varlatID.long_name = 'Latitude'
+ varlatID.axis = ncml_parse.dst_var_attrib(ncml_out,lat_var['name'],'axis')
+ varlatID.short_name = ncml_parse.dst_var_attrib(ncml_out,lat_var['name'],'short_name')
+ varlatID.units = ncml_parse.dst_var_attrib(ncml_out,lat_var['name'],'units')
+ varlatID.long_name = ncml_parse.dst_var_attrib(ncml_out,lat_var['name'],'long_name')
#nbidta attributes
- varnbiID.short_name = 'nbidta'
- varnbiID.units = 'unitless'
- varnbiID.long_name = 'Bdy i indices'
+ varnbiID.short_name = ncml_parse.dst_var_attrib(ncml_out,nbidta['name'],'short_name')
+ varnbiID.units = ncml_parse.dst_var_attrib(ncml_out,nbidta['name'],'units')
+ varnbiID.long_name = ncml_parse.dst_var_attrib(ncml_out,nbidta['name'],'long_name')
#nbjdta attributes
- varnbjID.short_name = 'nbjdta'
- varnbjID.units = 'unitless'
- varnbjID.long_name = 'Bdy j indices'
+ varnbjID.short_name = ncml_parse.dst_var_attrib(ncml_out,nbjdta['name'],'short_name')
+ varnbjID.units = ncml_parse.dst_var_attrib(ncml_out,nbjdta['name'],'units')
+ varnbjID.long_name = ncml_parse.dst_var_attrib(ncml_out,nbjdta['name'],'long_name')
#nbrdta attributes
- varnbrID.short_name = 'nbrdta'
- varnbrID.units = 'unitless'
- varnbrID.long_name = 'Bdy discrete distance'
+ varnbrID.short_name = ncml_parse.dst_var_attrib(ncml_out,nbrdta['name'],'short_name')
+ varnbrID.units = ncml_parse.dst_var_attrib(ncml_out,nbrdta['name'],'units')
+ varnbrID.long_name = ncml_parse.dst_var_attrib(ncml_out,nbrdta['name'],'long_name')
+
if grd == 'E':
- varztID.axis = 'Depth'
- varztID.short_name = 'deptht'
- varztID.units = 'm'
- varztID.long_name = 'Depth'
-
- varmskID.short_name = 'bdy_msk'
- varmskID.units = 'unitless'
- varmskID.long_name = 'Structured boundary mask'
-
- varN1pID.units = 'mmol/m^3'
- varN1pID.short_name = 'N1p'
- varN1pID.long_name = 'Phosphate'
- varN1pID.grid = 'bdyT'
-
- varN3nID.units = 'mmol/m^3'
- varN3nID.short_name = 'N3n'
- varN3nID.long_name = 'Nitrate'
- varN3nID.grid = 'bdyT'
-
- varN5sID.units = 'mmol/m^3'
- varN5sID.short_name = 'N5s'
- varN5sID.long_name = 'Silicate'
- varN5sID.grid = 'bdyT'
+        varztID.axis = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'axis')
+        varztID.short_name = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'short_name')
+        varztID.units = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'units')
+        varztID.long_name = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'long_name')
+
+        varmskID.short_name = ncml_parse.dst_var_attrib(ncml_out,varmskID.name,'short_name')
+        varmskID.units = ncml_parse.dst_var_attrib(ncml_out,varmskID.name,'units')
+        varmskID.long_name = ncml_parse.dst_var_attrib(ncml_out,varmskID.name,'long_name')
+
+        varN1pID.units = ncml_parse.dst_var_attrib(ncml_out,varN1pID.name,'units')
+        varN1pID.short_name = ncml_parse.dst_var_attrib(ncml_out,varN1pID.name,'short_name')
+        varN1pID.long_name = ncml_parse.dst_var_attrib(ncml_out,varN1pID.name,'long_name')
+        varN1pID.grid = ncml_parse.dst_var_attrib(ncml_out,varN1pID.name,'grid')
+
+        varN3nID.units = ncml_parse.dst_var_attrib(ncml_out,varN3nID.name,'units')
+        varN3nID.short_name = ncml_parse.dst_var_attrib(ncml_out,varN3nID.name,'short_name')
+        varN3nID.long_name = ncml_parse.dst_var_attrib(ncml_out,varN3nID.name,'long_name')
+        varN3nID.grid = ncml_parse.dst_var_attrib(ncml_out,varN3nID.name,'grid')
+
+        varN5sID.units = ncml_parse.dst_var_attrib(ncml_out,varN5sID.name,'units')
+        varN5sID.short_name = ncml_parse.dst_var_attrib(ncml_out,varN5sID.name,'short_name')
+        varN5sID.long_name = ncml_parse.dst_var_attrib(ncml_out,varN5sID.name,'long_name')
+        varN5sID.grid = ncml_parse.dst_var_attrib(ncml_out,varN5sID.name,'grid')
if grd in ['T', 'I']:
- varztID.axis = 'Depth'
- varztID.short_name = 'deptht'
- varztID.units = 'm'
- varztID.long_name = 'Depth'
+ varztID.axis = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'axis')
+ varztID.short_name = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'short_name')
+ varztID.units = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'units')
+ varztID.long_name = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'long_name')
- varmskID.short_name = 'bdy_msk'
- varmskID.units = 'unitless'
- varmskID.long_name = 'Structured boundary mask'
+ varmskID.short_name = ncml_parse.dst_var_attrib(ncml_out,varmskID.name,'short_name')
+ varmskID.units = ncml_parse.dst_var_attrib(ncml_out,varmskID.name,'units')
+ varmskID.long_name = ncml_parse.dst_var_attrib(ncml_out,varmskID.name,'long_name')
- vartmpID.units = 'C'
- vartmpID.short_name = 'votemper'
- vartmpID.long_name = 'Temperature'
- vartmpID.grid = 'bdyT'
+ vartmpID.units = ncml_parse.dst_var_attrib(ncml_out,vartmpID.name,'units')
+ vartmpID.short_name = ncml_parse.dst_var_attrib(ncml_out,vartmpID.name,'short_name')
+ vartmpID.long_name = ncml_parse.dst_var_attrib(ncml_out,vartmpID.name,'long_name')
+ vartmpID.grid = ncml_parse.dst_var_attrib(ncml_out,vartmpID.name,'grid')
- varsalID.units = 'PSU'
- varsalID.short_name = 'vosaline'
- varsalID.long_name = 'Salinity'
- varsalID.grid = 'bdyT'
+ varsalID.units = ncml_parse.dst_var_attrib(ncml_out,varsalID.name,'units')
+ varsalID.short_name = ncml_parse.dst_var_attrib(ncml_out,varsalID.name,'short_name')
+ varsalID.long_name = ncml_parse.dst_var_attrib(ncml_out,varsalID.name,'long_name')
+ varsalID.grid = ncml_parse.dst_var_attrib(ncml_out,varsalID.name,'grid')
if grd == 'I':
- varildID.units = '%'
- varildID.short_name = 'ildsconc'
- varildID.long_name = 'Ice lead fraction'
- varildID.grid = 'bdyT'
-
- variicID.units = 'm'
- variicID.short_name = 'iicethic'
- variicID.long_name = 'Ice thickness'
- variicID.grid = 'bdyT'
-
- varisnID.units = 'm'
- varisnID.short_name = 'isnowthi'
- varisnID.long_name = 'Snow thickness'
- varisnID.grid = 'bdyT'
+ varildID.units = ncml_parse.dst_var_attrib(ncml_out,varildID.name,'units')
+ varildID.short_name = ncml_parse.dst_var_attrib(ncml_out,varildID.name,'short_name')
+ varildID.long_name = ncml_parse.dst_var_attrib(ncml_out,varildID.name,'long_name')
+ varildID.grid = ncml_parse.dst_var_attrib(ncml_out,varildID.name,'grid')
+
+ variicID.units = ncml_parse.dst_var_attrib(ncml_out,variicID.name,'units')
+ variicID.short_name = ncml_parse.dst_var_attrib(ncml_out,variicID.name,'short_name')
+ variicID.long_name = ncml_parse.dst_var_attrib(ncml_out,variicID.name,'long_name')
+ variicID.grid = ncml_parse.dst_var_attrib(ncml_out,variicID.name,'grid')
+
+ varisnID.units = ncml_parse.dst_var_attrib(ncml_out,varisnID.name,'units')
+ varisnID.short_name = ncml_parse.dst_var_attrib(ncml_out,varisnID.name,'short_name')
+ varisnID.long_name = ncml_parse.dst_var_attrib(ncml_out,varisnID.name,'long_name')
+ varisnID.grid = ncml_parse.dst_var_attrib(ncml_out,varisnID.name,'grid')
+
elif grd == 'U':
- varztID.axis = 'Depth'
- varztID.short_name = 'depthu'
- varztID.units = 'm'
- varztID.long_name = 'Depth'
+ varztID.axis = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'axis')
+ varztID.short_name = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'short_name')
+ varztID.units = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'units')
+ varztID.long_name = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'long_name')
- varbtuID.units = 'm/s'
- varbtuID.short_name = 'vobtcrtx'
- varbtuID.long_name = 'Thickness-weighted depth-averaged zonal Current'
- varbtuID.grid = 'bdyU'
+ varbtuID.units = ncml_parse.dst_var_attrib(ncml_out,varbtuID.name,'units')
+ varbtuID.short_name = ncml_parse.dst_var_attrib(ncml_out,varbtuID.name,'short_name')
+ varbtuID.long_name = ncml_parse.dst_var_attrib(ncml_out,varbtuID.name,'long_name')
+ varbtuID.grid = ncml_parse.dst_var_attrib(ncml_out,varbtuID.name,'grid')
- vartouID.units = 'm/s'
- vartouID.short_name = 'vozocrtx'
- vartouID.long_name = 'Zonal Current'
- vartouID.grid = 'bdyU'
+ vartouID.units = ncml_parse.dst_var_attrib(ncml_out,vartouID.name,'units')
+ vartouID.short_name = ncml_parse.dst_var_attrib(ncml_out,vartouID.name,'short_name')
+ vartouID.long_name = ncml_parse.dst_var_attrib(ncml_out,vartouID.name,'long_name')
+ vartouID.grid = ncml_parse.dst_var_attrib(ncml_out,vartouID.name,'grid')
elif grd == 'V':
- varztID.axis = 'Depth'
- varztID.short_name = 'depthv'
- varztID.units = 'm'
- varztID.long_name = 'Depth'
+ varztID.axis = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'axis')
+ varztID.short_name = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'short_name')
+ varztID.units = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'units')
+ varztID.long_name = ncml_parse.dst_var_attrib(ncml_out,varztID.name,'long_name')
- varbtvID.units = 'm/s'
- varbtvID.short_name = 'vobtcrty'
- varbtvID.long_name = 'Thickness-weighted depth-averaged meridional Current'
- varbtvID.grid = 'bdyV'
+ varbtvID.units = ncml_parse.dst_var_attrib(ncml_out,varbtvID.name,'units')
+ varbtvID.short_name = ncml_parse.dst_var_attrib(ncml_out,varbtvID.name,'short_name')
+ varbtvID.long_name = ncml_parse.dst_var_attrib(ncml_out,varbtvID.name,'long_name')
+ varbtvID.grid = ncml_parse.dst_var_attrib(ncml_out,varbtvID.name,'grid')
- vartovID.units = 'm/s'
- vartovID.short_name = 'vomecrty'
- vartovID.long_name = 'Meridional Current'
- vartovID.grid = 'bdyV'
+ vartovID.units = ncml_parse.dst_var_attrib(ncml_out,vartovID.name,'units')
+ vartovID.short_name = ncml_parse.dst_var_attrib(ncml_out,vartovID.name,'short_name')
+ vartovID.long_name = ncml_parse.dst_var_attrib(ncml_out,vartovID.name,'long_name')
+ vartovID.grid = ncml_parse.dst_var_attrib(ncml_out,vartovID.name,'grid')
elif grd == 'Z':
- varsshID.units = 'm'
- varsshID.short_name = 'sossheig'
- varsshID.long_name = 'Sea Surface Height'
- varsshID.grid = 'bdyT'
+ varsshID.units = ncml_parse.dst_var_attrib(ncml_out,varsshID.name,'units')
+ varsshID.short_name = ncml_parse.dst_var_attrib(ncml_out,varsshID.name,'short_name')
+ varsshID.long_name = ncml_parse.dst_var_attrib(ncml_out,varsshID.name,'long_name')
+ varsshID.grid = ncml_parse.dst_var_attrib(ncml_out,varsshID.name,'grid')
- varmskID.short_name = 'bdy_msk'
- varmskID.units = 'unitless'
- varmskID.long_name = 'Structured boundary mask'
+ varmskID.short_name = ncml_parse.dst_var_attrib(ncml_out,varmskID.name,'short_name')
+ varmskID.units = ncml_parse.dst_var_attrib(ncml_out,varmskID.name,'units')
+ varmskID.long_name = ncml_parse.dst_var_attrib(ncml_out,varmskID.name,'long_name')
else:
logging.error('Unknown Grid')
ncid.close()
+
diff --git a/pynemo/nemo_bdy_ncpop.py b/pynemo/nemo_bdy_ncpop.py
index 39d344a9..13ef0299 100644
--- a/pynemo/nemo_bdy_ncpop.py
+++ b/pynemo/nemo_bdy_ncpop.py
@@ -8,7 +8,9 @@
# pylint: disable=no-name-in-module
from netCDF4 import Dataset
import numpy as np
-def write_data_to_file(filename, variable_name, data):
+from pynemo import nemo_ncml_parse as ncml_parse
+
+def write_data_to_file(filename, variable_name, data,ncml_out):
""" Writes the data to the netcdf templete file.
Keyword arguments:
filename -- output filename
@@ -17,9 +19,10 @@ def write_data_to_file(filename, variable_name, data):
"""
ncid = Dataset(filename, 'a', clobber=False, format='NETCDF4')
count = data.shape
-
- three_dim_variables = ['votemper', 'vosaline', 'N1p', 'N3n', 'N5s']
- two_dim_variables = ['sossheig', 'vobtcrtx', 'vobtcrty', 'iicethic', 'ileadfra', 'isnowthi']
+ time = ncml_parse.dst_dims(ncml_out,'time_counter')
+ var_list = ncml_parse.dst_var_list(ncml_out,time)
+ three_dim_variables = var_list['3D_vars'] #['votemper', 'vosaline', 'N1p', 'N3n', 'N5s','vobtcrtx','vozocrtx','vobtcrty','vomecrty']
+ two_dim_variables = var_list['2D_vars'] #['sossheig', 'iicethic', 'ileadfra', 'isnowthi']
if variable_name in three_dim_variables:
if len(count) == 3:
diff --git a/pynemo/nemo_bdy_zgrv2.py b/pynemo/nemo_bdy_zgrv2.py
index 4503e0d9..992d983b 100644
--- a/pynemo/nemo_bdy_zgrv2.py
+++ b/pynemo/nemo_bdy_zgrv2.py
@@ -52,7 +52,7 @@ def __init__(self, bdy_t, bdy_u, bdy_v, settings):
# Check inputs
# FIX ME? Errors for wrong obj arg len. probably better to work around
- print(settings)
+ self.logger.info(settings)
if settings['sco']:
# hc = ... FIX ME??
# Depth of water column at t-point
@@ -114,9 +114,11 @@ def __init__(self, bdy_t, bdy_u, bdy_v, settings):
for p in list(self.zpoints.keys()):
self.zpoints[p] = self.zpoints[p].reshape(zshapes[p])
-
- self.logger.debug( 'Done loop, zpoints: %s ', self.zpoints['t'].shape)
-
+
+ self.zpoints['z'] = self.zpoints['t'][0,:]
+ self.zpoints['wz'] = self.zpoints['wt'][0,:]
+
+ self.logger.debug('Done loop, zpoints: %s ', self.zpoints['t'].shape)
nc.close()
diff --git a/pynemo/nemo_ncml_parse.py b/pynemo/nemo_ncml_parse.py
new file mode 100644
index 00000000..19617dee
--- /dev/null
+++ b/pynemo/nemo_ncml_parse.py
@@ -0,0 +1,94 @@
+'''
+NCML python parser using XML to Dict
+'''
+
+import xml.etree.ElementTree as ET
+import xmltodict
+import logging
+
+logger = logging.getLogger(__name__)
+
+def dst_dims(ncmlfile,orgName):
+ ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
+ dimensions = ncml_dict['ns0:netcdf']['ns0:dimension']
+ if type(dimensions) is not list:
+ dimensions = [dimensions]
+ if not any(d['@orgName'] == orgName for d in dimensions):
+ raise ValueError('dimension name not defined in NCML output specification file')
+ for i in range(len(dimensions)):
+ if dimensions[i]['@orgName'] == orgName:
+ dim_name = dimensions[i]['@name']
+ break
+ return dim_name
+
+def dst_var_list(ncmlfile, time):
+ ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
+ variables = ncml_dict['ns0:netcdf']['ns0:variable']
+ var_3D = []
+ var_2D = []
+ for i in range(len(variables)):
+ if len(variables[i]['@shape'].split(' ')) == 4 and variables[i]['@shape'].split(' ')[0] == time:
+ var_3D.append(variables[i]['@name'])
+ if len(variables[i]['@shape'].split(' ')) == 3 and variables[i]['@shape'].split(' ')[0] == time:
+ var_2D.append(variables[i]['@name'])
+ var_type = {'3D_vars': var_3D,
+ '2D_vars': var_2D,
+ }
+ return var_type
+
+def src_var_list(ncmlfile):
+ ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
+ variables = ncml_dict['ns0:netcdf']['ns0:variable']
+ var_list = []
+ for i in range(len(variables)):
+ var_list.append(variables[i]['@name'])
+ return var_list
+
+def dst_var(ncmlfile, orgName):
+ ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
+ variables = ncml_dict['ns0:netcdf']['ns0:variable']
+ if type(variables) is not list:
+ variables = [variables]
+ if not any(d['@orgName'] == orgName for d in variables):
+ raise ValueError('variable name not defined in NCML output specification file')
+ var = {}
+ for i in range(len(variables)):
+ if variables[i]['@orgName'] == orgName:
+ var['name'] = variables[i]['@name']
+ var['shape'] = variables[i]['@shape'].split(' ')
+ if variables[i]['@type'] == 'float':
+ var['type'] = 'f4'
+ if variables[i]['@type'] == 'int':
+ var['type'] = 'i4'
+ break
+ return var
+
+def dst_glob_attrib(ncmlfile,name):
+ ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
+ nc_attrib = ncml_dict['ns0:netcdf']['ns0:attribute']
+ if type(nc_attrib) is not list:
+ nc_attrib = [nc_attrib]
+ if not any(d['@name'] == name for d in nc_attrib):
+ logger.warning('Global attribute name not found, writing attribute not specified')
+ return 'Global attribute not specified in NCML file'
+ for i in range(len(nc_attrib)):
+ if nc_attrib[i]['@name'] == name:
+ attrib_val = nc_attrib[i]['@value']
+ return attrib_val
+
+def dst_var_attrib(ncmlfile, variable,name):
+ ncml_dict = xmltodict.parse(ET.tostring(ET.parse(ncmlfile).getroot()))
+ variables = ncml_dict['ns0:netcdf']['ns0:variable']
+ if type(variables) is not list:
+ variables = [variables]
+ for i in range(len(variables)):
+ if variables[i]['@name'] == variable:
+ if not any(d['@name'] == name for d in variables[i]['ns0:attribute']):
+                logger.warning('variable attribute name not found, writing attribute not specified')
+ return 'Variable attribute not specified in NCML file'
+ for j in range(len(variables[i]['ns0:attribute'])):
+ if variables[i]['ns0:attribute'][j]['@name'] == name:
+ attrib = variables[i]['ns0:attribute'][j]['@value']
+ break
+ break
+ return attrib
\ No newline at end of file
diff --git a/pynemo/output_NCML/NEMO_output_I.ncml b/pynemo/output_NCML/NEMO_output_I.ncml
new file mode 100644
index 00000000..4b0e72ad
--- /dev/null
+++ b/pynemo/output_NCML/NEMO_output_I.ncml
@@ -0,0 +1,85 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pynemo/output_NCML/NEMO_output_T.ncml b/pynemo/output_NCML/NEMO_output_T.ncml
new file mode 100644
index 00000000..a1c4ae15
--- /dev/null
+++ b/pynemo/output_NCML/NEMO_output_T.ncml
@@ -0,0 +1,67 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pynemo/output_NCML/NEMO_output_U.ncml b/pynemo/output_NCML/NEMO_output_U.ncml
new file mode 100644
index 00000000..dcbeb5e9
--- /dev/null
+++ b/pynemo/output_NCML/NEMO_output_U.ncml
@@ -0,0 +1,67 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pynemo/output_NCML/NEMO_output_V.ncml b/pynemo/output_NCML/NEMO_output_V.ncml
new file mode 100644
index 00000000..70431ef8
--- /dev/null
+++ b/pynemo/output_NCML/NEMO_output_V.ncml
@@ -0,0 +1,67 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pynemo/output_NCML/NEMO_output_Z.ncml b/pynemo/output_NCML/NEMO_output_Z.ncml
new file mode 100644
index 00000000..b1612c90
--- /dev/null
+++ b/pynemo/output_NCML/NEMO_output_Z.ncml
@@ -0,0 +1,61 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pynemo/output_NCML/NEMO_tide_U.ncml b/pynemo/output_NCML/NEMO_tide_U.ncml
new file mode 100644
index 00000000..c27e7020
--- /dev/null
+++ b/pynemo/output_NCML/NEMO_tide_U.ncml
@@ -0,0 +1,53 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pynemo/output_NCML/NEMO_tide_V.ncml b/pynemo/output_NCML/NEMO_tide_V.ncml
new file mode 100644
index 00000000..38dc658d
--- /dev/null
+++ b/pynemo/output_NCML/NEMO_tide_V.ncml
@@ -0,0 +1,53 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pynemo/output_NCML/NEMO_tide_Z.ncml b/pynemo/output_NCML/NEMO_tide_Z.ncml
new file mode 100644
index 00000000..db96b6c2
--- /dev/null
+++ b/pynemo/output_NCML/NEMO_tide_Z.ncml
@@ -0,0 +1,53 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pynemo/profile.py b/pynemo/profile.py
index 8152cdc4..49a49adb 100644
--- a/pynemo/profile.py
+++ b/pynemo/profile.py
@@ -35,6 +35,10 @@
import logging
import numpy as np
from PyQt5.QtWidgets import QMessageBox
+from calendar import monthrange
+import sys
+from glob import glob
+
#Local imports
from pynemo import pynemo_settings_editor
@@ -47,13 +51,16 @@
from pynemo import nemo_coord_gen_pop as coord
from pynemo import nemo_bdy_zgrv2 as zgrv
from pynemo import nemo_bdy_extr_tm3 as extract
+from pynemo import nemo_ncml_parse as ncml_parse
from pynemo.reader.factory import GetFile
from pynemo.reader import factory
from pynemo.tide import nemo_bdy_tide3 as tide
from pynemo.tide import nemo_bdy_tide_ncgen
+from pynemo.tests import nemo_tide_test as tt
from pynemo.utils import Constants
from pynemo.gui.nemo_bdy_mask import Mask as Mask_File
+from pynemo import nemo_bdy_dl_cmems as dl_cmems
class Grid(object):
"""
@@ -71,6 +78,209 @@ def __init__(self):
logger = logging.getLogger(__name__)
logging.basicConfig(filename='nrct.log', level=logging.INFO)
+def download_cmems(setup_filepath=0):
+ '''
+ CMEMS download function.
+
+    This is the main script to download CMEMS data; it has been separated from the core PyNEMO
+    workflow so that download problems (e.g. dropped connections) can be handled and retried cleanly.
+
+ Input options are handled in the same NEMO style namelist that the main script uses.
+
+
+    :param setup_filepath: path to the NEMO-style namelist (bdy) file holding the download settings
+    :param mask_gui: not an argument of this function; listed in error -- TODO remove from docstring
+    :return: None; calls sys.exit() with the error message on unrecoverable failures
+ '''
+ logger.info('============================================')
+ logger.info('Start CMEMS download Logging: ' + time.asctime())
+ logger.info('============================================')
+
+ Setup = setup.Setup(setup_filepath) # default settings file
+ settings = Setup.settings
+ if settings['download_static'] == False:
+ logger.info('CMEMS static data download not requested')
+ if settings['download_static'] == True:
+ for re in range(settings['num_retry']):
+ logger.info('CMEMS Static data requested: downloading static data now.... (this may take awhile)')
+ static = dl_cmems.get_static(settings)
+ if static == 0:
+ logger.info('CMEMS static data downloaded')
+ break
+ if type(static) == str:
+ err_chk = dl_cmems.err_parse(static,'FTP')
+ if err_chk == 0:
+ logger.info('retrying FTP download....retry number '+str(re+1)+' of '+str(settings['num_retry']) )
+ if re == (settings['num_retry']-1):
+ logger.critical('reached retry limit defined in BDY file, exiting now')
+ logger.critical(static)
+ dl_cmems.clean_up(settings)
+ sys.exit(static)
+ if err_chk == 1:
+ dl_cmems.clean_up(settings)
+ sys.exit(static)
+ if err_chk == 2:
+ dl_cmems.clean_up(settings)
+ sys.exit(static)
+ dl_cmems.clean_up(settings)
+
+ # subset downloaded static grid files to match downloaded CMEMS data
+ if settings['subset_static'] == False:
+ logger.info('CMEMS subset static data not requested')
+ if settings['subset_static'] == True:
+ logger.info('CMEMS subset static data requested: subsetting now......')
+ subset_static = dl_cmems.subset_static(settings)
+ if subset_static == 0:
+ logger.info('CMEMS static data subset successfully')
+ if type(subset_static) == str:
+ logger.error(subset_static)
+ dl_cmems.clean_up(settings)
+ sys.exit(subset_static)
+ dl_cmems.clean_up(settings)
+
+ if settings['download_cmems'] == False:
+ logger.info('CMEMS Boundary data download not requested')
+
+ if settings['download_cmems'] == True:
+
+ logger.info('CMEMS Boundary data requested: starting download process....')
+
+ if settings['year_end'] - settings['year_000'] > 0:
+ date_min = str(settings['year_000']) + '-01-01'
+ date_max = str(settings['year_end']) + '-12-31'
+
+ elif settings['year_end'] - settings['year_000'] == 0:
+
+ days_mth = monthrange(settings['year_end'], settings['month_end'])
+ date_min = str(settings['year_000']) + '-' + str(settings['month_000']).zfill(2) + '-01'
+ date_max = str(settings['year_end']) + '-' + str(settings['month_end']).zfill(2) + '-' + str(
+ days_mth[1])
+
+ elif settings['year_end'] - settings['year_000'] < 0:
+ error_msg = 'end date before start date please ammend bdy file'
+ logger.error(error_msg)
+ dl_cmems.clean_up(settings)
+ sys.exit(error_msg)
+ else:
+ logger.warning('unable to parse dates..... using demo date November 2017')
+ date_min = '2017-11-01'
+ date_max = '2017-11-30'
+ # check to see if MOTU client is installed
+ chk = dl_cmems.chk_motu()
+ if chk == 1:
+ error_msg = 'motuclient not installed, please install by running $ pip install motuclient'
+ logger.error(error_msg)
+ dl_cmems.clean_up(settings)
+ sys.exit(error_msg)
+ if type(chk) == str:
+ logger.info('version ' + chk + ' of motuclient is installed')
+ else:
+ error_msg = 'unable to parse MOTU check'
+ logger.error(error_msg)
+ dl_cmems.clean_up(settings)
+ sys.exit(error_msg)
+ # download request for CMEMS data, try whole time interval first.
+ for re in range(settings['num_retry']):
+ logger.info('starting CMES download now (this can take a while)...')
+ dl = dl_cmems.request_cmems(settings, date_min, date_max)
+ if dl == 0:
+ logger.info('CMES data downloaded successfully')
+ break
+            # a string return means MOTU has returned an error
+ if type(dl) == str:
+ # check error message against logged errors
+ err_chk = dl_cmems.err_parse(dl,'MOTU')
+ # error is known and retry is likely to work
+ if err_chk == 0:
+ logger.info('retrying CMEMS download....retry number '+str(re+1)+' of '+str(settings['num_retry']) )
+ if re == (settings['num_retry']-1):
+ logger.critical('reached retry limit defined in BDY file, exiting now')
+ logger.critical(dl)
+ dl_cmems.clean_up(settings)
+ sys.exit(dl)
+ # error is known and retry is likely to not work
+ if err_chk == 1:
+ dl_cmems.clean_up(settings)
+ sys.exit(dl)
+ # error is not logged, add to error file.
+ if err_chk == 2:
+ dl_cmems.clean_up(settings)
+ sys.exit(dl)
+ if dl == 1:
+ # if the request is too large try monthly intervals
+ logger.warning('CMEMS request too large, try monthly downloads...(this may take awhile)')
+ mnth_dl = dl_cmems.MWD_request_cmems(settings, date_min, date_max, 'M')
+ if mnth_dl == 0:
+ logger.info('CMEMS monthly request successful')
+ break
+ if type(mnth_dl) == str:
+ err_chk = dl_cmems.err_parse(mnth_dl,'MOTU')
+ if err_chk == 0:
+ logger.info('retrying CMEMS download....retry number '+str(re+1)+' of '+str(settings['num_retry']) )
+ if re == (settings['num_retry']-1):
+ logger.critical('reached retry limit defined in BDY file, exiting now')
+ logger.critical(mnth_dl)
+ dl_cmems.clean_up(settings)
+ sys.exit(mnth_dl)
+ if err_chk == 1:
+ dl_cmems.clean_up(settings)
+ sys.exit(mnth_dl)
+ if err_chk == 2:
+ dl_cmems.clean_up(settings)
+ sys.exit(mnth_dl)
+ if mnth_dl == 1:
+ # if the request is too large try weekly intervals
+ logger.warning('CMEMS request still too large, trying weekly downloads...(this will take longer...)')
+ wk_dl = dl_cmems.MWD_request_cmems(settings, date_min, date_max, 'W')
+ if wk_dl == 0:
+ logger.info('CMEMS weekly request successful')
+ break
+ if type(wk_dl) == str:
+ err_chk = dl_cmems.err_parse(wk_dl,'MOTU')
+ if err_chk == 0:
+ logger.info('retrying CMEMS download....retry number ' + str(re + 1) + ' of ' + str(settings['num_retry']))
+ if re == (settings['num_retry'] - 1):
+ logger.critical('reached retry limit defined in BDY file, exiting now')
+ logger.critical(wk_dl)
+ dl_cmems.clean_up(settings)
+ sys.exit(wk_dl)
+ if err_chk == 1:
+ dl_cmems.clean_up(settings)
+ sys.exit(wk_dl)
+ if err_chk == 2:
+ dl_cmems.clean_up(settings)
+ sys.exit(wk_dl)
+                if wk_dl == 1:
+                    # if the request is too large try daily intervals.
+                    logger.warning('CMEMS request STILL too large, trying daily downloads....(even longer.....)')
+                    dy_dl = dl_cmems.MWD_request_cmems(settings, date_min, date_max, 'D')
+                    if dy_dl == 0:
+                        logger.info('CMEMS daily request successful')
+                        break
+                    # if the request is still too large then a smaller domain is required.
+                    # NOTE: a string return means MOTU reported an error (matches the chk/dl/mnth_dl branches above)
+                    if type(dy_dl) == str:
+                        # perform error check for retry
+                        err_chk = dl_cmems.err_parse(dy_dl,'MOTU')
+                        if err_chk == 0:
+                            logger.info('retrying CMEMS download....retry number ' + str(re + 1) + ' of ' + str(settings['num_retry']))
+                            if re == (settings['num_retry'] - 1):
+                                logger.critical('reached retry limit defined in BDY file, exiting now')
+                                logger.critical(dy_dl)
+                                dl_cmems.clean_up(settings)
+                                sys.exit(dy_dl)
+                        if err_chk == 1:
+                            dl_cmems.clean_up(settings)
+                            sys.exit(dy_dl)
+                        if err_chk == 2:
+                            dl_cmems.clean_up(settings)
+                            sys.exit(dy_dl)
+# end of messy if statements to split requests into months, weeks and days as needed.
+ dl_cmems.clean_up(settings)
+ logger.info('============================================')
+ logger.info('End CMEMS download: ' + time.asctime())
+ logger.info('============================================')
+
+
def process_bdy(setup_filepath=0, mask_gui=False):
"""
Main entry for processing BDY lateral boundary conditions.
@@ -86,7 +296,7 @@ def process_bdy(setup_filepath=0, mask_gui=False):
"""
# Start Logger
-
+ logger.info('============================================')
logger.info('Start NRCT Logging: '+time.asctime())
logger.info('============================================')
@@ -110,6 +320,11 @@ def process_bdy(setup_filepath=0, mask_gui=False):
logger.info('Generated BDY %s information', grd)
logger.info('Grid %s has shape %s', grd, bdy_ind[grd].bdy_i.shape)
+ for grd in ['z']:
+ bdy_ind[grd] = gen_grid.Boundary(bdy_msk, settings, 't')
+ logger.info('Generated BDY %s information', 't')
+ logger.info('Grid %s has shape %s', grd, bdy_ind['t'].bdy_i.shape)
+
# TODO: Write in option to seperate out disconnected LBCs
# Write out grid information to coordinates.bdy.nc
@@ -125,13 +340,19 @@ def process_bdy(setup_filepath=0, mask_gui=False):
for grd in ['t', 'u', 'v']:
nbdy[grd] = len(bdy_ind[grd].bdy_i[:, 0])
+ for grd in ['z']:
+ nbdy[grd] = len(bdy_ind['t'].bdy_i[:, 0])
+
# Gather grid information
# TODO: insert some logic here to account for 2D or 3D src_zgr
logger.info('Gathering grid information')
nc = GetFile(settings['src_zgr'])
- SourceCoord.zt = np.squeeze(nc['gdept_0'][:])
+ try:
+ SourceCoord.zt = np.squeeze(nc['gdept_0'][:])
+ except:
+ SourceCoord.zt = np.squeeze(nc['depth'][:])
nc.close()
# Define z at t/u/v points
@@ -143,20 +364,30 @@ def process_bdy(setup_filepath=0, mask_gui=False):
# TODO: put conditional here as we may want to keep data on parent
# vertical grid
- DstCoord.depths = {'t': {}, 'u': {}, 'v': {}}
+ DstCoord.depths = {'t': {}, 'u': {}, 'v': {}, 'z':{}}
- for grd in ['t', 'u', 'v']:
+ for grd in ['t', 'u', 'v','z']:
DstCoord.depths[grd]['bdy_H'] = np.nanmax(z.zpoints['w'+grd], axis=0)
DstCoord.depths[grd]['bdy_dz'] = np.diff(z.zpoints['w'+grd], axis=0)
DstCoord.depths[grd]['bdy_z'] = z.zpoints[grd]
+
logger.info('Depths defined')
# Gather vorizontal grid information
-
+ # TODO: Sort generic grid variables (define in bdy file?)
nc = GetFile(settings['src_hgr'])
- SourceCoord.lon = nc['glamt'][:,:]
- SourceCoord.lat = nc['gphit'][:,:]
+
+ try:
+ SourceCoord.lon = nc['glamt'][:,:]
+ SourceCoord.lat = nc['gphit'][:,:]
+ except:
+ SourceCoord.lon = nc['longitude'][:]
+ SourceCoord.lat = nc['latitude'][:]
+ # expand lat and lon 1D arrays into 2D array matching nav_lat nav_lon
+ SourceCoord.lon = np.tile(SourceCoord.lon, (np.shape(SourceCoord.lat)[0], 1))
+ SourceCoord.lat = np.tile(SourceCoord.lat, (np.shape(SourceCoord.lon)[1], 1))
+ SourceCoord.lat = np.rot90(SourceCoord.lat)
try: # if they are masked array convert them to normal arrays
SourceCoord.lon = SourceCoord.lon.filled()
@@ -169,7 +400,7 @@ def process_bdy(setup_filepath=0, mask_gui=False):
nc.close()
- DstCoord.lonlat = {'t': {}, 'u': {}, 'v': {}}
+ DstCoord.lonlat = {'t': {}, 'u': {}, 'v': {}, 'z':{}}
nc = GetFile(settings['dst_hgr'])
@@ -178,6 +409,10 @@ def process_bdy(setup_filepath=0, mask_gui=False):
for grd in ['t', 'u', 'v']:
DstCoord.lonlat[grd]['lon'] = nc['glam' + grd][0, :, :]
DstCoord.lonlat[grd]['lat'] = nc['gphi' + grd][0, :, :]
+
+ for grd in ['z']:
+ DstCoord.lonlat[grd]['lon'] = nc['glamt'][0, :, :]
+ DstCoord.lonlat[grd]['lat'] = nc['gphit'][0, :, :]
nc.close()
@@ -185,13 +420,13 @@ def process_bdy(setup_filepath=0, mask_gui=False):
# Identify lons/lats of the BDY points
- DstCoord.bdy_lonlat = {'t': {}, 'u': {}, 'v': {}}
+ DstCoord.bdy_lonlat = {'t': {}, 'u': {}, 'v': {},'z':{}}
- for grd in ['t', 'u', 'v']:
+ for grd in ['t', 'u', 'v', 'z']:
for l in ['lon', 'lat']:
DstCoord.bdy_lonlat[grd][l] = np.zeros(nbdy[grd])
- for grd in ['t', 'u', 'v']:
+ for grd in ['t', 'u', 'v', 'z']:
for i in range(nbdy[grd]):
x = bdy_ind[grd].bdy_i[i, 1]
y = bdy_ind[grd].bdy_i[i, 0]
@@ -210,26 +445,45 @@ def process_bdy(setup_filepath=0, mask_gui=False):
reader = factory.GetReader(settings['src_dir'],t_adj)
for grd in ['t', 'u', 'v']:
bdy_ind[grd].source_time = reader[grd]
+ for grd in ['z']:
+ bdy_ind[grd].source_time = reader['t']
unit_origin = '%d-01-01 00:00:00' %settings['base_year']
# Extract source data on dst grid
- if settings['tide']:
+ if settings['tide'] == True:
if settings['tide_model']=='tpxo':
- cons = tide.nemo_bdy_tpx7p2_rot(
+ cons = tide.nemo_bdy_tide_rot(
Setup, DstCoord, bdy_ind['t'], bdy_ind['u'], bdy_ind['v'],
- settings['clname'])
+ settings['clname'], settings['tide_model'])
+ write_tidal_data(Setup, DstCoord, bdy_ind, settings['clname'], cons)
+
+ if settings['tide_checker'] == True:
+ logger.info('tide checker starting now.....')
+ tt_test = tt.main(setup_filepath,settings['ref_model'])
+ if tt_test == 0:
+ logger.info('tide checker ran successfully, check spreadsheet in output folder')
+ if tt_test !=0:
+ logger.warning('error running tide checker')
+
elif settings['tide_model']=='fes':
- logger.error('Tidal model: %s, not yet implimented',
- settings['tide_model'])
- return
+ cons = tide.nemo_bdy_tide_rot(
+ Setup, DstCoord, bdy_ind['t'], bdy_ind['u'], bdy_ind['v'],
+ settings['clname'],settings['tide_model'])
+ write_tidal_data(Setup, DstCoord, bdy_ind, settings['clname'], cons)
+
+ if settings['tide_checker'] == True:
+ logger.info('tide checker starting now.....')
+ tt_test = tt.main(setup_filepath,settings['ref_model'])
+ if tt_test == 0:
+ logger.info('tide checker ran successfully, check spreadsheet in output folder')
+ if tt_test !=0:
+ logger.warning('error running tide checker')
else:
logger.error('Tidal model: %s, not recognised',
settings['tide_model'])
return
-
- write_tidal_data(Setup, DstCoord, bdy_ind, settings['clname'], cons)
logger.info('Tidal constituents written to file')
@@ -273,32 +527,41 @@ def process_bdy(setup_filepath=0, mask_gui=False):
# Define mapping of variables to grids with a dictionary
emap = {}
- grd = [ 't', 'u', 'v']
- pair = [ None, 'uv', 'uv'] # TODO: devolve this to the namelist?
-
- # TODO: The following is a temporary stop gap to assign variables. In
- # future we need a slicker way of determining the variables to extract.
+ grd = [ 't', 'u', 'v', 'z']
+ #pair = [ None, 'uv', 'uv'] # TODO: devolve this to the namelist?
+ pair = [None, None, None, None]
+ # TODO: The following is a temporary stop gap to assign variables for both CMEMS downloads
+ # and existing variable names. In future we need a slicker way of determining the variables to extract.
# Perhaps by scraping the .ncml file - this way biogeochemical tracers
# can be included in the ln_tra = .true. option without having to
# explicitly declaring them.
+ var_list = ncml_parse.src_var_list(settings['src_dir'])
var_in = {}
for g in range(len(grd)):
var_in[grd[g]] = []
-
+
if ln_tra:
- var_in['t'].extend(['votemper', 'vosaline'])
-
+        # NB: the membership test must apply to EACH name; the original
+        # "'votemper' and 'vosaline' in var_list" only tested the last one
+        # (a non-empty string literal is always truthy).
+        if 'votemper' in var_list and 'vosaline' in var_list:
+            var_in['t'].extend(['votemper', 'vosaline'])
+        if 'votemper' in var_list and 'vosaline' not in var_list:
+            var_in['t'].extend(['votemper'])
+        if 'vosaline' in var_list and 'votemper' not in var_list:
+            var_in['t'].extend(['vosaline'])
+
if ln_dyn2d or ln_dyn3d:
- var_in['u'].extend(['vozocrtx', 'vomecrty'])
- var_in['v'].extend(['vozocrtx', 'vomecrty'])
-
+        # membership must be tested per-name (see tracer block): bare string is always truthy
+        if 'vozocrtx' in var_list and 'vomecrty' in var_list:
+            var_in['u'].extend(['vozocrtx'])
+            var_in['v'].extend(['vomecrty'])
+
if ln_dyn2d:
- var_in['t'].extend(['sossheig'])
-
+ if 'sossheig' in var_list:
+ var_in['z'].extend(['sossheig'])
+
if ln_ice:
- var_in['t'].extend(['ice1', 'ice2', 'ice3'])
-
+        # membership must be tested per-name: the original only checked 'isnowthi'
+        if 'iicethic' in var_list and 'ileadfra' in var_list and 'isnowthi' in var_list:
+            var_in['t'].extend(['iicethic', 'ileadfra', 'isnowthi'])
+
# As variables are associated with grd there must be a filename attached
# to each variable
@@ -343,7 +606,9 @@ def process_bdy(setup_filepath=0, mask_gui=False):
logger.info('End NRCT Logging: '+time.asctime())
logger.info('==========================================')
-
+
+
+
def write_tidal_data(setup_var, dst_coord_var, grid, tide_cons, cons):
"""
@@ -386,30 +651,36 @@ def write_tidal_data(setup_var, dst_coord_var, grid, tide_cons, cons):
setup_var.settings['fn']+ \
'_bdytide_'+const_name+'_grd_'+ \
val['nam'].upper()+'.nc'
-
+
+ ncml_out = glob(setup_var.settings['ncml_out'] + '/*' + 'tide_'+str(val['nam'].upper()) + '.ncml')
+ if len(ncml_out) == 0:
+ raise RuntimeError(
+ 'NCML out tide file for grid ' + str(val['nam'].upper()) + ' missing, please add into NCML directory')
+ ncml_out = ncml_out[0]
+
nemo_bdy_tide_ncgen.CreateBDYTideNetcdfFile(fout_tide,
val['nx'],
dst_coord_var.lonlat['t']['lon'].shape[1],
dst_coord_var.lonlat['t']['lon'].shape[0],
val['des']+tide_con,
- setup_var.settings['fv'], key.upper())
+ setup_var.settings['fv'], key.upper(),ncml_out)
ncpop.write_data_to_file(fout_tide, val['nam']+'1',
- cons['cos'][val['nam']][indx])
+ cons['cos'][val['nam']][indx],ncml_out)
ncpop.write_data_to_file(fout_tide, val['nam']+'2',
- cons['sin'][val['nam']][indx])
+ cons['sin'][val['nam']][indx],ncml_out)
ncpop.write_data_to_file(fout_tide, 'bdy_msk',
- dst_coord_var.bdy_msk)
+ dst_coord_var.bdy_msk,ncml_out)
ncpop.write_data_to_file(fout_tide, 'nav_lon',
- dst_coord_var.lonlat['t']['lon'])
+ dst_coord_var.lonlat['t']['lon'],ncml_out)
ncpop.write_data_to_file(fout_tide, 'nav_lat',
- dst_coord_var.lonlat['t']['lat'])
+ dst_coord_var.lonlat['t']['lat'],ncml_out)
ncpop.write_data_to_file(fout_tide, 'nbidta',
- grid[key].bdy_i[val['ind'], 0]+1)
+ grid[key].bdy_i[val['ind'], 0]+1,ncml_out)
ncpop.write_data_to_file(fout_tide, 'nbjdta',
- grid[key].bdy_i[val['ind'], 1]+1)
+ grid[key].bdy_i[val['ind'], 1]+1,ncml_out)
ncpop.write_data_to_file(fout_tide, 'nbrdta',
- grid[key].bdy_r[val['ind']]+1)
+ grid[key].bdy_r[val['ind']]+1,ncml_out)
# Iterate over constituents
diff --git a/pynemo/pynemo_exe.py b/pynemo/pynemo_exe.py
index 55820759..2855028b 100644
--- a/pynemo/pynemo_exe.py
+++ b/pynemo/pynemo_exe.py
@@ -11,6 +11,9 @@
# Logging set to info
logging.basicConfig(level=logging.INFO)
import time
+from yaspin import yaspin
+from yaspin.spinners import Spinners
+
def main():
""" Main function which checks the command line parameters and
passes them to the profile module for processing """
@@ -18,21 +21,30 @@ def main():
setup_file = ''
mask_gui = False
try:
- opts, dummy_args = getopt.getopt(sys.argv[1:], "hs:g", ["help","setup=","mask_gui"])
+ opts, dummy_args = getopt.getopt(sys.argv[1:], "hs:d:g", ["help", "setup=", "download_cmems=", "mask_gui"])
except getopt.GetoptError:
- print("usage: pynemo -g -s ")
+ print("usage: pynemo -g -s -d ")
sys.exit(2)
for opt, arg in opts:
if opt == "-h":
- print("usage: pynemo [-g] -s ")
+ print("usage: pynemo [-g] -s -d ")
print(" -g (optional) will open settings editor before extracting the data")
print(" -s file to use")
+ print(" -d (optional) will download CMEMS data using provided bdy file")
sys.exit()
elif opt in ("-s", "--setup"):
setup_file = arg
- elif opt in("-g", "--mask_gui"):
+ elif opt in ("-g", "--mask_gui"):
mask_gui = True
+ elif opt in ("-d", "--download_cmems"):
+ setup_file = arg
+ t0 = time.time()
+ with yaspin(Spinners.earth, text='PyNEMO: download CMEMS data is running'):
+ profile.download_cmems(setup_file)
+ t1 = time.time()
+ print("CMEMS download time: %s" % (t1 - t0))
+ sys.exit(0)
if setup_file == "":
print("usage: pynemo [-g] -s ")
@@ -41,7 +53,8 @@ def main():
#Logger
#logger = logging.getLogger(__name__)
t0 = time.time()
- profile.process_bdy(setup_file, mask_gui)
+ with yaspin(Spinners.earth, text='PyNEMO: boundary generation is running'):
+ profile.process_bdy(setup_file, mask_gui)
t1 = time.time()
print("Execution Time: %s" % (t1-t0))
diff --git a/pynemo/pynemo_unit_test.py b/pynemo/pynemo_unit_test.py
new file mode 100644
index 00000000..c68359e0
--- /dev/null
+++ b/pynemo/pynemo_unit_test.py
@@ -0,0 +1,119 @@
+# -*- coding: utf-8 -*-
+"""
+Set of test functions to test PyNEMO functionality.
+
+"""
+from subprocess import Popen, PIPE
+from netCDF4 import Dataset
+import numpy as np
+import glob
+import os
+from pynemo.unit_tests import UT_config as config
+
+# generate test data by import test gen script and executing main function
+# TODO: Maybe simplify this, as this import imports other scripts and is a bit clunky.
+from pynemo.unit_tests import test_gen as tg
+gen_data = tg._main()
+# if a non zero is return than the grid and data generation has failed.
+if gen_data != 0:
+ raise Exception('DONT PANIC: Input grid and boundary data generation failed')
+
+# run PyNEMO with test data
+# generate list of namelist.bdy files to run
+namelist_files = glob.glob(config.unit_dir+'namelist*')
+for n in namelist_files:
+ # run each of the namelist files
+ stdout, stderr = Popen(['pynemo', '-s', n], stdout=PIPE, stderr=PIPE,
+ universal_newlines=True).communicate()
+ # check to see if PyNEMO ran correctly, no execution time in stdout is indication of this.
+ if 'Execution Time' not in stdout:
+ print(stderr)
+ raise Exception('DONT PANIC: Test Run '+str(n)+' Failed')
+
+# TODO: Learn about parameterising the tests so that different parameters can be checked
+# with same code. Rather than having similar test functions repeated.
+
+# perform tests
+def test_temp():
+ test_files = glob.glob(config.output_dir+'*bdyT*')
+ if len(test_files) == 0:
+ raise Exception('DONT PANIC: no temperature test files found')
+ for t in test_files:
+ results = Dataset(t) # open results
+ temp = results['thetao'][:]
+ results.close()
+ temp_ = np.ma.masked_array(temp,temp == -32767.0)
+ assert abs(temp_[temp_!=0.0].mean() - 15) <= 0.001
+ assert abs(temp_[temp_ != 0.0].max() - 15) <= 0.001
+ assert abs(temp_[temp_ != 0.0].min() - 15) <= 0.001
+
+def test_salinty():
+ test_files = glob.glob(config.output_dir+'*bdyT*')
+ if len(test_files) == 0:
+ raise Exception('DONT PANIC: no salinity test files found')
+ for t in test_files:
+ results = Dataset(t) # open results
+ sal = results['so'][:]
+ results.close()
+ sal_ = np.ma.masked_array(sal,sal == -32767.0)
+ assert abs(sal_[sal_!=0.0].mean() - 35) <= 0.001
+ assert abs(sal_[sal_ != 0.0].max() - 35) <= 0.001
+ assert abs(sal_[sal_ != 0.0].min() - 35) <= 0.001
+
+# TODO: add in checking so that settings in the bdy file are checked to see if
+# U and V and SSH tests are required. e.g. ln_dyn2d is set to true.
+
+def test_ssh():
+ test_files = glob.glob(config.output_dir+'*bdyT*')
+ if len(test_files) == 0:
+ raise Exception('DONT PANIC: no SSH test files found')
+ for t in test_files:
+ results = Dataset(t) # open results
+ ssh = results['zos'][:]
+ results.close()
+ ssh_ = np.ma.masked_array(ssh,ssh == -32767.0)
+ assert abs(ssh_[ssh_!=0.0].mean() - 1.0) <= 0.001
+ assert abs(ssh_[ssh_ != 0.0].max() - 1.0) <= 0.001
+ assert abs(ssh_[ssh_ != 0.0].min() - 1.0) <= 0.001
+
+def test_U():
+ test_files = glob.glob(config.output_dir+'*bdyU*')
+ if len(test_files) == 0:
+ raise Exception('DONT PANIC: no U current test files found')
+ for t in test_files:
+ results = Dataset(t) # open results
+ U = results['uo'][:]
+ results.close()
+ U_ = np.ma.masked_array(U,U == -32767.0)
+ assert abs(U_[U_!=0.0].mean() - 0.5) <= 0.001
+ assert abs(U_[U_ != 0.0].max() - 0.5) <= 0.001
+ assert abs(U_[U_ != 0.0].min() - 0.5) <= 0.001
+
+def test_V():
+ test_files = glob.glob(config.output_dir+'*bdyV*')
+ if len(test_files) == 0:
+ raise Exception('DONT PANIC: no V current test files found')
+ for t in test_files:
+ results = Dataset(t) # open results
+ V = results['vo'][:]
+ results.close()
+ V_ = np.ma.masked_array(V,V == -32767.0)
+ assert abs(V_[V_!=0.0].mean() - 0.5) <= 0.001
+ assert abs(V_[V_ != 0.0].max() - 0.5) <= 0.001
+ assert abs(V_[V_ != 0.0].min() - 0.5) <= 0.001
+
+# clean up test I/O
+def test_rm_out():
+ files = glob.glob(config.output_dir+'*')
+ for f in files:
+ os.remove(f)
+ files = glob.glob(config.output_dir+'*')
+ assert len(files) == 0
+
+
+def test_rm_in():
+    files = glob.glob(config.output_dir+'*')  # NOTE(review): verbatim copy of test_rm_out -- presumably should clean the INPUT/unit dir; confirm intended config attribute
+    for f in files:
+        os.remove(f)
+    files = glob.glob(config.output_dir+'*')
+    assert len(files) == 0
diff --git a/pynemo/reader/directory.py b/pynemo/reader/directory.py
index 75f427ae..ed91dcad 100644
--- a/pynemo/reader/directory.py
+++ b/pynemo/reader/directory.py
@@ -1,6 +1,7 @@
'''
This is an abstraction for the data repository
@author: Mr. Srikanth Nagella
+NOTE: this script is no longer used/supported by PyNEMO, reading directories for data files requires an NCML file.
'''
from os import listdir
import numpy as np
diff --git a/pynemo/reader/factory.py b/pynemo/reader/factory.py
index 33ab3465..2f133b5d 100644
--- a/pynemo/reader/factory.py
+++ b/pynemo/reader/factory.py
@@ -9,24 +9,30 @@
#Local Imports
from pynemo.reader.ncml import Reader as NcMLReader
from pynemo.reader.ncml import NcMLFile
-from pynemo.reader.directory import Reader as DirectoryReader
+#from pynemo.reader.directory import Reader as DirectoryReader
+import logging
from netCDF4 import Dataset
-
+logger = logging.getLogger(__name__)
def GetReader(uri, t_adjust, reader_type=None):
if reader_type is None:
- print(uri)
+ logger.info(uri)
if uri.endswith(".ncml"):
reader_type = "NcML"
elif os.path.isdir(uri):
- reader_type = "Directory"
+ # directory reading directly is no longer supported please use NCML file to define directory
+ #reader_type = "Directory"
+ logger.error("Directory Reading is no longer supported without using NCML file to define location")
+ raise Exception("Directory Reading is no longer supported without using NCML file to define location")
else:
- print("Error input should be a NcML file or URL or a Local directory")
- return None
+ logger.error("Error input: should be a NcML file")
+ raise Exception("Error input: should be a NcML file")
if reader_type == "NcML":
return NcMLReader(uri,t_adjust)
else:
- return DirectoryReader(uri, t_adjust)
+ logger.error("Directory Reading is no longer supported without using NCML file to define location")
+ raise Exception("Directory Reading is no longer supported without using NCML file to define location")
+ #return DirectoryReader(uri, t_adjust)
class NetCDFFile(object):
diff --git a/pynemo/reader/ncml.py b/pynemo/reader/ncml.py
index 9d23e856..a50f051c 100644
--- a/pynemo/reader/ncml.py
+++ b/pynemo/reader/ncml.py
@@ -14,6 +14,7 @@
ncmlpath, file_name = os.path.split(__file__)
ncmlpath = os.path.join(ncmlpath, "jars", "netcdfAll-4.6.jar")
jnius_config.set_classpath('.',ncmlpath)
+logger = logging.getLogger(__name__)
try:
if os.environ['http_proxy'] is not None:
#split the proxy name and port
@@ -22,7 +23,7 @@
proxy_port = proxylist[1]
jnius_config.add_options('-Dhttp.proxyHost='+proxy_host,'-Dhttp.proxyPort='+proxy_port)
except:
- print("Didn't find a proxy environment variable")
+ logger.info("Didn't find a proxy environment variable")
NetcdfDataset = None
NcMLReader = None
Section = None
@@ -40,6 +41,7 @@ def init_jnius():
print('Warning: Please make sure pyjnius is installed and jvm.dll/libjvm.so/libjvm.dylib is in the path')
time_counter_const = "time_counter"
+
class Reader(object):
""" This class is the high level of object for the NCML reader, from here using grid type
will return the grid data
@@ -268,7 +270,7 @@ def get_attribute_value(self, attr_name):
attr = dvar.findAttributeIgnoreCase(attr_name)
if attr is not None:
retval = attr.getValue(0)
- return retval
+ return retval
except KeyError:
self.logger.error('Cannot find the requested variable '+self.variable)
return None
diff --git a/pynemo/tests/bdy_coords.py b/pynemo/tests/bdy_coords.py
index 5f801fff..b1c4a191 100755
--- a/pynemo/tests/bdy_coords.py
+++ b/pynemo/tests/bdy_coords.py
@@ -8,12 +8,13 @@
# pylint: disable=E1103
# pylint: disable=no-name-in-module
-#External imports
+# External imports
from time import clock
import numpy as np
import logging
+import importlib
-#local imports
+# local imports
from pynemo import nemo_bdy_setup as setup
from pynemo import nemo_bdy_gen_c as gen_grid
from pynemo import nemo_coord_gen_pop as coord
@@ -25,18 +26,21 @@
from pynemo.utils import Constants
from pynemo.gui.nemo_bdy_mask import Mask as Mask_File
-from PyQt4.QtGui import QMessageBox
-#import pickle
+from PyQt5.QtWidgets import QMessageBox
+
+# import pickle
logger = logging.getLogger(__name__)
+
+
def process_bdy(setup_filepath=0, mask_gui=False):
- """ Main entry to the processing of the bdy
+ """ Main entry to the processing of the bdy
Keyword arguments:
setup_filepath -- file path to bdy file
mask_gui -- whether gui to select the mask file needs to be poped up
"""
- #Logger
+ # Logger
logger.info('START')
start = clock()
SourceCoord = source_coord.SourceCoord()
@@ -44,7 +48,7 @@ def process_bdy(setup_filepath=0, mask_gui=False):
logger.info(clock() - start)
start = clock()
- Setup = setup.Setup(setup_filepath) # default settings file
+ Setup = setup.Setup(setup_filepath) # default settings file
settings = Setup.settings
logger.info(clock() - start)
@@ -60,7 +64,7 @@ def process_bdy(setup_filepath=0, mask_gui=False):
logger.info('Done Mask')
DstCoord.bdy_msk = bdy_msk == 1
- reload(gen_grid)
+ importlib.reload(gen_grid)
start = clock()
logger.info('start bdy_t')
grid_t = gen_grid.Boundary(bdy_msk, settings, 't')
@@ -91,11 +95,11 @@ def process_bdy(setup_filepath=0, mask_gui=False):
bdy_ind = {'t': grid_t, 'u': grid_u, 'v': grid_v, 'f': grid_f}
- for k in bdy_ind.keys():
+ for k in list(bdy_ind.keys()):
logger.info('bdy_ind %s %s %s', k, bdy_ind[k].bdy_i.shape, bdy_ind[k].bdy_r.shape)
start = clock()
- co_set = coord.Coord(settings['dst_dir']+'/coordinates.bdy.nc', bdy_ind)
+ co_set = coord.Coord(settings['dst_dir'] + '/coordinates.bdy.nc', bdy_ind)
logger.info('done coord gen')
logger.info(clock() - start)
start = clock()
@@ -114,6 +118,7 @@ def process_bdy(setup_filepath=0, mask_gui=False):
# dyn 3d over 1st rim
# dyn 3d frs over rw
+
def _get_mask(Setup, mask_gui):
""" This method reads the mask information from the netcdf file or opens a gui
to create a mask depending on the mask_gui input. return the mask data. The default mask
@@ -124,34 +129,34 @@ def _get_mask(Setup, mask_gui):
"""
bdy_msk = None
if mask_gui:
- #Open the gui to create a mask
+ # Open the gui to create a mask
_, mask = pynemo_settings_editor.open_settings_dialog(Setup)
bdy_msk = mask.data
Setup.refresh()
else:
try:
- #mask filename and mask file flag is set
- if Setup.bool_settings['mask_file'] and Setup.settings['mask_file'] is not None:
+ # mask filename and mask file flag is set
+ if Setup.bool_settings['mask_file'] and Setup.settings['mask_file'] is not None:
mask = Mask_File(mask_file=Setup.settings['mask_file'])
bdy_msk = mask.data
elif Setup.bool_settings['mask_file']:
logger.error("Mask file is not given")
return
- else: #no mask file specified then use default 1px halo mask
+ else: # no mask file specified then use default 1px halo mask
logger.warning("Using default mask with bathymetry!!!!")
mask = Mask_File(Setup.settings['bathy'])
mask.apply_border_mask(Constants.DEFAULT_MASK_PIXELS)
bdy_msk = mask.data
- except ValueError: # why is this except here? as there is an else: statement TODO
- print 'something wrong?'
+ except ValueError: # why is this except here? as there is an else: statement TODO
+ print('something wrong?')
return
if np.amin(bdy_msk) == 0:
# Mask is not set throw a warning message and set border to 1px.
logger.warning("Setting the mask to 1px border")
- QMessageBox.warning(None,"pyNEMO", "Mask is not set, setting a 1 pixel border mask")
+ QMessageBox.warning(None, "pyNEMO", "Mask is not set, setting a 1 pixel border mask")
if bdy_msk is not None and 1 < bdy_msk.shape[0] and 1 < bdy_msk.shape[1]:
tmp = np.ones(bdy_msk.shape, dtype=bool)
tmp[1:-1, 1:-1] = False
bdy_msk[tmp] = -1
-
- return bdy_msk
+
+ return bdy_msk
\ No newline at end of file
diff --git a/pynemo/tests/nemo_tide_test.py b/pynemo/tests/nemo_tide_test.py
new file mode 100644
index 00000000..93cfe6d8
--- /dev/null
+++ b/pynemo/tests/nemo_tide_test.py
@@ -0,0 +1,376 @@
+#!/usr/bin/env python3.7
+# -*- coding: utf-8 -*-
+"""
+Created on Fri May 01 2020
+
+@author: thopri
+example usage: (in python console)
+from pynemo.tides import nemo_tide_test as tt
+tt.main()
+
+all parameters have defaults applied if not supplied:
+location of bdy file - 'inputs/namelist_cmems.bdy'
+amplitude threshold - 0.25 m
+phase threshold - 10.00 degrees
+model resolution - 1/16 degree
+model - 'fes'
+
+So for FES (the only model currently supported) only the location of the BDY file and thresholds (if different from defaults)
+need to be provided. For TPXO this would vary based on the resolution, e.g. TPXO7.2 is 1/4 and 'tpxo'
+
+The script generates a excel spreadsheet that contains the locations and amplitudes and phases for all HC's
+defined in the bdy file that exceed the default or defined the thresholds passed to the main function.
+File locations e.g. model reference location etc are all taken from bdy file that is passed to the main function
+
+To do this the script compiles a list of PyNEMO boundary amplitudes and phases and lat/lon's, finds the closest value
+in the reference model (currently only FES is supported), and then compares them. If the absolute difference is greater
+than defined threshold then the location and parameter (either Amp or Phase) is returned within a Pandas Dataframe
+which is then written to a spreadsheet.
+
+Notes:
+The script checks the Amplitude and Phase independently, so lat/lons for each are also returned. Each HC is saved to
+a separate sheet in the spreadsheet. The name of the spreadsheet contains meta data showing thresholds and reference
+model used. Units for threshold are meters and degrees.
+
+Update: fill values for FES are commonly returned at coastlines; this is due to the nearest FES cell being land but PyNEMO
+will have interpolated data from the water. In this instance the code checks the cells around the fill value and averages both
+amplitude and phase (using HsinG,HcosG) to act as a reference.
+
+Phase threshold is no longer required as it is applied using a function that references amplitude; the idea is that the
+threshold is low for high amplitudes, e.g. 5 degrees for 1.0 m, and high for low amplitudes, e.g. 80 degrees for 0.01 m.
+
+Amplitudes at phase exceedance locations are also returned to allow assessment of the impact, e.g. low amplitude low impact
+
+"""
+from netCDF4 import Dataset
+import numpy as np
+import logging
+import time
+import pandas as pd
+import warnings
+from pynemo import nemo_bdy_setup as setup
+
+# log to PyNEMO log file
+logger = logging.getLogger(__name__)
+logging.basicConfig(filename='nrct.log', level=logging.INFO)
+
+# TODO: add TPXO read and subset functionality currently only uses FES as "truth"
+
+def main(bdy_file='inputs/namelist_cmems.bdy',model='fes'):
+ logger.info('============================================')
+ logger.info('Start Tide Test Logging: ' + time.asctime())
+ logger.info('============================================')
+ # get settings dict based on bdy file
+ Setup = setup.Setup(bdy_file) # default settings file
+ settings = Setup.settings
+ constituents = settings['clname']
+ # TODO maybe define Z and/or UV in bdy file? at the moment Z, U and Vs are generated with no option for Z only.
+ grids = ['Z','U','V']
+ if model == 'fes':
+ logger.info('using FES as reference.......')
+ # open writer object to write pandas dataframes to spreadsheet
+ writer = pd.ExcelWriter(settings['dst_dir'] + 'comparision_with_'+str(model)+'.xlsx', engine='xlsxwriter')
+ for key in constituents:
+ for j in range(len(grids)):
+ out_fname = settings['dst_dir']+settings['fn']+'_bdytide_'+constituents[key].strip("',/\n")+'_grd_'+grids[j]+'.nc'
+ logger.info('processing output file '+out_fname)
+ fes_fname = settings['tide_fes']+constituents[key].strip("',/\n")+'_'+grids[j]+'.nc'
+ # read in FES data (whole globe)
+ fes = read_fes(fes_fname, grids[j])
+ grid = grids[j].lower()
+ # extract PyNEMO data from output files (generate list of lats,lons etc)
+ pynemo_out = extract_PyNEMO_output(out_fname, grid)
+ # subset FES to match PyNEMO list of lat lons
+ subset_fes = subset_reference(pynemo_out, fes)
+ # compare the two lists (or dicts really)
+ error_log = compare_tides(pynemo_out, subset_fes)
+ # return differences above threshold as a Pandas Dataframe and name using HC and Grid
+ error_log.name = constituents[key].strip("',/\n") + grids[j]
+ # if the dataframe is empty (no exceedances) then discard dataframe and log the good news
+ if error_log.empty == True:
+ logger.info('output file does not exceed threshold when compared with reference model..... thats good!')
+ # if dataframe has values then these exceed the threshold, log and save to excel spreadsheet using dataset
+ # name e.g. M2Z (based on HC and grid) as name for the sheet
+ if error_log.empty == False:
+ logger.warning('Exceedance in thesholds detected, check spreadsheet in dst_dir')
+ error_log.to_excel(writer,sheet_name=error_log.name)
+ # close writer object and save excel spreadsheet
+ writer.save()
+ # code runs here if TPXO is requested as reference this hasn't been written yet so raises exception
+ elif model == 'tpxo':
+ logger.info('using TPXO as reference.......')
+ logger.exception('not set up to use TPXO yet...... exiting')
+ raise Exception('Not setup for TPXO use FES instead?')
+ # everything else goes here which shouldn't happen so is raised as an exception
+ else:
+ logger.exception('Tide reference model not recognised.... exiting')
+ raise Exception('Invalid tide referece model name provided')
+ return 0
+
+ # find nearest value in array used for finding subset of Lat and Lon
+def find_nearest(array, value):
+ array = np.asarray(array)
+ idx = (np.abs(array - value)).argmin()
+ return idx
+
+ # extract PyNEMO output from netcdf file, convert HcosG and HsinG to Amp and Phase
+ # and extract lons and lats from I and J coords. return a dict
+def extract_PyNEMO_output(out_fname,grid):
+ tide_out = Dataset(out_fname)
+ nav_lat = tide_out.variables['nav_lat'][:]
+ nav_lon = tide_out.variables['nav_lon'][:]
+ nbidta = tide_out.variables['nbidta'][:]
+ nbjdta = tide_out.variables['nbjdta'][:]
+ cosine = np.array(tide_out.variables[grid+'1'][:])
+ sine = np.array(tide_out.variables[grid+'2'][:])
+ amp = np.hypot(sine,cosine)
+ phase = np.arctan2(sine[0,:],cosine[0,:])
+ phase = np.degrees(phase)
+ lat = np.array(nav_lat[nbjdta, nbidta])
+ lon = np.array(nav_lon[nbjdta, nbidta])
+ pynemo_out = {'lat':lat,'lon':lon,'amp':amp,'phase':phase}
+ tide_out.close()
+ return pynemo_out
+
+    # read FES netcdf file, convert lon to -180 to 180 (rather than 0-360); it also converts amplitude from cm to m
+ # return a dict
+def read_fes(fes_fname,grid):
+ fes_tide = Dataset(fes_fname)
+ if grid == 'Z':
+ fes_amp = np.array(fes_tide.variables['amplitude'][:])
+ # convert to m
+ fes_amp = fes_amp / 100
+ fes_phase = np.array(fes_tide.variables['phase'][:])
+
+ if grid != 'Z':
+ fes_amp = np.array(fes_tide.variables[grid+'a'][:])
+ fes_phase = np.array(fes_tide.variables[grid+'g'][:])
+ # convert to m/s
+ fes_amp = fes_amp/100
+
+ fes_lat = fes_tide.variables['lat'][:]
+ fes_lon = fes_tide.variables['lon'][:]
+    # change to -180 to 180 longitude convention
+ fes_lon[fes_lon > 180.0] = fes_lon[fes_lon > 180.0] - 360.0
+ fes_dict = {'lat':fes_lat,'lon':fes_lon,'amp':fes_amp,'phase':fes_phase}
+ fes_tide.close()
+ return fes_dict
+
+    # subset FES dict from read_fes; this uses find_nearest to find the nearest FES point for each point in the
+    # PyNEMO dict from extract_PyNEMO_output. (FES amplitude is already converted from cm to m in read_fes.)
+def subset_reference(pynemo_out, reference):
+ model_res = np.abs(reference['lon'][0]-reference['lon'][1])
+ idx_lat = np.zeros(np.shape(pynemo_out['lat']))
+ for i in range(np.shape(pynemo_out['lat'])[1]):
+ idx_lat[0, i] = find_nearest(reference['lat'], pynemo_out['lat'][0, i])
+ idx_lat = idx_lat.astype(np.int64)
+
+ idx_lon = np.zeros(np.shape(pynemo_out['lon']))
+ for i in range(np.shape(pynemo_out['lon'])[1]):
+ idx_lon[0, i] = find_nearest(reference['lon'], pynemo_out['lon'][0, i])
+ idx_lon = idx_lon.astype(np.int64)
+
+ amp_sub = reference['amp'][idx_lat, idx_lon]
+    # suppress warnings due to np.nanmean
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", category=RuntimeWarning)
+ for i in range(np.shape(amp_sub)[1]):
+ # if a fill value in FES subset is found
+ if amp_sub[0, i] == 184467436613926912.0000:
+ logger.warning('found fill value in FES subset, taking nanmean from surrounding amplitude points')
+ # if there are fill values surrounding subset fill value change these to NaN
+ if reference['amp'][idx_lat[0,i]+1, idx_lon[0,i]]== 184467436613926912.0000:
+ reference['amp'][idx_lat[0, i]+1, idx_lon[0, i]] = np.nan
+ if reference['amp'][idx_lat[0,i], idx_lon[0,i]+1]== 184467436613926912.0000:
+ reference['amp'][idx_lat[0, i], idx_lon[0,i]+1] = np.nan
+ if reference['amp'][idx_lat[0,i]-1, idx_lon[0,i]]== 184467436613926912.0000:
+ reference['amp'][idx_lat[0, i]-1, idx_lon[0, i]] = np.nan
+ if reference['amp'][idx_lat[0,i], idx_lon[0,i]-1]== 184467436613926912.0000:
+ reference['amp'][idx_lat[0, i], idx_lon[0, i]-1] = np.nan
+ if reference['amp'][idx_lat[0,i]+1, idx_lon[0,i]+1]== 184467436613926912.0000:
+ reference['amp'][idx_lat[0, i]+1, idx_lon[0, i]+1] = np.nan
+ if reference['amp'][idx_lat[0,i]-1, idx_lon[0,i]-1]== 184467436613926912.0000:
+ reference['amp'][idx_lat[0, i]-1, idx_lon[0, i]-1] = np.nan
+ if reference['amp'][idx_lat[0,i]-1, idx_lon[0,i]+1]== 184467436613926912.0000:
+ reference['amp'][idx_lat[0, i]-1, idx_lon[0, i]+1] = np.nan
+ if reference['amp'][idx_lat[0,i]+1, idx_lon[0,i]-1]== 184467436613926912.0000:
+ reference['amp'][idx_lat[0, i]+1, idx_lon[0, i]-1] = np.nan
+ # nan mean surrounding points to replace fill value subset point
+ amp_sub[0,i] = np.nanmean([reference['amp'][idx_lat[0,i]+1, idx_lon[0,i]], \
+ reference['amp'][idx_lat[0,i], idx_lon[0,i]+1], \
+ reference['amp'][idx_lat[0,i]-1, idx_lon[0,i]], \
+ reference['amp'][idx_lat[0,i], idx_lon[0,i]-1], \
+ reference['amp'][idx_lat[0,i]+1, idx_lon[0,i]]+1, \
+ reference['amp'][idx_lat[0,i]-1, idx_lon[0,i]-1], \
+ reference['amp'][idx_lat[0,i]-1, idx_lon[0,i]+1], \
+ reference['amp'][idx_lat[0,i]+1, idx_lon[0,i]-1] \
+ ])
+ phase_sub = reference['phase'][idx_lat, idx_lon]
+ for i in range(np.shape(phase_sub)[1]):
+ # if a fill value in FES subset is found
+ if phase_sub[0, i] == 18446744073709551616.0000:
+ logger.warning('found fill value in FES subset, taking nanmean from surrounding phase points')
+ # if there are fill values surrounding subset fill value change these to NaN
+ if reference['phase'][idx_lat[0, i] + 1, idx_lon[0, i]] == 18446744073709551616.0000:
+ reference['phase'][idx_lat[0, i] + 1, idx_lon[0, i]] = np.nan
+ if reference['phase'][idx_lat[0, i], idx_lon[0, i] + 1] == 18446744073709551616.0000:
+ reference['phase'][idx_lat[0, i], idx_lon[0, i] + 1] = np.nan
+ if reference['phase'][idx_lat[0, i] - 1, idx_lon[0, i]] == 18446744073709551616.0000:
+ reference['phase'][idx_lat[0, i] - 1, idx_lon[0, i]] = np.nan
+ if reference['phase'][idx_lat[0, i], idx_lon[0, i] - 1] == 18446744073709551616.0000:
+ reference['phase'][idx_lat[0, i], idx_lon[0, i] - 1] = np.nan
+ if reference['phase'][idx_lat[0, i] + 1, idx_lon[0, i] + 1] == 18446744073709551616.0000:
+ reference['phase'][idx_lat[0, i] + 1, idx_lon[0, i] + 1] = np.nan
+ if reference['phase'][idx_lat[0, i] - 1, idx_lon[0, i] - 1] == 18446744073709551616.0000:
+ reference['phase'][idx_lat[0, i] - 1, idx_lon[0, i] - 1] = np.nan
+ if reference['phase'][idx_lat[0, i] - 1, idx_lon[0, i] + 1] == 18446744073709551616.0000:
+ reference['phase'][idx_lat[0, i] - 1, idx_lon[0, i] + 1] = np.nan
+ if reference['phase'][idx_lat[0, i] + 1, idx_lon[0, i] - 1] == 18446744073709551616.0000:
+ reference['phase'][idx_lat[0, i] + 1, idx_lon[0, i] - 1] = np.nan
+ # calculate HcosG and then average
+ HcosG = np.nanmean([reference['amp'][idx_lat[0, i]+1, idx_lon[0, i]]*np.cos(
+ reference['phase'][idx_lat[0, i]+1, idx_lon[0, i]]*np.pi/180),
+ reference['amp'][idx_lat[0, i], idx_lon[0, i]+1] * np.cos(
+ reference['phase'][idx_lat[0, i], idx_lon[0, i]+1] * np.pi / 180),
+ reference['amp'][idx_lat[0, i]-1, idx_lon[0, i]] * np.cos(
+ reference['phase'][idx_lat[0, i]-1, idx_lon[0, i]] * np.pi / 180),
+ reference['amp'][idx_lat[0, i], idx_lon[0, i]-1] * np.cos(
+ reference['phase'][idx_lat[0, i], idx_lon[0, i]-1] * np.pi / 180),
+ reference['amp'][idx_lat[0, i]+1, idx_lon[0, i]+1] * np.cos(
+ reference['phase'][idx_lat[0, i]+1, idx_lon[0, i]+1] * np.pi / 180),
+ reference['amp'][idx_lat[0, i]-1, idx_lon[0, i]-1] * np.cos(
+ reference['phase'][idx_lat[0, i]-1, idx_lon[0, i]-1] * np.pi / 180),
+ reference['amp'][idx_lat[0, i]-1, idx_lon[0, i]+1] * np.cos(
+ reference['phase'][idx_lat[0, i]-1, idx_lon[0, i]+1] * np.pi / 180),
+ reference['amp'][idx_lat[0, i]+1, idx_lon[0, i]-1] * np.cos(
+ reference['phase'][idx_lat[0, i]+1, idx_lon[0, i]-1] * np.pi / 180),
+ ])
+ # calculate HsinG and then average
+ HsinG = np.nanmean([reference['amp'][idx_lat[0, i]+1, idx_lon[0, i]]*np.sin(
+ reference['phase'][idx_lat[0, i]+1, idx_lon[0, i]]*np.pi/180),
+ reference['amp'][idx_lat[0, i], idx_lon[0, i]+1] * np.sin(
+ reference['phase'][idx_lat[0, i], idx_lon[0, i]+1] * np.pi / 180),
+ reference['amp'][idx_lat[0, i]-1, idx_lon[0, i]] * np.sin(
+ reference['phase'][idx_lat[0, i]-1, idx_lon[0, i]] * np.pi / 180),
+ reference['amp'][idx_lat[0, i], idx_lon[0, i]-1] * np.sin(
+ reference['phase'][idx_lat[0, i], idx_lon[0, i]-1] * np.pi / 180),
+ reference['amp'][idx_lat[0, i]+1, idx_lon[0, i]+1] * np.sin(
+ reference['phase'][idx_lat[0, i]+1, idx_lon[0, i]+1] * np.pi / 180),
+ reference['amp'][idx_lat[0, i]-1, idx_lon[0, i]-1] * np.sin(
+ reference['phase'][idx_lat[0, i]-1, idx_lon[0, i]-1] * np.pi / 180),
+ reference['amp'][idx_lat[0, i]-1, idx_lon[0, i]+1] * np.sin(
+ reference['phase'][idx_lat[0, i]-1, idx_lon[0, i]+1] * np.pi / 180),
+ reference['amp'][idx_lat[0, i]+1, idx_lon[0, i]-1] * np.sin(
+ reference['phase'][idx_lat[0, i]+1, idx_lon[0, i]-1] * np.pi / 180),
+ ])
+ # convert back to phase
+ phase_sub[0,i] = np.arctan2(HsinG,HcosG)
+
+ lat_sub = reference['lat'][idx_lat]
+ lon_sub = reference['lon'][idx_lon]
+ subset = {'lat':lat_sub,'lon':lon_sub,'amp':amp_sub,'phase':phase_sub,'model_res':model_res}
+ return subset
+
+    # takes the pynemo extract dict and the subset fes dict (which carries the reference model resolution).
+ # returns a Pandas Dataframe with any PyNEMO values that exceed the nearest FES point by defined threshold
+ # It also checks lats and lons are within the model reference resolution
+ # i.e. ensure closest model reference point is used.
+def compare_tides(pynemo_out,subset):
+ # compare lat and lons
+ diff_lat = np.abs(pynemo_out['lat']-subset['lat'])
+ diff_lon = np.abs(pynemo_out['lon'] - subset['lon'])
+ exceed_lat = diff_lat > subset['model_res']
+ exceed_lon = diff_lon > subset['model_res']
+ exceed_sum = np.sum(exceed_lat+exceed_lon)
+ if exceed_sum > 0:
+ raise Exception('Dont Panic: Lat and/or Lon further away from model point than model resolution')
+    # suppress warnings as NaNs from averaging surrounding pixels can cause issues
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore", category=RuntimeWarning)
+ # compare amp
+ abs_amp_diff = np.abs(pynemo_out['amp']-subset['amp'])
+ # calculate threshold in percentage terms
+ logger.info('percentage amplitude exceedance calculated using the following.....')
+ amp_percentage_exceed = 26.933 * subset['amp'] ** -0.396
+ logger.info('Percentage Exceedance = 26.933 * Reference Amplitude ^ -0.396')
+ # work out difference based on percentage and reference amplitude
+ percent_diff = (abs_amp_diff / pynemo_out['amp']) * 100
+ abs_amp_thres = percent_diff > amp_percentage_exceed
+ err_amp = pynemo_out['amp'][abs_amp_thres].tolist()
+ err_amp_lats = pynemo_out['lat'][abs_amp_thres].tolist()
+ err_amp_lons = pynemo_out['lon'][abs_amp_thres].tolist()
+
+ err_ref_amp = subset['amp'][abs_amp_thres].tolist()
+ err_ref_lats_amp = subset['lat'][abs_amp_thres].tolist()
+ err_ref_lons_amp = subset['lon'][abs_amp_thres].tolist()
+
+ # compare phase
+ # change from -180-180 to 0to 360 for both pynemo and subset.
+ pynemo_out['phase'][pynemo_out['phase'] < 0.0] = pynemo_out['phase'][pynemo_out['phase'] < 0.0] + 360.0
+ subset['phase'][subset['phase'] < 0.0] = subset['phase'][subset['phase'] < 0.0] + 360.0
+ # compare phase angles between 0 and 360.
+ abs_ph = 180 - abs(abs(pynemo_out['phase'] - subset['phase']) - 180)
+ # values outside of 0 to 360 (such as erroneous fill values) end up negative
+ # so multiply by -1 to ensure they are identified as exceeding threshold
+ abs_ph[abs_ph < 0.0 ] = abs_ph[abs_ph < 0.0] *-1
+ # calculate phase threshold based on amplitude and power relationship
+ # as amplitude decreases the phase exceedance allowed increases.
+ logger.info('phase exceedance calculated using the following.....')
+ phase_thres = 5.052 * pynemo_out['amp'] ** -0.60
+ logger.info('Exceedance = 5.052 * Amplitude ^ -0.60')
+ abs_ph_thres = abs_ph > phase_thres
+
+ err_pha = pynemo_out['phase'][abs_ph_thres[0,:]].tolist()
+ err_pha_amp = pynemo_out['amp'][abs_ph_thres].tolist()
+ err_pha_lats = pynemo_out['lat'][abs_ph_thres].tolist()
+ err_pha_lons = pynemo_out['lon'][abs_ph_thres].tolist()
+
+ err_ref_pha = subset['phase'][abs_ph_thres].tolist()
+ err_ref_pha_amp = subset['amp'][abs_ph_thres].tolist()
+ err_ref_lats_pha = subset['lat'][abs_ph_thres].tolist()
+ err_ref_lons_pha = subset['lon'][abs_ph_thres].tolist()
+
+ lerr_pha, lerr_amp = len(err_pha), len(err_amp)
+ max_len = max(lerr_pha, lerr_amp)
+ if not max_len == lerr_pha:
+ err_pha.extend([''] * (max_len - lerr_pha))
+ err_pha_amp.extend([''] * (max_len - lerr_pha))
+ err_pha_lats.extend([''] * (max_len - lerr_pha))
+ err_pha_lons.extend([''] * (max_len - lerr_pha))
+ err_ref_pha.extend([''] * (max_len - lerr_pha))
+ err_ref_pha_amp.extend([''] * (max_len - lerr_pha))
+ err_ref_lats_pha.extend([''] * (max_len - lerr_pha))
+ err_ref_lons_pha.extend([''] * (max_len - lerr_pha))
+ if not max_len == lerr_amp:
+ err_amp.extend([''] * (max_len - lerr_amp))
+ err_amp_lats.extend([''] * (max_len - lerr_amp))
+ err_amp_lons.extend([''] * (max_len - lerr_amp))
+ err_ref_amp.extend([''] * (max_len - lerr_amp))
+ err_ref_lats_amp.extend([''] * (max_len - lerr_amp))
+ err_ref_lons_amp.extend([''] * (max_len - lerr_amp))
+
+ err_log = pd.DataFrame({'amp_lat':err_amp_lats,
+ 'amp_lon':err_amp_lons,
+ 'amp':err_amp,
+ 'ref_amp': err_ref_amp,
+ 'ref_amp_lats': err_ref_lats_amp,
+ 'ref_amp_lons': err_ref_lons_amp,
+ 'phase_lat':err_pha_lats,
+ 'phase_lon':err_pha_lons,
+ 'phase':err_pha,
+ 'phase_amp':err_pha_amp,
+ 'ref_phase':err_ref_pha,
+ 'ref_phase_amp':err_ref_pha_amp,
+ 'ref_phase_lats':err_ref_lats_pha,
+ 'ref_phase_lons':err_ref_lons_pha
+ })
+
+ return err_log
+
+if __name__ == '__main__':
+ main()
+
+
+
+
+
diff --git a/pynemo/tide/fes_extract_HC.py b/pynemo/tide/fes_extract_HC.py
new file mode 100644
index 00000000..99f98cd6
--- /dev/null
+++ b/pynemo/tide/fes_extract_HC.py
@@ -0,0 +1,333 @@
+'''
+This is to extract the tidal harmonic constants out of a tidal model
+for given locations
+[amp,Gph] = tpxo_extract_HC(Model,lat,lon,type,Cid)
+
+@author: Mr. Srikanth Nagella
+'''
+
+# pylint: disable=E1103
+# pylint: disable=no-name-in-module
+from netCDF4 import Dataset
+from scipy import interpolate
+import numpy as np
+
+class HcExtract(object):
+ """ This is FES model extract_hc.c implementation in python adapted from tpxo_extract_HC.py"""
+ def __init__(self, settings, lat, lon, grid_type):
+ """initialises the Extract of tide information from the netcdf
+ Tidal files"""
+ # Set tide model
+ tide_model = 'fes'
+ if tide_model == 'fes': # Define stuff to generalise Tide model
+
+ hRe_name = 'hRe'
+ hIm_name = 'hIm'
+ lon_z_name = 'lon_z'
+ lat_z_name = 'lat_z'
+ URe_name = 'URe'
+ UIm_name = 'UIm'
+ lon_u_name = 'lon_u'
+ lat_u_name = 'lat_u'
+ VRe_name = 'VRe'
+ VIm_name = 'VIm'
+ lon_v_name = 'lon_v'
+ lat_v_name = 'lat_v'
+ mz_name = 'mask_z'
+ mu_name = 'mask_u'
+ mv_name = 'mask_v'
+
+ # create list of HC using namelist file as reference
+ constituents = list(settings['clname'].values())
+ # clean strings in list and change to upper case if not already
+ for i in range(len(constituents)):
+ constituents[i] = constituents[i].strip("',/\n")
+ constituents[i] = constituents[i].upper()
+
+ self.cons = constituents
+ self.mask_dataset = {}
+
+ # extract lon and lat z data
+ lon_z = np.array(Dataset(settings['tide_fes']+constituents[i]+'_Z.nc').variables['lon'])
+ lat_z = np.array(Dataset(settings['tide_fes']+constituents[i]+'_Z.nc').variables['lat'])
+ lon_resolution = lon_z[1] - lon_z[0]
+ data_in_km = 0 # added to maintain the reference to matlab tmd code
+
+ if grid_type == 'z' or grid_type == 't':
+ # extract example amplitude grid for Z, U and V and change NaNs to 0 (for land) and other values to 1 (for water)
+ mask = np.ma.MaskedArray.filled(np.flipud(np.rot90(Dataset(settings['tide_fes'] + constituents[0] + '_Z.nc').variables['amplitude'][:])))
+ mask[mask != 18446744073709551616.00000] = 1
+ mask[mask == 18446744073709551616.00000] = 0
+ self.mask_dataset[mz_name] = mask
+
+ #read and convert the height_dataset file to complex and store in dicts
+ hRe = []
+ hIm = []
+ lat_z = np.array(Dataset(settings['tide_fes'] + constituents[i] + '_Z.nc').variables['lat'][:])
+ lon_z = np.array(Dataset(settings['tide_fes'] + constituents[i] + '_Z.nc').variables['lon'][:])
+ for ncon in range(len(constituents)):
+ amp = np.ma.MaskedArray.filled(np.flipud(np.rot90(Dataset(settings['tide_fes']+str(constituents[ncon])+'_Z.nc').variables['amplitude'][:])))
+ # set fill values to zero
+ amp[amp == 18446744073709551616.00000] = 0
+ # convert amp to m from cm
+ amp = amp/100.00
+ phase = np.ma.MaskedArray.filled(np.flipud(np.rot90(Dataset(settings['tide_fes']+constituents[ncon]+'_Z.nc').variables['phase'][:])))
+ # set fill values to 0
+ phase[phase == 18446744073709551616.00000] = 0
+ # convert to real and imaginary conjugates and also convert to radians.
+ hRe.append(amp*np.cos(phase*(np.pi/180)))
+ hIm.append(-amp*np.sin(phase*(np.pi/180)))
+ hRe = np.stack(hRe)
+ hIm = np.stack(hIm)
+ self.height_dataset = [lon_z,lat_z,hRe,hIm]
+
+ elif grid_type == 'u':
+ mask = np.ma.MaskedArray.filled(np.flipud(np.rot90(Dataset(settings['tide_fes'] + constituents[0] + '_U.nc').variables['Ua'][:])))
+ mask[mask != 18446744073709551616.00000] = 1
+ mask[mask == 18446744073709551616.00000] = 0
+ self.mask_dataset[mu_name] = mask
+ #read and convert the velocity_dataset files to complex
+
+ URe = []
+ UIm = []
+ lat_u = np.array(Dataset(settings['tide_fes'] + constituents[i] + '_U.nc').variables['lat'][:])
+ lon_u = np.array(Dataset(settings['tide_fes'] + constituents[i] + '_U.nc').variables['lon'][:])
+ for ncon in range(len(constituents)):
+ amp = np.ma.MaskedArray.filled(np.flipud(np.rot90(Dataset(settings['tide_fes']+constituents[ncon]+'_U.nc').variables['Ua'][:])))
+ # set fill values to zero
+ amp[amp == 18446744073709551616.00000] = 0
+ phase = np.ma.MaskedArray.filled(np.flipud(np.rot90(Dataset(settings['tide_fes']+constituents[ncon]+'_U.nc').variables['Ug'][:])))
+ phase[phase == 18446744073709551616.00000] = 0
+ URe.append(amp*np.cos(phase*(np.pi/180)))
+ UIm.append(-amp*np.sin(phase*(np.pi/180)))
+ URe = np.stack(URe)
+ UIm = np.stack(UIm)
+ self.Uvelocity_dataset = [lon_u,lat_u,URe,UIm]
+
+ elif grid_type == 'v':
+ mask = np.ma.MaskedArray.filled(np.flipud(np.rot90(Dataset(settings['tide_fes'] + constituents[0] + '_V.nc').variables['Va'][:])))
+ mask[mask != 18446744073709551616.00000] = 1
+ mask[mask == 18446744073709551616.00000] = 0
+ self.mask_dataset[mv_name] = mask
+
+ VRe = []
+ VIm = []
+ lat_v = np.array(Dataset(settings['tide_fes'] + constituents[i] + '_V.nc').variables['lat'][:])
+ lon_v = np.array(Dataset(settings['tide_fes'] + constituents[i] + '_V.nc').variables['lon'][:])
+ for ncon in range(len(constituents)):
+ amp = np.ma.MaskedArray.filled(np.flipud(np.rot90(Dataset(settings['tide_fes']+constituents[ncon]+'_V.nc').variables['Va'][:])))
+ # set fill value to zero
+ amp[amp == 18446744073709551616.00000] = 0
+ phase = np.ma.MaskedArray.filled(np.flipud(np.rot90(Dataset(settings['tide_fes']+constituents[ncon]+'_V.nc').variables['Vg'][:])))
+ phase[phase == 18446744073709551616.00000] = 0
+ VRe.append(amp*np.cos(phase*(np.pi/180)))
+ VIm.append(-amp*np.sin(phase*(np.pi/180)))
+ VRe = np.stack(VRe)
+ VIm = np.stack(VIm)
+ self.Vvelocity_dataset = [lon_v,lat_v,VRe,VIm]
+
+ # open grid variables these are resampled TPXO parameters so may not work correctly.
+ self.grid = Dataset(settings['tide_fes']+'grid_fes.nc')
+ height_z = np.array(np.rot90(self.grid.variables['hz']))
+ # set fill values to zero
+ height_z[height_z <0.00] = 0.00
+
+ else:
+ print('Don''t know that tide model')
+
+ # Wrap coordinates in longitude if the domain is global
+ glob = 0
+ if lon_z[-1]-lon_z[0] == 360-lon_resolution:
+ glob = 1
+ if glob == 1:
+ lon_z = np.concatenate(([lon_z[0]-lon_resolution, ], lon_z,[lon_z[-1]+lon_resolution, ]))
+ height_z = np.concatenate(([height_z[-1, :], ], height_z, [height_z[0, :],]), axis=0)
+ mask_z = np.concatenate(([mask[-1, :], ], mask, [mask[0, :], ]), axis=0)
+
+ #adjust lon convention
+ xmin = np.min(lon)
+
+ if data_in_km == 0:
+ if xmin < lon_z[0]:
+ lon[lon < 0] = lon[lon < 0] + 360
+ if xmin > lon_z[-1]:
+ lon[lon > 180] = lon[lon > 180]-360
+
+ #height_z[height_z==0] = np.NaN
+# f=interpolate.RectBivariateSpline(lon_z,lat_z,height_z,kx=1,ky=1)
+# depth = np.zeros(lon.size)
+# for idx in range(lon.size):
+# depth[idx] = f(lon[idx],lat[idx])
+# print depth[369:371]
+
+# H2 = np.ravel(height_z)
+# H2[H2==0] = np.NaN
+# points= np.concatenate((np.ravel(self.height_dataset.variables['lon_z']),
+# np.ravel(self.height_dataset.variables['lat_z'])))
+# points= np.reshape(points,(points.shape[0]/2,2),order='F')
+# print points.shape
+# print np.ravel(height_z).shape
+# depth = interpolate.griddata(points,H2,(lon,lat))
+# print depth
+# print depth.shape
+
+ height_z[height_z == 0] = np.NaN
+ lonlat = np.concatenate((lon, lat))
+ lonlat = np.reshape(lonlat, (lon.size, 2), order='F')
+
+ depth = interpolate.interpn((lon_z, lat_z), height_z, lonlat)
+# f=interpolate.RectBivariateSpline(lon_z,lat_z,mask_z,kx=1,ky=1)
+# depth_mask = np.zeros(lon.size)
+# for idx in range(lon.size):
+# depth_mask[idx] = f(lon[idx],lat[idx])
+ depth_mask = interpolate.interpn((lon_z, lat_z), mask_z, lonlat)
+ index = np.where((np.isnan(depth)) & (depth_mask > 0))
+
+ if index[0].size != 0:
+ depth[index] = bilinear_interpolation(lon_z, lat_z, height_z, lon[index], lat[index])
+
+ if grid_type == 'z' or grid_type == 't':
+ self.amp, self.gph = self.interpolate_constituents(self.height_dataset,
+ hRe_name, hIm_name, lon_z_name, lat_z_name,
+ lon, lat, maskname=mz_name)
+ elif grid_type == 'u':
+ self.amp, self.gph = self.interpolate_constituents(self.Uvelocity_dataset,
+ URe_name, UIm_name, lon_u_name, lat_u_name,
+ lon, lat, depth, maskname=mu_name)
+ elif grid_type == 'v':
+ self.amp, self.gph = self.interpolate_constituents(self.Vvelocity_dataset,
+ VRe_name, VIm_name, lon_v_name, lat_v_name,
+ lon, lat, depth, maskname=mv_name)
+ else:
+ print('Unknown grid_type')
+ return
+
+ def interpolate_constituents(self, nc_dataset, real_var_name, img_var_name, lon_var_name,
+ lat_var_name, lon, lat, height_data=None, maskname=None):
+ """ Interpolates the tidal constituents along the given lat lon coordinates """
+ amp = np.zeros((len(nc_dataset[2]), lon.shape[0]))
+ gph = np.zeros((len(nc_dataset[2]), lon.shape[0]))
+
+ data = np.array(np.ravel(nc_dataset[2]), dtype=complex)
+ data.imag = np.array(np.ravel(nc_dataset[3]))
+ data = data.reshape(nc_dataset[2].shape)
+ #data = data.reshape(1,nc_dataset['M2'][real_var_name].shape)
+ # data[data==0] = np.NaN
+
+ # Lat Lon values
+ x_values = nc_dataset[0]
+ y_values = nc_dataset[1]
+ x_resolution = x_values[1] - x_values[0]
+ glob = 0
+ if x_values[-1]-x_values[0] == 360-x_resolution:
+ glob = 1
+
+ if glob == 1:
+ x_values = np.concatenate(([x_values[0]-x_resolution,], x_values,
+ [x_values[-1]+x_resolution, ]))
+
+ #adjust lon convention
+ xmin = np.min(lon)
+ if xmin < x_values[0]:
+ lon[lon < 0] = lon[lon < 0] + 360
+ if xmin > x_values[-1]:
+ lon[lon > 180] = lon[lon > 180]-360
+
+ lonlat = np.concatenate((lon, lat))
+ lonlat = np.reshape(lonlat, (lon.size, 2), order='F')
+
+ mask = self.mask_dataset[maskname]
+ mask = np.concatenate(([mask[-1, :], ], mask, [mask[0, :], ]), axis=0)
+ #interpolate the mask values
+ maskedpoints = interpolate.interpn((x_values, y_values), mask, lonlat)
+
+ data_temp = np.zeros((data.shape[0], lon.shape[0], 2, ))
+ for cons_index in range(data.shape[0]):
+ #interpolate real values
+ data_temp[cons_index, :, 0] = interpolate_data(x_values, y_values,
+ data[cons_index, :, :].real,
+ maskedpoints, lonlat)
+ #interpolate imag values
+ data_temp[cons_index, :, 1] = interpolate_data(x_values, y_values,
+ data[cons_index, :, :].imag,
+ maskedpoints, lonlat)
+
+ zcomplex = np.array(data_temp[cons_index, :, 0], dtype=complex)
+ zcomplex.imag = data_temp[cons_index, :, 1]
+
+ amp[cons_index, :] = np.absolute(zcomplex)
+ gph[cons_index, :] = np.arctan2(-1*zcomplex.imag, zcomplex.real)
+ gph = gph*180.0/np.pi
+ gph[gph < 0] = gph[gph < 0]+360.0
+ return amp, gph
+
+def interpolate_data(lon, lat, data, mask, lonlat):
+ """ Interpolate data data on regular grid for given lonlat coordinates """
+ result = np.zeros((lonlat.shape[0], ))
+ data[data == 0] = np.NaN
+ data = np.concatenate(([data[-1, :], ], data, [data[0, :], ]), axis=0)
+ result[:] = interpolate.interpn((lon, lat), data, lonlat)
+ index = np.where((np.isnan(result)) & (mask > 0))
+ if index[0].size != 0:
+ result[index] = bilinear_interpolation(lon, lat, data, np.ravel(lonlat[index, 0]),
+ np.ravel(lonlat[index, 1]))
+ return result
+
+def bilinear_interpolation(lon, lat, data, lon_new, lat_new):
+ """ Does a bilinear interpolation of grid where the data values are NaN's"""
+ glob = 0
+ lon_resolution = lon[1] - lon[0]
+ if lon[-1] - lon[1] == 360 - lon_resolution:
+ glob = 1
+ inan = np.where(np.isnan(data))
+ data[inan] = 0
+ mask = np.zeros(data.shape)
+ mask[data != 0] = 1
+# n = lon.size
+# m = lat.size
+ if lon.size != data.shape[0] or lat.size != data.shape[1]:
+ print('Check Dimensions')
+ return np.NaN
+ if glob == 1:
+ lon = np.concatenate(([lon[0] - 2 * lon_resolution, lon[0] - lon_resolution, ],
+ lon, [lon[-1] + lon_resolution, lon[-1] + 2 * lon_resolution]))
+ data = np.concatenate((data[-2, :], data[-1, :], data, data[0, :], data[1, :]), axis=0)
+ mask = np.concatenate((mask[-2, :], mask[-1, :], mask, mask[0, :], mask[1, :]), axis=0)
+ lon_new_copy = lon_new
+
+ nonmask_index = np.where((lon_new_copy < lon[0]) & (lon_new_copy > lon[-1]))
+ if lon[-1] > 180:
+ lon_new_copy[nonmask_index] = lon_new_copy[nonmask_index] + 360
+ if lon[-1] < 0:
+ lon_new_copy[nonmask_index] = lon_new_copy[nonmask_index] - 360
+ lon_new_copy[lon_new_copy > 360] = lon_new_copy[lon_new_copy > 360] - 360
+ lon_new_copy[lon_new_copy < -180] = lon_new_copy[lon_new_copy < -180] + 360
+
+ weight_factor_0 = 1 / (4 + 2 * np.sqrt(2))
+ weight_factor_1 = weight_factor_0 / np.sqrt(2)
+ height_temp = weight_factor_1 * data[0:-2, 0:-2] + weight_factor_0 * data[0:-2, 1:-1] + \
+ weight_factor_1 * data[0:-2, 2:] + weight_factor_1 * data[2:, 0:-2] + \
+ weight_factor_0 * data[2:, 1:-1] + weight_factor_1 * data[2:, 2:] + \
+ weight_factor_0 * data[1:-1, 0:-2] + weight_factor_0 * data[1:-1, 2:]
+ mask_temp = weight_factor_1 * mask[0:-2, 0:-2] + weight_factor_0 * mask[0:-2, 1:-1] + \
+ weight_factor_1 * mask[0:-2, 2:] + weight_factor_1 * mask[2:, 0:-2] + \
+ weight_factor_0 * mask[2:, 1:-1] + weight_factor_1 * mask[2:, 2:] + \
+ weight_factor_0 * mask[1:-1, 0:-2] + weight_factor_0 * mask[1:-1, 2:]
+ mask_temp[mask_temp == 0] = 1
+ data_copy = data.copy()
+ data_copy[1:-1, 1:-1] = np.divide(height_temp, mask_temp)
+ nonmask_index = np.where(mask == 1)
+
+ lonlat = np.concatenate((lon_new_copy, lat_new))
+ lonlat = np.reshape(lonlat, (lon_new_copy.size, 2), order='F')
+ result = interpolate.interpn((lon, lat), data_copy, lonlat)
+ return result
+
+
+#lat=[42.8920,42.9549,43.0178]
+#lon=[339.4313,339.4324,339.4335]
+#lat_u=[42.8916,42.9545,43.0174]
+#lon_u=[339.4735,339.4746,339.4757]
+#lat = np.array(lat_u)
+#lon = np.array(lon_u)
+#lon = TPXO_Extract(lat,lon,'velocity_dataset')
\ No newline at end of file
diff --git a/pynemo/tide/nemo_bdy_tide.py b/pynemo/tide/nemo_bdy_tide.py
deleted file mode 100644
index 0fdf5aba..00000000
--- a/pynemo/tide/nemo_bdy_tide.py
+++ /dev/null
@@ -1,161 +0,0 @@
-'''
-
-
-
-'''
-import numpy as np
-import scipy.spatial as sp
-from netCDF4 import Dataset
-import copy # DEBUG ONLY- allows multiple runs without corruption
-import nemo_bdy_grid_angle
-#from nemo_bdy_extr_tm3 import rot_rep
-
-class Extract:
-
- def __init__(self, setup, DstCoord, Grid):
-
- self.g_type = Grid.grid_type
- DC = copy.deepcopy(DstCoord)
- dst_lon = DC.bdy_lonlat[self.g_type]['lon'][Grid.bdy_r == 0]
- dst_lat = DC.bdy_lonlat[self.g_type]['lat'][Grid.bdy_r == 0]
- self.dst_dep = DC.depths[self.g_type]['bdy_hbat'][Grid.bdy_r == 0]
- self.harm_Im = {} # tidal boundary data: Imaginary
- self.harm_Re = {} # tidal boundary data: Real
-
- # Modify lon for 0-360 TODO this needs to be auto-dectected
-
- dst_lon = np.array([x if x > 0 else x+360 for x in dst_lon])
-
- fileIDb = '/Users/jdha/Projects/pynemo_data/DATA/grid_tpxo7.2.nc' # TPX bathymetry file
- nb = Dataset(fileIDb) # Open the TPX bathybetry file using the NetCDF4-Python library
-
- # Open the TPX Datafiles using the NetCDF4-Python library
-# T_GridAngles = nemo_bdy_grid_angle.GridAngle(
-# self.settings['src_hgr'], imin, imax, jmin, jmax, 't')
-# RotStr_GridAngles = nemo_bdy_grid_angle.GridAngle(
-# self.settings['dst_hgr'], 1, maxI, 1, maxJ, self.rot_str)
-
-# self.gcos = T_GridAngles.cosval
-# self.gsin = T_GridAngles.sinval
-
- if self.g_type == 't':
- self.fileID = '/Users/jdha/Projects/pynemo_data/DATA/h_tpxo7.2.nc' # TPX sea surface height file
- self.var_Im = 'hIm'
- self.var_Re = 'hRe'
- nc = Dataset(self.fileID) # pass variable ids to nc
- lon = np.ravel(nc.variables['lon_z'][:,:]) # need to add in a east-west wrap-around
- lat = np.ravel(nc.variables['lat_z'][:,:])
- bat = np.ravel(nb.variables['hz'][:,:])
- msk = np.ravel(nb.variables['mz'][:,:])
- elif self.g_type == 'u':
- self.fileID = '/Users/jdha/Projects/pynemo_data/DATA/u_tpxo7.2.nc' # TPX velocity file
- self.var_Im = 'UIm'
- self.var_Re = 'URe'
- self.key_tr = setup['tide_trans']
- nc = Dataset(self.fileID) # pass variable ids to nc
- lon = np.ravel(nc.variables['lon_u'][:,:])
- lat = np.ravel(nc.variables['lat_u'][:,:])
- bat = np.ravel(nb.variables['hu'][:,:])
- msk = np.ravel(nb.variables['mu'][:,:])
- else:
- self.fileID = '/Users/jdha/Projects/pynemo_data/DATA/u_tpxo7.2.nc' # TPX velocity file
- self.var_Im = 'VIm'
- self.var_Re = 'VRe'
- self.key_tr = setup['tide_trans']
- nc = Dataset(self.fileID) # pass variable ids to nc
- lon = np.ravel(nc.variables['lon_v'][:,:])
- lat = np.ravel(nc.variables['lat_v'][:,:])
- bat = np.ravel(nb.variables['hv'][:,:])
- msk = np.ravel(nb.variables['mv'][:,:])
-
- # Pull out the constituents that are avaibable
- self.cons = []
- for ncon in range(nc.variables['con'].shape[0]):
- self.cons.append(nc.variables['con'][ncon,:].tostring().strip())
-
- nc.close() # Close Datafile
- nb.close() # Close Bathymetry file
-
- # Find nearest neighbours on the source grid to each dst bdy point
- source_tree = sp.cKDTree(list(zip(lon, lat)))
- dst_pts = list(zip(dst_lon, dst_lat))
- nn_dist, self.nn_id = source_tree.query(dst_pts, k=4, eps=0, p=2,
- distance_upper_bound=0.5)
-
- # Create a weighting index for interpolation onto dst bdy point
- # need to check for missing values
-
- ind = nn_dist == np.inf
-
- self.nn_id[ind] = 0 # better way of carrying None in the indices?
- dx = (lon[self.nn_id] - np.repeat(np.reshape(dst_lon,[dst_lon.size, 1]),4,axis=1) ) * np.cos(np.repeat(np.reshape(dst_lat,[dst_lat.size, 1]),4,axis=1) * np.pi / 180.)
- dy = lat[self.nn_id] - np.repeat(np.reshape(dst_lat,[dst_lat.size, 1]),4,axis=1)
-
- dist_tot = np.power((np.power(dx, 2) + np.power(dy, 2)), 0.5)
-
- self.msk = msk[self.nn_id]
- self.bat = bat[self.nn_id]
-
- dist_tot[ind | self.msk] = np.nan
-
- dist_wei = 1/( np.divide(dist_tot,(np.repeat(np.reshape(np.nansum(dist_tot,axis=1),[dst_lat.size, 1]),4,axis=1)) ) )
-
- self.nn_wei = dist_wei/np.repeat(np.reshape(np.nansum(dist_wei, axis=1),[dst_lat.size, 1]),4,axis=1)
- self.nn_wei[ind | self.msk] = 0.
-
- # Need to identify missing points and throw a warning and set values to zero
-
- mv = np.sum(self.wei,axis=1) == 0
- print('##WARNING## There are', np.sum(mv), 'missing values, these will be set to ZERO')
-
- def extract_con(self, con):
-
- if con in self.cons:
- con_ind = self.cons.index(con)
-
- # Extract the complex amplitude components
-
- nc = Dataset(self.fileID) # pass variable ids to nc
-
- vIm = np.ravel(nc.variables[self.var_Im][con_ind,:,:])
- vRe = np.ravel(nc.variables[self.var_Re][con_ind,:,:])
-
- nc.close()
-
- if self.g_type != 't':
-
- self.harm_Im[con] = np.sum(vIm[self.nn_id]*self.nn_wei,axis=1)
- self.harm_Re[con] = np.sum(vRe[self.nn_id]*self.nn_wei,axis=1)
-
- else: # Convert transports to velocities
-
- if self.key_tr == True: # We convert to velocity using tidal model bathymetry
-
- self.harm_Im[con] = np.sum(vIm[self.nn_id]*self.nn_wei,axis=1)/np.sum(self.bat[self.nn_id]*self.nn_wei,axis=1)
- self.harm_Re[con] = np.sum(vRe[self.nn_id]*self.nn_wei,axis=1)/np.sum(self.bat[self.nn_id]*self.nn_wei,axis=1)
-
- else: # We convert to velocity using the regional model bathymetry
-
- self.harm_Im[con] = np.sum(vIm[self.nn_id]*self.nn_wei,axis=1)/self.dst_dep
- self.harm_Re[con] = np.sum(vRe[self.nn_id]*self.nn_wei,axis=1)/self.dst_dep
-
-
- # Rotate vectors
-
- self.harm_Im_rot[con] = self.rot_rep(self.harm_Im[con], self.harm_Im[con], self.rot_str,
- 'en to %s' %self.rot_dir, self.dst_gcos, self.dst_gsin)
- self.harm_Re_rot[con] = self.rot_rep(self.harm_Re[con], self.harm_Re[con], self.rot_str,
- 'en to %s' %self.rot_dir, self.dst_gcos, self.dst_gsin)
-
- else:
-
- # throw some warning
- print('##WARNING## Missing constituent values will be set to ZERO')
-
- self.harm_Im[con] = np.zeros(self.nn_id[:,0].size)
- self.harm_Re[con] = np.zeros(self.nn_id[:,0].size)
-
-
-
-
-
diff --git a/pynemo/tide/nemo_bdy_tide2.py b/pynemo/tide/nemo_bdy_tide2.py
deleted file mode 100644
index 581a012f..00000000
--- a/pynemo/tide/nemo_bdy_tide2.py
+++ /dev/null
@@ -1 +0,0 @@
-Non
\ No newline at end of file
diff --git a/pynemo/tide/nemo_bdy_tide3.py b/pynemo/tide/nemo_bdy_tide3.py
index 7c9a148a..35513e3e 100644
--- a/pynemo/tide/nemo_bdy_tide3.py
+++ b/pynemo/tide/nemo_bdy_tide3.py
@@ -2,12 +2,14 @@
Module to extract constituents for the input grid mapped onto output grid
@author: Mr. Srikanth Nagella
+@author: thopri
'''
# pylint: disable=E1103
# pylint: disable=no-name-in-module
import copy
from . import tpxo_extract_HC
+from . import fes_extract_HC
import numpy as np
from netCDF4 import Dataset
from pynemo import nemo_bdy_grid_angle
@@ -16,8 +18,10 @@
import logging
-def nemo_bdy_tpx7p2_rot(setup, DstCoord, Grid_T, Grid_U, Grid_V, comp):
- """ TPXO Global Tidal model interpolation including rotation grid"""
+def nemo_bdy_tide_rot(setup, DstCoord, Grid_T, Grid_U, Grid_V, comp,tide_model):
+ """ Global Tidal model interpolation including rotation grid"""
+ # set tide model string to lowercase if not already
+ tide_model = tide_model.lower()
key_transport = 0 # compute the velocities from transport
numharm = len(comp)
logger = logging.getLogger(__name__)
@@ -27,22 +31,37 @@ def nemo_bdy_tpx7p2_rot(setup, DstCoord, Grid_T, Grid_U, Grid_V, comp):
dst_lat = DC.bdy_lonlat[g_type]['lat'][Grid_T.bdy_r == 0]
#nbdyz = len(Grid_T.bdy_i)
- nbdyu = len(Grid_U.bdy_i)
- nbdyv = len(Grid_V.bdy_i)
+ nbdyu = len(np.where(Grid_U.bdy_r == 0)[0])
+ nbdyv = len(np.where(Grid_V.bdy_r == 0)[0])
+
+ # TODO: change from if statement defining HC extract to string passed that defines HC extract script
+    # e.g. pass 'tpxo' for tpxo_extract_HC.py or 'fes' for fes_extract_HC.py. This will make it easier to add new
+ # databases of HC
#convert the dst_lon into TMD Conventions (0E/360E)
dst_lon[dst_lon < 0.0] = dst_lon[dst_lon < 0.0]+360.0
#extract the surface elevation at each z-point
- tpxo_z = tpxo_extract_HC.TpxoExtract(setup.settings, dst_lat, dst_lon, g_type)
+ if tide_model == 'tpxo':
+ tpxo_z = tpxo_extract_HC.HcExtract(setup.settings, dst_lat, dst_lon, g_type)
+ if tide_model == 'fes':
+ fes_z = fes_extract_HC.HcExtract(setup.settings,dst_lat,dst_lon,g_type)
+
#convert back the z-longitudes into the usual conventions (-180E/+180E)
dst_lon[dst_lon > 180.0] = dst_lon[dst_lon > 180.0]-360.0
#check if elevation data are missing
- ind = np.where((np.isnan(tpxo_z.amp)) | (np.isnan(tpxo_z.gph)))
+ if tide_model == 'tpxo':
+ ind = np.where((np.isnan(tpxo_z.amp)) | (np.isnan(tpxo_z.gph)))
+ if tide_model == 'fes':
+ ind = np.where((np.isnan(fes_z.amp)) | (np.isnan(fes_z.gph)))
if ind[0].size > 0:
logger.warning('Missing elveation along the open boundary')
+ if tide_model == 'tpxo':
+ ampz = tpxo_z.amp
+ phaz = tpxo_z.gph
+ if tide_model == 'fes':
+ ampz = fes_z.amp
+ phaz = fes_z.gph
- ampz = tpxo_z.amp
- phaz = tpxo_z.gph
ampz[ind] = 0.0
phaz[ind] = 0.0
@@ -52,16 +71,25 @@ def nemo_bdy_tpx7p2_rot(setup, DstCoord, Grid_T, Grid_U, Grid_V, comp):
#convert the U-longitudes into the TMD conventions (0/360E)
dst_lon[dst_lon < 0.0] = dst_lon[dst_lon < 0.0]+360.0
+ if tide_model == 'tpxo':
+ tpxo_ux = tpxo_extract_HC.HcExtract(setup.settings, dst_lat, dst_lon, Grid_U.grid_type)
+ tpxo_vx = tpxo_extract_HC.HcExtract(setup.settings, dst_lat, dst_lon, Grid_V.grid_type)
- tpxo_ux = tpxo_extract_HC.TpxoExtract(setup.settings, dst_lat, dst_lon, Grid_U.grid_type)
- tpxo_vx = tpxo_extract_HC.TpxoExtract(setup.settings, dst_lat, dst_lon, Grid_V.grid_type)
+ ampuX = tpxo_ux.amp
+ phauX = tpxo_ux.gph
+ ampvX = tpxo_vx.amp
+ phavX = tpxo_vx.gph
- ampuX = tpxo_ux.amp
- phauX = tpxo_ux.gph
- ampvX = tpxo_vx.amp
- phavX = tpxo_vx.gph
+ if tide_model == 'fes':
+ fes_ux = fes_extract_HC.HcExtract(setup.settings, dst_lat, dst_lon, Grid_U.grid_type)
+ fes_vx = fes_extract_HC.HcExtract(setup.settings, dst_lat, dst_lon, Grid_V.grid_type)
- #check if ux data are missing
+ ampuX = fes_ux.amp
+ phauX = fes_ux.gph
+ ampvX = fes_vx.amp
+ phavX = fes_vx.gph
+
+ #check if ux data are missing
ind = np.where((np.isnan(ampuX)) | (np.isnan(phauX)))
if ind[0].size > 0:
logger.warning('Missing zonal velocity along the x open boundary')
@@ -83,13 +111,23 @@ def nemo_bdy_tpx7p2_rot(setup, DstCoord, Grid_T, Grid_U, Grid_V, comp):
#convert the U-longitudes into the TMD conventions (0/360E)
dst_lon[dst_lon < 0.0] = dst_lon[dst_lon < 0.0]+360.0
- tpxo_uy = tpxo_extract_HC.TpxoExtract(setup.settings, dst_lat, dst_lon, Grid_U.grid_type)
- tpxo_vy = tpxo_extract_HC.TpxoExtract(setup.settings, dst_lat, dst_lon, Grid_V.grid_type)
+ if tide_model == 'tpxo':
+ tpxo_uy = tpxo_extract_HC.HcExtract(setup.settings, dst_lat, dst_lon, Grid_U.grid_type)
+ tpxo_vy = tpxo_extract_HC.HcExtract(setup.settings, dst_lat, dst_lon, Grid_V.grid_type)
+
+ ampuY = tpxo_uy.amp
+ phauY = tpxo_uy.gph
+ ampvY = tpxo_vy.amp
+ phavY = tpxo_vy.gph
+
+ if tide_model == 'fes':
+ fes_uy = fes_extract_HC.HcExtract(setup.settings, dst_lat, dst_lon, Grid_U.grid_type)
+ fes_vy = fes_extract_HC.HcExtract(setup.settings, dst_lat, dst_lon, Grid_V.grid_type)
- ampuY = tpxo_uy.amp
- phauY = tpxo_uy.gph
- ampvY = tpxo_vy.amp
- phavY = tpxo_vy.gph
+ ampuY = fes_uy.amp
+ phauY = fes_uy.gph
+ ampvY = fes_vy.amp
+ phavY = fes_vy.gph
#check if ux data are missing
ind = np.where((np.isnan(ampuY)) | (np.isnan(phauY)))
@@ -177,7 +215,12 @@ def nemo_bdy_tpx7p2_rot(setup, DstCoord, Grid_T, Grid_U, Grid_V, comp):
cosvY = np.zeros((numharm, nbdyv))
sinvY = np.zeros((numharm, nbdyv))
- compindx = constituents_index(tpxo_z.cons, comp)
+ if tide_model == 'tpxo':
+ compindx = constituents_index(tpxo_z.cons, comp)
+
+ if tide_model == 'fes':
+ compindx = constituents_index(fes_z.cons, comp)
+
for h in range(0, numharm):
c = int(compindx[h])
if c != -1:
@@ -217,11 +260,14 @@ def nemo_bdy_tpx7p2_rot(setup, DstCoord, Grid_T, Grid_U, Grid_V, comp):
dst_gsin = grid_angles.sinval
#retain only boundary points rotation information
- tmp_gcos = np.zeros(Grid_U.bdy_i.shape[0])
- tmp_gsin = np.zeros(Grid_U.bdy_i.shape[0])
- for index in range(Grid_U.bdy_i.shape[0]):
- tmp_gcos[index] = dst_gcos[Grid_U.bdy_i[index, 1], Grid_U.bdy_i[index, 0]]
- tmp_gsin[index] = dst_gsin[Grid_U.bdy_i[index, 1], Grid_U.bdy_i[index, 0]]
+ tmp_gcos = np.zeros(len(np.where(Grid_U.bdy_r == 0)[0]))
+ tmp_gsin = np.zeros(len(np.where(Grid_U.bdy_r == 0)[0]))
+
+ bdy_r_in_i = Grid_U.bdy_i[Grid_U.bdy_r == 0]
+
+ for index in range(len(np.where(Grid_U.bdy_r == 0)[0])):
+ tmp_gcos[index] = dst_gcos[bdy_r_in_i[index, 1], bdy_r_in_i[index, 0]]
+ tmp_gsin[index] = dst_gsin[bdy_r_in_i[index, 1], bdy_r_in_i[index, 0]]
dst_gcos = tmp_gcos
dst_gsin = tmp_gsin
@@ -236,11 +282,14 @@ def nemo_bdy_tpx7p2_rot(setup, DstCoord, Grid_T, Grid_U, Grid_V, comp):
dst_gsin = grid_angles.sinval
#retain only boundary points rotation information
- tmp_gcos = np.zeros(Grid_V.bdy_i.shape[0])
- tmp_gsin = np.zeros(Grid_V.bdy_i.shape[0])
- for index in range(Grid_V.bdy_i.shape[0]):
- tmp_gcos[index] = dst_gcos[Grid_V.bdy_i[index, 1], Grid_V.bdy_i[index, 0]]
- tmp_gsin[index] = dst_gsin[Grid_V.bdy_i[index, 1], Grid_V.bdy_i[index, 0]]
+ tmp_gcos = np.zeros(len(np.where(Grid_V.bdy_r == 0)[0]))
+ tmp_gsin = np.zeros(len(np.where(Grid_V.bdy_r == 0)[0]))
+
+ bdy_r_in_i = Grid_V.bdy_i[Grid_V.bdy_r == 0]
+
+ for index in range(len(np.where(Grid_V.bdy_r == 0)[0])):
+ tmp_gcos[index] = dst_gcos[bdy_r_in_i[index, 1], bdy_r_in_i[index, 0]]
+ tmp_gsin[index] = dst_gsin[bdy_r_in_i[index, 1], bdy_r_in_i[index, 0]]
dst_gcos = tmp_gcos
dst_gsin = tmp_gsin
@@ -248,7 +297,10 @@ def nemo_bdy_tpx7p2_rot(setup, DstCoord, Grid_T, Grid_U, Grid_V, comp):
sinv = rot_rep(sinuY, sinvY, 'v', 'en to j', dst_gcos, dst_gsin)
#return the values
- return cosz, sinz, cosu, sinu, cosv, sinv
+ cons = {}
+ cons['cos'] = {'z':cosz,'u':cosu,'v':cosv}
+ cons['sin'] = {'z':sinz,'u':sinu,'v':sinv}
+ return cons
def constituents_index(constituents, inputcons):
diff --git a/pynemo/tide/nemo_bdy_tide_ncgen.py b/pynemo/tide/nemo_bdy_tide_ncgen.py
index 43943fa7..9a832b50 100644
--- a/pynemo/tide/nemo_bdy_tide_ncgen.py
+++ b/pynemo/tide/nemo_bdy_tide_ncgen.py
@@ -7,8 +7,9 @@
from netCDF4 import Dataset
import datetime
import logging
+from pynemo import nemo_ncml_parse as ncml_parse
-def CreateBDYTideNetcdfFile(filename, N,I,J,h,fv,grd):
+def CreateBDYTideNetcdfFile(filename, N,I,J,h,fv,grd,ncml_out):
gridNames = ['T', 'U', 'V']
# Dimension Lengths
@@ -21,104 +22,124 @@ def CreateBDYTideNetcdfFile(filename, N,I,J,h,fv,grd):
ncid = Dataset(filename, 'w', clobber=True, format='NETCDF4')
#define dimensions
- dimxbID = ncid.createDimension('xb',xb_len)
- dimybID = ncid.createDimension('yb',yb_len)
- dimxID = ncid.createDimension('x', x_len)
- dimyID = ncid.createDimension('y', y_len)
+ xb = ncml_parse.dst_dims(ncml_out, 'xb')
+ dimxbID = ncid.createDimension(xb,xb_len)
+ yb = ncml_parse.dst_dims(ncml_out, 'yb')
+ dimybID = ncid.createDimension(yb,yb_len)
+ x = ncml_parse.dst_dims(ncml_out, 'x')
+ dimxID = ncid.createDimension(x, x_len)
+ y = ncml_parse.dst_dims(ncml_out, 'y')
+ dimyID = ncid.createDimension(y, y_len)
+
+ #define variable
+ lon_var = ncml_parse.dst_var(ncml_out, 'nav_lon')
+ varlonID = ncid.createVariable(lon_var['name'], lon_var['type'], (lon_var['shape'][0], lon_var['shape'][1], ))
+ lat_var = ncml_parse.dst_var(ncml_out, 'nav_lat')
+ varlatID = ncid.createVariable(lat_var['name'], lat_var['type'], (lat_var['shape'][0], lat_var['shape'][1], ))
- #define variable
- varlonID = ncid.createVariable('nav_lon','f4',('y','x',))
- varlatID = ncid.createVariable('nav_lat','f4',('y','x',))
-
-
if grd =='T':
- varmskID = ncid.createVariable('bdy_msk','f4',('y','x',),fill_value=fv)
- varz1ID = ncid.createVariable('z1','f4',('yb','xb',),fill_value=fv)
- varz2ID = ncid.createVariable('z2','f4',('yb','xb',),fill_value=fv)
+ bdy_msk = ncml_parse.dst_var(ncml_out, 'bdy_msk')
+ varmskID = ncid.createVariable(bdy_msk['name'], bdy_msk['type'], (bdy_msk['shape'][0], bdy_msk['shape'][1],),fill_value=fv)
+ z1 = ncml_parse.dst_var(ncml_out,'z1')
+ varz1ID = ncid.createVariable(z1['name'],z1['type'],(z1['shape'][0],z1['shape'][1],),fill_value=fv)
+ z2 = ncml_parse.dst_var(ncml_out,'z2')
+ varz2ID = ncid.createVariable(z2['name'],z2['type'],(z2['shape'][0],z2['shape'][1],),fill_value=fv)
+
elif grd == 'U':
- varu1ID = ncid.createVariable('u1','f4',('yb','xb',),fill_value=fv)
- varu2ID = ncid.createVariable('u2','f4',('yb','xb',),fill_value=fv)
+ bdy_msk = ncml_parse.dst_var(ncml_out, 'bdy_msk')
+ varmskID = ncid.createVariable(bdy_msk['name'], bdy_msk['type'], (bdy_msk['shape'][0], bdy_msk['shape'][1],),fill_value=fv)
+ u1 = ncml_parse.dst_var(ncml_out,'u1')
+ varu1ID = ncid.createVariable(u1['name'],u1['type'],(u1['shape'][0],u1['shape'][1],),fill_value=fv)
+ u2 = ncml_parse.dst_var(ncml_out,'u2')
+ varu2ID = ncid.createVariable(u2['name'],u2['type'],(u2['shape'][0],u2['shape'][1],),fill_value=fv)
elif grd == 'V':
- varv1ID = ncid.createVariable('v1','f4',('yb','xb',),fill_value=fv)
- varv2ID = ncid.createVariable('v2','f4',('yb','xb',),fill_value=fv)
+ bdy_msk = ncml_parse.dst_var(ncml_out, 'bdy_msk')
+ varmskID = ncid.createVariable(bdy_msk['name'], bdy_msk['type'], (bdy_msk['shape'][0], bdy_msk['shape'][1],),fill_value=fv)
+ v1 = ncml_parse.dst_var(ncml_out,'v1')
+ varv1ID = ncid.createVariable(v1['name'],v1['type'],(v1['shape'][0],v1['shape'][1],),fill_value=fv)
+ v2 = ncml_parse.dst_var(ncml_out,'v2')
+ varv2ID = ncid.createVariable(v2['name'],v2['type'],(v2['shape'][0],v2['shape'][1],),fill_value=fv)
else :
logging.error("Unknown Grid input")
- varnbiID = ncid.createVariable('nbidta','i4',('yb','xb',))
- varnbjID = ncid.createVariable('nbjdta','i4',('yb','xb',))
- varnbrID = ncid.createVariable('nbrdta','i4',('yb','xb',))
+ nbidta = ncml_parse.dst_var(ncml_out,'nbidta')
+ varnbiID = ncid.createVariable(nbidta['name'], nbidta['type'], (nbidta['shape'][0], nbidta['shape'][1], ))
+ nbjdta = ncml_parse.dst_var(ncml_out,'nbjdta')
+ varnbjID = ncid.createVariable(nbjdta['name'], nbjdta['type'], (nbjdta['shape'][0], nbjdta['shape'][1], ))
+ nbrdta = ncml_parse.dst_var(ncml_out, 'nbrdta')
+ varnbrID = ncid.createVariable(nbrdta['name'], nbrdta['type'], (nbrdta['shape'][0], nbrdta['shape'][1], ))
#Global Attributes
ncid.file_name = filename
ncid.creation_date = str(datetime.datetime.now())
ncid.history = h
- ncid.institution = 'National Oceanography Centre, Livepool, U.K.'
+ ncid.institution = ncml_parse.dst_glob_attrib(ncml_out,'institution')
#Longitude axis attributes
- varlonID.axis = 'Longitude'
- varlonID.short_name = 'nav_lon'
- varlonID.units = 'degrees_east'
- varlonID.long_name = 'Longitude'
+ varlonID.axis = ncml_parse.dst_var_attrib(ncml_out,lon_var['name'],'axis')
+ varlonID.short_name = ncml_parse.dst_var_attrib(ncml_out,lon_var['name'],'short_name')
+ varlonID.units = ncml_parse.dst_var_attrib(ncml_out,lon_var['name'],'units')
+ varlonID.long_name = ncml_parse.dst_var_attrib(ncml_out,lon_var['name'],'long_name')
#Latitude axis attributes
- varlatID.axis = 'Latitude'
- varlatID.short_name = 'nav_lat'
- varlatID.units = 'degrees_east'
- varlatID.long_name = 'Latitude'
+ varlatID.axis = ncml_parse.dst_var_attrib(ncml_out,lat_var['name'],'axis')
+ varlatID.short_name = ncml_parse.dst_var_attrib(ncml_out,lat_var['name'],'short_name')
+ varlatID.units = ncml_parse.dst_var_attrib(ncml_out,lat_var['name'],'units')
+ varlatID.long_name = ncml_parse.dst_var_attrib(ncml_out,lat_var['name'],'long_name')
#nbidta attributes
- varnbiID.short_name = 'nbidta'
- varnbiID.units = 'unitless'
- varnbiID.long_name = 'Bdy i indices'
+ varnbiID.short_name = ncml_parse.dst_var_attrib(ncml_out,nbidta['name'],'short_name')
+ varnbiID.units = ncml_parse.dst_var_attrib(ncml_out,nbidta['name'],'units')
+ varnbiID.long_name = ncml_parse.dst_var_attrib(ncml_out,nbidta['name'],'long_name')
#nbjdta attributes
- varnbjID.short_name = 'nbjdta'
- varnbjID.units = 'unitless'
- varnbjID.long_name = 'Bdy j indices'
+ varnbjID.short_name = ncml_parse.dst_var_attrib(ncml_out,nbjdta['name'],'short_name')
+ varnbjID.units = ncml_parse.dst_var_attrib(ncml_out,nbjdta['name'],'units')
+ varnbjID.long_name = ncml_parse.dst_var_attrib(ncml_out,nbjdta['name'],'long_name')
#nbrdta attributes
- varnbrID.short_name = 'nbrdta'
- varnbrID.units = 'unitless'
- varnbrID.long_name = 'Bdy discrete distance'
+ varnbrID.short_name = ncml_parse.dst_var_attrib(ncml_out,nbrdta['name'],'short_name')
+ varnbrID.units = ncml_parse.dst_var_attrib(ncml_out,nbrdta['name'],'units')
+ varnbrID.long_name = ncml_parse.dst_var_attrib(ncml_out,nbrdta['name'],'long_name')
+
if grd == 'T' :
-
- varmskID.short_name = 'bdy_msk'
- varmskID.units = 'unitless'
- varmskID.long_name = 'Structured boundary mask'
+ varmskID.short_name = ncml_parse.dst_var_attrib(ncml_out,varmskID.name,'short_name')
+ varmskID.units = ncml_parse.dst_var_attrib(ncml_out,varmskID.name,'units')
+ varmskID.long_name = ncml_parse.dst_var_attrib(ncml_out,varmskID.name,'long_name')
- varz1ID.units = 'm'
- varz1ID.short_name = 'z1'
- varz1ID.long_name = 'tidal elevation: cosine'
- varz1ID.grid = 'bdyT'
+ varz1ID.units = ncml_parse.dst_var_attrib(ncml_out,varz1ID.name,'units')
+ varz1ID.short_name = ncml_parse.dst_var_attrib(ncml_out,varz1ID.name,'short_name')
+ varz1ID.long_name = ncml_parse.dst_var_attrib(ncml_out,varz1ID.name,'long_name')
+ varz1ID.grid = ncml_parse.dst_var_attrib(ncml_out,varz1ID.name,'grid')
- varz2ID.units = 'm'
- varz2ID.short_name = 'z2'
- varz2ID.long_name = 'tidal elevation: sine'
- varz2ID.grid = 'bdyT'
+ varz2ID.units = ncml_parse.dst_var_attrib(ncml_out,varz2ID.name,'units')
+ varz2ID.short_name = ncml_parse.dst_var_attrib(ncml_out,varz2ID.name,'short_name')
+ varz2ID.long_name = ncml_parse.dst_var_attrib(ncml_out,varz2ID.name,'long_name')
+ varz2ID.grid = ncml_parse.dst_var_attrib(ncml_out,varz2ID.name,'grid')
elif grd == 'U' :
- varu1ID.units = 'm/s'
- varu1ID.short_name = 'u1'
- varu1ID.long_name = 'tidal east velocity: cosine'
- varu1ID.grid = 'bdyU'
+ varu1ID.units = ncml_parse.dst_var_attrib(ncml_out,varu1ID.name,'units')
+ varu1ID.short_name = ncml_parse.dst_var_attrib(ncml_out,varu1ID.name,'short_name')
+ varu1ID.long_name = ncml_parse.dst_var_attrib(ncml_out,varu1ID.name,'long_name')
+ varu1ID.grid = ncml_parse.dst_var_attrib(ncml_out,varu1ID.name,'grid')
- varu2ID.units = 'm/s'
- varu2ID.short_name = 'u2'
- varu2ID.long_name = 'tidal east velocity: sine'
- varu2ID.grid = 'bdyU'
+ varu2ID.units = ncml_parse.dst_var_attrib(ncml_out,varu2ID.name,'units')
+ varu2ID.short_name = ncml_parse.dst_var_attrib(ncml_out,varu2ID.name,'short_name')
+ varu2ID.long_name = ncml_parse.dst_var_attrib(ncml_out,varu2ID.name,'long_name')
+ varu2ID.grid = ncml_parse.dst_var_attrib(ncml_out,varu2ID.name,'grid')
elif grd == 'V':
- varv1ID.units = 'm/s'
- varv1ID.short_name = 'v1'
- varv1ID.long_name = 'tidal north velocity: cosine'
- varv1ID.grid = 'bdyV'
+ varv1ID.units = ncml_parse.dst_var_attrib(ncml_out,varv1ID.name,'units')
+ varv1ID.short_name = ncml_parse.dst_var_attrib(ncml_out,varv1ID.name,'short_name')
+ varv1ID.long_name = ncml_parse.dst_var_attrib(ncml_out,varv1ID.name,'long_name')
+ varv1ID.grid = ncml_parse.dst_var_attrib(ncml_out,varv1ID.name,'grid')
- varv2ID.units = 'm/s'
- varv2ID.short_name = 'v2'
- varv2ID.long_name = 'tidal north velocity: sine'
- varv2ID.grid = 'bdyV'
+ varv2ID.units = ncml_parse.dst_var_attrib(ncml_out,varv2ID.name,'units')
+ varv2ID.short_name = ncml_parse.dst_var_attrib(ncml_out,varv2ID.name,'short_name')
+ varv2ID.long_name = ncml_parse.dst_var_attrib(ncml_out,varv2ID.name,'long_name')
+ varv2ID.grid = ncml_parse.dst_var_attrib(ncml_out,varv2ID.name,'grid')
else :
logging.error('Unknown Grid')
diff --git a/pynemo/tide/tpxo_extract_HC.py b/pynemo/tide/tpxo_extract_HC.py
index 10cf627d..1943c53d 100644
--- a/pynemo/tide/tpxo_extract_HC.py
+++ b/pynemo/tide/tpxo_extract_HC.py
@@ -12,7 +12,7 @@
from scipy import interpolate
import numpy as np
-class TpxoExtract(object):
+class HcExtract(object):
""" This is TPXO model extract_hc.c implementation in python"""
def __init__(self, settings, lat, lon, grid_type):
"""initialises the Extract of tide information from the netcdf
@@ -51,7 +51,7 @@ def __init__(self, settings, lat, lon, grid_type):
# Pull out the constituents that are avaibable
self.cons = []
for ncon in range(self.height_dataset.variables['con'].shape[0]):
- self.cons.append(self.height_dataset.variables['con'][ncon, :].tostring().strip())
+ self.cons.append(self.height_dataset.variables['con'][ncon, :].tostring().strip().decode())
elif tide_model == 'FES':
constituents = ['2N2','EPS2','J1','K1','K2','L2','LA2','M2','M3','M4','M6','M8','MF','MKS2','MM','MN4','MS4','MSF','MSQM','MTM','MU2','N2','N4','NU2','O1','P1','Q1','R2','S1','S2','S4','SA','SSA','T2']
print('did not actually code stuff for FES in this routine. Though that would be ideal. Instead put it in fes_extract_HC.py')
diff --git a/pynemo/unit_tests/UT_config.py b/pynemo/unit_tests/UT_config.py
new file mode 100644
index 00000000..46705e04
--- /dev/null
+++ b/pynemo/unit_tests/UT_config.py
@@ -0,0 +1,14 @@
+# -*- coding: utf-8 -*-
+"""
+Config file for unit test functions; file paths for I/O etc. are defined here.
+
+"""
+
+# test input directory
+input_dir = 'pynemo/unit_tests/test_inputs/'
+# test output directory
+output_dir = 'pynemo/unit_tests/test_outputs/'
+# namelist directory
+unit_dir = 'pynemo/unit_tests/namelists/'
+# plot grids
+plot_grids = True
diff --git a/pynemo/unit_tests/__init__.py b/pynemo/unit_tests/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/pynemo/unit_tests/gen_tools.py b/pynemo/unit_tests/gen_tools.py
new file mode 100644
index 00000000..f09f7978
--- /dev/null
+++ b/pynemo/unit_tests/gen_tools.py
@@ -0,0 +1,624 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Wed Mar 11 11:16:57 2020
+
+unit test grid generation tools
+
+@author: thopri and jdha (for example code)
+
+"""
+from netCDF4 import Dataset
+from math import cos,sin,radians
+import numpy as np
+import matplotlib.pyplot as plt
+from math import radians
+
+
+def set_hgrid(dx, dy, jpi, jpj, zoffx=0, zoffy=0,sf=1):
+ if sf > 1:
+ jpi = int(jpj - (jpi/sf))+1
+ jpj = int(jpj -(jpj/sf))+1
+ # Set grid positions [km]
+ latt = np.zeros((jpi, jpj))
+ lont = np.zeros((jpi, jpj))
+ lonu = np.zeros((jpi, jpj))
+ latu = np.zeros((jpi, jpj))
+ lonv = np.zeros((jpi, jpj))
+ latv = np.zeros((jpi, jpj))
+ lonf = np.zeros((jpi, jpj))
+ latf = np.zeros((jpi, jpj))
+
+ for i in range(0, jpi):
+ lont[i, :] = zoffx * dx * 1.e-3 + dx * 1.e-3 * np.float(i)
+ lonu[i, :] = zoffx * dx * 1.e-3 + dx * 1.e-3 * (np.float(i) + 0.5)
+
+ for j in range(0, jpj):
+ latt[:, j] = zoffy * dy * 1.e-3 + dy * 1.e-3 * float(j)
+ latv[:, j] = zoffy * dy * 1.e-3 + dy * 1.e-3 * (float(j) + 0.5)
+
+ lonv = lont
+ lonf = lonu
+ latu = latt
+ latf = latv
+
+ e1t = np.ones((jpi, jpj)) * dx
+ e2t = np.ones((jpi, jpj)) * dy
+ e1u = np.ones((jpi, jpj)) * dx
+ e2u = np.ones((jpi, jpj)) * dy
+ e1v = np.ones((jpi, jpj)) * dx
+ e2v = np.ones((jpi, jpj)) * dy
+ e1f = np.ones((jpi, jpj)) * dx
+ e2f = np.ones((jpi, jpj)) * dy
+
+ # Set bathymetry [m]:
+ batt = 500. + 0.5 * 1500. * (1.0 + np.tanh((lont - 40.) / 7.))
+
+ # Set surface mask:
+ ktop = np.zeros((jpi, jpj))
+ #ktop[1:jpi - 1, nghost + 1:jpj - nghost - 1] = 1
+ #batt = np.where((ktop == 0.), 0., batt)
+
+ # Set coriolis parameter:
+ ff_t = np.zeros((jpi, jpj))
+ ff_f = np.zeros((jpi, jpj))
+
+ grid_h = {'lont':lont, 'latt':latt, 'lonu':lonu, 'latu':latu, 'lonv':lonv, 'latv':latv, 'lonf':lonf, 'latf':latf, \
+ 'e1t':e1t, 'e2t':e2t, 'e1u':e1u, 'e2u':e2u, 'e1v':e1v, 'e2v':e2v, 'e1f':e1f, 'e2f':e2f, 'batt':batt, \
+ 'ktop':ktop, 'ff_f':ff_f, 'ff_t':ff_t,'jpi':jpi,'jpj':jpj,'dy':dy,'dx':dx}
+
+ return grid_h
+
+def set_zgrid(grid_h,jpk,max_dep,min_dep,z_dim):
+
+ jpi = grid_h['jpi']
+ jpj = grid_h['jpj']
+
+ dept_1d = np.linspace(min_dep,max_dep,jpk)
+ e3t_1d = np.linspace(1.0,z_dim,jpk)
+ e3w_1d = np.linspace(1.0,z_dim,jpk)
+ gdept_0 = np.linspace(min_dep,max_dep,jpk)
+ gdepw_0 = np.linspace(min_dep,max_dep,jpk)
+
+ e3t = np.zeros((jpi, jpj, len(e3t_1d)))
+ e3u = np.zeros((jpi, jpj, len(e3t_1d)))
+ e3v = np.zeros((jpi, jpj, len(e3t_1d)))
+ e3w = np.zeros((jpi, jpj, len(e3w_1d)))
+ e3f = np.zeros((jpi, jpj, len(e3t_1d)))
+ e3uw = np.zeros((jpi, jpj, len(e3t_1d)))
+ e3vw = np.zeros((jpi, jpj, len(e3t_1d)))
+ gdept = np.zeros((jpi,jpj,len(gdept_0)))
+ gdepw = np.zeros((jpi,jpj,len(gdepw_0)))
+
+ e3t[:] = e3t_1d
+ e3u[:] = e3t_1d
+ e3v[:] = e3t_1d
+ e3w[:] = e3w_1d
+ e3f[:] = e3t_1d
+ e3uw[:] = e3t_1d
+ e3vw[:] = e3t_1d
+ gdept[:] = gdept_0
+ gdepw[:] = gdepw_0
+
+ grid_z = {'dept_1d':dept_1d,'e3t_1d':e3t_1d,'e3w_1d':e3w_1d,'e3t':e3t,'e3u':e3u,'e3v':e3v,'e3w':e3w,'e3f':e3f, \
+ 'e3uw':e3uw,'e3vw':e3vw,'gdept_0':gdept_0,'gdept':gdept,'gdepw_0':gdepw_0,'gdepw':gdepw}
+
+ return grid_z
+
+def write_coord_H(fileout, grid_h):
+ '''
+ Writes out a NEMO formatted coordinates file.
+
+ Args:
+ fileout (string): filename
+ lon[t/u/v/f](np.ndarray): longitude array at [t/u/v/f]-points (2D)
+ lat[t/u/v/f](np.ndarray): latitude array at [t/u/v/f]-points (2D)
+ e1[t/u/v/f] (np.ndarray): zonal scale factors at [t/u/v/f]-points
+ e2[t/u/v/f] (np.ndarray): meridional scale factors at [t/u/v/f]-points
+
+ Returns:
+ '''
+
+ # Open pointer to netcdf file
+ dataset = Dataset(fileout, 'w', format='NETCDF4_CLASSIC')
+
+ # Get input size and create appropriate dimensions
+ # TODO: add some sort of error handling
+ nx, ny = np.shape(grid_h['lont'])
+ nt = 1
+ dataset.createDimension('x', nx)
+ dataset.createDimension('y', ny)
+ dataset.createDimension('t', nt)
+
+ # Create Variables
+ nav_lon = dataset.createVariable('nav_lon', np.float32, ('y', 'x'))
+ nav_lat = dataset.createVariable('nav_lat', np.float32, ('y', 'x'))
+ time_counter = dataset.createVariable('time_counter',np.float32,('t'))
+
+ glamt = dataset.createVariable('glamt', np.float64, ('t','y', 'x'))
+ glamu = dataset.createVariable('glamu', np.float64, ('t','y', 'x'))
+ glamv = dataset.createVariable('glamv', np.float64, ('t','y', 'x'))
+ glamf = dataset.createVariable('glamf', np.float64, ('t','y', 'x'))
+ gphit = dataset.createVariable('gphit', np.float64, ('t','y', 'x'))
+ gphiu = dataset.createVariable('gphiu', np.float64, ('t','y', 'x'))
+ gphiv = dataset.createVariable('gphiv', np.float64, ('t','y', 'x'))
+ gphif = dataset.createVariable('gphif', np.float64, ('t','y', 'x'))
+
+ ge1t = dataset.createVariable('e1t', np.float64, ('t','y', 'x'))
+ ge1u = dataset.createVariable('e1u', np.float64, ('t','y', 'x'))
+ ge1v = dataset.createVariable('e1v', np.float64, ('t','y', 'x'))
+ ge1f = dataset.createVariable('e1f', np.float64, ('t','y', 'x'))
+ ge2t = dataset.createVariable('e2t', np.float64, ('t','y', 'x'))
+ ge2u = dataset.createVariable('e2u', np.float64, ('t','y', 'x'))
+ ge2v = dataset.createVariable('e2v', np.float64, ('t','y', 'x'))
+ ge2f = dataset.createVariable('e2f', np.float64, ('t','y', 'x'))
+
+ nav_lon.units, nav_lon.long_name = 'km', 'X'
+ nav_lat.units, nav_lat.long_name = 'km', 'Y'
+ time_counter.units, time_counter.long_name = 'seconds','time_counter'
+
+ # Populate file with input data
+ # TODO: do we need to transpose?
+ nav_lon[:, :] = grid_h['lont'].T
+ nav_lat[:, :] = grid_h['latt'].T
+
+ glamt[:, :] = grid_h['lont'].T
+ glamu[:, :] = grid_h['lonu'].T
+ glamv[:, :] = grid_h['lonv'].T
+ glamf[:, :] = grid_h['lonf'].T
+ gphit[:, :] = grid_h['latt'].T
+ gphiu[:, :] = grid_h['latu'].T
+ gphiv[:, :] = grid_h['latv'].T
+ gphif[:, :] = grid_h['latf'].T
+
+ ge1t[:, :] = grid_h['e1t'].T
+ ge1u[:, :] = grid_h['e1u'].T
+ ge1v[:, :] = grid_h['e1v'].T
+ ge1f[:, :] = grid_h['e1f'].T
+ ge2t[:, :] = grid_h['e2t'].T
+ ge2u[:, :] = grid_h['e2u'].T
+ ge2v[:, :] = grid_h['e2v'].T
+ ge2f[:, :] = grid_h['e2f'].T
+
+ # Close off pointer
+ dataset.close()
+
+ return 0
+
def write_coord_Z(fileout, grid_h,grid_z):
    '''
    Writes out a NEMO formatted coordinates file.

    Creates a NETCDF4_CLASSIC file holding the vertical grid: the 1D
    reference depths/scale factors plus the full 3D vertical scale factors
    and depths at each NEMO grid point type (t/u/v/f/w/uw/vw), together
    with nav_lon/nav_lat and an mbathy field.

    Args:
        fileout (str): path of the netCDF file to create
        grid_h (dict): horizontal grid fields; reads 'lont', 'latt', 'batt'
                       (2D arrays stored x-major — transposed on write)
        grid_z (dict): vertical grid fields; reads the 3D arrays 'e3t',
                       'e3u', 'e3v', 'e3f', 'e3w', 'e3uw', 'e3vw', 'gdept',
                       'gdepw' and the 1D arrays 'dept_1d', 'e3t_1d',
                       'e3w_1d', 'gdept_0', 'gdepw_0'

    Returns:
        int: 0 on success
    '''

    # Open pointer to netcdf file
    dataset = Dataset(fileout, 'w', format='NETCDF4_CLASSIC')

    # Get input size and create appropriate dimensions
    # (source arrays are (x, y, z); a degenerate time axis of 1 is added)
    # TODO: add some sort of error handling
    nx, ny, nz = np.shape(grid_z['e3t'])
    nt = 1
    dataset.createDimension('x', nx)
    dataset.createDimension('y', ny)
    dataset.createDimension('z', nz)
    dataset.createDimension('t', nt)


    # Create Variables
    nav_lon = dataset.createVariable('nav_lon', np.float32, ('y', 'x'))
    nav_lat = dataset.createVariable('nav_lat', np.float32, ('y', 'x'))
    nav_lev = dataset.createVariable('nav_lev', np.float32, 'z')
    time_counter = dataset.createVariable('time_counter', np.float32, ('t'))

    # 1D reference scale factors and depths at t/w points
    ge3t1d = dataset.createVariable('e3t_0', np.float64, ('t','z'))
    ge3w1d = dataset.createVariable('e3w_0', np.float64, ('t','z'))
    gdept_0 = dataset.createVariable('gdept_0', np.float64, ('t','z'))
    gdepw_0 = dataset.createVariable('gdepw_0', np.float64, ('t','z'))
    # NOTE(review): mbathy is written as float64 and filled from
    # grid_h['batt'] below — confirm whether an integer level index is wanted
    gbat = dataset.createVariable('mbathy', np.float64, ('t','y', 'x'))
    # full 3D vertical scale factors at each grid point type
    ge3t = dataset.createVariable('e3t', np.float64, ('t','z','y', 'x'))
    ge3u = dataset.createVariable('e3u', np.float64, ('t','z','y', 'x'))
    ge3v = dataset.createVariable('e3v', np.float64, ('t','z','y', 'x'))
    ge3w = dataset.createVariable('e3w', np.float64, ('t', 'z', 'y', 'x'))
    ge3f = dataset.createVariable('e3f', np.float64, ('t','z','y', 'x'))
    ge3uw = dataset.createVariable('e3uw', np.float64, ('t','z', 'y', 'x'))
    ge3vw = dataset.createVariable('e3vw', np.float64, ('t','z', 'y', 'x'))
    # full 3D depths at t and w points
    gdept = dataset.createVariable('gdept', np.float64, ('t','z', 'y', 'x'))
    gdepw = dataset.createVariable('gdepw', np.float64, ('t','z', 'y', 'x'))

    nav_lon.units, nav_lon.long_name = 'km', 'X'
    nav_lat.units, nav_lat.long_name = 'km', 'Y'
    nav_lev.units, nav_lev.long_name = 'm', 'Z'
    time_counter.units, time_counter.long_name = 'seconds', 'time_counter'

    # Populate file with input data
    # Source arrays are (x, y[, z]); transpose onto the (z,)y,x variables.
    # netCDF4 broadcasts each array across the degenerate leading t axis.
    nav_lon[:, :] = grid_h['lont'].T
    nav_lat[:, :] = grid_h['latt'].T
    nav_lev[:] = grid_z['dept_1d']

    ge3t[:, :, :] = grid_z['e3t'].T
    ge3u[:, :, :] = grid_z['e3u'].T
    ge3v[:, :, :] = grid_z['e3v'].T
    ge3f[:, :, :] = grid_z['e3f'].T
    ge3w[:, :, :] = grid_z['e3w'].T
    ge3uw[:, :, :] = grid_z['e3uw'].T
    ge3vw[:, :, :] = grid_z['e3vw'].T

    ge3t1d[:] = grid_z['e3t_1d']
    ge3w1d[:] = grid_z['e3w_1d']
    gbat[:,:] = grid_h['batt'].T
    gdept[:,:,:] = grid_z['gdept'].T
    gdept_0[:] = grid_z['gdept_0']
    gdepw[:,:,:] = grid_z['gdepw'].T
    gdepw_0[:] = grid_z['gdepw_0']


    # Close off pointer
    dataset.close()

    return 0
+
+# def write_domcfg(fileout, ln_zco, ln_zps, ln_sco, ln_isfcav, jperio, bat,
+# lont, latt, lonu, latu, lonv, latv, lonf, latf,
+# e1t, e2t, e1u, e2u, e1v, e2v, e1f, e2f, ff_f, ff_t,
+# dept_1d, e3t_1d, e3w_1d, e3t, e3u, e3v, e3f, e3w, e3uw, e3vw,
+# ktop, kbot):
+# '''
+# Writes out a NEMO formatted domcfg file.
+#
+# Args:
+# fileout (string): filename
+# ln_zco (logical): vertical coordinate flag [z-level]
+# ln_zps (logical): vertical coordinate flag [z-partial-step]
+# ln_sco (logical): vertical coordinate flag [sigma]
+# ln_isfcav (logical): ice cavity flag
+# jperio (int): domain type
+# bat (np.ndarray): bathymetry array at t-points (2D)
+# lon[t/u/v/f](np.ndarray): longitude array at [t/u/v/f]-points (2D)
+# lat[t/u/v/f](np.ndarray): latitude array at [t/u/v/f]-points (2D)
+# e1[t/u/v/f] (np.ndarray): zonal scale factors at [t/u/v/f]-points
+# e2[t/u/v/f] (np.ndarray): meridional scale factors at [t/u/v/f]-points
+# ff_[f/t] (np.ndarray): coriolis parameter at [t/f]-points
+# dept_1d (np.ndarray): 1D depth levels at t-points
+# e3[t/w]_1d (np.ndarray): 1D vertical scale factors at [t/w]-points
+# e3[t/u/v/f] (np.ndarray): vertcal scale factors at [t/u/v/f]-points
+# e3[w/uw/vw] (np.ndarray): vertcal scale factors at [w/uw/vw]-points
+# ktop (np.ndarray): upper most wet point
+# kbot (np.ndarray): lower most wet point
+#
+# Returns:
+# '''
+#
+# # Open pointer to netcdf file
+# dataset = Dataset(fileout, 'w', format='NETCDF4_CLASSIC')
+#
+# # Get input size and create appropriate dimensions
+# # TODO: add some sort of error handling
+# nx, ny, nz = np.shape(e3t)
+# dataset.createDimension('x', nx)
+# dataset.createDimension('y', ny)
+# dataset.createDimension('z', nz)
+#
+# # create Variables
+# nav_lon = dataset.createVariable('nav_lon', np.float32, ('y', 'x'))
+# nav_lat = dataset.createVariable('nav_lat', np.float32, ('y', 'x'))
+# nav_lev = dataset.createVariable('nav_lev', np.float32, 'z')
+#
+# giglo = dataset.createVariable('jpiglo', "i4")
+# gjglo = dataset.createVariable('jpjglo', "i4")
+# gkglo = dataset.createVariable('jpkglo', "i4")
+#
+# gperio = dataset.createVariable('jperio', "i4")
+#
+# gzco = dataset.createVariable('ln_zco', "i4")
+# gzps = dataset.createVariable('ln_zps', "i4")
+# gsco = dataset.createVariable('ln_sco', "i4")
+# gcav = dataset.createVariable('ln_isfcav', "i4")
+#
+# ge3t1d = dataset.createVariable('e3t_1d', np.float64, 'z')
+# ge3w1d = dataset.createVariable('e3w_1d', np.float64, 'z')
+# gitop = dataset.createVariable('top_level', "i4", ('y', 'x'))
+# gibot = dataset.createVariable('bottom_level', "i4", ('y', 'x'))
+# gbat = dataset.createVariable('Bathymetry', np.float64, ('y', 'x'))
+# glamt = dataset.createVariable('glamt', np.float64, ('y', 'x'))
+# glamu = dataset.createVariable('glamu', np.float64, ('y', 'x'))
+# glamv = dataset.createVariable('glamv', np.float64, ('y', 'x'))
+# glamf = dataset.createVariable('glamf', np.float64, ('y', 'x'))
+# gphit = dataset.createVariable('gphit', np.float64, ('y', 'x'))
+# gphiu = dataset.createVariable('gphiu', np.float64, ('y', 'x'))
+# gphiv = dataset.createVariable('gphiv', np.float64, ('y', 'x'))
+# gphif = dataset.createVariable('gphif', np.float64, ('y', 'x'))
+# ge1t = dataset.createVariable('e1t', np.float64, ('y', 'x'))
+# ge1u = dataset.createVariable('e1u', np.float64, ('y', 'x'))
+# ge1v = dataset.createVariable('e1v', np.float64, ('y', 'x'))
+# ge1f = dataset.createVariable('e1f', np.float64, ('y', 'x'))
+# ge2t = dataset.createVariable('e2t', np.float64, ('y', 'x'))
+# ge2u = dataset.createVariable('e2u', np.float64, ('y', 'x'))
+# ge2v = dataset.createVariable('e2v', np.float64, ('y', 'x'))
+# ge2f = dataset.createVariable('e2f', np.float64, ('y', 'x'))
+# gfff = dataset.createVariable('ff_f', np.float64, ('y', 'x'))
+# gfft = dataset.createVariable('ff_t', np.float64, ('y', 'x'))
+# ge3t = dataset.createVariable('e3t_0', np.float64, ('z', 'y', 'x'))
+# ge3w = dataset.createVariable('e3w_0', np.float64, ('z', 'y', 'x'))
+# ge3u = dataset.createVariable('e3u_0', np.float64, ('z', 'y', 'x'))
+# ge3v = dataset.createVariable('e3v_0', np.float64, ('z', 'y', 'x'))
+# ge3f = dataset.createVariable('e3f_0', np.float64, ('z', 'y', 'x'))
+# ge3uw = dataset.createVariable('e3uw_0', np.float64, ('z', 'y', 'x'))
+# ge3vw = dataset.createVariable('e3vw_0', np.float64, ('z', 'y', 'x'))
+#
+# nav_lon.units, nav_lon.long_name = 'km', 'X'
+# nav_lat.units, nav_lat.long_name = 'km', 'Y'
+#
+# # Populate file with input data
+# giglo[:] = nx
+# gjglo[:] = ny
+# gkglo[:] = nz
+#
+# gzco[:] = ln_zco
+# gzps[:] = ln_zps
+# gsco[:] = ln_sco
+# gcav[:] = ln_isfcav
+#
+# gperio[:] = jperio
+#
+# # TODO: do we need to transpose?
+# nav_lon[:, :] = lont.T
+# nav_lat[:, :] = latt.T
+# nav_lev[:] = dept_1d
+#
+# ge3t1d[:] = e3t_1d
+# ge3w1d[:] = e3w_1d
+#
+# gitop[:, :] = ktop.T
+# gibot[:, :] = kbot.T
+#
+# gbat[:, :] = bat.T
+#
+# glamt[:, :] = lont.T
+# glamu[:, :] = lonu.T
+# glamv[:, :] = lonv.T
+# glamf[:, :] = lonf.T
+# gphit[:, :] = latt.T
+# gphiu[:, :] = latu.T
+# gphiv[:, :] = latv.T
+# gphif[:, :] = latf.T
+#
+# ge1t[:, :] = e1t.T
+# ge1u[:, :] = e1u.T
+# ge1v[:, :] = e1v.T
+# ge1f[:, :] = e1f.T
+# ge2t[:, :] = e2t.T
+# ge2u[:, :] = e2u.T
+# ge2v[:, :] = e2v.T
+# ge2f[:, :] = e2f.T
+# gfff[:, :] = ff_f.T
+# gfft[:, :] = ff_t.T
+#
+# ge3t[:, :, :] = e3t.T
+# ge3w[:, :, :] = e3w.T
+# ge3u[:, :, :] = e3u.T
+# ge3v[:, :, :] = e3v.T
+# ge3f[:, :, :] = e3f.T
+# ge3uw[:, :, :] = e3uw.T
+# ge3vw[:, :, :] = e3vw.T
+#
+# # Close off pointer
+# dataset.close()
+
def rotate_around_point(lat_in, lon_in, radians, origin=(0, 0)):
    """Rotate a grid of lat/lon points around a given origin point.

    Translates every (lat, lon) pair by the origin, applies a standard 2D
    rotation matrix, then translates back. Replaces the previous O(n*m)
    Python double loop with one equivalent vectorised numpy expression.

    Args:
        lat_in (np.ndarray): 2D array of latitudes
        lon_in (np.ndarray): 2D array of longitudes (same shape as lat_in)
        radians (float): rotation angle in radians
        origin (tuple): (lat, lon) point to rotate around; default (0, 0)

    Returns:
        tuple: (new_lat, new_lon) rotated 2D arrays, same shape as inputs
    """
    offset_lat, offset_lon = origin
    cos_rad = cos(radians)
    sin_rad = sin(radians)
    # shift so the origin point is at (0, 0)
    adjusted_lat = np.asarray(lat_in) - offset_lat
    adjusted_lon = np.asarray(lon_in) - offset_lon
    # rotate and shift back (identical arithmetic to the former per-point loop)
    new_lat = offset_lat + cos_rad * adjusted_lat - sin_rad * adjusted_lon
    new_lon = offset_lon + sin_rad * adjusted_lat + cos_rad * adjusted_lon

    return new_lat, new_lon
+
def plot_grids(lat_in,lon_in,new_lat,new_lon,off_lat,off_lon,src_lat,src_lon):
    """
    Plot the child grids against the source grid for visual checking.

    Draws the original child grid (blue), the rotated child grid (red) and
    the offset child grid (yellow), each overlaid with the source grid
    points (green), on separate subplots of one figure. Cartopy map
    decoration is left commented out. Blocks until the figure window is
    closed (plt.show()).

    Args:
        lat_in, lon_in (np.ndarray): original child grid lat/lon (2D)
        new_lat, new_lon (np.ndarray): rotated child grid lat/lon (2D)
        off_lat, off_lon (np.ndarray): offset child grid lat/lon (2D)
        src_lat, src_lon (np.ndarray): source grid lat/lon

    Returns:
        None
    """

    # define lat and lon extents (define in future using input values?)
    #maxlat = 72
    #minlat = 32
    #maxlon = 18
    #minlon = -28

    plt.figure(figsize=[18, 18]) # a new figure window

    ax = plt.subplot(221)#, projection=ccrs.PlateCarree()) # specify (nrows, ncols, axnum)
    #ax.set_extent([minlon,maxlon,minlat,maxlat],crs=ccrs.PlateCarree()) #set extent
    #ax.set_title('Original Grid', fontsize=20,y=1.05) #set title
    #ax.add_feature(cartopy.feature.LAND, zorder=0) # add land polygon
    #ax.add_feature(cartopy.feature.COASTLINE, zorder=10) # add coastline polyline
    #gl = ax.gridlines(crs=ccrs.PlateCarree(), linewidth=2, color='black', alpha=0.5, linestyle='--', draw_labels=True)

    ax1 = plt.subplot(222)#, projection=ccrs.PlateCarree()) # specify (nrows, ncols, axnum)
    #ax1.set_extent([minlon,maxlon,minlat,maxlat],crs=ccrs.PlateCarree()) #set extent
    #ax1.set_title('Rotated Grid', fontsize=20,y=1.05) #set title
    #ax1.add_feature(cartopy.feature.LAND, zorder=0) # add land polygon
    #ax1.add_feature(cartopy.feature.COASTLINE, zorder=10) # add coastline polyline
    #gl1 = ax1.gridlines(crs=ccrs.PlateCarree(), linewidth=2, color='black', alpha=0.5, linestyle='--', draw_labels=True)

    # tile 1D lat and lon to 2D arrays for plotting (src lat and lon only)
    #src_lon = np.tile(src_lon, (np.shape(src_lat)[0], 1))
    #src_lat = np.tile(src_lat, (np.shape(src_lon)[1], 1))
    #src_lat = np.rot90(src_lat)

    # NOTE(review): ax3 is created but never plotted to — intentional spare?
    ax2 = plt.subplot(223)
    ax3 = plt.subplot(224)

    # plot lat and lon for all grids
    ax.plot(lon_in, lat_in, color='blue', marker='.', linestyle="")
    ax.plot(src_lon,src_lat, color='green', marker='o', linestyle="")
    ax1.plot(new_lon,new_lat, color='red', marker='.', linestyle="")
    ax1.plot(src_lon,src_lat, color='green',marker='o',linestyle="")
    ax2.plot(off_lon,off_lat, color='yellow',marker='.',linestyle="")
    ax2.plot(src_lon, src_lat, color='green', marker='o', linestyle="")
    # tweak margins of subplots as tight layout doesn't work for some reason?
    plt.subplots_adjust(left=0.01, right=1, top=0.9, bottom=0.05,wspace=0.01)

    plt.show()
+
def write_mask(fileout,grid_h,grid_z):
    '''
    Writes out a NEMO formatted mask file with every point set to ocean.

    Creates a NETCDF4_CLASSIC file containing 3D masks (fmask/tmask/umask/
    vmask) and their 2D surface counterparts (*maskutil), all filled with
    ones, plus nav_lon/nav_lat/nav_lev coordinate variables.

    Args:
        fileout (str): path of the netCDF file to create
        grid_h (dict): horizontal grid fields; reads 'lont', 'latt', 'e1t'
        grid_z (dict): vertical grid fields; reads 'e3t' and 'dept_1d'

    Returns:
        int: 0 on success
    '''
    # Open output file for writing
    dataset = Dataset(fileout, 'w', format='NETCDF4_CLASSIC')

    # Dimension sizes come from the 3D t-point scale factor array (x, y, z)
    # plus a degenerate time axis.
    # TODO: add some sort of error handling
    nt = 1
    nx, ny, nz = np.shape(grid_z['e3t'])
    for dim_name, dim_size in (('x', nx), ('y', ny), ('z', nz), ('t', nt)):
        dataset.createDimension(dim_name, dim_size)

    # Coordinate variables
    nav_lon = dataset.createVariable('nav_lon', np.float32, ('y', 'x'))
    nav_lat = dataset.createVariable('nav_lat', np.float32, ('y', 'x'))
    nav_lev = dataset.createVariable('nav_lev', np.float32, 'z')
    time_counter = dataset.createVariable('time_counter', np.float32, ('t'))

    # 2D surface masks then 3D masks, one per grid point type (f/t/u/v),
    # created in the same order as before
    util_masks = {}
    full_masks = {}
    for point in 'ftuv':
        util_masks[point] = dataset.createVariable(point + 'maskutil', np.float64, ('t', 'y', 'x'))
    for point in 'ftuv':
        full_masks[point] = dataset.createVariable(point + 'mask', np.float64, ('t', 'z', 'y', 'x'))

    nav_lon.units, nav_lon.long_name = 'km', 'X'
    nav_lat.units, nav_lat.long_name = 'km', 'Y'
    nav_lev.units, nav_lev.long_name = 'm', 'Z'
    time_counter.units, time_counter.long_name = 'seconds', 'time_counter'

    # Coordinates (source arrays are x-major, hence the transpose)
    nav_lon[:, :] = grid_h['lont'].T
    nav_lat[:, :] = grid_h['latt'].T
    nav_lev[:] = grid_z['dept_1d']

    # All-ocean masks: every point is set to one
    ocean_3d = np.ones(np.shape(grid_z['e3t']))
    ocean_2d = np.ones(np.shape(grid_h['e1t']))
    for point in 'ftuv':
        full_masks[point][:, :, :] = ocean_3d.T
        util_masks[point][:, :] = ocean_2d.T

    dataset.close()

    return 0
+
def write_parameter(fileout, grid_h,grid_z,params,grid):
    '''
    Writes out a source-data file of constant-valued 4D parameters.

    Creates '<fileout>_<GRID>.nc' with longitude/latitude/depth/time
    coordinates and one (time, z, y, x) variable per entry of `params`,
    each filled everywhere with that entry's constant value.

    Args:
        fileout (str): filename prefix; '_' + grid.upper() + '.nc' is appended
        grid_h (dict): horizontal grid fields; reads 'lont', 'latt'
        grid_z (dict): vertical grid fields; reads 'e3t' (for sizes) and 'dept_1d'
        params (dict): per-variable spec dicts with keys 'name', 'units',
                       'longname' and 'const_value'
        grid (str): grid identifier used in the output file name and the
                    depth variable name ('depth' + grid)

    Returns:
        int: 0 on success
    '''

    # Open pointer to netcdf file
    dataset = Dataset(fileout+'_'+grid.upper()+'.nc', 'w', format='NETCDF4_CLASSIC')

    # Get input size and create appropriate dimensions
    # NOTE(review): the time axis is hard-coded to 31 steps spaced 24 h
    # apart (see the linspace below) — presumably one month of daily data;
    # confirm against the unit-test time window.
    # TODO: add some sort of error handling
    nt = 31
    nx, ny, nz = np.shape(grid_z['e3t'])
    dataset.createDimension('x', nx)
    dataset.createDimension('y', ny)
    dataset.createDimension('z', nz)
    dataset.createDimension('time', nt)

    # Create Variables
    longitude = dataset.createVariable('longitude', np.float32, ('y', 'x'))
    latitude = dataset.createVariable('latitude', np.float32, ('y', 'x'))
    depth = dataset.createVariable('depth'+grid, np.float32, 'z')
    time_counter = dataset.createVariable('time', np.float32, ('time'))
    longitude.units, longitude.long_name = 'km', 'X'
    latitude.units, latitude.long_name = 'km', 'Y'
    depth.units, depth.long_name = 'm', 'Z'
    time_counter.units = 'hours since 1950-01-01 00:00:00'
    time_counter.long_name = 'Time (hours since 1950-01-01)'
    time_counter.axis = 'T'
    time_counter._CoordinateAxisType = "Time"
    time_counter.calendar = 'gregorian'
    time_counter.standard_name = 'time'
    # Populate file with input data (source arrays are x-major, hence .T)
    longitude[:, :] = grid_h['lont'].T
    latitude[:, :] = grid_h['latt'].T
    depth[:] = grid_z['dept_1d']
    # 31 values, 24 hours apart, starting at hour 587340 of the 1950 epoch
    time_counter[:] = np.linspace(587340.00,588060.00,31)
    # One 4D variable per requested parameter, filled with its constant
    # value; netCDF4 broadcasts the 3D field across the time dimension.
    for key in params:
        parameter = dataset.createVariable(str(params[key]['name']), np.float64, ('time','z', 'y', 'x'))
        parameter.units, parameter.long_name = str(params[key]['units']), str(params[key]['longname'])
        value_fill = np.ones(np.shape(grid_z['e3t']))
        value_fill = value_fill*params[key]['const_value']
        parameter[:, :, :] = value_fill.T

    # Close off pointer
    dataset.close()
    return 0
+
def write_bathy(fileout, grid_h, grid_z):
    '''
    Writes out a NEMO formatted bathymetry file with a flat bottom.

    Creates a NETCDF4_CLASSIC file containing nav_lon/nav_lat and a
    'Bathymetry' field set everywhere to the deepest 1D reference depth
    (grid_z['dept_1d'][-1]).

    Args:
        fileout (str): path of the netCDF file to create
        grid_h (dict): horizontal grid fields; reads 'e1t' (for sizes),
                       'lont' and 'latt' (2D, x-major — transposed on write)
        grid_z (dict): vertical grid fields; reads 'dept_1d'

    Returns:
        int: 0 on success
    '''
    # Open pointer to netcdf file
    dataset = Dataset(fileout, 'w', format='NETCDF4_CLASSIC')

    # Get input size and create appropriate dimensions
    # TODO: add some sort of error handling
    nx, ny = np.shape(grid_h['e1t'])
    dataset.createDimension('x', nx)
    dataset.createDimension('y', ny)


    # Create Variables
    nav_lon = dataset.createVariable('nav_lon', np.float32, ('y', 'x'))

    nav_lat = dataset.createVariable('nav_lat', np.float32, ('y', 'x'))
    nav_lon.units, nav_lon.long_name = 'km', 'X'
    nav_lat.units, nav_lat.long_name = 'km', 'Y'

    Bathymetry = dataset.createVariable('Bathymetry', np.float64,('y','x'))
    Bathymetry.units,Bathymetry.long_name = 'meters','Median depth by area'
    nav_lon[:, :] = grid_h['lont'].T
    nav_lat[:, :] = grid_h['latt'].T

    # Constant bathymetry at the deepest reference level, built on the
    # source (x, y) layout like the other grid fields
    Bathy = np.full((nx, ny), grid_z['dept_1d'][-1])

    # BUGFIX: transpose onto the ('y', 'x') variable, consistent with every
    # other writer in this module — without .T the assignment has shape
    # (nx, ny) against a (ny, nx) variable and fails whenever nx != ny
    Bathymetry[:, :] = Bathy.T

    dataset.close()
    return 0
\ No newline at end of file
diff --git a/pynemo/unit_tests/namelists/namelist_unit_test_offset.bdy b/pynemo/unit_tests/namelists/namelist_unit_test_offset.bdy
new file mode 100644
index 00000000..d50f053d
--- /dev/null
+++ b/pynemo/unit_tests/namelists/namelist_unit_test_offset.bdy
@@ -0,0 +1,109 @@
+!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+!! NEMO/OPA : namelist for BDY generation tool
+!!
+!! User inputs for generating open boundary conditions
+!! employed by the BDY module in NEMO. Boundary data
+!! can be set up for v3.2 NEMO and above.
+!!
+!! More info here.....
+!!
+!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+
+!------------------------------------------------------------------------------
+! vertical coordinate
+!------------------------------------------------------------------------------
+ ln_zco = .false. ! z-coordinate - full steps (T/F)
+ ln_zps = .true. ! z-coordinate - partial steps (T/F)
+ ln_sco = .false. ! s- or hybrid z-s-coordinate (T/F)
+ rn_hmin = -10 ! min depth of the ocean (>0) or
+ ! min number of ocean level (<0)
+
+!------------------------------------------------------------------------------
+! s-coordinate or hybrid z-s-coordinate
+!------------------------------------------------------------------------------
+ rn_sbot_min = 10. ! minimum depth of s-bottom surface (>0) (m)
+ rn_sbot_max = 7000. ! maximum depth of s-bottom surface
+ ! (= ocean depth) (>0) (m)
+ ln_s_sigma = .false. ! hybrid s-sigma coordinates
+ rn_hc = 150.0 ! critical depth with s-sigma
+
+!------------------------------------------------------------------------------
+! grid information
+!------------------------------------------------------------------------------
+ sn_src_hgr = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_src_hgr_zps.nc'
+ sn_src_zgr = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_src_zgr_zps.nc'
+ sn_dst_hgr = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_offset_dst_hgr_zps.nc'
+ sn_dst_zgr = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_offset_dst_zgr_zps.nc'
+ sn_src_msk = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/mask.nc'
+ sn_bathy = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_offset_dst_bathy.nc'
+
+!------------------------------------------------------------------------------
+! I/O
+!------------------------------------------------------------------------------
+ sn_src_dir = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/unit_tests.ncml' ! src_files/'
+ sn_dst_dir = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_outputs'
+ sn_fn = 'unit_test_offset' ! prefix for output files
+ nn_fv = -1e20 ! set fill value for output files
+ nn_src_time_adj = 0 ! src time adjustment
+ sn_dst_metainfo = 'Unit Test Data'
+
+!------------------------------------------------------------------------------
+! CMEMS Data Source Configuration
+!------------------------------------------------------------------------------
+ ln_use_cmems = .true.
+
+!------------------------------------------------------------------------------
+! unstructured open boundaries
+!------------------------------------------------------------------------------
+ ln_coords_file = .true. ! =T : produce bdy coordinates files
+ cn_coords_file = 'coordinates.bdy.nc' ! name of bdy coordinates files
+ ! (if ln_coords_file=.TRUE.)
+ ln_mask_file = .false. ! =T : read mask from file
+ cn_mask_file = 'mask.nc' ! name of mask file
+ ! (if ln_mask_file=.TRUE.)
+ ln_dyn2d = .true. ! boundary conditions for
+ ! barotropic fields
+ ln_dyn3d = .false. ! boundary conditions for
+ ! baroclinic velocities
+ ln_tra = .true. ! boundary conditions for T and S
+ ln_ice = .false. ! ice boundary condition
+ nn_rimwidth = 9 ! width of the relaxation zone
+
+!------------------------------------------------------------------------------
+! unstructured open boundaries tidal parameters
+!------------------------------------------------------------------------------
+ ln_tide = .false. ! =T : produce bdy tidal conditions
+ sn_tide_model = 'FES' ! Name of tidal model (FES|TPXO)
+ clname(1) = 'M2' ! constituent name
+ clname(2) = 'S2'
+ clname(3) = 'K2'
+ ln_trans = .true. ! interpolate transport rather than
+ ! velocities
+!------------------------------------------------------------------------------
+! Time information
+!------------------------------------------------------------------------------
+ nn_year_000 = 2017 ! year start
+ nn_year_end = 2017 ! year end
+ nn_month_000 = 01 ! month start (default = 1 if years>1)
+ nn_month_end = 01 ! month end (default = 12 if years>1)
+ sn_dst_calendar = 'gregorian' ! output calendar format
+ nn_base_year = 1960 ! base year for time counter
+ sn_tide_grid = './src_data/tide/grid_tpxo7.2.nc'
+ sn_tide_h = './src_data/tide/h_tpxo7.2.nc'
+ sn_tide_u = './src_data/tide/u_tpxo7.2.nc'
+
+!------------------------------------------------------------------------------
+! Additional parameters
+!------------------------------------------------------------------------------
+ nn_wei = 1 ! smoothing filter weights
+ rn_r0 = 0.041666666 ! decorrelation distance used in gauss
+ ! smoothing onto dst points. Need to
+ ! make this a funct. of dlon
+ sn_history = 'Benchmarking test case'
+ ! history for netcdf file
+ ln_nemo3p4 = .true. ! else presume v3.2 or v3.3
+ nn_alpha = 0 ! Euler rotation angle
+ nn_beta = 0 ! Euler rotation angle
+ nn_gamma = 0 ! Euler rotation angle
+ rn_mask_max_depth = 100.0 ! Maximum depth to be ignored for the mask
+ rn_mask_shelfbreak_dist = 20000.0 ! Distance from the shelf break
diff --git a/pynemo/unit_tests/namelists/namelist_unit_test_orth.bdy b/pynemo/unit_tests/namelists/namelist_unit_test_orth.bdy
new file mode 100644
index 00000000..ff4ac6c1
--- /dev/null
+++ b/pynemo/unit_tests/namelists/namelist_unit_test_orth.bdy
@@ -0,0 +1,109 @@
+!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+!! NEMO/OPA : namelist for BDY generation tool
+!!
+!! User inputs for generating open boundary conditions
+!! employed by the BDY module in NEMO. Boundary data
+!! can be set up for v3.2 NEMO and above.
+!!
+!! More info here.....
+!!
+!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+
+!------------------------------------------------------------------------------
+! vertical coordinate
+!------------------------------------------------------------------------------
+ ln_zco = .false. ! z-coordinate - full steps (T/F)
+ ln_zps = .true. ! z-coordinate - partial steps (T/F)
+ ln_sco = .false. ! s- or hybrid z-s-coordinate (T/F)
+ rn_hmin = -10 ! min depth of the ocean (>0) or
+ ! min number of ocean level (<0)
+
+!------------------------------------------------------------------------------
+! s-coordinate or hybrid z-s-coordinate
+!------------------------------------------------------------------------------
+ rn_sbot_min = 10. ! minimum depth of s-bottom surface (>0) (m)
+ rn_sbot_max = 7000. ! maximum depth of s-bottom surface
+ ! (= ocean depth) (>0) (m)
+ ln_s_sigma = .false. ! hybrid s-sigma coordinates
+ rn_hc = 150.0 ! critical depth with s-sigma
+
+!------------------------------------------------------------------------------
+! grid information
+!------------------------------------------------------------------------------
+ sn_src_hgr = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_src_hgr_zps.nc'
+ sn_src_zgr = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_src_zgr_zps.nc'
+ sn_dst_hgr = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_dst_hgr_zps.nc'
+ sn_dst_zgr = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_dst_zgr_zps.nc'
+ sn_src_msk = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/mask.nc'
+ sn_bathy = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_dst_bathy.nc'
+
+!------------------------------------------------------------------------------
+! I/O
+!------------------------------------------------------------------------------
+ sn_src_dir = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/unit_tests.ncml' ! src_files/'
+ sn_dst_dir = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_outputs'
+ sn_fn = 'unit_test_orth' ! prefix for output files
+ nn_fv = -1e20 ! set fill value for output files
+ nn_src_time_adj = 0 ! src time adjustment
+ sn_dst_metainfo = 'Unit Test Data'
+
+!------------------------------------------------------------------------------
+! CMEMS Data Source Configuration
+!------------------------------------------------------------------------------
+ ln_use_cmems = .true.
+
+!------------------------------------------------------------------------------
+! unstructured open boundaries
+!------------------------------------------------------------------------------
+ ln_coords_file = .true. ! =T : produce bdy coordinates files
+ cn_coords_file = 'coordinates.bdy.nc' ! name of bdy coordinates files
+ ! (if ln_coords_file=.TRUE.)
+ ln_mask_file = .false. ! =T : read mask from file
+ cn_mask_file = 'mask.nc' ! name of mask file
+ ! (if ln_mask_file=.TRUE.)
+ ln_dyn2d = .true. ! boundary conditions for
+ ! barotropic fields
+ ln_dyn3d = .false. ! boundary conditions for
+ ! baroclinic velocities
+ ln_tra = .true. ! boundary conditions for T and S
+ ln_ice = .false. ! ice boundary condition
+ nn_rimwidth = 9 ! width of the relaxation zone
+
+!------------------------------------------------------------------------------
+! unstructured open boundaries tidal parameters
+!------------------------------------------------------------------------------
+ ln_tide = .false. ! =T : produce bdy tidal conditions
+ sn_tide_model = 'FES' ! Name of tidal model (FES|TPXO)
+ clname(1) = 'M2' ! constituent name
+ clname(2) = 'S2'
+ clname(3) = 'K2'
+ ln_trans = .true. ! interpolate transport rather than
+ ! velocities
+!------------------------------------------------------------------------------
+! Time information
+!------------------------------------------------------------------------------
+ nn_year_000 = 2017 ! year start
+ nn_year_end = 2017 ! year end
+ nn_month_000 = 01 ! month start (default = 1 if years>1)
+ nn_month_end = 01 ! month end (default = 12 if years>1)
+ sn_dst_calendar = 'gregorian' ! output calendar format
+ nn_base_year = 1960 ! base year for time counter
+ sn_tide_grid = './src_data/tide/grid_tpxo7.2.nc'
+ sn_tide_h = './src_data/tide/h_tpxo7.2.nc'
+ sn_tide_u = './src_data/tide/u_tpxo7.2.nc'
+
+!------------------------------------------------------------------------------
+! Additional parameters
+!------------------------------------------------------------------------------
+ nn_wei = 1 ! smoothing filter weights
+ rn_r0 = 0.041666666 ! decorrelation distance used in gauss
+ ! smoothing onto dst points. Need to
+ ! make this a funct. of dlon
+ sn_history = 'Benchmarking test case'
+ ! history for netcdf file
+ ln_nemo3p4 = .true. ! else presume v3.2 or v3.3
+ nn_alpha = 0 ! Euler rotation angle
+ nn_beta = 0 ! Euler rotation angle
+ nn_gamma = 0 ! Euler rotation angle
+ rn_mask_max_depth = 100.0 ! Maximum depth to be ignored for the mask
+ rn_mask_shelfbreak_dist = 20000.0 ! Distance from the shelf break
diff --git a/pynemo/unit_tests/namelists/namelist_unit_test_rotated.bdy b/pynemo/unit_tests/namelists/namelist_unit_test_rotated.bdy
new file mode 100644
index 00000000..baa850d5
--- /dev/null
+++ b/pynemo/unit_tests/namelists/namelist_unit_test_rotated.bdy
@@ -0,0 +1,109 @@
+!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+!! NEMO/OPA : namelist for BDY generation tool
+!!
+!! User inputs for generating open boundary conditions
+!! employed by the BDY module in NEMO. Boundary data
+!! can be set up for v3.2 NEMO and above.
+!!
+!! More info here.....
+!!
+!!>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
+
+!------------------------------------------------------------------------------
+! vertical coordinate
+!------------------------------------------------------------------------------
+ ln_zco = .false. ! z-coordinate - full steps (T/F)
+ ln_zps = .true. ! z-coordinate - partial steps (T/F)
+ ln_sco = .false. ! s- or hybrid z-s-coordinate (T/F)
+ rn_hmin = -10 ! min depth of the ocean (>0) or
+ ! min number of ocean level (<0)
+
+!------------------------------------------------------------------------------
+! s-coordinate or hybrid z-s-coordinate
+!------------------------------------------------------------------------------
+ rn_sbot_min = 10. ! minimum depth of s-bottom surface (>0) (m)
+ rn_sbot_max = 7000. ! maximum depth of s-bottom surface
+ ! (= ocean depth) (>0) (m)
+ ln_s_sigma = .false. ! hybrid s-sigma coordinates
+ rn_hc = 150.0 ! critical depth with s-sigma
+
+!------------------------------------------------------------------------------
+! grid information
+!------------------------------------------------------------------------------
+ sn_src_hgr = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_src_hgr_zps.nc'
+ sn_src_zgr = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_src_zgr_zps.nc'
+ sn_dst_hgr = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_rot_dst_hgr_zps.nc'
+ sn_dst_zgr = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_rot_dst_zgr_zps.nc'
+ sn_src_msk = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/mask.nc'
+ sn_bathy = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_inputs/test_rot_dst_bathy.nc'
+
+!------------------------------------------------------------------------------
+! I/O
+!------------------------------------------------------------------------------
+ sn_src_dir = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/unit_tests.ncml' ! src_files/'
+ sn_dst_dir = '/Users/thopri/Projects/PyNEMO/pynemo/unit_tests/test_outputs'
+ sn_fn = 'unit_test_rotated' ! prefix for output files
+ nn_fv = -1e20 ! set fill value for output files
+ nn_src_time_adj = 0 ! src time adjustment
+ sn_dst_metainfo = 'Unit Test Data'
+
+!------------------------------------------------------------------------------
+! CMEMS Data Source Configuration
+!------------------------------------------------------------------------------
+ ln_use_cmems = .true.
+
+!------------------------------------------------------------------------------
+! unstructured open boundaries
+!------------------------------------------------------------------------------
+ ln_coords_file = .true. ! =T : produce bdy coordinates files
+ cn_coords_file = 'coordinates.bdy.nc' ! name of bdy coordinates files
+ ! (if ln_coords_file=.TRUE.)
+ ln_mask_file = .false. ! =T : read mask from file
+ cn_mask_file = 'mask.nc' ! name of mask file
+ ! (if ln_mask_file=.TRUE.)
+ ln_dyn2d = .true. ! boundary conditions for
+ ! barotropic fields
+ ln_dyn3d = .false. ! boundary conditions for
+ ! baroclinic velocities
+ ln_tra = .true. ! boundary conditions for T and S
+ ln_ice = .false. ! ice boundary condition
+ nn_rimwidth = 9 ! width of the relaxation zone
+
+!------------------------------------------------------------------------------
+! unstructured open boundaries tidal parameters
+!------------------------------------------------------------------------------
+ ln_tide = .false. ! =T : produce bdy tidal conditions
+ sn_tide_model = 'FES' ! Name of tidal model (FES|TPXO)
+ clname(1) = 'M2' ! constituent name
+ clname(2) = 'S2'
+ clname(3) = 'K2'
+ ln_trans = .true. ! interpolate transport rather than
+ ! velocities
+!------------------------------------------------------------------------------
+! Time information
+!------------------------------------------------------------------------------
+ nn_year_000 = 2017 ! year start
+ nn_year_end = 2017 ! year end
+ nn_month_000 = 01 ! month start (default = 1 if years>1)
+ nn_month_end = 01 ! month end (default = 12 if years>1)
+ sn_dst_calendar = 'gregorian' ! output calendar format
+ nn_base_year = 1960 ! base year for time counter
+ sn_tide_grid = './src_data/tide/grid_tpxo7.2.nc'
+ sn_tide_h = './src_data/tide/h_tpxo7.2.nc'
+ sn_tide_u = './src_data/tide/u_tpxo7.2.nc'
+
+!------------------------------------------------------------------------------
+! Additional parameters
+!------------------------------------------------------------------------------
+ nn_wei = 1 ! smoothing filter weights
+ rn_r0 = 0.041666666 ! decorrelation distance used in gauss
+ ! smoothing onto dst points. Need to
+ ! make this a funct. of dlon
+ sn_history = 'Benchmarking test case'
+ ! history for netcdf file
+ ln_nemo3p4 = .true. ! else presume v3.2 or v3.3
+ nn_alpha = 0 ! Euler rotation angle
+ nn_beta = 0 ! Euler rotation angle
+ nn_gamma = 0 ! Euler rotation angle
+ rn_mask_max_depth = 100.0 ! Maximum depth to be ignored for the mask
+ rn_mask_shelfbreak_dist = 20000.0 ! Distance from the shelf break
diff --git a/pynemo/unit_tests/test_gen.py b/pynemo/unit_tests/test_gen.py
new file mode 100644
index 00000000..af2bc3f2
--- /dev/null
+++ b/pynemo/unit_tests/test_gen.py
@@ -0,0 +1,134 @@
+"""
+Source: https://gist.github.com/LyleScott/d17e9d314fbe6fc29767d8c5c029c362
+
+Adapted function to rotate a NEMO grid for unit testing. The rot variable defines how much rotation
+is applied; the results are then plotted in a figure showing the original (blue points) and rotated (red) grids.
+The source coordinate grid is also plotted (green).
+
+"""
+from pynemo.unit_tests import gen_tools as gt
+from pynemo.unit_tests import UT_config as config
+
+# TODO: organise the variables better, (maybe in a single dict or class?)
+
+def _main():
+ #define directory for test input data for PyNEMO
+
+ #Source Coords
+ dx = 1000 # units in km
+ dy = 1000 # units in Km
+ jpi = 16
+ jpj = 16
+ jpk = 10
+ max_dep = 100
+ min_dep = 10
+ z_end_dim = 1
+ h_fname = config.input_dir+'test_src_hgr_zps.nc'
+ z_fname = config.input_dir+'test_src_zgr_zps.nc'
+ grid_h1 = gt.set_hgrid(dx,dy,jpi,jpj)
+ grid_z1 = gt.set_zgrid(grid_h1,jpk,max_dep,min_dep,z_end_dim)
+ write_coord_H = gt.write_coord_H(h_fname,grid_h1)
+ write_coord_Z = gt.write_coord_Z(z_fname,grid_h1,grid_z1)
+ if write_coord_H + write_coord_Z == 0:
+ print("Parent grid generation successful!")
+
+ #Dst Coords
+ dx = 100 # units in km
+ dy = 100 # units in Km
+ jpi = 100
+ jpj = 100
+ zoffx = 30
+ zoffy = 30
+ jpk = 10
+ max_dep = 100
+ min_dep = 10
+ z_end_dim = 1
+ sf = 10
+ h_fname = config.input_dir+'test_dst_hgr_zps.nc'
+ z_fname = config.input_dir+'test_dst_zgr_zps.nc'
+ grid_h2 = gt.set_hgrid(dx,dy,jpi,jpj,zoffx,zoffy,sf)
+ grid_z2 = gt.set_zgrid(grid_h2,jpk,max_dep,min_dep,z_end_dim)
+ write_coord_H = gt.write_coord_H(h_fname,grid_h2)
+ write_coord_Z = gt.write_coord_Z(z_fname,grid_h2,grid_z2)
+ # write bathy files (constant bathy)
+ bathy_fname = config.input_dir+'test_dst_bathy.nc'
+ bathy = gt.write_bathy(bathy_fname,grid_h2,grid_z2)
+ if write_coord_H + write_coord_Z + bathy == 0:
+ print("Org child grid generation successful!")
+
+ # set rotation and origin point
+ rot = 45
+ theta = gt.radians(rot)
+ origin = (8,8)
+
+ # rotate grid
+ rot_h_fname = config.input_dir+'test_rot_dst_hgr_zps.nc'
+ rot_z_fname = config.input_dir+'test_rot_dst_zgr_zps.nc'
+ grid_rot = grid_h2.copy()
+ grid_rot['latt'], grid_rot['lont'] = gt.rotate_around_point(grid_h2['latt'],grid_h2['lont'],theta,origin)
+ grid_rot['latu'], grid_rot['lonu'] = gt.rotate_around_point(grid_h2['latu'], grid_h2['lonu'], theta, origin)
+ grid_rot['latv'], grid_rot['lonv'] = gt.rotate_around_point(grid_h2['latv'], grid_h2['lonv'], theta, origin)
+ grid_rot['latf'], grid_rot['lonf'] = gt.rotate_around_point(grid_h2['latf'], grid_h2['lonf'], theta, origin)
+ write_coord_H = gt.write_coord_H(rot_h_fname,grid_rot)
+ write_coord_Z = gt.write_coord_Z(rot_z_fname,grid_rot,grid_z2)
+ # write bathy files (constant bathy)
+ bathy_fname = config.input_dir+'test_rot_dst_bathy.nc'
+ bathy = gt.write_bathy(bathy_fname,grid_rot,grid_z2)
+ if write_coord_H + write_coord_Z + bathy == 0:
+ print("Rotated child grid generation Successful!")
+
+ # offset grid
+ dx = 100 # units in km
+ dy = 100 # units in Km
+ jpi = 100
+ jpj = 100
+ zoffx = 35
+ zoffy = 35
+ jpk = 10
+ max_dep = 100
+ min_dep = 10
+ z_end_dim = 1
+ sf = 10
+ h_fname = config.input_dir+'test_offset_dst_hgr_zps.nc'
+ z_fname = config.input_dir+'test_offset_dst_zgr_zps.nc'
+ grid_h3 = gt.set_hgrid(dx,dy,jpi,jpj,zoffx,zoffy,sf)
+ grid_z3 = gt.set_zgrid(grid_h2,jpk,max_dep,min_dep,z_end_dim)
+ write_coord_H = gt.write_coord_H(h_fname,grid_h3)
+ write_coord_Z = gt.write_coord_Z(z_fname,grid_h3,grid_z3)
+ # write bathy files (constant bathy)
+ bathy_fname = config.input_dir+'test_offset_dst_bathy.nc'
+ bathy = gt.write_bathy(bathy_fname,grid_h3,grid_z3)
+ if write_coord_H + write_coord_Z + bathy == 0:
+ print("Offset child grid gneration successful!")
+ if config.plot_grids == True:
+ #plot orginal, rotatated and source lat and lon
+ gt.plot_grids(grid_h2['latt'],grid_h2['lont'],grid_rot['latt'],grid_rot['lont'],grid_h3['latt'], \
+ grid_h3['lont'],grid_h1['latt'],grid_h1['lont'])
+
+ # write boundary files (constant parameters)
+ out_fname = config.input_dir+'output_boundary' #drop file extension
+ params_t = {'param1': {'name':'thetao','const_value':15.0,'longname':'temperature','units':'degreesC'},
+ 'param2': {'name':'so','const_value':35.0,'longname':'salinity','units':'PSU'},
+ 'param3': {'name': 'zos', 'const_value': 1.0, 'longname': 'sea surface height', 'units': 'metres'}
+ }
+ params_u = {'param1': {'name':'uo','const_value':0.5,'longname':'Zonal current','units':'ms-1'}
+ }
+ params_v = {'param1': {'name':'vo','const_value':0.5,'longname':'Meridional current','units':'ms-1'}
+ }
+ # Define Grids using lowercase character string
+ boundary_T = gt.write_parameter(out_fname,grid_h1,grid_z1,params_t,'t')
+ boundary_U = gt.write_parameter(out_fname, grid_h1, grid_z1, params_u, 'u')
+ boundary_V = gt.write_parameter(out_fname, grid_h1, grid_z1, params_v, 'v')
+ if boundary_T + boundary_U + boundary_V == 0:
+ print('Boundary file generation successful!')
+
+ #write_mask
+ mask_fname = config.input_dir+'mask.nc'
+ mask = gt.write_mask(mask_fname,grid_h1,grid_z1)
+ if mask == 0:
+ print('Mask file generation successful!')
+
+ return 0
+
+if __name__ == '__main__':
+ _main()
\ No newline at end of file
diff --git a/pynemo/unit_tests/unit_tests.ncml b/pynemo/unit_tests/unit_tests.ncml
new file mode 100644
index 00000000..c03b1740
--- /dev/null
+++ b/pynemo/unit_tests/unit_tests.ncml
@@ -0,0 +1,28 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/pynemo/utils/cmems_errors.py b/pynemo/utils/cmems_errors.py
new file mode 100644
index 00000000..89a2cca4
--- /dev/null
+++ b/pynemo/utils/cmems_errors.py
@@ -0,0 +1,17 @@
+'''
+This file contains all the logged errors from the MOTU client; these are added over time. The MOTU client is prone to errors
+that may only need a restart to resolve. To add an error, the key needs to be the error code returned by MOTU,
+which is usually a number or set of numbers. The dictionary entry is an explanation of the error. Only the key is checked,
+so make sure it is written correctly.
+'''
+
+# errors that are worth retrying the download for, e.g. error in NetcdfWriter finish
+MOTU_retry = {'004-27': 'Error in NetcdfWriter finish',
+ 'Errno 60': '[Errno 60] Operation timed out',
+ 'Excp 11': 'Execution failed: [Excp 11] "Dataset retrival incomplete.'
+ }
+# errors that are not worth retrying e.g. cmems network is down
+MOTU_critical = {'Errno 50': 'Network Down'}
+# FTP specific errors
+FTP_retry = {'999': 'add ftp retry errors here' }
+FTP_critical = {'999': 'add ftp critical errors here' }
\ No newline at end of file
diff --git a/pynemo_37.yml b/pynemo_37.yml
new file mode 100644
index 00000000..cc68a4f9
--- /dev/null
+++ b/pynemo_37.yml
@@ -0,0 +1,27 @@
+name: pynemo3
+channels:
+ - defaults
+dependencies:
+ - matplotlib=3.2
+ - basemap=1.2.0
+ - netcdf4=1.5.3
+ - pyqt=5.9.2
+ - scipy=1.2.1
+ - python=3.7.6
+ - pip=20.0.2
+ - pandas=1.0.1
+ - pytest=5.3.5
+ - xarray=0.15.0
+ - xlsxwriter=1.2.8
+ - xmltodict=0.12.0
+ - cftime=1.4.1
+ - pip:
+ - idna==2.9
+ - lxml==4.5.0
+ - pyjnius==1.2.1
+ - seawater==3.3.4
+ - thredds-crawler==1.5.4
+ - motuclient==1.8.4
+ - sphinx==3.0.2
+ - sphinx-rtd-theme==0.4.3
+ - yaspin==0.16.0
diff --git a/pynemo_stats b/pynemo_stats
new file mode 100644
index 00000000..a8981850
Binary files /dev/null and b/pynemo_stats differ
diff --git a/setup.py b/setup.py
index bdc2c36a..5318e949 100644
--- a/setup.py
+++ b/setup.py
@@ -38,7 +38,7 @@
keywords='Oceanography, NEMO',
- packages=['pynemo','pynemo.tests','pynemo.gui','pynemo.utils','pynemo.tide','pynemo.reader'],
+ packages=['pynemo','pynemo.tests','pynemo.gui','pynemo.utils','pynemo.tide','pynemo.reader','pynemo.unit_tests'],
install_requires=['netCDF4>=1.1.9','scipy','numpy','matplotlib', 'basemap', 'thredds_crawler', 'seawater'],
diff --git a/test_scripts/bdy_coords_plot.py b/test_scripts/bdy_coords_plot.py
index 9647cd5c..e4f50041 100755
--- a/test_scripts/bdy_coords_plot.py
+++ b/test_scripts/bdy_coords_plot.py
@@ -15,7 +15,7 @@
from pynemo.tests import bdy_coords as bdc
-bdc.process_bdy('/Users/thopri/Projects/PyNEMO/inputs/namelist_remote.bdy',False)
+bdc.process_bdy('/inputs/namelist_cmems.bdy', False)
rootgrp = Dataset('/Users/thopri/Projects/PyNEMO/outputs/NNA_R12_bdyT_y1979m11.nc', "r", format="NETCDF4")
bdy_msk = np.squeeze(rootgrp.variables['bdy_msk'][:]) - 1
diff --git a/test_scripts/bdy_var_plot.py b/test_scripts/bdy_var_plot.py
index cfc47308..0d8bae13 100644
--- a/test_scripts/bdy_var_plot.py
+++ b/test_scripts/bdy_var_plot.py
@@ -12,11 +12,14 @@
def nemo_bdy_order(fname):
"""
Determine the ordering and breaks in BDY files to aid plotting.
+
This function takes the i/j coordinates from BDY files and orders them sequentially
making it easier to visualise sections along the open boundary. Breaks in the open
boundary are also determined (i.e. where the distance between two points > 2**0.5)
+
Args:
fname (str) : filename of BDY file
+
Returns:
bdy_ind (dict): re-ordered indices
bdy_dst (dict): distance (in model coords) between points
@@ -58,13 +61,13 @@ def nemo_bdy_order(fname):
id_order[0,] = 0
flag = False
mark = 0
- source_tree = sp.cKDTree(zip(nbi_tmp, nbj_tmp), balanced_tree=False, compact_nodes=False)
+ source_tree = sp.cKDTree(list(zip(nbi_tmp, nbj_tmp)), balanced_tree=False, compact_nodes=False)
# order bdy entries
while count < nbdy[r]:
- nn_dist, nn_id = source_tree.query(zip(nbi_tmp[id_order[count - 1]], nbj_tmp[id_order[count - 1]]),
+ nn_dist, nn_id = source_tree.query(list(zip(nbi_tmp[id_order[count - 1]], nbj_tmp[id_order[count - 1]])),
k=3, distance_upper_bound=2.9)
if np.sum(id_order == nn_id[0, 1]) == 1: # is the nearest point already in the list?
if np.sum(id_order == nn_id[0, 2]) == 1: # is the 2nd nearest point already in the list?
@@ -123,11 +126,14 @@ def nemo_bdy_order(fname):
def plot_bdy(fname, bdy_ind, bdy_dst, bdy_brk, varnam, t, rw):
"""
Determine the ordering and breaks in BDY files to aid plotting.
+
This function takes the i/j coordinates from BDY files and orders them sequentially
making it easier to visualise sections along the open boundary. Breaks in the open
boundary are also determined (i.e. where the distance between two points > 2**0.5)
+
Args:
fname (str) : filename of BDY file
+
Returns:
bdy_ind (dict): re-ordered indices
bdy_dst (dict): distance (in model coords) between points
@@ -155,7 +161,7 @@ def plot_bdy(fname, bdy_ind, bdy_dst, bdy_brk, varnam, t, rw):
try:
gdep = np.squeeze(rootgrp.variables['depthv'][:, :, :])
except KeyError:
- print 'depth variable not found'
+ print ('depth variable not found')
rootgrp.close()
@@ -201,19 +207,19 @@ def plot_bdy(fname, bdy_ind, bdy_dst, bdy_brk, varnam, t, rw):
# create a pseudo bathymetry from the depth data
- bathy = np.zeros_like(coords)
- mbath = np.sum(dta[n].mask == 0, axis=0)
+ #bathy = np.zeros_like(coords)
+ #mbath = np.sum(dta[n].mask == 0, axis=0)
- for i in range(len(coords)):
- bathy[i] = gdepw[mbath[i], i]
+ #for i in range(len(coords)):
+ # bathy[i] = gdepw[mbath[i], i]
- bathy_patch = Polygon(np.vstack((np.hstack((coords[0], coords, coords[-1])),
- np.hstack((np.amax(bathy[:]), bathy, np.amax(bathy[:]))))).T,
- closed=True,
- facecolor=(0.8, 0.8, 0), alpha=0, edgecolor=None)
+ #bathy_patch = Polygon(np.vstack((np.hstack((coords[0], coords, coords[-1])),
+ # np.hstack((np.amax(bathy[:]), bathy, np.amax(bathy[:]))))).T,
+ # closed=True,
+ # facecolor=(0.8, 0.8, 0), alpha=0, edgecolor=None)
# Add patch to axes
- ax[n].add_patch(bathy_patch)
+ #ax[n].add_patch(bathy_patch)
ax[n].set_title('BDY points along section: ' + str(n))
patches = []
colors = []
@@ -242,18 +248,19 @@ def plot_bdy(fname, bdy_ind, bdy_dst, bdy_brk, varnam, t, rw):
# plt.plot(x, y, 'k-', linewidth=0.1)
# plt.plot(coords[i], gdept[k, i], 'k.', markersize=1)
- plt.plot(coords, bathy, '-', color=(0.4, 0, 0))
+ #plt.plot(coords, bathy, '-', color=(0.4, 0, 0))
p = PatchCollection(patches, alpha=0.8, edgecolor='none')
p.set_array(np.array(colors))
ax[n].add_collection(p)
f.colorbar(p, ax=ax[n])
- ax[n].set_ylim((0, np.max(bathy)))
+ #ax[n].set_ylim((0, np.max(bathy)))
ax[n].invert_yaxis()
return f
-fname = '/Users/thopri/Projects/PyNEMO/outputs/NNA_R12_bdyT_y1979m11.nc'
+fname = '/Users/thopri/Projects/PyNEMO/outputs/NNA_R12_bdyT_y2017m01.nc'
+print(fname)
ind, dst, brk = nemo_bdy_order(fname)
-f = plot_bdy(fname, ind, dst, brk, 'votemper', 0, 0)
+f = plot_bdy(fname, ind, dst, brk, 'thetao', 0, 0)
plt.show()
\ No newline at end of file
diff --git a/test_scripts/jars/netcdfAll-4.6.jar b/test_scripts/jars/netcdfAll-4.6.jar
new file mode 100644
index 00000000..6b067fe3
Binary files /dev/null and b/test_scripts/jars/netcdfAll-4.6.jar differ
diff --git a/test_scripts/meta_data.py b/test_scripts/meta_data.py
new file mode 100644
index 00000000..0c74653f
--- /dev/null
+++ b/test_scripts/meta_data.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+"""
+Script to inspect a list of netCDF datasets and categorise their variable and dimension names via regex matching.
+
+"""
+
+from netCDF4 import Dataset
+import re
+# list of datasets to check
+datasets = [ "/Users/thopri/Projects/PyNEMO/inputs/subset_2017-01-01_2017-01-31_T.nc",
+ "/Users/thopri/Projects/PyNEMO/inputs/subset_2017-01-01_2017-01-31_U.nc",
+ "/Users/thopri/Projects/PyNEMO/inputs/subset_2017-01-01_2017-01-31_V.nc",
+ "http://opendap4gws.jasmin.ac.uk/thredds/noc_msm/dodsC/pynemo_data/ORCA025-N206_19791101d05T.nc",
+ "http://opendap4gws.jasmin.ac.uk/thredds/noc_msm/dodsC/pynemo_data/ORCA025-N206_19791101d05U.nc",
+ "http://opendap4gws.jasmin.ac.uk/thredds/noc_msm/dodsC/pynemo_data/ORCA025-N206_19791101d05V.nc",
+ "http://opendap4gws.jasmin.ac.uk/thredds/noc_msm/dodsC/pynemo_data/ORCA025-N206_19791101d05I.nc"
+ ]
+
+# list of strings to categorise variable names... each entry can have multiple entries.
+# NOTE: the list in each dict entry is in a specific order where long names go first, e.g. latitude is before lat. This is to
+# stop errors in selecting names, e.g. lat would also be valid for latitude and nav_lat so trying lat needs to be after those options.
+# DOUBLE NOTE: the order of dicts in the check list is also important as it can result in false IDs, e.g. ice data for some reason has long name
+# sea surface height, so if SSH is in the dict before the ice variables it will assign ice variable names to SSH.
+
+chk_list = {'temperature': ['temp'],
+ 'salinity': ['sal'],
+ 'ice_thic': ['icethic'],
+ 'snow_thic': ['snowthi'],
+ 'ileadfra': ['leadfra'],
+ 'SSH': ['surface', 'sea'],
+ 'depth': ['depth'],
+ 'time': ['time', 'counter'],
+ 'Ucomponent': ['zonal', 'current', 'eastward', 'uo'],
+ 'Vcomponent': ['meridional', 'current', 'northward', 'vo'],
+ 'windstress-i': ['i-axis'],
+ 'windstress-j': ['j-axis'],
+ 'latitude': ['latitude', 'nav_lat','lat','y'],
+ 'longitude': ['longitude','nav_lon','lon','x'],
+ }
+
+# function that uses regex to find if a string is in the variable name or, if not, checks the long name. Case is ignored.
+# AttributeErrors are common because long_name does not exist in some datasets; at the moment this error is passed (maybe log?)
+def data_chk(data, str, key):
+ for i in range(len(str)):
+ try:
+ chk = re.search(str[i], data[key].name, re.IGNORECASE)
+ if chk is not None:
+ return chk
+ if chk is None:
+ chk = re.search(str[i], data[key].long_name, re.IGNORECASE)
+ if chk is not None:
+ return chk
+ except AttributeError:
+ pass
+
+
+i = 0
+meta_dataset = {}
+
+for dat in datasets:
+ # open netcdf dataset
+ F = Dataset(dat)
+ # extract variable meta data and dimension meta data
+ meta = F.variables
+ dims = F.dimensions
+    # create empty dict to save categorised data
+ meta_dataset['dataset'+str(i+1)] = {}
+ meta_dataset['dataset'+str(i+1)]['var_names'] = {}
+ meta_dataset['dataset'+str(i+1)]['dim_names'] = {}
+
+ # for all variable names, compare strings on chk list and write key to meta dict on first match
+ for key in meta:
+ for chk_key,chk in chk_list.items():
+ var_match = data_chk(meta,chk,key)
+ if var_match is not None:
+ meta_dataset['dataset'+str(i+1)]['var_names'][chk_key] = key
+ break
+ if len(meta_dataset['dataset'+str(i+1)]['var_names']) != len(meta):
+ print('not all variables matched for dataset '+str(i+1))
+
+ # for all dimension names, compare strings on chk list and write key to meta dict on first match
+ for key in dims:
+ for chk_key,chk in chk_list.items():
+ dim_match = data_chk(dims,chk,key)
+ if dim_match is not None:
+ meta_dataset['dataset'+str(i+1)]['dim_names'][chk_key] = key
+ break
+ if len(meta_dataset['dataset'+str(i+1)]['dim_names']) != len(dims):
+ print('not all dimensions matched for dataset '+str(i+1))
+ i = i + 1
+ # close netcdf file and print meta dict
+ F.close()
+
+print(meta_dataset)
+
+
diff --git a/test_scripts/ncml_gen_nc.py b/test_scripts/ncml_gen_nc.py
new file mode 100644
index 00000000..a0ce0e62
--- /dev/null
+++ b/test_scripts/ncml_gen_nc.py
@@ -0,0 +1,48 @@
+# import os
+# import jnius_config
+# ncmlpath, file_name = os.path.split(__file__)
+# ncmlpath = os.path.join(ncmlpath, "jars", "netcdfAll-4.6.jar")
+# jnius_config.set_classpath('.',ncmlpath)
+#
+# from jnius import autoclass
+#
+# ncml_template = "/Users/thopri/Projects/PyNEMO/inputs/NEMO_output_T.ncml"
+# nc_file = "/Users/thopri/Projects/PyNEMO/test_scripts/test.nc"
+#
+# NcMLReader = autoclass('ucar.nc2.ncml.NcMLReader')
+# dataset2 = NcMLReader.writeNcMLToFile(ncml_template,nc_file)
+
+
+#NetcdfDataset ncfileIn = NcMLReader.readNcML (ncml_filename, null);
+
+import xml.etree.ElementTree as ET
+import xmltodict
+ncml = "/Users/thopri/Projects/PyNEMO/inputs/NEMO_output_T.ncml"
+ncml_xml = xmltodict.parse(ET.tostring(ET.parse(ncml).getroot()))
+
+dimensions = ncml_xml['ns0:netcdf']['ns0:dimension']
+variables = ncml_xml['ns0:netcdf']['ns0:variable']
+nc_attrib = ncml_xml['ns0:netcdf']['ns0:attribute']
+var_attrib = ncml_xml['ns0:netcdf']['ns0:variable'][0]['ns0:attribute']
+
+print('the end')
+# for key in ncml_meta:
+# for key2 in ncml_meta[key]:
+# if 'dimension' in key2:
+# print(key2)
+
+
+# def find(key, dictionary):
+# for k, v in dictionary.items():
+# if k in key:
+# yield dictionary
+# elif isinstance(v, dict):
+# for result in find(key, v):
+# yield result
+# elif isinstance(v, list):
+# for d in v:
+# if isinstance(d, dict):
+# for result in find(key, d):
+# yield result
+#
+# print(list(find("ns0:dimension", ncml_meta)))
\ No newline at end of file
diff --git a/test_scripts/resample_netcdf.py b/test_scripts/resample_netcdf.py
new file mode 100644
index 00000000..8c352ba3
--- /dev/null
+++ b/test_scripts/resample_netcdf.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+"""
+Resample netcdf code, used for making small low res testing datasets.
+"""
+
+import xarray as xr
+import numpy as np
+
+# subset X Y dims netcdf
+ds = xr.open_dataset('unit_tests/test_data/NNA_R12_bathy_meter_bench.nc')
+
+new_y = np.linspace(ds.y[0],ds.y[-1],100)
+
+new_x = np.linspace(ds.x[0],ds.x[-1],85)
+
+dsi = ds.interp(x=new_x,y=new_y)
+
+dsi.to_netcdf('unit_tests/test_data/dst_bathy.nc')
+
+# subset lat lon dims netcdf
+
+ds = xr.open_dataset('unit_tests/test_data/subset_bathy.nc')
+
+new_lon = np.linspace(ds.longitude[0],ds.longitude[-1],45)
+
+new_lat = np.linspace(ds.latitude[0],ds.latitude[-1],30)
+
+dsi = ds.interp(latitude=new_lat,longitude=new_lon)
+
+dsi.to_netcdf('unit_tests/test_data/resample_bathy.nc')
+
+
+# drop unneeded variables
+dsd = xr.Dataset.drop_vars(ds,'zos')
+
+# remove unwanted time
+ds_sel = dsd.sel({'time':'01-01-2017'})
+