diff --git a/CHANGELOG.md b/CHANGELOG.md index de6e786c..69f0ea67 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -21,6 +21,31 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ----------------------------- +## [v3.1.0] - 2025-06-26 + +- 0-diff vs. v3.0.0. + +### Added + +- Added python package for post-processing ObsFcstAna output into data assimilation diagnostics ([PR #87](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/87), [PR #111](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/111)). +- Support for 2d output from EASE tile space and 2d output on EASE grid: + - Switched EASE grid handling to new MAPL EASE Grid Factory ([PR #115](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/115)). + - Revised pre-processing of HISTORY template ([PR #118](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/118)). +- Support for tile space of stretched cube-sphere grids ([PR #109](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/109)). + +### Changed + +- Revised experiment setup for coupled land-atm DAS ([PR #102](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/102)). +- Updated defaults in LDASsa_DEFAULT_inputs_*.nml files ([PR #104](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/104)). +- Added optional SLURM "constraint" ([PR #112](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/112)). +- Specify only "ntasks_model" in SLURM resource request ([PR #106](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/106)). + +### Fixed + +- UDUNITS error ([PR #101](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/101), [PR #123](https://github.com/GEOS-ESM/GEOSldas_GridComp/pull/123)). + +----------------------------- + ## [v3.0.0] - 2025-05-28 - 0-diff vs. v2.0.0. diff --git a/GEOS_LdasGridComp.F90 b/GEOS_LdasGridComp.F90 index cf481aa1..ccb6ad8d 100644 --- a/GEOS_LdasGridComp.F90 +++ b/GEOS_LdasGridComp.F90 @@ -7,8 +7,8 @@ module GEOS_LdasGridCompMod ! 
!USES use ESMF - use MAPL_Mod - + use MAPL + use GEOS_MetforceGridCompMod, only: MetforceSetServices => SetServices use GEOS_LandGridCompMod, only: LandSetServices => SetServices use GEOS_LandPertGridCompMod, only: LandPertSetServices => SetServices @@ -16,7 +16,6 @@ module GEOS_LdasGridCompMod use GEOS_LandAssimGridCompMod, only: LandAssimSetServices => SetServices use GEOS_LandiceGridCompMod, only: LandiceSetServices => SetServices - use EASE_conv, only: ease_inverse use LDAS_TileCoordType, only: tile_coord_type , T_TILECOORD_STATE, TILECOORD_WRAP use LDAS_TileCoordType, only: grid_def_type, io_grid_def_type, operator (==) use LDAS_TileCoordRoutines, only: get_minExtent_grid, get_ij_ind_from_latlon, io_domain_files @@ -537,12 +536,12 @@ subroutine Initialize(gc, import, export, clock, rc) call ESMF_GRID_INTERIOR(agrid,I1,IN,J1,JN) do I = 1,size(centerX,1) - call ease_inverse(gridname,1.0*(I+I1-2),0.0,lat,lon) + call MAPL_ease_inverse(gridname,1.0*(I+I1-2),0.0,lat,lon) centerX(I,:) = lon * MAPL_DEGREES_TO_RADIANS enddo do J = 1,size(centerY,2) - call ease_inverse(gridname,0.0,1.0*(J+J1-2),lat,lon) + call MAPL_ease_inverse(gridname,0.0,1.0*(J+J1-2),lat,lon) centerY(:,J) = lat * MAPL_DEGREES_TO_RADIANS enddo diff --git a/GEOSlandassim_GridComp/clsm_ensupd_enkf_update.F90 b/GEOSlandassim_GridComp/clsm_ensupd_enkf_update.F90 index eedd28c8..b304ac01 100644 --- a/GEOSlandassim_GridComp/clsm_ensupd_enkf_update.F90 +++ b/GEOSlandassim_GridComp/clsm_ensupd_enkf_update.F90 @@ -74,8 +74,8 @@ module clsm_ensupd_enkf_update use nr_ran2_gasdev, ONLY: & NRANDSEED - use ease_conv, ONLY: & - ease_convert + use MAPL, ONLY: & + MAPL_ease_convert use my_matrix_functions, ONLY: & row_std @@ -2235,7 +2235,7 @@ subroutine write_smapL4SMaup( option, date_time, exp_id, N_ens, & ) if (index(tile_grid_g%gridtype, 'M09') /=0) then - call ease_convert(trim(tile_grid_g%gridtype), this_lat, this_lon, col_ind, row_ind) + call MAPL_ease_convert(trim(tile_grid_g%gridtype), this_lat, this_lon, 
col_ind, row_ind) endif ! col_ind and row_ind are zero-based, need one-based index here @@ -2254,7 +2254,7 @@ subroutine write_smapL4SMaup( option, date_time, exp_id, N_ens, & ) if (index(tile_grid_g%gridtype, 'M09') /=0) then - call ease_convert(trim(tile_grid_g%gridtype), this_lat, this_lon, col_ind, row_ind) + call MAPL_ease_convert(trim(tile_grid_g%gridtype), this_lat, this_lon, col_ind, row_ind) endif ! col_ind and row_ind are zero-based, need one-based index here @@ -2278,7 +2278,7 @@ subroutine write_smapL4SMaup( option, date_time, exp_id, N_ens, & if (index(tile_grid_g%gridtype, 'M09') /=0) then ! subindex (1:7) to get the string EASEvx_ gridname_tmp = tile_grid_g%gridtype(1:7)//'M36' - call ease_convert(gridname_tmp, this_lat, this_lon, col_ind, row_ind) + call MAPL_ease_convert(gridname_tmp, this_lat, this_lon, col_ind, row_ind) endif ! col_ind and row_ind are zero-based, need one-based index here diff --git a/GEOSlandassim_GridComp/clsm_ensupd_read_obs.F90 b/GEOSlandassim_GridComp/clsm_ensupd_read_obs.F90 index 22ab792d..67aa07fe 100644 --- a/GEOSlandassim_GridComp/clsm_ensupd_read_obs.F90 +++ b/GEOSlandassim_GridComp/clsm_ensupd_read_obs.F90 @@ -25,9 +25,9 @@ module clsm_ensupd_read_obs use io_hdf5, ONLY: & hdf5read - use EASE_conv, ONLY: & - ease_convert, & - ease_extent + use MAPL, ONLY: & + MAPL_ease_convert, & + MAPL_ease_extent use LDAS_ensdrv_globals, ONLY: & logit, & @@ -5542,12 +5542,12 @@ subroutine read_obs_SMOS( date_time, N_catd, this_obs_param, & if (tmp_tile_num(ii)>0) then - call ease_convert('EASEv2_M36', & + call MAPL_ease_convert('EASEv2_M36', & tile_coord(tmp_tile_num(ii))%com_lat, & tile_coord(tmp_tile_num(ii))%com_lon, & M36_col_ind_tile, M36_row_ind_tile ) - call ease_convert('EASEv2_M36', & + call MAPL_ease_convert('EASEv2_M36', & tmp_lat(ii), & tmp_lon(ii), & M36_col_ind_obs, M36_row_ind_obs ) @@ -7159,12 +7159,12 @@ subroutine read_obs_SMAP_FT( date_time, N_catd, this_obs_param, & if (tmp_tile_num(ii)>0) then - call 
ease_convert('EASEv2_M09', & + call MAPL_ease_convert('EASEv2_M09', & tile_coord(tmp_tile_num(ii))%com_lat, & tile_coord(tmp_tile_num(ii))%com_lon, & M09_col_ind_tile, M09_row_ind_tile ) - call ease_convert('EASEv2_M09', & + call MAPL_ease_convert('EASEv2_M09', & tmp_lat(ii), & tmp_lon(ii), & M09_col_ind_obs, M09_row_ind_obs ) @@ -8229,12 +8229,12 @@ subroutine read_obs_SMAP_halforbit_Tb( date_time, N_catd, this_obs_param, & if (tmp_tile_num(ii)>0) then - call ease_convert('EASEv2_M36', & + call MAPL_ease_convert('EASEv2_M36', & tile_coord(tmp_tile_num(ii))%com_lat, & tile_coord(tmp_tile_num(ii))%com_lon, & M36_col_ind_tile, M36_row_ind_tile ) - call ease_convert('EASEv2_M36', & + call MAPL_ease_convert('EASEv2_M36', & tmp_lat(ii), & tmp_lon(ii), & M36_col_ind_obs, M36_row_ind_obs ) @@ -8529,7 +8529,7 @@ subroutine turn_off_assim_SMAP_L1CTb(N_obs_param, obs_param, N_obsl, Observation ! ! assemble 36 km EASEv2 mask of L2AP_Tb obs - call ease_extent( 'EASEv2_M36', N_cols, N_rows ) + call MAPL_ease_extent( 'EASEv2_M36', N_cols, N_rows ) allocate( mask_h_A(N_cols,N_rows) ) allocate( mask_h_D(N_cols,N_rows) ) @@ -8551,7 +8551,7 @@ subroutine turn_off_assim_SMAP_L1CTb(N_obs_param, obs_param, N_obsl, Observation if (Observations_f(ii)%species==species_L2AP_Tbh_A) then - call ease_convert('EASEv2_M36', Observations_f(ii)%lat, Observations_f(ii)%lon, & + call MAPL_ease_convert('EASEv2_M36', Observations_f(ii)%lat, Observations_f(ii)%lon, & col, row) ! set mask=.true. for the M36 grid cell that contains the L2AP_Tb obs; @@ -8573,7 +8573,7 @@ subroutine turn_off_assim_SMAP_L1CTb(N_obs_param, obs_param, N_obsl, Observation if (Observations_f(ii)%species==species_L2AP_Tbh_D) then - call ease_convert('EASEv2_M36', Observations_f(ii)%lat, Observations_f(ii)%lon, & + call MAPL_ease_convert('EASEv2_M36', Observations_f(ii)%lat, Observations_f(ii)%lon, & col, row) ! set mask=.true. 
for the M36 grid cell that contains the L2AP_Tb obs; @@ -8595,7 +8595,7 @@ subroutine turn_off_assim_SMAP_L1CTb(N_obs_param, obs_param, N_obsl, Observation if (Observations_f(ii)%species==species_L2AP_Tbv_A) then - call ease_convert('EASEv2_M36', Observations_f(ii)%lat, Observations_f(ii)%lon, & + call MAPL_ease_convert('EASEv2_M36', Observations_f(ii)%lat, Observations_f(ii)%lon, & col, row) ! set mask=.true. for the M36 grid cell that contains the L2AP_Tb obs; @@ -8617,7 +8617,7 @@ subroutine turn_off_assim_SMAP_L1CTb(N_obs_param, obs_param, N_obsl, Observation if (Observations_f(ii)%species==species_L2AP_Tbv_D) then - call ease_convert('EASEv2_M36', Observations_f(ii)%lat, Observations_f(ii)%lon, & + call MAPL_ease_convert('EASEv2_M36', Observations_f(ii)%lat, Observations_f(ii)%lon, & col, row) ! set mask=.true. for the M36 grid cell that contains the L2AP_Tb obs; @@ -8652,7 +8652,7 @@ subroutine turn_off_assim_SMAP_L1CTb(N_obs_param, obs_param, N_obsl, Observation if (Observations_l(ii)%species==species_L1C_Tbh_A) then - call ease_convert('EASEv2_M36', & + call MAPL_ease_convert('EASEv2_M36', & Observations_l(ii)%lat, Observations_l(ii)%lon, col, row) ! note conversion to one-based indices @@ -8671,7 +8671,7 @@ subroutine turn_off_assim_SMAP_L1CTb(N_obs_param, obs_param, N_obsl, Observation if (Observations_l(ii)%species==species_L1C_Tbh_D) then - call ease_convert('EASEv2_M36', & + call MAPL_ease_convert('EASEv2_M36', & Observations_l(ii)%lat, Observations_l(ii)%lon, col, row) ! note conversion to one-based indices @@ -8692,7 +8692,7 @@ subroutine turn_off_assim_SMAP_L1CTb(N_obs_param, obs_param, N_obsl, Observation if (Observations_l(ii)%species==species_L1C_Tbv_A) then - call ease_convert('EASEv2_M36', & + call MAPL_ease_convert('EASEv2_M36', & Observations_l(ii)%lat, Observations_l(ii)%lon, col, row) ! 
note conversion to one-based indices @@ -8711,7 +8711,7 @@ subroutine turn_off_assim_SMAP_L1CTb(N_obs_param, obs_param, N_obsl, Observation if (Observations_l(ii)%species==species_L1C_Tbv_D) then - call ease_convert('EASEv2_M36', & + call MAPL_ease_convert('EASEv2_M36', & Observations_l(ii)%lat, Observations_l(ii)%lon, col, row) ! note conversion to one-based indices diff --git a/GEOSldas_App/CMakeLists.txt b/GEOSldas_App/CMakeLists.txt index f13954ec..0f4024a7 100644 --- a/GEOSldas_App/CMakeLists.txt +++ b/GEOSldas_App/CMakeLists.txt @@ -39,7 +39,7 @@ configure_file(${file} ${file} @ONLY) install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/${file} DESTINATION bin) file(GLOB rc_files GEOSldas_*rc) -file(GLOB nml_files LDASsa_DEFAULT*nml) +file(GLOB nml_files LDASsa_DEFAULT*nml LandAtmDAS_nml/LDASsa_SPECIAL*nml ) install( FILES ${rc_files} ${nml_files} diff --git a/GEOSldas_App/GEOSldas_HIST.rc b/GEOSldas_App/GEOSldas_HIST.rc index d9d09dd8..37ce4b59 100644 --- a/GEOSldas_App/GEOSldas_HIST.rc +++ b/GEOSldas_App/GEOSldas_HIST.rc @@ -1,24 +1,23 @@ # Sample HISTORY.rc file for GEOSldas # # This HISTORY template is edited by "ldas_setup" via "process_hist.csh". -# The strings '#ASSIM', '#EASE', and '#CUBE' are *not* linked to MAPL HISTORY -# functionality. For example, the line -# "#CUBE 'tavg24_2d_lnd_Nx'" -# does *not* mean that the 'lnd' output will be on a cube-sphere grid. -#CUBE VERSION: 1 +VERSION: 1 + +# Must edit 'EXPID' manually if HISTORY file is re-used without going through "ldas_setup". -# Must edit 'EXPID' manually if HISTORY file is re-used without going -# through "ldas_setup". 
-# EXPID: GEOSldas_expid +# ------------------------------------------------------------------------------------------------ + +# pre-defined Collections + COLLECTIONS: -#EASE 'tavg24_1d_lfs_Nt' -#CUBE 'tavg24_2d_lfs_Nx' -#EASE 'tavg24_1d_lnd_Nt' -#CUBE 'tavg24_2d_lnd_Nx' -#ASSIM 'SMAP_L4_SM_gph' +#OUT1d 'tavg24_1d_lfs_Nt' +#OUT2d 'tavg24_2d_lfs_Nx' +#OUT1d 'tavg24_1d_lnd_Nt' +#OUT2d 'tavg24_2d_lnd_Nx' +# 'SMAP_L4_SM_gph' # 'inst1_1d_lnr_Nt' # 'catch_progn_incr' # 'inst3_1d_lndfcstana_Nt' @@ -29,24 +28,34 @@ COLLECTIONS: # 'tavg24_1d_glc_Nt' :: -#CUBE GRID_LABELS: PC720x361-DC -#CUBE PC1440x721-DC +# -------------------------------------------------------------------------------------------------- + +# 2d output can be on the following grids (see [COLLECTION_NAME].grid_label]) + +GRID_LABELS: PC720x361-DC + PC1440x721-DC + EASEv2_M36 + :: + +PC720x361-DC.GRID_TYPE: LatLon +PC720x361-DC.IM_WORLD: 720 +PC720x361-DC.JM_WORLD: 361 +PC720x361-DC.POLE: PC +PC720x361-DC.DATELINE: DC +PC720x361-DC.LM: 1 -#CUBE :: +PC1440x721-DC.GRID_TYPE: LatLon +PC1440x721-DC.IM_WORLD: 1440 +PC1440x721-DC.JM_WORLD: 721 +PC1440x721-DC.POLE: PC +PC1440x721-DC.DATELINE: DC +PC1440x721-DC.LM: 1 -#CUBE PC720x361-DC.GRID_TYPE: LatLon -#CUBE PC720x361-DC.IM_WORLD: 720 -#CUBE PC720x361-DC.JM_WORLD: 361 -#CUBE PC720x361-DC.POLE: PC -#CUBE PC720x361-DC.DATELINE: DC -#CUBE PC720x361-DC.LM: 1 +EASEv2_M36.GRID_TYPE: EASE +EASEv2_M36.GRIDNAME: EASEv2_M36 +EASEv2_M36.LM: 1 -#CUBE PC1440x721-DC.GRID_TYPE: LatLon -#CUBE PC1440x721-DC.IM_WORLD: 1440 -#CUBE PC1440x721-DC.JM_WORLD: 721 -#CUBE PC1440x721-DC.POLE: PC -#CUBE PC1440x721-DC.DATELINE: DC -#CUBE PC1440x721-DC.LM: 1 +# -------------------------------------------------------------------------------------------------- # Detailed definition of the collections listed above # @@ -219,15 +228,16 @@ COLLECTIONS: tavg24_2d_lnd_Nx.format: 'CFIO', tavg24_2d_lnd_Nx.descr: '2d,Daily,Time-Averaged,Single-Level,Assimilation,Land Surface Diagnostics', - 
tavg24_2d_lnd_Nx.nbits: 12, + tavg24_2d_lnd_Nx.nbits: 12, tavg24_2d_lnd_Nx.template: '%y4%m2%d2_%h2%n2z.nc4', tavg24_2d_lnd_Nx.mode: 'time-averaged', tavg24_2d_lnd_Nx.frequency: 240000, tavg24_2d_lnd_Nx.ref_time: 000000, - tavg24_2d_lnd_Nx.regrid_exch: '../input/tile.data' - tavg24_2d_lnd_Nx.regrid_name: 'GRIDNAME' + tavg24_2d_lnd_Nx.regrid_exch: '../input/tile.data', + tavg24_2d_lnd_Nx.regrid_name: 'GRIDNAME', # tavg24_2d_lnd_Nx.regrid_method: 'BILINEAR_MONOTONIC' , - tavg24_2d_lnd_Nx.grid_label: PC720x361-DC + tavg24_2d_lnd_Nx.grid_label: PC720x361-DC, +# tavg24_2d_lnd_Nx.grid_label: EASEv2_M36, tavg24_2d_lnd_Nx.deflate: 2, tavg24_2d_lnd_Nx.fields: 'GRN' , 'VEGDYN' , 'LAI' , 'VEGDYN' , diff --git a/GEOSldas_App/LDASsa_DEFAULT_inputs_ensprop.nml b/GEOSldas_App/LDASsa_DEFAULT_inputs_ensprop.nml index dbb29e84..425b8c91 100644 --- a/GEOSldas_App/LDASsa_DEFAULT_inputs_ensprop.nml +++ b/GEOSldas_App/LDASsa_DEFAULT_inputs_ensprop.nml @@ -188,8 +188,8 @@ coarsen_force_pert%wind = .false. ! (limits on range of random numbers: specify max absolute value ! allowed to be drawn from a standard normal distribution) -std_normal_max_force_pert%pcp = 2.5 -std_normal_max_force_pert%sw = 2.5 +std_normal_max_force_pert%pcp = 3. +std_normal_max_force_pert%sw = 3. std_normal_max_force_pert%lw = 2.5 std_normal_max_force_pert%tair = 2.5 std_normal_max_force_pert%qair = 2.5 diff --git a/GEOSldas_App/LDASsa_DEFAULT_inputs_ensupd.nml b/GEOSldas_App/LDASsa_DEFAULT_inputs_ensupd.nml index b1c1764d..0a31e143 100644 --- a/GEOSldas_App/LDASsa_DEFAULT_inputs_ensupd.nml +++ b/GEOSldas_App/LDASsa_DEFAULT_inputs_ensupd.nml @@ -110,9 +110,9 @@ fcsterr_inflation_fac = -9999. ! %RTM_ID = ID of radiative transfer model to use for Tb forward modeling ! (subroutine get_obs_pred()) ! 0 = none -! 1 = L-band tau-omega model as in De Lannoy et al. 2013 (doi:10.1175/JHM-D-12-092.1) (SMOS) +! 1 = L-band tau-omega model as in De Lannoy et al. 2013 (doi:10.1175/JHM-D-12-092.1) (old SMOS preproc) ! 
2 = same as 1 but without Pellarin atm corr (SMAP) -! 3 = same as 1 but with Mironov and SMAP L2_SM pol mixing (SMOS) +! 3 = same as 1 but with Mironov and SMAP L2_SM pol mixing ! 4 = same as 3 but without Pellarin atm corr (targeted for SMAP L4_SM Version 8) ! %bias_Npar = number of obs bias states tracked per day (integer) ! %bias_trel = e-folding time scale of obs bias memory [s] @@ -898,7 +898,7 @@ obs_param_nml(17)%FOV_units = 'km' obs_param_nml(17)%assim = .false. obs_param_nml(17)%scale = .false. obs_param_nml(17)%getinnov = .false. -obs_param_nml(17)%RTM_ID = 2 +obs_param_nml(17)%RTM_ID = 4 obs_param_nml(17)%bias_Npar = 0 obs_param_nml(17)%bias_trel = 864000 obs_param_nml(17)%bias_tcut = 432000 @@ -916,7 +916,7 @@ obs_param_nml(17)%flistname = '' obs_param_nml(17)%errstd = 4. obs_param_nml(17)%std_normal_max = 2.5 obs_param_nml(17)%zeromean = .true. -obs_param_nml(17)%coarsen_pert = .false. +obs_param_nml(17)%coarsen_pert = .true. obs_param_nml(17)%xcorr = 0.25 obs_param_nml(17)%ycorr = 0.25 obs_param_nml(17)%adapt = 0 @@ -942,7 +942,7 @@ obs_param_nml(18)%FOV_units = 'km' obs_param_nml(18)%assim = .false. obs_param_nml(18)%scale = .false. obs_param_nml(18)%getinnov = .false. -obs_param_nml(18)%RTM_ID = 2 +obs_param_nml(18)%RTM_ID = 4 obs_param_nml(18)%bias_Npar = 0 obs_param_nml(18)%bias_trel = 864000 obs_param_nml(18)%bias_tcut = 432000 @@ -960,7 +960,7 @@ obs_param_nml(18)%flistname = '' obs_param_nml(18)%errstd = 4. obs_param_nml(18)%std_normal_max = 2.5 obs_param_nml(18)%zeromean = .true. -obs_param_nml(18)%coarsen_pert = .false. +obs_param_nml(18)%coarsen_pert = .true. obs_param_nml(18)%xcorr = 0.25 obs_param_nml(18)%ycorr = 0.25 obs_param_nml(18)%adapt = 0 @@ -986,7 +986,7 @@ obs_param_nml(19)%FOV_units = 'km' obs_param_nml(19)%assim = .false. obs_param_nml(19)%scale = .false. obs_param_nml(19)%getinnov = .false. 
-obs_param_nml(19)%RTM_ID = 2 +obs_param_nml(19)%RTM_ID = 4 obs_param_nml(19)%bias_Npar = 0 obs_param_nml(19)%bias_trel = 864000 obs_param_nml(19)%bias_tcut = 432000 @@ -1004,7 +1004,7 @@ obs_param_nml(19)%flistname = '' obs_param_nml(19)%errstd = 4. obs_param_nml(19)%std_normal_max = 2.5 obs_param_nml(19)%zeromean = .true. -obs_param_nml(19)%coarsen_pert = .false. +obs_param_nml(19)%coarsen_pert = .true. obs_param_nml(19)%xcorr = 0.25 obs_param_nml(19)%ycorr = 0.25 obs_param_nml(19)%adapt = 0 @@ -1030,7 +1030,7 @@ obs_param_nml(20)%FOV_units = 'km' obs_param_nml(20)%assim = .false. obs_param_nml(20)%scale = .false. obs_param_nml(20)%getinnov = .false. -obs_param_nml(20)%RTM_ID = 2 +obs_param_nml(20)%RTM_ID = 4 obs_param_nml(20)%bias_Npar = 0 obs_param_nml(20)%bias_trel = 864000 obs_param_nml(20)%bias_tcut = 432000 @@ -1048,7 +1048,7 @@ obs_param_nml(20)%flistname = '' obs_param_nml(20)%errstd = 4. obs_param_nml(20)%std_normal_max = 2.5 obs_param_nml(20)%zeromean = .true. -obs_param_nml(20)%coarsen_pert = .false. +obs_param_nml(20)%coarsen_pert = .true. obs_param_nml(20)%xcorr = 0.25 obs_param_nml(20)%ycorr = 0.25 obs_param_nml(20)%adapt = 0 @@ -1068,14 +1068,14 @@ obs_param_nml(21)%FOV_units = 'km' obs_param_nml(21)%assim = .false. obs_param_nml(21)%scale = .false. obs_param_nml(21)%getinnov = .false. -obs_param_nml(21)%RTM_ID = 2 +obs_param_nml(21)%RTM_ID = 4 obs_param_nml(21)%bias_Npar = 0 obs_param_nml(21)%bias_trel = 864000 obs_param_nml(21)%bias_tcut = 432000 obs_param_nml(21)%nodata = -9999. 
obs_param_nml(21)%varname = 'Tb' obs_param_nml(21)%units = 'K' -obs_param_nml(21)%path = '/discover/nobackup/projects/gmao/ssd/land/l_data/SMOS/EASEv2/ESA_REPR/SMOS_M36_SCLF1C_fit_nosky_noatm_v620_ESA_v102/SMOS_fit_poly2/' +obs_param_nml(21)%path = '/discover/nobackup/projects/gmao/smap/SMAP_Nature/SMOS/EASEv2/ESA/SMOS_M36_SCLF1C_fit_nosky_noatm_v724_ESA_v102/SMOS_fit_poly2/' obs_param_nml(21)%name = '' obs_param_nml(21)%maskpath = '' obs_param_nml(21)%maskname = '' @@ -1083,10 +1083,10 @@ obs_param_nml(21)%scalepath = '' obs_param_nml(21)%scalename = '' obs_param_nml(21)%flistpath = '' obs_param_nml(21)%flistname = '' -obs_param_nml(21)%errstd = 1.5 +obs_param_nml(21)%errstd = 4. obs_param_nml(21)%std_normal_max = 2.5 obs_param_nml(21)%zeromean = .true. -obs_param_nml(21)%coarsen_pert = .false. +obs_param_nml(21)%coarsen_pert = .true. obs_param_nml(21)%xcorr = 0.25 obs_param_nml(21)%ycorr = 0.25 obs_param_nml(21)%adapt = 0 @@ -1106,14 +1106,14 @@ obs_param_nml(22)%FOV_units = 'km' obs_param_nml(22)%assim = .false. obs_param_nml(22)%scale = .false. obs_param_nml(22)%getinnov = .false. -obs_param_nml(22)%RTM_ID = 2 +obs_param_nml(22)%RTM_ID = 4 obs_param_nml(22)%bias_Npar = 0 obs_param_nml(22)%bias_trel = 864000 obs_param_nml(22)%bias_tcut = 432000 obs_param_nml(22)%nodata = -9999. obs_param_nml(22)%varname = 'Tb' obs_param_nml(22)%units = 'K' -obs_param_nml(22)%path = '/discover/nobackup/projects/gmao/ssd/land/l_data/SMOS/EASEv2/ESA_REPR/SMOS_M36_SCLF1C_fit_nosky_noatm_v620_ESA_v102/SMOS_fit_poly2/' +obs_param_nml(22)%path = '/discover/nobackup/projects/gmao/smap/SMAP_Nature/SMOS/EASEv2/ESA/SMOS_M36_SCLF1C_fit_nosky_noatm_v724_ESA_v102/SMOS_fit_poly2/' obs_param_nml(22)%name = '' obs_param_nml(22)%maskpath = '' obs_param_nml(22)%maskname = '' @@ -1121,10 +1121,10 @@ obs_param_nml(22)%scalepath = '' obs_param_nml(22)%scalename = '' obs_param_nml(22)%flistpath = '' obs_param_nml(22)%flistname = '' -obs_param_nml(22)%errstd = 1.5 +obs_param_nml(22)%errstd = 4. 
obs_param_nml(22)%std_normal_max = 2.5 obs_param_nml(22)%zeromean = .true. -obs_param_nml(22)%coarsen_pert = .false. +obs_param_nml(22)%coarsen_pert = .true. obs_param_nml(22)%xcorr = 0.25 obs_param_nml(22)%ycorr = 0.25 obs_param_nml(22)%adapt = 0 @@ -1144,14 +1144,14 @@ obs_param_nml(23)%FOV_units = 'km' obs_param_nml(23)%assim = .false. obs_param_nml(23)%scale = .false. obs_param_nml(23)%getinnov = .false. -obs_param_nml(23)%RTM_ID = 2 +obs_param_nml(23)%RTM_ID = 4 obs_param_nml(23)%bias_Npar = 0 obs_param_nml(23)%bias_trel = 864000 obs_param_nml(23)%bias_tcut = 432000 obs_param_nml(23)%nodata = -9999. obs_param_nml(23)%varname = 'Tb' obs_param_nml(23)%units = 'K' -obs_param_nml(23)%path = '/discover/nobackup/projects/gmao/ssd/land/l_data/SMOS/EASEv2/ESA_REPR/SMOS_M36_SCLF1C_fit_nosky_noatm_v620_ESA_v102/SMOS_fit_poly2/' +obs_param_nml(23)%path = '/discover/nobackup/projects/gmao/smap/SMAP_Nature/SMOS/EASEv2/ESA/SMOS_M36_SCLF1C_fit_nosky_noatm_v724_ESA_v102/SMOS_fit_poly2/' obs_param_nml(23)%name = '' obs_param_nml(23)%maskpath = '' obs_param_nml(23)%maskname = '' @@ -1159,10 +1159,10 @@ obs_param_nml(23)%scalepath = '' obs_param_nml(23)%scalename = '' obs_param_nml(23)%flistpath = '' obs_param_nml(23)%flistname = '' -obs_param_nml(23)%errstd = 1.5 +obs_param_nml(23)%errstd = 4. obs_param_nml(23)%std_normal_max = 2.5 obs_param_nml(23)%zeromean = .true. -obs_param_nml(23)%coarsen_pert = .false. +obs_param_nml(23)%coarsen_pert = .true. obs_param_nml(23)%xcorr = 0.25 obs_param_nml(23)%ycorr = 0.25 obs_param_nml(23)%adapt = 0 @@ -1182,14 +1182,14 @@ obs_param_nml(24)%FOV_units = 'km' obs_param_nml(24)%assim = .false. obs_param_nml(24)%scale = .false. obs_param_nml(24)%getinnov = .false. -obs_param_nml(24)%RTM_ID = 2 +obs_param_nml(24)%RTM_ID = 4 obs_param_nml(24)%bias_Npar = 0 obs_param_nml(24)%bias_trel = 864000 obs_param_nml(24)%bias_tcut = 432000 obs_param_nml(24)%nodata = -9999. 
obs_param_nml(24)%varname = 'Tb' obs_param_nml(24)%units = 'K' -obs_param_nml(24)%path = '/discover/nobackup/projects/gmao/ssd/land/l_data/SMOS/EASEv2/ESA_REPR/SMOS_M36_SCLF1C_fit_nosky_noatm_v620_ESA_v102/SMOS_fit_poly2/' +obs_param_nml(24)%path = '/discover/nobackup/projects/gmao/smap/SMAP_Nature/SMOS/EASEv2/ESA/SMOS_M36_SCLF1C_fit_nosky_noatm_v724_ESA_v102/SMOS_fit_poly2/' obs_param_nml(24)%name = '' obs_param_nml(24)%maskpath = '' obs_param_nml(24)%maskname = '' @@ -1197,10 +1197,10 @@ obs_param_nml(24)%scalepath = '' obs_param_nml(24)%scalename = '' obs_param_nml(24)%flistpath = '' obs_param_nml(24)%flistname = '' -obs_param_nml(24)%errstd = 1.5 +obs_param_nml(24)%errstd = 4. obs_param_nml(24)%std_normal_max = 2.5 obs_param_nml(24)%zeromean = .true. -obs_param_nml(24)%coarsen_pert = .false. +obs_param_nml(24)%coarsen_pert = .true. obs_param_nml(24)%xcorr = 0.25 obs_param_nml(24)%ycorr = 0.25 obs_param_nml(24)%adapt = 0 @@ -1458,14 +1458,14 @@ obs_param_nml(31)%FOV_units = 'km' obs_param_nml(31)%assim = .false. obs_param_nml(31)%scale = .false. obs_param_nml(31)%getinnov = .false. -obs_param_nml(31)%RTM_ID = 2 +obs_param_nml(31)%RTM_ID = 4 obs_param_nml(31)%bias_Npar = 0 obs_param_nml(31)%bias_trel = 864000 obs_param_nml(31)%bias_tcut = 432000 obs_param_nml(31)%nodata = -9999. obs_param_nml(31)%varname = 'Tb' obs_param_nml(31)%units = 'K' -obs_param_nml(31)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB/' +obs_param_nml(31)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB/' obs_param_nml(31)%name = '' obs_param_nml(31)%maskpath = '' obs_param_nml(31)%maskname = '' @@ -1476,7 +1476,7 @@ obs_param_nml(31)%flistname = '' obs_param_nml(31)%errstd = 4. obs_param_nml(31)%std_normal_max = 2.5 obs_param_nml(31)%zeromean = .true. -obs_param_nml(31)%coarsen_pert = .false. +obs_param_nml(31)%coarsen_pert = .true. 
obs_param_nml(31)%xcorr = 0.25 obs_param_nml(31)%ycorr = 0.25 obs_param_nml(31)%adapt = 0 @@ -1496,14 +1496,14 @@ obs_param_nml(32)%FOV_units = 'km' obs_param_nml(32)%assim = .false. obs_param_nml(32)%scale = .false. obs_param_nml(32)%getinnov = .false. -obs_param_nml(32)%RTM_ID = 2 +obs_param_nml(32)%RTM_ID = 4 obs_param_nml(32)%bias_Npar = 0 obs_param_nml(32)%bias_trel = 864000 obs_param_nml(32)%bias_tcut = 432000 obs_param_nml(32)%nodata = -9999. obs_param_nml(32)%varname = 'Tb' obs_param_nml(32)%units = 'K' -obs_param_nml(32)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB/' +obs_param_nml(32)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB/' obs_param_nml(32)%name = '' obs_param_nml(32)%maskpath = '' obs_param_nml(32)%maskname = '' @@ -1514,7 +1514,7 @@ obs_param_nml(32)%flistname = '' obs_param_nml(32)%errstd = 4. obs_param_nml(32)%std_normal_max = 2.5 obs_param_nml(32)%zeromean = .true. -obs_param_nml(32)%coarsen_pert = .false. +obs_param_nml(32)%coarsen_pert = .true. obs_param_nml(32)%xcorr = 0.25 obs_param_nml(32)%ycorr = 0.25 obs_param_nml(32)%adapt = 0 @@ -1534,14 +1534,14 @@ obs_param_nml(33)%FOV_units = 'km' obs_param_nml(33)%assim = .false. obs_param_nml(33)%scale = .false. obs_param_nml(33)%getinnov = .false. -obs_param_nml(33)%RTM_ID = 2 +obs_param_nml(33)%RTM_ID = 4 obs_param_nml(33)%bias_Npar = 0 obs_param_nml(33)%bias_trel = 864000 obs_param_nml(33)%bias_tcut = 432000 obs_param_nml(33)%nodata = -9999. obs_param_nml(33)%varname = 'Tb' obs_param_nml(33)%units = 'K' -obs_param_nml(33)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB/' +obs_param_nml(33)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB/' obs_param_nml(33)%name = '' obs_param_nml(33)%maskpath = '' obs_param_nml(33)%maskname = '' @@ -1552,7 +1552,7 @@ obs_param_nml(33)%flistname = '' obs_param_nml(33)%errstd = 4. obs_param_nml(33)%std_normal_max = 2.5 obs_param_nml(33)%zeromean = .true. 
-obs_param_nml(33)%coarsen_pert = .false. +obs_param_nml(33)%coarsen_pert = .true. obs_param_nml(33)%xcorr = 0.25 obs_param_nml(33)%ycorr = 0.25 obs_param_nml(33)%adapt = 0 @@ -1572,14 +1572,14 @@ obs_param_nml(34)%FOV_units = 'km' obs_param_nml(34)%assim = .false. obs_param_nml(34)%scale = .false. obs_param_nml(34)%getinnov = .false. -obs_param_nml(34)%RTM_ID = 2 +obs_param_nml(34)%RTM_ID = 4 obs_param_nml(34)%bias_Npar = 0 obs_param_nml(34)%bias_trel = 864000 obs_param_nml(34)%bias_tcut = 432000 obs_param_nml(34)%nodata = -9999. obs_param_nml(34)%varname = 'Tb' obs_param_nml(34)%units = 'K' -obs_param_nml(34)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB/' +obs_param_nml(34)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB/' obs_param_nml(34)%name = '' obs_param_nml(34)%maskpath = '' obs_param_nml(34)%maskname = '' @@ -1590,7 +1590,7 @@ obs_param_nml(34)%flistname = '' obs_param_nml(34)%errstd = 4. obs_param_nml(34)%std_normal_max = 2.5 obs_param_nml(34)%zeromean = .true. -obs_param_nml(34)%coarsen_pert = .false. +obs_param_nml(34)%coarsen_pert = .true. obs_param_nml(34)%xcorr = 0.25 obs_param_nml(34)%ycorr = 0.25 obs_param_nml(34)%adapt = 0 @@ -1621,14 +1621,14 @@ obs_param_nml(35)%FOV_units = 'km' obs_param_nml(35)%assim = .false. obs_param_nml(35)%scale = .false. obs_param_nml(35)%getinnov = .false. -obs_param_nml(35)%RTM_ID = 2 +obs_param_nml(35)%RTM_ID = 4 obs_param_nml(35)%bias_Npar = 0 obs_param_nml(35)%bias_trel = 864000 obs_param_nml(35)%bias_tcut = 432000 obs_param_nml(35)%nodata = -9999. obs_param_nml(35)%varname = 'Tb' obs_param_nml(35)%units = 'K' -obs_param_nml(35)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L2_SM_AP/' +obs_param_nml(35)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L2_SM_AP/' obs_param_nml(35)%name = '' obs_param_nml(35)%maskpath = '' obs_param_nml(35)%maskname = '' @@ -1659,14 +1659,14 @@ obs_param_nml(36)%FOV_units = 'km' obs_param_nml(36)%assim = .false. 
obs_param_nml(36)%scale = .false. obs_param_nml(36)%getinnov = .false. -obs_param_nml(36)%RTM_ID = 2 +obs_param_nml(36)%RTM_ID = 4 obs_param_nml(36)%bias_Npar = 0 obs_param_nml(36)%bias_trel = 864000 obs_param_nml(36)%bias_tcut = 432000 obs_param_nml(36)%nodata = -9999. obs_param_nml(36)%varname = 'Tb' obs_param_nml(36)%units = 'K' -obs_param_nml(36)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L2_SM_AP/' +obs_param_nml(36)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L2_SM_AP/' obs_param_nml(36)%name = '' obs_param_nml(36)%maskpath = '' obs_param_nml(36)%maskname = '' @@ -1697,14 +1697,14 @@ obs_param_nml(37)%FOV_units = 'km' obs_param_nml(37)%assim = .false. obs_param_nml(37)%scale = .false. obs_param_nml(37)%getinnov = .false. -obs_param_nml(37)%RTM_ID = 2 +obs_param_nml(37)%RTM_ID = 4 obs_param_nml(37)%bias_Npar = 0 obs_param_nml(37)%bias_trel = 864000 obs_param_nml(37)%bias_tcut = 432000 obs_param_nml(37)%nodata = -9999. obs_param_nml(37)%varname = 'Tb' obs_param_nml(37)%units = 'K' -obs_param_nml(37)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L2_SM_AP/' +obs_param_nml(37)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L2_SM_AP/' obs_param_nml(37)%name = '' obs_param_nml(37)%maskpath = '' obs_param_nml(37)%maskname = '' @@ -1735,14 +1735,14 @@ obs_param_nml(38)%FOV_units = 'km' obs_param_nml(38)%assim = .false. obs_param_nml(38)%scale = .false. obs_param_nml(38)%getinnov = .false. -obs_param_nml(38)%RTM_ID = 2 +obs_param_nml(38)%RTM_ID = 4 obs_param_nml(38)%bias_Npar = 0 obs_param_nml(38)%bias_trel = 864000 obs_param_nml(38)%bias_tcut = 432000 obs_param_nml(38)%nodata = -9999. 
obs_param_nml(38)%varname = 'Tb' obs_param_nml(38)%units = 'K' -obs_param_nml(38)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L2_SM_AP/' +obs_param_nml(38)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L2_SM_AP/' obs_param_nml(38)%name = '' obs_param_nml(38)%maskpath = '' obs_param_nml(38)%maskname = '' @@ -1779,7 +1779,7 @@ obs_param_nml(39)%bias_tcut = 432000 obs_param_nml(39)%nodata = -9999. obs_param_nml(39)%varname = 'FT' obs_param_nml(39)%units = '-' -obs_param_nml(39)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L2_SM_AP/' +obs_param_nml(39)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L2_SM_AP/' obs_param_nml(39)%name = '' obs_param_nml(39)%maskpath = '' obs_param_nml(39)%maskname = '' @@ -1816,7 +1816,7 @@ obs_param_nml(40)%bias_tcut = 432000 obs_param_nml(40)%nodata = -9999. obs_param_nml(40)%varname = 'FT' obs_param_nml(40)%units = '-' -obs_param_nml(40)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L2_SM_AP/' +obs_param_nml(40)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L2_SM_AP/' obs_param_nml(40)%name = '' obs_param_nml(40)%maskpath = '' obs_param_nml(40)%maskname = '' @@ -1869,14 +1869,14 @@ obs_param_nml(41)%FOV_units = 'km' obs_param_nml(41)%assim = .false. obs_param_nml(41)%scale = .false. obs_param_nml(41)%getinnov = .false. -obs_param_nml(41)%RTM_ID = 2 +obs_param_nml(41)%RTM_ID = 4 obs_param_nml(41)%bias_Npar = 0 obs_param_nml(41)%bias_trel = 864000 obs_param_nml(41)%bias_tcut = 432000 obs_param_nml(41)%nodata = -9999. obs_param_nml(41)%varname = 'Tb' obs_param_nml(41)%units = 'K' -obs_param_nml(41)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB_E/' +obs_param_nml(41)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB_E/' obs_param_nml(41)%name = '' obs_param_nml(41)%maskpath = '' obs_param_nml(41)%maskname = '' @@ -1887,7 +1887,7 @@ obs_param_nml(41)%flistname = '' obs_param_nml(41)%errstd = 4. 
obs_param_nml(41)%std_normal_max = 2.5 obs_param_nml(41)%zeromean = .true. -obs_param_nml(41)%coarsen_pert = .false. +obs_param_nml(41)%coarsen_pert = .true. obs_param_nml(41)%xcorr = 0.1875 obs_param_nml(41)%ycorr = 0.1875 obs_param_nml(41)%adapt = 0 @@ -1907,14 +1907,14 @@ obs_param_nml(42)%FOV_units = 'km' obs_param_nml(42)%assim = .false. obs_param_nml(42)%scale = .false. obs_param_nml(42)%getinnov = .false. -obs_param_nml(42)%RTM_ID = 2 +obs_param_nml(42)%RTM_ID = 4 obs_param_nml(42)%bias_Npar = 0 obs_param_nml(42)%bias_trel = 864000 obs_param_nml(42)%bias_tcut = 432000 obs_param_nml(42)%nodata = -9999. obs_param_nml(42)%varname = 'Tb' obs_param_nml(42)%units = 'K' -obs_param_nml(42)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB_E/' +obs_param_nml(42)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB_E/' obs_param_nml(42)%name = '' obs_param_nml(42)%maskpath = '' obs_param_nml(42)%maskname = '' @@ -1925,7 +1925,7 @@ obs_param_nml(42)%flistname = '' obs_param_nml(42)%errstd = 4. obs_param_nml(42)%std_normal_max = 2.5 obs_param_nml(42)%zeromean = .true. -obs_param_nml(42)%coarsen_pert = .false. +obs_param_nml(42)%coarsen_pert = .true. obs_param_nml(42)%xcorr = 0.1875 obs_param_nml(42)%ycorr = 0.1875 obs_param_nml(42)%adapt = 0 @@ -1945,14 +1945,14 @@ obs_param_nml(43)%FOV_units = 'km' obs_param_nml(43)%assim = .false. obs_param_nml(43)%scale = .false. obs_param_nml(43)%getinnov = .false. -obs_param_nml(43)%RTM_ID = 2 +obs_param_nml(43)%RTM_ID = 4 obs_param_nml(43)%bias_Npar = 0 obs_param_nml(43)%bias_trel = 864000 obs_param_nml(43)%bias_tcut = 432000 obs_param_nml(43)%nodata = -9999. 
obs_param_nml(43)%varname = 'Tb' obs_param_nml(43)%units = 'K' -obs_param_nml(43)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB_E/' +obs_param_nml(43)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB_E/' obs_param_nml(43)%name = '' obs_param_nml(43)%maskpath = '' obs_param_nml(43)%maskname = '' @@ -1963,7 +1963,7 @@ obs_param_nml(43)%flistname = '' obs_param_nml(43)%errstd = 4. obs_param_nml(43)%std_normal_max = 2.5 obs_param_nml(43)%zeromean = .true. -obs_param_nml(43)%coarsen_pert = .false. +obs_param_nml(43)%coarsen_pert = .true. obs_param_nml(43)%xcorr = 0.1875 obs_param_nml(43)%ycorr = 0.1875 obs_param_nml(43)%adapt = 0 @@ -1983,14 +1983,14 @@ obs_param_nml(44)%FOV_units = 'km' obs_param_nml(44)%assim = .false. obs_param_nml(44)%scale = .false. obs_param_nml(44)%getinnov = .false. -obs_param_nml(44)%RTM_ID = 2 +obs_param_nml(44)%RTM_ID = 4 obs_param_nml(44)%bias_Npar = 0 obs_param_nml(44)%bias_trel = 864000 obs_param_nml(44)%bias_tcut = 432000 obs_param_nml(44)%nodata = -9999. obs_param_nml(44)%varname = 'Tb' obs_param_nml(44)%units = 'K' -obs_param_nml(44)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB_E/' +obs_param_nml(44)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB_E/' obs_param_nml(44)%name = '' obs_param_nml(44)%maskpath = '' obs_param_nml(44)%maskname = '' @@ -2001,7 +2001,7 @@ obs_param_nml(44)%flistname = '' obs_param_nml(44)%errstd = 4. obs_param_nml(44)%std_normal_max = 2.5 obs_param_nml(44)%zeromean = .true. -obs_param_nml(44)%coarsen_pert = .false. +obs_param_nml(44)%coarsen_pert = .true. obs_param_nml(44)%xcorr = 0.1875 obs_param_nml(44)%ycorr = 0.1875 obs_param_nml(44)%adapt = 0 @@ -2021,14 +2021,14 @@ obs_param_nml(45)%FOV_units = 'km' obs_param_nml(45)%assim = .false. obs_param_nml(45)%scale = .false. obs_param_nml(45)%getinnov = .false. 
-obs_param_nml(45)%RTM_ID = 2 +obs_param_nml(45)%RTM_ID = 4 obs_param_nml(45)%bias_Npar = 0 obs_param_nml(45)%bias_trel = 864000 obs_param_nml(45)%bias_tcut = 432000 obs_param_nml(45)%nodata = -9999. obs_param_nml(45)%varname = 'Tb' obs_param_nml(45)%units = 'K' -obs_param_nml(45)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB_E/' +obs_param_nml(45)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB_E/' obs_param_nml(45)%name = '' obs_param_nml(45)%maskpath = '' obs_param_nml(45)%maskname = '' @@ -2039,7 +2039,7 @@ obs_param_nml(45)%flistname = '' obs_param_nml(45)%errstd = 4. obs_param_nml(45)%std_normal_max = 2.5 obs_param_nml(45)%zeromean = .true. -obs_param_nml(45)%coarsen_pert = .false. +obs_param_nml(45)%coarsen_pert = .true. obs_param_nml(45)%xcorr = 0.1875 obs_param_nml(45)%ycorr = 0.1875 obs_param_nml(45)%adapt = 0 @@ -2059,14 +2059,14 @@ obs_param_nml(46)%FOV_units = 'km' obs_param_nml(46)%assim = .false. obs_param_nml(46)%scale = .false. obs_param_nml(46)%getinnov = .false. -obs_param_nml(46)%RTM_ID = 2 +obs_param_nml(46)%RTM_ID = 4 obs_param_nml(46)%bias_Npar = 0 obs_param_nml(46)%bias_trel = 864000 obs_param_nml(46)%bias_tcut = 432000 obs_param_nml(46)%nodata = -9999. obs_param_nml(46)%varname = 'Tb' obs_param_nml(46)%units = 'K' -obs_param_nml(46)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB_E/' +obs_param_nml(46)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB_E/' obs_param_nml(46)%name = '' obs_param_nml(46)%maskpath = '' obs_param_nml(46)%maskname = '' @@ -2077,7 +2077,7 @@ obs_param_nml(46)%flistname = '' obs_param_nml(46)%errstd = 4. obs_param_nml(46)%std_normal_max = 2.5 obs_param_nml(46)%zeromean = .true. -obs_param_nml(46)%coarsen_pert = .false. +obs_param_nml(46)%coarsen_pert = .true. 
obs_param_nml(46)%xcorr = 0.1875 obs_param_nml(46)%ycorr = 0.1875 obs_param_nml(46)%adapt = 0 @@ -2097,14 +2097,14 @@ obs_param_nml(47)%FOV_units = 'km' obs_param_nml(47)%assim = .false. obs_param_nml(47)%scale = .false. obs_param_nml(47)%getinnov = .false. -obs_param_nml(47)%RTM_ID = 2 +obs_param_nml(47)%RTM_ID = 4 obs_param_nml(47)%bias_Npar = 0 obs_param_nml(47)%bias_trel = 864000 obs_param_nml(47)%bias_tcut = 432000 obs_param_nml(47)%nodata = -9999. obs_param_nml(47)%varname = 'Tb' obs_param_nml(47)%units = 'K' -obs_param_nml(47)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB_E/' +obs_param_nml(47)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB_E/' obs_param_nml(47)%name = '' obs_param_nml(47)%maskpath = '' obs_param_nml(47)%maskname = '' @@ -2115,7 +2115,7 @@ obs_param_nml(47)%flistname = '' obs_param_nml(47)%errstd = 4. obs_param_nml(47)%std_normal_max = 2.5 obs_param_nml(47)%zeromean = .true. -obs_param_nml(47)%coarsen_pert = .false. +obs_param_nml(47)%coarsen_pert = .true. obs_param_nml(47)%xcorr = 0.1875 obs_param_nml(47)%ycorr = 0.1875 obs_param_nml(47)%adapt = 0 @@ -2135,14 +2135,14 @@ obs_param_nml(48)%FOV_units = 'km' obs_param_nml(48)%assim = .false. obs_param_nml(48)%scale = .false. obs_param_nml(48)%getinnov = .false. -obs_param_nml(48)%RTM_ID = 2 +obs_param_nml(48)%RTM_ID = 4 obs_param_nml(48)%bias_Npar = 0 obs_param_nml(48)%bias_trel = 864000 obs_param_nml(48)%bias_tcut = 432000 obs_param_nml(48)%nodata = -9999. obs_param_nml(48)%varname = 'Tb' obs_param_nml(48)%units = 'K' -obs_param_nml(48)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/L1C_TB_E/' +obs_param_nml(48)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB_E/' obs_param_nml(48)%name = '' obs_param_nml(48)%maskpath = '' obs_param_nml(48)%maskname = '' @@ -2153,7 +2153,7 @@ obs_param_nml(48)%flistname = '' obs_param_nml(48)%errstd = 4. obs_param_nml(48)%std_normal_max = 2.5 obs_param_nml(48)%zeromean = .true. 
-obs_param_nml(48)%coarsen_pert = .false. +obs_param_nml(48)%coarsen_pert = .true. obs_param_nml(48)%xcorr = 0.1875 obs_param_nml(48)%ycorr = 0.1875 obs_param_nml(48)%adapt = 0 @@ -2198,7 +2198,7 @@ obs_param_nml(49)%flistname = '' obs_param_nml(49)%errstd = 9. obs_param_nml(49)%std_normal_max = 2.5 obs_param_nml(49)%zeromean = .true. -obs_param_nml(49)%coarsen_pert = .false. +obs_param_nml(49)%coarsen_pert = .true. obs_param_nml(49)%xcorr = 0.25 obs_param_nml(49)%ycorr = 0.25 obs_param_nml(49)%adapt = 0 @@ -2237,7 +2237,7 @@ obs_param_nml(50)%flistname = '' obs_param_nml(50)%errstd = 9. obs_param_nml(50)%std_normal_max = 2.5 obs_param_nml(50)%zeromean = .true. -obs_param_nml(50)%coarsen_pert = .false. +obs_param_nml(50)%coarsen_pert = .true. obs_param_nml(50)%xcorr = 0.25 obs_param_nml(50)%ycorr = 0.25 obs_param_nml(50)%adapt = 0 @@ -2276,7 +2276,7 @@ obs_param_nml(51)%flistname = '' obs_param_nml(51)%errstd = 9. obs_param_nml(51)%std_normal_max = 2.5 obs_param_nml(51)%zeromean = .true. -obs_param_nml(51)%coarsen_pert = .false. +obs_param_nml(51)%coarsen_pert = .true. obs_param_nml(51)%xcorr = 0.25 obs_param_nml(51)%ycorr = 0.25 obs_param_nml(51)%adapt = 0 diff --git a/GEOSldas_App/LandAtmDAS_nml/LDASsa_SPECIAL_inputs_ensprop.nml b/GEOSldas_App/LandAtmDAS_nml/LDASsa_SPECIAL_inputs_ensprop.nml new file mode 100644 index 00000000..d76fef4c --- /dev/null +++ b/GEOSldas_App/LandAtmDAS_nml/LDASsa_SPECIAL_inputs_ensprop.nml @@ -0,0 +1,193 @@ +! +! namelist of EnKF inputs +! +! CUSTOM version for coupled land-atm DAS with soil moisture analysis based on SMAP Tb obs +! +! -------------------------------------------------------------------- + +&ens_prop_inputs + +progn_pert_dtstep = 10800 ! time step for generation of AR(1) prognostics perts [s] + +force_pert_dtstep = 10800 ! time step for generation of AR(1) forcing perts [s] + +! --------------------------------------------------------------------- +! +! forcing error (or forcing perturbation) parameters +! 
+ +descr_force_pert%pcp = 'pcp' +descr_force_pert%sw = 'sw' +descr_force_pert%lw = 'lw' +descr_force_pert%tair = 'tair' +descr_force_pert%qair = 'qair' +descr_force_pert%wind = 'wind' + +! specify whether forcing perturbations are additive or multiplicative +! +! additive: typ = 0. +! multiplicative and lognormal: typ = 1. +! + +typ_force_pert%pcp = 1. +typ_force_pert%sw = 1. +typ_force_pert%lw = 0. + + +std_force_pert%pcp = 0.5 ! units if additive: [kg/m2/s], if multiplicative: [fraction] +std_force_pert%sw = 0.3 ! units if additive: [W/m^2] , if multiplicative: [fraction] +std_force_pert%lw = 20. ! units if additive: [W/m^2] , if multiplicative: [fraction] + +! read error std-dev from file? (if .false., default values above apply) + +stdfromfile_force_pert%pcp = .false. +stdfromfile_force_pert%sw = .false. +stdfromfile_force_pert%lw = .false. + +! specify file name (with full path) that contains std-dev values + +stdfilename_force_pert = '' + +! enforce zero (sample) mean across ensemble? + +zeromean_force_pert%pcp = .true. +zeromean_force_pert%sw = .true. +zeromean_force_pert%lw = .true. + +! allow perturbations to be computed on coarsened grid? + +coarsen_force_pert%pcp = .true. +coarsen_force_pert%sw = .true. +coarsen_force_pert%lw = .true. + +! max perturbation relative to standard normal +! (limits on range of random numbers: specify max absolute value +! allowed to be drawn from a standard normal distribution) + +std_normal_max_force_pert%pcp = 3. +std_normal_max_force_pert%sw = 3. +std_normal_max_force_pert%lw = 2.5 + +! spatial correlation of forcing perturbations + +xcorr_force_pert%pcp = 0.5 ! [deg] +xcorr_force_pert%sw = 0.5 ! [deg] +xcorr_force_pert%lw = 0.5 ! [deg] + +ycorr_force_pert%pcp = 0.5 ! [deg] +ycorr_force_pert%sw = 0.5 ! [deg] +ycorr_force_pert%lw = 0.5 ! [deg] + +! temporal correlation of forcing perturbations + +tcorr_force_pert%pcp = 86400. ! [s] +tcorr_force_pert%sw = 86400. ! [s] +tcorr_force_pert%lw = 86400. ! [s] + +! 
correlation coefficients -1 <= rho <= 1 +! +! specify only essential information, the other side of off-diagonals and +! the diagonal will be filled in later (subroutines read_ens_prop_inputs +! and get_force_pert_inputs) + +ccorr_force_pert%pcp%sw = -.8 +ccorr_force_pert%pcp%lw = .5 + +ccorr_force_pert%sw%lw = -.5 + + +! --------------------------------------------------------------------- +! +! model error (or progn_pert) parameters +! +! the mean is computed according to "typ" for unbiased perturbations +! and not specified here + +! string that describes the prognostics to be perturbed +! (see subroutine apply_progn_pert() for details) + +descr_progn_pert%catdef = 'catdef' +descr_progn_pert%srfexc = 'srfexc' + + +! specify whether model error is additive or multiplicative +! +! additive: typ = 0. +! multiplicative and lognormal: typ = 1. +! +! real numbers are used so that "assemble_state()" can +! be used to assemble the model error parameters + +typ_progn_pert%catdef = 0. +typ_progn_pert%srfexc = 0. + + +! The perturbation (or error) std-dev can be specified as a spatially constant +! (default) value. Alternatively, perturbation std-dev values can be read from +! a netcdf-4 input file (where they may be spatially constant or distributed). +! See subroutines get_progn_pert_param() and get_force_pert_param(). +! +! Turn off all perturbations by setting std-dev values to zero and +! "stdfromfile" to false. +! +! Default, spatially homogeneous perturbations std-dev +! (used unless std-devs are read from file, see below) + +std_progn_pert%catdef = 0.24 ! units if additive: [kg/m2/HOUR], if multiplicative: [fraction/HOUR] +std_progn_pert%srfexc = 0.16 ! units if additive: [kg/m2/HOUR], if multiplicative: [fraction/HOUR] + + +! read error std-dev from file? (if .false., default values above apply) + +stdfromfile_progn_pert%catdef = .false. +stdfromfile_progn_pert%srfexc = .false. + +! 
specify file name (with full path) that contains std-dev values + +stdfilename_progn_pert = '' + +! enforce zero (sample) mean across ensemble? + +zeromean_progn_pert%catdef = .true. +zeromean_progn_pert%srfexc = .true. + +! allow perturbations to be computed on coarsened grid? + +coarsen_progn_pert%catdef = .false. +coarsen_progn_pert%srfexc = .false. + +! max perturbation relative to standard normal +! (limits on range of random numbers: specify max absolute value +! allowed to be drawn from a standard normal distribution) + +std_normal_max_progn_pert%catdef = 2.5 +std_normal_max_progn_pert%srfexc = 2.5 + +! model error spatial correlation [deg] +! (x runs east-west, y runs north-south) + +xcorr_progn_pert%catdef = 0.3 ! [deg] +xcorr_progn_pert%srfexc = 0.3 ! [deg] + +ycorr_progn_pert%catdef = 0.3 ! [deg] +ycorr_progn_pert%srfexc = 0.3 ! [deg] + +! model error temporal correlation [s] + +tcorr_progn_pert%catdef = 10800. ! [s] +tcorr_progn_pert%srfexc = 10800. ! [s] + +! correlation coefficients -1 <= rho <= 1 +! +! specify only essential information, the other side of off-diagonals and +! the diagonal will be filled in later (subroutines read_ens_prop_inputs +! and get_force_pert_inputs) +! +! (the default input list below was put together with matlab +! script create_ccorr_cat_progn_default.m) + +ccorr_progn_pert%catdef%srfexc = 0.0 + +/ + +! =========================== EOF ======================================= diff --git a/GEOSldas_App/LandAtmDAS_nml/LDASsa_SPECIAL_inputs_ensupd.nml b/GEOSldas_App/LandAtmDAS_nml/LDASsa_SPECIAL_inputs_ensupd.nml new file mode 100644 index 00000000..e8ecb12e --- /dev/null +++ b/GEOSldas_App/LandAtmDAS_nml/LDASsa_SPECIAL_inputs_ensupd.nml @@ -0,0 +1,404 @@ +! +! namelist of EnKF inputs for land EnKF update +! +! CUSTOM version for coupled land-atm DAS with soil moisture analysis based on SMAP Tb obs +! +! ---------------------------------------------------------------------- + +&ens_upd_inputs + +! 
---------------------------------------------------------------------- +! +! update type - for details see subroutine cat_enkf_update() +! (note: all 3d updates use compact support) +! +! local = "1d", regional = "3d" +! +! # = no longer supported +! +! update_type | analysis state vector | assimilated obs +! ------------------------------------------------------------------------------------------------- +! 0 | NO assimilation, NO bias correction | n/a +! # 1 | 1d soil moisture | sfmc +! # 2 | 3d soil moisture | sfmc +! 3 | 1d Tskin (assim incr NOT applied, use w/ bias corr) | Tskin +! 4 | 1d Tskin/ght1 (assim incr applied, use w/ or w/o bias corr) | Tskin +! 5 | 1d Tskin/ght1 (assim incr NOT applied, use w/ bias corr) | Tskin +! 6 | 1d soil moisture/Tskin/ght(1) | Tb +! 7 | 3d Tskin/ght1 update | Tskin +! 8 | 3d soil moisture/Tskin/ght(1) | Tb +! 9 | 1d Tskin/ght1 update | FT +! 10 | 3d soil moisture/Tskin/ght(1) excl. catdef unless PEATCLSM tile | Tb +! 11 | 1d snow analysis (Toure et al. 2018 empirical gain) | SCF +! 12 | 3d soil moisture/Tskin/ght(1) excl. catdef unless PEATCLSM tile | sfmc/sfds, Tb, SCF +! | & 1d snow analysis (Toure et al. 2018 empirical gain) | +! 13 | 3d soil moisture/Tskin/ght(1) excl. catdef unless PEATCLSM tile | sfmc/sfds, Tb + +update_type = 10 + +out_obslog = .true. +out_ObsFcstAna = .true. +out_smapL4SMaup = .false. + +! --------------------------------------------------------------------- +! +! Compact support parameters - for 3d updates +! +! All correlations vanish outside of an ellipse with semi-axes xcompact +! and ycompact + +xcompact = 1.25 ! [deg] longitude +ycompact = 1.25 ! [deg] latitude + +! --------------------------------------------------------------------- +! +! forecast error covariance inflation factor +! +! - assigns more weight to observations in analysis by inflating forecast error covariance +! - works on std-dev, i.e., var_inflated = var * inflation_fac**2 +! - typical values: 1 <= inflation_fac <= 1.5 +! 
- to turn off, set to any negative real number + +fcsterr_inflation_fac = -9999. + +! --------------------------------------------------------------------- +! +! Definition of measurement species and parameters +! +! NOTE: When additional types of measurements are included here, +! at least the following parameters and subroutines must be adapted: +! +! - N_obs_species_nml in clsm_ensupd_glob_param.f90 +! [- read_ens_upd_inputs()] +! [- collect_obs()] +! - read_obs() +! - get_obs_pred() +! - cat_enkf_update() +! +! +! Definition of obs_param_nml fields (see also enkf_types.F90): +! +! %descr = description +! %species = identifier for type of measurement +! %orbit = type of (half-)orbit +! 0 = n/a [eg., in situ obs] +! 1 = ascending +! 2 = descending +! 3 = ascending or descending +! 4 = geostationary +! %pol = polarization +! 0 = n/a [eg., multi-pol. retrieval] +! 1 = horizontal +! 2 = vertical +! 3 = ... +! %N_ang = # satellite viewing angles in species (radiance obs only) +! %ang = vector of satellite viewing angles +! %freq = frequency [Hz] +! %FOV = field-of-view *radius*, see NOTES below +! (if FOV==0. equate obs footprint w/ tile) +! %FOV_units = field-of-view units ('km' or 'deg'), see NOTES below +! %assim = Should this obs type be assimilated (state update)? (logical) +! %scale = Should this obs be scaled? (logical) +! %getinnov = Should innov be computed for this obs type (logical) +! (innovations are always computed if assim==.true.) +! %RTM_ID = ID of radiative transfer model to use for Tb forward modeling +! (subroutine get_obs_pred()) +! 0 = none +! 1 = L-band tau-omega model as in De Lannoy et al. 2013 (doi:10.1175/JHM-D-12-092.1) (SMOS) +! 2 = same as 1 but without Pellarin atm corr (SMAP) +! 3 = same as 1 but with Mironov and SMAP L2_SM pol mixing (SMOS) +! 4 = same as 3 but without Pellarin atm corr (targeted for SMAP L4_SM Version 8) +! %bias_Npar = number of obs bias states tracked per day (integer) +! 
%bias_trel = e-folding time scale of obs bias memory [s] +! %bias_tcut = cutoff time for confident obs bias estimate [s] +! %nodata = no-data-value +! %varname = equivalent model variable name (for "Obs_pred") +! %units = units (eg., 'K' or 'm3/m3') +! %path = path to measurement files +! %name = name identifier for file containing measurements +! %maskpath = path to obs mask file +! %maskname = filename for obs mask +! %scalepath = path to file(s) with scaling parameters +! %scalename = filename for scaling parameters +! %flistpath = path to file with list of obs file names +! %flistname = name of file with list of obs file names +! %errstd = default obs error std +! %std_normal_max = maximum allowed perturbation (relative to N(0,1)) +! %zeromean = enforce zero mean across ensemble +! %coarsen_pert = generate obs perturbations on coarser grid (see pert_param_type%coarsen) +! %xcorr = correlation length (deg) in longitude direction +! %ycorr = correlation length (deg) in latitude direction +! +! For observation perturbations, always use: +! +! tcorr = 0. (never temporally correlated) +! typ = 0 (always additive) +! ccorr = 0. (never cross-correlated) +! +! (these are specified in get_obs_pert_inputs() and not here) +! +! +! NOTES: +! +! Field-of-view (FOV) can be specified in units of [km] or [deg] lat/lon. +! Note the special case of FOV=0. below. +! If FOV is specified in units of [km], the FOV in units of [deg] lat/lon that +! is used to compute observation predictions will depend on latitude. +! If FOV is specified in units of [deg] lat/lon, its value remains constant and +! is independent of latitude. +! The choice of units also determines the shape function that is used to +! compute the observation predictions. +! Units of [km] are meant for observations that are based on relatively +! coarse-scale measurements (such as microwave data). The resolution of such obs +! in units of [km] is approximately constant across the globe and independent +! of latitude. 
Observation predictions are computed by averaging tile-based +! model forecasts out to a distance of fac_search_FOV_km*FOV using a Gaussian kernel, +! where fac_search_FOV_km=2.0 as of 28 March 2015. +! Specifically, the normalized square distance is defined as +! +! ndist2 = dx^2/FOV_x^2 + dy^2/FOV_y^2 +! +! where FOV_x and dx are the meridional FOV and the meridional distance between the obs +! and the tile (in units of deg lat/lon), with FOV_x proportional to 1/cos(lat). +! FOV_y and dy are the corresponding zonal values. +! The weights are then proportional to +! +! exp( -0.5*ndist2 ) +! +! The averaging is therefore over an ellipse in lat/lon space, with weights +! decreasing away from the center of the observation. +! A 2.0*FOV averaging footprint encapsulates about 91% of the power. A 1.0*FOV +! averaging footprint would encapsulate about 47% of the power. These numbers +! are meant to be approximately consistent with FOV numbers for microwave radiometers +! (see 3 Dec 2014 email from Ed Kim reproduced below). +! Note that weights are further adjusted based on tile area. +! Units of [deg] lat/lon are meant for observations that are based on +! relatively high-resolution measurements (such as infrared data). Such +! observations are often available on a lat/lon grid that is much coarser than +! the footprint of the underlying observations. The assimilated data product +! therefore has a resolution that varies with latitude. Observation predictions are +! computed by averaging over a constant kernel out to a distance of FOV. +! The averaging is therefore over a circle in lat/lon space, with weights that do not +! depend on the distance from the center of the observation. +! (Note that weights are further adjusted based on tile area.) +! If FOV=0., observation predictions are computed by assigning the model forecast +! associated with the tile to which the observation is formally assigned. +! 
This is useful if the resolution of the assimilated observations is higher +! than that of the model tile space. This might be the case for snow-cover-fraction +! observations. FOV=0 can also be useful for tile-based synthetic observations. +! +! +! ------------------------------------------------------------------------ +! +! Date: Wed, 3 Dec 2014 11:21:30 -0600 +! From: +! To: , +! Subject: FW: [SMAP] antenna pattern question +! +! Hi Rolf & Gabrielle, +! +! First, a little terminology: the weighted integral is what Level 1 folks call +! "beam efficiency". So, apparently, Steven is assuming the "-3dB beam efficiency" +! is ~50%. The calculated [SMAP] beam efficiency within the -3dB contour is +! 53.40% (v-pol), 53.83% (h-pol). +! If you draw the -3dB contour on the Earth's surface, for h-pol, 53.83% of the energy +! comes from inside the contour, and 100-53.83 = 46.17% comes from outside the contour. +! The accuracy of the 1/10 and 1/100 digits is questionable, anyway. +! So, if you used 53% for v-pol and 54% for h-pol, you should be fine. +! I guess this means Steven was not far off, if he is using "50%." +! This -3dB beam efficiency means we have significant energy coming from outside +! the 3dB footprint, which is the footprint we use to come up with the "40 km" footprint +! size number. +! And, this is why many folks who use microwave instruments prefer to use a contour that +! encloses a higher % of the beam energy as a better measure of the footprint size. +! One such measure is the "main beam efficiency (MBE)." This beamwidth is usually taken +! to be 2.5 times the 3dB beamwidth. The corresponding footprint size is then +! 2.5x 40km = 100km. +! The last calculation put the MBE at 89.23 for V-pol and 89.33 for H-pol. +! So, for h-pol, 89.33% of the energy comes from inside a 100km footprint, +! and 100-89.33 = 10.67% from outside. +! - Ed +! +! ------------------------------------------------------------------------ +! +! 
IMPORTANT: The number of measurement species defined below must *match* +! global parameter "N_obs_species_nml" +! +! Multi-angular observations (eg., SMOS) are defined as a single +! species here (in the nml file) and are later split into +! multiple species, each having a unique incidence angle +! (see subroutine read_ens_upd_inputs()) +! +! +! ------------------------------------------------------------------------ + + +! -------------------------------------------------------------------- +! +! SMAP L1C_TB brightness temperature (36 km EASE grid) +! +! "A" = ascending (6pm *SMAP* overpass) +! "D" = descending (6am *SMAP* overpass) +! +! "Tbh" = h-pol Tb +! "Tbv" = v-pol Tb +! +! ------------------- +! +! 31 = SMAP_L1C_Tbh_A + +obs_param_nml(31)%descr = 'SMAP_L1C_Tbh_A' +obs_param_nml(31)%orbit = 1 +obs_param_nml(31)%pol = 1 +obs_param_nml(31)%N_ang = 1 +obs_param_nml(31)%ang(1) = 40. +obs_param_nml(31)%freq = 1.41e9 +obs_param_nml(31)%FOV = 20. +obs_param_nml(31)%FOV_units = 'km' +obs_param_nml(31)%assim = .true. +obs_param_nml(31)%scale = .true. +obs_param_nml(31)%getinnov = .true. +obs_param_nml(31)%RTM_ID = 4 +obs_param_nml(31)%bias_Npar = 0 +obs_param_nml(31)%bias_trel = 864000 +obs_param_nml(31)%bias_tcut = 432000 +obs_param_nml(31)%nodata = -9999. +obs_param_nml(31)%varname = 'Tb' +obs_param_nml(31)%units = 'K' +obs_param_nml(31)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB/' +obs_param_nml(31)%name = '' +obs_param_nml(31)%maskpath = '' +obs_param_nml(31)%maskname = '' +obs_param_nml(31)%scalepath = '/discover/nobackup/projects/gmao/smap/LDAS_inputs_for_LADAS/scaling/SMAP_L1C_Tb_pentad/LADAS_v000/x6C_GLOBAL/' +obs_param_nml(31)%scalename = 'ScMO_SMAP__e24_zscore_stats_2015_p19_2020_p18_hscale_0.00_W_9p_Nmin_20' +obs_param_nml(31)%flistpath = '/discover/nobackup/projects/gmao/smap/LDAS_inputs_for_LADAS/obs/SMAP/L1C_TB_flist/SPL4SM_OL8000/' +obs_param_nml(31)%flistname = 'SMAP_L1C_TB_A_list.txt' +obs_param_nml(31)%errstd = 4. 
+obs_param_nml(31)%std_normal_max = 2.5 +obs_param_nml(31)%zeromean = .true. +obs_param_nml(31)%coarsen_pert = .true. +obs_param_nml(31)%xcorr = 0.25 +obs_param_nml(31)%ycorr = 0.25 +obs_param_nml(31)%adapt = 0 + +! ------------------- +! +! 32 = SMAP_L1C_Tbh_D + +obs_param_nml(32)%descr = 'SMAP_L1C_Tbh_D' +obs_param_nml(32)%orbit = 2 +obs_param_nml(32)%pol = 1 +obs_param_nml(32)%N_ang = 1 +obs_param_nml(32)%ang(1) = 40. +obs_param_nml(32)%freq = 1.41e9 +obs_param_nml(32)%FOV = 20. +obs_param_nml(32)%FOV_units = 'km' +obs_param_nml(32)%assim = .true. +obs_param_nml(32)%scale = .true. +obs_param_nml(32)%getinnov = .true. +obs_param_nml(32)%RTM_ID = 4 +obs_param_nml(32)%bias_Npar = 0 +obs_param_nml(32)%bias_trel = 864000 +obs_param_nml(32)%bias_tcut = 432000 +obs_param_nml(32)%nodata = -9999. +obs_param_nml(32)%varname = 'Tb' +obs_param_nml(32)%units = 'K' +obs_param_nml(32)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB/' +obs_param_nml(32)%name = '' +obs_param_nml(32)%maskpath = '' +obs_param_nml(32)%maskname = '' +obs_param_nml(32)%scalepath = '/discover/nobackup/projects/gmao/smap/LDAS_inputs_for_LADAS/scaling/SMAP_L1C_Tb_pentad/LADAS_v000/x6C_GLOBAL/' +obs_param_nml(32)%scalename = 'ScMO_SMAP__e24_zscore_stats_2015_p19_2020_p18_hscale_0.00_W_9p_Nmin_20' +obs_param_nml(32)%flistpath = '/discover/nobackup/projects/gmao/smap/LDAS_inputs_for_LADAS/obs/SMAP/L1C_TB_flist/SPL4SM_OL8000/' +obs_param_nml(32)%flistname = 'SMAP_L1C_TB_D_list.txt' +obs_param_nml(32)%errstd = 4. +obs_param_nml(32)%std_normal_max = 2.5 +obs_param_nml(32)%zeromean = .true. +obs_param_nml(32)%coarsen_pert = .true. +obs_param_nml(32)%xcorr = 0.25 +obs_param_nml(32)%ycorr = 0.25 +obs_param_nml(32)%adapt = 0 + +! ------------------- +! +! 33 = SMAP_L1C_Tbv_A + +obs_param_nml(33)%descr = 'SMAP_L1C_Tbv_A' +obs_param_nml(33)%orbit = 1 +obs_param_nml(33)%pol = 2 +obs_param_nml(33)%N_ang = 1 +obs_param_nml(33)%ang(1) = 40. 
+obs_param_nml(33)%freq = 1.41e9 +obs_param_nml(33)%FOV = 20. +obs_param_nml(33)%FOV_units = 'km' +obs_param_nml(33)%assim = .true. +obs_param_nml(33)%scale = .true. +obs_param_nml(33)%getinnov = .true. +obs_param_nml(33)%RTM_ID = 4 +obs_param_nml(33)%bias_Npar = 0 +obs_param_nml(33)%bias_trel = 864000 +obs_param_nml(33)%bias_tcut = 432000 +obs_param_nml(33)%nodata = -9999. +obs_param_nml(33)%varname = 'Tb' +obs_param_nml(33)%units = 'K' +obs_param_nml(33)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB/' +obs_param_nml(33)%name = '' +obs_param_nml(33)%maskpath = '' +obs_param_nml(33)%maskname = '' +obs_param_nml(33)%scalepath = '/discover/nobackup/projects/gmao/smap/LDAS_inputs_for_LADAS/scaling/SMAP_L1C_Tb_pentad/LADAS_v000/x6C_GLOBAL/' +obs_param_nml(33)%scalename = 'ScMO_SMAP__e24_zscore_stats_2015_p19_2020_p18_hscale_0.00_W_9p_Nmin_20' +obs_param_nml(33)%flistpath = '/discover/nobackup/projects/gmao/smap/LDAS_inputs_for_LADAS/obs/SMAP/L1C_TB_flist/SPL4SM_OL8000/' +obs_param_nml(33)%flistname = 'SMAP_L1C_TB_A_list.txt' +obs_param_nml(33)%errstd = 4. +obs_param_nml(33)%std_normal_max = 2.5 +obs_param_nml(33)%zeromean = .true. +obs_param_nml(33)%coarsen_pert = .true. +obs_param_nml(33)%xcorr = 0.25 +obs_param_nml(33)%ycorr = 0.25 +obs_param_nml(33)%adapt = 0 + +! ------------------- +! +! 34 = SMAP_L1C_Tbv_D + +obs_param_nml(34)%descr = 'SMAP_L1C_Tbv_D' +obs_param_nml(34)%orbit = 2 +obs_param_nml(34)%pol = 2 +obs_param_nml(34)%N_ang = 1 +obs_param_nml(34)%ang(1) = 40. +obs_param_nml(34)%freq = 1.41e9 +obs_param_nml(34)%FOV = 20. +obs_param_nml(34)%FOV_units = 'km' +obs_param_nml(34)%assim = .true. +obs_param_nml(34)%scale = .true. +obs_param_nml(34)%getinnov = .true. +obs_param_nml(34)%RTM_ID = 4 +obs_param_nml(34)%bias_Npar = 0 +obs_param_nml(34)%bias_trel = 864000 +obs_param_nml(34)%bias_tcut = 432000 +obs_param_nml(34)%nodata = -9999. 
+obs_param_nml(34)%varname = 'Tb' +obs_param_nml(34)%units = 'K' +obs_param_nml(34)%path = '/discover/nobackup/projects/gmao/smap/SMAP_L4/SMAP/OPS/L1C_TB/' +obs_param_nml(34)%name = '' +obs_param_nml(34)%maskpath = '' +obs_param_nml(34)%maskname = '' +obs_param_nml(34)%scalepath = '/discover/nobackup/projects/gmao/smap/LDAS_inputs_for_LADAS/scaling/SMAP_L1C_Tb_pentad/LADAS_v000/x6C_GLOBAL/' +obs_param_nml(34)%scalename = 'ScMO_SMAP__e24_zscore_stats_2015_p19_2020_p18_hscale_0.00_W_9p_Nmin_20' +obs_param_nml(34)%flistpath = '/discover/nobackup/projects/gmao/smap/LDAS_inputs_for_LADAS/obs/SMAP/L1C_TB_flist/SPL4SM_OL8000/' +obs_param_nml(34)%flistname = 'SMAP_L1C_TB_D_list.txt' +obs_param_nml(34)%errstd = 4. +obs_param_nml(34)%std_normal_max = 2.5 +obs_param_nml(34)%zeromean = .true. +obs_param_nml(34)%coarsen_pert = .true. +obs_param_nml(34)%xcorr = 0.25 +obs_param_nml(34)%ycorr = 0.25 +obs_param_nml(34)%adapt = 0 + + +! -------------------------------------------------------------------- + + +/ + +! 
=========================== EOF ======================================= diff --git a/GEOSldas_App/ldas_setup b/GEOSldas_App/ldas_setup index d1f332c8..845312b6 100755 --- a/GEOSldas_App/ldas_setup +++ b/GEOSldas_App/ldas_setup @@ -59,11 +59,11 @@ class LDASsetup: # ------ # Required resource manager input fields # ------ - rqdRmInpKeys = ['account', 'walltime', 'ntasks_model', 'ntasks-per-node'] + rqdRmInpKeys = ['account', 'walltime', 'ntasks_model'] # ------ # Optional resource manager input fields # ------ - optSlurmInpKeys = ['job_name', 'qos', 'oserver_nodes', 'writers-per-node'] + optSlurmInpKeys = ['job_name', 'qos', 'oserver_nodes', 'writers-per-node', 'constraint'] # =============================================================================================== @@ -111,9 +111,8 @@ class LDASsetup: self.agcm_res = cmdLineArgs['agcm_res'] self.bcs_version = cmdLineArgs['bcs_version'] self.rstloc = cmdLineArgs['rstloc'] - self.varwindow = cmdLineArgs['varwindow'] - self.LDAS_nml_path = cmdLineArgs['LDAS_nml_path'] - self.mwrtm_path = cmdLineArgs['mwrtm_path'] + self.varwindow = cmdLineArgs['varwindow'] + self.nens = cmdLineArgs['nens'] # obsolete command line args self.runmodel = cmdLineArgs['runmodel'] @@ -190,81 +189,79 @@ class LDASsetup: ### check if ldas is coupled to adas; if so, set/overwrite input parameters accordingly if self.ladas_cpl > 0 : - # make sure all necessary command line arguments were supplied - assert self.nymdb is not None, "Error. Must have command line arg nymdb for coupled land-atm DAS.\n" - assert self.nhmsb is not None, "Error. Must have command line arg nhmsb for coupled land-atm DAS.\n" - assert self.agcm_res is not None, "Error. Must have command line arg agcm_res for coupled land-atm DAS.\n" - assert self.bcs_version is not None, "Error. Must have command line arg bcs_version for coupled land-atm DAS.\n" - assert self.rstloc is not None, "Error. 
Must have command line arg rstloc for coupled land-atm DAS.\n" - assert self.varwindow is not None, "Error. Must have command line arg varwindow for coupled land-atm DAS.\n" - assert self.LDAS_nml_path is not None, "Error. Must have command line arg LDAS_nml_path for coupled land-atm DAS.\n" - assert self.mwrtm_path is not None, "Error. Must have command line arg mwrtm_path for coupled land-atm DAS.\n" + # make sure all necessary command line arguments were supplied + assert self.nymdb is not None, "Error. Must have command line arg nymdb for coupled land-atm DAS.\n" + assert self.nhmsb is not None, "Error. Must have command line arg nhmsb for coupled land-atm DAS.\n" + assert self.agcm_res is not None, "Error. Must have command line arg agcm_res for coupled land-atm DAS.\n" + assert self.bcs_version is not None, "Error. Must have command line arg bcs_version for coupled land-atm DAS.\n" + assert self.rstloc is not None, "Error. Must have command line arg rstloc for coupled land-atm DAS.\n" + assert self.varwindow is not None, "Error. Must have command line arg varwindow for coupled land-atm DAS.\n" + assert self.nens is not None, "Error. 
Must have command line arg nens for coupled land-atmensDAS.\n" - self.rqdExeInp['BEG_DATE'] = f"{self.nymdb} {self.nhmsb}" - rstloc_ = self.rstloc.rstrip('/') # remove trailing '/' - assert os.path.isdir(rstloc_) # make sure rstloc_ is a valid directory - self.rstloc = os.path.abspath(rstloc_) - self.rqdExeInp['RESTART_PATH'] = os.path.dirname( self.rstloc) - self.rqdExeInp['RESTART_ID'] = os.path.basename(self.rstloc) - - self.adas_expdir = os.path.dirname( self.exphome) - self.rqdExeInp['ADAS_EXPDIR'] = self.adas_expdir - self.adas_expid = os.path.basename(self.adas_expdir) - self.rqdExeInp['MET_TAG'] = self.adas_expid + '__bkg' - if self.ladas_cpl == 1 : - # ldas coupled with determistic component of adas - self.rqdExeInp['EXP_ID'] = self.adas_expid + '_LDAS' - self.rqdExeInp['MET_PATH'] = self.adas_expdir + '/recycle/holdpredout' - self.rqdExeInp['ENSEMBLE_FORCING'] = 'NO' - self.nens = 24 # for now, hardwire nens for atm det - elif self.ladas_cpl == 2 : - # ldas coupled with ensemble component of adas - self.rqdExeInp['EXP_ID'] = self.adas_expid + '_LDAS4ens' - self.rqdExeInp['MET_PATH'] = self.adas_expdir + '/atmens/mem' - self.rqdExeInp['ENSEMBLE_FORCING'] = 'YES' - self.nens = 32 # for now, hardwire nens for atm ens - else : - exit("Error. 
Unknown value of self.ladas_cpl.\n") - self.rqdExeInp['NUM_LDAS_ENSEMBLE'] = self.nens - self.first_ens_id = 1 - self.rqdExeInp['FIRST_ENS_ID'] = self.first_ens_id - self.rqdExeInp['EXP_DOMAIN'] = 'CF'+ self.agcm_res +'x6C_GLOBAL' + self.rqdExeInp['BEG_DATE'] = f"{self.nymdb} {self.nhmsb}" + rstloc_ = self.rstloc.rstrip('/') # remove trailing '/' + assert os.path.isdir(rstloc_) # make sure rstloc_ is a valid directory + self.rstloc = os.path.abspath(rstloc_) + self.rqdExeInp['RESTART_PATH'] = os.path.dirname( self.rstloc) + self.rqdExeInp['RESTART_ID'] = os.path.basename(self.rstloc) + self.adas_expdir = os.path.dirname( self.exphome) + self.rqdExeInp['ADAS_EXPDIR'] = self.adas_expdir + self.adas_expid = os.path.basename(self.adas_expdir) + self.rqdExeInp['MET_TAG'] = self.adas_expid + '__bkg' + + if self.ladas_cpl == 1 : + # ldas coupled with determistic component of ADAS + self.rqdExeInp['EXP_ID'] = self.adas_expid + '_LDAS' + self.rqdExeInp['MET_PATH'] = self.adas_expdir + '/recycle/holdpredout' + self.rqdExeInp['ENSEMBLE_FORCING'] = 'NO' + elif self.ladas_cpl == 2 : + # ldas coupled with ensemble component of ADAS + self.rqdExeInp['EXP_ID'] = self.adas_expid + '_LDAS4ens' + self.rqdExeInp['MET_PATH'] = self.adas_expdir + '/atmens/mem' + self.rqdExeInp['ENSEMBLE_FORCING'] = 'YES' + else : + exit("Error. 
Unknown value of self.ladas_cpl.\n") - # when coupled to ADAS, "BCS_PATH" EXCLUDE bcs version info - # hard-wired BCS_PATH for now - self.rqdExeInp['BCS_PATH'] = "/discover/nobackup/projects/gmao/bcs_shared/fvInput/ExtData/esm/tiles" - self.rqdExeInp['BCS_PATH'] = self.rqdExeInp['BCS_PATH'].rstrip('/') + '/' + self.bcs_version - if self.bcs_version == "Icarus-NLv3" : - self.rqdExeInp['BCS_PATH'] = self.rqdExeInp['BCS_PATH'] + '_new_layout' - self.rqdExeInp['BCS_RESOLUTION'] = 'CF'+ self.agcm_res +'x6C_CF' + self.agcm_res +'x6C' - self.rqdExeInp['RESTART_DOMAIN'] = 'CF'+ self.agcm_res +'x6C_GLOBAL' - - # the following are not in default rqdExeInp list; hardwire for now - self.rqdExeInp['LAND_ASSIM'] = "YES" - self.rqdExeInp['MET_HINTERP'] = 0 - self.landassim_dt = 10800 # seconds - # make sure ADAS analysis window [minutes] is multiple of LANDASSIM_DT [seconds] - if int(self.varwindow) % (self.landassim_dt/60) == 0 : - self.rqdExeInp['LANDASSIM_DT'] = self.landassim_dt - else : - exit("Error. 
LANDASSIM_DT is inconsistent with ADAS analysis window.\n") - self.rqdExeInp['LANDASSIM_T0'] = "013000" # HHMMSS - jsgmt1 = "00000000" - jsgmt2 = hours_to_hhmmss(int(self.varwindow)/60) # convert minutes to HHMMSS - self.rqdExeInp['JOB_SGMT'] = f"{jsgmt1} {jsgmt2}" - self.rqdExeInp['NUM_SGMT'] = 1 - self.rqdExeInp['FORCE_DTSTEP'] = 3600 - - # determine END_DATE = BEG_DATE + TIME_STEP_OF_ADAS_CYCLE - _beg_date = datetime.strptime( self.rqdExeInp['BEG_DATE'], "%Y%m%d %H%M%S") - _hours = int(self.rqdExeInp['JOB_SGMT'][ 9:11]) - _end_date = _beg_date + timedelta(hours=int(self.varwindow)/60) - - self.rqdExeInp['END_DATE'] = _end_date.strftime("%Y%m%d %H%M%S") - self.rqdExeInp['NML_INPUT_PATH'] = self.LDAS_nml_path - self.rqdExeInp['MWRTM_PATH'] = self.mwrtm_path + self.rqdExeInp['NUM_LDAS_ENSEMBLE'] = self.nens # fvsetup finds Nens by counting restart files + self.first_ens_id = 1 # match ADAS convention + self.rqdExeInp['FIRST_ENS_ID'] = self.first_ens_id + + self.agcm_res = 'CF' + self.agcm_res # change format to "CFnnnn" + self.rqdExeInp['EXP_DOMAIN'] = self.agcm_res +'x6C_GLOBAL' - # end if self.ladas_cpl > 0 + # when coupled to ADAS, "BCS_PATH" EXCLUDE bcs version info + # hard-wired BCS_PATH for now + self.rqdExeInp['BCS_PATH'] = "/discover/nobackup/projects/gmao/bcs_shared/fvInput/ExtData/esm/tiles" + self.rqdExeInp['BCS_PATH'] = self.rqdExeInp['BCS_PATH'].rstrip('/') + '/' + self.bcs_version + if self.bcs_version == "Icarus-NLv3" : + self.rqdExeInp['BCS_PATH'] = self.rqdExeInp['BCS_PATH'] + '_new_layout' + self.rqdExeInp['BCS_RESOLUTION'] = self.agcm_res +'x6C_' + self.agcm_res +'x6C' + self.rqdExeInp['RESTART_DOMAIN'] = self.agcm_res +'x6C_GLOBAL' + + # the following are not in default rqdExeInp list; hardwire for now + self.rqdExeInp['MWRTM_PATH'] = '/discover/nobackup/projects/gmao/smap/LDAS_inputs_for_LADAS/RTM_params/RTMParam_SMAP_L4SM_v006/' + self.rqdExeInp['LAND_ASSIM'] = "YES" + self.rqdExeInp['MET_HINTERP'] = 0 + self.landassim_dt = 10800 # seconds + 
# make sure ADAS analysis window [minutes] is multiple of LANDASSIM_DT [seconds] + if int(self.varwindow) % (self.landassim_dt/60) == 0 : + self.rqdExeInp['LANDASSIM_DT'] = self.landassim_dt + else : + exit("Error. LANDASSIM_DT is inconsistent with ADAS analysis window.\n") + self.rqdExeInp['LANDASSIM_T0'] = "013000" # HHMMSS + jsgmt1 = "00000000" + jsgmt2 = hours_to_hhmmss(int(self.varwindow)/60) # convert minutes to HHMMSS + self.rqdExeInp['JOB_SGMT'] = f"{jsgmt1} {jsgmt2}" + self.rqdExeInp['NUM_SGMT'] = 1 + self.rqdExeInp['FORCE_DTSTEP'] = 3600 + + # determine END_DATE = BEG_DATE + TIME_STEP_OF_ADAS_CYCLE + _beg_date = datetime.strptime( self.rqdExeInp['BEG_DATE'], "%Y%m%d %H%M%S") + _hours = int(self.rqdExeInp['JOB_SGMT'][ 9:11]) + _end_date = _beg_date + timedelta(hours=int(self.varwindow)/60) + self.rqdExeInp['END_DATE'] = _end_date.strftime("%Y%m%d %H%M%S") + + # end if self.ladas_cpl > 0 ----------------------------------------------------------------------------------------- for key in rqdExeInpKeys : assert key in self.rqdExeInp,' "%s" is required in the input file %s' % (key,self.exeinpfile) @@ -466,12 +463,15 @@ class LDASsetup: extension = os.path.splitext(tmptile)[1] if extension == '.domain': extension = os.path.splitext(tmptile)[0] - + gridname_ ='' if extension == '.til': - self.rqdExeInp['GRIDNAME'] = linecache.getline(tmptile, 3).strip() + gridname_ = linecache.getline(tmptile, 3).strip() else: nc_file = netCDF4.Dataset(tmptile,'r') - self.rqdExeInp['GRIDNAME'] = nc_file.getncattr('Grid_Name') + gridname_ = nc_file.getncattr('Grid_Name') + # in case it is an old name: SMAP-EASEvx-Mxx + gridname_ = gridname_.replace('SMAP-','').replace('-M','_M') + self.rqdExeInp['GRIDNAME'] = gridname_ if 'LSM_CHOICE' not in self.rqdExeInp: self.rqdExeInp['LSM_CHOICE'] = 1 @@ -613,8 +613,6 @@ class LDASsetup: self.rqdRmInp['account'] = cmdLineArgs['account'] self.rqdRmInp['walltime'] = "01:00:00" self.rqdRmInp['ntasks_model'] = 120 - 
self.rqdRmInp['ntasks-per-node'] = 46 # 46 works on Cascade Lake and Milan - # print rm inputs @@ -632,36 +630,27 @@ class LDASsetup: # exefyl # ------ - self.bindir = os.path.dirname(os.path.realpath(__file__)) - self.blddir = self.bindir.rsplit('/',1)[0] - exefyl = '/bin/GEOSldas.x' - tmp_execfyl= self.blddir+exefyl + self.bindir = os.path.dirname(os.path.realpath(__file__)) + self.blddir = self.bindir.rsplit('/',1)[0] + exefyl = '/bin/GEOSldas.x' + tmp_execfyl = self.blddir + exefyl assert os.path.isfile(tmp_execfyl),\ 'Executable [%s] does not exist!' % tmp_execfyl - self.expdir = self.exphome + '/' + self.rqdExeInp['EXP_ID'] - self.rundir = self.expdir + '/run' - self.inpdir = self.expdir + '/input' - self.outdir = self.expdir + '/output' - self.scratchdir = self.expdir + '/scratch' - self.blddirLn = self.expdir + '/build' - self.out_path = self.outdir+'/'+self.rqdExeInp['EXP_DOMAIN'] - self.bcsdir = self.outdir+'/'+self.rqdExeInp['EXP_DOMAIN']+'/rc_out/' - self.rstdir = self.outdir+'/'+self.rqdExeInp['EXP_DOMAIN']+'/rs/' - self.exefyl = self.blddirLn+exefyl - - my_ntasks_per_node = int(self.rqdRmInp['ntasks-per-node']) - - # default number of nodes - my_nodes = self.rqdRmInp['ntasks_model'] // my_ntasks_per_node - if self.rqdRmInp['ntasks_model'] % my_ntasks_per_node > 0 : - my_nodes = my_nodes + 1 + self.expdir = self.exphome + '/' + self.rqdExeInp['EXP_ID'] + self.rundir = self.expdir + '/run' + self.inpdir = self.expdir + '/input' + self.outdir = self.expdir + '/output' + self.scratchdir = self.expdir + '/scratch' + self.blddirLn = self.expdir + '/build' + self.out_path = self.outdir + '/'+self.rqdExeInp['EXP_DOMAIN'] + self.bcsdir = self.outdir + '/'+self.rqdExeInp['EXP_DOMAIN']+'/rc_out/' + self.rstdir = self.outdir + '/'+self.rqdExeInp['EXP_DOMAIN']+'/rs/' + self.exefyl = self.blddirLn + exefyl # default is set to 0 ( no output server) if 'oserver_nodes' not in self.optRmInp : self.optRmInp['oserver_nodes'] = 0 - self.optRmInp['nodes'] = my_nodes + 
int(self.optRmInp['oserver_nodes']) - if (int(self.optRmInp['oserver_nodes']) >=1) : self.rqdExeInp['WRITE_RESTART_BY_OSERVER'] = "YES" # set default for now @@ -1238,17 +1227,22 @@ class LDASsetup: for nmlfile in default_nml: shortfile=self.rundir+'/'+nmlfile.split('/')[-1] shutil.copy2(nmlfile, shortfile) - # special nml + # special nml special_nml=[] - if 'NML_INPUT_PATH' in self.rqdExeInp : - special_nml = glob.glob(self.rqdExeInp['NML_INPUT_PATH']+'/LDASsa_SPECIAL_inputs_*.nml') - if self.ladas_cpl > 0: - special_nml = glob.glob(self.rqdExeInp['NML_INPUT_PATH']+'/'+ self.rqdExeInp['EXP_DOMAIN'] + '/LDASsa_SPECIAL_inputs_*.nml') - + if self.ladas_cpl > 0: + special_nml= glob.glob(etcdir+'/LDASsa_SPECIAL_inputs_*.nml') + else : + if 'NML_INPUT_PATH' in self.rqdExeInp : + special_nml = glob.glob(self.rqdExeInp['NML_INPUT_PATH']+'/LDASsa_SPECIAL_inputs_*.nml') + for nmlfile in special_nml: - shortfile=nmlfile.split('/')[-1] - shutil.copy2(nmlfile, self.rundir+'/'+shortfile) + shortfile=self.rundir+'/'+nmlfile.split('/')[-1] + shutil.copy2(nmlfile, shortfile) + if self.ladas_cpl > 0: + # edit resolution info in ensupd nml file + sp.run(['sed', '-i', 's//'+self.agcm_res+'/g', self.rundir+'/LDASsa_SPECIAL_inputs_ensupd.nml']) + # get optimzed NX and IMS optimized_distribution_file = tempfile.NamedTemporaryFile(delete=False) print ("Optimizing... decomposition of processes.... 
\n") @@ -1289,12 +1283,18 @@ class LDASsetup: shutil.copy2(histrc_file,tmprcfile) else : shutil.copy2(histrc_file,tmprcfile) - GRID='EASE ' + self.rqdExeInp['GRIDNAME'] + ' ' +tmprcfile - if '-CF' in self.rqdExeInp['GRIDNAME'] : - GRID ='CUBE ' + self.rqdExeInp['GRIDNAME'] + ' ' +tmprcfile - _assim = '1' if self.assim else '0' - cmd =self.bindir +'/process_hist.csh '+ str(self.rqdExeInp['LSM_CHOICE']) + ' ' + str(self.rqdExeInp['AEROSOL_DEPOSITION']) + \ - ' ' + GRID + ' ' + str(self.rqdExeInp['RUN_IRRIG']) + ' ' + _assim + ' '+ str(self.nens) + if 'EASE' in self.rqdExeInp['GRIDNAME'] : + TMPSTR='OUT1d' + else : + TMPSTR='OUT2d' + cmd = self.bindir +'/process_hist.csh' + ' ' \ + + tmprcfile + ' ' \ + + TMPSTR + ' ' \ + + self.rqdExeInp['GRIDNAME'] + ' ' \ + + str(self.rqdExeInp['LSM_CHOICE']) + ' ' \ + + str(self.rqdExeInp['AEROSOL_DEPOSITION']) + ' ' \ + + str(self.rqdExeInp['RUN_IRRIG']) + ' ' \ + + str(self.nens) print(cmd) #os.system(cmd) sp.call(shlex.split(cmd)) @@ -1449,6 +1449,14 @@ class LDASsetup: valn = 'landpert'+tmpl_+'_internal_checkpoint' ldasrcInp[keyn]= valn + # add items for stretched grid + if '-SG' in self.rqdExeInp['BCS_RESOLUTION']: + pos_ = self.rqdExeInp['BCS_RESOLUTION'].find('-SG') + SG = self.rqdExeInp['BCS_RESOLUTION'][pos_+1:pos_+6] # get ID of stretched grid (e.g., SG002) + ldasrcInp['STRETCH_FACTOR'] = STRETCH_GRID[SG][0] + ldasrcInp['TARGET_LAT'] = STRETCH_GRID[SG][1] + ldasrcInp['TARGET_LON'] = STRETCH_GRID[SG][2] + # write LDAS.rc fout =open(self.rundir+'/'+shortfile,'w') # ldasrcInp['NUM_LDAS_ENSEMBLE']=ldasrcInp.pop('NUM_ENSEMBLE') @@ -1571,15 +1579,17 @@ class LDASsetup: '.'.join([expid, 'ldas_err', myDateTime, 'txt']), ]), self.rundir) - constraint='cas' - if self.GEOS_SITE == "NAS": - constraint = 'cas_ait' - elif self.GEOS_SITE == "NCCS": - constraint = '"[mil|cas]"' + + constraint = '"[mil|cas]"' + if self.GEOS_SITE == "NAS" : + constraint = 'cas_ait' + + if 'constraint' in self.optRmInp: + constraint = 
self.optRmInp['constraint'] SBATCHQSUB = 'sbatch' if self.GEOS_SITE == 'NAS': - SBATCHQSUB = 'qsub' + SBATCHQSUB = 'qsub' DETECTED_MPI_STACK = "@MPI_STACK@" @@ -1588,8 +1598,7 @@ class LDASsetup: SBATCHQSUB = SBATCHQSUB, MY_ACCOUNT = self.rqdRmInp['account'], MY_WALLTIME = self.rqdRmInp['walltime'], - MY_NODES = str(self.optRmInp['nodes']), - MY_NTASKS_PER_NODE = str(self.rqdRmInp['ntasks-per-node']), + MY_NTASKS_MODEL = str(self.rqdRmInp['ntasks_model']), MY_CONSTRAINT = constraint, MY_OSERVER_NODES = str(self.optRmInp['oserver_nodes']), MY_WRITERS_NPES = str(self.optRmInp['writers-per-node']), @@ -1848,9 +1857,6 @@ def _printRmInputKeys(rqdRmInpKeys, optRmInpKeys): print ('# [At NCCS: Use command "getsponsor" to see available account number(s).]' ) print ('# - walltime = walltime requested; format is HH:MM:SS (hours/minutes/seconds)') print ('# - ntasks_model = number of processors requested for the model (typically 126; output server is not included)') - print ('# - ntasks-per-node = number of tasks per node (typically 46 for Cascade Lake and 126 for Milan)') - print ('# [If >46, Milan nodes will be allocated, else Cascade Lake or Milan.]') - print ('# [NCCS recommends <=46 for Cascade Lake and <=126 for Milan.]') print ('#') for key in rqdRmInpKeys: print (key + ':') @@ -1861,10 +1867,11 @@ def _printRmInputKeys(rqdRmInpKeys, optRmInpKeys): print ('# NOTE:') print ('# - job_name = name of experiment; default is "exp_id"') print ('# - qos = quality-of-service; do not specify by default; specify "debug" for faster but limited service.') - print ('# - oserver_nodes = number of nodes for oserver ( default is 0 )') - print ('# - writers-per-node = tasks per oserver_node for writing ( default is 5 ),') + print ('# - oserver_nodes = number of nodes for oserver ( default is 0, for future use )') + print ('# - writers-per-node = tasks per oserver_node for writing ( default is 5, for future use ),') print ('# IMPORTANT REQUIREMENT: total #writers = writers-per-node * 
oserver_nodes >= 2') print ('# Jobs will hang when oserver_nodes = writers-per-node = 1.') + print ('# - constraint = name of chip set(s) (NCCS default is "[mil|cas]", NAS default is "cas_ait").') print ('#') for key in optRmInpKeys: print ('#'+key + ':') @@ -1960,7 +1967,7 @@ def parseCmdLine(): ) p_setup.add_argument( '--agcm_res', - help='AGCM resolution associated with boundary conditions: xxxx (4 digits). Required when exeinp is dummy', + help='AGCM resolution associated with boundary conditions: nnnn (4 digits). Required when exeinp is dummy', type=str ) p_setup.add_argument( @@ -1979,16 +1986,10 @@ def parseCmdLine(): type=str ) p_setup.add_argument( - '--LDAS_nml_path', - help='path to LDAS (special) nml input files. Required when exeinp is dummy', + '--nens', + help='number of ensemble members. Required when exeinp is dummy', type=str - ) - p_setup.add_argument( - '--mwrtm_path', - help='path to LDAS L-band microwave radiative transfer model (mwrtm) parameters. Required when exeinp is dummy', - type=str - ) - + ) # obsolete command line args p_setup.add_argument( diff --git a/GEOSldas_App/lenkf_j_template.py b/GEOSldas_App/lenkf_j_template.py index a6233d52..0ed56fd0 100644 --- a/GEOSldas_App/lenkf_j_template.py +++ b/GEOSldas_App/lenkf_j_template.py @@ -12,7 +12,7 @@ #SBATCH --error={MY_EXPDIR}/scratch/GEOSldas_err_txt #SBATCH --account={MY_ACCOUNT} #SBATCH --time={MY_WALLTIME} -#SBATCH --nodes={MY_NODES} --ntasks-per-node={MY_NTASKS_PER_NODE} +#SBATCH --ntasks={MY_NTASKS_MODEL} #SBATCH --job-name={MY_JOB} #SBATCH --qos={MY_QOS} #SBATCH --constraint={MY_CONSTRAINT} @@ -63,6 +63,8 @@ source $GEOSBIN/g5_modules +setenv BASEBIN ${{BASEDIR}}/Linux/bin + setenv MPI_STACK {DETECTED_MPI_STACK} if ( ${{MPI_STACK}} == "openmpi" ) then @@ -103,8 +105,6 @@ # reversed sequence for LADAS_COUPLING (Sep 2020) (needed when coupling with ADAS using different BASEDIR) setenv LD_LIBRARY_PATH ${{BASEDIR}}/${{ARCH}}/lib:${{ESMADIR}}/lib:${{LD_LIBRARY_PATH}} -module load nco - 
setenv RUN_CMD "$GEOSBIN/esma_mpirun -np " ####################################################################### @@ -602,9 +602,9 @@ sed -i -e "s/NT/$LEN_SUB/g" timestamp.cdl sed -i -e "s/DATAVALUES/$tstep2/g" timestamp.cdl - ncgen -k4 -o timestamp.nc4 timestamp.cdl - ncrcat -h $EXPID.$ThisCol.${{YYYY}}${{MM}}${{DD}}_* ${{EXPID}}.${{ThisCol}}.$YYYY$MM$DD.nc4 - ncks -4 -h -v time_stamp timestamp.nc4 -A ${{EXPID}}.${{ThisCol}}.$YYYY$MM$DD.nc4 + $BASEBIN/ncgen -k4 -o timestamp.nc4 timestamp.cdl + $BASEBIN/ncrcat -h $EXPID.$ThisCol.${{YYYY}}${{MM}}${{DD}}_* ${{EXPID}}.${{ThisCol}}.$YYYY$MM$DD.nc4 + $BASEBIN/ncks -4 -h -v time_stamp timestamp.nc4 -A ${{EXPID}}.${{ThisCol}}.$YYYY$MM$DD.nc4 /bin/rm timestamp.cdl /bin/rm timestamp.nc4 # rudimentary check for desired nc4 file; if ok, delete sub-daily files @@ -646,7 +646,7 @@ if($NAVAIL != $NDAYS) continue # create monthly-mean nc4 file - ncra -h $EXPID.$ThisCol.${{YYYY}}${{MM}}*.nc4 ${{EXPID}}.${{ThisCol}}.monthly.$YYYY$MM.nc4 + $BASEBIN/ncra -h $EXPID.$ThisCol.${{YYYY}}${{MM}}*.nc4 ${{EXPID}}.${{ThisCol}}.monthly.$YYYY$MM.nc4 if($POSTPROC_HIST == 2) then # rudimentary check for desired nc4 file; if ok, delete daily files @@ -745,7 +745,7 @@ set tmp_file = $EXPDIR/output/$EXPDOMAIN/rs/$ENSDIR/Y${{eYEAR}}/M${{eMON}}/${{EXPID}}.${{rstf}}_internal_rst.${{eYEAR}}${{eMON}}${{eDAY}}_${{eHour}}${{eMin}} # copy generic restart file to final location/name but remove lat/lon variables # (lat/lon variables are not correct when running in EASE-grid tile space) - ncks -4 -O -C -x -v lat,lon ${{rstf}}${{ENSID}}_internal_checkpoint $tmp_file + $BASEBIN/ncks -4 -O -C -x -v lat,lon ${{rstf}}${{ENSID}}_internal_checkpoint $tmp_file /bin/rm -f ${{rstf}}${{ENSID}}_internal_checkpoint set old_rst = `/usr/bin/readlink -f $EXPDIR/input/restart/${{rstf}}${{ENSID}}_internal_rst` /bin/rm -f $EXPDIR/input/restart/${{rstf}}${{ENSID}}_internal_rst @@ -777,7 +777,7 @@ set TM = `echo $ThisTime | cut -c5-6` set THISDIR = 
$EXPDIR/output/$EXPDOMAIN/rs/$ENSDIR/Y${{TY}}/M${{TM}}/ if (! -e $THISDIR ) mkdir -p $THISDIR - (ncks -4 -O -C -x -v lat,lon $rfile ${{THISDIR}}${{EXPID}}.landpert_internal_rst.${{ThisTime}}.nc4;\\ + ($BASEBIN/ncks -4 -O -C -x -v lat,lon $rfile ${{THISDIR}}${{EXPID}}.landpert_internal_rst.${{ThisTime}}.nc4;\\ /usr/bin/gzip ${{THISDIR}}${{EXPID}}.landpert_internal_rst.${{ThisTime}}.nc4; \\ /bin/rm -f $rfile) & end diff --git a/GEOSldas_App/preprocess_ldas_routines.F90 b/GEOSldas_App/preprocess_ldas_routines.F90 index 382a549d..738bbe06 100644 --- a/GEOSldas_App/preprocess_ldas_routines.F90 +++ b/GEOSldas_App/preprocess_ldas_routines.F90 @@ -2402,11 +2402,17 @@ subroutine optimize_latlon(fname_tilefile, N_proc_string, optimized_file, run_di call ldas_abort(LDAS_GENERIC_ERROR, Iam, err_msg) end if + if (index(gridname, 'SMAP') /=0 ) then + gridname = trim(adjustl(gridname)) + gridname = gridname(6:) + gridname(7:7) = '_' ! replace '-' with '_' + endif + open(10,file=optimized_file, action='write') - write(10,'(A)') "GEOSldas.GRID_TYPE: LatLon" + write(10,'(A)') "GEOSldas.GRID_TYPE: EASE" write(10,'(A)') "GEOSldas.GRIDNAME: "//trim(gridname) write(10,'(A)') "GEOSldas.LM: 1" - write(10,'(A)') "GEOSldas.POLE: PE" + write(10,'(A)') "GEOSldas.POLE: XY" write(10,'(A)') "GEOSldas.DATELINE: DE" write(10,'(A,I6)') "GEOSldas.IM_WORLD: ", N_lon write(10,'(A,I6)') "GEOSldas.JM_WORLD: ", N_lat diff --git a/GEOSldas_App/process_hist.csh b/GEOSldas_App/process_hist.csh index 4c6516ff..6156e736 100644 --- a/GEOSldas_App/process_hist.csh +++ b/GEOSldas_App/process_hist.csh @@ -1,56 +1,74 @@ #!/bin/csh -f -## I am changed the CUBE/EASE logic -## if CUBE we produce 2D -## anything else, SMAP and other offline grids we produce tile space - -setenv LSM_CHOICE $1 -setenv AEROSOL_DEPOSITION $2 -setenv GRID $3 -setenv GRIDNAME $4 -setenv HISTRC $5 -setenv RUN_IRRIG $6 -setenv ASSIM $7 -setenv NENS $8 +# process GEOSldas_HIST.rc (=$HISTRC) template +# +# - turn on/off HIST collections 
depending on tile space +# - EASE: turn on tile-space (1d) output +# - otherwise: turn on gridded (2d) output +# - turn on/off output variables depending on LSM_CHOICE, AEROSOL_DEPOSITION, and RUN_IRRIG +# - fill in source 'GridComp' info for variables depending on NENS + +# process command line args + +setenv HISTRC $1 # file name of HIST rc template (GEOSldas_HIST.rc) +setenv OUTxd $2 # "OUT1d" or "OUT2d" (to turn on/off collections) +setenv GRIDNAME "'$3'" # full name of grid associated with tile space +setenv LSM_CHOICE $4 +setenv AEROSOL_DEPOSITION $5 +setenv RUN_IRRIG $6 +setenv NENS $7 + +# ------------------------------------------------- echo $GRIDNAME -if($ASSIM == 1) then - sed -i 's|\#ASSIM|''|g' $HISTRC - sed -i '/^\#EASE/d' $HISTRC - sed -i '/^\#CUBE/d' $HISTRC -else - sed -i '/^\#ASSIM/d' $HISTRC -endif +# uncomment 2d or 1d collections, depending on "OUT1d" (EASE tile space) or "OUT2d" (non-EASE tile space) -if($GRID == CUBE) then - sed -i '/^\#EASE/d' $HISTRC - sed -i 's|\#CUBE|''|g' $HISTRC - sed -i 's|GRIDNAME|'"$GRIDNAME"'|g' $HISTRC +if($OUTxd == OUT1d) then + sed -i 's|\#OUT1d|''|g' $HISTRC else - sed -i '/^\#CUBE/d' $HISTRC - sed -i 's|\#EASE|''|g' $HISTRC - sed -i 's|GRIDNAME|'"$GRIDNAME"'|g' $HISTRC + sed -i 's|\#OUT2d|''|g' $HISTRC endif +# fill in name of grid associated with tile space + +sed -i -e s/\'GRIDNAME\'/$GRIDNAME/g $HISTRC + +# set 'GridComp' based on LSM_CHOICE; +# turn on/off variables associated with CATCHCN, AEROSOL_DEPOSITION, RUN_IRRIG + if($LSM_CHOICE == 1) then set GridComp = CATCH - sed -i '/^>>>HIST_CATCHCN<<>>HIST_CATCHCNCLM45<<>>HIST_CATCHCN<<>>HIST_CATCHCNCLM45<<>>HIST_CATCHCNCLM45<<>>HIST_CATCHCN<<>>HIST_CATCHCNCLM45<<>>HIST_CATCHCN<<>>HIST_CATCHCN<<>>HIST_CATCHCN<<>>HIST_CATCHCNCLM45<<>>HIST_AEROSOL<<>>HIST_AEROSOL<<>>HIST_IRRIG<<>>HIST_IRRIG<< 1) then set GridComp = ENSAVG sed -i 's|VEGDYN|'VEGDYN_e0000'|g' $HISTRC @@ -63,16 +81,6 @@ if($NENS > 1) then # sed -i 's|DATAATM|'DATAATM0000'|g' $HISTRC endif -sed -i 
's|GridComp|'$GridComp'|g' $HISTRC +# fill in source 'GridComp' information for output variables -if($AEROSOL_DEPOSITION == 0) then - sed -i '/^>>>HIST_AEROSOL<<>>HIST_AEROSOL<<>>HIST_IRRIG<<>>HIST_IRRIG<< out.log & + +Authors: Q. Liu, R. Reichle, A. Fox +Last Modified: June 2025 +""" + +import sys; sys.path.append('../../shared/python/') +import warnings; warnings.filterwarnings("ignore") +import os + +import numpy as np + +from datetime import timedelta +from postproc_ObsFcstAna import postproc_ObsFcstAna +from user_config import get_config # user-defined config info + +# --- +# +# If the script is run in the background, uncomment the following lines to see the redirected +# standard output in the out.log file immediately. When the lines are commented out, the redirected +# standard output will not appear in the out.log file until the job has completed. +# If the script is run in the foreground, the lines must be commented out. +# +#import io +#sys.stdout = io.TextIOWrapper(open(sys.stdout.fileno(), 'wb', 0), write_through=True) +#sys.stderr = io.TextIOWrapper(open(sys.stderr.fileno(), 'wb', 0), write_through=True) +# +# --- + +def main(): + + config = get_config() # edit user-defined inputs in user_config.py + + exp_list = config['exp_list'] + start_time = config['start_time'] + end_time = config['end_time'] + sum_path = config['sum_path'] + out_path = config['out_path'] + + # ------------------------------------------------------------------------------------ + # Postprocess raw ObsFcstAna output data into monthly sums + + # Initialize the postprocessing object + postproc = postproc_ObsFcstAna(exp_list, start_time, end_time, sum_path=sum_path) + + # Compute and save monthly sums + postproc.save_monthly_sums() + +if __name__ == '__main__': + main() + +# ====================== EOF ========================================================= diff --git a/GEOSldas_App/util/postproc/ObsFcstAna_stats/Plot_stats_maps.py 
b/GEOSldas_App/util/postproc/ObsFcstAna_stats/Plot_stats_maps.py new file mode 100644 index 00000000..18a4c09e --- /dev/null +++ b/GEOSldas_App/util/postproc/ObsFcstAna_stats/Plot_stats_maps.py @@ -0,0 +1,224 @@ +#!/usr/bin/env python3 + +""" +Sample script for plotting maps of long-term data assimilation diagnostics. +Plots Nobs-weighted avg of each metric across all species. +Requires saved files with monthly sums (see Get_ObsFcstAna_sums.py). +Stats of *normalized* OmFs are approximated! + +Usage: + 1. Edit "user_config.py" with experiments information. + 2. Run this script as follows (on Discover): + $ module load python/GEOSpyD + $ ./Plot_stats_maps.py +""" + +import sys; sys.path.append('../../shared/python/') +import warnings; warnings.filterwarnings("ignore") +import os + +import numpy as np +import matplotlib.pyplot as plt + +from datetime import timedelta + +from netCDF4 import Dataset + +from tile_to_latlongrid import tile_to_latlongrid +from plot import plotMap +from EASEv2 import EASEv2_ind2latlon + +from postproc_ObsFcstAna import postproc_ObsFcstAna +from user_config import get_config + +def Main_OmF_maps(): + + # Plot maps of long-term temporal stats + + config = get_config() # edit user-defined inputs in user_config.py + + exp_list = config['exp_list'] + start_time = config['start_time'] + end_time = config['end_time'] + sum_path = config['sum_path'] + out_path = config['out_path'] + + # min total number of individual O-Fs required for stats at a location (across all months in period) + + Nmin = 20 + + # ------------------------------------------------------------------------------------ + # Read or compute stats. 
Each field has dimension [N_tile, N_species] + # ------------------------------------------------------------------------------------ + + # File name for long-term temporal stats + stats_file = out_path + 'temporal_stats_'+exp_list[0]['exptag']+'_'+ start_time.strftime('%Y%m%d')+'_'+ \ + (end_time+timedelta(days=-1)).strftime('%Y%m%d')+'.nc4' + + # Read or compute long-term temporal stats. Each field has the dimension [N_tile, N_species]. + if os.path.isfile(stats_file): + + print('reading stats nc4 file '+ stats_file) + stats = {} + with Dataset(stats_file,'r') as nc: + for key, value in nc.variables.items(): + stats[key] = value[:].filled(np.nan) + + else: + # Initialize the postprocessing object and get information + postproc_obj = postproc_ObsFcstAna(exp_list, start_time, end_time, sum_path=sum_path) + stats = postproc_obj.calc_temporal_stats_from_sums(write_to_nc=True, fout_stats=stats_file) + + N_data = stats['N_data'] + OmF_mean = stats['OmF_mean'] + OmF_stdv = stats['OmF_stdv'] + OmA_mean = stats['OmA_mean'] + OmA_stdv = stats['OmA_stdv'] + OmF_norm_mean = stats['OmF_norm_mean'] + OmF_norm_stdv = stats['OmF_norm_stdv'] + + # Screen stats with Nmin (except for N_data) + OmF_mean[ N_data < Nmin] = np.nan + OmF_stdv[ N_data < Nmin] = np.nan + OmA_mean[ N_data < Nmin] = np.nan + OmA_stdv[ N_data < Nmin] = np.nan + OmF_norm_mean[N_data < Nmin] = np.nan + OmF_norm_stdv[N_data < Nmin] = np.nan + + # Select/combine fields for plotting. The following provides an example to + # computes average stats across all species. + # + # Compute Nobs-weighted avg of each metric across all species. + # Typically used for SMAP Tb_h/h from asc and desc overpasses, + # or ASCAT soil moisture from Metop-A/B/C. + # DOES NOT MAKE SENSE IF, SAY, SPECIES HAVE DIFFERENT UNITS! 
+ + Ndata_sum = np.nansum( N_data, axis=1) + + OmF_mean = np.nansum(OmF_mean *N_data, axis=1)/Ndata_sum + OmF_stdv = np.nansum(OmF_stdv *N_data, axis=1)/Ndata_sum + OmF_norm_mean = np.nansum(OmF_norm_mean*N_data, axis=1)/Ndata_sum + OmF_norm_stdv = np.nansum(OmF_norm_stdv*N_data, axis=1)/Ndata_sum + OmA_mean = np.nansum(OmA_mean *N_data, axis=1)/Ndata_sum + OmA_stdv = np.nansum(OmA_stdv *N_data, axis=1)/Ndata_sum + + N_data = Ndata_sum + + # The obs are assigned to tiles only for book-keeping purposes, and the resolution of + # the obs is often coarser than that of the model tile space. Therefore, typically many + # tiles are never assigned any obs and end up with N_data=0. + # Here, we remove these unwanted zeros from N_data to keep them out of the computation of + # spatial averages (global avg; mapping from tile space to lat/lon plotting grid) + + N_data[N_data == 0] = np.nan + + # -------------------------------------------------------------------------------- + # Plot stats on regular lat/lon grid, with grid spacing guided by the footprint + # scale (field-of-view; FOV) of the observations. + # --------------------------------------------------------------------------------- + + # Get geographic info about model grid, model tile space, and obs FOV + # (assumes that all species have same FOV and FOV_units) + + tc = exp_list[0]['tilecoord'] + tg = exp_list[0]['tilegrid_global'] + obs_fov = exp_list[0]['obsparam'][0]['FOV'] + obs_fov_units = exp_list[0]['obsparam'][0]['FOV_units'] + + # Determine spacing of lat/lon grid for plotting (degree) based on obs FOV. + # Can also set manually if desired. + + my_res = obs_fov*2. # Note: FOV is a radius, hence the factor of 2. + + # If applicable, convert to degree, assuming 100 km is about 1 deg. + if 'km' in obs_fov_units: + my_res = my_res/100. + + # If obs_fov=0, reset to match model resolution. 
+ if obs_fov == 0: + my_res = np.sqrt( tg['dlat']*tg['dlon'] ) + + # pick a resolution from a sample vector of resolutions + + sample_res = [0.05, 0.1, 0.25, 0.5, 1.0, 2.0] + + my_res = min(sample_res, key=lambda x: abs(x - my_res)) + + print(f'resolution of lat/lon grid for plotting: {my_res} degree') + + # ---------------------------------------------------------------------------------- + # Plot + # ---------------------------------------------------------------------------------- + + exptag = exp_list[0]['exptag'] + + fig, axes = plt.subplots(2,2, figsize=(18,10)) + plt.rcParams.update({'font.size':14}) + + for i in np.arange(2): + for j in np.arange(2): + units = '[k]' + if i == 0 and j == 0: + tile_data = N_data + # crange is [cmin, cmax] + crange =[0, np.ceil((end_time-start_time).days/150)*300] + colormap = plt.get_cmap('jet',20) + title_txt = exptag + ' Tb Nobs (excl. 0s)' + start_time.strftime('%Y%m')+'_'+end_time.strftime('%Y%m') + units = '[-]' + if i == 0 and j ==1: + tile_data = OmF_mean + crange =[-3, 3] + colormap = plt.get_cmap('bwr', 15) + title_txt = exptag + ' Tb O-F mean '+ start_time.strftime('%Y%m')+'_'+end_time.strftime('%Y%m') + if i == 1 and j == 0: + tile_data = OmF_stdv + crange =[0, 15] + colormap = plt.get_cmap ('jet',15) + title_txt = exptag + ' Tb O-F stdv '+ start_time.strftime('%Y%m')+'_'+end_time.strftime('%Y%m') + if i == 1 and j == 1: + tile_data = OmF_norm_stdv + crange =[0, 15] + colormap = plt.get_cmap ('jet',15) + title_txt = exptag + ' Tb normalized O-F stdv (approx!) 
'+ start_time.strftime('%Y%m%d')+'_'+end_time.strftime('%Y%m%d') + + colormap.set_bad(color='0.9') # light grey, 0-black, 1-white + + # map tile_data on 2-d regular lat/lon grid for plotting + grid_data, lat_2d, lon_2d = tile_to_latlongrid(tile_data, tc, my_res) + + + # compute spatial avg of metric (directly from tile_data; no weighting) + mean = np.nanmean( tile_data ) + absmean = np.nanmean(np.abs(tile_data)) + + if 'normalized' in title_txt: + absmean = np.nanmean(np.abs(tile_data-1.) ) + + if 'normalized' in title_txt and 'stdv' in title_txt: + title_txt = title_txt + '\n' + "avg=%.3f, avg(abs(nstdv-1))=%.3f" % (mean, absmean)+' '+units + elif 'mean' in title_txt: + title_txt = title_txt + '\n' + "avg=%.3f, avg(abs)=%.3f" % (mean, absmean)+' '+units + else: + title_txt = title_txt + '\n' + "avg=%.2f" % (mean )+' '+units + + if 'normalized' in title_txt: + grid_data = np.log10(grid_data) + crange = [-0.6, 0.45] + + mm, cs = plotMap(grid_data, ax =axes[i,j], lat=lat_2d, lon=lon_2d, cRange=crange, \ + title=title_txt, cmap=colormap, bounding=[-60, 80, -180,180]) + + plt.tight_layout() + # Save figure to file + fig.savefig(out_path+'Map_OmF_'+ exptag +'_'+start_time.strftime('%Y%m')+'_'+\ + (end_time+timedelta(days=-1)).strftime('%Y%m')+'.png') + #plt.show() + plt.close(fig) + +# ----------------------------------------------------------------------------------------------- + +if __name__ == '__main__': + + Main_OmF_maps() + +# ====================== EOF ========================================================= diff --git a/GEOSldas_App/util/postproc/ObsFcstAna_stats/Plot_stats_timeseries.py b/GEOSldas_App/util/postproc/ObsFcstAna_stats/Plot_stats_timeseries.py new file mode 100644 index 00000000..2c499cfd --- /dev/null +++ b/GEOSldas_App/util/postproc/ObsFcstAna_stats/Plot_stats_timeseries.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python3 + +""" +Sample script for plotting monthly time series of spatially averaged data assimilation diagnostics. 
+Computes Nobs-weighted avg of each metric across all species. +Requires saved files with monthly sums (see Get_ObsFcstAna_stat.py). +""" + +import sys; sys.path.append('../../shared/python/') +import warnings; warnings.filterwarnings("ignore") +import os + +import numpy as np +import matplotlib.pyplot as plt +import pickle + +from datetime import datetime, timedelta +from dateutil.relativedelta import relativedelta + +from postproc_ObsFcstAna import postproc_ObsFcstAna +from user_config import get_config + +def Main_OmF_timeseries(): + + config = get_config() # edit user-defined inputs in user_config.py + + exp_list = config['exp_list'] + start_time = config['start_time'] + end_time = config['end_time'] + sum_path = config['sum_path'] + out_path = config['out_path'] + + # ------------------------------------------------------------------------------------ + # + # Initialize the postprocessing object + postproc_obj = postproc_ObsFcstAna(exp_list, start_time, end_time, sum_path=sum_path) + + exptag = postproc_obj.exptag + start_time = postproc_obj.start_time + end_time = postproc_obj.end_time + + stats_file = out_path + 'spatial_stats_'+exptag+'_'+start_time.strftime('%Y%m')+ \ + '_'+(end_time+timedelta(days=-1)).strftime('%Y%m')+'.pkl' + + # Read or compute monthly stats across space + + if os.path.isfile(stats_file): + + with open(stats_file,'rb') as file: + print(f'reading from {stats_file}') + stats = pickle.load(file) + + else: + + stats = postproc_obj.calc_spatial_stats_from_sums() + + if stats_file is not None: + with open(stats_file,'wb') as file: + print(f'saving stats to {stats_file}') + pickle.dump(stats,file) + + date_vec = stats['date_vec'] + + # Compute Nobs-weighted avg of each metric across all species. + # Typically used for SMAP Tb_h/h from asc and desc overpasses, + # or ASCAT soil moisture from Metop-A/B/C. + # DOES NOT MAKE SENSE IF, SAY, SPECIES HAVE DIFFERENT UNITS! 
+ stats_plot = {} + N_data = np.nansum(stats['N_data' ], axis=1) + stats_plot['N_data' ] = N_data + stats_plot['OmF_mean'] = np.nansum(stats['OmF_mean']*stats['N_data'], axis=1)/N_data + stats_plot['OmF_stdv'] = np.nansum(stats['OmF_stdv']*stats['N_data'], axis=1)/N_data + stats_plot['OmA_mean'] = np.nansum(stats['OmA_mean']*stats['N_data'], axis=1)/N_data + stats_plot['OmA_stdv'] = np.nansum(stats['OmA_stdv']*stats['N_data'], axis=1)/N_data + + plot_var = 'OmF_mean' + + fig, ax = plt.subplots(figsize=(10,4)) + bars = ax.bar(date_vec, stats_plot[plot_var]) + + plt.grid(True, linestyle='--', alpha=0.5) + + plt.xticks(ticks=date_vec[::2], labels=date_vec[::2]) + plt.title(exptag+' monthly '+plot_var) + plt.xlim(-1, len(date_vec)+1) + plt.ylim(-.1, 2.) + + plt.tight_layout() + #plt.show() + plt.savefig(out_path+'Bars_'+plot_var+exptag+date_vec[0]+'_'+\ + date_vec[-1]+'.png') + plt.close() + +if __name__ == "__main__": + + Main_OmF_timeseries() + + +# ====================== EOF ========================================================= diff --git a/GEOSldas_App/util/postproc/ObsFcstAna_stats/helper/write_nc4.py b/GEOSldas_App/util/postproc/ObsFcstAna_stats/helper/write_nc4.py new file mode 100644 index 00000000..57ebf694 --- /dev/null +++ b/GEOSldas_App/util/postproc/ObsFcstAna_stats/helper/write_nc4.py @@ -0,0 +1,62 @@ + +# collection of write routines for ObsFcstAna sums and stats + +from netCDF4 import Dataset, date2num + +import numpy as np + +# -------------------------------------------------------------------------------- + +def write_sums_nc4(file_path, N_data, data_sum, data2_sum, oxf_sum, oxa_sum, fxa_sum, obs_param): + + nc = Dataset(file_path,'w',format='NETCDF4') + + tile = nc.createDimension('tile', N_data.shape[0]) + species = nc.createDimension('species', N_data.shape[1]) + + data = nc.createVariable('obs_param_assim', 'c', ('species', ), zlib=True, complevel=4) + for i in range(len(obs_param)): + data[i] = obs_param[i]['assim'] + + data = 
nc.createVariable('N_data', 'i4', ('tile','species', ), zlib=True, complevel=4) + data[:,:] = N_data + + data = nc.createVariable('obsxfcst_sum', 'f4', ('tile','species', ), zlib=True, complevel=4) + data[:,:] = oxf_sum + + data = nc.createVariable('obsxana_sum', 'f4', ('tile','species', ), zlib=True, complevel=4) + data[:,:] = oxa_sum + + data = nc.createVariable('fcstxana_sum', 'f4', ('tile','species', ), zlib=True, complevel=4) + data[:,:] = fxa_sum + + for key, value in data_sum.items(): + varname = key+'_sum' + data = nc.createVariable(varname, 'f4', ('tile','species', ), zlib=True, complevel=4) + data[:,:] = value + + for key, value in data2_sum.items(): + varname = key+'2_sum' + data = nc.createVariable(varname, 'f4', ('tile','species', ), zlib=True, complevel=4) + data[:,:] = value + + nc.close() + +# -------------------------------------------------------------------------------- + +def write_stats_nc4(file_path, stats): + + nc = Dataset(file_path,'w',format='NETCDF4') + + tile = nc.createDimension('tile', stats['N_data'].shape[0]) + species = nc.createDimension('species', stats['N_data'].shape[1]) + + for key, value in stats.items(): + data = nc.createVariable(key,'f4',('tile','species', ), \ + fill_value=-9999.0, zlib=True, complevel=4) + data[:,:] = value + + nc.close() + + +# ======================= EOF ========================================================= diff --git a/GEOSldas_App/util/postproc/ObsFcstAna_stats/postproc_ObsFcstAna.py b/GEOSldas_App/util/postproc/ObsFcstAna_stats/postproc_ObsFcstAna.py new file mode 100644 index 00000000..00058e06 --- /dev/null +++ b/GEOSldas_App/util/postproc/ObsFcstAna_stats/postproc_ObsFcstAna.py @@ -0,0 +1,518 @@ + +# Tool to preprocess GEOSldas ObsFcstAna output into monthly sums, sums of squares, and sums of cross-products +# +# qliu, amfox, rreichle - May 2025 + +import numpy as np +import os +import yaml + +import sys; sys.path.append('../../shared/python/') + +import warnings; 
warnings.filterwarnings("ignore") + +from datetime import datetime, timedelta +from dateutil.relativedelta import relativedelta +from netCDF4 import Dataset, date2num +from read_GEOSldas import read_ObsFcstAna, read_tilecoord, read_obs_param + +from helper.write_nc4 import write_sums_nc4, write_stats_nc4 + +class postproc_ObsFcstAna: + + def __init__(self, exp_list, start_time, end_time, sum_path='./'): + self.exp_list = exp_list + self.expdir_list = [item['expdir'] for item in exp_list] + self.expid_list = [item['expid'] for item in exp_list] + self.exptag = exp_list[0]['exptag'] + self.domain = exp_list[0]['domain'] + self.start_time = start_time + self.end_time = end_time + self.da_t0 = exp_list[0]['da_t0'] + self.da_dt = exp_list[0]['da_dt'] + self.var_list = ['obs_obs','obs_obsvar','obs_fcst','obs_fcstvar','obs_ana','obs_anavar'] + self.tilecoord = exp_list[0]['tilecoord'] + self.tilegrid_global = exp_list[0]['tilegrid_global'] + self.tilegrid_domain = exp_list[0]['tilegrid_domain'] + self.obsparam_list = [item['obsparam'] for item in exp_list] + self.sum_path = sum_path + + # Determine experiment that supplies obs data + self.obs_from = -1 + for exp_idx, exp in enumerate(exp_list): + if exp.get('use_obs', None): # found use_obs=True + if self.obs_from >= 0: + print("ERROR: use_obs=True in multiple experiments. Edit user_config.py to remove conflict." 
) + sys.exit() + else: + self.obs_from = exp_idx + if self.obs_from < 0: self.obs_from = 0 # by default, obs data are from exp_list[0] + print(f"obs data are from {exp_list[self.obs_from]['expid']}") + + # Verify the configuration every time when current class is initialized + # to avoid saving sums with different configs in the same directory + + # Same configurations should have identical values for these fields + config_verify = ['expdir','expid','exptag','domain','use_obs','species_list'] + + # Construct config for each experiment + config_list = [] + for exp in exp_list: + config_list.append({var:exp[var] for var in config_verify if var in exp}) + + # File of configuration for verification + f_config = self.sum_path + '/' + self.exptag + '_config.yaml' + + # Save a new file or compare current configuration with previously saved + if not os.path.exists(f_config): + with open(f_config, 'w') as f: + yaml.dump(config_list, f, default_flow_style=False) + print(f'Configuration saved to {f_config}') + else: + with open(f_config,'r') as f: + saved_exp_list = yaml.safe_load(f) + print(f'Found saved configuration') + + if config_list != saved_exp_list: + print('user configuration is different from previously saved '+f_config) + sys.exit() + # ---------------------------------------------------------------------------------------------------------- + # + # Function to compute monthly sums from x-hourly ObsFcstAna data for a single month. 
+ + def compute_monthly_sums(self, date_time): + + expdir_list = self.expdir_list + expid_list = self.expid_list + tc = self.tilecoord + obsparam_list = self.obsparam_list + var_list = self.var_list + obs_from = self.obs_from + da_dt = self.da_dt + + n_tile = tc['N_tile'] + n_spec = len(obsparam_list[0]) + + date_time = date_time.replace(hour=int(self.da_t0), minute=int(np.mod(self.da_t0,1)*60)) + stop_time = date_time + relativedelta(months=1) + + data_sum = {} + data2_sum = {} + + N_data = np.zeros((n_tile, n_spec)) + oxf_sum = np.zeros((n_tile, n_spec)) + oxa_sum = np.zeros((n_tile, n_spec)) + fxa_sum = np.zeros((n_tile, n_spec)) + + for var in var_list: + data_sum[ var] = np.zeros((n_tile, n_spec)) + data2_sum[var] = np.zeros((n_tile, n_spec)) + + while date_time < stop_time: + + # read the list of experiments at each time step (OFA="ObsFcstAna") + OFA_list = [] + for i in range(len(expdir_list)): + fname = expdir_list[i]+expid_list[i]+'/output/'+self.domain+'/ana/ens_avg/Y'+ \ + date_time.strftime('%Y') + '/M' + \ + date_time.strftime('%m') + '/' + \ + expid_list[i]+'.ens_avg.ldas_ObsFcstAna.' 
+ \ + date_time.strftime('%Y%m%d_%H%M') +'z.bin' + if os.path.isfile(fname): + print('read '+fname) + OFA_list.append(read_ObsFcstAna(fname)) + + data_all=[] + for OFA, obs_param in zip(OFA_list,obsparam_list): + + # Initialize full size variable for an experiment + data_tile={} + for var in var_list: + data_tile[var] = np.zeros((n_tile, n_spec)) + np.nan + + if len(OFA['obs_tilenum']) > 0: + for ispec in np.arange(n_spec): + # check species overall "assim" flag for masking + this_species = int(obs_param[ispec]['species']) + masked_data = {} + + # get mask + if obs_param[ispec]['assim'] == 'T': + is_valid = np.logical_and(OFA['obs_species'] == this_species, OFA['obs_assim']==1) + else: + is_valid = OFA['obs_species'] == this_species + + tile_idx = OFA['obs_tilenum'][is_valid]-1 + + for var in var_list: + masked_data[var] = OFA[var][is_valid] + + for var in var_list: + data_tile[var][tile_idx, ispec] = masked_data[var] + + data_all.append(data_tile) + + if len(data_all) > 0: + # cross-mask over all experiments + is_cross_valid = ~np.isnan(data_all[0]['obs_obs']) + for data in data_all[1:]: + mask = ~np.isnan(data['obs_obs']) + is_cross_valid = np.logical_and(is_cross_valid,mask) + + # reconstruct the output variable dictionary based on input options; + # obs_obs and obs_obsvar are from exp_list[obs_from], the rest are from exp_list[0] + data_tile = {} + for var in var_list: + if 'obs_obs' in var: + data_tile[var] = data_all[obs_from][var] + else: + data_tile[var] = data_all[0][var] + + is_valid = is_cross_valid + + N_data[ is_valid] += 1 + oxf_sum[is_valid] += data_tile['obs_obs' ][is_valid] * data_tile['obs_fcst'][is_valid] + oxa_sum[is_valid] += data_tile['obs_obs' ][is_valid] * data_tile['obs_ana' ][is_valid] + fxa_sum[is_valid] += data_tile['obs_fcst'][is_valid] * data_tile['obs_ana' ][is_valid] + + for var in var_list: + data_sum[ var][is_valid] += data_tile[var][is_valid] + data2_sum[var][is_valid] += data_tile[var][is_valid] **2 + + date_time = date_time + 
timedelta(seconds=da_dt) + + return N_data, data_sum, data2_sum, oxf_sum, oxa_sum, fxa_sum + + # ---------------------------------------------------------------------------------------------------------- + # + # Function to compute monthly sums and save results in nc4 files for all months in [start_time, end_time]. + # + # Skips computation/saving of monthly sums output if file already exists. + + def save_monthly_sums(self): + expdir_list = self.expdir_list + expid_list = self.expid_list + var_list = self.var_list + tc = self.tilecoord + obsparam_list = self.obsparam_list + + date_time = self.start_time + + while date_time < self.end_time: # loop through months + + # make monthly file output directory + mo_path = self.sum_path + '/Y'+ date_time.strftime('%Y') + '/M' + date_time.strftime('%m') + '/' + os.makedirs(mo_path, exist_ok=True) + + fout = self.exptag + '.ens_avg.ldas_ObsFcstAna_sums.' + date_time.strftime('%Y%m') +'.nc4' + + fout = mo_path + fout + + # skip if output file already exists + if not os.path.isfile(fout): + print('computing monthly sums...') + # compute monthly sums + mN_data, mdata_sum, mdata2_sum, moxf_sum, moxa_sum, mfxa_sum = \ + self.compute_monthly_sums(date_time) + + # save monthly sums in nc4 file + write_sums_nc4(fout, mN_data,mdata_sum, mdata2_sum, moxf_sum, moxa_sum, mfxa_sum, obsparam_list[0]) + else: + print('file exists, skipping '+fout) + + date_time = date_time + relativedelta(months=1) + + # ---------------------------------------------------------------------------------------------------------- + # + # Function to compute long-term temporal statistics of individual species based on monthly sums. + # + # Assumes that monthly sums files have been saved [see save_monthly_sums()]. 
+ + def calc_temporal_stats_from_sums(self, write_to_nc=True, fout_stats='./stats.nc4'): + + # Variable list for computing sum and sum-of-squares + var_list = self.var_list + + # Read tilecoord and obsparam for tile and obs species information + n_tile = self.tilecoord['N_tile'] + n_spec = len(self.obsparam_list[0]) + + # --------------------------------------------------------------- + # + # compute accumulated sums for period (start_time, end_time) + + # Initialize statistical metrics + data_sum = {} + data2_sum = {} + N_data = np.zeros((n_tile, n_spec)) + oxf_sum = np.zeros((n_tile, n_spec)) + oxa_sum = np.zeros((n_tile, n_spec)) + fxa_sum = np.zeros((n_tile, n_spec)) + + for var in var_list: + data_sum[ var] = np.zeros((n_tile, n_spec)) + data2_sum[var] = np.zeros((n_tile, n_spec)) + + # Time loop: processing data at monthly time step + + date_time = self.start_time + + while date_time < self.end_time: # loop through months + + fpath = self.sum_path + '/Y'+ date_time.strftime('%Y') + '/M' + date_time.strftime('%m') + '/' + + fout = self.exptag + '.ens_avg.ldas_ObsFcstAna_sums.' 
+ date_time.strftime('%Y%m') +'.nc4' + + fout = fpath + fout + + # Read and accumulate monthly sums data + if os.path.isfile(fout): + print('read sums from monthly file: '+fout) + mdata_sum = {} + mdata2_sum = {} + with Dataset(fout,'r') as nc: + mN_data = nc.variables['N_data' ][:] + moxf_sum = nc.variables['obsxfcst_sum'][:] + moxa_sum = nc.variables['obsxana_sum' ][:] + mfxa_sum = nc.variables['fcstxana_sum'][:] + for var in var_list: + mdata_sum[ var] = nc.variables[var+'_sum' ][:] + mdata2_sum[var] = nc.variables[var+'2_sum' ][:] + + # Aggregate monthly data + N_data += mN_data + oxf_sum += moxf_sum + oxa_sum += moxa_sum + fxa_sum += mfxa_sum + + for var in var_list: + data_sum[ var] += mdata_sum[ var] + data2_sum[var] += mdata2_sum[var] + else: + raise FileNotFoundError(f"File {fout} does not exist, run save_monthly_sums() first") + + date_time = date_time + relativedelta(months=1) + + # -------------------------------------------------------------------- + # + # Compute stats (DA diagnostics) from accumulated sums data. + + data_mean = {} + data2_mean = {} + data_var = {} + + # calculation + for var in var_list: + data_sum[var][ N_data == 0] = np.nan + data2_sum[var][N_data == 0] = np.nan + + data_mean[ var] = data_sum[ var] / N_data + data2_mean[var] = data2_sum[var] / N_data + # var(x) = E[x2] - (E[x])^2 + data_var[var] = data2_mean[var] - data_mean[var]**2 + + oxf_sum[N_data == 0] = np.nan + oxa_sum[N_data == 0] = np.nan + fxa_sum[N_data == 0] = np.nan + # E[xy] + oxf_mean = oxf_sum / N_data + oxa_mean = oxa_sum / N_data + fxa_mean = fxa_sum / N_data + + # Then compute metrics of O-F, O-A, etc. 
based on above computed + + O_mean = data_mean['obs_obs'] + F_mean = data_mean['obs_fcst'] + A_mean = data_mean['obs_ana'] + + O_stdv = np.sqrt(data_var['obs_obs']) + F_stdv = np.sqrt(data_var['obs_fcst']) + A_stdv = np.sqrt(data_var['obs_ana']) + + # mean(x-y) = E[x] - E[y] + OmF_mean = O_mean - F_mean + OmA_mean = O_mean - A_mean + + # var(x-y) = var(x) + var(y) - 2cov(x,y) + # cov(x,y) = E[xy] - E[x]E[y] + OmF_stdv = np.sqrt(O_stdv**2 + F_stdv**2 - 2 * (oxf_mean - O_mean*F_mean)) + OmA_stdv = np.sqrt(O_stdv**2 + A_stdv**2 - 2 * (oxa_mean - O_mean*A_mean)) + + # ***************************************************************************************** + # The time series mean and std-dev of the *normalized* OmF computed here are APPROXIMATED! + # ***************************************************************************************** + # Here, we first compute the stats of the OmF time series and then normalize using + # the time-avg "obsvar" and "fcstvar" values. + # Since "fcstvar" changes with time, the OmF values should be normalized at each time + # step (as in the older matlab scripts), and then the time series stats can be computed. + # To compute the exact stats with this python package, the sum and sum-of-squares of + # the normalized OmF values would need to be added into the sums files. + # + OmF_norm_mean = OmF_mean / np.sqrt(data_mean['obs_obsvar'] + data_mean['obs_fcstvar']) # APPROXIMATED stat! + OmF_norm_stdv = np.sqrt(OmF_stdv**2 / (data_mean['obs_obsvar'] + data_mean['obs_fcstvar'])) # APPROXIMATED stat! 
+ + # Mask out data points without any obs (NOTE: apply Nmin threshold when plotting or computing map avg) + # Do NOT apply to N_data + O_mean[ N_data < 1] = np.nan + O_stdv[ N_data < 1] = np.nan + F_mean[ N_data < 1] = np.nan + F_stdv[ N_data < 1] = np.nan + A_mean[ N_data < 1] = np.nan + A_stdv[ N_data < 1] = np.nan + + OmF_mean[ N_data < 1] = np.nan + OmF_stdv[ N_data < 1] = np.nan + OmF_norm_mean[N_data < 1] = np.nan + OmF_norm_stdv[N_data < 1] = np.nan + OmA_mean[ N_data < 1] = np.nan + OmA_stdv[ N_data < 1] = np.nan + + stats = { + 'O_mean' : O_mean, 'O_stdv' : O_stdv, + 'F_mean' : F_mean, 'F_stdv' : F_stdv, + 'A_mean' : A_mean, 'A_stdv' : A_stdv, + 'OmF_mean' : OmF_mean, 'OmF_stdv' : OmF_stdv, + 'OmA_mean' : OmA_mean, 'OmA_stdv' : OmA_stdv, + 'OmF_norm_mean': OmF_norm_mean, 'OmF_norm_stdv': OmF_norm_stdv, + 'N_data' : N_data, + } + + if write_to_nc: + print('writing stats nc4 file: '+fout_stats) + write_stats_nc4(fout_stats, stats) + + return stats + + # ---------------------------------------------------------------------------------------------------------- + # + # Function to compute the O-F/O-A *spatial* statistics for a *single* month based on + # previously saved monthly sums. + # Individual temporal and grid cell DA diagnostic values within a month are aggregated first; + # the monthly Ndata/mean/stdv are derived from the aggregated sample. Consequently, in the + # final DA diagnostics, each obs value gets equal weight. Note that this differs from computing + # the straight spatial avg across a map of a given DA diagnostic. The latter approach gives + # the same weight to each location, regardless of how many obs are available at the location. 
+ + def calc_spatial_stats_from_sums(self): + + start_time = self.start_time + end_time = self.end_time + + var_list = ['obs_obs', 'obs_fcst','obs_ana'] + + O_mean = [] + O_stdv = [] + F_mean = [] + F_stdv = [] + A_mean = [] + A_stdv = [] + OmF_mean = [] + OmF_stdv = [] + OmA_mean = [] + OmA_stdv = [] + N_data = [] + date_vec = [] + + # Time loop + current_time = start_time + while current_time < end_time: + + mo_path = self.sum_path + '/Y'+ current_time.strftime('%Y') + '/M' + current_time.strftime('%m') + '/' + fnc4_sums = mo_path + self.exptag + '.ens_avg.ldas_ObsFcstAna_sums.' + current_time.strftime('%Y%m') +'.nc4' + + mdata_sum = {} + mdata2_sum = {} + + try: + with Dataset(fnc4_sums,'r') as nc: + mN_data = nc.variables['N_data' ][:] + moxf_sum = nc.variables['obsxfcst_sum'][:] + moxa_sum = nc.variables['obsxana_sum' ][:] + moxf_sum[mN_data == 0] = np.nan + moxa_sum[mN_data == 0] = np.nan + for var in var_list: + mdata_sum[ var] = nc.variables[var+'_sum' ][:] + mdata2_sum[var] = nc.variables[var+'2_sum'][:] + mdata_sum[ var][mN_data == 0] = np.nan + mdata2_sum[var][mN_data == 0] = np.nan + except FileNotFoundError: + print(f"Error: File '{fnc4_sums}' not found. 
Run Get_ObsFcstAna_sums.py first.") + sys.exit(1) + except Exception as e: + print(f"An unexpected error occurred: {e}") + sys.exit(1) + + # Make sure only aggregate tiles with valid values for all variables + for var in var_list: + mN_data = mN_data.astype(float) + mN_data[np.isnan(mdata_sum[var])] = np.nan + mN_data[mN_data == 0] = np.nan + + # cross mask before aggregating tile values + for var in var_list: + mdata_sum[ var][np.isnan(mN_data)] = np.nan + mdata2_sum[var][np.isnan(mN_data)] = np.nan + moxf_sum[ np.isnan(mN_data)] = np.nan + moxa_sum[ np.isnan(mN_data)] = np.nan + + # Aggregate data of all tiles + N_data_mo = np.nansum(mN_data, axis=0) + with np.errstate(divide='ignore', invalid='ignore'): + OxF_mean = np.nansum(moxf_sum, axis=0) / np.where(N_data_mo > 0, N_data_mo, np.nan) + OxA_mean = np.nansum(moxa_sum, axis=0) / np.where(N_data_mo > 0, N_data_mo, np.nan) + data_mean = {} + data2_mean = {} + data_var = {} + for var in var_list: + with np.errstate(divide='ignore', invalid='ignore'): + data_mean[ var] = np.nansum(mdata_sum[var ], axis=0) / np.where(N_data_mo > 0, N_data_mo, np.nan) + data2_mean[var] = np.nansum(mdata2_sum[var], axis=0) / np.where(N_data_mo > 0, N_data_mo, np.nan) + # var(x) = E[x2] - (E[x])^2 + data_var[var] = data2_mean[var] - data_mean[var]**2 + + # Compute monthly metrics of O-F, O-A, etc. 
based on above stats + O_mean_mo = data_mean['obs_obs'] + F_mean_mo = data_mean['obs_fcst'] + A_mean_mo = data_mean['obs_ana'] + + O_var = data_var['obs_obs'] + F_var = data_var['obs_fcst'] + A_var = data_var['obs_ana'] + + # mean(x-y) = E[x] - E[y] + OmF_mean_mo = O_mean_mo - F_mean_mo + OmA_mean_mo = O_mean_mo - A_mean_mo + + # var(x-y) = var(x) + var(y) - 2cov(x,y) + # cov(x,y) = E[xy] - E[x]E[y] + OmF_stdv_mo = np.sqrt(O_var + F_var - 2 * (OxF_mean - O_mean_mo*F_mean_mo)) + OmA_stdv_mo = np.sqrt(O_var + A_var - 2 * (OxA_mean - O_mean_mo*A_mean_mo)) + + # Extend timeseries + N_data.append( N_data_mo ) + O_mean.append( O_mean_mo ) + O_stdv.append( np.sqrt(O_var)) + F_mean.append( F_mean_mo ) + F_stdv.append( np.sqrt(F_var)) + A_mean.append( A_mean_mo ) + A_stdv.append( np.sqrt(A_var)) + OmF_mean.append(OmF_mean_mo ) + OmF_stdv.append(OmF_stdv_mo ) + OmA_mean.append(OmA_mean_mo ) + OmA_stdv.append(OmA_stdv_mo ) + + date_vec.append(current_time.strftime('%Y%m')) + current_time = current_time + relativedelta(months=1) + + stats = { + 'O_mean' : np.array(O_mean), 'O_stdv' : np.array(O_stdv), + 'F_mean' : np.array(F_mean), 'F_stdv' : np.array(F_stdv), + 'A_mean' : np.array(A_mean), 'A_stdv' : np.array(A_stdv), + 'OmF_mean': np.array(OmF_mean), 'OmF_stdv': np.array(OmF_stdv), + 'OmA_mean': np.array(OmA_mean), 'OmA_stdv': np.array(OmA_stdv), + 'N_data' : np.array(N_data), 'date_vec': date_vec + } + + return stats + +# ============== EOF ==================================================================== diff --git a/GEOSldas_App/util/postproc/ObsFcstAna_stats/user_config.py b/GEOSldas_App/util/postproc/ObsFcstAna_stats/user_config.py new file mode 100644 index 00000000..169bbcf3 --- /dev/null +++ b/GEOSldas_App/util/postproc/ObsFcstAna_stats/user_config.py @@ -0,0 +1,150 @@ + +import sys; sys.path.append('../../shared/python/') +import warnings; warnings.filterwarnings("ignore") +import os + +import numpy as np + +from datetime import datetime, timedelta +from 
read_GEOSldas import read_tilecoord, read_tilegrids, read_obs_param +from postproc_ObsFcstAna import postproc_ObsFcstAna + +def get_config(): + + # ========================================================================================= + # + # User-defined inputs for post-processing of ObsFcstAna output + + # Range of months to process: + + start_year = 2015 + start_month = 4 + last_year = 2016 + last_month = 3 + + # Sums or stats will be processed for exp_main: + + exp_main = { + 'expdir' : '/discover/nobackup/projects/gmao/merra/iau/merra_land/SMAP_runs/SMAP_Nature_v11/', + 'expid' : 'DAv8_SMOSSMAP', # GEOSldas exp ID of simulation + 'exptag' : 'DAMulti_SMAP', # string used in output file names for sums & stats, + # can be same as expid or different -- e.g., reflect info + # about subset of included species or about cross-masking + 'domain' : 'SMAP_EASEv2_M36_GLOBAL', + 'da_t0' : 3, # (fractional) UTC hour of first land analysis + 'da_dt' : 10800, # ObsFcstAna file interval in seconds + 'species_list' : [5,6,7,8], # indices of species to be processed + 'obsparam_time' : "20150401_0000" # time stamp of obsparam file (YYYYMMDD_HHMM) + } + + # Optional experiment(s) can be added for cross-masking or extracting obs from a different experiment. + # + # All optional experiments and the main experiment must have identical tile space (BCs resolution) and domain. + # + # If the default "species" number/order do not match, set "species_list" accordingly to force a match. + # Output will be cross-masked between all specified experiments. + # + # Forecasts and analyses are always from the main experiment. + # Observations are from the experiment with 'use_obs' set to True (default is exp_main). The most + # likely use case for reading obs from a supplemental experiment is when computing OmF etc diagnostics + # for an open loop experiment that only has unscaled obs, and _scaled_ obs must be read from a + # coresponding DA experiment. 
+ + exp_sup1 = { + 'expdir' : '/discover/nobackup/projects/gmao/merra/iau/merra_land/SMAP_runs/SMAP_Nature_v11/', + 'expid' : 'DAv8_M36', + 'use_obs' : True, # if True, use obs data from this exp + 'species_list' : [1,2,3,4], # indices of species to be processed, + # must identify same species as selected in main exp + 'obsparam_time' : "20150401_0000" # time stamp of obsparam file (YYYYMMDD_HHMM) + } + + # Convert experiments input to a list; first entry must be exp_main: + + #exp_list = [exp_main] # no cross-masking + exp_list = [exp_main, exp_sup1] # cross-mask exp_main with exp_sup1 + + # Top level directory for all output from this package: + + out_path = '/discover/nobackup/[USERNAME]/' + + # Directory for monthly sum files: + # - Can use the experiment directory or a different path. + # - Automatically appends /Yyyyy/Mmm/ for individual months. + + sum_path = out_path+'/'+exp_main['expid']+'/output/'+exp_main['domain']+'/ana/ens_avg/' + + # + # ===================== end of user-defined inputs ================================================= + + # process time range info; end_time is first of month after (end_year, end_month) + + if last_month==12 : + end_month = 1 + end_year = last_year + 1 + else : + end_month = last_month + 1 + end_year = last_year + + start_time = datetime( start_year, start_month, 1) + end_time = datetime( end_year, end_month, 1) + + # Get tilecoord and obsparam information for each experiment + + domain = exp_list[0]['domain'] + + for exp in exp_list: + expdir = exp['expdir'] + expid = exp['expid'] + + YYYY = exp['obsparam_time'][0:4] + MM = exp['obsparam_time'][4:6] + + fop = expdir+expid+'/output/'+domain+'/rc_out/Y'+YYYY+'/M'+MM+'/'+expid+'.ldas_obsparam.'+exp['obsparam_time']+'z.txt' + obsparam_orig = read_obs_param(fop) + + # get the species list, default is all species + species_list = exp.get('species_list', [ int(obsparam_orig[i]['species']) for i in range(len(obsparam_orig)) ]) + + # subset obsparam_orig based on species_list; 
keep order of obsparam_orig (independent of order of species_list) + obsparam = [] + for i in range(len(obsparam_orig)): + if int(obsparam_orig[i]['species']) in species_list: + obsparam.append(obsparam_orig[i]) + + ftc = expdir+expid+'/output/'+ domain+'/rc_out/'+ expid+'.ldas_tilecoord.bin' + tc = read_tilecoord(ftc) + + ftg = expdir+expid+'/output/'+ domain+'/rc_out/'+ expid+'.ldas_tilegrids.bin' + tg_global, tg_domain = read_tilegrids(ftg) + + # add tilecoord and obsparam into to exp + exp.update({'tilecoord':tc, 'obsparam':obsparam, 'tilegrid_global':tg_global, 'tilegrid_domain':tg_domain}) + + # verify that obs species match across experiments + for exp_idx, exp in enumerate(exp_list) : + obsparam = exp.get('obsparam') + if exp_idx==0 : + obsparam_main = obsparam + else: + if len(obsparam) != len(obsparam_main) : + print("ERROR: 'obsparam' mismatch (length)! Check 'select_species' input in user_config.py." ) + sys.exit() + else: + for a, b in zip(obsparam, obsparam_main) : + if a['descr'] != b['descr'] : + print("ERROR: 'obsparam' mismatch (descr)! Check 'select_species' input in user_config.py." ) + sys.exit() + + # wrap up config + config ={ + 'exp_list' : exp_list, + 'start_time' : start_time, + 'end_time' : end_time, + 'sum_path' : sum_path, + 'out_path' : out_path, + } + + return config + +# ====================== EOF ========================================================= diff --git a/GEOSldas_App/util/shared/matlab/EASEv2_ind2latlon.m b/GEOSldas_App/util/shared/matlab/EASEv2_ind2latlon.m index c2d61140..dacbb4ec 100644 --- a/GEOSldas_App/util/shared/matlab/EASEv2_ind2latlon.m +++ b/GEOSldas_App/util/shared/matlab/EASEv2_ind2latlon.m @@ -1,202 +1,202 @@ -% -% SMAPEASE2INVERSE The principal function is to perform inverse transformation -% from (row,col)'s to (lat,lon)'s for a set of nested EASE -% grids defined at 1, 3, 9, and 36km grid resolutions. These -% grids are all based on the EASE-Grid 2.0 specification (WGS84 -% ellipsoid). 
-% -% SYNTAX [lat,lon] = smapease2inverse(row,col,gridid) -% -% where gridid is a 3-character string enclosed in single -% quotes, in the form of {M|N|S}{01,03,09,36}. This subroutine -% accepts vector inputs and produce vector outputs. -% -% HISTORY This subroutine was adapted from the offical EASE-Grid-2.0 -% conversion utilities (written in IDL) developed by the -% NSIDC. -% -% Note that in NSIDC's original implementation, (row,col) are -% zero-based. In other words, the first cell is (0,0) and the -% last cell is (N-1,M-1), where N and M are the row and column -% dimensions of the array. In this MATLAB implementation, the -% same convention is used. In other words, the end point of -% the first cell is located at (r,c) = (-0.5,-0.5) whereas the -% end point of the last cell is located at (r,c) = (14615.5, -% 34703.5). Thus, -% -% [lat,lon] = smapease2inverse(-0.5,-0.5,'M01') returns: -% lat = 85.044566407398861 -% lon = 1.799999999999994e+02 -% -% [lat,lon] = smapease2inverse(14615.5,34703.5,'M01') returns: -% lat = -85.044566407398861 -% lon = -1.799999999999994e+02 -% -% The polar grids, on the other hand, are more complete in -% terms of latitude coverage: -% -% [lat,lon] = smapease2inverse(8999,8999,'N01') -% lat = 89.993669248945238 -% lon = -135 -% [lat,lon] = smapease2inverse(9000,9000,'N01') -% lat = 89.993669248945238 -% lon = 45 -% -% [lat,lon] = smapease2inverse(8999,8999,'S01') -% lat = -89.993669248945238 -% lon = -45 -% [lat,lon] = smapease2inverse(9000,9000,'S01') -% lat = -89.993669248945238 -% lon = 135 -% -% UPDATE North/south polar projections were added. (03/2012) -% -% REFERENCE Brodzik, M. J., B. Billingsley, T. Haran, B. Raup, and M. H. -% Savoie (2012): EASE-Grid 2.0: Incremental but Significant -% Improvements for Earth-Gridded Data Sets. ISPRS International -% Journal of Geo-Information, vol. 1, no. 1, pp. 
32-45, -% http://www.mdpi.com/2220-9964/1/1/32/ -% -% Steven Chan, 11/2011 -% Email: steven.k.chan@jpl.nasa.gov - -function [lat,lon] = EASEv2_ind2latlon(row,col,gridid) - -% Constants returned by EASE2_GRID_INFO.PRO -projection = gridid(1); -switch gridid - case 'M36' - map_scale_m = 36032.220840584; - cols = 964; - rows = 406; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'M09' - map_scale_m = 9008.055210146; - cols = 3856; - rows = 1624; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'M03' - map_scale_m = 3002.6850700487; - cols = 11568; - rows = 4872; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'M01' - map_scale_m = 1000.89502334956; - cols = 34704; - rows = 14616; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'N36' - map_scale_m = 36000.0; - cols = 500; - rows = 500; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'N09' - map_scale_m = 9000.0; - cols = 2000; - rows = 2000; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'N03' - map_scale_m = 3000.0; - cols = 6000; - rows = 6000; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'N01' - map_scale_m = 1000.0; - cols = 18000; - rows = 18000; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'S36' - map_scale_m = 36000.0; - cols = 500; - rows = 500; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'S09' - map_scale_m = 9000.0; - cols = 2000; - rows = 2000; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'S03' - map_scale_m = 3000.0; - cols = 6000; - rows = 6000; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'S01' - map_scale_m = 1000.0; - cols = 18000; - rows = 18000; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - otherwise - disp(['ERROR: Incompatible grid specification.']); -end - -% Constants returned by EASE2_MAP_INFO.PRO -map_equatorial_radius_m = 6378137.0; -map_eccentricity = 0.081819190843; -e2 = map_eccentricity^2; -switch projection - case 'M' - map_reference_latitude = 0.0; - map_reference_longitude = 0.0; - map_second_reference_latitude = 30.0; - sin_phi1 = sin(map_second_reference_latitude*pi/180); - cos_phi1 = 
cos(map_second_reference_latitude*pi/180); - kz = cos_phi1/sqrt(1.0-e2*sin_phi1*sin_phi1); - case 'N' - map_reference_latitude = 90.0; - map_reference_longitude = 0.0; - case 'S' - map_reference_latitude = -90.0; - map_reference_longitude = 0.0; -end - -% Selected calculations inside WGS84_INVERSE.PRO -x = (col-r0)*map_scale_m; -y = (s0-row)*map_scale_m; - -% Selected calculations inside WGS84_INVERSE_XY.PRO -e4 = map_eccentricity^4; -e6 = map_eccentricity^6; -qp = (1.0-e2)*( (1.0/(1.0-e2))-(1.0/(2.0*map_eccentricity))*log((1.0-map_eccentricity)/(1.0+map_eccentricity))); -switch projection - case 'M' - beta = asin(2.0*y*kz/(map_equatorial_radius_m*qp)); - lam = x/(map_equatorial_radius_m*kz); - case 'N' - rho = sqrt(x.^2+y.^2); - beta = asin(1.0-(rho.^2)/(qp*map_equatorial_radius_m^2)); - lam = atan2(x,-y); - case 'S' - rho = sqrt(x.^2+y.^2); - beta = -asin(1.0-(rho.^2)/(qp*map_equatorial_radius_m^2)); - lam = atan2(x,y); -end -phi = beta+(((e2/3.0)+((31.0/180.0)*e4)+((517.0/5040.0)*e6))*sin(2.0*beta))+...\ - ((((23.0/360.0)*e4)+((251.0/3780.0)*e6))*sin(4.0*beta))+(((761.0/45360.0)*e6)*sin(6.0*beta)); -lat = phi*180.0/pi; -lon = map_reference_longitude+(lam*180.0/pi); -msk1 = lon < -180.0; lon(msk1) = lon(msk1) + 360.0; -msk2 = lon > +180.0; lon(msk2) = lon(msk2) - 360.0; -switch projection - case 'N' - idx = lat < 0.0; - lat(idx) = NaN; - lon(idx) = NaN; - case 'S' - idx = lat > 0.0; - lat(idx) = NaN; - lon(idx) = NaN; -end - -% ========= EOF ========================================================= +% +% SMAPEASE2INVERSE The principal function is to perform inverse transformation +% from (row,col)'s to (lat,lon)'s for a set of nested EASE +% grids defined at 1, 3, 9, and 36km grid resolutions. These +% grids are all based on the EASE-Grid 2.0 specification (WGS84 +% ellipsoid). +% +% SYNTAX [lat,lon] = smapease2inverse(row,col,gridid) +% +% where gridid is a 3-character string enclosed in single +% quotes, in the form of {M|N|S}{01,03,09,36}. 
This subroutine +% accepts vector inputs and produce vector outputs. +% +% HISTORY This subroutine was adapted from the offical EASE-Grid-2.0 +% conversion utilities (written in IDL) developed by the +% NSIDC. +% +% Note that in NSIDC's original implementation, (row,col) are +% zero-based. In other words, the first cell is (0,0) and the +% last cell is (N-1,M-1), where N and M are the row and column +% dimensions of the array. In this MATLAB implementation, the +% same convention is used. In other words, the end point of +% the first cell is located at (r,c) = (-0.5,-0.5) whereas the +% end point of the last cell is located at (r,c) = (14615.5, +% 34703.5). Thus, +% +% [lat,lon] = smapease2inverse(-0.5,-0.5,'M01') returns: +% lat = 85.044566407398861 +% lon = 1.799999999999994e+02 +% +% [lat,lon] = smapease2inverse(14615.5,34703.5,'M01') returns: +% lat = -85.044566407398861 +% lon = -1.799999999999994e+02 +% +% The polar grids, on the other hand, are more complete in +% terms of latitude coverage: +% +% [lat,lon] = smapease2inverse(8999,8999,'N01') +% lat = 89.993669248945238 +% lon = -135 +% [lat,lon] = smapease2inverse(9000,9000,'N01') +% lat = 89.993669248945238 +% lon = 45 +% +% [lat,lon] = smapease2inverse(8999,8999,'S01') +% lat = -89.993669248945238 +% lon = -45 +% [lat,lon] = smapease2inverse(9000,9000,'S01') +% lat = -89.993669248945238 +% lon = 135 +% +% UPDATE North/south polar projections were added. (03/2012) +% +% REFERENCE Brodzik, M. J., B. Billingsley, T. Haran, B. Raup, and M. H. +% Savoie (2012): EASE-Grid 2.0: Incremental but Significant +% Improvements for Earth-Gridded Data Sets. ISPRS International +% Journal of Geo-Information, vol. 1, no. 1, pp. 
32-45, +% http://www.mdpi.com/2220-9964/1/1/32/ +% +% Steven Chan, 11/2011 +% Email: steven.k.chan@jpl.nasa.gov + +function [lat,lon] = EASEv2_ind2latlon(row,col,gridid) + +% Constants returned by EASE2_GRID_INFO.PRO +projection = gridid(1); +switch gridid + case 'M36' + map_scale_m = 36032.220840584; + cols = 964; + rows = 406; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'M09' + map_scale_m = 9008.055210146; + cols = 3856; + rows = 1624; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'M03' + map_scale_m = 3002.6850700487; + cols = 11568; + rows = 4872; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'M01' + map_scale_m = 1000.89502334956; + cols = 34704; + rows = 14616; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'N36' + map_scale_m = 36000.0; + cols = 500; + rows = 500; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'N09' + map_scale_m = 9000.0; + cols = 2000; + rows = 2000; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'N03' + map_scale_m = 3000.0; + cols = 6000; + rows = 6000; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'N01' + map_scale_m = 1000.0; + cols = 18000; + rows = 18000; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'S36' + map_scale_m = 36000.0; + cols = 500; + rows = 500; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'S09' + map_scale_m = 9000.0; + cols = 2000; + rows = 2000; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'S03' + map_scale_m = 3000.0; + cols = 6000; + rows = 6000; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'S01' + map_scale_m = 1000.0; + cols = 18000; + rows = 18000; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + otherwise + disp(['ERROR: Incompatible grid specification.']); +end + +% Constants returned by EASE2_MAP_INFO.PRO +map_equatorial_radius_m = 6378137.0; +map_eccentricity = 0.081819190843; +e2 = map_eccentricity^2; +switch projection + case 'M' + map_reference_latitude = 0.0; + map_reference_longitude = 0.0; + map_second_reference_latitude = 30.0; + sin_phi1 = sin(map_second_reference_latitude*pi/180); + cos_phi1 = 
cos(map_second_reference_latitude*pi/180); + kz = cos_phi1/sqrt(1.0-e2*sin_phi1*sin_phi1); + case 'N' + map_reference_latitude = 90.0; + map_reference_longitude = 0.0; + case 'S' + map_reference_latitude = -90.0; + map_reference_longitude = 0.0; +end + +% Selected calculations inside WGS84_INVERSE.PRO +x = (col-r0)*map_scale_m; +y = (s0-row)*map_scale_m; + +% Selected calculations inside WGS84_INVERSE_XY.PRO +e4 = map_eccentricity^4; +e6 = map_eccentricity^6; +qp = (1.0-e2)*( (1.0/(1.0-e2))-(1.0/(2.0*map_eccentricity))*log((1.0-map_eccentricity)/(1.0+map_eccentricity))); +switch projection + case 'M' + beta = asin(2.0*y*kz/(map_equatorial_radius_m*qp)); + lam = x/(map_equatorial_radius_m*kz); + case 'N' + rho = sqrt(x.^2+y.^2); + beta = asin(1.0-(rho.^2)/(qp*map_equatorial_radius_m^2)); + lam = atan2(x,-y); + case 'S' + rho = sqrt(x.^2+y.^2); + beta = -asin(1.0-(rho.^2)/(qp*map_equatorial_radius_m^2)); + lam = atan2(x,y); +end +phi = beta+(((e2/3.0)+((31.0/180.0)*e4)+((517.0/5040.0)*e6))*sin(2.0*beta))+...\ + ((((23.0/360.0)*e4)+((251.0/3780.0)*e6))*sin(4.0*beta))+(((761.0/45360.0)*e6)*sin(6.0*beta)); +lat = phi*180.0/pi; +lon = map_reference_longitude+(lam*180.0/pi); +msk1 = lon < -180.0; lon(msk1) = lon(msk1) + 360.0; +msk2 = lon > +180.0; lon(msk2) = lon(msk2) - 360.0; +switch projection + case 'N' + idx = lat < 0.0; + lat(idx) = NaN; + lon(idx) = NaN; + case 'S' + idx = lat > 0.0; + lat(idx) = NaN; + lon(idx) = NaN; +end + +% ========= EOF ========================================================= diff --git a/GEOSldas_App/util/shared/matlab/EASEv2_latlon2ind.m b/GEOSldas_App/util/shared/matlab/EASEv2_latlon2ind.m index e8afdd7b..a4080d4b 100644 --- a/GEOSldas_App/util/shared/matlab/EASEv2_latlon2ind.m +++ b/GEOSldas_App/util/shared/matlab/EASEv2_latlon2ind.m @@ -1,218 +1,218 @@ -% -% SMAPEASE2FORWARD The principal function is to perform forward transformation -% from (lat,lon)'s to (row,col)'s for a set of nested EASE -% grids defined at 1, 3, 9, and 36km grid 
resolutions. These -% grids are all based on the EASE-Grid 2.0 specification (WGS84 -% ellipsoid). -% -% SYNTAX [row,col] = smapease2forward(lat,lon,gridid) -% -% where gridid is a 3-character string enclosed in single -% quotes, in the form of {M|N|S}{01,03,09,36}. This subroutine -% accepts vector inputs and produce vector outputs. -% -% HISTORY This subroutine was adapted from the offical EASE-Grid-2.0 -% conversion utilities (written in IDL) developed by the -% NSIDC. -% -% Note that in NSIDC's original implementation, (row,col) are -% zero-based. In other words, the first cell is (0,0) and the -% last cell is (N-1,M-1), where N and M are the row and column -% dimensions of the array. In this MATLAB implementation, the -% same convention is used. In other words, the end point of -% the first cell is located at (r,c) = (-0.5,-0.5) whereas the -% end point of the last cell is located at (r,c) = (14615.5, -% 34703.5). Thus, -% -% [lat,lon] = smapease2inverse(-0.5,-0.5,'M01') returns: -% lat = 85.044566407398861 -% lon = 1.799999999999994e+02 -% -% [lat,lon] = smapease2inverse(14615.5,34703.5,'M01') returns: -% lat = -85.044566407398861 -% lon = -1.799999999999994e+02 -% -% The polar grids, on the other hand, are more complete in -% terms of latitude coverage: -% -% [lat,lon] = smapease2inverse(8999,8999,'N01') -% lat = 89.993669248945238 -% lon = -135 -% [lat,lon] = smapease2inverse(9000,9000,'N01') -% lat = 89.993669248945238 -% lon = 45 -% -% [lat,lon] = smapease2inverse(8999,8999,'S01') -% lat = -89.993669248945238 -% lon = -45 -% [lat,lon] = smapease2inverse(9000,9000,'S01') -% lat = -89.993669248945238 -% lon = 135 -% -% UPDATE North/south polar projections were added. (03/2012) -% -% REFERENCE Brodzik, M. J., B. Billingsley, T. Haran, B. Raup, and M. H. -% Savoie (2012): EASE-Grid 2.0: Incremental but Significant -% Improvements for Earth-Gridded Data Sets. ISPRS International -% Journal of Geo-Information, vol. 1, no. 1, pp. 
32-45, -% http://www.mdpi.com/2220-9964/1/1/32/ -% -% Steven Chan, 11/2011 -% Email: steven.k.chan@jpl.nasa.gov - -function [row,col] = EASEv2_latlon2ind(lat,lon,gridid,return_rounded) - -% By design, [row, col] are real numbers, with the fractional portion indicating -% the position of the specified [lat, lon] coordinates between adjacent grid -% cell centers. E.g., col=0.5 indicates that the input longitude is on the -% boundary between grid cells associated with col=0 and col=1. -% If the [optional] input argument 'return_rounded' is present and ~=0, then -% [row, col] are rounded to the nearest integer. - -% Constants returned by EASE2_GRID_INFO.PRO -projection = gridid(1); -switch gridid - case 'M36' - map_scale_m = 36032.220840584; - cols = 964; - rows = 406; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'M09' - map_scale_m = 9008.055210146; - cols = 3856; - rows = 1624; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'M03' - map_scale_m = 3002.6850700487; - cols = 11568; - rows = 4872; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'M01' - map_scale_m = 1000.89502334956; - cols = 34704; - rows = 14616; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'N36' - map_scale_m = 36000.0; - cols = 500; - rows = 500; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'N09' - map_scale_m = 9000.0; - cols = 2000; - rows = 2000; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'N03' - map_scale_m = 3000.0; - cols = 6000; - rows = 6000; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'N01' - map_scale_m = 1000.0; - cols = 18000; - rows = 18000; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'S36' - map_scale_m = 36000.0; - cols = 500; - rows = 500; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'S09' - map_scale_m = 9000.0; - cols = 2000; - rows = 2000; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'S03' - map_scale_m = 3000.0; - cols = 6000; - rows = 6000; - r0 = (cols-1)/2; - s0 = (rows-1)/2; - case 'S01' - map_scale_m = 1000.0; - cols = 18000; - rows = 18000; - r0 = (cols-1)/2; - s0 = 
(rows-1)/2; - otherwise - disp(['ERROR: Incompatible grid specification.']); -end - -% Constants returned by EASE2_MAP_INFO.PRO -epsilon = 1.0e-6; -map_equatorial_radius_m = 6378137.0; -map_eccentricity = 0.081819190843; -e2 = map_eccentricity^2; -switch projection - case 'M' - map_reference_latitude = 0.0; - map_reference_longitude = 0.0; - map_second_reference_latitude = 30.0; - sin_phi1 = sin(map_second_reference_latitude*pi/180); - cos_phi1 = cos(map_second_reference_latitude*pi/180); - kz = cos_phi1/sqrt(1.0-e2*sin_phi1*sin_phi1); - case 'N' - map_reference_latitude = 90.0; - map_reference_longitude = 0.0; - case 'S' - map_reference_latitude = -90.0; - map_reference_longitude = 0.0; -end - -% Selected calculations inside WGS84_CONVERT.PRO and WGS84_CONVERT_XY.PRO -dlon = lon - map_reference_longitude; -msk1 = dlon < -180.0; dlon(msk1) = dlon(msk1) + 360.0; -msk2 = dlon > +180.0; dlon(msk2) = dlon(msk2) - 360.0; -phi = lat*pi/180.0; -lam = dlon*pi/180.0; -sin_phi = sin(phi); -q = (1.0-e2)*((sin_phi./(1.0-e2*sin_phi.*sin_phi))-(1.0/(2.0*map_eccentricity))*log((1.0-map_eccentricity*sin_phi)./(1.0+map_eccentricity*sin_phi))); -qp = 1.0-((1.0-e2)/(2.0*map_eccentricity)*log((1.0-map_eccentricity)/(1.0+map_eccentricity))); -switch projection - case 'M' - x = map_equatorial_radius_m*kz*lam; - y = (map_equatorial_radius_m*q)/(2.0*kz); - case 'N' - tmp = qp - q; - tmp(abs(tmp) < epsilon) = 0.0; - rho = map_equatorial_radius_m*sqrt(tmp); - x = rho.*sin(lam); - y = -rho.*cos(lam); - case 'S' - tmp = qp + q; - tmp(abs(tmp) < epsilon) = 0.0; - rho = map_equatorial_radius_m*sqrt(tmp); - x = rho.*sin(lam); - y = rho.*cos(lam); -end -row = s0-(y/map_scale_m); -col = r0+(x/map_scale_m); -switch projection - case 'N' - idx = lat < 0.0; - row(idx) = NaN; - col(idx) = NaN; - case 'S' - idx = lat > 0.0; - row(idx) = NaN; - col(idx) = NaN; -end - -if exist('return_rounded','var') - if return_rounded - col=round(col); - row=round(row); - end -end - -% ========= EOF 
========================================================= +% +% SMAPEASE2FORWARD The principal function is to perform forward transformation +% from (lat,lon)'s to (row,col)'s for a set of nested EASE +% grids defined at 1, 3, 9, and 36km grid resolutions. These +% grids are all based on the EASE-Grid 2.0 specification (WGS84 +% ellipsoid). +% +% SYNTAX [row,col] = smapease2forward(lat,lon,gridid) +% +% where gridid is a 3-character string enclosed in single +% quotes, in the form of {M|N|S}{01,03,09,36}. This subroutine +% accepts vector inputs and produce vector outputs. +% +% HISTORY This subroutine was adapted from the offical EASE-Grid-2.0 +% conversion utilities (written in IDL) developed by the +% NSIDC. +% +% Note that in NSIDC's original implementation, (row,col) are +% zero-based. In other words, the first cell is (0,0) and the +% last cell is (N-1,M-1), where N and M are the row and column +% dimensions of the array. In this MATLAB implementation, the +% same convention is used. In other words, the end point of +% the first cell is located at (r,c) = (-0.5,-0.5) whereas the +% end point of the last cell is located at (r,c) = (14615.5, +% 34703.5). Thus, +% +% [lat,lon] = smapease2inverse(-0.5,-0.5,'M01') returns: +% lat = 85.044566407398861 +% lon = 1.799999999999994e+02 +% +% [lat,lon] = smapease2inverse(14615.5,34703.5,'M01') returns: +% lat = -85.044566407398861 +% lon = -1.799999999999994e+02 +% +% The polar grids, on the other hand, are more complete in +% terms of latitude coverage: +% +% [lat,lon] = smapease2inverse(8999,8999,'N01') +% lat = 89.993669248945238 +% lon = -135 +% [lat,lon] = smapease2inverse(9000,9000,'N01') +% lat = 89.993669248945238 +% lon = 45 +% +% [lat,lon] = smapease2inverse(8999,8999,'S01') +% lat = -89.993669248945238 +% lon = -45 +% [lat,lon] = smapease2inverse(9000,9000,'S01') +% lat = -89.993669248945238 +% lon = 135 +% +% UPDATE North/south polar projections were added. (03/2012) +% +% REFERENCE Brodzik, M. J., B. 
Billingsley, T. Haran, B. Raup, and M. H. +% Savoie (2012): EASE-Grid 2.0: Incremental but Significant +% Improvements for Earth-Gridded Data Sets. ISPRS International +% Journal of Geo-Information, vol. 1, no. 1, pp. 32-45, +% http://www.mdpi.com/2220-9964/1/1/32/ +% +% Steven Chan, 11/2011 +% Email: steven.k.chan@jpl.nasa.gov + +function [row,col] = EASEv2_latlon2ind(lat,lon,gridid,return_rounded) + +% By design, [row, col] are real numbers, with the fractional portion indicating +% the position of the specified [lat, lon] coordinates between adjacent grid +% cell centers. E.g., col=0.5 indicates that the input longitude is on the +% boundary between grid cells associated with col=0 and col=1. +% If the [optional] input argument 'return_rounded' is present and ~=0, then +% [row, col] are rounded to the nearest integer. + +% Constants returned by EASE2_GRID_INFO.PRO +projection = gridid(1); +switch gridid + case 'M36' + map_scale_m = 36032.220840584; + cols = 964; + rows = 406; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'M09' + map_scale_m = 9008.055210146; + cols = 3856; + rows = 1624; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'M03' + map_scale_m = 3002.6850700487; + cols = 11568; + rows = 4872; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'M01' + map_scale_m = 1000.89502334956; + cols = 34704; + rows = 14616; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'N36' + map_scale_m = 36000.0; + cols = 500; + rows = 500; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'N09' + map_scale_m = 9000.0; + cols = 2000; + rows = 2000; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'N03' + map_scale_m = 3000.0; + cols = 6000; + rows = 6000; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'N01' + map_scale_m = 1000.0; + cols = 18000; + rows = 18000; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'S36' + map_scale_m = 36000.0; + cols = 500; + rows = 500; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'S09' + map_scale_m = 9000.0; + cols = 2000; + rows = 2000; + r0 = (cols-1)/2; + s0 = 
(rows-1)/2; + case 'S03' + map_scale_m = 3000.0; + cols = 6000; + rows = 6000; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + case 'S01' + map_scale_m = 1000.0; + cols = 18000; + rows = 18000; + r0 = (cols-1)/2; + s0 = (rows-1)/2; + otherwise + disp(['ERROR: Incompatible grid specification.']); +end + +% Constants returned by EASE2_MAP_INFO.PRO +epsilon = 1.0e-6; +map_equatorial_radius_m = 6378137.0; +map_eccentricity = 0.081819190843; +e2 = map_eccentricity^2; +switch projection + case 'M' + map_reference_latitude = 0.0; + map_reference_longitude = 0.0; + map_second_reference_latitude = 30.0; + sin_phi1 = sin(map_second_reference_latitude*pi/180); + cos_phi1 = cos(map_second_reference_latitude*pi/180); + kz = cos_phi1/sqrt(1.0-e2*sin_phi1*sin_phi1); + case 'N' + map_reference_latitude = 90.0; + map_reference_longitude = 0.0; + case 'S' + map_reference_latitude = -90.0; + map_reference_longitude = 0.0; +end + +% Selected calculations inside WGS84_CONVERT.PRO and WGS84_CONVERT_XY.PRO +dlon = lon - map_reference_longitude; +msk1 = dlon < -180.0; dlon(msk1) = dlon(msk1) + 360.0; +msk2 = dlon > +180.0; dlon(msk2) = dlon(msk2) - 360.0; +phi = lat*pi/180.0; +lam = dlon*pi/180.0; +sin_phi = sin(phi); +q = (1.0-e2)*((sin_phi./(1.0-e2*sin_phi.*sin_phi))-(1.0/(2.0*map_eccentricity))*log((1.0-map_eccentricity*sin_phi)./(1.0+map_eccentricity*sin_phi))); +qp = 1.0-((1.0-e2)/(2.0*map_eccentricity)*log((1.0-map_eccentricity)/(1.0+map_eccentricity))); +switch projection + case 'M' + x = map_equatorial_radius_m*kz*lam; + y = (map_equatorial_radius_m*q)/(2.0*kz); + case 'N' + tmp = qp - q; + tmp(abs(tmp) < epsilon) = 0.0; + rho = map_equatorial_radius_m*sqrt(tmp); + x = rho.*sin(lam); + y = -rho.*cos(lam); + case 'S' + tmp = qp + q; + tmp(abs(tmp) < epsilon) = 0.0; + rho = map_equatorial_radius_m*sqrt(tmp); + x = rho.*sin(lam); + y = rho.*cos(lam); +end +row = s0-(y/map_scale_m); +col = r0+(x/map_scale_m); +switch projection + case 'N' + idx = lat < 0.0; + row(idx) = NaN; + col(idx) = NaN; 
+ case 'S' + idx = lat > 0.0; + row(idx) = NaN; + col(idx) = NaN; +end + +if exist('return_rounded','var') + if return_rounded + col=round(col); + row=round(row); + end +end + +% ========= EOF ========================================================= diff --git a/GEOSldas_App/util/shared/python/EASEv2.py b/GEOSldas_App/util/shared/python/EASEv2.py new file mode 100644 index 00000000..31cd4392 --- /dev/null +++ b/GEOSldas_App/util/shared/python/EASEv2.py @@ -0,0 +1,199 @@ +import numpy as np + +# SMAPEASE2INVERSE The principal function is to perform inverse transformation +# from (row,col)'s to (lat,lon)'s for a set of nested EASE +# grids defined at 1, 3, 9, and 36km grid resolutions. These +# grids are all based on the EASE-Grid 2.0 specification (WGS84 +# ellipsoid). + +# SYNTAX [lat,lon] = smapease2inverse(row,col,gridid) + +# where gridid is a 3-character string enclosed in single +# quotes, in the form of {M|N|S}{01,03,09,36}. This subroutine +# accepts vector inputs and produce vector outputs. + +# HISTORY This subroutine was adapted from the offical EASE-Grid-2.0 +# conversion utilities (written in IDL) developed by the +# NSIDC. + +# Note that in NSIDC's original implementation, (row,col) are +# zero-based. In other words, the first cell is (0,0) and the +# last cell is (N-1,M-1), where N and M are the row and column +# dimensions of the array. In this MATLAB implementation, the +# same convention is used. In other words, the end point of +# the first cell is located at (r,c) = (-0.5,-0.5) whereas the +# end point of the last cell is located at (r,c) = (14615.5, +# 34703.5). 
Thus, + +# [lat,lon] = smapease2inverse(-0.5,-0.5,'M01') returns: +# lat = 85.044566407398861 +# lon = 1.799999999999994e+02 + +# [lat,lon] = smapease2inverse(14615.5,34703.5,'M01') returns: +# lat = -85.044566407398861 +# lon = -1.799999999999994e+02 + +# The polar grids, on the other hand, are more complete in +# terms of latitude coverage: + +# [lat,lon] = smapease2inverse(8999,8999,'N01') +# lat = 89.993669248945238 +# lon = -135 +# [lat,lon] = smapease2inverse(9000,9000,'N01') +# lat = 89.993669248945238 +# lon = 45 + +# [lat,lon] = smapease2inverse(8999,8999,'S01') +# lat = -89.993669248945238 +# lon = -45 +# [lat,lon] = smapease2inverse(9000,9000,'S01') +# lat = -89.993669248945238 +# lon = 135 + +# UPDATE North/south polar projections were added. (03/2012) + +# REFERENCE Brodzik, M. J., B. Billingsley, T. Haran, B. Raup, and M. H. +# Savoie (2012): EASE-Grid 2.0: Incremental but Significant +# Improvements for Earth-Gridded Data Sets. ISPRS International +# Journal of Geo-Information, vol. 1, no. 1, pp. 
32-45, +# http://www.mdpi.com/2220-9964/1/1/32/ +# +# Steven Chan, 11/2011 +# Email: steven.k.chan@jpl.nasa.gov + +def EASEv2_ind2latlon(row=None,col=None,gridid=None,*args,**kwargs): + + # Constants returned by EASE2_GRID_INFO.PRO + projection=gridid[0] + if 'M36' == gridid: + map_scale_m=36032.220840584 + cols=964 + rows=406 + r0=(cols - 1) / 2 + s0=(rows - 1) / 2 + elif 'M09' == gridid: + map_scale_m=9008.055210146 + cols=3856 + rows=1624 + r0=(cols - 1) / 2 + s0=(rows - 1) / 2 + elif 'M03' == gridid: + map_scale_m=3002.6850700487 + cols=11568 + rows=4872 + r0=(cols - 1) / 2 + s0=(rows - 1) / 2 + elif 'M01' == gridid: + map_scale_m=1000.89502334956 + cols=34704 + rows=14616 + r0=(cols - 1) / 2 + s0=(rows - 1) / 2 + elif 'N36' == gridid: + map_scale_m=36000.0 + cols=500 + rows=500 + r0=(cols - 1) / 2 + s0=(rows - 1) / 2 + elif 'N09' == gridid: + map_scale_m=9000.0 + cols=2000 + rows=2000 + r0=(cols - 1) / 2 + s0=(rows - 1) / 2 + elif 'N03' == gridid: + map_scale_m=3000.0 + cols=6000 + rows=6000 + r0=(cols - 1) / 2 + s0=(rows - 1) / 2 + elif 'N01' == gridid: + map_scale_m=1000.0 + cols=18000 + rows=18000 + r0=(cols - 1) / 2 + s0=(rows - 1) / 2 + elif 'S36' == gridid: + map_scale_m=36000.0 + cols=500 + rows=500 + r0=(cols - 1) / 2 + s0=(rows - 1) / 2 + elif 'S09' == gridid: + map_scale_m=9000.0 + cols=2000 + rows=2000 + r0=(cols - 1) / 2 + s0=(rows - 1) / 2 + elif 'S03' == gridid: + map_scale_m=3000.0 + cols=6000 + rows=6000 + r0=(cols - 1) / 2 + s0=(rows - 1) / 2 + elif 'S01' == gridid: + map_scale_m=1000.0 + cols=18000 + rows=18000 + r0=(cols - 1) / 2 + s0=(rows - 1) / 2 + else: + print('ERROR: Incompatible grid specification.') + + # Constants returned by EASE2_MAP_INFO.PRO + map_equatorial_radius_m=6378137.0 + map_eccentricity=0.081819190843 + e2=map_eccentricity ** 2 + if 'M' == projection: + map_reference_latitude=0.0 + map_reference_longitude=0.0 + map_second_reference_latitude=30.0 + sin_phi1=np.sin(map_second_reference_latitude*np.pi / 180) + 
cos_phi1=np.cos(map_second_reference_latitude*np.pi / 180) + kz=cos_phi1 / np.sqrt(1.0 - e2*sin_phi1*sin_phi1) + elif 'N' == projection: + map_reference_latitude=90.0 + map_reference_longitude=0.0 + elif 'S' == projection: + map_reference_latitude=- 90.0 + map_reference_longitude=0.0 + + # Selected calculations inside WGS84_INVERSE.PRO + x= (col - r0) * map_scale_m + y= (s0 - row) * map_scale_m + # Selected calculations inside WGS84_INVERSE_XY.PRO + e4=map_eccentricity ** 4 + e6=map_eccentricity ** 6 + qp= (1.0 - e2)*((1.0 / (1.0 - e2)) - (1.0 / (2.0*map_eccentricity))*np.log((1.0 - map_eccentricity) / (1.0 + map_eccentricity))) + if 'M' == projection: + beta= np.arcsin(2.0 * y * kz / (map_equatorial_radius_m * qp)) + lam= x / (map_equatorial_radius_m*kz) + elif 'N' == projection: + rho=np.sqrt(x ** 2 + y ** 2) + beta=np.arcsin(1.0 - (rho ** 2) / (qp * map_equatorial_radius_m ** 2)) + lam=np.arctan2(x,- y) + elif 'S' == projection: + rho=np.sqrt(x ** 2 + y ** 2) + beta=- np.arcsin(1.0 - (rho ** 2) / (qp * map_equatorial_radius_m ** 2)) + lam=np.arctan2(x,y) + + phi= beta + (((e2 / 3.0) + ((31.0 / 180.0) * e4) + ((517.0 / 5040.0) * e6)) * np.sin(2.0 * beta)) + \ + ((((23.0 / 360.0) * e4) + ((251.0 / 3780.0) * e6)) * np.sin(4.0*beta)) + (((761.0 / 45360.0) * e6) * np.sin(6.0 * beta)) + lat= phi * 180.0 / np.pi + lon= map_reference_longitude + (lam * 180.0 / np.pi) + msk1= np.where(lon < - 180.0) + lon[msk1]= lon[msk1] + 360.0 + msk2= np.where(lon > 180.0) + lon[msk2]=lon[msk2] - 360.0 + if 'N' == projection: + idx=np.where(lat < 0.0) + lat[idx]= float('nan') + lon[idx]= float('nan') + elif 'S' == projection: + idx= np.where(lat > 0.0) + lat[idx]= float('nan') + lon[idx]= float('nan') + + return lat, lon + +# ============= EOF ============================================ diff --git a/GEOSldas_App/util/shared/python/plot.py b/GEOSldas_App/util/shared/python/plot.py new file mode 100644 index 00000000..5dff0892 --- /dev/null +++ b/GEOSldas_App/util/shared/python/plot.py 
@@ -0,0 +1,80 @@ +# collection of py functions for plotting + +import numpy as np +import matplotlib.pyplot as plt + +from mpl_toolkits import basemap + +def plotMap( + data, + *, + ax=None, + lat=None, + lon=None, + title=None, + cRange=None, + figsize=(8, 4), + clbar=True, + cRangeint=False, + cmap=plt.cm.jet, + bounding=None, + prj="cyl", +): + + # color range + if cRange is not None: + vmin = cRange[0] + vmax = cRange[1] + else: + temp = flatData(data) + vmin = np.percentile(temp, 5) + vmax = np.percentile(temp, 95) + if cRangeint is True: + vmin = int(round(vmin)) + vmax = int(round(vmax)) + if ax is None: + fig = plt.figure(figsize=figsize) + ax = fig.subplots() + + # map boundary + if bounding is None: + bounding = [ + np.min(lat) - 0.5, + np.max(lat) + 0.5, + np.min(lon) - 0.5, + np.max(lon) + 0.5, + ] + + # add basemap + mm = basemap.Basemap( + llcrnrlat=bounding[0], + urcrnrlat=bounding[1], + llcrnrlon=bounding[2], + urcrnrlon=bounding[3], + projection=prj, + resolution="c", + ax=ax, + ) + mm.drawcoastlines() + #mm.drawstates(linestyle="dashed") + #mm.drawcountries(linewidth=1.0, linestyle="-.") + # plot data on basemap + cs = mm.pcolormesh(lon, lat, data, cmap=cmap, vmin=vmin, vmax=vmax) + + # colorbar + if clbar is True: + cb = mm.colorbar(cs, pad="5%", location="bottom") + if 'normalized' in title: + cb.set_ticks(np.linspace(vmin,vmax,6)) + cb.set_ticklabels([f'{10**x:.2f}' for x in np.linspace(vmin, vmax, 6)]) + + # plot title, return objects in case plot needs adjustment after function call + if title is not None: + ax.set_title(title) + if ax is None: + return fig, ax, mm + else: + return mm, cs + + +# ================ EOF ================================================= diff --git a/GEOSldas_App/util/shared/python/read_GEOSldas.py b/GEOSldas_App/util/shared/python/read_GEOSldas.py new file mode 100644 index 00000000..ccc1aa18 --- /dev/null +++ b/GEOSldas_App/util/shared/python/read_GEOSldas.py @@ -0,0 +1,295 @@ + +# collection of readers for 
GEOSldas output files + +import struct +import os +import numpy as np + +# ---------------------------------------------------------------------------- +# +# reader for GEOSldas obsparam file (ASCII) + +def read_obs_param(fname): + print(f"Reading {fname}") + + with open(fname, 'r') as fid: + N_obs_param = int(fid.readline().strip()) + + obs_param = [] + for _ in range(N_obs_param): + param = {} + param['descr'] = fid.readline().strip().strip('"') + param['species'] = float(fid.readline().strip()) + param['orbit'] = float(fid.readline().strip()) + param['pol'] = float(fid.readline().strip()) + + param['N_ang'] = int(float(fid.readline().strip())) + + param['ang'] = np.array([float(x) for x in fid.readline().split()]) + + param['freq'] = float(fid.readline().strip()) + param['FOV'] = float(fid.readline().strip()) + param['FOV_units'] = fid.readline().strip().strip('"') + param['assim'] = fid.readline().strip() + param['scale'] = fid.readline().strip() + param['getinnov'] = fid.readline().strip() + param['RTM_ID'] = float(fid.readline().strip()) + param['bias_Npar'] = float(fid.readline().strip()) + param['bias_trel'] = float(fid.readline().strip()) + param['bias_tcut'] = float(fid.readline().strip()) + param['nodata'] = float(fid.readline().strip()) + param['varname'] = fid.readline().strip().strip('"') + param['units'] = fid.readline().strip().strip('"') + param['path'] = fid.readline().strip().strip('"') + param['name'] = fid.readline().strip().strip('"') + param['maskpath'] = fid.readline().strip().strip('"') + param['maskname'] = fid.readline().strip().strip('"') + param['scalepath'] = fid.readline().strip().strip('"') + param['scalename'] = fid.readline().strip().strip('"') + param['flistpath'] = fid.readline().strip().strip('"') + param['flistname'] = fid.readline().strip().strip('"') + param['errstd'] = float(fid.readline().strip()) + param['std_normal_max'] = float(fid.readline().strip()) + param['zeromean'] = fid.readline().strip() + param['coarsen_pert'] = 
fid.readline().strip() + param['xcorr'] = float(fid.readline().strip()) + param['ycorr'] = float(fid.readline().strip()) + param['adapt'] = float(fid.readline().strip()) + + obs_param.append(param) + + print(f"Done reading obs_param for {N_obs_param} species") + + return obs_param + +# ---------------------------------------------------------------------------- +# +# reader for GEOSldas tilecoord file (binary) + +def read_tilecoord(fname): + int_precision = 'i' + float_precision = 'f' + + # SPECIFY endianness + machfmt = '<' # '>' for big-endian, '<' for little-endian + + print(f"reading from {fname}") + + tile_coord = {} + + with open(fname, 'rb') as ifp: + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + tile_coord['N_tile'] = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + Nt = tile_coord['N_tile'] + + fields = ['tile_id', 'typ', 'pfaf', 'com_lon', 'com_lat', 'min_lon', 'max_lon', + 'min_lat', 'max_lat', 'i_indg', 'j_indg', 'frac_cell', 'frac_pfaf', + 'area', 'elev'] + + for field in fields: + this_dtype = int_precision if field in ['tile_id', 'typ', 'pfaf', 'i_indg', 'j_indg'] else float_precision + + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + tile_coord[field] = np.frombuffer(ifp.read(Nt * 4), dtype=f'{machfmt}{this_dtype}') + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + print("done reading file") + return tile_coord + +# ---------------------------------------------------------------------------- +# +# reader for GEOSldas tilecoord file (binary) + +def read_tilegrids(fname): + """ + Read tile grid information from file and return global and domain grid structures. 
+ + Parameters: + ---------- + fname : str + Path to the input file (either .txt or .bin) + + Returns: + ------- + tile_grid_g : dict + Global tile grid information + tile_grid_d : dict + Domain tile grid information + """ + + # Set endian format + machfmt = '<' # '>' for big-endian, '<' for little-endian + + # Read binary file + print(f'reading from {fname}') + + with open(fname, 'rb') as ifp: + # Read "global" and "domain" records + for grid in ['global','domain']: + + tile_grid = {} + + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + tile_grid['gridtype'] = ifp.read(40).decode('ascii').strip('\x00') + tile_grid['ind_base'] = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + tile_grid['i_dir'] = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + tile_grid['j_dir'] = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + tile_grid['N_lon'] = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + tile_grid['N_lat'] = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + tile_grid['i_offg'] = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + tile_grid['j_offg'] = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + tile_grid['ll_lon'] = struct.unpack(f'{machfmt}f', ifp.read(4))[0] + tile_grid['ll_lat'] = struct.unpack(f'{machfmt}f', ifp.read(4))[0] + tile_grid['ur_lon'] = struct.unpack(f'{machfmt}f', ifp.read(4))[0] + tile_grid['ur_lat'] = struct.unpack(f'{machfmt}f', ifp.read(4))[0] + tile_grid['dlon'] = struct.unpack(f'{machfmt}f', ifp.read(4))[0] + tile_grid['dlat'] = struct.unpack(f'{machfmt}f', ifp.read(4))[0] + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + if 'global' in grid: + tile_grid_g = tile_grid + else: + tile_grid_d = tile_grid + + return tile_grid_g, tile_grid_d + +# ---------------------------------------------------------------------------- +# +# reader for GEOSldas ObsFcstAna file (binary) + +def read_ObsFcstAna(fname, isLDASsa=False): + + # Initialize outputs + nodata = -9999 + + date_time = { + 'year' : nodata, + 'month' : nodata, + 'day' : nodata, + 
'hour' : nodata, + 'min' : nodata, + 'sec' : nodata, + 'dofyr' : nodata, + 'pentad': nodata + } + + obs_assim = [] + obs_species = [] + obs_tilenum = [] + obs_lon = [] + obs_lat = [] + obs_obs = [] + obs_obsvar = [] + obs_fcst = [] + obs_fcstvar = [] + obs_ana = [] + obs_anavar = [] + + # SPECIFY endianness + machfmt = '<' # '>' for big-endian, '<' for little-endian + + if os.path.exists(fname): + print(f"reading from {fname}") + + with open(fname, 'rb') as ifp: + # Read N_obs and time stamp entry + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + N_obs = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + year, month, day, hour, minute, second, dofyr, pentad = struct.unpack(f'{machfmt}8i', ifp.read(32)) + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + date_time = { + 'year' : year, + 'month' : month, + 'day' : day, + 'hour' : hour, + 'min' : minute, + 'sec' : second, + 'dofyr' : dofyr, + 'pentad': pentad + } + + # Read observation assim flag + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + tmp_data = np.frombuffer(ifp.read(N_obs * 4), dtype=f'{machfmt}i').copy() + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + obs_assim = np.zeros(N_obs, dtype=int) + obs_assim[tmp_data != 0] = 1 + + # Read species information + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + obs_species = np.frombuffer(ifp.read(N_obs * 4), dtype=f'{machfmt}i').copy() + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + # Read tile number information + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + obs_tilenum = np.frombuffer(ifp.read(N_obs * 4), dtype=f'{machfmt}i').copy() + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + # Read longitude + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + obs_lon = np.frombuffer(ifp.read(N_obs * 4), dtype=f'{machfmt}f').copy() + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + # Read latitude + fortran_tag = 
struct.unpack(f'{machfmt}i', ifp.read(4))[0] + obs_lat = np.frombuffer(ifp.read(N_obs * 4), dtype=f'{machfmt}f').copy() + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + # Read observation value + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + obs_obs = np.frombuffer(ifp.read(N_obs * 4), dtype=f'{machfmt}f').copy() + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + # Read observation variance + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + obs_obsvar = np.frombuffer(ifp.read(N_obs * 4), dtype=f'{machfmt}f').copy() + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + # Read observation-space model forecast value + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + obs_fcst = np.frombuffer(ifp.read(N_obs * 4), dtype=f'{machfmt}f').copy() + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + # Read observation-space model forecast variance + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + obs_fcstvar = np.frombuffer(ifp.read(N_obs * 4), dtype=f'{machfmt}f').copy() + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + # Read observation-space analysis value + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + obs_ana = np.frombuffer(ifp.read(N_obs * 4), dtype=f'{machfmt}f').copy() + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + # Read observation-space analysis variance + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + obs_anavar = np.frombuffer(ifp.read(N_obs * 4), dtype=f'{machfmt}f').copy() + fortran_tag = struct.unpack(f'{machfmt}i', ifp.read(4))[0] + + # No-data check + obs_obsvar[ obs_obsvar == nodata] = np.nan + obs_fcst[ obs_fcst == nodata] = np.nan + obs_fcstvar[obs_fcstvar == nodata] = np.nan + obs_ana[ obs_ana == nodata] = np.nan + obs_anavar[ obs_anavar == nodata] = np.nan + + else: + print(f"file does not exist: {fname}") + + return {'date_time' : date_time, + 'obs_assim' : obs_assim, + 'obs_species': 
obs_species, + 'obs_tilenum': obs_tilenum, + 'obs_lon' : obs_lon, + 'obs_lat' : obs_lat, + 'obs_obs' : obs_obs, + 'obs_obsvar' : obs_obsvar, + 'obs_fcst' : obs_fcst, + 'obs_fcstvar': obs_fcstvar, + 'obs_ana' : obs_ana, + 'obs_anavar' : obs_anavar} + +# ================ EOF ================================================= diff --git a/GEOSldas_App/util/shared/python/tile2grid.py b/GEOSldas_App/util/shared/python/tile2grid.py new file mode 100644 index 00000000..096d45b6 --- /dev/null +++ b/GEOSldas_App/util/shared/python/tile2grid.py @@ -0,0 +1,66 @@ + + +import os +import numpy as np + +def tile2grid(tile_data, tile_coord, tile_grid): + + """ + Map (1d) tile-space data onto (2d) grid that is associated with the tile space. + + Parameters: + ---------- + tile_data : Array in tile space, shape (N_tile, N_fields) + tile_coord : Dictionary containing tile coordinate information + tile_grid : Dictionary containing tile grid information + + Returns: + ------- + grid_data : Array in grid space, shape (N_lat, N_lon, N_fields) + """ + + # Verify input datasize + if tile_data.shape[0] != tile_coord['N_tile']: + print(f'Error: size of tile2grid input data does not match that of N_tile') + sys.exit() + + # if tile_data is 1-D [N_tile], expand into 2-D [N_tile,1] + if tile_data.ndim == 1: + tile_data = np.expand_dims(tile_data, axis=1) + + N_fields = tile_data.shape[-1] + # Initialize grid data array + grid_data = np.zeros((tile_grid['N_lat'], tile_grid['N_lon'], N_fields)) + + for k in range(N_fields): + # Initialize weight grid for current field + wgrid = np.zeros((tile_grid['N_lat'], tile_grid['N_lon'])) + + # Loop through tile space + for n in range(tile_coord['N_tile']): + # Calculate grid indices (adjust for Python's 0-based indexing) + i = tile_coord['i_indg'][n] - (tile_grid['i_offg'] - (1-tile_grid['ind_base'])) - 1 + j = tile_coord['j_indg'][n] - (tile_grid['j_offg'] - (1-tile_grid['ind_base'])) - 1 + + # Get weight + w = tile_coord['frac_cell'][n] + + # Check if 
current tile data is valid (not no-data) + if ~np.isnan(tile_data[n, k]): + # Accumulate weighted data + grid_data[j, i, k] += w * tile_data[n,k] + wgrid[ j, i] += w + + # Normalize and set no-data values + for i in range(tile_grid['N_lon']): + for j in range(tile_grid['N_lat']): + if wgrid[j, i] > 0.0: + # Normalize by total weight + grid_data[j, i, k] = grid_data[j, i, k] / wgrid[j, i] + else: + # Set no-data value + grid_data[j, i, k] = np.nan + + return grid_data.squeeze() + +# ============ EOF =============================================== diff --git a/GEOSldas_App/util/shared/python/tile_to_latlongrid.py b/GEOSldas_App/util/shared/python/tile_to_latlongrid.py new file mode 100644 index 00000000..da870be0 --- /dev/null +++ b/GEOSldas_App/util/shared/python/tile_to_latlongrid.py @@ -0,0 +1,65 @@ +import os +import numpy as np + +# + +def tile_to_latlongrid( tile_data, tile_coord, resolution, nodata=1.e15, nodata_tol_frac=1.e-4): + + """ + Map (1d) tile-space data onto (2d) regular lat/lon grid of arbitrary "resolution". + + Use "tile2grid.py" to map tile data onto the grid that is associated with the tile space. 
+ + Parameters: + ---------- + tile_data : Array in tile space, shape (N_tile, N_fields) + tile_coord : Dictionary containing tile coordinate information + resolution : Target grid resolution + nodata : Value for missing data + nodata_tol_frac : Tolerance fraction for comparing values to nodata + + Returns: + ------- + grid_data : Array in regular grid space, shape (N_lat, N_lon, N_fields) + lat_2d : Lat array in regular grid space, shape (N_lat, N_lon) + lon_2d : Lon array in regular grid space, shape (N_lat, N_lon) + + """ + + tile_data[np.abs(tile_data - nodata) < nodata*nodata_tol_frac] = np.nan + + # Verify input datasize + if tile_data.shape[0] != tile_coord['N_tile']: + print(f'Error: size of tile2grid input data does not match that of N_tile') + sys.exit() + + # if tile_data is 1-D [N_tile], expand into 2-D [N_tile,1] + if tile_data.ndim == 1: + tile_data = np.expand_dims(tile_data, axis=1) + + nf = tile_data.shape[-1] + + lat_grid = np.arange( -90+resolution/2, 90+resolution/2, resolution) + lon_grid = np.arange(-180+resolution/2, 180+resolution/2, resolution) + + lon_2d, lat_2d = np.meshgrid(lon_grid, lat_grid) + + grid_data = np.zeros((len(lat_grid), len(lon_grid), nf)) + N_tilecnt = np.zeros((len(lat_grid), len(lon_grid), nf)) + + + for k in range(tile_coord['N_tile']): + for v in range(nf): + if ~np.isnan(tile_data[k,v]): + j = np.searchsorted(lat_grid + resolution/2., tile_coord['com_lat'][k]) + i = np.searchsorted(lon_grid + resolution/2., tile_coord['com_lon'][k]) + # grid value takes the mean of all tiles within + # new_mean = old_mean + (new_value - old_mean)/count + N_tilecnt[j,i,v] += 1 + grid_data[j,i,v] = grid_data[j,i,v] + (tile_data[k,v]-grid_data[j,i,v])/N_tilecnt[j,i,v] + + grid_data[N_tilecnt == 0] = np.nan + + return grid_data.squeeze(), lat_2d, lon_2d + +# =========================== EOF ============================================================ diff --git a/LDAS_Shared/LDAS_TileCoordRoutines.F90 
b/LDAS_Shared/LDAS_TileCoordRoutines.F90 index 7ddd5b55..43e5f86b 100644 --- a/LDAS_Shared/LDAS_TileCoordRoutines.F90 +++ b/LDAS_Shared/LDAS_TileCoordRoutines.F90 @@ -26,10 +26,9 @@ module LDAS_TileCoordRoutines use MAPL_ConstantsMod, ONLY: & MAPL_RADIUS ! Earth radius - use EASE_conv, ONLY: & - ease_convert, & - ease_inverse, & - ease_extent + use MAPL, ONLY: & + MAPL_ease_convert, & + MAPL_ease_extent use LDAS_ExceptionsMod, ONLY: & ldas_abort, & @@ -380,7 +379,7 @@ subroutine LDAS_create_grid_g( gridname, n_lon, n_lat, & tile_grid%i_dir = +1 tile_grid%j_dir = -1 - call ease_extent ( & + call MAPL_ease_extent ( & gridname, cols, rows, & cell_area = ease_cell_area, & ! [m^2] ll_lon = tile_grid%ll_lon, & @@ -1002,7 +1001,7 @@ subroutine get_ij_ind_from_latlon( tile_grid, lat, lon, i_ind, j_ind ) ! EASE grid lat/lon to index provides *global*, *0-based* index! - call ease_convert(tile_grid%gridtype, lat, lon, r, s) + call MAPL_ease_convert(tile_grid%gridtype, lat, lon, r, s) i_indg = nint(r) ! i_ind or lon_ind j_indg = nint(s) ! j_ind or lat_ind