diff --git a/esmvaltool/cmorizers/data/cmor_config/ESACCI-LST.yml b/esmvaltool/cmorizers/data/cmor_config/ESACCI-LST.yml
index b15ce08be0..8128f267f5 100644
--- a/esmvaltool/cmorizers/data/cmor_config/ESACCI-LST.yml
+++ b/esmvaltool/cmorizers/data/cmor_config/ESACCI-LST.yml
@@ -1,31 +1,102 @@
 # CMORIZE ESA CCI LST
-# Follwing CRU and ESACCI-OC as examples
-# Only looking at AQUA MONTHLY data but python has placeholders for different platforms
+# Only looking at AQUA MONTHLY data
 ---
-
 # Common global attributes for Cmorizer output
 attributes:
   dataset_id: ESACCI-LST
-  version: '1.00'
+  version: '3.00'
   tier: 2
   project_id: OBS
   source: 'ESA CCI'
   modeling_realm: sat
   reference: 'esacci_lst'
   comment: ''
-  start_year: 2003
-  end_year: 2018
+  start_year: 2006
+  end_year: 2007
 
 # Variables to cmorize
 # These go into the vals dictionary in the python script
 variables:
-  ts:
+  tsDay:
+    mip: Amon
+    raw: land surface temperature
+    raw_units: kelvin
+    file: 'ESACCI-LST-L3C-LST-MODISA-0.01deg_1MONTHLY_DAY-'
+    start_year: 2006
+    end_year: 2007
+
+  tsNight:
     mip: Amon
     raw: land surface temperature
     raw_units: kelvin
-    file_day: 'ESACCI-LST-L3C-LST-MODISA-0.05deg_1MONTHLY_DAY-'
-    file_night: 'ESACCI-LST-L3C-LST-MODISA-0.05deg_1MONTHLY_NIGHT-'
-    # plan to make it possible to change MODISA here but only placeholders in the python there for now
-    # Period covered: 2003-2018
-    # 2003 Only has data from July onwards for Aqua MODIS
+    file: 'ESACCI-LST-L3C-LST-MODISA-0.01deg_1MONTHLY_NIGHT-'
+    start_year: 2006
+    end_year: 2007
+
+  tsLocalAtmErrDay:
+    mip: Amon
+    raw: uncertainty from locally correlated errors on atmospheric scales
+    raw_units: kelvin
+    file: 'ESACCI-LST-L3C-LST-MODISA-0.01deg_1MONTHLY_DAY-'
+    start_year: 2006
+    end_year: 2007
+
+  tsLocalAtmErrNight:
+    mip: Amon
+    raw: uncertainty from locally correlated errors on atmospheric scales
+    raw_units: kelvin
+    file: 'ESACCI-LST-L3C-LST-MODISA-0.01deg_1MONTHLY_NIGHT-'
+    start_year: 2006
+    end_year: 2007
+
+  tsLocalSfcErrDay:
+    mip: Amon
+    raw: uncertainty from locally correlated errors on surface scales
+    raw_units: kelvin
+    file: 'ESACCI-LST-L3C-LST-MODISA-0.01deg_1MONTHLY_DAY-'
+    start_year: 2006
+    end_year: 2007
+
+  tsLocalSfcErrNight:
+    mip: Amon
+    raw: uncertainty from locally correlated errors on surface scales
+    raw_units: kelvin
+    file: 'ESACCI-LST-L3C-LST-MODISA-0.01deg_1MONTHLY_NIGHT-'
+    start_year: 2006
+    end_year: 2007
+
+  tsLSSysErrDay:
+    mip: Amon
+    raw: uncertainty from large-scale systematic errors
+    raw_units: kelvin
+    file: 'ESACCI-LST-L3C-LST-MODISA-0.01deg_1MONTHLY_DAY-'
+    start_year: 2006
+    end_year: 2007
+
+  tsLSSysErrNight:
+    mip: Amon
+    raw: uncertainty from large-scale systematic errors
+    raw_units: kelvin
+    file: 'ESACCI-LST-L3C-LST-MODISA-0.01deg_1MONTHLY_NIGHT-'
+    start_year: 2006
+    end_year: 2007
+
+  tsUnCorErrDay:
+    mip: Amon
+    raw: uncertainty from uncorrelated errors
+    raw_units: kelvin
+    file: 'ESACCI-LST-L3C-LST-MODISA-0.01deg_1MONTHLY_DAY-'
+    start_year: 2006
+    end_year: 2007
+
+  tsUnCorErrNight:
+    mip: Amon
+    raw: uncertainty from uncorrelated errors
+    raw_units: kelvin
+    file: 'ESACCI-LST-L3C-LST-MODISA-0.01deg_1MONTHLY_NIGHT-'
+    start_year: 2006
+    end_year: 2007
diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_lst.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_lst.py
index b61849034e..c61b261dcd 100644
--- a/esmvaltool/cmorizers/data/formatters/datasets/esacci_lst.py
+++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_lst.py
@@ -1,33 +1,15 @@
-"""ESMValTool CMORizer for ESACCI-LST data.
-
-Tier
-   Tier 2: other freely-available dataset.
-
-Source
-   On CEDA-JASMIN
-   /gws/nopw/j04/esacci_lst/public
-   For access to this JASMIN group workspace please register at
-   https://accounts.jasmin.ac.uk/services/group_workspaces/esacci_lst/
-
-Download and processing instructions
-   Put all files under a single directory (no subdirectories with years)
-   in ${RAWOBS}/Tier2/ESACCI-LST
-   BOTH DAY and NIGHT files are needed for each month
+"""ESMValTool CMORizer for ESACCI-LST-UNCERT data.
+
+BOTH DAY and NIGHT files are needed for each month.
 Currently set to work with only the MODIS AQUA L3 monthly data
-
-Modification history
-   20201015 Started by Robert King
-   20201029 Day/Night averaging added along with CMOR utils
 """
 
 import datetime
 import logging
-from calendar import monthrange
-
 import iris
+import cf_units as unit
 
-from esmvaltool.cmorizers.data import utilities as utils
+from ...utilities import fix_coords
 
 logger = logging.getLogger(__name__)
 
@@ -37,134 +19,100 @@ def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date):
     cmor_table = cfg["cmor_table"]
     glob_attrs = cfg["attributes"]
 
-    # run the cmorization
     # vals has the info from the yml file
     # var is set up in the yml file
-    for var, vals in cfg["variables"].items():
-        # leave this loop in as might be useful in
-        # the future for getting other info
-        # like uncertainty information from the original files
-
-        glob_attrs["mip"] = vals["mip"]
-        cmor_info = cmor_table.get_variable(vals["mip"], var)
-        var_name = cmor_info.short_name
-
-        for key in vals.keys():
-            logger.info("%s %s", key, vals[key])
-
-        variable = vals["raw"]
-        # not currently used, but referenced for future
-        # platform = 'MODISA'
+    for var, vals in cfg['variables'].items():
+        glob_attrs['mip'] = vals['mip']
 
         # loop over years and months
         # get years from start_year and end_year
-        # note 2003 doesn't start until July so not included at this stage
-        for year in range(
-            glob_attrs["start_year"], glob_attrs["end_year"] + 1
-        ):
-            this_years_cubes = iris.cube.CubeList()
-            for month0 in range(12):  # Change this in final version
-                month = month0 + 1
+        for year in range(vals['start_year'], vals['end_year'] + 1):
+            for month in range(1, 13):
+                logger.info(year)
                 logger.info(month)
-                day_cube, night_cube = load_cubes(
-                    in_dir,
-                    vals["file_day"],
-                    vals["file_night"],
-                    year,
-                    month,
-                    variable,
-                )
-
-                monthly_cube = make_monthly_average(
-                    day_cube, night_cube, year, month
-                )
-
-                # use CMORizer utils
-                monthly_cube = utils.fix_coords(monthly_cube)
-
-                this_years_cubes.append(monthly_cube)
-
-            # Use utils save
-            # This seems to save files all with the same name!
-            # Fixed by making yearly files
-            this_years_cubes = this_years_cubes.merge_cube()
-            this_years_cubes.long_name = "Surface Temperature"
-            this_years_cubes.standard_name = "surface_temperature"
-
-            # Fix variable metadata
-            utils.fix_var_metadata(this_years_cubes, cmor_info)
-
-            # Fix global metadata
-            utils.set_global_atts(this_years_cubes, glob_attrs)
-
-            utils.save_variable(
-                this_years_cubes,
-                var_name,
-                out_dir,
-                glob_attrs,
-                unlimited_dimensions=["time"],
-            )
-
-
-def load_cubes(in_dir, file_day, file_night, year, month, variable):
-    """Variable description.
-
-    variable = land surface temperature
-    platform = AQUA not used for now
-    but in place for future expansion to all ESC CCI LST platforms
-    """
-    path = f"{in_dir}/{file_day}{year}{month:02d}*.nc"
-    logger.info("Loading %s", path)
-    day_cube = iris.load_cube(path, variable)
-    path = f"{in_dir}/{file_night}{year}{month:02d}*.nc"
-    logger.info("Loading %s", path)
-    night_cube = iris.load_cube(path, variable)
-
-    return day_cube, night_cube
-
-
-def make_monthly_average(day_cube, night_cube, year, month):
-    """Make the average LST form the day time and night time files."""
-    day_cube.attributes.clear()
-    night_cube.attributes.clear()
-
-    co_time = night_cube.coord("time")
-    co_time.points = co_time.points + 100.0
-    # maybe the arbitrary difference should go on day cubes to
-    # take the timestamp to 12Z?
-    # not really an issue when using monthly files
-
-    result = iris.cube.CubeList([day_cube, night_cube]).concatenate_cube()
-
-    # This corrects the lonitude coord name issue
-    # This should be fixed in the next version of the CCI data
-    logger.info("Longitude coordinate correction being applied")
-    result.coords()[2].var_name = "longitude"
-    result.coords()[2].standard_name = "longitude"
-    result.coords()[2].long_name = "longitude"
-
-    monthly_cube = result.collapsed("time", iris.analysis.MEAN)
-
-    # fix time coordinate bounds
-    monthly_co_time = monthly_cube.coord("time")
-
-    time_point = (
-        datetime.datetime(year, month, 1, 0, 0)
-        - datetime.datetime(1981, 1, 1, 0, 0, 0)
-    ).total_seconds()
-    monthly_co_time.points = time_point
-
-    num_days = monthrange(year, month)[1]
-    monthly_co_time.bounds = [
-        time_point,
-        time_point + ((num_days - 1) * 24 * 3600),
-    ]
-    # should this be num_days or num_days-1 ### question for Valeriu or Axel
-    # or 23:59:59 ???
-
-    monthly_cube.attributes = {
-        "information": "Mean of Day and Night Aqua MODIS monthly LST"
-    }
-
-    return monthly_cube
+                cubes = load_cubes(in_dir,
+                                   vals['file'],
+                                   year,
+                                   month,
+                                   vals['raw'],
+                                   )
+
+                # make the time coordinate
+                time_units = 'hours since 1970-01-01 00:00:00'
+                time_point = unit.date2num(datetime.datetime(year, month, 1),
+                                           time_units,
+                                           unit.CALENDAR_STANDARD
+                                           )
+
+                time_coord = iris.coords.DimCoord(time_point,
+                                                  standard_name='time',
+                                                  long_name='time',
+                                                  var_name='time',
+                                                  units=time_units,
+                                                  bounds=None,
+                                                  attributes=None,
+                                                  coord_system=None,
+                                                  circular=False
+                                                  )
+
+                cubes.attributes = {}
+                cubes.attributes['var'] = var
+
+                try:
+                    cubes.remove_coord('time')
+                except iris.exceptions.CoordinateNotFoundError:
+                    logger.info('Coord fix issue %s', cubes.long_name)
+
+                cubes.add_dim_coord(time_coord, 0)
+
+                if cubes.long_name == 'land surface temperature':
+                    cubes.long_name = 'surface_temperature'
+                    cubes.standard_name = 'surface_temperature'
+
+                try:
+                    cubes = fix_coords(cubes)
+                except Exception:
+                    logger.info('Skipping fix_coords for %s', cubes.long_name)
+
+                try:
+                    cubes.coords()[2].standard_name = 'longitude'
+                except IndexError:
+                    # No change needed
+                    pass
+
+                var_name = cubes.attributes['var']
+
+                if cubes.var_name == 'lst':
+                    cubes.var_name = 'ts'
+
+                if 'Day' in var_name:
+                    cubes.long_name += ' Day'
+                    cubes.var_name += '_day'
+
+                if 'Night' in var_name:
+                    cubes.long_name += ' Night'
+                    cubes.var_name += '_night'
+
+                # TODO: make the use of '-' and '_' consistent
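+                # The output name below is assumed to follow the ESMValTool
+                # OBS filename convention,
+                # <project>_<dataset>_<type>_<version>_<mip>_<var>_<date>.nc,
+                # written directly here rather than via utils.save_variable()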
+                save_name = (f'{out_dir}/OBS_ESACCI-LST_sat_3.00_Amon_'
+                             f'{var_name}_{year}{month:02d}.nc')
+                iris.save(cubes, save_name)
+
+
+def load_cubes(in_dir, file, year, month, variable_list):
+    """Load files into cubes based on variables wanted in variable_list."""
+    logger.info('Loading %s/%s%d%02d*.nc', in_dir, file, year, month)
+    cube = iris.load_cube(f'{in_dir}/{file}{year}{month:02d}*.nc',
+                          variable_list
+                          )
+
+    return cube
diff --git a/esmvaltool/diag_scripts/lst/lst_uncert_prop.py b/esmvaltool/diag_scripts/lst/lst_uncert_prop.py
new file mode 100644
index 0000000000..494419799e
--- /dev/null
+++ b/esmvaltool/diag_scripts/lst/lst_uncert_prop.py
@@ -0,0 +1,770 @@
+"""ESMValTool diagnostic for ESA CCI LST V3 data - Uncertainty Propagation."""
+
+import datetime
+import logging
+
+import iris
+import iris.plot as iplt
+import matplotlib
+import matplotlib.colors as mcolors
+import matplotlib.dates as mdates
+import matplotlib.pyplot as plt
+import numpy as np
+
+from esmvaltool.diag_scripts.shared import (
+    ProvenanceLogger,
+    get_plot_filename,
+    group_metadata,
+    run_diagnostic,
+)
+
+# Colour scheme for plots
+# blue cyan green yellow
+# red purple grey
+colour_list = ['#4477aa', '#66ccee', '#228833', '#ccbb44',
+               '#ee6677', '#aa3377', '#bbbbbb']
+
+# This gives a colour list for the Land Cover type plots
+lc_colour_list = sorted(list(mcolors.CSS4_COLORS.keys()))
+lc_colour_list.reverse()
+
+plot_params = {'linewidth': 4,
+               'ticksize': 24,
+               'labelsize': 28,
+               'legendsize': 20}
+
+line_labels = {
+    'lst_unc_loc_atm_day': 'Locally Correlated (Atm)',
+    'lst_unc_loc_sfc_day': 'Locally Correlated (Sfc)',
+    'lst_unc_sys_day': 'Systematic',
+    'lst_unc_ran_day': 'Random',
+    'lst_sampling_day': 'Sampling',
+    'lst_total_unc_day': 'Total',
+    'lst_unc_loc_atm_night': 'Locally Correlated (Atm)',
+    'lst_unc_loc_sfc_night': 'Locally Correlated (Sfc)',
+    'lst_unc_sys_night': 'Systematic',
+    'lst_unc_ran_night': 'Random',
+    'lst_sampling_night': 'Sampling',
+    'lst_total_unc_night': 'Total',
+}
+
+logger = logging.getLogger(__name__)
+
+
+def _get_input_cubes(metadata):
+    """Load the data files into cubes.
+
+    Inputs:
+        metadata = List of dictionaries made from the preprocessor config
+
+    Outputs:
+        inputs = Dictionary of cubes
+        ancestors = Dictionary of filename information
+    """
+    inputs = {}
+    ancestors = {}
+    for attributes in metadata:
+        short_name = attributes['short_name']
+        filename = attributes['filename']
+        logger.info("Loading variable %s", short_name)
+        cube = iris.load_cube(filename)
+        cube.attributes.clear()
+        inputs[short_name] = cube
+        ancestors[short_name] = [filename]
+
+    return inputs, ancestors
+
+
+def _get_provenance_record(attributes, ancestor_files):
+    """Create the provenance record dictionary.
+
+    Inputs:
+        attributes = dictionary of ensembles/models used, the region bounds
+                     and years of data used.
+        ancestor_files = list of data files used by the diagnostic.
+
+    Outputs:
+        record = dictionary of provenance records.
+    """
+    caption = "This needs a new caption.".format(**attributes)
+
+    record = {
+        'caption': caption,
+        'statistics': ['mean'],
+        'domains': ['reg'],
+        'plot_types': ['times'],
+        'authors': ['king_robert'],
+        # 'references': [],
+        'ancestors': ancestor_files
+    }
+
+    return record
+
+
+def _diagnostic(config):
+    """Perform the control for the ESA CCI LST diagnostic.
+
+    Parameters
+    ----------
+    config: dict
+        the preprocessor nested dictionary holding
+        all the needed information.
+
+    Returns
+    -------
+
+    """
+    input_metadata = config['input_data'].values()
+    loaded_data = {}
+    ancestor_list = []
+
+    for dataset, metadata in group_metadata(input_metadata, 'dataset').items():
+        cubes, ancestors = _get_input_cubes(metadata)
+        loaded_data[dataset] = cubes
+
+    # save the loaded inputs for inspection
+    for key in loaded_data['ESACCI-LST'].keys():
+        iris.save(loaded_data['ESACCI-LST'][key], f'data_{key}.nc')
+
+    # Methodology:
+    # calculate for day and night separately
+    # calls to propagation equations for each component
+    #   ts               eq 1 eq_arithmetic_mean
+    #   lst_unc_loc_atm  eq 7 eq_weighted_sqrt_mean
+    #   lst_unc_sys      eq 5 = eq 1 eq_arithmetic_mean,
+    #                    no spatial propagation here
+    #   lst_unc_loc_sfc  follows the worked example in the E3UB,
+    #                    use the worked example method in the E3UB document.
+    #   lst_unc_ran      eq 4 eq_propagate_random_with_sampling
+    #
+    # Then combine to get a total day and night value
+    #   total uncert eq 9 eq_sum_in_quadrature
+    # a total all-day uncertainty can then be obtained using
+    # eq 9 eq_sum_in_quadrature
+    # Day and night time values are kept separate
+
+    lst_unc_variables = ['lst_unc_loc_atm', 'lst_unc_sys',
+                         'lst_unc_loc_sfc', 'lst_unc_ran']
+
+    # This will be a dictionary of variables and cubes
+    # of their propagated values
+    propagated_values = {}
+
+    # These define the total number of points in the data
+    lat_len = len(loaded_data['ESACCI-LST']['ts_day'].coord('latitude').points)
+    lon_len = len(loaded_data['ESACCI-LST']['ts_day'].coord('longitude').points)
+
+    # n_fill and n_use are dictionaries with keys 'day' and 'night'
+    # the item for each key is an array
+    # These give the cloud/don't use pixel numbers,
+    # and the number of useable pixels for each timestep
+    n_fill = {}
+    n_use = {}
+    for time in ['day', 'night']:
+        unc_ran = loaded_data['ESACCI-LST'][f'lst_unc_ran_{time}']
+        if isinstance(unc_ran.data.mask, np.ndarray):
+            # mask is an array so there are masked values
+            # do counting
+            n_fill[time] = np.array(
+                [np.sum(unc_ran[date].data.mask)
+                 for date in range(len(unc_ran.coord('time').points))])
+
+        elif unc_ran.data.mask:
+            # mask is a single value of True so all values are masked
+            # make an array of m*n
+            n_fill[time] = np.array(
+                [lat_len * lon_len for i in unc_ran.coord('time').points])
+
+        else:
+            # mask is a single value of False so there are no masked values
+            # make an array of zeros
+            n_fill[time] = np.zeros_like(unc_ran.coord('time').points)
+
+    n_use['day'] = (lat_len * lon_len) - n_fill['day']
+    n_use['night'] = (lat_len * lon_len) - n_fill['night']
+
+    # This loop calls the propagation equations, once for 'day' and 'night'
+    for time in ['day', 'night']:
+
+        # LST
+        propagated_values[f'ts_{time}'] = \
+            eq_arithmetic_mean(loaded_data['ESACCI-LST'][f'ts_{time}'])
+
+        # Locally correlated Atmosphere
+        # This is given as the wrong equation in the ATBD!
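+        # For reference: eq_weighted_sqrt_mean (ATBD eq 7) would reduce
+        # this field as mean(u_i) / sqrt(n_use), whereas
+        # two_step_calculation first averages the 0.01 deg pixels onto
+        # 0.05 deg blocks and then combines those block means in
+        # quadrature (see the commented-out call kept below)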
+        # propagated_values[f'lst_unc_loc_atm_{time}'] = \
+        #     eq_weighted_sqrt_mean(
+        #         loaded_data['ESACCI-LST'][f'lst_unc_loc_atm_{time}'],
+        #         n_use[f'{time}'])
+
+        propagated_values[f'lst_unc_loc_atm_{time}'] = \
+            two_step_calculation(
+                loaded_data['ESACCI-LST'][f'lst_unc_loc_atm_{time}'])
+
+        # no spatial propagation of the systematic uncertainty
+        # no generalisation has been made at this stage
+        # for the single value input
+        propagated_values[f'lst_unc_sys_{time}'] = \
+            loaded_data['ESACCI-LST'][f'lst_unc_sys_{time}']
+
+        # Locally correlated Surface
+        propagated_values[f'lst_unc_loc_sfc_{time}'] = \
+            eq_correlation_with_biome(
+                loaded_data['ESACCI-LST'][f'lst_unc_loc_sfc_{time}'],
+                loaded_data['ESACCI-LST'][f'lcc_{time}'])
+
+        # Random uncertainty
+        propagated_values[f'lst_unc_ran_{time}'], \
+            propagated_values[f'lst_sampling_{time}'] = \
+            eq_propagate_random_with_sampling(
+                loaded_data['ESACCI-LST'][f'lst_unc_ran_{time}'],
+                loaded_data['ESACCI-LST'][f'ts_{time}'],
+                n_use[f'{time}'],
+                n_fill[f'{time}'])
+
+    # Combine all uncertainty types to get a total uncertainty
+    # using sum in quadrature, for 'day' and 'night'
+    for time in ['day', 'night']:
+        time_cubelist = iris.cube.CubeList(
+            [propagated_values[f'{variable}_{time}']
+             for variable in lst_unc_variables])
+
+        propagated_values[f'lst_total_unc_{time}'] = \
+            eq_sum_in_quadrature(time_cubelist)
+
+    # iplt did not want to work with the 360 day calendar of UKESM
+    # demote appears to work on the standard_name
+    # These lines fix that issue
+    loaded_data['UKESM1-0-LL']['ts'].coord('time').var_name = 'time2'
+    loaded_data['UKESM1-0-LL']['ts'].coord('time').long_name = 'time2'
+    loaded_data['UKESM1-0-LL']['ts'].coord('time').standard_name = 'height'
+    iris.util.demote_dim_coord_to_aux_coord(loaded_data['UKESM1-0-LL']['ts'],
+                                            'height')
+    loaded_data['UKESM1-0-LL']['ts'].add_aux_coord(
+        propagated_values['ts_day'].coord('time'), 0)
+    iris.util.promote_aux_coord_to_dim_coord(loaded_data['UKESM1-0-LL']['ts'],
+                                             'time')
+
+    # Make the plots:
+    plt.figure()
+    plt.plot(n_fill['day'], c='k')
+    plt.plot(n_use['day'], c='b')
+    plt.savefig('test_nuse_day.png')
+    plt.close()
+
+    plt.figure()
+    plt.plot(n_fill['night'], c='k')
+    plt.plot(n_use['night'], c='b')
+    plt.savefig('test_nuse_night.png')
+    plt.close()
+
+    for time in ['day', 'night']:
+        plt.figure()
+        iplt.plot(propagated_values[f'lst_unc_ran_{time}'], c='r')
+        iplt.plot(propagated_values[f'lst_sampling_{time}'], c='g')
+        plt.savefig(f'test_random_{time}.png')
+        plt.close()
+
+    plot_lc(loaded_data)
+    timeseries_plot(propagated_values)
+    plot_with_cmip(propagated_values, loaded_data)
+    timeseries_plot(propagated_values, zoom_in=True)
+
+
+# These are the propagation equations
+
+def two_step_calculation(cube):
+    """Average onto 0.05 degree blocks, then combine in quadrature.
+
+    Take the mean of each 5*5 pixel block, then combine the block
+    means in quadrature to give one value per timestep.
+    """
+    lat_len = len(cube.coord('latitude').points)
+    lon_len = len(cube.coord('longitude').points)
+    time_len = len(cube.coord('time').points)
+
+    timeseries = []
+
+    for time_index in range(time_len):
+
+        grid_means = []  # this is for the 5*5 block means
+        # all cci lst v3 data is 0.01 resolution
+        # so use blocks of 5 to get 0.05 degree resolution
+        for i in range(0, lat_len, 5):
+            for j in range(0, lon_len, 5):
+                this_region = cube[time_index, i:i + 5, j:j + 5]
+
+                value = this_region.collapsed(['latitude', 'longitude'],
+                                              iris.analysis.MEAN).data
+                # sqrt removed, email 23-24/4/2025
+                # SUM to MEAN, Claire's March email; sum of squares to just
+                # sum, from the 18/4/25 email
+                # This line changes from Claire's March email
+                grid_means.append(value)  # / (25 - np.sum(this_region.data.mask)))  # NO sqrt here now!
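+
+        # The N block means m_k collected above are combined as
+        #     u = (1 / sqrt(N)) * sqrt(sum(m_k**2) / N)
+        # matching the form agreed in the email exchanges noted above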
+        overall_quadrature = (1 / np.sqrt(len(grid_means))) * \
+            np.sqrt(np.sum(np.array(grid_means)**2) / len(grid_means))
+        timeseries.append(overall_quadrature)
+
+    results_cube = iris.cube.Cube(np.array(timeseries),
+                                  dim_coords_and_dims=[(cube.coord('time'),
+                                                        0)],
+                                  units='Kelvin',
+                                  var_name=cube.var_name,
+                                  long_name=cube.long_name,
+                                  )
+
+    return results_cube
+
+
+def eq_correlation_with_biome(cube_loc_sfc, lcc):
+    """Propagate using the land cover/biome information.
+
+    Used for the locally correlated surface uncertainty.
+    Method:
+        Make a 0.05 degree grid of data
+        Find the matching biome matrix
+        Calc uncert for each correlated biome
+        Calc 0.05 box total uncert
+        With all of these, find the total uncert (0.05 -> arbitrary)
+    """
+    lat_len = len(cube_loc_sfc.coord('latitude').points)
+    lon_len = len(cube_loc_sfc.coord('longitude').points)
+    time_len = len(cube_loc_sfc.coord('time').points)
+
+    final_values = []  # this is for each overall main area value
+    lc_grid = []  # this is for each 5*5 block
+    for time_index in range(time_len):
+
+        grid_means = []  # this is for the 5*5 block means
+        # all cci lst v3 data is 0.01 resolution
+        # so use blocks of 5 to get 0.05 degree resolution
+        for i in range(0, lat_len, 5):
+            for j in range(0, lon_len, 5):
+                this_region = lcc[time_index, i:i + 5, j:j + 5]
+                lc_grid.append(this_region)
+                uniques = np.unique(this_region.data.round(decimals=0),
+                                    return_index=True,
+                                    return_inverse=True)
+                # note the order of uniques depends on both options = True
+                num_of_biomes = len(uniques[0])
+
+                this_uncerts = \
+                    cube_loc_sfc[time_index, i:i + 5, j:j + 5].data.flatten()
+
+                uncert_by_biome = [[] for _ in range(num_of_biomes)]
+                for k, item in enumerate(this_uncerts):
+                    uncert_by_biome[uniques[2][k]].append(item)
+
+                # E3UB gives two methods
+                # method 1 = eq 5.31 and worked example eq 5.32
+                # method 2 is used by CCI at the moment and is eq 5.34
+                # here use method 2
+
+                # np.ma.mean allows masked boxes to be ignored
+                # mean_list = np.array([np.ma.mean(item)
+                #                       for item in uncert_by_biome])
+                # old version:
+                # this_mean = (1/np.sqrt(len(uncert_by_biome))) * \
+                #     np.ma.mean(mean_list)
+                # new version:
+
+                # THIS LOOP IS A NEW CHANGE - CHANGED ON 14/10/24 on call
+                biome_quadrature = np.array([])
+                for item in uncert_by_biome:
+                    # 1/n moved, Claire's March email
+                    # sqrt removed, email 23/4/25
+                    # not squares to sum, see the 18/4/25 email from Claire
+                    biome_quadrature = np.append(
+                        biome_quadrature,
+                        (1 / len(item)) * np.sum(np.array(item)))
+                # UPDATE from Claire's March email
+                this_mean = (1 / np.sqrt(len(biome_quadrature))) * \
+                    np.sqrt(np.sum(biome_quadrature**2) /
+                            len(biome_quadrature))
+                grid_means.append(this_mean)
+
+        grid_means = np.array(grid_means)
+
+        # change from Claire's March email
+        this_times_mean = (1 / np.sqrt(len(grid_means))) * \
+            np.sqrt(np.sum(grid_means**2) / len(grid_means))
+        final_values.append(this_times_mean)
+
+    # need to make a cube to return
+    results_cube = iris.cube.Cube(np.array(final_values),
+                                  dim_coords_and_dims=[(lcc.coord('time'),
+                                                        0)],
+                                  units='Kelvin',
+                                  var_name=cube_loc_sfc.var_name,
+                                  long_name=cube_loc_sfc.long_name,
+                                  )
+
+    return results_cube
+
+
+def eq_propagate_random_with_sampling(cube_unc_ran, cube_ts, n_use, n_fill):
+    """Propagate random uncertainty using the sampling uncertainty.
+
+    ATBD eq 4:
+    the sum in quadrature of the arithmetic mean of lst_unc_ran and
+    the sampling uncertainty
+
+    Sampling uncertainty is
+        n_cloudy * Variance of LST / (n_total - 1)
+
+    see ATBD section 3.13.3
+
+    Inputs:
+        cube_unc_ran: The cube with the lst_unc_ran day/night data
+        cube_ts: The lst cube for day/night as appropriate
+    """
+    # total number of pixels
+    # n_total = n_fill + n_use
+
+    n_lat = len(cube_unc_ran.coord('latitude').points)
+    n_lon = len(cube_unc_ran.coord('longitude').points)
+    max_points = n_lat * n_lon
+    logger.debug('max_points = %s', max_points)
+    # n_use = np.array([max_points - np.sum(item)
+    #                   for item in cube_unc_ran.data.mask])
+    logger.debug('n_use = %s', n_use)
+
+    # the mean of the random uncertainty
+
+    # unc_ran_mean = iris.analysis.maths.exponentiate(cube_unc_ran, 2,
+    #                                                 in_place=False)
+    # unc_ran_mean = unc_ran_mean.collapsed(['latitude', 'longitude'],
+    #                                       iris.analysis.SUM)
+    # iris.analysis.maths.exponentiate(unc_ran_mean, 0.5, in_place=True)
+
+    # for i, _ in enumerate(n_use):
+    #     unc_ran_mean.data[i] *= 1/(np.sqrt(n_use[i]))
+    #     CHANGE FROM CLAIRE'S MARCH email:
+    #     unc_ran_mean.data[i] = (1/(np.sqrt(n_use[i]))) * \
+    #         np.sqrt(unc_ran_mean.data[i]/n_use[i])
+
+    # the mean of the random uncertainty
+    unc_ran_mean = (cube_unc_ran**2).collapsed(['latitude', 'longitude'],
+                                               iris.analysis.SUM)
+    for i, item in enumerate(n_use):
+        unc_ran_mean.data[i] = np.sqrt(unc_ran_mean.data[i] / item) * \
+            (1 / np.sqrt(item))
+        # 1/np.sqrt(item) * np.sqrt(unc_ran_mean.data[i])
+
+    logger.debug('unc_ran_mean = %s', unc_ran_mean.data)
+
+    # calculate the sampling error
+    # variance of the lst * n_fill/(n_total - 1)
+    lst_variance = cube_ts.collapsed(['latitude', 'longitude'],
+                                     iris.analysis.VARIANCE)
+    factor = (max_points - n_use) / (max_points - 1)
+    logger.debug('factor = %s', factor)
+    logger.debug('lst_variance = %s', lst_variance.data)
+    unc_sampling = iris.analysis.maths.multiply(lst_variance, factor)
+    logger.debug('unc_sampling = %s', unc_sampling.data)
+
+    # This is needed to allow the cubes to be passed
+    # to the sum in quadrature function
+    unc_sampling.units = 1
+    unc_ran_mean.units = 1
+
+    # apply the ATBD equation
+    # note the square of the random uncertainty is needed
+    output = eq_sum_in_quadrature(iris.cube.CubeList([unc_ran_mean,
+                                                      unc_sampling]))
+
+    # output = unc_ran_mean.copy()
+    output.units = 'K'
+
+    return output, unc_sampling
+
+
+def eq_arithmetic_mean(cube):
+    """Arithmetic mean of a cube across latitude and longitude.
+
+    ATBD eq 1
+    """
+    out_cube = cube.collapsed(['latitude', 'longitude'], iris.analysis.MEAN)
+
+    return out_cube
+
+
+def eq_sum_in_quadrature(cubelist):
+    """Sum in quadrature.
+
+    ATBD eq 9
+
+    Input:
+        cubelist : A cubelist of 1D cubes
+    """
+    # don't want to replace the input in place;
+    # list.copy() is shallow, so copy each cube
+    newlist = [cube.copy() for cube in cubelist]
+    for cube in newlist:
+        iris.analysis.maths.exponentiate(cube, 2, in_place=True)
+
+    cubes_sum = 0
+    for cube in newlist:
+        cubes_sum = cubes_sum + cube
+    output = iris.analysis.maths.exponentiate(cubes_sum, 0.5,
+                                              in_place=False)
+
+    return output
+
+
+def eq_weighted_sqrt_mean(cube, n_use):
+    """Mean with a square root of n factor.
+
+    ATBD eq 7
+
+    Inputs:
+        cube: the uncertainty cube to propagate
+        n_use: the number of useable pixels
+    TODO: implement a check on the masks being the same?
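+
+    As implemented this returns
+        u_prop = (1 / sqrt(n_use)) * mean(cube),
+    with the mean taken over latitude and longitude.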
+    """
+    output = iris.analysis.maths.multiply(cube.collapsed(['latitude',
+                                                          'longitude'],
+                                                         iris.analysis.MEAN),
+                                          1 / np.sqrt(n_use))
+    return output
+
+
+# Plotting functions
+
+def plot_lc(loaded_data):
+    """Plot to show the land cover types and their counts."""
+    lcc_map = loaded_data['ESACCI-LST']['lcc_day'][0]
+
+    lc_types = np.unique(lcc_map.data, return_counts=True)
+
+    class_list = lc_types[0]
+    bound_list = np.append(class_list - 0.5, 256)
+
+    colours = lc_colour_list[0:len(lc_types[0])]
+
+    cmap = matplotlib.colors.ListedColormap(colours)
+    norm = matplotlib.colors.BoundaryNorm(bound_list, cmap.N)
+
+    plt.figure(figsize=(25, 15))
+    ax1 = plt.subplot(111)
+
+    iplt.pcolormesh(lcc_map, cmap=cmap, norm=norm)
+
+    x_ticks = [item for item in lcc_map.coord('longitude').points[::5]]
+    x_tick_labels = ['' for item in x_ticks]
+    x_tick_labels[0] = f"{lcc_map.coord('longitude').points[0]:.2f}"
+    x_tick_labels[-1] = f"{lcc_map.coord('longitude').points[-1]:.2f}"
+    plt.xticks(x_ticks, x_tick_labels, fontsize=plot_params['ticksize'])
+
+    y_ticks = [item for item in lcc_map.coord('latitude').points[::5]]
+    y_tick_labels = ['' for item in y_ticks]
+    y_tick_labels[0] = f"{lcc_map.coord('latitude').points[0]:.2f}"
+    y_tick_labels[-1] = f"{lcc_map.coord('latitude').points[-1]:.2f}"
+    plt.yticks(y_ticks, y_tick_labels, fontsize=plot_params['ticksize'])
+
+    plt.grid(c='k', linewidth=1)
+
+    plt.xlabel('Longitude', fontsize=plot_params['labelsize'])
+    plt.ylabel('Latitude', fontsize=plot_params['labelsize'])
+
+    cbar = plt.colorbar()
+
+    tick_points = [(bound_list[i] + bound_list[i + 1]) / 2
+                   for i in range(len(bound_list) - 1)]
+    labels = [int(item) for item in class_list]
+    cbar.set_ticks(tick_points, labels=labels)
+    cbar.ax.tick_params(labelsize=plot_params['ticksize'])
+    cbar.set_label('Land Cover Types', fontsize=plot_params['labelsize'])
+
+    plt.tight_layout()
+    plt.savefig('LC_map.png')
+
+    plt.figure(figsize=(20, 25))
+
+    ax1 = plt.subplot(111)
+    plt.bar(range(len(class_list)), lc_types[1],
+            label=labels,
+            color=colours,
+            linewidth=2, edgecolor='black',
+            tick_label=labels)
+
+    ax1.tick_params(labelsize=plot_params['ticksize'])
+
+    plt.xlabel('Land Cover Type', fontsize=plot_params['labelsize'])
+    plt.ylabel('Count', fontsize=plot_params['labelsize'])
+
+    plt.tight_layout()
+    plt.savefig('LC_bar.png')
+
+
+def plot_with_cmip(propagated_values, loaded_data):
+    """A plot for comparing the OBS+uncertainty with the CMIP value."""
+    years = mdates.YearLocator()    # every year
+    months = mdates.MonthLocator()  # every month
+    yearsfmt = mdates.DateFormatter('%Y')
+
+    for time in ['day', 'night']:
+        # one plot each for day and night separately
+        plt.figure(figsize=(25, 15))
+
+        ax1 = plt.subplot(111)
+        iplt.plot(propagated_values[f'ts_{time}'],
+                  c=colour_list[0],
+                  linewidth=plot_params['linewidth'],
+                  label=f'LST {time} (CCI)')
+        iplt.fill_between(propagated_values[f'ts_{time}'].coord('time'),
+                          propagated_values[f'ts_{time}'] +
+                          propagated_values[f'lst_total_unc_{time}'],
+                          propagated_values[f'ts_{time}'] -
+                          propagated_values[f'lst_total_unc_{time}'],
+                          color=colour_list[0],
+                          alpha=0.5,
+                          label=f'Uncertainty {time} (CCI)'
+                          )
+
+        iplt.plot(loaded_data['UKESM1-0-LL']['ts'],
+                  c=colour_list[4],
+                  linewidth=plot_params['linewidth'],
+                  label='LST (CMIP6)')
+
+        ax1.xaxis.set_major_locator(years)
+        ax1.xaxis.set_major_formatter(yearsfmt)
+        ax1.xaxis.set_minor_locator(months)
+
+        plt.grid(which='major', color='k', linestyle='solid')
+        plt.grid(which='minor', color='k', linestyle='dotted', alpha=0.5)
+
+        plt.xlabel('Date', fontsize=plot_params['labelsize'])
+        plt.ylabel('LST (K)', fontsize=plot_params['labelsize'])
+
+        plt.legend(loc='lower left',
+                   bbox_to_anchor=(1.05, 0),
+                   fontsize=plot_params['legendsize'])
+
+        ax1.tick_params(labelsize=plot_params['ticksize'])
+
+        plt.tight_layout()
+        plt.savefig(f'cmip_{time}.png')
+
+    # make a version with day and night both on the same plot
+    plt.figure(figsize=(25, 15))
+
+    ax1 = plt.subplot(111)
+    colour_id = 0
+    for time in ['day', 'night']:
+        iplt.plot(propagated_values[f'ts_{time}'],
+                  c=colour_list[colour_id],
+                  linewidth=plot_params['linewidth'],
+                  label=f'LST {time} (CCI)')
+        iplt.fill_between(propagated_values[f'ts_{time}'].coord('time'),
+                          propagated_values[f'ts_{time}'] +
+                          propagated_values[f'lst_total_unc_{time}'],
+                          propagated_values[f'ts_{time}'] -
+                          propagated_values[f'lst_total_unc_{time}'],
+                          color=colour_list[colour_id],
+                          alpha=0.5,
+                          label=f'Uncertainty {time}'
+                          )
+        colour_id += 1  # use the next colour for the night-time lines
+
+    iplt.plot(loaded_data['UKESM1-0-LL']['ts'],
+              c=colour_list[4],
+              linewidth=plot_params['linewidth'],
+              label='LST (CMIP6)')
+
+    ax1.xaxis.set_major_locator(years)
+    ax1.xaxis.set_major_formatter(yearsfmt)
+    ax1.xaxis.set_minor_locator(months)
+
+    plt.grid(which='major', color='k', linestyle='solid')
+    plt.grid(which='minor', color='k', linestyle='dotted', alpha=0.5)
+
+    plt.xlabel('Date', fontsize=plot_params['labelsize'])
+    plt.ylabel('LST (K)', fontsize=plot_params['labelsize'])
+
+    plt.legend(loc='lower left',
+               bbox_to_anchor=(1.05, 0),
+               fontsize=plot_params['legendsize'])
+
+    ax1.tick_params(labelsize=plot_params['ticksize'])
+
+    plt.tight_layout()
+    plt.savefig('cmip_both.png')
+
+
+def timeseries_plot(propagated_values, zoom_in=False):
+    """Make a simple plot to test the method."""
+    years = mdates.YearLocator()    # every year
+    months = mdates.MonthLocator()  # every month
+    yearsfmt = mdates.DateFormatter('%Y')
+
+    for time in ['day', 'night']:
+        # one plot each for day and night separately
+        plt.figure(figsize=(25, 15))
+
+        ax1 = plt.subplot(211)
+        iplt.plot(propagated_values[f'ts_{time}'],
+                  c=colour_list[0],
+                  linewidth=plot_params['linewidth'],
+                  label='LST')
+        iplt.fill_between(propagated_values[f'ts_{time}'].coord('time'),
+                          propagated_values[f'ts_{time}'] +
+                          propagated_values[f'lst_total_unc_{time}'],
+                          propagated_values[f'ts_{time}'] -
+                          propagated_values[f'lst_total_unc_{time}'],
+                          color=colour_list[0],
+                          alpha=0.5,
+                          label='Uncertainty'
+                          )
+
+        ax1.xaxis.set_major_locator(years)
+        ax1.xaxis.set_major_formatter(yearsfmt)
+        ax1.xaxis.set_minor_locator(months)
+
+        plt.grid(which='major', color='k', linestyle='solid')
+        plt.grid(which='minor', color='k', linestyle='dotted', alpha=0.5)
+
+        plt.ylabel('LST (K)', fontsize=plot_params['labelsize'])
+
+        plt.legend(loc='lower left',
+                   bbox_to_anchor=(1.05, 0),
+                   fontsize=plot_params['legendsize'])
+
+        ax2 = plt.subplot(212, sharex=ax1)
+        iplt.plot(propagated_values[f'lst_unc_loc_atm_{time}'],
+                  c=colour_list[2],
+                  linewidth=plot_params['linewidth'],
+                  label=line_labels[f'lst_unc_loc_atm_{time}'])
+        iplt.plot(propagated_values[f'lst_unc_loc_sfc_{time}'],
+                  c=colour_list[3],
+                  linewidth=plot_params['linewidth'],
+                  label=line_labels[f'lst_unc_loc_sfc_{time}'])
+        iplt.plot(propagated_values[f'lst_unc_sys_{time}'],
+                  c=colour_list[4],
+                  linewidth=plot_params['linewidth'],
+                  label=line_labels[f'lst_unc_sys_{time}'])
+        iplt.plot(propagated_values[f'lst_unc_ran_{time}'],
+                  c=colour_list[5],
+                  linewidth=plot_params['linewidth'],
+                  label=line_labels[f'lst_unc_ran_{time}'])
+
+        # iplt.plot(propagated_values[f'lst_sampling_{time}'],
+        #           '--',
+        #           c=colour_list[1],
+        #           linewidth=plot_params['linewidth'],
+        #           label=line_labels[f'lst_sampling_{time}'])
+
+        iplt.plot(propagated_values[f'lst_total_unc_{time}'],
+                  c=colour_list[6],
+                  linewidth=plot_params['linewidth'],
+                  label=line_labels[f'lst_total_unc_{time}'])
+
+        plt.grid(which='major', color='k', linestyle='solid')
+        plt.grid(which='minor', color='k', linestyle='dotted', alpha=0.5)
+
+        if zoom_in:
+            ax2.set_ylim((0, 0.2))
+
+        ax1.set_xlim(datetime.datetime(2002, 12, 31),
+                     datetime.datetime(2004, 1, 1))
+        ax2.set_xlim(datetime.datetime(2002, 12, 31),
+                     datetime.datetime(2004, 1, 1))
+
+        plt.xlabel('Date', fontsize=plot_params['labelsize'])
+        plt.ylabel('Uncertainty (K)', fontsize=plot_params['labelsize'])
+
+        plt.legend(loc='upper left',
+                   bbox_to_anchor=(1.05, 1),
+                   fontsize=plot_params['legendsize'])
+
+        ax1.tick_params(labelsize=plot_params['ticksize'])
+        ax2.tick_params(labelsize=plot_params['ticksize'])
+
+        plt.tight_layout()
+        if zoom_in:
+            plt.savefig(f'timeseries_{time}_zoom_in.png')
+        else:
+            plt.savefig(f'timeseries_{time}.png')
+
+
+if __name__ == '__main__':
+    # always use run_diagnostic() to get the config (the preprocessor
+    # nested dictionary holding all the needed information)
+    with run_diagnostic() as config:
+        _diagnostic(config)
diff --git a/esmvaltool/recipes/recipe_esacci_lst.yml b/esmvaltool/recipes/recipe_esacci_lst.yml
index 5f4aa5ca9f..e972353247 100644
--- a/esmvaltool/recipes/recipe_esacci_lst.yml
+++ b/esmvaltool/recipes/recipe_esacci_lst.yml
@@ -1,4 +1,4 @@
-# Recipe to call ESA CCI LST diagnostic.
+# Recipe to call ESA CCI LST V3 testing diagnostic.
 ---
 documentation:
   title: ESA CCI LST diagnostic
@@ -16,26 +16,25 @@ documentation:
   projects:
     - cmug
 
-datasets:
-  - {dataset: CESM2, project: CMIP6, exp: historical, ensemble: r(2:3)i1p1f1,
-     start_year: 2004, end_year: 2005, grid: gn}
-  - {dataset: UKESM1-0-LL, project: CMIP6, exp: historical,
-     ensemble: r(1:2)i1p1f2, start_year: 2004, end_year: 2005, grid: gn}
-  - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
-     start_year: 2004, end_year: 2005, version: '1.00'}
-
 preprocessors:
-  lst_preprocessor:
+  obs_preprocessor:
+    extract_region:
+      start_longitude: 35
+      end_longitude: 45
+      start_latitude: 55
+      end_latitude: 60
+
+  cmip_preprocessor:
     regrid:
       target_grid: UKESM1-0-LL
       scheme: linear
 
     extract_region:
       start_longitude: 35
-      end_longitude: 175
+      end_longitude: 45
       start_latitude: 55
-      end_latitude: 70
+      end_latitude: 60
 
     mask_landsea:
       mask_out: sea
@@ -43,10 +42,10 @@ preprocessors:
     area_statistics:
      operator: mean
 
-    multi_model_statistics:
-      span: overlap
-      statistics: [mean, std_dev]
-      exclude: [ESACCI-LST]
+    # ensemble_statistics:
+    #   span: overlap
+    #   statistics: [mean, std_dev]
+    #   exclude: [ESACCI-LST]
 
 
 diagnostics:
@@ -60,8 +59,72 @@ diagnostics:
     variables:
       ts:
         mip: Amon
-        preprocessor: lst_preprocessor
+        preprocessor: cmip_preprocessor
+        additional_datasets:
+          - {dataset: UKESM1-0-LL, project: CMIP6, exp: historical,
+             ensemble: r1i1p1f2, start_year: 2006, end_year: 2007, grid: gn}
+
+      tsDay:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2006, end_year: 2007, version: '3.00'}
+
+      tsNight:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2006, end_year: 2007, version: '3.00'}
+
+      tsLocalSfcErrDay:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2006, end_year: 2007, version: '3.00'}
+
+      tsLocalSfcErrNight:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2006, end_year: 2007, version: '3.00'}
+
+      tsLocalAtmErrDay:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2006, end_year: 2007, version: '3.00'}
+
+      tsLocalAtmErrNight:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2006, end_year: 2007, version: '3.00'}
+
+      tsLSSysErrDay:
+        mip: Amon
+        # preprocessor: obs_preprocessor  # Single value in files, no area cut out needed
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2006, end_year: 2007, version: '3.00'}
+
+      tsLSSysErrNight:
+        mip: Amon
+        # preprocessor: obs_preprocessor  # Single value in files, no area cut out needed
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2006, end_year: 2007, version: '3.00'}
+
+      tsUnCorErrDay:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2006, end_year: 2007, version: '3.00'}
+
+      tsUnCorErrNight:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2006, end_year: 2007, version: '3.00'}
 
     scripts:
       script1:
-        script: lst/lst.py
+        script: lst/lst_testing.py
+
diff --git a/esmvaltool/recipes/recipe_esacci_lst_uncert_prop.yml b/esmvaltool/recipes/recipe_esacci_lst_uncert_prop.yml
new file mode 100644
index 0000000000..ce1c3a4ed5
--- /dev/null
+++ b/esmvaltool/recipes/recipe_esacci_lst_uncert_prop.yml
@@ -0,0 +1,166 @@
+# Recipe to call the ESA CCI LST V3 Uncertainty Propagation Demonstrator Diagnostic.
+---
+documentation:
+  title: ESA CCI LST uncertainty propagation diagnostic
+  description: |
+    Demonstrates the propagation of the ESA CCI LST V3 uncertainty
+    components from pixel level to a regional average time series.
+  authors:
+    - king_robert
+
+  maintainer:
+    - king_robert
+
+  references:
+    - esacci_lst
+
+  projects:
+    - cmug
+
+preprocessors:
+
+  obs_preprocessor:
+    extract_region:
+      start_longitude: 2.6
+      end_longitude: 3.0
+      start_latitude: 46.05
+      end_latitude: 47.45
+
+  cmip_preprocessor:
+    regrid:
+      target_grid: UKESM1-0-LL
+      scheme: linear
+
+    extract_region:
+      start_longitude: 1.875
+      end_longitude: 3.75
+      start_latitude: 45
+      end_latitude: 47.5
+
+    mask_landsea:
+      mask_out: sea
+
+    area_statistics:
+      operator: mean
+
+    # ensemble_statistics:
+    #   span: overlap
+    #   statistics: [mean, std_dev]
+    #   exclude: [ESACCI-LST]
+
+
+diagnostics:
+
+  timeseries:
+    description: ESACCI LST V3 uncertainty propagation time series
+    themes:
+      - phys
+    realms:
+      - land
+    variables:
+      ts:
+        mip: Amon
+        preprocessor: cmip_preprocessor
+        additional_datasets:
+          - {dataset: UKESM1-0-LL, project: CMIP6, exp: historical,
+             ensemble: r1i1p1f2, start_year: 2003, end_year: 2014, grid: gn}
+
+      tsDay:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2003, end_year: 2014, version: '3.00'}
+
+      tsNight:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2003, end_year: 2014, version: '3.00'}
+
+      tsLocalSfcErrDay:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2003, end_year: 2014, version: '3.00'}
+
+      tsLocalSfcErrNight:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2003, end_year: 2014, version: '3.00'}
+
+      tsLocalAtmErrDay:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2003, end_year: 2014, version: '3.00'}
+
+      tsLocalAtmErrNight:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2003, end_year: 2014, version: '3.00'}
+
+      tsLSSysErrDay:
+        mip: Amon
+        # preprocessor: obs_preprocessor  # Single value in files, no area cut out needed
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2003, end_year: 2014, version: '3.00'}
+
+      tsLSSysErrNight:
+        mip: Amon
+        # preprocessor: obs_preprocessor  # Single value in files, no area cut out needed
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2003, end_year: 2014, version: '3.00'}
+
+      tsUnCorErrDay:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2003, end_year: 2014, version: '3.00'}
+
+      tsUnCorErrNight:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2003, end_year: 2014, version: '3.00'}
+
+      #tsTotalDay:
+      #  mip: Amon
+      #  preprocessor: obs_preprocessor
+      #  additional_datasets:
+      #    - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+      #       start_year: 2003, end_year: 2003, version: '3.00'}
+
+      #tsTotalNight:
+      #  mip: Amon
+      #  preprocessor: obs_preprocessor
+      #  additional_datasets:
+      #    - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+      #       start_year: 2003, end_year: 2003, version: '3.00'}
+
+      #tsVarDay:
+      #  mip: Amon
+      #  preprocessor: obs_preprocessor
+      #  additional_datasets:
+      #    - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+      #       start_year: 2003, end_year: 2003, version: '3.00'}
+
+      #tsVarNight:
+      #  mip: Amon
+      #  preprocessor: obs_preprocessor
+      #  additional_datasets:
+      #    - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+      #       start_year: 2003, end_year: 2003, version: '3.00'}
+
+      tsLCDay:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2003, end_year: 2014, version: '3.00'}
+
+      tsLCNight:
+        mip: Amon
+        preprocessor: obs_preprocessor
+        additional_datasets:
+          - {dataset: ESACCI-LST, project: OBS, type: sat, tier: 2,
+             start_year: 2003, end_year: 2014, version: '3.00'}
+
+    scripts:
+      script1:
+        script: lst/lst_uncert_prop.py
\ No newline at end of file