#!/usr/bin/env python

from os.path import join

import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import netCDF4 as nc4

from e3sm_case_output import day_str

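# Reference case, test cases to compare against it, the short labels used to
# look up plot styles, and the directory holding the histogram files.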
REF_CASE_NAME = "timestep_all_300s"
TEST_CASE_NAMES = ["timestep_CLUBB_MG2_10s", "timestep_all_60s"]
SHORT_TEST_CASE_NAMES = ["CLUBBMICRO10", "ALL60"]
OUTPUT_DIR = "/p/lustre2/santos36/timestep_precip/"

USE_PRESAER = False
TROPICS_ONLY = True

START_DAY = 3
END_DAY = 15

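# Plot color and line style, keyed by short case name.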
if USE_PRESAER:
    STYLES = {
        "CLUBBMICRO10PA": ('indigo', '-'),
        "ALL10PA": ('dimgrey', '-'),
        "ZM10PA": ('g', '-'),
        "CLUBBMICRO10ZM10PA": ('saddlebrown', '-'),
        "CLD10PA": ('slateblue', '-'),
    }
else:
    STYLES = {
        "DYN10": ('y', '-'),
        "CLUBB10": ('b', '-'),
        "MICRO10": ('r', '-'),
        "CLUBB10MICRO10": ('maroon', '-'),
        "CLUBBMICRO60": ('indigo', '--'),
        "CLUBBMICRO10": ('indigo', '-'),
        "ALL10": ('dimgrey', '-'),
        "ALL60": ('dimgrey', '--'),
        "ALL300": ('dimgrey', ':'),
        "ALLRAD10": ('orange', '-'),
    }

num_tests = len(TEST_CASE_NAMES)

suffix = '_d{}-d{}'.format(day_str(START_DAY), day_str(END_DAY))

if TROPICS_ONLY:
    suffix += '_tropics'

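# Name template for the per-case precipitation histogram files read below.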
out_file_template = "{}.freq.short.d{}-d{}.nc"

first_file_name = out_file_template.format(REF_CASE_NAME, day_str(START_DAY),
                                           day_str(END_DAY))

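# Read grid information, histogram bin bounds, and column areas from the
# reference case's file.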
first_file = nc4.Dataset(join(OUTPUT_DIR, first_file_name), 'r')
ncol = len(first_file.dimensions['ncol'])
nbins = len(first_file.dimensions['nbins'])
bin_lower_bounds = first_file['bin_lower_bounds'][:]
lat = first_file['lat'][:]
lon = first_file['lon'][:]
area = first_file['area'][:]
# For tropics-only plots, zero out the weights of all columns outside the
# tropics (poleward of 30 degrees latitude).
if TROPICS_ONLY:
    for i in range(ncol):
        if np.abs(lat[i]) > 30.:
            area[i] = 0.
area_sum = area.sum()
weights = area/area_sum
first_file.close()

ref_sample_num_total = 0
test_sample_num_totals = [0 for i in range(num_tests)]

prec_vars = ("PRECC", "PRECL", "PRECT")

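# Area-weighted histogram accumulators for each precipitation variable.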
ref_num_avgs = {}
ref_amount_avgs = {}
for var in prec_vars:
    ref_num_avgs[var] = np.zeros((nbins,))
    ref_amount_avgs[var] = np.zeros((nbins,))

test_num_avgs = [{} for i in range(num_tests)]
test_amount_avgs = [{} for i in range(num_tests)]
for i in range(num_tests):
    for var in prec_vars:
        test_num_avgs[i][var] = np.zeros((nbins,))
        test_amount_avgs[i][var] = np.zeros((nbins,))

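# Accumulate area-weighted histograms for the reference case.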
out_file_name = out_file_template.format(REF_CASE_NAME, day_str(START_DAY),
                                         day_str(END_DAY))
out_file = nc4.Dataset(join(OUTPUT_DIR, out_file_name), 'r')

ref_sample_num_total += out_file.sample_num

for var in prec_vars:
    num_name = "{}_num".format(var)
    amount_name = "{}_amount".format(var)
    for j in range(ncol):
        ref_num_avgs[var] += out_file[num_name][j,:] * weights[j]
    for j in range(ncol):
        ref_amount_avgs[var] += out_file[amount_name][j,:] * weights[j]

out_file.close()

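# Accumulate area-weighted histograms for each test case.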
for i in range(num_tests):
    out_file_name = out_file_template.format(TEST_CASE_NAMES[i], day_str(START_DAY),
                                             day_str(END_DAY))
    out_file = nc4.Dataset(join(OUTPUT_DIR, out_file_name), 'r')

    test_sample_num_totals[i] += out_file.sample_num

    for var in prec_vars:
        num_name = "{}_num".format(var)
        amount_name = "{}_amount".format(var)
        for j in range(ncol):
            test_num_avgs[i][var] += out_file[num_name][j,:] * weights[j]
        for j in range(ncol):
            test_amount_avgs[i][var] += out_file[amount_name][j,:] * weights[j]

    out_file.close()

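# Convert the accumulated sums into averages over all time samples.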
for var in prec_vars:
    ref_num_avgs[var] /= ref_sample_num_total
    ref_amount_avgs[var] /= ref_sample_num_total
    for i in range(num_tests):
        test_num_avgs[i][var] /= test_sample_num_totals[i]
        test_amount_avgs[i][var] /= test_sample_num_totals[i]

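# Plot frequency and amount distributions for each precipitation variable.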
for var in prec_vars:
    # Leave the zero bin out of the log-log plot.
    plt.loglog(bin_lower_bounds[1:], ref_num_avgs[var][1:], 'k')
    for i in range(num_tests):
        plt.loglog(bin_lower_bounds[1:], test_num_avgs[i][var][1:],
                   color=STYLES[SHORT_TEST_CASE_NAMES[i]][0],
                   linestyle=STYLES[SHORT_TEST_CASE_NAMES[i]][1])
    plt.title("Frequency distribution of precipitation (days {}-{})".format(
        day_str(START_DAY), day_str(END_DAY)))
    plt.xlabel("Precipitation amount (mm/day)")
    plt.ylabel("Fraction")
    plt.savefig("{}_freq{}.png".format(var, suffix))
    plt.close()

    plt.semilogx(bin_lower_bounds[1:], ref_amount_avgs[var][1:], 'k')
    for i in range(num_tests):
        plt.semilogx(bin_lower_bounds[1:], test_amount_avgs[i][var][1:],
                     color=STYLES[SHORT_TEST_CASE_NAMES[i]][0],
                     linestyle=STYLES[SHORT_TEST_CASE_NAMES[i]][1])
    plt.title("Amounts of precipitation (days {}-{})".format(
        day_str(START_DAY), day_str(END_DAY)))
    plt.xlabel("Precipitation amount (mm/day)")
    plt.ylabel("Average amount (mm/day)")
    plt.savefig("{}_amount{}.png".format(var, suffix))
    plt.close()