# %%
# Tests

# %%
from IPython import get_ipython

ipython = get_ipython()

# Enable autoreload and plotting magics for interactive sessions
if ipython is not None:
    ipython.run_line_magic("load_ext", "autoreload")
    ipython.run_line_magic("autoreload", "2")
    ipython.run_line_magic("load_ext", "log_cell_time")
    ipython.run_line_magic("matplotlib", "inline")

# %%
import os

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

from cs_util import plots as cs_plots

from sp_validation import calibration
from sp_validation import cat
from sp_validation import io
from sp_validation import plots as sp_plots
from sp_validation import run_joint_cat as sp_joint
from sp_validation.basic import metacal

# %%
# Print the current working directory as a sanity check
os.getcwd()

# %%
# Initialize calibration class instance
obj = sp_joint.CalibrateCat()

config = obj.read_config_set_params("config_minimal.yaml")

# Get data; set load_into_memory to False for very large files
dat, _ = obj.read_cat(load_into_memory=False)

# %%
# Restrict to the first n_test objects for testing; set n_test = -1 to use all
n_test = 1_000_000
if n_test > 0:
    print(f"MKDEBUG testing only first {n_test} objects")
    dat = dat[:n_test]

# %%
# Use only some columns of dat for efficiency
use_all_columns = True

if not use_all_columns:
    # Collect the column names referenced by the mask definitions.
    # NOTE: config_data is assumed to hold the mask section of the loaded
    # config (sections -> lists of mask parameter dicts); it needs to be
    # defined before enabling this branch.
    required_columns = set()
    for section, mask_list in config_data.items():
        for mask_params in mask_list:
            required_columns.add(mask_params["col_name"])

    # Optionally add further user-requested columns, e.g.
    # required_columns.update(
    #     [
    #         "NGMIX_T_NOSHEAR",
    #         "NGMIX_Tpsf_NOSHEAR",
    #         "NGMIX_FLUX_NOSHEAR",
    #         "NGMIX_FLUX_ERR_NOSHEAR",
    #     ]
    # )

    print(f"{len(dat.dtype.names)} -> {len(required_columns)}")

    # Reading only the required columns fails with an unsolved error:
    # dat = {
    #     col: obj._hd5file["data"][col][:]
    #     for col in required_columns
    #     if col in obj._hd5file["data"]
    # }

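# %%
# A possible workaround for the unsolved error above, assuming obj._hd5file is
# an open h5py.File with one 1D dataset per column under "data": assemble the
# column subset into a structured record array, so that dat still supports
# dat.dtype.names and row slicing as used elsewhere in this script. This is an
# untested sketch, not a confirmed fix:
# cols = sorted(required_columns & set(obj._hd5file["data"].keys()))
# dat = np.rec.fromarrays(
#     [obj._hd5file["data"][col][:] for col in cols], names=cols
# )
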
# %%
# Masks that were already applied when the catalogue was written
if "applied_masks" in obj._hd5file:
    applied_masks = obj._hd5file["applied_masks"]
    print(applied_masks["desc"])
else:
    applied_masks = None

# %%
# ## Masking
masks_to_apply = [
    "N_EPOCH",
    "FLAGS",
    "4_Stars",
    "npoint3",
]

# Warn if a mask has already been applied
for mask in masks_to_apply:
    if applied_masks and mask in applied_masks["desc"]:
        print(f"Warning: mask {mask} has already been applied")

# Gather mask information for the above list from the config
masks, labels = sp_joint.get_masks_from_config(
    config,
    dat,
    dat,
    masks_to_apply=masks_to_apply,
    verbose=obj._params["verbose"],
)

# Combine the individual masks into one
mask_combined = sp_joint.Mask.from_list(
    masks,
    label="combined",
    verbose=obj._params["verbose"],
)

# Output some mask statistics
sp_joint.print_mask_stats(dat.shape[0], masks, mask_combined)

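# %%
# For reference, combining boolean keep-masks amounts to a logical AND across
# the individual arrays (or an OR, if the convention is True = flagged);
# Mask.from_list presumably also tracks labels and statistics on top of this.
# A quick cross-check, relying on the private _mask attribute that is also
# used further below (a sketch, not part of the validated pipeline):
mask_and = np.logical_and.reduce([m._mask for m in masks])
print("combined == AND of parts:", np.array_equal(mask_and, mask_combined._mask))
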
# %%
# Call metacal
cm = config["metacal"]
gal_metacal = metacal(
    dat,
    mask_combined._mask,
    snr_min=cm["gal_snr_min"],
    snr_max=cm["gal_snr_max"],
    rel_size_min=cm["gal_rel_size_min"],
    rel_size_max=cm["gal_rel_size_max"],
    size_corr_ell=cm["gal_size_corr_ell"],
    sigma_eps=cm["sigma_eps_prior"],
    global_R_weight=cm["global_R_weight"],
    col_2d=False,
    verbose=True,
)

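# %%
# For context, the core of metacalibration is the shear response
# R_ij = (e_i^(+j) - e_i^(-j)) / (2 dg), estimated from ellipticities measured
# on artificially sheared image variants (1P/1M/2P/2M). A minimal sketch of
# the diagonal response, with illustrative argument names (not the
# sp_validation implementation):
def response_diag(e1_1p, e1_1m, e2_2p, e2_2m, dg=0.01):
    """Mean diagonal metacal responses <R_11>, <R_22>."""
    R11 = np.mean(e1_1p - e1_1m) / (2 * dg)
    R22 = np.mean(e2_2p - e2_2m) / (2 * dg)
    return R11, R22
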
# %%
# Get metacal outputs; the returned selection mask mask_metacal is needed
# for all column extractions below
g_corr_mc, g_uncorr, w, mask_metacal, c, c_err = calibration.get_calibrated_m_c(
    gal_metacal
)

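# %%
# The calibrated shear returned above follows the standard metacal correction,
# schematically g_corr = (g_raw - c) / R, with R the response (including
# selection effects) and c the additive bias. A scalar sketch (illustrative
# only, not the calibration module's actual code):
def calibrate_shear(g_raw, R, c):
    """Apply multiplicative (R) and additive (c) metacal corrections."""
    return (g_raw - c) / R
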
# %%
# Compute DES weights
cat_gal = {}

sp_joint.compute_weights_gatti(
    cat_gal,
    g_uncorr,
    gal_metacal,
    dat,
    mask_combined,
    mask_metacal,
    num_bins=20,
)

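# %%
# For context, DES-style (Gatti et al.) shape weights are inverse-variance
# weights combining intrinsic shape noise and measurement noise, estimated on
# a grid of S/N and size, roughly w = 1 / (sigma_eps^2 + sigma_e,meas^2).
# A per-object sketch with an assumed shape-noise prior (illustrative, not
# the sp_validation implementation):
def inverse_variance_weight(sigma_e_meas, sigma_eps=0.3):
    """Inverse-variance shape weight from measurement error and shape noise."""
    return 1.0 / (sigma_eps**2 + sigma_e_meas**2)
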
# %%
# Correct for PSF leakage: estimate the leakage coefficients alpha_1, alpha_2
alpha_1, alpha_2 = sp_joint.compute_PSF_leakage(
    cat_gal,
    g_corr_mc,
    dat,
    mask_combined,
    mask_metacal,
    num_bins=20,
)

# Compute leakage-corrected ellipticities, e_corr = e - alpha * e_PSF
e1_leak_corrected = g_corr_mc[0] - alpha_1 * cat_gal["e1_PSF"]
e2_leak_corrected = g_corr_mc[1] - alpha_2 * cat_gal["e2_PSF"]

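# %%
# For reference, the leakage coefficient alpha is commonly the slope of a
# (weighted) linear fit of galaxy ellipticity against PSF ellipticity per
# component. A minimal sketch of such a fit (illustrative, not necessarily
# how compute_PSF_leakage estimates it):
def fit_leakage_alpha(e_gal, e_psf, w=None):
    """Slope alpha of the weighted linear fit e_gal = alpha * e_psf + const."""
    alpha, _const = np.polyfit(e_psf, e_gal, 1, w=w)
    return alpha
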
# %%
# Get some memory back: drop the individual masks, which are no longer needed.
# Note that deleting the loop variable (del mask inside a loop) would only
# unbind the name, not free the list entries.
del masks

# %%
# Extract masked sky coordinates
ra = cat.get_col(dat, "RA", mask_combined._mask, mask_metacal)
dec = cat.get_col(dat, "Dec", mask_combined._mask, mask_metacal)

# %%
# Correlation function
import treecorr

# %%
# Two-point correlation settings
min_sep = 1  # arcmin
max_sep = 50  # arcmin
nbins = 10
npatch = 20
n_cpu = 24

# Use a separate dict name to avoid shadowing the calibration config above
tc_config = {
    "ra_units": "deg",
    "dec_units": "deg",
    "min_sep": min_sep,
    "max_sep": max_sep,
    "nbins": nbins,
    "num_threads": n_cpu,
    "sep_units": "arcmin",
    "var_method": "jackknife",
    "bin_type": "Log",
}
gg = treecorr.GGCorrelation(tc_config)

# %%
catalog = treecorr.Catalog(
    ra=ra,
    dec=dec,
    g1=e1_leak_corrected,
    g2=e2_leak_corrected,
    w=cat_gal["w_des"],
    ra_units="deg",
    dec_units="deg",
    npatch=npatch,
)

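# %%
# With npatch set, treecorr assigns jackknife patches via k-means on the fly.
# To reuse identical patches for other catalogues (e.g. for consistent
# cross-correlation covariances), the patch centers can be saved and passed
# back in; the file name here is illustrative:
# catalog.write_patch_centers("patch_centers.txt")
# cat2 = treecorr.Catalog(..., patch_centers="patch_centers.txt")
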
# %%
# Compute the shear-shear auto-correlation
gg.process(catalog, catalog, num_threads=n_cpu)

# %%
# Plot theta * xi_+ with jackknife error bars
x = gg.rnom
y = x * gg.xip
dy = x * np.sqrt(gg.varxip)
plt.errorbar(x, y, yerr=dy, marker="o", linestyle="")
plt.xlabel(r"$\theta$ [arcmin]")
plt.ylabel(r"$\theta \xi_+(\theta)$ [arcmin]")
plt.show()

# %%
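# The jackknife patches also provide a full covariance matrix; treecorr's
# estimate_cov returns the covariance of the stacked data vector (xi_+
# followed by xi_-), so its leading diagonal block should match varxip:
cov = gg.estimate_cov("jackknife")
print(np.allclose(np.diag(cov)[:nbins], gg.varxip))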