Skip to content

Commit a301b93

Browse files
author
martinkilbinger
committed
removed hard-coded x_min, x_max in DES weight binning
1 parent 4255a51 commit a301b93

File tree

5 files changed

+247
-21
lines changed

5 files changed

+247
-21
lines changed

config/calibration/mask_v1.X.6.yaml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
# General parameters (can also be given on command line)
55
params:
66
input_path: unions_shapepipe_comprehensive_struc_2024_v1.X.c.hdf5
7-
cmatrices: True
7+
cmatrices: False
88
sky_regions: False
99
verbose: True
1010

@@ -97,7 +97,7 @@ metacal:
9797

9898
# Signal-to-noise range
9999
gal_snr_min: 10
100-
gal_snr_max: 1000
100+
gal_snr_max: 500
101101

102102
# Relative-size (hlr / hlr_psf) range
103103
gal_rel_size_min: 0.707

notebooks/calibrate_comprehensive_cat.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -249,14 +249,16 @@
249249
my_mask.print_summary(f_out)
250250

251251
# %%
252-
from scipy import stats
253-
254-
all_masks = masks[:-3]
255252

256253
if not obj._params["cmatrices"]:
257254
print("Skipping cmatric calculations")
258255
sys.exit(0)
259256

257+
# %%
258+
from scipy import stats
259+
260+
all_masks = masks[:-3]
261+
260262
r_val, r_cl = sp_joint.correlation_matrix(all_masks)
261263

262264
n = len(all_masks)

notebooks/leakage_minimal.py

Lines changed: 19 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@
1010
if ipython is not None:
1111
ipython.run_line_magic("load_ext", "autoreload")
1212
ipython.run_line_magic("autoreload", "2")
13+
ipython.run_line_magic("load_ext", "log_cell_time")
1314

1415
# %%
1516
import numpy as np
@@ -26,10 +27,8 @@
2627
from sp_validation import io
2728
from sp_validation import plots as sp_plots
2829

29-
if ipython is not None:
30-
ipython.run_line_magic('matplotlib', 'inline')
31-
32-
30+
# %%
31+
os.getcwd()
3332

3433
# %%
3534
# Initialize calibration class instance
@@ -41,6 +40,13 @@
4140
# Get data. Set load_into_memory to False for very large files
4241
dat, _ = obj.read_cat(load_into_memory=False)
4342

43+
# %%
44+
n_test = -1
45+
#n_test = 1_000_000
46+
if n_test > 0:
47+
print(f"MKDEBUG testing only first {n_test} objects")
48+
dat = dat[:n_test]
49+
4450
# %%
4551
# Use only some columns in dat for efficiency
4652
use_all_columns = True
@@ -61,9 +67,13 @@
6167
#dat = {col: obj._hd5file['data'][col][:] for col in required_columns if col in obj._hd5file['data']}
6268

6369
# %%
64-
applied_masks = obj._hd5file["applied_masks"]
65-
66-
applied_masks["desc"]
70+
# Look for masks that were already applied to data if this
71+
# information is in header
72+
if "applied_masks" in obj._hd5file:
73+
applied_masks = obj._hd5file["applied_masks"]
74+
applied_masks["desc"]
75+
else:
76+
applied_masks = None
6777

6878
# ## Masking
6979
masks_to_apply = [
@@ -73,9 +83,10 @@
7383
"npoint3",
7484
]
7585

86+
# %%
7687
# Check that mask has not already been applied
7788
for mask in masks_to_apply:
78-
if mask in applied_masks["desc"]:
89+
if applied_masks and mask in applied_masks["desc"]:
7990
print(f"Warning: Mask {mask} has already been applied")
8091
else:
8192
pass
@@ -99,9 +110,7 @@
99110

100111
# %%
101112
# Call metacal
102-
103113
cm = config["metacal"]
104-
105114
gal_metacal = metacal(
106115
dat,
107116
mask_combined._mask,
@@ -118,10 +127,6 @@
118127
# Get metacal outputs; here mask is needed
119128
g_corr_mc, g_uncorr, w, mask_metacal, c, c_err = calibration.get_calibrated_m_c(gal_metacal)
120129

121-
# %%
122-
# Apply masks to data
123-
#datm = dat[mask_combined._mask][mask_metacal]
124-
125130
# %%
126131
cat_gal = {}
127132

notebooks/tests_bump.py

Lines changed: 219 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,219 @@
1+
# %%
2+
# Tests
3+
4+
# %%
5+
from IPython import get_ipython
6+
7+
ipython = get_ipython()
8+
9+
# enable autoreload for interactive sessions
10+
if ipython is not None:
11+
ipython.run_line_magic("load_ext", "autoreload")
12+
ipython.run_line_magic("autoreload", "2")
13+
ipython.run_line_magic("load_ext", "log_cell_time")
14+
ipython.run_line_magic("matplotlib", "inline")
15+
16+
# %%
17+
import numpy as np
18+
import pandas as pd
19+
20+
import matplotlib.pyplot as plt
21+
22+
from cs_util import plots as cs_plots
23+
24+
from sp_validation import run_joint_cat as sp_joint
25+
from sp_validation import cat as cat
26+
from sp_validation.basic import metacal
27+
from sp_validation import calibration
28+
from sp_validation import io
29+
from sp_validation import plots as sp_plots
30+
31+
# %%
32+
os.getcwd()
33+
34+
# %%
35+
# Initialize calibration class instance
36+
obj = sp_joint.CalibrateCat()
37+
38+
config = obj.read_config_set_params("config_minimal.yaml")
39+
40+
41+
# Get data. Set load_into_memory to False for very large files
42+
dat, _ = obj.read_cat(load_into_memory=False)
43+
44+
# %%
45+
#n_test = -1
46+
n_test = 1_000_000
47+
if n_test > 0:
48+
print(f"MKDEBUG testing only first {n_test} objects")
49+
dat = dat[:n_test]
50+
51+
# %%
52+
# Use only some columns in dat for efficiency
53+
use_all_columns = True
54+
55+
if not use_all_columns:
56+
57+
required_columns = set()
58+
for section, mask_list in config_data.items():
59+
for mask_params in mask_list:
60+
required_columns.add(mask_params["col_name"])
61+
62+
#user_columns = ["NGMIX_T_NOSHEAR", "NGMIX_Tpsf_NOSHEAR", "NGMIX_FLUX_NOSHEAR", "NGMIX_FLUX_ERR_NOSHEAR"]
63+
#required_columns.update(user_columns)
64+
65+
print(f"{len(dat.dtype.names)} -> {len(required_columns)}")
66+
67+
# Unsolved error here
68+
#dat = {col: obj._hd5file['data'][col][:] for col in required_columns if col in obj._hd5file['data']}
69+
70+
# %%
71+
if "applied_masks" in obj._hd5file:
72+
applied_masks = obj._hd5file["applied_masks"]
73+
applied_masks["desc"]
74+
else:
75+
applied_masks = None
76+
77+
# ## Masking
78+
masks_to_apply = [
79+
"N_EPOCH",
80+
"FLAGS",
81+
"4_Stars",
82+
"npoint3",
83+
]
84+
85+
# Check that mask has not already been applied
86+
for mask in masks_to_apply:
87+
if applied_masks and mask in applied_masks["desc"]:
88+
print(f"Warning: Mask {mask} has already been applied")
89+
else:
90+
pass
91+
92+
# Gather mask information for above list from config
93+
masks, labels = sp_joint.get_masks_from_config(config, dat, dat, masks_to_apply=masks_to_apply, verbose=obj._params["verbose"])
94+
95+
# Initialise combined mask
96+
label = "comb"
97+
my_mask = sp_joint.Mask(label, label, kind="combined", value=None)
98+
99+
# Combine masks
100+
mask_combined = sp_joint.Mask.from_list(
101+
masks,
102+
label="combined",
103+
verbose=obj._params["verbose"],
104+
)
105+
106+
# Output some mask statistics
107+
sp_joint.print_mask_stats(dat.shape[0], masks, mask_combined)
108+
109+
# %%
110+
# Call metacal
111+
cm = config["metacal"]
112+
gal_metacal = metacal(
113+
dat,
114+
mask_combined._mask,
115+
snr_min=cm["gal_snr_min"],
116+
snr_max=cm["gal_snr_max"],
117+
rel_size_min=cm["gal_rel_size_min"],
118+
rel_size_max=cm["gal_rel_size_max"],
119+
size_corr_ell=cm["gal_size_corr_ell"],
120+
sigma_eps=cm["sigma_eps_prior"],
121+
global_R_weight=cm["global_R_weight"],
122+
col_2d=False,
123+
verbose=True,
124+
)
125+
126+
# %%
127+
128+
# Get metacal outputs; here mask is needed
129+
g_corr_mc, g_uncorr, w, mask_metacal, c, c_err = (
130+
calibration.get_calibrated_m_c(gal_metacal)
131+
)
132+
133+
# %%
134+
# Compute DES weights
135+
136+
cat_gal = {}
137+
138+
sp_joint.compute_weights_gatti(
139+
cat_gal,
140+
g_uncorr,
141+
gal_metacal,
142+
dat,
143+
mask_combined,
144+
mask_metacal,
145+
num_bins=20,
146+
)
147+
148+
# %%
149+
# Correct for PSF leakage
150+
151+
alpha_1, alpha_2 = sp_joint.compute_PSF_leakage(
152+
cat_gal,
153+
g_corr_mc,
154+
dat,
155+
mask_combined,
156+
mask_metacal,
157+
num_bins=20,
158+
)
159+
160+
# Compute leakage-corrected ellipticities
161+
e1_leak_corrected = g_corr_mc[0] - alpha_1 * cat_gal["e1_PSF"]
162+
e2_leak_corrected = g_corr_mc[1] - alpha_2 * cat_gal["e2_PSF"]
163+
164+
# %%
165+
# Get some memory back
166+
for mask in masks:
167+
del mask
168+
169+
170+
# %%
171+
ra = cat.get_col(dat, "RA", mask_combined._mask, mask_metacal)
172+
dec = cat.get_col(dat, "Dec", mask_combined._mask, mask_metacal)
173+
174+
175+
# %%
176+
# Correlation function
177+
import treecorr
178+
# %%
179+
minsep = 1
180+
maxsep = 50
181+
nbins = 10
182+
npatch = 20
183+
n_cpu = 24
184+
config = {
185+
"ra_units": "deg",
186+
"dec_units": "deg",
187+
"min_sep": minsep,
188+
"max_sep": maxsep,
189+
"nbins": nbins,
190+
"num_threads": n_cpu,
191+
"sep_units": "arcmin",
192+
"var_method": "jackknife",
193+
"bin_type": "Log",
194+
}
195+
gg = treecorr.GGCorrelation(config, var_method='jackknife')
196+
197+
# %%
198+
catalog = treecorr.Catalog(
199+
ra=ra,
200+
dec=dec,
201+
g1=e1_leak_corrected,
202+
g2=e2_leak_corrected,
203+
w=cat_gal["w_des"],
204+
ra_units="deg",
205+
dec_units="deg",
206+
npatch=npatch,
207+
)
208+
209+
# %%
210+
gg.process(catalog, catalog, num_threads=n_cpu)
211+
# %%
212+
x = gg.rnom
213+
y = x * gg.xip
214+
dy = x * np.sqrt(gg.varxip)
215+
plt.errorbar(x, y, yerr=dy, marker="o", linestyle="")
216+
plt.xlabel(r"$\theta$ [arcmin]")
217+
plt.ylabel(r"$\theta \xi_+(\theta)$ [arcmin]")
218+
plt.show()
219+
# %%

sp_validation/calibration.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -266,8 +266,8 @@ def get_w_des(cat_gal, num_bins):
266266
df_gal = build_df(cat_gal)
267267

268268
#Create logarithmic bins in size and SNR
269-
cut_to_bins(df_gal, "snr", num_bins, type="log", x_max=300)
270-
cut_to_bins(df_gal, "size_ratio", num_bins, type="log", x_min=0.5)
269+
cut_to_bins(df_gal, "snr", num_bins, type="log")
270+
cut_to_bins(df_gal, "size_ratio", num_bins, type="log")
271271

272272
#Compute shape noise and the shear response in each bin
273273

0 commit comments

Comments
 (0)