diff --git a/config/calibration/mask_v1.X.7.yaml b/config/calibration/mask_v1.X.7.yaml
new file mode 100644
index 0000000..49325e3
--- /dev/null
+++ b/config/calibration/mask_v1.X.7.yaml
@@ -0,0 +1,115 @@
+# Config file for masking and calibration.
+# Standard cuts without halo masks, type v1.X.7.
+
+# General parameters (can also be given on the command line)
+params:
+  input_path: unions_shapepipe_comprehensive_struc_2024_v1.X.c.hdf5
+  cmatrices: False
+  sky_regions: False
+  verbose: True
+
+# Masks
+## Using columns in 'dat' group (ShapePipe flags)
+dat:
+  # SExtractor flags
+  - col_name: FLAGS
+    label: SE FLAGS
+    kind: equal
+    value: 0
+
+  # Duplicate objects
+  - col_name: overlap
+    label: tile overlap
+    kind: equal
+    value: True
+
+  # ShapePipe flags
+  - col_name: IMAFLAGS_ISO
+    label: SP mask
+    kind: equal
+    value: 0
+
+  # Number of epochs
+  - col_name: N_EPOCH
+    label: r"$n_{\rm epoch}$"
+    kind: greater_equal
+    value: 2
+
+  # Magnitude range
+  - col_name: mag
+    label: mag range
+    kind: range
+    value: [15, 30]
+
+  # ngmix flags
+  - col_name: NGMIX_MOM_FAIL
+    label: "ngmix moments failure"
+    kind: equal
+    value: 0
+
+  # invalid PSF ellipticities
+  - col_name: NGMIX_ELL_PSFo_NOSHEAR_0
+    label: "bad PSF ellipticity comp 1"
+    kind: not_equal
+    value: -10
+  - col_name: NGMIX_ELL_PSFo_NOSHEAR_1
+    label: "bad PSF ellipticity comp 2"
+    kind: not_equal
+    value: -10
+
+## Using columns in 'dat_ext' group (post-processing flags)
+dat_ext:
+
+  # Bright star halos
+  - col_name: 2_Bright_star_halos
+    label: "Bright star halos"
+    kind: equal
+    value: False
+  # Stars
+  - col_name: 4_Stars
+    label: "Stars"
+    kind: equal
+    value: False
+
+  # Manual mask
+  - col_name: 8_Manual
+    label: "manual mask"
+    kind: equal
+    value: False
+
+  # r-band footprint
+  - col_name: 64_r
+    label: "r-band imaging"
+    kind: equal
+    value: False
+
+  # Maximask
+  - col_name: 1024_Maximask
+    label: "maximask"
+    kind: equal
+    value: False
+
+  # Rough pointing coverage
+  - col_name: npoint3
+    label: r"$n_{\rm pointing}$"
+    kind: greater_equal
+    value: 3
+
+# Metacal parameters
+metacal:
+  # Ellipticity dispersion
+  sigma_eps_prior: 0.34
+
+  # Signal-to-noise range
+  gal_snr_min: 10
+  gal_snr_max: 500
+
+  # Relative-size (hlr / hlr_psf) range
+  gal_rel_size_min: 0.707
+  gal_rel_size_max: 3
+
+  # Correct relative size for ellipticity?
+  gal_size_corr_ell: False
+
+  # Weight for global response matrix, None for unweighted mean
+  global_R_weight: w
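Each entry under `dat` / `dat_ext` in these mask configs pairs a catalogue column (`col_name`) with a comparison (`kind`) and a threshold (`value`); in the pipeline they are turned into masks by `sp_joint.get_masks_from_config` and `sp_joint.Mask` (see the `tests_bump.py` changes in this patch). The standalone sketch below, with a hypothetical `evaluate_cut` helper and random stand-in columns, only illustrates the intended semantics of the four `kind` values used here (`equal`, `not_equal`, `greater_equal`, `range`); it is not the pipeline implementation.

```python
import numpy as np

def evaluate_cut(column, kind, value):
    """Return a boolean 'keep' array for one mask entry (illustrative only)."""
    if kind == "equal":
        return column == value
    if kind == "not_equal":
        return column != value
    if kind == "greater_equal":
        return column >= value
    if kind == "range":
        lo, hi = value
        return (column >= lo) & (column <= hi)
    raise ValueError(f"unknown kind '{kind}'")

# Example with random stand-ins for the FLAGS, N_EPOCH, and mag entries
rng = np.random.default_rng(seed=0)
flags = rng.integers(0, 4, size=1000)
n_epoch = rng.integers(1, 6, size=1000)
mag = rng.uniform(14, 32, size=1000)

keep = (
    evaluate_cut(flags, "equal", 0)
    & evaluate_cut(n_epoch, "greater_equal", 2)
    & evaluate_cut(mag, "range", [15, 30])
)
print(f"kept {keep.sum()} / {keep.size} objects")
```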
diff --git a/config/calibration/mask_v1.X.8.yaml b/config/calibration/mask_v1.X.8.yaml
new file mode 100644
index 0000000..c346b33
--- /dev/null
+++ b/config/calibration/mask_v1.X.8.yaml
@@ -0,0 +1,121 @@
+# Config file for masking and calibration.
+# Standard cuts including faint star halo masks, type v1.X.8.
+
+# General parameters (can also be given on the command line)
+params:
+  input_path: unions_shapepipe_comprehensive_struc_2024_v1.X.c.hdf5
+  cmatrices: False
+  sky_regions: False
+  verbose: True
+
+# Masks
+## Using columns in 'dat' group (ShapePipe flags)
+dat:
+  # SExtractor flags
+  - col_name: FLAGS
+    label: SE FLAGS
+    kind: equal
+    value: 0
+
+  # Duplicate objects
+  - col_name: overlap
+    label: tile overlap
+    kind: equal
+    value: True
+
+  # ShapePipe flags
+  - col_name: IMAFLAGS_ISO
+    label: SP mask
+    kind: equal
+    value: 0
+
+  # Number of epochs
+  - col_name: N_EPOCH
+    label: r"$n_{\rm epoch}$"
+    kind: greater_equal
+    value: 2
+
+  # Magnitude range
+  - col_name: mag
+    label: mag range
+    kind: range
+    value: [15, 30]
+
+  # ngmix flags
+  - col_name: NGMIX_MOM_FAIL
+    label: "ngmix moments failure"
+    kind: equal
+    value: 0
+
+  # invalid PSF ellipticities
+  - col_name: NGMIX_ELL_PSFo_NOSHEAR_0
+    label: "bad PSF ellipticity comp 1"
+    kind: not_equal
+    value: -10
+  - col_name: NGMIX_ELL_PSFo_NOSHEAR_1
+    label: "bad PSF ellipticity comp 2"
+    kind: not_equal
+    value: -10
+
+## Using columns in 'dat_ext' group (post-processing flags)
+dat_ext:
+
+  # Faint star halos
+  - col_name: 1_Faint_star_halos
+    label: "Faint star halos"
+    kind: equal
+    value: False
+
+  # Bright star halos
+  - col_name: 2_Bright_star_halos
+    label: "Bright star halos"
+    kind: equal
+    value: False
+  # Stars
+  - col_name: 4_Stars
+    label: "Stars"
+    kind: equal
+    value: False
+
+  # Manual mask
+  - col_name: 8_Manual
+    label: "manual mask"
+    kind: equal
+    value: False
+
+  # r-band footprint
+  - col_name: 64_r
+    label: "r-band imaging"
+    kind: equal
+    value: False
+
+  # Maximask
+  - col_name: 1024_Maximask
+    label: "maximask"
+    kind: equal
+    value: False
+
+  # Rough pointing coverage
+  - col_name: npoint3
+    label: r"$n_{\rm pointing}$"
+    kind: greater_equal
+    value: 3
+
+# Metacal parameters
+metacal:
+  # Ellipticity dispersion
+  sigma_eps_prior: 0.34
+
+  # Signal-to-noise range
+  gal_snr_min: 10
+  gal_snr_max: 500
+
+  # Relative-size (hlr / hlr_psf) range
+  gal_rel_size_min: 0.707
+  gal_rel_size_max: 3
+
+  # Correct relative size for ellipticity?
+  gal_size_corr_ell: False
+
+  # Weight for global response matrix, None for unweighted mean
+  global_R_weight: w
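The only difference between `mask_v1.X.8.yaml` and `mask_v1.X.7.yaml` is the additional `1_Faint_star_halos` cut; the shared `metacal` block then restricts the calibration sample to a signal-to-noise window and a relative-size (hlr / hlr_psf) window. The actual selection happens inside the pipeline's `metacal(...)` call; the sketch below, with hypothetical stand-in arrays and assuming inclusive bounds, just spells out what those four numbers imply.

```python
import numpy as np

# Hypothetical arrays standing in for catalogue quantities; the real cut is
# applied inside the pipeline's metacal() call using the config values.
snr = np.array([5.0, 12.0, 80.0, 900.0])
hlr = np.array([0.8, 1.2, 2.0, 3.5])      # galaxy half-light radius
hlr_psf = np.array([1.0, 1.0, 1.0, 1.0])  # PSF half-light radius

cm = {
    "gal_snr_min": 10,
    "gal_snr_max": 500,
    "gal_rel_size_min": 0.707,  # ~ 1/sqrt(2)
    "gal_rel_size_max": 3,
}

rel_size = hlr / hlr_psf
selected = (
    (snr >= cm["gal_snr_min"]) & (snr <= cm["gal_snr_max"])
    & (rel_size >= cm["gal_rel_size_min"]) & (rel_size <= cm["gal_rel_size_max"])
)
print(selected)  # [False  True  True False]
```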
diff --git a/notebooks/demo_create_footprint_mask.py b/notebooks/demo_create_footprint_mask.py
index 357ecb7..df710c6 100644
--- a/notebooks/demo_create_footprint_mask.py
+++ b/notebooks/demo_create_footprint_mask.py
@@ -39,7 +39,7 @@
 # Set parameters
 obj._params["mask_dir"] = f"{os.environ['HOME']}/masks"
 
-obj._params["aux_mask_files"] = f"{obj._params['mask_dir']}/coverage.hsp"
+obj._params["aux_mask_files"] = f"{obj._params['mask_dir']}/coverage_v1.4.x.hsp"
 obj._params["aux_mask_labels"] = "npoint3"
 
 obj._params["verbose"] = True
@@ -66,9 +66,9 @@
     if mask_params["col_name"] in all_masks_bits:
         bits = bits | all_masks_bits[mask_params["col_name"]]
 
-    # Check auxillary masks
+    # Check auxiliary masks
     if "npoint3" == mask_params["col_name"]:
-        auxiliary_masks.append(f"{obj._params['mask_dir']}/coverage.hsp")
+        auxiliary_masks.append(obj._params["aux_mask_files"])
         auxiliary_labels.append("npoint3")
 
 # Update bits for following function call
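The numeric prefixes of the `dat_ext` columns (1, 2, 4, 8, 64, 1024) look like bit values, which is consistent with the `bits = bits | all_masks_bits[...]` accumulation in `demo_create_footprint_mask.py`; the `npoint3` selection instead comes from the auxiliary healsparse coverage map (`coverage_v1.4.x.hsp`) set via `aux_mask_files`. The sketch below only illustrates that bitmask bookkeeping under these assumptions; it is not the script's actual implementation.

```python
import numpy as np

# Bit values implied by the dat_ext column names in the configs above (assumption)
all_masks_bits = {
    "1_Faint_star_halos": 1,
    "2_Bright_star_halos": 2,
    "4_Stars": 4,
    "8_Manual": 8,
    "64_r": 64,
    "1024_Maximask": 1024,
}

# Combine the requested flags into a single bitmask, mirroring
# `bits = bits | all_masks_bits[...]` in the demo script
requested = ["2_Bright_star_halos", "4_Stars", "1024_Maximask"]
bits = 0
for name in requested:
    bits |= all_masks_bits[name]
print(f"combined bitmask: {bits}")  # 2 | 4 | 1024 = 1030

# A per-object flag column could then be tested against the combined bitmask;
# objects with any of the requested bits set would be rejected.
flag_col = np.array([0, 4, 1024, 8])
keep = (flag_col & bits) == 0
print(keep)  # [ True False False  True]
```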
diff --git a/notebooks/tests_bump.py b/notebooks/tests_bump.py
index 18e3152..9eca268 100644
--- a/notebooks/tests_bump.py
+++ b/notebooks/tests_bump.py
@@ -1,5 +1,5 @@
 # %%
-# Tests
+# Tests of the bump in xi_+
 
 # %%
 from IPython import get_ipython
@@ -14,8 +14,10 @@
 ipython.run_line_magic("matplotlib", "inline")
 
 # %%
+import os
 import numpy as np
 import pandas as pd
+import treecorr
 
 import matplotlib.pyplot as plt
 
@@ -29,159 +31,133 @@
 from sp_validation import plots as sp_plots
 
 # %%
-os.getcwd()
+print("cwd = ", os.getcwd())
 
 # %%
 # Initialize calibration class instance
 obj = sp_joint.CalibrateCat()
-
 config = obj.read_config_set_params("config_minimal.yaml")
-
+# %%
 # Get data. Set load_into_memory to False for very large files
 dat, _ = obj.read_cat(load_into_memory=False)
 
 # %%
-#n_test = -1
-n_test = 1_000_000
+n_test = -1
+#n_test = 1_000_000
 if n_test > 0:
     print(f"MKDEBUG testing only first {n_test} objects")
     dat = dat[:n_test]
 
 # %%
-# Use only some columns in dat for efficiency
-use_all_columns = True
-
-if not use_all_columns:
-
-    required_columns = set()
-    for section, mask_list in config_data.items():
-        for mask_params in mask_list:
-            required_columns.add(mask_params["col_name"])
-
-    #user_columns = ["NGMIX_T_NOSHEAR", "NGMIX_Tpsf_NOSHEAR", "NGMIX_FLUX_NOSHEAR", "NGMIX_FLUX_ERR_NOSHEAR"]
-    #required_columns.update(user_columns)
-
-    print(f"{len(dat.dtype.names)} -> {len(required_columns)}")
-
-    # Unsolved error here
-    #dat = {col: obj._hd5file['data'][col][:] for col in required_columns if col in obj._hd5file['data']}
-
-# %%
-if "applied_masks" in obj._hd5file:
-    applied_masks = obj._hd5file["applied_masks"]
-    applied_masks["desc"]
-else:
-    applied_masks = None
-
-# ## Masking
-masks_to_apply = [
-    "N_EPOCH",
-    "FLAGS",
-    "4_Stars",
-    "npoint3",
-]
-
-# Check that mask has not already been applied
-for mask in masks_to_apply:
-    if applied_masks and mask in applied_masks["desc"]:
-        print(f"Warning: Mask {mask} has already been applied")
-    else:
-        pass
-
-# Gather mask information for above list from config
-masks, labels = sp_joint.get_masks_from_config(config, dat, dat, masks_to_apply=masks_to_apply, verbose=obj._params["verbose"])
-
-# Initialise combined mask
-label = "comb"
-my_mask = sp_joint.Mask(label, label, kind="combined", value=None)
-
-# Combine masks
-mask_combined = sp_joint.Mask.from_list(
-    masks,
-    label="combined",
-    verbose=obj._params["verbose"],
-)
-
-# Output some mask statistics
-sp_joint.print_mask_stats(dat.shape[0], masks, mask_combined)
-
-# %%
-# Call metacal
-cm = config["metacal"]
-gal_metacal = metacal(
-    dat,
-    mask_combined._mask,
-    snr_min=cm["gal_snr_min"],
-    snr_max=cm["gal_snr_max"],
-    rel_size_min=cm["gal_rel_size_min"],
-    rel_size_max=cm["gal_rel_size_max"],
-    size_corr_ell=cm["gal_size_corr_ell"],
-    sigma_eps=cm["sigma_eps_prior"],
-    global_R_weight=cm["global_R_weight"],
-    col_2d=False,
-    verbose=True,
-)
-
-# %%
-
-# Get metacal outputs; here mask is needed
-g_corr_mc, g_uncorr, w, mask_metacal, c, c_err = (
-    calibration.get_calibrated_m_c(gal_metacal)
-)
-
-# %%
-# Compute DES weights
-
-cat_gal = {}
-
-sp_joint.compute_weights_gatti(
-    cat_gal,
-    g_uncorr,
-    gal_metacal,
-    dat,
-    mask_combined,
-    mask_metacal,
-    num_bins=20,
-)
-
-# %%
-# Correct for PSF leakage
-
-alpha_1, alpha_2 = sp_joint.compute_PSF_leakage(
-    cat_gal,
-    g_corr_mc,
-    dat,
-    mask_combined,
-    mask_metacal,
-    num_bins=20,
-)
-
-# Compute leakage-corrected ellipticities
-e1_leak_corrected = g_corr_mc[0] - alpha_1 * cat_gal["e1_PSF"]
-e2_leak_corrected = g_corr_mc[1] - alpha_2 * cat_gal["e2_PSF"]
-
-# %%
-# Get some memory back
-for mask in masks:
-    del mask
-
-
-# %%
-ra = cat.get_col(dat, "RA", mask_combined._mask, mask_metacal)
-dec = cat.get_col(dat, "Dec", mask_combined._mask, mask_metacal)
-
-
-# %%
-# Correlation function
-import treecorr
+def get_combined_mask(config, dat, masks_to_apply, verbose=False):
+
+    # Gather mask information for above list from config
+    masks, labels = sp_joint.get_masks_from_config(
+        config,
+        dat,
+        dat,
+        masks_to_apply=masks_to_apply,
+        verbose=verbose,
+    )
+
+    # Initialise combined mask
+    label = "comb"
+    my_mask = sp_joint.Mask(label, label, kind="combined", value=None)
+
+    # Combine masks
+    mask_combined = sp_joint.Mask.from_list(
+        masks,
+        label="combined",
+        verbose=verbose,
+    )
+
+    if True or verbose:
+        # Output some mask statistics
+        sp_joint.print_mask_stats(dat.shape[0], masks, mask_combined)
+
+    return mask_combined
+
+# %%
+def xi_from_dat(dat, mask_combined, cm, config_treecorr, n_cpu):
+
+    # Call metacal
+    gal_metacal = metacal(
+        dat,
+        mask_combined._mask,
+        snr_min=cm["gal_snr_min"],
+        snr_max=cm["gal_snr_max"],
+        rel_size_min=cm["gal_rel_size_min"],
+        rel_size_max=cm["gal_rel_size_max"],
+        size_corr_ell=cm["gal_size_corr_ell"],
+        sigma_eps=cm["sigma_eps_prior"],
+        global_R_weight=cm["global_R_weight"],
+        col_2d=False,
+        verbose=True,
+    )
+
+    # Get metacal outputs; here mask is needed
+    g_corr_mc, g_uncorr, w, mask_metacal, c, c_err = (
+        calibration.get_calibrated_m_c(gal_metacal)
+    )
+
+    # Compute DES weights
+    cat_gal = {}
+
+    sp_joint.compute_weights_gatti(
+        cat_gal,
+        g_uncorr,
+        gal_metacal,
+        dat,
+        mask_combined,
+        mask_metacal,
+        num_bins=20,
+    )
+
+    # Correct for PSF leakage
+    alpha_1, alpha_2 = sp_joint.compute_PSF_leakage(
+        cat_gal,
+        g_corr_mc,
+        dat,
+        mask_combined,
+        mask_metacal,
+        num_bins=20,
+    )
+
+    # Compute leakage-corrected ellipticities
+    e1_leak_corrected = g_corr_mc[0] - alpha_1 * cat_gal["e1_PSF"]
+    e2_leak_corrected = g_corr_mc[1] - alpha_2 * cat_gal["e2_PSF"]
+
+
+    ra = cat.get_col(dat, "RA", mask_combined._mask, mask_metacal)
+    dec = cat.get_col(dat, "Dec", mask_combined._mask, mask_metacal)
+
+    gg = treecorr.GGCorrelation(config_treecorr, var_method='jackknife')
+
+    npatch = 20
+    catalog = treecorr.Catalog(
+        ra=ra,
+        dec=dec,
+        g1=e1_leak_corrected,
+        g2=e2_leak_corrected,
+        w=cat_gal["w_des"],
+        ra_units="deg",
+        dec_units="deg",
+        npatch=npatch,
+    )
+
+    gg.process(catalog, catalog, num_threads=n_cpu)
+
+    return gg
+
+# %%
+# Set up correlation
 
 # %%
 minsep = 1
 maxsep = 50
 nbins = 10
-npatch = 20
 n_cpu = 24
-config = {
+config_treecorr = {
     "ra_units": "deg",
     "dec_units": "deg",
     "min_sep": minsep,
@@ -192,28 +168,44 @@
     "var_method": "jackknife",
     "bin_type": "Log",
 }
-gg = treecorr.GGCorrelation(config, var_method='jackknife')
+cm = config["metacal"]
 
-# %%
-catalog = treecorr.Catalog(
-    ra=ra,
-    dec=dec,
-    g1=e1_leak_corrected,
-    g2=e2_leak_corrected,
-    w=cat_gal["w_des"],
-    ra_units="deg",
-    dec_units="deg",
-    npatch=npatch,
-)
 
 # %%
-gg.process(catalog, catalog, num_threads=n_cpu)
+# Basic masking
+masks_to_apply = []
+
+masks_to_add = [
+    "4_Stars",
+    "1024_Maximask",
+    "N_EPOCH",
+    "2_Bright_star_halos",
+    "1_Faint_star_halos",
+    "npoint3",
+]
+
 # %%
-x = gg.rnom
-y = x * gg.xip
-dy = x * np.sqrt(gg.varxip)
-plt.errorbar(x, y, yerr=dy, marker="o", linestyle="")
-plt.xlabel(r"$\theta$ [arcmin]")
-plt.ylabel(r"$\theta \xi_+(\theta)$ [arcmin]")
-plt.show()
+# Main loop
+ggs = []
+for mask in masks_to_add:
+
+    print(f"Adding mask {mask}...")
+
+    masks_to_apply.append(mask)
+
+    # Get combined mask
+    mask_combined = get_combined_mask(
+        config,
+        dat,
+        masks_to_apply,
+        verbose=obj._params["verbose"],
+    )
+
+    # Compute xi_+ from data
+    print("Compute xi")
+    gg = xi_from_dat(dat, mask_combined, cm, config_treecorr, n_cpu)
+
+    xi_out_name = f"xi_{mask}.txt"
+    gg.write(xi_out_name)
+    ggs.append(gg)
 
 # %%
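The rewritten `tests_bump.py` stores one `GGCorrelation` per cumulative mask in `ggs` (and writes each to `xi_<mask>.txt`) but, unlike the removed cell, no longer plots anything. A possible follow-up cell, assuming the main loop has been run so that `ggs` and `masks_to_add` are in scope, would overlay the successive theta * xi_+ curves to see which cut affects the bump:

```python
# Possible follow-up cell (not part of the patch): overlay theta * xi_+ for the
# successively masked catalogues stored in `ggs`, reusing the gg.rnom / gg.xip /
# gg.varxip attributes from the removed plotting cell.
import numpy as np
import matplotlib.pyplot as plt

for gg, mask in zip(ggs, masks_to_add):
    x = gg.rnom
    y = x * gg.xip
    dy = x * np.sqrt(gg.varxip)
    plt.errorbar(x, y, yerr=dy, marker="o", linestyle="", label=f"+ {mask}")

plt.xscale("log")
plt.xlabel(r"$\theta$ [arcmin]")
plt.ylabel(r"$\theta \xi_+(\theta)$ [arcmin]")
plt.legend()
plt.show()
```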