From ba1e5bab45fc957b04ebcb64f74d19a6c7eb0f87 Mon Sep 17 00:00:00 2001 From: Lucie Baumont Date: Fri, 30 Jun 2023 16:49:36 +0300 Subject: [PATCH 01/12] candide batch submission script --- scripts/candide_CFIS_P3.sh | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 scripts/candide_CFIS_P3.sh diff --git a/scripts/candide_CFIS_P3.sh b/scripts/candide_CFIS_P3.sh new file mode 100644 index 0000000..8330e9a --- /dev/null +++ b/scripts/candide_CFIS_P3.sh @@ -0,0 +1,11 @@ +#!/bin/bash +#PBS -k o +### resource allocation +#PBS -l nodes=1:ppn=8,walltime=10:00:00 +### job name +#PBS -N peaks-mcmc +### Redirect stdout and stderr to same file +#PBS -j oe + +## your bash script here: +~/.conda/envs/sp-peaks/bin/python /home/baumont/software/shear-pipe-peaks/example/constraints_CFIS-P3.py From e3756249c7c56a067592766d1ecc526e9c146429 Mon Sep 17 00:00:00 2001 From: Lucie Baumont Date: Fri, 30 Jun 2023 16:50:21 +0300 Subject: [PATCH 02/12] constraints script --- example/constraints_CFIS-P3.py | 127 +++++++++++++++++++++++++++++++++ 1 file changed, 127 insertions(+) create mode 100644 example/constraints_CFIS-P3.py diff --git a/example/constraints_CFIS-P3.py b/example/constraints_CFIS-P3.py new file mode 100644 index 0000000..c596003 --- /dev/null +++ b/example/constraints_CFIS-P3.py @@ -0,0 +1,127 @@ +import numpy as np +import emcee +import numpy.linalg as la +import matplotlib.pyplot as plt +from getdist import plots, MCSamples, parampriors +from joblib import Parallel, delayed, cpu_count +from multiprocessing import cpu_count, Pool +import time +from chainconsumer import ChainConsumer +from utils import * +from likelihood import * +import os +import sys + +#for tex in ChainConsumer +pref = os.environ['CONDA_PREFIX'] +os.environ["PATH"] += os.pathsep + '/Library/TeX/texbin' + + +#Specify the free parameters for redshift, shear calibration, baryonic correction and cut in S/N +# Check the README to know the possibility for the different cases and reproduce the plots +param_z = '065' +param_z_cov = '0.65' +param_cal = 'dm_1deg' +param_baryonic_correction = 'Fid' +param_cut = 19 +n_patches = 13 + + +#Load the file names for peaks file +file_name_peaks_Maps = (np.array(np.loadtxt('.././input/list_cosmo_peaks_z{}.txt'.format(param_z), usecols=0,dtype=str))) + +#Extract cosmological parameters values from the simulations associated to each peak counts file +params = np.array([takes_params(f) for f in file_name_peaks_Maps]) +#identifies fiducial cosmology index +index_fiducial = 15 +params_fiducial = params[index_fiducial] + +#load the peaks distributions for the theoretical prediction +Peaks_Maps_DM = np.array([np.load('../input/peaks_z{}/%s'.format(param_z)%(fn), mmap_mode='r') for fn in file_name_peaks_Maps])[:,:,:param_cut] + +# load baryon corrections +bar_corr = np.load('.././input/{}_correction.npy'.format(param_baryonic_correction))[:param_cut] + +#Apply the baryonic correction +Peaks_Maps = np.copy(Peaks_Maps_DM) + +if(param_baryonic_correction == 'no'): + # no baryonic correction + for i in range(Peaks_Maps_DM.shape[0]): + for j in range(Peaks_Maps_DM.shape[1]): + Peaks_Maps[i,j,:] = Peaks_Maps_DM[i,j,:] * 1 +else: + # apply the choosen baryonic correction + for i in range(Peaks_Maps_DM.shape[0]): + for j in range(Peaks_Maps_DM.shape[1]): + Peaks_Maps[i,j,:] = Peaks_Maps_DM[i,j,:] * bar_corr + +# take the mean over realizations +Peaks_Maps_mean=np.mean(Peaks_Maps, axis=1) + +# create array for snr histogram +snr_array=np.linspace(-2,6,31) 
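+# midpoints of the S/N bin edges defined above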
+snr_centers=0.5*(snr_array[1:]+snr_array[:-1]) + +#Load peaks distribution for data +peaks_data = np.load('.././input/peaks_mean_{}.npy'.format(param_cal), mmap_mode='r')[:param_cut] + +#Train Gaussian Processes regressor +#this returns a tuple consisting of (list of GP, scaling) +ncpu = cpu_count() +gp_scaling=np.array([Parallel(n_jobs = ncpu, verbose = 5)(delayed(gp_train)(index_bin, params_train = params, obs_train = Peaks_Maps) for index_bin in range(Peaks_Maps.shape[2]))]).reshape(Peaks_Maps.shape[2], 2) + +gp_list=gp_scaling[:,0] +scaling=gp_scaling[:,1] + +#Covariance matrix +#Load peaks to compute covariance matrix +cov_peaks_DM = np.load('.././input/convergence_gal_mnv0.00000_om0.30000_As2.1000_peaks_2arcmin_{}_b030_snr_min_max_ngal_7.npy'.format(param_z_cov), mmap_mode='r')[:,:param_cut] + +#Apply Baryonic Correction +cov_peaks = np.copy(cov_peaks_DM) +for i in range(Peaks_Maps_DM.shape[1]): + cov_peaks[i,:] = cov_peaks_DM[i,:] * bar_corr + +#Compute covariance matrix and scale it for sky coverage +cov=(1/n_patches)*np.cov(cov_peaks.T) + +#compute the inverse of the covariance +icov=la.inv(cov) + +# get constraints with MCMC +# compute hartlap factor +n_real=Peaks_Maps_DM.shape[1] +n_bins=len(peaks_data) +norm=(n_real-n_bins-2)/(n_real-1) + +#Define priors (range of sims) +M_nu_min = 0.06 # minimum from oscillation experiments +M_nu_max = 0.62 +Omega_m_min = 0.18 +Omega_m_max = 0.42 +A_s_min = 1.29 +A_s_max = 2.91 + +#Specify number of dimensions for parameter space, number of walkers, initial position + +ndim, nwalkers = 3,250 +pos = [params_fiducial + 1e-3*np.random.randn(ndim) for i in range(nwalkers)] + +print("{0} CPUs".format(ncpu)) + + +with Pool() as pool: + sampler = emcee.EnsembleSampler(nwalkers, ndim, lnpost, pool=pool, args=[peaks_data, icov,gp_list, scaling, norm,M_nu_min, M_nu_max, Omega_m_min, Omega_m_max, A_s_min, A_s_max]) + start = time.time() + sampler.run_mcmc(pos, 6500, progress=True) + end = time.time() + multi_time = end - start + print("Multiprocessing took {0:.1f} seconds".format(multi_time)) +# remove burn-in +samples = sampler.chain[:,200:, :].reshape((-1, ndim)) +#save results +np.save('.././output/constraints_z{}_{}_{}corr_{}snr.npy'.format(param_z,param_cal,param_baryonic_correction,param_cut),samples) + + + From 9b16f78448cf1b29da5f0ffbf5514376020b9cf9 Mon Sep 17 00:00:00 2001 From: Martin Kilbinger Date: Thu, 27 Jul 2023 10:43:59 +0200 Subject: [PATCH 03/12] Installation via conda and pyproject; two py versions (3.11, 3.6); example slics script --- .gitignore | 3 ++ environment.yml | 15 +++++++++ environment_3.6.yml | 15 +++++++++ example/test_slics.py | 46 ++++++++++++++++++++++++++ pyproject.toml | 76 +++++++++++++++++++++++++++++++++++++++++++ requirements.txt | 7 ++++ sp_peaks/__init__.py | 25 ++++++++++++++ 7 files changed, 187 insertions(+) create mode 100644 environment.yml create mode 100644 environment_3.6.yml create mode 100644 example/test_slics.py create mode 100644 pyproject.toml create mode 100644 requirements.txt create mode 100644 sp_peaks/__init__.py diff --git a/.gitignore b/.gitignore index 212d545..1859f04 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,5 @@ +build +sp_peaks.egg-info/ .ipynb_checkpoints +example/test_slics.ipynb __pycache__ diff --git a/environment.yml b/environment.yml new file mode 100644 index 0000000..e1d3b74 --- /dev/null +++ b/environment.yml @@ -0,0 +1,15 @@ +name: peaks +channels: + - conda-forge +dependencies: + - python>3.10 + - pip>=21 + - numpy + - astropy + - scipy + - scikit-learn + - 
matplotlib + - joblib + - tqdm + - pip: + - -r requirements.txt diff --git a/environment_3.6.yml b/environment_3.6.yml new file mode 100644 index 0000000..5321e6a --- /dev/null +++ b/environment_3.6.yml @@ -0,0 +1,15 @@ +name: peaks_3.6 +channels: + - conda-forge +dependencies: + - python==3.6.13 + - pip>=21 + - numpy + - astropy + - scipy + - scikit-learn==0.20 + - matplotlib + - joblib + - tqdm + - pip: + - -r requirements.txt diff --git a/example/test_slics.py b/example/test_slics.py new file mode 100644 index 0000000..6b91c02 --- /dev/null +++ b/example/test_slics.py @@ -0,0 +1,46 @@ +# --- +# jupyter: +# jupytext: +# formats: ipynb,py +# text_representation: +# extension: .py +# format_name: light +# format_version: '1.5' +# jupytext_version: 1.14.7 +# kernelspec: +# display_name: Python 3 (ipykernel) +# language: python +# name: python3 +# --- + +# # CosmoSLICS simulation testing +# 07/2023 + +# %matplotlib inline +# %load_ext autoreload +# %autoreload 2 + +# + +import sp_peaks +print(f"sp_peaks version = {sp_peaks.__version__}") + +from sp_peaks import slics +# - + +cat_path = "/n17data/tersenov/SLICS/Cosmo_DES/06_f/LOS3/DES_MocksCat_06_f_4_Bin3_LOS3_R19.dat" + +# Load catalogue, all columns +dat = slics.read_catalogue(cat_path) + +# Print column names +print(dat.dtype.names) + +# Print first line +print(dat[0]) + +# Load only essential columns +dat_ess = slics.read_catalogue(cat_path, all_col=False) + +print(dat_ess[0]) + + diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..69085e6 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,76 @@ +[project] +name = "sp_peaks" +readme = "README.md" +#requires-python = "==3.6.13" +requires-python = ">=3.10" +authors = [ + { "name" = "The CosmoStat Lab" }, +] +maintainers = [ + { "name" = "Andreas Tersenov", "email" = "andreas.tersenov@cea.fr" }, + { "name" = "Lucie Baumont", "email" = "lucie.baumont@cea.fr" }, + { "name" = "Martin Kilbinger", "email" = "martin.kilbinger@cea.fr" }, +] +dynamic = ["version"] +license = { "file" = "LICENSE" } +keywords = [ + "CosmoStat", + "cosmology", + "weak gravitational lensing", + "peak counts", +] +classifiers = [ + "Development Status :: 1 - Planning", + "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + "Operating System :: POSIX :: Linux", + "Operating System :: MacOS", + "Topic :: Scientific/Engineering", +] +dependencies = [ + "astropy>=4.0", + "numpy>=1.19", + "scipy>=1.5", + "scikit-learn>=1.3", +] + +[project.optional-dependencies] +lint = [ + "black", +] +release = [ + "build", + "twine", +] +test = [ + "pytest", + "pytest-black", + "pytest-cov", + "pytest-pydocstyle", +] + +# Install for development +dev = ["sp_peaks[lint,release,test]"] + +[project.urls] +Source = "https://github.com/CosmoStat/shear-pipe-peaks" +Documentation = "https://github.com/CosmoStat/shear-pipe-peaks" +Tracker = "https://github.com/CosmoStat/shear-pipe-peaks/issues" + +[tool.black] +line-length = 80 + +[tool.pydocstyle] +convention = "numpy" + +[tool.pytest.ini_options] +addopts = "--verbose --black --pydocstyle --cov=sp_peaks" +testpaths = ["sp_peaks"] + +# Select library directory(ies) +[tool.setuptools] +packages = ["sp_peaks"] + +[tool.setuptools.dynamic] +version = {attr = "sp_peaks.__version__"} diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..d75f7ef --- /dev/null +++ b/requirements.txt @@ -0,0 +1,7 @@ +emcee==3.1.1 +zenodo_get +chainconsumer +getdist +jupyter +jupytext +lenspack 
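+# These packages are pulled in via the `pip:` section of environment.yml / environment_3.6.yml,
+# or can be installed directly with `pip install -r requirements.txt`.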
diff --git a/sp_peaks/__init__.py b/sp_peaks/__init__.py new file mode 100644 index 0000000..2df7834 --- /dev/null +++ b/sp_peaks/__init__.py @@ -0,0 +1,25 @@ +"""sp_peaks PACKAGE. + +CosmoStat utilities for weak lensing peaks from ShapePipe processed data. + +References +---------- +This package makes use of the following third-party packages: + +- `Numpy `_ :cite:`Harris:2020` + +.. warning:: + + `WPS410 `_ and `WPS412 `_ errors are + supressed in this module to allow the defintion of a package + ``__version__``, which is standard for most Python packages. + +""" + +from warnings import warn + +__version__ = "0.0.1" From aafc0b4ee47b9599b7a1ed65a84903e248d5cc49 Mon Sep 17 00:00:00 2001 From: Martin Kilbinger Date: Fri, 28 Jul 2023 15:54:34 +0200 Subject: [PATCH 04/12] Update README.md Added general description --- README.md | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index cd8f7b4..236adf9 100644 --- a/README.md +++ b/README.md @@ -1,21 +1,30 @@ -## The impact of systematic errors on weak-lensing peak counts for UNIONS - -This repository containts the results of the paper "UNIONS: The impact of systematic errors on weak-lensing peak counts". - -Authors: Emma Ayçoberry, Virginia Ajani, Axel Guinot, Martin Kilbinger, Valeria Pettorino, Samuel Farrens, Jean-Luc Starck, Raphaël Gavazzi, Michael J. Hudson +# Weak-lensing peak counts with ShapePipe processed data +This python package contains methods for cosmological parameter inference from weak-lensing peak counts. +It works with galaxy catalogue data processed with ShapePipe, but is general to be used with other data. -The Ultraviolet Near-Infrared Optical Northern Survey (UNIONS) is an ongoing deep photometric multi-band survey of the Northern sky. As part of UNIONS, the Canada-France Imaging Survey (CFIS) provides r-band data with a median seeing of 0.65 arcsec, which we use to study weak-lensing peak counts for cosmological inference. -This work aims to assess systematics effects for weak-lensing peak counts and their impact on cosmological parameters for the UNIONS survey. In particular, we present results on local calibration, metacalibration shear bias, baryonic feedback, the source galaxy redshift estimate, intrinsic alignment, and the cluster member dilution. We expect constraints to become more reliable with future (larger) data catalogues, for which the current pipeline will provide a starting point. This paper investigates for the first time with UNIONS weak-lensing data and peak counts the impact of the local calibration, residual multiplicative shear bias, redshift uncertainty, baryonic feedback, intrinsic alignment and cluster member dilution. The value of matter density parameter is the most impacted and can shift up to ~ 0.03 which corresponds to 0.5 sigma depending on the choices for each systematics. We expect constraints to become more reliable with future (larger) data catalogues, for which the current code provides a starting point. - +Included is code to reproduce results and plots from Ayçoberry et al. (2022); see below. ### Content +1. [The impact of systematic errors on weak-lensing peak counts for UNIONS](#the-impact-of-systematic-errors-on-weak-lensing-peak-counts-for-unions) 1. [Dependencies](#dependencies) 2. [Description of the input files](#description-of-the-input-files) 3. 
[Example](#example) +## The impact of systematic errors on weak-lensing peak counts for UNIONS + +This repository contains the results of the paper "UNIONS: The impact of systematic errors on weak-lensing peak counts". + +Authors: Emma Ayçoberry, Virginia Ajani, Axel Guinot, Martin Kilbinger, Valeria Pettorino, Samuel Farrens, Jean-Luc Starck, Raphaël Gavazzi, Michael J. Hudson + + +The Ultraviolet Near-Infrared Optical Northern Survey (UNIONS) is an ongoing deep photometric multi-band survey of the Northern sky. As part of UNIONS, the Canada-France Imaging Survey (CFIS) provides r-band data with a median seeing of 0.65 arcsec, which we use to study weak-lensing peak counts for cosmological inference. +This work aims to assess systematics effects for weak-lensing peak counts and their impact on cosmological parameters for the UNIONS survey. In particular, we present results on local calibration, metacalibration shear bias, baryonic feedback, the source galaxy redshift estimate, intrinsic alignment, and cluster member dilution. We expect constraints to become more reliable with future (larger) data catalogues, for which the current pipeline will provide a starting point. This paper investigates for the first time with UNIONS weak-lensing data and peak counts the impact of the local calibration, residual multiplicative shear bias, redshift uncertainty, baryonic feedback, intrinsic alignment and cluster member dilution. The value of matter density parameter is the most impacted and can shift up to ~ 0.03 which corresponds to 0.5 sigma depending on the choices for each systematics. We expect constraints to become more reliable with future (larger) data catalogues, for which the current code provides a starting point. + + + #### Dependencies To be able to run the example, the following python packages should be installed with their specific dependencies in a `python==3.6` environment. From a84c43e353ffdb3b32c8faf643c8ca0b39e69063 Mon Sep 17 00:00:00 2001 From: Martin Kilbinger Date: Fri, 28 Jul 2023 15:59:34 +0200 Subject: [PATCH 05/12] Update README.md Installation instructions using the env file --- README.md | 51 +++++++++++++++------------------------------------ 1 file changed, 15 insertions(+), 36 deletions(-) diff --git a/README.md b/README.md index 236adf9..e2203b5 100644 --- a/README.md +++ b/README.md @@ -5,53 +5,36 @@ It works with galaxy catalogue data processed with ShapePipe, but is general to Included is code to reproduce results and plots from Ayçoberry et al. (2022); see below. -### Content +## Content 1. [The impact of systematic errors on weak-lensing peak counts for UNIONS](#the-impact-of-systematic-errors-on-weak-lensing-peak-counts-for-unions) -1. [Dependencies](#dependencies) -2. [Description of the input files](#description-of-the-input-files) -3. [Example](#example) + 1. [Installation](#installation) + 1. [Description of the input files](#description-of-the-input-files) + 1. [Example](#example) ## The impact of systematic errors on weak-lensing peak counts for UNIONS -This repository contains the results of the paper "UNIONS: The impact of systematic errors on weak-lensing peak counts". +This part reproduces the results of the paper "UNIONS: The impact of systematic errors on weak-lensing peak counts". Authors: Emma Ayçoberry, Virginia Ajani, Axel Guinot, Martin Kilbinger, Valeria Pettorino, Samuel Farrens, Jean-Luc Starck, Raphaël Gavazzi, Michael J. 
Hudson - +Abstract: The Ultraviolet Near-Infrared Optical Northern Survey (UNIONS) is an ongoing deep photometric multi-band survey of the Northern sky. As part of UNIONS, the Canada-France Imaging Survey (CFIS) provides r-band data with a median seeing of 0.65 arcsec, which we use to study weak-lensing peak counts for cosmological inference. This work aims to assess systematics effects for weak-lensing peak counts and their impact on cosmological parameters for the UNIONS survey. In particular, we present results on local calibration, metacalibration shear bias, baryonic feedback, the source galaxy redshift estimate, intrinsic alignment, and cluster member dilution. We expect constraints to become more reliable with future (larger) data catalogues, for which the current pipeline will provide a starting point. This paper investigates for the first time with UNIONS weak-lensing data and peak counts the impact of the local calibration, residual multiplicative shear bias, redshift uncertainty, baryonic feedback, intrinsic alignment and cluster member dilution. The value of matter density parameter is the most impacted and can shift up to ~ 0.03 which corresponds to 0.5 sigma depending on the choices for each systematics. We expect constraints to become more reliable with future (larger) data catalogues, for which the current code provides a starting point. -#### Dependencies - -To be able to run the example, the following python packages should be installed with their specific dependencies in a `python==3.6` environment. -You can follow the instructions to install Anaconda [here](https://docs.anaconda.com/anaconda/install/index.html) or miniconda [here](https://conda.io/projects/conda/en/latest/user-guide/install/index.html) and create an environment as - -``conda create --name python==3.6.13`` - -then install the following packages: - - -- [numpy](https://numpy.org/install/) -- [astropy](https://www.astropy.org) -- [lenspack](https://github.com/CosmoStat/lenspack.git) -- [scipy](https://scipy.org/install/) -- [scikit-learn](https://scikit-learn.org/stable/install.html)==0.20 -- [matplotlib](https://matplotlib.org/stable/users/installing/index.html) -- [emcee](https://emcee.readthedocs.io/en/v2.2.1/user/install/)==3.1.1 -- [joblib](https://joblib.readthedocs.io/en/latest/installing.html) -- [tqdm](https://github.com/tqdm/tqdm#installation) -- [chainconsumer](https://samreay.github.io/ChainConsumer/) -- [getdist](https://getdist.readthedocs.io/en/latest/intro.html#getting-started) -- [zenodo_get](https://github.com/dvolgyes/zenodo_get) -- [jupyter](https://jupyter.org/install) - - +### Installation +First, download this repository by clicking on the `Clone` button above. +The easiest way to install the required software is using `conda`. +You can follow the instructions to install Anaconda [here](https://docs.anaconda.com/anaconda/install/index.html) or miniconda [here](https://conda.io/projects/conda/en/latest/user-guide/install/index.html). To create the conda environnment, type +```bash +conda create -f environment_3.6.yml +``` +Note that python version 3.6.13 is required. 
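+
+The environment defined in `environment_3.6.yml` is named `peaks_3.6`; once it has been created, activate it with, for example:
+```bash
+conda activate peaks_3.6
+```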
#### Description of the input files @@ -147,11 +130,7 @@ param_cut = 19, #### Example -To run the example you need to check that you have propery installed the [Dependencies](#dependencies) listed above in your environment and then git clone this repository as - -`git clone https://github.com/CosmoStat/shear-pipe-peaks.git` - -ad then go to the example folder and launch the jupyter notebook [constraints_CFIS-P3.ipynb](https://github.com/CosmoStat/shear-pipe-peaks/blob/main/example/constraints_CFIS-P3.ipynb) by doing: +After installing the package, see [Dependencies](#dependencies), go to the example folder and launch the jupyter notebook [constraints_CFIS-P3.ipynb](https://github.com/CosmoStat/shear-pipe-peaks/blob/main/example/constraints_CFIS-P3.ipynb) by typing: `cd example` From b04acedc077198348a759fa53275954a51ea6f9b Mon Sep 17 00:00:00 2001 From: Martin Kilbinger Date: Fri, 28 Jul 2023 16:26:09 +0200 Subject: [PATCH 06/12] Update README.md formatting --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index e2203b5..5fe4317 100644 --- a/README.md +++ b/README.md @@ -8,9 +8,9 @@ Included is code to reproduce results and plots from Ayçoberry et al. (2022); s ## Content 1. [The impact of systematic errors on weak-lensing peak counts for UNIONS](#the-impact-of-systematic-errors-on-weak-lensing-peak-counts-for-unions) - 1. [Installation](#installation) - 1. [Description of the input files](#description-of-the-input-files) - 1. [Example](#example) + 1. [Installation](#installation) + 1. [Description of the input files](#description-of-the-input-files) + 1. [Example](#example) ## The impact of systematic errors on weak-lensing peak counts for UNIONS From e7a0bae35a912b3fa119e204506b312499472f84 Mon Sep 17 00:00:00 2001 From: Martin Kilbinger Date: Fri, 28 Jul 2023 16:28:15 +0200 Subject: [PATCH 07/12] Update README.md --- README.md | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 5fe4317..2fbfa53 100644 --- a/README.md +++ b/README.md @@ -3,15 +3,23 @@ This python package contains methods for cosmological parameter inference from weak-lensing peak counts. It works with galaxy catalogue data processed with ShapePipe, but is general to be used with other data. +To use this code, download this repository by clicking on the `Clone` button above. + + Included is code to reproduce results and plots from Ayçoberry et al. (2022); see below. ## Content +1. [The python library](#the-python-library) + 1. [Installation](#installation) 1. [The impact of systematic errors on weak-lensing peak counts for UNIONS](#the-impact-of-systematic-errors-on-weak-lensing-peak-counts-for-unions) 1. [Installation](#installation) 1. [Description of the input files](#description-of-the-input-files) 1. [Example](#example) +## The python library + +### Installation ## The impact of systematic errors on weak-lensing peak counts for UNIONS @@ -27,8 +35,6 @@ This work aims to assess systematics effects for weak-lensing peak counts and th ### Installation -First, download this repository by clicking on the `Clone` button above. - The easiest way to install the required software is using `conda`. You can follow the instructions to install Anaconda [here](https://docs.anaconda.com/anaconda/install/index.html) or miniconda [here](https://conda.io/projects/conda/en/latest/user-guide/install/index.html). 
To create the conda environnment, type ```bash From 19a14a3b48f0a34cb3c7c66ebe7ca44925aa6eab Mon Sep 17 00:00:00 2001 From: Martin Kilbinger Date: Fri, 28 Jul 2023 17:21:16 +0200 Subject: [PATCH 08/12] Update README.md installation & usage of library --- README.md | 27 ++++++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 2fbfa53..e103408 100644 --- a/README.md +++ b/README.md @@ -5,13 +5,17 @@ It works with galaxy catalogue data processed with ShapePipe, but is general to To use this code, download this repository by clicking on the `Clone` button above. +The easiest way to install the required software is using `conda`. +You can follow the instructions to install Anaconda [here](https://docs.anaconda.com/anaconda/install/index.html) or miniconda [here](https://conda.io/projects/conda/en/latest/user-guide/install/index.html). Included is code to reproduce results and plots from Ayçoberry et al. (2022); see below. ## Content 1. [The python library](#the-python-library) - 1. [Installation](#installation) + 1. [Installation](#installation) + 1. [Usage](#usage) + 1. [Examples](#examples) 1. [The impact of systematic errors on weak-lensing peak counts for UNIONS](#the-impact-of-systematic-errors-on-weak-lensing-peak-counts-for-unions) 1. [Installation](#installation) 1. [Description of the input files](#description-of-the-input-files) @@ -21,6 +25,24 @@ Included is code to reproduce results and plots from Ayçoberry et al. (2022); s ### Installation +To create the conda environment type +```bash +conda create -f environment.yml +``` + +### Usage + +Once installed, any library files can be used via `import` in a python script or notebook, e.g.: +```python +import sp_peaks +from sp_peaks import slics +``` + +### Examples + +Example notebooks and associated python scripts can be run in the `notebooks` folder. + + ## The impact of systematic errors on weak-lensing peak counts for UNIONS This part reproduces the results of the paper "UNIONS: The impact of systematic errors on weak-lensing peak counts". @@ -35,8 +57,7 @@ This work aims to assess systematics effects for weak-lensing peak counts and th ### Installation -The easiest way to install the required software is using `conda`. -You can follow the instructions to install Anaconda [here](https://docs.anaconda.com/anaconda/install/index.html) or miniconda [here](https://conda.io/projects/conda/en/latest/user-guide/install/index.html). 
To create the conda environnment, type +To create the conda environment type ```bash conda create -f environment_3.6.yml ``` From 3c83a52495465a7ae17052754791a1ada5c00c96 Mon Sep 17 00:00:00 2001 From: lbaumo Date: Tue, 29 Aug 2023 14:42:32 +0200 Subject: [PATCH 09/12] memory problems fixed for candide scripts --- example/constraints_CFIS-P3.py | 96 +++++++++++++++++++--------------- scripts/candide_CFIS_P3.sh | 4 +- 2 files changed, 56 insertions(+), 44 deletions(-) diff --git a/example/constraints_CFIS-P3.py b/example/constraints_CFIS-P3.py index c596003..2c21253 100644 --- a/example/constraints_CFIS-P3.py +++ b/example/constraints_CFIS-P3.py @@ -1,9 +1,9 @@ +import argparse import numpy as np import emcee import numpy.linalg as la import matplotlib.pyplot as plt -from getdist import plots, MCSamples, parampriors -from joblib import Parallel, delayed, cpu_count +from sklearn.externals.joblib import Parallel, delayed, cpu_count from multiprocessing import cpu_count, Pool import time from chainconsumer import ChainConsumer @@ -12,37 +12,53 @@ import os import sys -#for tex in ChainConsumer +parser = argparse.ArgumentParser( + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=""" + +SCRIPT NAME: +constraints_CFIS-P3.py + +EXAMPLE: +python constraints_CFIS-P3.py '/home/baumont/software/shear-pipe-peaks' --nproc 4 +""" +) +parser.add_argument('maindir', help='work directory containing input and output directories') +parser.add_argument('--nproc', help='number of processes', type=int, default=1) + +args = parser.parse_args() + +#for tex in ChainConsumer pref = os.environ['CONDA_PREFIX'] os.environ["PATH"] += os.pathsep + '/Library/TeX/texbin' - -#Specify the free parameters for redshift, shear calibration, baryonic correction and cut in S/N -# Check the README to know the possibility for the different cases and reproduce the plots -param_z = '065' +#Specify the free parameters for redshift, shear calibration, baryonic correction and cut in S/N +# Check the README to know the possibility for the different cases and reproduce the plots +param_z = '065' param_z_cov = '0.65' param_cal = 'dm_1deg' param_baryonic_correction = 'Fid' param_cut = 19 n_patches = 13 +maindir = args.maindir +nproc = args.nproc +#Load the file names for peaks file +file_name_peaks_Maps = (np.array(np.loadtxt(maindir+'/input/list_cosmo_peaks_z{}.txt'.format(param_z), usecols=0,dtype=str))) - -#Load the file names for peaks file -file_name_peaks_Maps = (np.array(np.loadtxt('.././input/list_cosmo_peaks_z{}.txt'.format(param_z), usecols=0,dtype=str))) - -#Extract cosmological parameters values from the simulations associated to each peak counts file +#Extract cosmological parameters values from the simulations associated to eachpeak counts file params = np.array([takes_params(f) for f in file_name_peaks_Maps]) + #identifies fiducial cosmology index index_fiducial = 15 params_fiducial = params[index_fiducial] -#load the peaks distributions for the theoretical prediction -Peaks_Maps_DM = np.array([np.load('../input/peaks_z{}/%s'.format(param_z)%(fn), mmap_mode='r') for fn in file_name_peaks_Maps])[:,:,:param_cut] +#load the peaks distributions for the theoretical prediction +Peaks_Maps_DM = np.array([np.load(maindir+'/input/peaks_z{}/%s'.format(param_z)%(fn), mmap_mode='r') for fn in file_name_peaks_Maps])[:,:,:param_cut] -# load baryon corrections -bar_corr = np.load('.././input/{}_correction.npy'.format(param_baryonic_correction))[:param_cut] +# load baryon corrections +bar_corr = 
np.load(maindir+'/input/{}_correction.npy'.format(param_baryonic_correction))[:param_cut] -#Apply the baryonic correction +#Apply the baryonic correction Peaks_Maps = np.copy(Peaks_Maps_DM) if(param_baryonic_correction == 'no'): @@ -55,73 +71,69 @@ for i in range(Peaks_Maps_DM.shape[0]): for j in range(Peaks_Maps_DM.shape[1]): Peaks_Maps[i,j,:] = Peaks_Maps_DM[i,j,:] * bar_corr - -# take the mean over realizations + +# take the mean over realizations Peaks_Maps_mean=np.mean(Peaks_Maps, axis=1) -# create array for snr histogram +# create array for snr histogram snr_array=np.linspace(-2,6,31) snr_centers=0.5*(snr_array[1:]+snr_array[:-1]) -#Load peaks distribution for data -peaks_data = np.load('.././input/peaks_mean_{}.npy'.format(param_cal), mmap_mode='r')[:param_cut] +#Load peaks distribution for data +peaks_data = np.load(maindir+'/input/peaks_mean_{}.npy'.format(param_cal), mmap_mode='r')[:param_cut] -#Train Gaussian Processes regressor -#this returns a tuple consisting of (list of GP, scaling) +#Train Gaussian Processes regressor +#this returns a tuple consisting of (list of GP, scaling) ncpu = cpu_count() gp_scaling=np.array([Parallel(n_jobs = ncpu, verbose = 5)(delayed(gp_train)(index_bin, params_train = params, obs_train = Peaks_Maps) for index_bin in range(Peaks_Maps.shape[2]))]).reshape(Peaks_Maps.shape[2], 2) gp_list=gp_scaling[:,0] scaling=gp_scaling[:,1] -#Covariance matrix +#Covariance matrix #Load peaks to compute covariance matrix -cov_peaks_DM = np.load('.././input/convergence_gal_mnv0.00000_om0.30000_As2.1000_peaks_2arcmin_{}_b030_snr_min_max_ngal_7.npy'.format(param_z_cov), mmap_mode='r')[:,:param_cut] +cov_peaks_DM = np.load(maindir+'/input/convergence_gal_mnv0.00000_om0.30000_As2.1000_peaks_2arcmin_{}_b030_snr_min_max_ngal_7.npy'.format(param_z_cov),\ + mmap_mode='r')[:,:param_cut] -#Apply Baryonic Correction +#Apply Baryonic Correction cov_peaks = np.copy(cov_peaks_DM) for i in range(Peaks_Maps_DM.shape[1]): cov_peaks[i,:] = cov_peaks_DM[i,:] * bar_corr -#Compute covariance matrix and scale it for sky coverage +#Compute covariance matrix and scale it for sky coverage cov=(1/n_patches)*np.cov(cov_peaks.T) -#compute the inverse of the covariance +#compute the inverse of the covariance icov=la.inv(cov) -# get constraints with MCMC -# compute hartlap factor +# get constraints with MCMC +# compute hartlap factor n_real=Peaks_Maps_DM.shape[1] n_bins=len(peaks_data) norm=(n_real-n_bins-2)/(n_real-1) -#Define priors (range of sims) -M_nu_min = 0.06 # minimum from oscillation experiments +#Define priors (range of sims) +M_nu_min = 0.06 # minimum from oscillation experiments M_nu_max = 0.62 Omega_m_min = 0.18 Omega_m_max = 0.42 A_s_min = 1.29 A_s_max = 2.91 -#Specify number of dimensions for parameter space, number of walkers, initial position - +#Specify number of dimensions for parameter space, number of walkers, initial position ndim, nwalkers = 3,250 pos = [params_fiducial + 1e-3*np.random.randn(ndim) for i in range(nwalkers)] print("{0} CPUs".format(ncpu)) - -with Pool() as pool: +with Pool(processes=nproc) as pool: sampler = emcee.EnsembleSampler(nwalkers, ndim, lnpost, pool=pool, args=[peaks_data, icov,gp_list, scaling, norm,M_nu_min, M_nu_max, Omega_m_min, Omega_m_max, A_s_min, A_s_max]) start = time.time() sampler.run_mcmc(pos, 6500, progress=True) end = time.time() multi_time = end - start print("Multiprocessing took {0:.1f} seconds".format(multi_time)) -# remove burn-in +# remove burn-in samples = sampler.chain[:,200:, :].reshape((-1, ndim)) #save results 
-np.save('.././output/constraints_z{}_{}_{}corr_{}snr.npy'.format(param_z,param_cal,param_baryonic_correction,param_cut),samples) - - - +np.save(maindir+'/output/constraints_z{}_{}_{}corr_{}snr.npy'.format(param_z,param_cal,param_baryonic_correction,param_cut),samples) diff --git a/scripts/candide_CFIS_P3.sh b/scripts/candide_CFIS_P3.sh index 8330e9a..df58356 100644 --- a/scripts/candide_CFIS_P3.sh +++ b/scripts/candide_CFIS_P3.sh @@ -1,11 +1,11 @@ #!/bin/bash #PBS -k o ### resource allocation -#PBS -l nodes=1:ppn=8,walltime=10:00:00 +#PBS -l nodes=1:ppn=4,walltime=10:00:00,mem=32GB ### job name #PBS -N peaks-mcmc ### Redirect stdout and stderr to same file #PBS -j oe ## your bash script here: -~/.conda/envs/sp-peaks/bin/python /home/baumont/software/shear-pipe-peaks/example/constraints_CFIS-P3.py +~/.conda/envs/sp-peaks/bin/python /home/baumont/software/shear-pipe-peaks/example/constraints_CFIS-P3.py '/home/baumont/software/shear-pipe-peaks' --nproc 4 From 64732727a2b0edcffc206db80833f7f280e8d2c1 Mon Sep 17 00:00:00 2001 From: lbaumo Date: Thu, 31 Aug 2023 12:38:22 +0200 Subject: [PATCH 10/12] comments on bookkeeping script --- example/cosmoSLICS_bookkeeping.ipynb | 27 ++++++++++++++++++--------- 1 file changed, 18 insertions(+), 9 deletions(-) diff --git a/example/cosmoSLICS_bookkeeping.ipynb b/example/cosmoSLICS_bookkeeping.ipynb index 1665b15..788a60c 100644 --- a/example/cosmoSLICS_bookkeeping.ipynb +++ b/example/cosmoSLICS_bookkeeping.ipynb @@ -35,8 +35,9 @@ "outputs": [], "source": [ "import numpy as np\n", + "# this should go in SLICS interface module- check martin's existing part of SLICS interface module, this may be carried out in a simplier way\n", "\n", - "def process_files(file_paths):\n", + "def read_SLICS_cats(file_paths):\n", " \"\"\"Reads the information in the filename.\n", "\n", " Parameters\n", @@ -92,7 +93,7 @@ " file_paths = file.readlines()\n", " file_paths = [path.strip() for path in file_paths]\n", "\n", - "data = process_files(file_paths)" + "data = read_SLICS_cats(file_paths)" ] }, { @@ -101,7 +102,8 @@ "metadata": {}, "outputs": [], "source": [ - "def read_cosmo_params(file_path):\n", + "# this should go in SLICS interface module\n", + "def read_SLICS_cosmo_params(file_path):\n", " \"\"\"Reads the cosmological parameters from the .dat file.\n", "\n", " Parameters\n", @@ -145,11 +147,13 @@ "metadata": {}, "outputs": [], "source": [ + "# this should be a function\n", + "\n", "# Path to the .dat file\n", "dat_file_path = \"/home/tersenov/shear-pipe-peaks/example/CosmoTable.dat\"\n", "\n", "# Read the cosmological parameters from the .dat file\n", - "cosmo_params = read_cosmo_params(dat_file_path)\n", + "cosmo_params = read_SLICS_cosmo_params(dat_file_path)\n", "\n", "# Map the IDs in the recarray to the corresponding cosmological parameters\n", "mapped_params = []\n", @@ -229,6 +233,7 @@ "metadata": {}, "outputs": [], "source": [ + "# I feel like we already have this function with the SLICS interface\n", "def read_catalog_data(catalog_data):\n", " ra = catalog_data[0]\n", " dec = catalog_data[1]\n", @@ -237,6 +242,10 @@ " kappa_sim = catalog_data[8]\n", " return ra, dec, g1_sim, g2_sim, kappa_sim\n", "\n", + "# we should discuss this- the following functions will go into a mass-map module that will \n", + "# contain the different methods. 
do they all require binned data?, \n", + "# at any rate, we should have a separate binning function\n", + "\n", "def create_kappa_map(ra, dec, g1_sim, g2_sim, size_x_deg=10, size_y_deg=10, pixel_size_emap_amin=0.4):\n", " x, y = radec2xy(np.mean(ra), np.mean(dec), ra, dec) # Project (ra,dec) -> (x,y)\n", "\n", @@ -249,6 +258,7 @@ " kappaE, kappaB = ks93(e1map, -e2map) # make kappa map (the minus sign has to be here for our data conventions)\n", " return kappaE, kappaB\n", "\n", + "\n", "def add_noise_to_kappa_map(kappa_map, shape_noise, n_gal, pix_arcmin):\n", " sigma_noise_CFIS = shape_noise / (np.sqrt(2 * n_gal * pix_arcmin**2))\n", " noise_map_CFIS_z05 = sigma_noise_CFIS * np.random.randn(kappa_map.shape[0], kappa_map.shape[1]) # generate noise map\n", @@ -296,7 +306,7 @@ " snr_maps.append(snr_map)\n", " \n", " return snr_maps\n", - "\n", + "# this should go into the summary statistics module that will have single scale peak counts, multi-scale peak counts, l1 norm, etc\n", "def compute_single_scale_peak_counts(snr_map, kappa_snr):\n", " \"\"\"\n", " Compute peak counts for a single SNR map.\n", @@ -615,7 +625,7 @@ ], "metadata": { "kernelspec": { - "display_name": "base", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -629,9 +639,8 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.9" - }, - "orig_nbformat": 4 + "version": "3.9.12" + } }, "nbformat": 4, "nbformat_minor": 2 From 7a4092a0a81ca41bf6a908311cc958ccc0e77da6 Mon Sep 17 00:00:00 2001 From: lbaumo Date: Thu, 31 Aug 2023 12:46:43 +0200 Subject: [PATCH 11/12] created new modules --- sp_peaks/mapping.py | 8 ++++++++ sp_peaks/summary_statistics.py | 8 ++++++++ 2 files changed, 16 insertions(+) create mode 100644 sp_peaks/mapping.py create mode 100644 sp_peaks/summary_statistics.py diff --git a/sp_peaks/mapping.py b/sp_peaks/mapping.py new file mode 100644 index 0000000..729cba2 --- /dev/null +++ b/sp_peaks/mapping.py @@ -0,0 +1,8 @@ +"""MAPPING. + +:Name: mapping.py + +:Description: This package contains methods to create kappa maps and snr maps from shear catalogs + +:Authors: Lucie Baumont Martin Kilbinger +""" diff --git a/sp_peaks/summary_statistics.py b/sp_peaks/summary_statistics.py new file mode 100644 index 0000000..4a281b0 --- /dev/null +++ b/sp_peaks/summary_statistics.py @@ -0,0 +1,8 @@ +"""SUMMARY STATISTICS. + +:Name: summary_statistics.py + +:Description: This package contains methods to compute summary statistics from mass maps + +:Authors: Lucie Baumont Martin Kilbinger +""" From d3a164e686dafd0caa700f937ff6d6404d9ea500 Mon Sep 17 00:00:00 2001 From: lbaumo Date: Thu, 31 Aug 2023 12:53:09 +0200 Subject: [PATCH 12/12] new modules --- sp_peaks/mapping.py | 62 +++++++++++++++++++++++++++++++++- sp_peaks/summary_statistics.py | 35 +++++++++++++++++++ 2 files changed, 96 insertions(+), 1 deletion(-) diff --git a/sp_peaks/mapping.py b/sp_peaks/mapping.py index 729cba2..0797a83 100644 --- a/sp_peaks/mapping.py +++ b/sp_peaks/mapping.py @@ -1,4 +1,4 @@ -"""MAPPING. 
+"""MAPPING.A :Name: mapping.py @@ -6,3 +6,63 @@ :Authors: Lucie Baumont Martin Kilbinger """ +def create_kappa_map(ra, dec, g1_sim, g2_sim, size_x_deg=10, size_y_deg=10, pixel_size_emap_amin=0.4): + x, y = radec2xy(np.mean(ra), np.mean(dec), ra, dec) # Project (ra,dec) -> (x,y) + + Nx = int(size_x_deg / pixel_size_emap_amin * 60) + Ny = int(size_y_deg / pixel_size_emap_amin * 60) + + e1map, e2map = bin2d(x, y, npix=(Nx, Ny), v=(g1_sim, g2_sim)) # bin the shear field into a 2D map + emap = np.array([e1map,e2map]) # stack the two components into a single array + + kappaE, kappaB = ks93(e1map, -e2map) # make kappa map (the minus sign has to be here for our data conventions) + return kappaE, kappaB + + +def add_noise_to_kappa_map(kappa_map, shape_noise, n_gal, pix_arcmin): + sigma_noise_CFIS = shape_noise / (np.sqrt(2 * n_gal * pix_arcmin**2)) + noise_map_CFIS_z05 = sigma_noise_CFIS * np.random.randn(kappa_map.shape[0], kappa_map.shape[1]) # generate noise map + kappa_map_noisy = kappa_map + noise_map_CFIS_z05 # Add noise to the mass map + return kappa_map_noisy, noise_map_CFIS_z05 + +def smooth_kappa_map(kappa_map, pixel_size_emap_amin): + # Set the standard deviation of the Gaussian filter based on the pixel size of the kappa map + precision_Peaks = 2 / pixel_size_emap_amin # pixel_size_emap_amin is the pixel size of the kappa map in arcminutes + kappa_map_smoothed = ndi.gaussian_filter(kappa_map, precision_Peaks) + return kappa_map_smoothed + +def convert_to_snr_map(kappa_map_smoothed, noise_map_smoothed): + snr_map = kappa_map_smoothed / np.std(noise_map_smoothed) + return snr_map + +def compute_multiscale_snr_maps(image, noise, nscales): + """ + Compute SNR maps for each wavelet scale of a noisy image. + + Parameters: + image (numpy.ndarray): The noiseless image. + noise (numpy.ndarray): The noise to be added to the image. + nscales (int): Number of wavelet scales for starlet decomposition. + + Returns: + snr_maps (list of numpy.ndarray): List of SNR maps for each scale. + """ + # Add noise to the noiseless image + image_noisy = image + noise + + # Perform starlet decomposition + image_starlet = starlet2d(image_noisy, nscales) + + # Estimate the noise level + noise_estimate = mad_std(image_noisy) + coeff_j = noise_coeff(image, nscales) + + snr_maps = [] + for image_j, std_co in zip(image_starlet, coeff_j): + sigma_j = std_co * noise_estimate + + # Compute SNR map + snr_map = image_j / sigma_j + snr_maps.append(snr_map) + + return snr_maps diff --git a/sp_peaks/summary_statistics.py b/sp_peaks/summary_statistics.py index 4a281b0..1d3d4e6 100644 --- a/sp_peaks/summary_statistics.py +++ b/sp_peaks/summary_statistics.py @@ -6,3 +6,38 @@ :Authors: Lucie Baumont Martin Kilbinger """ + +def compute_single_scale_peak_counts(snr_map, kappa_snr): + """ + Compute peak counts for a single SNR map. + + Parameters: + snr_map (numpy.ndarray): SNR map. + kappa_snr (numpy.ndarray): Array of kappa values corresponding to the SNR map. + + Returns: + kappa_th_center_snr (numpy.ndarray): Array of kappa threshold centers for peak counts. + peak_counts (numpy.ndarray): Peak counts for the given SNR map. + """ + kappa_th_center_snr = 0.5 * (kappa_snr[:-1] + kappa_snr[1:]) + peak_counts = peaks.peaks_histogram(snr_map, kappa_snr)[0] + return kappa_th_center_snr, peak_counts + +def compute_multiscale_peak_counts(snr_maps, kappa_snr): + """ + Compute peak counts for each wavelet scale of SNR maps. + + Parameters: + snr_maps (list of numpy.ndarray): List of SNR maps for each scale. 
+ kappa_snr (numpy.ndarray): Array of kappa values corresponding to SNR maps. + + Returns: + kappa_th_center_snr (numpy.ndarray): Array of kappa threshold centers for peak counts. + peak_counts (list of numpy.ndarray): List of peak counts for each scale. + """ + kappa_th_center_snr = 0.5 * (kappa_snr[:-1] + kappa_snr[1:]) + + peak_counts = [peaks.peaks_histogram(snr_map, kappa_snr)[0] for snr_map in snr_maps] + + return kappa_th_center_snr, peak_counts +
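+
+# Minimal usage sketch (assumes `numpy` is imported as `np` and `lenspack.peaks` as `peaks`
+# at module level, and that `snr_maps` comes from sp_peaks.mapping.compute_multiscale_snr_maps):
+#
+#     kappa_snr = np.linspace(-2, 6, 31)
+#     centers, counts = compute_multiscale_peak_counts(snr_maps, kappa_snr)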