-
Notifications
You must be signed in to change notification settings - Fork 11
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #3 from epaillas/main
Add BGS reconstruction examples
- Loading branch information
Showing
6 changed files
with
359 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,105 @@ | ||
import numpy as np
from pathlib import Path
from pypower import setup_logging, mpi, CatalogFFTPower
from pyrecon.metrics import MeshFFTCorrelator, MeshFFTPropagator, CatalogMesh
from pprint import pprint

# Measure pre- and post-reconstruction power-spectrum multipoles of the
# AbacusSummit BGS cubic-box mocks for every combination of reconstruction
# settings. MPI script: only the root rank reads the catalogues from disk;
# CatalogFFTPower scatters the positions from mpiroot to the other ranks.

# Set up logging
setup_logging()
mpicomm = mpi.COMM_WORLD
mpiroot = 0

# Simulation configuration.
redshift = 0.2
box_size = 2000.0           # box side length [Mpc/h]
box_center = 0
bias = 1.63                 # linear bias assumed during reconstruction (filenames)
los = 'z'                   # plane-parallel line-of-sight axis
node, phase = '000', '000'  # AbacusSummit cosmology node and realization phase

# Reconstruction settings to post-process (must match the catalogue filenames).
nmeshs = [512]
smooth_radii = [15.0]
# NOTE(review): the companion reconstruction script only runs 'multigrid' and
# 'ifft' -- confirm 'ifftp' catalogues exist, otherwise np.load below raises
# FileNotFoundError on the first 'ifftp' iteration.
recon_algos = ['multigrid', 'ifft', 'ifftp']
conventions = ['recsym', 'reciso']

data_dir = f'local_abacus/cubic/z{redshift}/\
AbacusSummit_base_c{node}_ph{phase}/reconstruction/'

for nmesh in nmeshs:
    for smooth_radius in smooth_radii:
        for recon_algo in recon_algos:
            for convention in conventions:

                if mpicomm.rank == mpiroot:
                    # ----- read data
                    data_fn = Path(data_dir,
                        f'data_{recon_algo}_{convention}_z{redshift}_nmesh{nmesh}_Rs{smooth_radius}_bias{bias}.npy')
                    data = np.load(data_fn, allow_pickle=True).item()
                    print(f'Shape of data: {len(data["Position_rec"])}')

                    # ----- read randoms (the 5 splits written by the
                    # reconstruction script, concatenated into one catalogue)
                    randoms = {}
                    randoms['Position_rec'] = np.empty((0, 3))
                    for i in range(1, 6):
                        randoms_fn = Path(data_dir,
                            f'randoms{i}_{recon_algo}_{convention}_z{redshift}_nmesh{nmesh}_Rs{smooth_radius}_bias{bias}.npy')
                        randoms_i = np.load(randoms_fn, allow_pickle=True).item()
                        randoms['Position_rec'] = np.append(randoms['Position_rec'], randoms_i['Position_rec'], axis=0)

                    print(f'Shape of concatenated randoms: {len(randoms["Position_rec"])}')

                else:
                    # Non-root ranks pass None: CatalogFFTPower broadcasts the
                    # catalogues internally from mpiroot.
                    data = {'Position': None, 'Position_rec': None}
                    randoms = {'Position': None, 'Position_rec': None}

                # ---- Compute power spectra

                kedges = np.arange(0.01, 0.5, 0.001)

                # Pre-reconstruction multipoles.
                # NOTE(review): this measurement does not depend on the loop
                # variables, so it is recomputed (and its output file
                # overwritten with presumably identical content) at every
                # iteration -- it could be hoisted out of the loops.
                poles = CatalogFFTPower(
                    data_positions1=data['Position'],
                    boxsize=box_size, boxcenter=box_center,
                    nmesh=512, resampler='cic',
                    interlacing=2, ells=(0, 1, 2, 3, 4), los=los,
                    edges=kedges, position_type='pos',
                    wrap=True, mpicomm=mpicomm, mpiroot=mpiroot,
                ).poles

                # Post-reconstruction multipoles: the shifted randoms act as
                # the displaced random catalogue of the FFT estimator.
                poles_recon = CatalogFFTPower(
                    data_positions1=data['Position_rec'],
                    boxsize=box_size, boxcenter=box_center,
                    shifted_positions1=randoms['Position_rec'],
                    nmesh=512, resampler='cic', interlacing=2,
                    ells=(0, 1, 2, 3, 4), los=los, edges=kedges, position_type='pos',
                    wrap=True, mpicomm=mpicomm, mpiroot=mpiroot,
                ).poles

                output_dir = f'local_abacus/cubic/z{redshift}/AbacusSummit_base_c{node}_ph{phase}/reconstruction/pk'
                Path(output_dir).mkdir(parents=True, exist_ok=True)

                # Save pre-reconstruction multipoles.
                cout = {
                    'k': poles.k,
                    'Pk_0': poles(ell=0, complex=False),
                    'Pk_1': poles(ell=1, complex=False),
                    'Pk_2': poles(ell=2, complex=False),
                    'Pk_3': poles(ell=3, complex=False),
                    'Pk_4': poles(ell=4, complex=False),
                }
                output_fn = Path(output_dir, f'Pk_z{redshift}.npy')
                np.save(output_fn, cout)

                # Save post-reconstruction multipoles (same k bins as the
                # pre-reconstruction measurement, hence poles.k).
                cout = {
                    'k': poles.k,
                    'Pk_0': poles_recon(ell=0, complex=False),
                    'Pk_1': poles_recon(ell=1, complex=False),
                    'Pk_2': poles_recon(ell=2, complex=False),
                    'Pk_3': poles_recon(ell=3, complex=False),
                    'Pk_4': poles_recon(ell=4, complex=False),
                }

                output_fn = Path(output_dir, f'Pk_{recon_algo}_{convention}_z{redshift}_nmesh{nmesh}_Rs{smooth_radius}_bias{bias}.npy')
                np.save(output_fn, cout)
|
||
|
||
|
||
|
||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,14 @@ | ||
#!/bin/bash
# Slurm batch job: measure the BGS cubic-box power spectra (pk_cubic.py)
# with 32 MPI ranks on one Haswell node, in the 30-minute debug queue.
#SBATCH --nodes=1
#SBATCH --account=desi
#SBATCH --ntasks-per-node=32
#SBATCH --cpus-per-task=1
#SBATCH --constraint=haswell
#SBATCH -q debug
#SBATCH -t 00:30:00

# Load the cosmodesi software stack and the MPI runtime.
source /global/common/software/desi/users/adematti/cosmodesi_environment.sh main
module load openmpi
# Disable HDF5 file locking (needed on some network filesystems).
export HDF5_USE_FILE_LOCKING=FALSE

time srun -n 32 python $HOME/data/mock_challenge/bgs/pk_cubic.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,100 @@ | ||
import sys
import asdf
import numpy as np
from pathlib import Path
from pyrecon.metrics import MeshFFTCorrelator, MeshFFTPropagator, CatalogMesh
from pypower.mesh import ArrayMesh
from pypower import setup_logging, mpi, MeshFFTPower

# Cross-correlate the reconstructed BGS density field with the linearly
# rescaled AbacusSummit initial-condition (IC) field, and save the resulting
# reconstruction propagator for every combination of settings.
# MPI script: only the root rank reads catalogues and the IC mesh from disk.

setup_logging()
mpicomm = mpi.COMM_WORLD
mpiroot = 0

# Simulation / reconstruction configuration (must match the filenames
# produced by the reconstruction script).
node, phase = '000', '000'
recon_algos = ['multigrid', 'ifft', 'ifftp']
conventions = ['recsym', 'reciso']
redshift = 0.2
nmesh = 512            # reconstruction mesh size (enters filenames only)
box_size = 2000.0      # box side length [Mpc/h]
box_center = 0.0
# NOTE(review): the companion reconstruction script uses smoothing radius
# 15.0 -- confirm Rs10.0 catalogues actually exist before running.
smooth_radii = [10.0]
los = 'z'
bias = 1.63            # linear galaxy bias of the reconstructed field
kedges = np.arange(0.01, 0.5, 0.001)
muedges = np.linspace(-1., 1., 5)

if mpicomm.rank == mpiroot:
    # Read initial condition density field
    data_dir = '/global/cfs/cdirs/desi/public/cosmosim/AbacusSummit/ic/AbacusSummit_base_c000_ph000'
    data_fn = Path(data_dir, 'ic_dens_N576.asdf')

    with asdf.open(data_fn, lazy_load=False) as af:
        mesh_ic = af['data']['density']
        growth_table = af['header']['GrowthTable']

    # rescale to z = 0.2 with the linear growth factors tabulated in the
    # file header, then shift the density contrast to 1 + delta
    factor = growth_table[0.2] / growth_table[99.0]
    rescaled_mesh_ic = mesh_ic * factor
    rescaled_mesh_ic += 1.0
else:
    rescaled_mesh_ic = None

# Distribute the IC mesh from the root rank across the communicator.
rescaled_mesh_ic = ArrayMesh(rescaled_mesh_ic, box_size, mpiroot=mpiroot, mpicomm=mpicomm)

for smooth_radius in smooth_radii:
    for recon_algo in recon_algos:
        for convention in conventions:

            if mpicomm.rank == mpiroot:

                # ----- read reconstructed data
                data_dir = f'local_abacus/cubic/z{redshift}/AbacusSummit_base_c{node}_ph{phase}/reconstruction'
                data_fn = Path(data_dir,
                    f'data_{recon_algo}_{convention}_z{redshift}_nmesh{nmesh}_Rs{smooth_radius}_bias{bias}.npy')
                data = np.load(data_fn, allow_pickle=True).item()
                print(f'Shape of data: {len(data["Position_rec"])}')

                # ----- read reconstructed randoms (5 splits, concatenated)
                randoms = {}
                randoms['Position'] = np.empty((0, 3))
                randoms['Position_rec'] = np.empty((0, 3))
                for i in range(1, 6):
                    randoms_dir = f'local_abacus/cubic/z{redshift}/AbacusSummit_base_c{node}_ph{phase}/reconstruction'
                    randoms_fn = Path(randoms_dir,
                        f'randoms{i}_{recon_algo}_{convention}_z{redshift}_nmesh{nmesh}_Rs{smooth_radius}_bias{bias}.npy')
                    randoms_i = np.load(randoms_fn, allow_pickle=True).item()
                    randoms['Position'] = np.append(randoms['Position'], randoms_i['Position'], axis=0)
                    randoms['Position_rec'] = np.append(randoms['Position_rec'], randoms_i['Position_rec'], axis=0)
                print(f'Shape of concatenated randoms: {len(randoms["Position_rec"])}')

            else:
                # Non-root ranks pass None; CatalogMesh broadcasts from mpiroot.
                data = {'Position': None, 'Position_rec': None}
                randoms = {'Position': None, 'Position_rec': None}

            # Paint reconstructed positions to a mesh at the IC resolution
            # (576^3). Only the reconstructed field is needed for the
            # propagator, so no pre-reconstruction mesh is painted.
            mesh_recon = CatalogMesh(
                data['Position_rec'], shifted_positions=randoms['Position_rec'],
                boxsize=box_size, boxcenter=box_center, nmesh=576, resampler='tsc',
                interlacing=2, position_type='pos', mpicomm=mpicomm, mpiroot=mpiroot
            )

            # Correlate the reconstructed mesh with the IC mesh in (k, mu)
            # bins, then reduce to the propagator; growth=bias divides out
            # the linear bias of the reconstructed field (pyrecon.metrics).
            correlator = MeshFFTCorrelator(mesh_recon, rescaled_mesh_ic, edges=(kedges, muedges), los=los)
            propagator = correlator.to_propagator(growth=bias)
            # NOTE(review): the transfer function (correlator.to_transfer)
            # is available from the same correlator if it is ever needed.

            output_dir = f'local_abacus/cubic/z{redshift}/AbacusSummit_base_c{node}_ph{phase}/reconstruction/propagator/'
            Path(output_dir).mkdir(parents=True, exist_ok=True)
            output_fn = Path(output_dir,
                f'propagator_{recon_algo}_{convention}_z{redshift}_nmesh{nmesh}_Rs{smooth_radius}_bias{bias}.npy')
            propagator.save(output_fn)
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,13 @@ | ||
#!/bin/bash
# Slurm batch job: compute reconstruction propagators (propagator.py)
# with 32 MPI ranks on one Haswell node, in the shared queue (2 h limit).
#SBATCH --nodes=1
#SBATCH --account=desi
#SBATCH --ntasks-per-node=32
#SBATCH --cpus-per-task=1
#SBATCH --constraint=haswell
#SBATCH -q shared
#SBATCH -t 2:00:00

# Load the cosmodesi software stack and the MPI runtime.
source /global/common/software/desi/users/adematti/cosmodesi_environment.sh main
module load openmpi

srun -n 32 python $HOME/data/mock_challenge/bgs/propagator.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,112 @@ | ||
import numpy as np
import h5py
from pathlib import Path
from cosmoprimo.fiducial import DESI
from pyrecon import (setup_logging, MultiGridReconstruction,
                     IterativeFFTReconstruction, IterativeFFTParticleReconstruction)

# Run density-field reconstruction on an AbacusSummit BGS cubic-box mock and
# save the shifted data and random catalogues for every combination of mesh
# size, smoothing radius, algorithm and RSD convention.

# Set up logging
setup_logging()

# define simulation and configuration parameters
node, phase = '000', '000'
redshift = 0.2
bias = 1.63                 # linear galaxy bias fed to reconstruction
magnitude_cut = -21         # keep galaxies with M_r < -21
box_size = 2000.            # box side length [Mpc/h]
box_center = 0.
nmeshs = [512]
# NOTE(review): the companion pk script also loops over 'ifftp'; add it here
# if those catalogues are wanted.
recon_algos = ['multigrid', 'ifft']
conventions = ['recsym', 'reciso']
smoothing_radii = [15.0]
los = 'z'                   # RSD applied along the z axis below
offset = box_center - box_size / 2

# define fiducial cosmology
cosmo = DESI()
# NOTE(review): `power` is never used below.
power = cosmo.get_fourier().pk_interpolator().to_1d(z=redshift)
# Linear growth rate f from the ratio of velocity to density amplitudes.
f = (cosmo.sigma8_z(z=redshift, of='theta_cb')
     / cosmo.sigma8_z(z=redshift, of='delta_cb'))
H_0 = 100.0                 # Hubble constant in h units [km/s/(Mpc/h)]
az = 1 / (1 + redshift)     # scale factor at the catalogue redshift
Omega_m = cosmo._params['Omega_cdm'] + cosmo._params['Omega_b']
Omega_l = 1 - Omega_m
Hz = H_0 * np.sqrt(Omega_m * (1 + redshift)**3 + Omega_l)

# ----- read data catalogue
data_dir = f'/global/homes/e/epaillas/data/mock_challenge/bgs/\
abacus_cubic/z{redshift:.3f}/AbacusSummit_base_c{node}_ph{phase}'
data_fn = Path(data_dir, f'BGS_box_ph{phase}.hdf5')
data = {}
with h5py.File(data_fn, 'r') as fin:
    M_r = fin['Data']['abs_mag'][()]
    idx1 = M_r < magnitude_cut
    data['Position'] = fin['Data']['pos'][()][idx1]
    data['Velocity'] = fin['Data']['vel'][()][idx1]

# apply redshift-space distortions along z and wrap back into the box
data['Position'][:, 2] += data['Velocity'][:, 2] / (az * Hz)
data['Position'] = (data['Position'] - offset) % box_size + offset

nden = len(data['Position']) / box_size ** 3  # mean number density

print(f"Shape of data pos: {np.shape(data['Position'])}")

# ----- run reconstruction
output_dir = f'local_abacus/cubic/z{redshift}/\
AbacusSummit_base_c{node}_ph{phase}/reconstruction/'
Path(output_dir).mkdir(parents=True, exist_ok=True)
for nmesh in nmeshs:
    for smooth_radius in smoothing_radii:
        for recon_algo in recon_algos:

            if recon_algo == 'multigrid':
                ReconstructionAlgorithm = MultiGridReconstruction
            elif recon_algo == 'ifft':
                ReconstructionAlgorithm = IterativeFFTReconstruction
            elif recon_algo == 'ifftp':
                # NOTE(review): IterativeFFTParticleReconstruction documents a
                # special 'data' argument to read_shifts for the data
                # catalogue -- verify before enabling 'ifftp'.
                ReconstructionAlgorithm = IterativeFFTParticleReconstruction
            else:
                raise ValueError(f'Invalid reconstruction algorithm: {recon_algo}')

            # The displacement field does not depend on the RSD convention,
            # so run reconstruction once per algorithm and reuse the solved
            # displacements for both conventions below.
            recon = ReconstructionAlgorithm(
                f=f, bias=bias, los=los, nmesh=nmesh,
                boxsize=box_size, boxcenter=box_center,
                wrap=True
            )
            recon.assign_data(data['Position'])
            recon.set_density_contrast(smoothing_radius=smooth_radius)
            recon.run()

            # Data positions: displacement + RSD removed in both conventions.
            data['Position_rec'] = data['Position'] - recon.read_shifts(data['Position'], field='disp+rsd')
            data['Position_rec'] = (data['Position_rec'] - offset) % box_size + offset

            for convention in conventions:

                # Validate the convention first, before writing any output.
                if convention == 'recsym':
                    field = 'disp+rsd'
                elif convention == 'reciso':
                    field = 'disp'
                else:
                    raise Exception('Invalid RSD convention.')

                output_fn = Path(output_dir,
                    f'data_{recon_algo}_{convention}_z{redshift}_nmesh{nmesh}_Rs{smooth_radius}_bias{bias}.npy')
                np.save(output_fn, data)

                # ----- build random catalogues: 20x the data count in total,
                # written as 5 independent splits
                nrand = 20 * int(nden * box_size ** 3)
                nrand_split = int(nrand / 5)
                for i in range(1, 6):
                    randoms = {}
                    # One uniform draw per Cartesian axis, stacked to (N, 3).
                    randoms['Position'] = np.array([np.random.uniform(box_center - box_size/2.,
                        box_center + box_size/2., nrand_split) for axis in range(3)]).T

                    print(f"Shape of randoms_{i} pos: {np.shape(randoms['Position'])}")

                    # Randoms: shift by 'disp+rsd' (recsym) or 'disp' (reciso).
                    randoms['Position_rec'] = randoms['Position'] - recon.read_shifts(randoms['Position'], field=field)
                    randoms['Position_rec'] = (randoms['Position_rec'] - offset) % box_size + offset

                    output_fn = Path(output_dir,
                        f'randoms{i}_{recon_algo}_{convention}_z{redshift}_nmesh{nmesh}_Rs{smooth_radius}_bias{bias}.npy')
                    np.save(output_fn, randoms)
|
||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,15 @@ | ||
#!/bin/bash
# Slurm batch job: run reconstruction (reconstruction_abacus_cubic.py) as a
# single task with 64 OpenMP threads on one Haswell node (debug queue).
#SBATCH --nodes=1
#SBATCH --account=desi
#SBATCH --ntasks-per-node=1
#SBATCH --cpus-per-task=64
#SBATCH --constraint=haswell
#SBATCH -q debug
#SBATCH -t 00:30:00

# Load the cosmodesi software stack and the MPI runtime.
source /global/common/software/desi/users/adematti/cosmodesi_environment.sh main
module load openmpi
# Disable HDF5 file locking (needed on some network filesystems).
export HDF5_USE_FILE_LOCKING=FALSE
# Match the thread count to the allocated cpus-per-task.
export OMP_NUM_THREADS=64

time srun -n 1 python $HOME/data/mock_challenge/bgs/reconstruction_abacus_cubic.py