Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
57 changes: 56 additions & 1 deletion txpipe/lsstools/lsstools.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,11 @@ def __init__(self, tomobin=None): # add fracdet option here
self.ndens_err = None
self.covmat = None

# place to save histograms of SP pixel values (finer binning than the 1D trends, always equally spaced)
self.s_fine_grid = np.array([])
self.npix_fine_grid = np.array([])
self.map_index_fine_grid = np.array([])

self.ndens_models = {}
self.chi2 = {}

Expand All @@ -49,6 +54,7 @@ def add_correlation(
weight=None,
sys_name=None,
use_precompute=False,
do_grid_hist=True,
):
"""
Add a 1d density correlation
Expand All @@ -75,6 +81,8 @@ def add_correlation(
use_precompute: bool
If True will use pre-computed boolean arrays and pixel number counts for this SP map
If False will compute the density in bins using numpy histogram
do_grid_hist: bool
if True, will also compute a histogram of the SP map values (on a fine grid with equally spaced bins)
"""
if sys_name is not None:
self.mapnames[map_index] = str(sys_name)
Expand Down Expand Up @@ -114,6 +122,13 @@ def add_correlation(

# sum of sys value * frac in sys bin (to make mean)
sumsys_sys, _ = np.histogram(sys_vals, bins=edges, weights=sys_vals * frac)

if do_grid_hist:
s_fine_grid_edges = np.linspace(edges.min(), edges.max(), 3*len(edges))
npix_fine_grid, _ = np.histogram(sys_vals, bins=s_fine_grid_edges, weights=frac)
self.s_fine_grid = np.append(self.s_fine_grid, (s_fine_grid_edges[1:]+s_fine_grid_edges[:-1])/2. )
self.npix_fine_grid = np.append(self.npix_fine_grid, npix_fine_grid)
self.map_index_fine_grid = np.append(self.map_index_fine_grid, np.ones(len(s_fine_grid_edges) - 1) * map_index)

# do we want to include any excluded outliers in this normalization?
norm = np.ones(len(edges) - 1) * 1.0 / (np.sum(nobj_sys) / np.sum(npix_sys))
Expand Down Expand Up @@ -217,8 +232,38 @@ def get_covmat_singlemap(self, map_index):
return np.array([line[select_map] for line in self.covmat[select_map]])

def plot1d_singlemap(
self, filepath, map_index, label=None, extra_density_correlations=None, extra_density_labels=None
self, filepath, map_index, label=None, extra_density_correlations=None, extra_density_labels=None, plot_hist=False,
):
"""
Plot the 1D galaxy density correlation for a single systematic map.

This method plots the normalized galaxy number density as a function of the
mean value of a specified systematic map.

Parameters
----------
filepath : str
Path where the output figure will be saved.
map_index : int
Index identifying the systematic map to plot.
label : str, optional
Label used for the legend entry of the primary density correlation (this object)
extra_density_correlations : list, optional
List of additional density-correlation objects. These are plotted
for the same `map_index` for comparison.
extra_density_labels : list of str, optional
Labels corresponding to each entry in `extra_density_correlations`.
Must have the same length if provided.
plot_hist : bool, optional
if True will also plot a histogram of the number of pixels in each SP bin in the background

Notes
-----
- If the systematic maps were normalized, they are rescaled to physical
units using the stored mean and standard deviation before plotting.
- If density uncertainties are available, they are shown as error bars
(for the primary data) or shaded regions (for extra correlations).
"""
import matplotlib.pyplot as plt
import textwrap

Expand All @@ -235,6 +280,16 @@ def plot1d_singlemap(
sys_mean = self.sys_meta["mean"][int(map_index)]
smean = smean * sys_width + sys_mean

if plot_hist:
select_map_fine_grid = self.map_index_fine_grid == map_index
s_grid = self.s_fine_grid[select_map_fine_grid]
if self.sys_meta["normed"]:
s_grid = s_grid * sys_width + sys_mean
npix_grid = self.npix_fine_grid[select_map_fine_grid]
ax2 = ax.twinx()
ax2.set_yticklabels([])
ax2.fill_between(s_grid, np.zeros(len(s_grid)), npix_grid, color='k', alpha=0.1)

if self.ndens_err is None:
ax.plot(smean, ndens, ".", color="b")
else:
Expand Down
6 changes: 4 additions & 2 deletions txpipe/lssweights.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
FileCollection,
FiducialCosmology,
TomographyCatalog,
QPNOfZFile,
)
import numpy as np
import glob
Expand Down Expand Up @@ -358,7 +359,7 @@ class TXLSSDensityNullTests(TXLSSDensityBase):
TomographyCatalog,
), # this file is used by the stage to compute density correlations
("mask", MapsFile),
("lens_photoz_stack", HDFFile), # Photoz stack (need if using theory curve in covariance)
("lens_photoz_stack", QPNOfZFile), # Photoz stack (need if using theory curve in covariance)
(
"fiducial_cosmology",
FiducialCosmology,
Expand Down Expand Up @@ -462,6 +463,7 @@ def summarize_density(self, output_dir, dens_output, density_correlation):
density_correlation.plot1d_singlemap(
filepath,
imap,
plot_hist=True,
)

filepath = output_dir.path_for_file(f"chi2_hist_lens{ibin}.png")
Expand Down Expand Up @@ -703,7 +705,7 @@ def load_tracer(self, tomobin):
We need this to compute the theory guess
for the SV term
"""
with self.open_input("lens_photoz_stack") as f_lens:
with self.open_input("lens_photoz_stack", wrapper=True) as f_lens:
z, nz = f_lens.get_bin_n_of_z(tomobin)
return z, nz

Expand Down