Merged
18 commits
963440d  vectorized mass_distribution purity exact first version (henriquelettieri, Dec 5, 2025)
239d70b  recipe updated with 3d calculation for impure cluster counts (henriquelettieri, Dec 9, 2025)
74fbc1f  combined purity non purity in one function (henriquelettieri, Dec 11, 2025)
96fc85d  binned_counts tests for new counts with purity code (henriquelettieri, Dec 12, 2025)
47304c3  binned_counts tests for new counts with purity code (henriquelettieri, Dec 12, 2025)
ec3bf74  add new assert in _get_theory_prediction_counts and purity in shear c… (henriquelettieri, Dec 27, 2025)
abcffb2  add new assert in _get_theory_prediction_counts and purity in shear c… (henriquelettieri, Dec 27, 2025)
4716996  add new assert in _get_theory_prediction_counts and purity in shear c… (henriquelettieri, Dec 27, 2025)
bf27930  add new assert in _get_theory_prediction_counts and purity in shear c… (henriquelettieri, Dec 27, 2025)
9e06d7c  added comparisons in test_grid_shear_matches_exact_within_tolerance a… (henriquelettieri, Jan 5, 2026)
02a616e  added comparisons in test_grid_shear_matches_exact_within_tolerance a… (henriquelettieri, Jan 5, 2026)
9c2101b  added comparisons in test_grid_shear_matches_exact_within_tolerance a… (henriquelettieri, Jan 5, 2026)
9150e09  added comparisons in test_grid_shear_matches_exact_within_tolerance a… (henriquelettieri, Jan 5, 2026)
04644b7  added comparisons in test_grid_shear_matches_exact_within_tolerance a… (henriquelettieri, Jan 5, 2026)
a5af570  new version (henriquelettieri, Jan 5, 2026)
efbd70d  new version (henriquelettieri, Jan 5, 2026)
cc63cef  updating shear calculation (henriquelettieri, Jan 13, 2026)
2a93f25  solving the low rel_err for completeness in shear calculation (henriquelettieri, Jan 13, 2026)
2 changes: 1 addition & 1 deletion crow/__init__.py
@@ -8,4 +8,4 @@
from .recipes.binned_grid import GridBinnedClusterRecipe
from .recipes.binned_parent import BinnedClusterRecipe

__version__ = "1.0.0"
__version__ = "1.0.1"
162 changes: 96 additions & 66 deletions crow/recipes/binned_exact.py
@@ -60,74 +60,62 @@ def _setup_with_completeness(self):
self._completeness_distribution = self.completeness.distribution

def _setup_with_purity(self):
"""Makes mass distribution use additional integral with completeness"""
"""Makes mass distribution use additional integral with purity"""
if self.purity is None:
self._mass_distribution_distribution = self.mass_distribution.distribution
else:
self._mass_distribution_distribution = self._impure_mass_distribution
self._mass_distribution_distribution = self._mass_distribution_purity

def _impure_mass_distribution(self, log_mass, z, log_mass_proxy_limits):
def _mass_distribution_purity(self, log_mass, z, log_mass_proxy):

##############################
# Fix this function, Henrique
# Good luck!
##############################

integrator = NumCosmoIntegrator(
relative_tolerance=1e-6,
absolute_tolerance=1e-12,
return (
self.mass_distribution.gaussian_kernel(log_mass, z, log_mass_proxy)
/ self.purity.distribution(log_mass_proxy, z)
* np.log(10)
)

def integration_func(int_args, extra_args):
ln_mass_proxy = int_args[:, 0]
log_mass_proxy = ln_mass_proxy / np.log(10.0)
return np.array(
[
self.mass_distribution.gaussian_kernel(
log_mass, z, np.array([_log_mass_proxy])
)
/ self.purity.distribution(np.array([_log_mass_proxy]), z)
for _log_mass_proxy in log_mass_proxy
]
)

integrator.integral_bounds = [
(
np.log(10.0) * log_mass_proxy_limits[0],
np.log(10.0) * log_mass_proxy_limits[1],
)
]

return integrator.integrate(integration_func)

def _get_theory_prediction_counts(
self,
average_on: None | ClusterProperty = None,
) -> Callable[
[npt.NDArray[np.float64], npt.NDArray[np.float64], tuple[float, float], float],
[
npt.NDArray[np.float64],
npt.NDArray[np.float64],
npt.NDArray[np.float64],
float,
],
npt.NDArray[np.float64],
]:
"""Get a callable that evaluates a cluster theory prediction.

Returns a callable function that accepts mass, redshift, mass proxy limits,
Returns a callable function that accepts mass, redshift, mass_proxy,
and the sky area of your survey and returns the theoretical prediction for the
expected number of clusters.
expected number of clusters including false detections.
"""

def theory_prediction(
mass: npt.NDArray[np.float64],
z: npt.NDArray[np.float64],
mass_proxy_limits: tuple[float, float],
mass_proxy: npt.NDArray[np.float64],
sky_area: float,
):
prediction = (
self.cluster_theory.comoving_volume(z, sky_area)
* self.cluster_theory.mass_function(mass, z)
* self._completeness_distribution(mass, z)
* self.redshift_distribution.distribution()
* self._mass_distribution_distribution(mass, z, mass_proxy_limits)
)

if self.purity == None:
assert (
len(mass_proxy) == 2
), "mass_proxy with no purity should be size 2"
prediction *= self._mass_distribution_distribution(
mass, z, (mass_proxy[0], mass_proxy[1])
)
else:
prediction *= self._mass_distribution_distribution(mass, z, mass_proxy)

if average_on is None:
return prediction

@@ -149,7 +137,7 @@ def _get_function_to_integrate_counts(
[
npt.NDArray[np.float64],
npt.NDArray[np.float64],
tuple[float, float],
npt.NDArray[np.float64],
float,
],
npt.NDArray[np.float64],
@@ -167,11 +155,14 @@ def function_mapper(
mass = int_args[:, 0]
z = int_args[:, 1]

mass_proxy_low = extra_args[0]
mass_proxy_high = extra_args[1]
sky_area = extra_args[2]
if self.purity == None:
mass_proxy = np.array([extra_args[0], extra_args[1]])
sky_area = extra_args[2]
else:
mass_proxy = int_args[:, 2]
sky_area = extra_args[0]

return prediction(mass, z, (mass_proxy_low, mass_proxy_high), sky_area)
return prediction(mass, z, mass_proxy, sky_area)

return function_mapper

@@ -188,11 +179,22 @@ def evaluate_theory_prediction_counts(
using the Murata 2019 binned mass-richness relation and assuming perfectly
measured redshifts.
"""
self.integrator.integral_bounds = [
self.mass_interval,
z_edges,
]
self.integrator.extra_args = np.array([*log_proxy_edges, sky_area])
assert len(log_proxy_edges) == 2, "log_proxy_edges should be size 2"
assert len(z_edges) == 2, "z_edges should be size 2"

if self.purity == None:
self.integrator.integral_bounds = [
self.mass_interval,
z_edges,
]
self.integrator.extra_args = np.array([*log_proxy_edges, sky_area])
else:
self.integrator.integral_bounds = [
self.mass_interval,
z_edges,
log_proxy_edges,
]
self.integrator.extra_args = np.array([sky_area])

theory_prediction = self._get_theory_prediction_counts(average_on)
prediction_wrapper = self._get_function_to_integrate_counts(theory_prediction)
@@ -208,7 +210,7 @@ def _get_theory_prediction_shear_profile(
[
npt.NDArray[np.float64],
npt.NDArray[np.float64],
tuple[float, float],
npt.NDArray[np.float64],
float,
float,
],
@@ -224,7 +226,7 @@ def _get_theory_prediction_shear_profile(
def theory_prediction(
mass: npt.NDArray[np.float64],
z: npt.NDArray[np.float64],
mass_proxy_limits: tuple[float, float],
mass_proxy: npt.NDArray[np.float64],
sky_area: float,
radius_center: float,
):
@@ -233,7 +235,6 @@ def theory_prediction(
* self.cluster_theory.mass_function(mass, z)
* self.redshift_distribution.distribution()
* self._completeness_distribution(mass, z)
* self.mass_distribution.distribution(mass, z, mass_proxy_limits)
)
if average_on is None:
# pylint: disable=no-member
@@ -248,6 +249,18 @@ def theory_prediction(
z=z,
radius_center=radius_center,
)
if self.purity == None:
assert (
len(mass_proxy) == 2
), "mass_proxy with no purity should be size 2"
prediction *= self._mass_distribution_distribution(
mass, z, (mass_proxy[0], mass_proxy[1])
)
else:
prediction *= self._mass_distribution_distribution(
mass, z, mass_proxy
)

return prediction

return theory_prediction
@@ -258,7 +271,7 @@ def _get_function_to_integrate_shear_profile(
[
npt.NDArray[np.float64],
npt.NDArray[np.float64],
tuple[float, float],
npt.NDArray[np.float64],
float,
float,
],
@@ -274,16 +287,20 @@ def _get_function_to_integrate_shear_profile(
def function_mapper(
int_args: npt.NDArray, extra_args: npt.NDArray
) -> npt.NDArray[np.float64]:

mass = int_args[:, 0]
z = int_args[:, 1]

mass_proxy_low = extra_args[0]
mass_proxy_high = extra_args[1]
sky_area = extra_args[2]
radius_center = extra_args[3]
return prediction(
mass, z, (mass_proxy_low, mass_proxy_high), sky_area, radius_center
)
if self.purity == None:
mass_proxy = np.array([extra_args[0], extra_args[1]])
sky_area = extra_args[2]
radius_center = extra_args[3]
else:
mass_proxy = int_args[:, 2]
sky_area = extra_args[0]
radius_center = extra_args[1]

return prediction(mass, z, mass_proxy, sky_area, radius_center)

return function_mapper

@@ -301,15 +318,28 @@ def evaluate_theory_prediction_lensing_profile(
using the Murata 2019 binned mass-richness relation and assuming perfectly
measured redshifts.
"""
self.integrator.integral_bounds = [
self.mass_interval,
z_edges,
]
assert len(log_proxy_edges) == 2, "log_proxy_edges should be size 2"
assert len(z_edges) == 2, "z_edges should be size 2"

if self.purity == None:
self.integrator.integral_bounds = [
self.mass_interval,
z_edges,
]
extra_args = [*log_proxy_edges]
else:
self.integrator.integral_bounds = [
self.mass_interval,
z_edges,
log_proxy_edges,
]
extra_args = []

deltasigma_list = []

for radius_center in radius_centers:
self.integrator.extra_args = np.array(
[*log_proxy_edges, sky_area, radius_center]
)
extra_args.extend([sky_area, radius_center])
self.integrator.extra_args = np.array(extra_args)
if self.cluster_theory._beta_parameters is not None:
self.cluster_theory.set_beta_s_interp(*z_edges)
theory_prediction = self._get_theory_prediction_shear_profile(average_on)
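Taken together, the binned_exact.py changes make the recipe branch on whether a purity model is configured: without purity, the log mass-proxy bin edges still travel through the integrator's extra_args and the counts integral stays two-dimensional in (mass, z); with purity, the log mass proxy becomes a third integration variable and the mass-proxy kernel is divided by the purity distribution. Below is a minimal sketch of that pattern, assuming an integrator that takes per-dimension (low, high) bounds as NumCosmoIntegrator does in the removed code; purity_weighted_kernel and setup_counts_integration are illustrative stand-in names, not attributes of the recipe.

import numpy as np


def purity_weighted_kernel(gaussian_kernel, purity_distribution, log_mass, z, log_mass_proxy):
    # Mirrors the new _mass_distribution_purity: the observed-proxy kernel is
    # divided by the purity and multiplied by ln(10) for the log10-proxy measure.
    return (
        gaussian_kernel(log_mass, z, log_mass_proxy)
        / purity_distribution(log_mass_proxy, z)
        * np.log(10)
    )


def setup_counts_integration(mass_interval, z_edges, log_proxy_edges, sky_area, has_purity):
    # Without purity: 2D (mass, z) integral; the proxy bin edges and sky area
    # ride along in extra_args. With purity: 3D integral whose third dimension
    # is the log mass proxy, so extra_args carries only the sky area.
    if not has_purity:
        bounds = [mass_interval, z_edges]
        extra_args = np.array([*log_proxy_edges, sky_area])
    else:
        bounds = [mass_interval, z_edges, log_proxy_edges]
        extra_args = np.array([sky_area])
    return bounds, extra_args

The lensing-profile path repeats the same branching, with sky_area and radius_center appended to extra_args inside the loop over radius bins.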
2 changes: 0 additions & 2 deletions crow/recipes/binned_grid.py
@@ -111,7 +111,6 @@ def _get_hmf_grid(
vol = self.cluster_theory.comoving_volume(z, sky_area)
# assign
self._hmf_grid[key] = vol[:, np.newaxis] * mass_function_2d

return self._hmf_grid[key]

def _get_mass_richness_grid(
@@ -147,7 +146,6 @@ def _get_completeness_grid(self, z: npt.NDArray[np.float64], key):
self._completeness_grid[key] = self._completeness_distribution(
self.log_mass_grid[np.newaxis, :], z[:, np.newaxis]
)

return self._completeness_grid[key]

def _get_purity_grid(