Merged
Changes from 5 commits
Commits
18 commits
963440d
vectorized mass_distribution purity exact first version
henriquelettieri Dec 5, 2025
239d70b
recipe updated with 3d calculation for impure cluster counts
henriquelettieri Dec 9, 2025
74fbc1f
combined purity non purity in one function
henriquelettieri Dec 11, 2025
96fc85d
binned_counts tests for new counts with purity code
henriquelettieri Dec 12, 2025
47304c3
binned_counts tests for new counts with purity code
henriquelettieri Dec 12, 2025
ec3bf74
add new assert in _get_theory_prediction_counts and purity in shear c…
henriquelettieri Dec 27, 2025
abcffb2
add new assert in _get_theory_prediction_counts and purity in shear c…
henriquelettieri Dec 27, 2025
4716996
add new assert in _get_theory_prediction_counts and purity in shear c…
henriquelettieri Dec 27, 2025
bf27930
add new assert in _get_theory_prediction_counts and purity in shear c…
henriquelettieri Dec 27, 2025
9e06d7c
added comparisons in test_grid_shear_matches_exact_within_tolerance a…
henriquelettieri Jan 5, 2026
02a616e
added comparisons in test_grid_shear_matches_exact_within_tolerance a…
henriquelettieri Jan 5, 2026
9c2101b
added comparisons in test_grid_shear_matches_exact_within_tolerance a…
henriquelettieri Jan 5, 2026
9150e09
added comparisons in test_grid_shear_matches_exact_within_tolerance a…
henriquelettieri Jan 5, 2026
04644b7
added comparisons in test_grid_shear_matches_exact_within_tolerance a…
henriquelettieri Jan 5, 2026
a5af570
new version
henriquelettieri Jan 5, 2026
efbd70d
new version
henriquelettieri Jan 5, 2026
cc63cef
updating shear calculation
henriquelettieri Jan 13, 2026
2a93f25
solving the low rel_err for completeness in shear calculation
henriquelettieri Jan 13, 2026
95 changes: 47 additions & 48 deletions crow/recipes/binned_exact.py
@@ -60,74 +60,59 @@ def _setup_with_completeness(self):
self._completeness_distribution = self.completeness.distribution

def _setup_with_purity(self):
"""Makes mass distribution use additional integral with completeness"""
"""Makes mass distribution use additional integral with purity"""
if self.purity is None:
self._mass_distribution_distribution = self.mass_distribution.distribution
else:
self._mass_distribution_distribution = self._impure_mass_distribution
self._mass_distribution_distribution = self._mass_distribution_purity

def _impure_mass_distribution(self, log_mass, z, log_mass_proxy_limits):
def _mass_distribution_purity(self, log_mass, z, log_mass_proxy):

##############################
# Fix this function, Henrique
# Good luck!
##############################

integrator = NumCosmoIntegrator(
relative_tolerance=1e-6,
absolute_tolerance=1e-12,
return (
self.mass_distribution.gaussian_kernel(log_mass, z, log_mass_proxy)
/ self.purity.distribution(log_mass_proxy, z)
* np.log(10)
)

def integration_func(int_args, extra_args):
ln_mass_proxy = int_args[:, 0]
log_mass_proxy = ln_mass_proxy / np.log(10.0)
return np.array(
[
self.mass_distribution.gaussian_kernel(
log_mass, z, np.array([_log_mass_proxy])
)
/ self.purity.distribution(np.array([_log_mass_proxy]), z)
for _log_mass_proxy in log_mass_proxy
]
)

integrator.integral_bounds = [
(
np.log(10.0) * log_mass_proxy_limits[0],
np.log(10.0) * log_mass_proxy_limits[1],
)
]

return integrator.integrate(integration_func)

def _get_theory_prediction_counts(
self,
average_on: None | ClusterProperty = None,
) -> Callable[
[npt.NDArray[np.float64], npt.NDArray[np.float64], tuple[float, float], float],
[
npt.NDArray[np.float64],
npt.NDArray[np.float64],
npt.NDArray[np.float64],
float,
],
npt.NDArray[np.float64],
]:
"""Get a callable that evaluates a cluster theory prediction.

Returns a callable function that accepts mass, redshift, mass proxy limits,
Returns a callable function that accepts mass, redshift, mass_proxy,
and the sky area of your survey and returns the theoretical prediction for the
expected number of clusters.
expected number of clusters including false detections.
"""

def theory_prediction(
mass: npt.NDArray[np.float64],
z: npt.NDArray[np.float64],
mass_proxy_limits: tuple[float, float],
mass_proxy: npt.NDArray[np.float64],
sky_area: float,
):
prediction = (
self.cluster_theory.comoving_volume(z, sky_area)
* self.cluster_theory.mass_function(mass, z)
* self._completeness_distribution(mass, z)
* self.redshift_distribution.distribution()
* self._mass_distribution_distribution(mass, z, mass_proxy_limits)
)

if self.purity == None:
prediction *= self._mass_distribution_distribution(
mass, z, (mass_proxy[0], mass_proxy[1])
)
else:
prediction *= self._mass_distribution_distribution(mass, z, mass_proxy)

if average_on is None:
return prediction
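Editorial note on the hunk above: the np.log(10) factor in _mass_distribution_purity appears to be the Jacobian that lets the purity-weighted kernel be evaluated pointwise in log10(mass proxy) and handed to the outer integrator, replacing the removed per-call NumCosmoIntegrator that worked in ln(mass proxy) with bounds ln(10) * log_mass_proxy_limits. A minimal numerical check of that equivalence, using a hypothetical Gaussian kernel and purity model (neither taken from this codebase):

    import numpy as np
    from scipy.integrate import quad

    SIGMA = 0.3  # hypothetical proxy scatter in dex

    def kernel(log_mass, z, log_mass_proxy):
        # hypothetical Gaussian kernel in log10(mass proxy), centred on log_mass
        x = (log_mass_proxy - log_mass) / SIGMA
        return np.exp(-0.5 * x * x) / (np.sqrt(2.0 * np.pi) * SIGMA)

    def purity(log_mass_proxy, z):
        # hypothetical purity: fraction of detected clusters that are real
        return 0.8 + 0.15 * np.tanh(log_mass_proxy - 14.0)

    log_mass, z = 14.2, 0.3
    lo, hi = 13.8, 14.6  # log10 mass-proxy bin edges

    # Removed code path: integrate kernel/purity over ln(mass proxy),
    # with bounds ln(10) * (lo, hi).
    old, _ = quad(
        lambda lnp: kernel(log_mass, z, lnp / np.log(10.0))
        / purity(lnp / np.log(10.0), z),
        np.log(10.0) * lo,
        np.log(10.0) * hi,
    )

    # New code path: pointwise integrand kernel / purity * ln(10),
    # integrated over log10(mass proxy) directly.
    new, _ = quad(
        lambda logp: kernel(log_mass, z, logp) / purity(logp, z) * np.log(10.0),
        lo,
        hi,
    )

    print(old, new)  # the two agree to quadrature tolerance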

@@ -149,7 +134,7 @@ def _get_function_to_integrate_counts(
[
npt.NDArray[np.float64],
npt.NDArray[np.float64],
tuple[float, float],
npt.NDArray[np.float64],
float,
],
npt.NDArray[np.float64],
@@ -167,11 +152,14 @@ def function_mapper(
mass = int_args[:, 0]
z = int_args[:, 1]

mass_proxy_low = extra_args[0]
mass_proxy_high = extra_args[1]
sky_area = extra_args[2]
if self.purity == None:
mass_proxy = np.array([extra_args[0], extra_args[1]])
sky_area = extra_args[2]
else:
mass_proxy = int_args[:, 2]
sky_area = extra_args[0]

return prediction(mass, z, (mass_proxy_low, mass_proxy_high), sky_area)
return prediction(mass, z, mass_proxy, sky_area)

return function_mapper
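Editorial sketch (not part of the diff) of the two calling conventions function_mapper now dispatches between; the mock prediction function and the sky-area value are purely illustrative:

    import numpy as np

    def mock_prediction(mass, z, mass_proxy, sky_area):
        # stand-in for theory_prediction: just report what it receives
        return np.shape(mass), np.shape(mass_proxy), sky_area

    # Without purity: the integrator samples 2D points (mass, z); the proxy bin
    # edges and the sky area travel in extra_args = [proxy_lo, proxy_hi, sky_area].
    int_args_2d = np.random.rand(5, 2)
    extra_2d = np.array([0.8, 1.5, 440.0])
    print(
        mock_prediction(
            int_args_2d[:, 0],
            int_args_2d[:, 1],
            np.array([extra_2d[0], extra_2d[1]]),
            extra_2d[2],
        )
    )

    # With purity: the integrator samples 3D points (mass, z, log proxy); only
    # the sky area is left in extra_args = [sky_area].
    int_args_3d = np.random.rand(5, 3)
    extra_3d = np.array([440.0])
    print(
        mock_prediction(
            int_args_3d[:, 0],
            int_args_3d[:, 1],
            int_args_3d[:, 2],
            extra_3d[0],
        )
    )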

@@ -188,11 +176,22 @@ def evaluate_theory_prediction_counts(
using the Murata 2019 binned mass-richness relation and assuming perfectly
measured redshifts.
"""
self.integrator.integral_bounds = [
self.mass_interval,
z_edges,
]
self.integrator.extra_args = np.array([*log_proxy_edges, sky_area])
assert len(log_proxy_edges) == 2
assert len(z_edges) == 2
Collaborator


This check should be in get_theory_prediction and not here. Even if things here are 2D, when the integrator calls get_theory_prediction with these intervals it will pass NDArrays with several values, and if purity was not set properly this can be a problem. (An illustrative sketch follows at the end of this hunk.)


if self.purity == None:
self.integrator.integral_bounds = [
self.mass_interval,
z_edges,
]
self.integrator.extra_args = np.array([*log_proxy_edges, sky_area])
else:
self.integrator.integral_bounds = [
self.mass_interval,
z_edges,
log_proxy_edges,
]
self.integrator.extra_args = np.array([sky_area])

theory_prediction = self._get_theory_prediction_counts(average_on)
prediction_wrapper = self._get_function_to_integrate_counts(theory_prediction)
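One way to act on the review comment above (an editorial sketch, not the actual patch): move the consistency check into the prediction closure itself, where a mismatch between the purity setting and the shape of mass_proxy would actually bite. The helper name and error messages below are hypothetical:

    import numpy as np

    def _validate_mass_proxy(mass, mass_proxy, purity_is_set):
        # Hypothetical guard to run inside the closure returned by
        # _get_theory_prediction_counts, per the review comment above.
        if not purity_is_set:
            # without purity, mass_proxy must be the two proxy-bin edges
            if np.shape(mass_proxy) != (2,):
                raise ValueError("expected mass_proxy = (proxy_lo, proxy_hi)")
        elif np.shape(mass_proxy) != np.shape(mass):
            # with purity, mass_proxy is one value per 3D integration point
            raise ValueError("mass_proxy must align with the mass/z samples")

    # Example: a 3D (purity) evaluation that was accidentally handed bin edges
    # fails loudly instead of broadcasting silently.
    mass = np.linspace(13.0, 15.0, 4)
    try:
        _validate_mass_proxy(mass, np.array([0.8, 1.5]), purity_is_set=True)
    except ValueError as err:
        print(err)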
2 changes: 0 additions & 2 deletions crow/recipes/binned_grid.py
@@ -111,7 +111,6 @@ def _get_hmf_grid(
vol = self.cluster_theory.comoving_volume(z, sky_area)
# assign
self._hmf_grid[key] = vol[:, np.newaxis] * mass_function_2d

return self._hmf_grid[key]

def _get_mass_richness_grid(
@@ -147,7 +146,6 @@ def _get_completeness_grid(self, z: npt.NDArray[np.float64], key):
self._completeness_grid[key] = self._completeness_distribution(
self.log_mass_grid[np.newaxis, :], z[:, np.newaxis]
)

return self._completeness_grid[key]

def _get_purity_grid(
58 changes: 34 additions & 24 deletions notebooks/grid_new.ipynb
@@ -65,7 +65,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 7,
"id": "0b708fef-7cdb-4239-84af-3f1520364076",
"metadata": {},
"outputs": [],
@@ -140,6 +140,7 @@
"name": "stdout",
"output_type": "stream",
"text": [
"ola\n",
"Simpson Integral 87221.31147840535, dblquad integral 87220.6489541402\n",
"Abs error 0.6625242651498411, rel error 7.595956612371779e-06\n"
]
@@ -353,8 +354,8 @@
"\n",
"\n",
" (12, 10)\n",
"Simpson Integral 0.13497569756262148, dblquad integral 0.1349756810357927\n",
"Abs error 1.6526828794383164e-08, rel error 1.224430110635666e-07\n"
"Simpson Integral 0.13497569756262148, dblquad integral 0.06724380521649297\n",
"Abs error 0.06773189234612852, rel error 1.0072584698034883\n"
]
}
],
@@ -505,8 +506,8 @@
"(0.2, 0.4) (1.0, 1.3)\n",
"Simpson Integral 465.86054457432704, dblquad integral 465.88615450982513\n",
"Abs error 0.025609935498096092, rel error 5.4970372590390504e-05\n",
"First eval took: 0.0015347003936767578, second eval took: 0.0007061958312988281, integral took: 0.0427401065826416\n",
"After reset: 0.0011837482452392578\n",
"First eval took: 0.0014820098876953125, second eval took: 0.0008060932159423828, integral took: 0.33889269828796387\n",
"After reset: 0.0011088848114013672\n",
" Counts 1, 2 ,3: (np.float64(465.86054457432704), np.float64(465.86054457432704), np.float64(465.86054457432704))\n"
]
}
@@ -555,8 +556,8 @@
"[3.67451479e+15]\n",
"Simpson Integral [3.67407585e+15], dblquad integral [3.67451479e+15]\n",
"Abs error [4.38941366e+11], rel error [0.00011946]\n",
"First eval took: 0.0026946067810058594, second eval took: 0.0008661746978759766, integral took: 0.11332583427429199\n",
"After reset: 0.0024514198303222656\n",
"First eval took: 0.002621173858642578, second eval took: 0.0007369518280029297, integral took: 0.09407925605773926\n",
"After reset: 0.0021927356719970703\n",
" Counts 1, 2 ,3: (array([3.67407585e+15]), array([3.67407585e+15]), array([3.67407585e+15]))\n"
]
}
@@ -636,8 +637,8 @@
"text": [
"Simpson Integral [1.22802065], dblquad integral [1.36087356]\n",
"Abs error [0.13285291], rel error [0.09762325]\n",
"First eval took: 0.04190349578857422, second eval took: 0.0007469654083251953, integral took: 0.28732752799987793\n",
"After reset: 0.0007152557373046875\n",
"First eval took: 0.02831244468688965, second eval took: 0.0010771751403808594, integral took: 0.22215914726257324\n",
"After reset: 0.0007371902465820312\n",
" Values 1, 2 ,3: (array([1.22802065]), array([1.22802065]), array([1.22802065]))\n"
]
}
@@ -686,8 +687,8 @@
"(0.2, 0.4) (1.0, 1.3)\n",
"Simpson Integral 6643.852449550316, dblquad integral 6644.228454470479\n",
"Abs error 0.3760049201628135, rel error 5.659120885737057e-05\n",
"First eval took: 0.0013675689697265625, second eval took: 0.0006885528564453125, integral took: 0.0020668506622314453\n",
"After reset: 0.0012717247009277344\n",
"First eval took: 0.0015234947204589844, second eval took: 0.0005614757537841797, integral took: 0.002446413040161133\n",
"After reset: 0.0008502006530761719\n",
" Counts 1, 2 ,3: (np.float64(6643.852449550316), np.float64(6643.852449550316), np.float64(6643.852449550316))\n"
]
}
@@ -738,8 +739,8 @@
"(0.2, 0.4) (1.0, 1.3)\n",
"Simpson Integral 145.5788864174151, dblquad integral 145.5957901462481\n",
"Abs error 0.016903728833000287, rel error 0.00011610039559539764\n",
"First eval took: 0.0013070106506347656, second eval took: 0.0006926059722900391, integral took: 0.0016515254974365234\n",
"After reset: 0.0012030601501464844\n",
"First eval took: 0.0014867782592773438, second eval took: 0.0005896091461181641, integral took: 0.0023458003997802734\n",
"After reset: 0.0009424686431884766\n",
" Counts 1, 2 ,3: (np.float64(145.5788864174151), np.float64(145.5788864174151), np.float64(145.5788864174151))\n"
]
}
@@ -841,12 +842,12 @@
"text": [
"\n",
"--- One-shot counts ---\n",
"Grid time: 0.0523 s\n",
"Integral time: 0.0023 s\n",
"Grid time: 0.0346 s\n",
"Integral time: 0.0019 s\n",
"\n",
"--- One-shot shear ---\n",
"Grid time: 0.0021 s\n",
"Integral time: 0.4202 s\n"
"Grid time: 0.0020 s\n",
"Integral time: 0.3056 s\n"
]
}
],
@@ -936,8 +937,8 @@
"output_type": "stream",
"text": [
"\n",
"TOTAL GRID TIME = 0.23 s\n",
"TOTAL INTEGRAL TIME = 24.13 s\n",
"TOTAL GRID TIME = 0.16 s\n",
"TOTAL INTEGRAL TIME = 17.28 s\n",
"\n",
"--- RESULTS COMPARISON ---\n",
"Total number of bins computed: 20\n",
@@ -1014,7 +1015,7 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 20,
"id": "80383885-0945-4e1c-bf2e-ed64072d9130",
"metadata": {
"scrolled": true
@@ -1076,7 +1077,16 @@
" shear diff = 0.000007\n",
" counts diff = 0.000010\n",
"\n",
"Trying mass_grid_size = 80\n"
"Trying mass_grid_size = 80\n",
" shear diff = 0.000007\n",
" counts diff = 0.000010\n",
"\n",
"Trying mass_grid_size = 120\n",
" shear diff = 0.000007\n",
" counts diff = 0.000010\n",
"\n",
"===== END ACCURACY SWEEP WITH COUNTS =====\n",
"\n"
]
}
],
@@ -1253,9 +1263,9 @@
],
"metadata": {
"kernelspec": {
"display_name": "Python (firecrown20.0)",
"display_name": "crow",
"language": "python",
"name": "firecrown_20"
"name": "crow"
},
"language_info": {
"codemirror_mode": {
@@ -1267,7 +1277,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.13.0"
"version": "3.14.1"
}
},
"nbformat": 4,
Collaborator


If you did not add anything to this notebook and only re-ran it, please restore it before committing.
