Commit

🔨 refactors noncentralt module
MaaniBeigy committed Aug 16, 2024
1 parent e964f69 commit 53f061b
Showing 8 changed files with 104 additions and 202 deletions.
6 changes: 3 additions & 3 deletions .logs/bandit.json
@@ -1,6 +1,6 @@
{
"errors": [],
"generated_at": "2024-08-16T09:31:50Z",
"generated_at": "2024-08-16T12:26:47Z",
"metrics": {
"_totals": {
"CONFIDENCE.HIGH": 0,
@@ -11,7 +11,7 @@
"SEVERITY.LOW": 0,
"SEVERITY.MEDIUM": 0,
"SEVERITY.UNDEFINED": 0,
"loc": 1740,
"loc": 1779,
"nosec": 0,
"skipped_tests": 0
},
@@ -115,7 +115,7 @@
"SEVERITY.LOW": 0,
"SEVERITY.MEDIUM": 0,
"SEVERITY.UNDEFINED": 0,
"loc": 94,
"loc": 133,
"nosec": 0,
"skipped_tests": 0
},
4 changes: 2 additions & 2 deletions .logs/bandit.txt
@@ -1,10 +1,10 @@
Run started:2024-08-16 09:31:52.510266
Run started:2024-08-16 12:26:49.489295

Test results:
No issues identified.

Code scanned:
Total lines of code: 1740
Total lines of code: 1779
Total lines skipped (#nosec): 0
Total potential issues skipped due to specifically being disabled (e.g., #nosec BXXX): 0

Binary file modified .logs/complexity.txt
Binary file modified .logs/docstring.txt
2 changes: 1 addition & 1 deletion .logs/maintainability.json
@@ -1,3 +1,3 @@
{
"maintainability": "90.1%"
"maintainability": "90.0%"
}
Binary file modified .logs/maintainability.txt
154 changes: 0 additions & 154 deletions assets/docs/pycvcqv/examples.ipynb
@@ -42,140 +42,6 @@
"plt.ion()"
]
},
{
"cell_type": "code",
"execution_count": 113,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
" message: Solution found.\n",
" success: True\n",
" status: 0\n",
" fun: 2.28705961145802e-19\n",
" x: 4.815359140504376\n",
" nit: 13\n",
" nfev: 13"
]
},
"execution_count": 113,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"ncp1 = 2.83\n",
"dof1 = 126\n",
"conf_level1 = 0.95\n",
"alpha_lower1 = (1 - conf_level1) / 2\n",
"min_ncp = min(-150, -5 * ncp1)\n",
"max_ncp = max(150, 5 * ncp1)\n",
"tol1 = 1e-9\n",
"\n",
"\n",
"def ci_nct_lower(val_of_interest):\n",
" return (nct.ppf(alpha_lower1, dof1, val_of_interest, loc=0) - ncp1) ** 2\n",
"\n",
"\n",
"minimize_scalar(\n",
" ci_nct_lower,\n",
" bounds=(min_ncp, max_ncp),\n",
" method=\"bounded\",\n",
" options={\"xatol\": tol1},\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 134,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"4.815359140504376\n",
"0.8337502600175457\n"
]
}
],
"source": [
"ncp1 = 2.83\n",
"dof1 = 126\n",
"conf_level1 = 0.95\n",
"alpha_lower1 = (1 - conf_level1) / 2\n",
"alpha_upper1 = (1 - conf_level1) / 2\n",
"min_ncp = min(-150, -5 * ncp1)\n",
"max_ncp = max(150, 5 * ncp1)\n",
"tol1 = 1e-9\n",
"\n",
"\n",
"def ci_nct_lower(val_of_interest):\n",
" return (nct.ppf(alpha_lower1, dof1, val_of_interest, loc=0) - ncp1) ** 2\n",
"\n",
"\n",
"def ci_nct_upper(val_of_interest):\n",
" return (nct.ppf(1 - alpha_upper1, dof1, val_of_interest, loc=0) - ncp1) ** 2\n",
"\n",
"\n",
"lower_limit = minimize_scalar(\n",
" ci_nct_lower,\n",
" bounds=(min_ncp, max_ncp),\n",
" method=\"bounded\",\n",
" options={\"xatol\": tol1},\n",
")\n",
"\n",
"upper_limit = minimize_scalar(\n",
" ci_nct_upper,\n",
" bounds=(min_ncp, max_ncp),\n",
" method=\"bounded\",\n",
" options={\"xatol\": tol1},\n",
")\n",
"print(lower_limit.x)\n",
"print(upper_limit.x)"
]
},
{
"cell_type": "code",
"execution_count": 135,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"0.024999999971943743"
]
},
"execution_count": 135,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"nct.cdf(ncp1, dof1, lower_limit.x, loc=0)"
]
},
{
"cell_type": "code",
"execution_count": 138,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"0.02499999995262825"
]
},
"execution_count": 138,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"1 - nct.cdf(ncp1, dof1, upper_limit.x, loc=0)"
]
},
{
"cell_type": "code",
"execution_count": 7,
@@ -344,26 +210,6 @@
")"
]
},
{
"cell_type": "code",
"execution_count": 133,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"False"
]
},
"execution_count": 133,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"0.9750000000280563 > 0.9750000000473718"
]
},
{
"cell_type": "code",
"execution_count": 103,
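For context, the cells removed from examples.ipynb above prototyped the confidence-limit search that the refactored module now wraps. Below is a minimal, self-contained sketch of that computation, using the same SciPy calls and the numbers from the deleted cells (ncp1 = 2.83, dof1 = 126, conf_level1 = 0.95); variable names follow the notebook, not the library helpers.

```python
# Sketch reconstructed from the deleted notebook cells: find the noncentrality
# values whose tail quantiles match the observed ncp, via bounded minimization.
from scipy.optimize import minimize_scalar
from scipy.stats import nct

ncp1 = 2.83  # observed noncentrality parameter
dof1 = 126  # degrees of freedom
conf_level1 = 0.95
alpha_lower1 = alpha_upper1 = (1 - conf_level1) / 2
min_ncp, max_ncp = min(-150, -5 * ncp1), max(150, 5 * ncp1)
tol1 = 1e-9


def ci_nct_lower(val_of_interest: float) -> float:
    # Squared gap between the alpha_lower quantile of nct(dof1, delta) and ncp1.
    return (nct.ppf(alpha_lower1, dof1, val_of_interest, loc=0) - ncp1) ** 2


def ci_nct_upper(val_of_interest: float) -> float:
    # Squared gap between the (1 - alpha_upper) quantile and ncp1.
    return (nct.ppf(1 - alpha_upper1, dof1, val_of_interest, loc=0) - ncp1) ** 2


lower_limit = minimize_scalar(
    ci_nct_lower, bounds=(min_ncp, max_ncp), method="bounded", options={"xatol": tol1}
)
upper_limit = minimize_scalar(
    ci_nct_upper, bounds=(min_ncp, max_ncp), method="bounded", options={"xatol": tol1}
)
# The two minimizers bracket ncp1; the notebook printed 4.815359140504376 and
# 0.8337502600175457, and both tail probabilities come out near 0.025:
print(lower_limit.x, upper_limit.x)
print(nct.cdf(ncp1, dof1, lower_limit.x, loc=0))      # ~0.025
print(1 - nct.cdf(ncp1, dof1, upper_limit.x, loc=0))  # ~0.025
```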
140 changes: 98 additions & 42 deletions pycvcqv/noncentralt.py
@@ -1,7 +1,7 @@
"""Noncentral t-distribution module."""

# --------------------------- Import libraries and functions --------------------------
from typing import Dict, Optional, Union
from typing import Any, Dict, Optional, Union

import numpy as np
from scipy.optimize import minimize_scalar
@@ -10,8 +10,76 @@
from pycvcqv.checkers import is_dof_positive_natural_number, is_ncp_huge
from pycvcqv.sanitizers import validate_ncp_confidence_level_arguments


# -------------------------------- function definition --------------------------------


def _ci_nct_lower(
val_of_interest: float, alpha_lower: float, dof: int, ncp: float
) -> float:
"""Computes lower confidence limit for noncentral t parameter."""
# ------- Squared gap between the (1 - alpha_lower) quantile and the observed ncp -------
result: float = nct.ppf(1 - alpha_lower, dof, val_of_interest, loc=0)
return (result - ncp) ** 2


def _ci_nct_upper(
val_of_interest: float, alpha_upper: float, dof: int, ncp: float
) -> float:
"""Computes upper confidence limit for noncentral t parameter."""
# --------- Squared gap between the alpha_upper quantile and the observed ncp ---------
result: float = nct.ppf(alpha_upper, dof, val_of_interest, loc=0)
return (result - ncp) ** 2


def _calculate_alpha_tails(
conf_level: Optional[float] = None,
alpha_lower: Optional[float] = None,
alpha_upper: Optional[float] = None,
) -> Dict[str, Any]:
"""Calculates alpha tails of noncentral t parameter confidence interval."""
# ----- If all three are None, use default conf_level and compute alpha values ----
if all(scalar is None for scalar in [conf_level, alpha_lower, alpha_upper]):
conf_level = 0.95
alpha_lower = alpha_upper = (1 - conf_level) / 2
# ------ Calculate the alpha_lower and alpha_upper based on given conf_level ------
elif conf_level is not None and all(
scalar is None for scalar in [alpha_lower, alpha_upper]
):
alpha_lower = alpha_upper = (1 - conf_level) / 2
# ------------------------ Preparing the alpha tails output -----------------------
alpha_tails = {"alpha_lower": alpha_lower, "alpha_upper": alpha_upper}
return alpha_tails


def _calculate_out_of_range_probabilities(
ncp: float,
dof: int,
ncp_lower_limit: float,
valid_alpha_lower: float,
ncp_upper_limit: float,
valid_alpha_upper: float,
) -> Dict[str, Any]:
"""
Calculates the probabilities for out of range of noncentral t parameter
confidence interval.
"""
# ------------- Probability that the NCP is less than the lower limit -------------
prob_less_lower = (
1 - nct.cdf(ncp, dof, ncp_lower_limit, loc=0) if valid_alpha_lower != 0 else 0
)
# ------------ Probability that the NCP is greater than the upper limit -----------
prob_greater_upper = (
nct.cdf(ncp, dof, ncp_upper_limit, loc=0) if valid_alpha_upper != 0 else 0
)
# -------------------- Preparing the out of range probabilities -------------------
out_of_range_probabilities = {
"prob_less_lower": prob_less_lower,
"prob_greater_upper": prob_greater_upper,
}
return out_of_range_probabilities


# ------------------ Decorators to check validity of input arguments ------------------
@is_dof_positive_natural_number
@is_ncp_huge
@validate_ncp_confidence_level_arguments
@@ -58,55 +126,43 @@ def conf_limits_nct_minimize_scalar(
... 'prob_greater_upper': 0.024999999971943743
... }
"""
# --- If all three are None, use default conf_level and compute alpha values --
if conf_level is None and alpha_lower is None and alpha_upper is None:
conf_level = 0.95
alpha_lower = (1 - conf_level) / 2
alpha_upper = (1 - conf_level) / 2
# ---- Calculate the alpha_lower and alpha_upper based on given conf_level ----
elif conf_level is not None and alpha_lower is None and alpha_upper is None:
alpha_lower = (1 - conf_level) / 2
alpha_upper = (1 - conf_level) / 2

def _ci_nct_lower(val_of_interest: float) -> float:
"""Internal function to compute lower confidence limit."""
assert alpha_lower is not None # Ensuring alpha_lower is not None
result: float = nct.ppf(
1 - alpha_lower, dof, val_of_interest, loc=0
) # Explicit type declaration
return (result - ncp) ** 2

def _ci_nct_upper(val_of_interest: float) -> float:
"""Internal function to compute upper confidence limit."""
assert alpha_upper is not None # Ensuring alpha_upper is not None
result: float = nct.ppf(
alpha_upper, dof, val_of_interest, loc=0
) # Explicit type declaration
return (result - ncp) ** 2

# ------ Calculates alpha tails of noncentral t parameter confidence interval -----
alpha_tails = _calculate_alpha_tails(conf_level, alpha_lower, alpha_upper)
valid_alpha_lower = alpha_tails["alpha_lower"]
valid_alpha_upper = alpha_tails["alpha_upper"]
# ------------------------ allowed minimum and maximum NCP ------------------------
min_ncp = min(-150, -5 * ncp)
max_ncp = max(150, 5 * ncp)

lower_limit = minimize_scalar(
# ------------------------- calculate lower_limit for NCP -------------------------
ncp_lower_limit = minimize_scalar(
_ci_nct_lower,
bounds=(min_ncp, max_ncp),
method="bounded",
options={"xatol": tol, "disp": 0, "maxiter": max_iter},
)
upper_limit = minimize_scalar(
args=(valid_alpha_lower, dof, ncp),
).x
# ------------------------- calculate upper_limit for NCP -------------------------
ncp_upper_limit = minimize_scalar(
_ci_nct_upper,
bounds=(min_ncp, max_ncp),
method="bounded",
options={"xatol": tol, "disp": 0, "maxiter": max_iter},
args=(valid_alpha_upper, dof, ncp),
).x
# -------------- Calculates the probabilities for out of range values -------------
out_of_range_probabilities = _calculate_out_of_range_probabilities(
ncp,
dof,
ncp_lower_limit,
valid_alpha_lower,
ncp_upper_limit,
valid_alpha_upper,
)

return {
"lower_limit": lower_limit.x if alpha_lower != 0 else -np.inf,
"prob_less_lower": (
1 - nct.cdf(ncp, dof, lower_limit.x, loc=0) if alpha_lower != 0 else 0
),
"upper_limit": upper_limit.x if alpha_upper != 0 else np.inf,
"prob_greater_upper": (
nct.cdf(ncp, dof, upper_limit.x, loc=0) if alpha_upper != 0 else 0
),
# ----------------------------- preparing the result -----------------------------
result = {
"lower_limit": ncp_lower_limit if valid_alpha_lower != 0 else -np.inf,
"prob_less_lower": out_of_range_probabilities["prob_less_lower"],
"upper_limit": ncp_upper_limit if valid_alpha_upper != 0 else np.inf,
"prob_greater_upper": out_of_range_probabilities["prob_greater_upper"],
}
return result
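After the refactor, the public entry point still returns the same four keys. A usage sketch follows; the keyword names mirror the variables referenced in the diff, but the full signature sits outside the hunks shown here, so treat the exact call form as an assumption. The expected numbers come from the notebook cells deleted in this same commit.

```python
# Usage sketch for the refactored conf_limits_nct_minimize_scalar; keyword names
# are inferred from the diff, since the full signature is not shown in these hunks.
from pycvcqv.noncentralt import conf_limits_nct_minimize_scalar

limits = conf_limits_nct_minimize_scalar(ncp=2.83, dof=126, conf_level=0.95)
# Per the deleted notebook cells, the limits should be roughly 0.8338 and 4.8154,
# with both out-of-range probabilities close to 0.025.
print(limits["lower_limit"], limits["upper_limit"])
print(limits["prob_less_lower"], limits["prob_greater_upper"])
```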
