
Update docs
reidjohnson committed Feb 11, 2024
1 parent f3929d3 commit e6a63be
Showing 3 changed files with 7 additions and 12 deletions.
docs/user_guide.rst (2 changes: 1 addition & 1 deletion)
@@ -37,7 +37,7 @@ This approach was first proposed by :cite:t:`2006:meinshausen`.
Fitting and Predicting
----------------------

-Quantile forests can be fit and used to predict like standard scikit-learn estimators. In this package, the quantile forests extend standard scikit-learn forest regressors and inherent their model parameters, in addition to offering additional parameters related to quantile regression. We'll discuss the many of the important model parameters below.
+Quantile forests can be fit and used to predict like standard scikit-learn estimators. In this package, the quantile forests extend standard scikit-learn forest regressors and inherent their model parameters, in addition to offering additional parameters related to quantile regression. We'll discuss many of the important model parameters below.

Fitting a Model
~~~~~~~~~~~~~~~
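For readers skimming this diff, here is a minimal sketch of the fit-and-predict workflow the user guide passage above describes; the toy data, forest size, and quantile levels are illustrative choices, not taken from the commit:

```python
import numpy as np
from sklearn.model_selection import train_test_split
from quantile_forest import RandomForestQuantileRegressor

# Illustrative toy data (any regression dataset works the same way).
rng = np.random.RandomState(0)
X = rng.uniform(0, 10, size=(500, 1))
y = np.sin(X).ravel() + rng.normal(scale=0.1, size=500)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Fit like any scikit-learn forest regressor...
qrf = RandomForestQuantileRegressor(n_estimators=100, random_state=0)
qrf.fit(X_train, y_train)

# ...then request arbitrary quantiles at predict time.
y_pred = qrf.predict(X_test, quantiles=[0.025, 0.5, 0.975])
print(y_pred.shape)  # (n_test_samples, 3): one column per requested quantile
```

As the passage notes, inherited parameters such as n_estimators behave as they do in the scikit-learn forest regressors; only the quantile-related arguments are specific to this package.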
examples/plot_quantile_multioutput.py (10 changes: 6 additions & 4 deletions)
@@ -5,7 +5,7 @@
An example on a toy dataset that demonstrates fitting a single quantile
regressor for multiple target variables. For each target, multiple quantiles
-can be estimated.
+can be estimated simulatenously.
"""

@@ -29,7 +29,7 @@
]


-def make_func_Xy(funcs, bounds, n_samples):
+def make_Xy(funcs, bounds, n_samples):
    x = np.linspace(bounds[0], bounds[1], n_samples)
    y = np.empty((len(x), 3))
    y[:, 0] = funcs[0](x) + np.random.normal(scale=0.01 * np.abs(x))
@@ -38,7 +38,7 @@ def make_func_Xy(funcs, bounds, n_samples):
    return x, y


-X, y = make_func_Xy(funcs, bounds, n_samples)
+X, y = make_Xy(funcs, bounds, n_samples)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=0)

qrf = RandomForestQuantileRegressor(random_state=0)
@@ -49,7 +49,9 @@ def make_func_Xy(funcs, bounds, n_samples):

def plot_multioutputs(colors, funcs, X, y):
    for i in range(y.shape[-1]):
-        plt.fill_between(X, y_pred[:, 0, i], y_pred[:, 2, i], color=colors[i], label=f"Target {i}")
+        y1 = y_pred[:, 0, i]
+        y2 = y_pred[:, 2, i]
+        plt.fill_between(X, y1, y2, color=colors[i], label=f"Target {i}")
        plt.plot(X, funcs[i](X), c="black")
    plt.xlim(bounds)
    plt.ylim([-8, 8])
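As context for the refactor above, the example relies on a 3-D prediction array indexed as y_pred[:, quantile, target]. A rough standalone sketch of that shape follows; the quantile levels and toy targets are assumptions, since they are not shown in this hunk:

```python
import numpy as np
from quantile_forest import RandomForestQuantileRegressor

# Illustrative data: one feature, three target columns.
rng = np.random.RandomState(0)
X = rng.uniform(-1, 1, size=(200, 1))
y = np.column_stack([np.sin(3 * X).ravel(), np.cos(3 * X).ravel(), X.ravel() ** 2])

# A single quantile forest is fit on all three targets at once.
qrf = RandomForestQuantileRegressor(random_state=0)
qrf.fit(X, y)

# With multiple quantiles and multiple targets, predictions are 3-D:
# (n_samples, n_quantiles, n_targets), which is why the plotting code
# above slices y_pred[:, 0, i] and y_pred[:, 2, i] per target i.
y_pred = qrf.predict(X, quantiles=[0.025, 0.5, 0.975])
print(y_pred.shape)  # (200, 3, 3)
```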
quantile_forest/_quantile_forest.py (7 changes: 0 additions & 7 deletions)
@@ -535,13 +535,6 @@ def predict(
        if not isinstance(interpolation, (bytes, bytearray)):
            interpolation = interpolation.encode()

-        if weighted_leaves and not weighted_quantile:
-            warn(
-                "`weighted_leaves` is True, but is only used if `weighted_quantile=True`. "
-                "`weighted_leaves` will be set to False."
-            )
-            weighted_leaves = False
-
        if oob_score:
            if not self.bootstrap:
                raise ValueError("Out-of-bag estimation only available if bootstrap=True.")
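The check that survives this hunk ties out-of-bag estimation to bagging. A minimal usage sketch, assuming the predict-time oob_score flag shown above and illustrative data and quantile choices:

```python
import numpy as np
from quantile_forest import RandomForestQuantileRegressor

# Illustrative training data.
rng = np.random.RandomState(0)
X = rng.uniform(0, 1, size=(300, 2))
y = X[:, 0] + rng.normal(scale=0.05, size=300)

# bootstrap=True (the default) is what makes out-of-bag samples exist.
qrf = RandomForestQuantileRegressor(bootstrap=True, random_state=0).fit(X, y)

# Out-of-bag quantile predictions on the training samples.
y_oob = qrf.predict(X, quantiles=[0.1, 0.5, 0.9], oob_score=True)

# Without bagging, the same call raises the ValueError kept in this hunk.
qrf_no_bag = RandomForestQuantileRegressor(bootstrap=False, random_state=0).fit(X, y)
try:
    qrf_no_bag.predict(X, quantiles=[0.5], oob_score=True)
except ValueError as err:
    print(err)  # Out-of-bag estimation only available if bootstrap=True.
```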
