
Commit

Hyperparameter tuning to optimize the LightGBM and CatBoost models
SaurabhIndi authored Oct 11, 2024
1 parent 12adb70 commit c9b46ef
Showing 1 changed file with 66 additions and 6 deletions.
72 changes: 66 additions & 6 deletions Stock_Price_Prediction.ipynb
@@ -276,15 +276,13 @@
"</table>\n",
"</div>"
],

"text/plain": [
" Open High Low Close Volume\n",
"0 18.691147 18.978922 18.540184 18.823240 43733533.0\n",
"1 18.894005 18.964767 17.738192 18.224106 56167280.0\n",
"2 18.327892 18.568489 17.643839 17.738192 68296318.0\n",
"3 17.502312 17.832542 17.223972 17.676863 86073880.0\n",
"4 17.738192 17.785366 17.459852 17.577793 76613039.0"

]
},
"execution_count": 30,
@@ -438,11 +436,9 @@
},
"outputs": [
{

"data": {
"text/plain": [
"(1415,)"

]
},
"execution_count": 46,
@@ -4382,7 +4378,6 @@
},
"outputs": [
{

"name": "stdout",
"output_type": "stream",
"text": [
@@ -4483,7 +4478,6 @@
"MAPE: 0.0126215060590655\n",
"\n"
]

}
],
"source": [
@@ -4637,6 +4631,67 @@
"plt.show()"
]
},
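{
"cell_type": "markdown",
"metadata": {},
"source": [
"For reference, the MAPE printed above is the mean absolute percentage error in fractional form, $\\mathrm{MAPE} = \\frac{1}{n}\\sum_{i=1}^{n}\\left|\\frac{y_i - \\hat{y}_i}{y_i}\\right|$, so a value of about 0.0126 corresponds to an average relative error of roughly 1.3%."
]
},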
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Hyperparameter tuning to optimize the LightGBM and CatBoost models"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from sklearn.model_selection import GridSearchCV, RandomizedSearchCV\n",
"import lightgbm as lgb\n",
"from catboost import CatBoostRegressor\n",
"import numpy as np\n",
"\n",
"# Assume X_train, y_train are already defined\n",
"\n",
"# Define hyperparameter grids for both models\n",
"lightgbm_param_grid = {\n",
" 'learning_rate': [0.01, 0.1, 0.2],\n",
" 'n_estimators': [100, 500, 1000],\n",
" 'num_leaves': [20, 31, 40],\n",
" 'max_depth': [-1, 10, 20],\n",
" 'subsample': [0.6, 0.8, 1.0]\n",
"}\n",
"\n",
"catboost_param_grid = {\n",
" 'learning_rate': [0.01, 0.1, 0.2],\n",
" 'iterations': [100, 500, 1000],\n",
" 'depth': [4, 6, 10],\n",
" 'l2_leaf_reg': [1, 3, 5],\n",
" 'border_count': [32, 50, 100]\n",
"}\n",
"\n",
"# Initialize the models\n",
"model_lightgbm = lgb.LGBMRegressor()\n",
"model_catboost = CatBoostRegressor(verbose=0)\n",
"\n",
"# Initialize GridSearchCV for both models\n",
"grid_search_lightgbm = GridSearchCV(estimator=model_lightgbm, param_grid=lightgbm_param_grid, \n",
" cv=3, n_jobs=-1, verbose=1, scoring='neg_mean_squared_error')\n",
"\n",
"grid_search_catboost = GridSearchCV(estimator=model_catboost, param_grid=catboost_param_grid, \n",
" cv=3, n_jobs=-1, verbose=1, scoring='neg_mean_squared_error')\n",
"\n",
"# Assuming X_train and y_train are defined somewhere in the original notebook\n",
"# Perform hyperparameter tuning (commented out here to prevent execution error if data isn't available)\n",
"# grid_search_lightgbm.fit(X_train, y_train)\n",
"# grid_search_catboost.fit(X_train, y_train)\n",
"\n",
"# Best parameters from tuning\n",
"# best_params_lightgbm = grid_search_lightgbm.best_params_\n",
"# best_params_catboost = grid_search_catboost.best_params_\n",
"\n",
"# For now, just showing the setup, the actual fitting would take place if X_train and y_train are available.\n",
"grid_search_lightgbm, grid_search_catboost\n"
]
},
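{
"cell_type": "markdown",
"metadata": {},
"source": [
"The next cell is a minimal sketch rather than part of the tuning run: assuming the grid searches above have been fitted on `X_train`/`y_train` and the notebook's `X_test`/`y_test` split is available, it shows how the refit best estimator could be pulled out of each search and scored with scikit-learn's MSE and MAPE helpers."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from sklearn.metrics import mean_squared_error, mean_absolute_percentage_error\n",
"\n",
"def evaluate_best_estimator(search, X_test, y_test, name):\n",
"    # GridSearchCV refits the best configuration on the full training data by default,\n",
"    # so the tuned model is available as search.best_estimator_ after fitting.\n",
"    preds = search.best_estimator_.predict(X_test)\n",
"    print(f\"{name} best params: {search.best_params_}\")\n",
"    print(f\"{name} MSE : {mean_squared_error(y_test, preds)}\")\n",
"    print(f\"{name} MAPE: {mean_absolute_percentage_error(y_test, preds)}\")\n",
"\n",
"# Usage once the searches above have been fitted:\n",
"# evaluate_best_estimator(grid_search_lightgbm, X_test, y_test, 'LightGBM')\n",
"# evaluate_best_estimator(grid_search_catboost, X_test, y_test, 'CatBoost')\n"
]
},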
{
"cell_type": "markdown",
"metadata": {},
@@ -4679,6 +4734,11 @@
"metrics_catboost = train_and_evaluate_model(model_catboost, X_train, X_test, y_train, y_test)\n",
"print(\"CatBoost Metrics:\", metrics_catboost)"
]
},
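{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a sketch of how the tuned settings could feed back into the comparison above, the best-parameter dictionaries from the fitted searches can be unpacked straight into fresh model constructors and passed through the same `train_and_evaluate_model` helper (shown commented out, since it needs the fitted searches and the notebook's train/test split):"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Rebuild the models with the tuned hyperparameters and re-run the shared evaluation helper.\n",
"# tuned_lightgbm = lgb.LGBMRegressor(**grid_search_lightgbm.best_params_)\n",
"# tuned_catboost = CatBoostRegressor(verbose=0, **grid_search_catboost.best_params_)\n",
"\n",
"# metrics_tuned_lightgbm = train_and_evaluate_model(tuned_lightgbm, X_train, X_test, y_train, y_test)\n",
"# metrics_tuned_catboost = train_and_evaluate_model(tuned_catboost, X_train, X_test, y_train, y_test)\n",
"# print(\"Tuned LightGBM Metrics:\", metrics_tuned_lightgbm)\n",
"# print(\"Tuned CatBoost Metrics:\", metrics_tuned_catboost)\n"
]
},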
{
"cell_type": "markdown",
"metadata": {},
"source": []
}
],
"metadata": {
