LightGBM optimization using hyperopt
from hyperopt import hp, fmin, tpe, Trials, STATUS_OK
import numpy as np
import lightgbm as lgb
from sklearn.model_selection import cross_val_score

# Search space for the LightGBM regressor
lgb_reg_params = {
    'learning_rate': hp.uniform('learning_rate', 0.1, 1),
    'max_depth': hp.choice('max_depth', np.arange(2, 100, 1, dtype=int)),
    'min_child_weight': hp.choice('min_child_weight', np.arange(1, 50, 1, dtype=int)),
    'colsample_bytree': hp.uniform('colsample_bytree', 0.4, 1),
    'subsample': hp.uniform('subsample', 0.6, 1),
    # num_leaves must be at least 2 in LightGBM, so the range starts at 2
    'num_leaves': hp.choice('num_leaves', np.arange(2, 200, 1, dtype=int)),
    'min_split_gain': hp.uniform('min_split_gain', 0, 1),
    'reg_alpha': hp.uniform('reg_alpha', 0, 1),
    'reg_lambda': hp.uniform('reg_lambda', 0, 1),
    'n_estimators': 5  # fixed small value to keep each evaluation fast
}

def f(params):
    # X_train, y_train and the rmsle scorer are assumed to be defined elsewhere.
    # Early stopping is omitted because cross_val_score supplies no eval set.
    lgbm = lgb.LGBMRegressor(n_jobs=-1, **params)
    score = cross_val_score(lgbm, X_train, y_train, cv=2, scoring=rmsle, n_jobs=-1).mean()
    # A scorer built with greater_is_better=False returns negated errors,
    # so negate again to hand fmin a loss it can minimize.
    return {'loss': -score, 'status': STATUS_OK}

trials = Trials()
result = fmin(
    fn=f,                   # objective function
    space=lgb_reg_params,   # parameter space
    algo=tpe.suggest,       # surrogate algorithm
    max_evals=50,           # no. of evaluations
    trials=trials           # trials object that records the sampled results (optional)
)
print(result)
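
The snippet assumes an rmsle scorer and the X_train/y_train data already exist. Below is a minimal sketch of such a scorer (an assumption, not part of the original gist), plus hyperopt's space_eval to translate fmin's output, which reports hp.choice parameters as indices, back into concrete parameter values:

import numpy as np
from sklearn.metrics import make_scorer, mean_squared_log_error
from hyperopt import space_eval

def rmsle_score(y_true, y_pred):
    # Guard against negative predictions, which MSLE cannot handle
    return np.sqrt(mean_squared_log_error(y_true, np.clip(y_pred, 0, None)))

# greater_is_better=False makes sklearn negate the error, which is why
# the objective above negates the mean score back into a positive loss
rmsle = make_scorer(rmsle_score, greater_is_better=False)

# fmin returns hp.choice entries as indices into their option arrays;
# space_eval resolves them to the actual sampled values
best_params = space_eval(lgb_reg_params, result)
print(best_params)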
I suggest shap-hypetune to industrialize parameter tuning (and also feature selection) with LGBM and hyperopt (https://github.com/cerlymarco/shap-hypetune).

Thanks for the suggestion. I have bookmarked the repo.