# Training
# Baseline LightGBM regressor with hand-picked hyperparameters,
# used as a reference point for the tuned model below.
param = dict(
    learning_rate=0.1,
    max_depth=5,
    min_child_samples=10,
    n_estimators=20,
    num_leaves=5,
    reg_alpha=0.1,
)
lgb_base = lgb.LGBMRegressor(**param).fit(X_train, y_train)
# Bayes Optimization
def target(
    learning_rate,
    max_depth,
    min_child_samples,
    n_estimators,
    num_leaves,
    reg_alpha,
):
    """Objective for the Bayesian hyperparameter search.

    Trains an LGBMRegressor on (X_train, y_train) with the given
    hyperparameters and returns the *negated* mean squared error on
    (X_val, y_val): BayesianOptimization maximizes its objective, so
    maximizing -MSE minimizes validation error.  Integer-valued
    hyperparameters arrive as floats from the optimizer and are
    truncated with int().
    """
    hyperparams = dict(
        learning_rate=learning_rate,
        max_depth=int(max_depth),
        min_child_samples=int(min_child_samples),
        n_estimators=int(n_estimators),
        num_leaves=int(num_leaves),
        reg_alpha=reg_alpha,
    )
    regressor = lgb.LGBMRegressor(**hyperparams)
    regressor.fit(X_train, y_train)
    # Negate so that a higher objective value means a lower MSE.
    return -mean_squared_error(y_val, regressor.predict(X_val))
# Continuous search bounds for every tuned hyperparameter; the
# integer-valued ones are truncated inside `target` at evaluation time.
search_range = {
    'learning_rate': (0.01, 0.3),
    'max_depth': (1, 10),
    'min_child_samples': (5, 50),
    'n_estimators': (1, 50),
    'num_leaves': (3, 50),
    'reg_alpha': (0.01, 0.6),
}

search_engine = BayesianOptimization(target, search_range, verbose=1)
# 50 random exploration points first, then 300 model-guided iterations.
search_engine.maximize(init_points=50, n_iter=300)
# Best hyperparameters found by the search (all values are floats).
params_bo = search_engine.max['params']
# Cast the integer-valued hyperparameters with int() (truncation) so the
# final model uses exactly the configuration that `target` evaluated
# during the search.  The previous int(round(...)) could pick a value
# (e.g. max_depth 5.6 -> 6) that the optimizer never actually scored.
for key in ('max_depth', 'num_leaves', 'n_estimators', 'min_child_samples'):
    params_bo[key] = int(params_bo[key])
params_bo
# Refit on the full in-sample data with the tuned hyperparameters.
lgb_bo = lgb.LGBMRegressor(**params_bo).fit(X_in, y_in)