1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
|
from hyperopt import fmin, tpe, hp, Trials, space_eval
## Hyperparameter search space for LGBMClassifier.
## hp.choice samples from a discrete set, hp.uniform from a continuous
## interval, and hp.quniform from a quantized (stepped) interval.
param_space = {
    'max_depth': hp.choice('max_depth', range(4, 9, 1)),
    'reg_lambda': hp.uniform('reg_lambda', 0.0, 1.0),
    'learning_rate': hp.quniform('learning_rate', 0.1, 0.5, 0.1),
    'n_estimators': hp.choice('n_estimators', range(50, 250, 30)),
    'num_leaves': hp.choice('num_leaves', [15, 31, 63, 127]),
    'subsample': hp.choice('subsample', [0.6, 0.7, 0.8, 1.0]),
    'colsample_bytree': hp.choice('colsample_bytree', [0.6, 0.7, 0.8, 1.0]),
}
## Objective for hyperopt: the optimizer minimizes the returned value,
## so we return 1 - AUC (smaller loss <=> higher validation AUC).
def objective_lgbm(params):
    """Train an LGBMClassifier with *params* and return 1 - validation AUC."""
    ## Hold out 10% of the data, stratified on the labels, with a fixed
    ## seed so every trial is scored on the same split.
    x_tr, x_va, y_tr, y_va = train_test_split(
        train, labels, test_size=0.1, random_state=0, stratify=labels
    )
    ## target_labels transforms the folds (presumably target/label
    ## encoding fitted on the training fold — TODO confirm with its def).
    x_tr, x_va, y_tr = target_labels(x_tr, x_va, y_tr)
    model = LGBMClassifier(**params)
    model.fit(x_tr, y_tr)
    ## Probability of the positive class on the validation fold.
    val_pred = model.predict_proba(x_va)[:, 1].flatten()
    return 1 - roc_auc_score(y_va, val_pred)
## Standard hyperopt driver boilerplate.
trials = Trials()  # records every evaluated point and its loss
best = fmin(
    fn=objective_lgbm,
    space=param_space,
    algo=tpe.suggest,       # Tree-structured Parzen Estimator search
    max_evals=10,           # number of optimization iterations
    ## FIX: hyperopt >= 0.2.7 requires a numpy Generator for rstate;
    ## the legacy np.random.RandomState(0) raises on current releases.
    rstate=np.random.default_rng(0),
    trials=trials,
)
## fmin returns hp.choice parameters as *indices* into their option
## lists; space_eval maps them back to the actual parameter values.
best_params = space_eval(param_space, best)
print("最佳参数:", best_params)
## Refit the final model with the best parameters found.
clf = LGBMClassifier(**best_params)
|