Beinsearch
12/25/2018 - 1:52 AM

Parameter Optimization

# References:
# https://blog.csdn.net/luanpeng825485697/article/details/79831703
# https://blog.csdn.net/qq_34139222/article/details/60322995

from functools import partial

from sklearn.model_selection import cross_val_score
from hyperopt import fmin, tpe, hp
import xgboost as xgb

# Objective function: takes a dict of raw draws from the search space,
# decodes them into hyperparameters, and returns the loss to minimize.
def GBM(argsDict):
    max_depth = argsDict["max_depth"] + 5
    n_estimators = argsDict["n_estimators"] * 5 + 50
    learning_rate = argsDict["learning_rate"] * 0.02 + 0.05
    subsample = argsDict["subsample"] * 0.1 + 0.7
    min_child_weight = argsDict["min_child_weight"] + 1
    global attr_train, label_train  # training features/labels, defined below

    gbm = xgb.XGBClassifier(nthread=4,                          # number of threads
                            max_depth=max_depth,                # maximum tree depth
                            n_estimators=n_estimators,          # number of trees
                            learning_rate=learning_rate,        # learning rate
                            subsample=subsample,                # row subsampling ratio
                            min_child_weight=min_child_weight,  # min sum of instance weight needed in a child
                            max_delta_step=10,                  # cap on each leaf's weight update
                            objective="binary:logistic")

    metric = cross_val_score(gbm, attr_train, label_train, cv=5, scoring="roc_auc").mean()
    print(metric)
    return -metric  # hyperopt minimizes, so negate the mean AUC
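
# GBM reads attr_train / label_train as globals; the original dataset is not
# shown. A minimal sketch to make the snippet run end to end, using sklearn's
# make_classification as a stand-in dataset (an assumption, not part of the
# original post):
from sklearn.datasets import make_classification

attr_train, label_train = make_classification(n_samples=1000, n_features=20,
                                               random_state=0)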

# Define the search space; each hp.randint draw is decoded inside GBM.

space = {"max_depth": hp.randint("max_depth", 15),               # [0,14] -> [5,19]
         "n_estimators": hp.randint("n_estimators", 10),         # [0,9]  -> [50,55,...,95]
         "learning_rate": hp.randint("learning_rate", 6),        # [0,5]  -> [0.05,0.07,...,0.15]
         "subsample": hp.randint("subsample", 4),                # [0,3]  -> [0.7,0.8,0.9,1.0]
         "min_child_weight": hp.randint("min_child_weight", 5),  # [0,4]  -> [1,...,5]
        }
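
# The randint-plus-arithmetic encoding above keeps the space compact. An
# equivalent, arguably more readable alternative (a sketch, not the original's
# approach) is hp.choice over the explicit grids; the objective would then
# receive the decoded values directly, so GBM's decoding arithmetic would no
# longer apply:
space_choice = {
    "max_depth": hp.choice("max_depth", list(range(5, 20))),
    "n_estimators": hp.choice("n_estimators", list(range(50, 100, 5))),
    "learning_rate": hp.choice("learning_rate", [0.05 + 0.02 * i for i in range(6)]),
    "subsample": hp.choice("subsample", [0.7, 0.8, 0.9, 1.0]),
    "min_child_weight": hp.choice("min_child_weight", list(range(1, 6))),
}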
algo = partial(tpe.suggest, n_startup_jobs=1)  # TPE search algorithm; the first n_startup_jobs trials are random start-up evaluations
best = fmin(GBM, space, algo=algo, max_evals=4)  # run the search algorithm over the defined space

print(best)
print(GBM(best))
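
# fmin returns the raw draws (e.g. {"max_depth": 7, ...}), which is why
# GBM(best) above works: it re-applies the same decoding. To see the actual
# hyperparameters, apply the mapping once more:
print("max_depth       :", best["max_depth"] + 5)
print("n_estimators    :", best["n_estimators"] * 5 + 50)
print("learning_rate   :", best["learning_rate"] * 0.02 + 0.05)
print("subsample       :", best["subsample"] * 0.1 + 0.7)
print("min_child_weight:", best["min_child_weight"] + 1)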