import matplotlib.pyplot as plt
import numpy as np
from sklearn import ensemble, datasets, model_selection
def load_data_regression():
    # Load the diabetes dataset and split it 70/30 into train/test sets
    diabetes = datasets.load_diabetes()
    return model_selection.train_test_split(diabetes.data, diabetes.target,
                                            test_size=0.3, random_state=0)
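A quick sanity check on the split (the diabetes dataset ships with 442 samples and 10 features, so a 70/30 split should give roughly 309/133 rows):

X_train, X_test, Y_train, Y_test = load_data_regression()
print(X_train.shape, X_test.shape)  # expected: (309, 10) (133, 10)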
def test_GradientBoostingRegression(*data):
    # Fit a GradientBoostingRegressor with default hyperparameters
    # and report its score on both splits
    X_train, X_test, Y_train, Y_test = data
    regr = ensemble.GradientBoostingRegressor()
    regr.fit(X_train, Y_train)
    print("Training score:%f" % regr.score(X_train, Y_train))
    print("Testing score:%f" % regr.score(X_test, Y_test))
X_train,X_test,Y_train,Y_test = load_data_regression()
test_GradientBoostingRegression(X_train,X_test,Y_train,Y_test)
Training score:0.876188
Testing score:0.275905
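Note that score here is the coefficient of determination R², which can even be negative for very poor fits. A minimal equivalent check, reusing the split above:

from sklearn.metrics import r2_score
regr = ensemble.GradientBoostingRegressor().fit(X_train, Y_train)
# regr.score(X_test, Y_test) is exactly r2_score on the predictions
print(r2_score(Y_test, regr.predict(X_test)))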
Examine how the number of individual regression trees affects GBRT
def test_GradientBoostingRegression_estimators_num(*data):
    # Sweep the number of boosting stages and plot train/test scores
    X_train, X_test, Y_train, Y_test = data
    nums = np.arange(1, 100, step=2)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    train_scores = []
    test_scores = []
    for num in nums:
        clf = ensemble.GradientBoostingRegressor(n_estimators=num)
        clf.fit(X_train, Y_train)
        train_scores.append(clf.score(X_train, Y_train))
        test_scores.append(clf.score(X_test, Y_test))
    ax.plot(nums, train_scores, label="training score")
    ax.plot(nums, test_scores, label="testing score")
    ax.set_xlabel("estimators num")
    ax.set_ylabel("score")
    ax.legend(loc="lower right")
    ax.set_title("GradientBoostingRegression")
    plt.show()
X_train,X_test,Y_train,Y_test = load_data_regression()
test_GradientBoostingRegression_estimators_num(X_train,X_test,Y_train,Y_test)
[Figure: training vs. testing score as the number of estimators grows]
Analysis of the results:
- As the number of individual regression trees grows, GBRT's performance on the training set keeps improving.
- On the test set, however, the score first rises quickly and then slowly declines, so the ensemble size is worth tuning; see the staged_predict sketch below.
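Rather than refitting one model per candidate size as above, a single fit plus staged_predict can score every intermediate ensemble. A minimal sketch, reusing the split above (the n_estimators=200 cap is an arbitrary choice):

from sklearn.metrics import r2_score
regr = ensemble.GradientBoostingRegressor(n_estimators=200)
regr.fit(X_train, Y_train)
# staged_predict yields the test predictions after each boosting stage
stage_scores = [r2_score(Y_test, pred) for pred in regr.staged_predict(X_test)]
best = int(np.argmax(stage_scores)) + 1
print("best number of trees: %d (test R^2=%.3f)" % (best, max(stage_scores)))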
Examine how the maximum depth of the individual regression trees affects the ensemble regressor's predictive performance
def test_GradientBoostingRegression_max_depth(*data):
    X_train, X_test, Y_train, Y_test = data
    maxdepths = np.arange(1, 20 + 1)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    train_scores = []
    test_scores = []
    for maxdepth in maxdepths:
        clf = ensemble.GradientBoostingRegressor(max_depth=maxdepth, max_leaf_nodes=None)
        clf.fit(X_train, Y_train)
        train_scores.append(clf.score(X_train, Y_train))
        test_scores.append(clf.score(X_test, Y_test))
    ax.plot(maxdepths, train_scores, label="Training score")
    ax.plot(maxdepths, test_scores, label="Testing score")
    ax.set_xlabel("max_depth")
    ax.set_ylabel("score")
    ax.legend(loc="lower right")
    ax.set_ylim(-1, 1.05)
    ax.set_title("GradientBoostingRegression")
    plt.show()
X_train,X_test,Y_train,Y_test = load_data_regression()
test_GradientBoostingRegression_max_depth(X_train,X_test,Y_train,Y_test)
[Figure: training vs. testing score as max_depth grows]
Analysis of the results:
- As the maximum depth of the individual regression trees grows, the fit on the training set keeps improving while the fit on the test set gets worse and worse: the model overfits. One way to pick the depth is a cross-validated search, sketched below.
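Instead of eyeballing the curves, max_depth can be chosen by cross-validated grid search. A minimal sketch, reusing the split above (the candidate depths are an arbitrary choice):

param_grid = {"max_depth": [1, 2, 3, 5, 8]}
search = model_selection.GridSearchCV(
    ensemble.GradientBoostingRegressor(), param_grid, cv=5)
search.fit(X_train, Y_train)
print(search.best_params_, search.best_score_)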
Examine how the learning rate affects GBRT's predictive performance
def test_GradientBoostingRegression_learning_rate(*data):
    X_train, X_test, Y_train, Y_test = data
    learning_rates = np.linspace(0.01, 1.0)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    train_scores = []
    test_scores = []
    for learning_rate in learning_rates:
        clf = ensemble.GradientBoostingRegressor(learning_rate=learning_rate)
        clf.fit(X_train, Y_train)
        train_scores.append(clf.score(X_train, Y_train))
        test_scores.append(clf.score(X_test, Y_test))
    ax.plot(learning_rates, train_scores, label="Training score")
    ax.plot(learning_rates, test_scores, label="Testing score")
    ax.set_xlabel("learning_rate")
    ax.set_ylabel("score")
    ax.legend(loc="best")
    ax.set_ylim(-1, 1.05)
    ax.set_title("GradientBoostingRegression_learning_rate")
    plt.show()
X_train,X_test,Y_train,Y_test = load_data_regression()
test_GradientBoostingRegression_learning_rate(X_train,X_test,Y_train,Y_test)
[Figure: training vs. testing score as learning_rate grows]
Analysis of the results:
- GBRT's test-set score keeps dropping as the learning rate grows; its trade-off against n_estimators is sketched below.
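learning_rate trades off against n_estimators: a smaller learning rate usually needs more trees to reach a comparable fit. A minimal sketch comparing two such pairings, reusing the split above (the pairs are an arbitrary choice):

for lr, n in [(0.5, 20), (0.05, 200)]:
    regr = ensemble.GradientBoostingRegressor(learning_rate=lr, n_estimators=n)
    regr.fit(X_train, Y_train)
    print("learning_rate=%.2f, n_estimators=%d -> test R^2=%.3f"
          % (lr, n, regr.score(X_test, Y_test)))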
Examine the effect of subsample
def test_GradientBoostingRegression_subsample(*data):
    X_train, X_test, Y_train, Y_test = data
    subsamples = np.linspace(0.01, 1.0)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    train_scores = []
    test_scores = []
    for subsample in subsamples:
        clf = ensemble.GradientBoostingRegressor(subsample=subsample, n_estimators=100)
        clf.fit(X_train, Y_train)
        train_scores.append(clf.score(X_train, Y_train))
        test_scores.append(clf.score(X_test, Y_test))
    ax.plot(subsamples, train_scores, label="Training score")
    ax.plot(subsamples, test_scores, label="Testing score")
    ax.set_xlabel("subsample")
    ax.set_ylabel("score")
    ax.legend(loc="best")
    ax.set_ylim(-1, 1.05)
    ax.set_title("GradientBoostingRegression_subsample")
    plt.show()
X_train,X_test,Y_train,Y_test = load_data_regression()
test_GradientBoostingRegression_subsample(X_train,X_test,Y_train,Y_test)
[Figure: training vs. testing score as subsample grows]
Analysis of the results:
- In this problem subsample has little effect on GBRT's test-set predictions; it mainly changes how closely GBRT fits the training data. A side benefit of subsample < 1.0 is sketched below.
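Setting subsample below 1.0 turns the method into stochastic gradient boosting, which also makes an out-of-bag estimate available through the oob_improvement_ attribute. A minimal sketch, reusing the split above:

regr = ensemble.GradientBoostingRegressor(subsample=0.8, n_estimators=100)
regr.fit(X_train, Y_train)
# oob_improvement_[i] is the OOB loss improvement contributed by stage i
# (only defined when subsample < 1.0)
print(regr.oob_improvement_[:5])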
Examine the effect of the max_features parameter
def test_GradientBoostingRegression_max_features(*data):
    X_train, X_test, Y_train, Y_test = data
    max_features = np.linspace(0.01, 1.0)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    train_scores = []
    test_scores = []
    for features in max_features:
        clf = ensemble.GradientBoostingRegressor(max_features=features, n_estimators=100)
        clf.fit(X_train, Y_train)
        train_scores.append(clf.score(X_train, Y_train))
        test_scores.append(clf.score(X_test, Y_test))
    ax.plot(max_features, train_scores, label="Training score")
    ax.plot(max_features, test_scores, label="Testing score")
    ax.set_xlabel("max_features")
    ax.set_ylabel("score")
    ax.legend(loc="best")
    ax.set_ylim(0, 1.05)
    ax.set_title("GradientBoostingRegression_max_features")
    plt.show()
X_train,X_test,Y_train,Y_test = load_data_regression()
test_GradientBoostingRegression_max_features(X_train,X_test,Y_train,Y_test)
[Figure: training vs. testing score as max_features grows]
Analysis of the results:
- GBRT is not very sensitive to how the feature subset is chosen; a usage example with the string options follows.
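Besides fractions, max_features also accepts the strings "sqrt" and "log2", which pick the subset size from the feature count. A minimal usage example, reusing the split above:

regr = ensemble.GradientBoostingRegressor(max_features="sqrt")
regr.fit(X_train, Y_train)
print(regr.score(X_test, Y_test))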
Examine the effect of the loss function
def test_GradientBoostingRegression_loss(*data):
    X_train, X_test, Y_train, Y_test = data
    fig = plt.figure(figsize=(15, 15))
    nums = np.arange(1, 200, step=2)
    # ls: squared loss
    # lad: absolute loss
    # huber: a weighted combination of the two above
    ax = fig.add_subplot(2, 1, 1)
    alphas = np.linspace(0.01, 1.0, endpoint=False, num=5)
    for alpha in alphas:
        test_scores = []
        train_scores = []
        for num in nums:
            regr = ensemble.GradientBoostingRegressor(n_estimators=num, loss="huber", alpha=alpha)
            regr.fit(X_train, Y_train)
            train_scores.append(regr.score(X_train, Y_train))
            test_scores.append(regr.score(X_test, Y_test))
        ax.plot(nums, train_scores, label="Training score:alpha=%f" % alpha)
        ax.plot(nums, test_scores, label="Testing score:alpha=%f" % alpha)
    ax.set_xlabel("estimator num")
    ax.set_ylabel("score")
    ax.legend(loc="best", framealpha=0.4)
    ax.set_ylim(0, 1.05)
    ax.set_title("loss=huber")
    ### plot ls and lad ###
    ax = fig.add_subplot(2, 1, 2)
    for loss in ["ls", "lad"]:
        test_scores = []
        train_scores = []
        for num in nums:
            regr = ensemble.GradientBoostingRegressor(n_estimators=num, loss=loss)
            regr.fit(X_train, Y_train)
            train_scores.append(regr.score(X_train, Y_train))
            test_scores.append(regr.score(X_test, Y_test))
        ax.plot(nums, train_scores, label="Training score:loss=%s" % loss)
        ax.plot(nums, test_scores, label="Testing score:loss=%s" % loss)
    ax.set_xlabel("estimator num")
    ax.set_ylabel("score")
    ax.legend(loc="best", framealpha=0.4)
    ax.set_ylim(0, 1.05)
    ax.set_title("loss=ls,lad")
    plt.suptitle("GradientBoostingRegressor")
    plt.show()
X_train, X_test, Y_train, Y_test = load_data_regression()
test_GradientBoostingRegression_loss(X_train, X_test, Y_train, Y_test)
[Figure: huber (varying alpha) and ls/lad training vs. testing scores as the estimator count grows]
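Note that "ls" and "lad" are old scikit-learn spellings: from version 1.0 they are called "squared_error" and "absolute_error" (the old names were removed in 1.2), while "huber" is unchanged. A minimal equivalent under the new names, reusing the split above:

# scikit-learn >= 1.2 spellings of the same losses
for loss in ["squared_error", "absolute_error", "huber"]:
    regr = ensemble.GradientBoostingRegressor(loss=loss)
    regr.fit(X_train, Y_train)
    print("%s -> test R^2=%.3f" % (loss, regr.score(X_test, Y_test)))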