Q&A

After training a random forest regression model in Python, how do I make predictions, and why do the predictions come out like this?

Author: admin 2021-06-09




from sklearn.datasets import load_boston
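# NOTE: load_boston was deprecated in scikit-learn 1.0 and removed in 1.2; this listing assumes an older release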
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestRegressor, ExtraTreesRegressor, GradientBoostingRegressor
from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error
import numpy as np


# Random forest regression


# 1 Prepare the data
# Load the Boston house-price dataset
boston = load_boston()
# print("boston:", boston)
# Inspect the dataset description
# print(boston.DESCR)   # 506 Boston house-price records, each with 13 numeric features and one target price
# Check the spread of the target
# print("Max price:", np.max(boston.target))    # 50
# print("Min price:", np.min(boston.target))    # 5
# print("Mean price:", np.mean(boston.target))  # 22.532806324110677

x = boston.data
y = boston.target
print("x.shape:", x.shape)
print("y.shape:", y.shape)
# 2 Split into training and test sets
# Randomly sample 25% for testing and 75% for training
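# random_state=33 fixes the shuffle seed so the split is reproducible across runs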
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25, random_state=33)
print("x_train.shape:", x_train.shape)
print("x_test.shape:", x_test.shape)
print("y_train.shape:", y_train.shape)
print("y_test.shape:", y_test.shape)
# 3 Standardize the training and test data
ss_x = StandardScaler()
x_train = ss_x.fit_transform(x_train)
x_test = ss_x.transform(x_test)
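# ss_x is fitted on the training split only and merely applied to the test split,
# so no test-set statistics leak into training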

ss_y = StandardScaler()
y_train = ss_y.fit_transform(y_train.reshape(-1, 1))
y_test = ss_y.transform(y_test.reshape(-1, 1))
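# reshape(-1, 1) is needed because StandardScaler expects a 2-D array, here of shape (n_samples, 1)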


# Random forest regression
rfr = RandomForestRegressor()
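# Default hyperparameters are used; n_estimators defaults to 100 from scikit-learn 0.22 (it was 10 before)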
# Train; ravel() flattens the (n, 1) target column so sklearn does not warn about its shape
rfr.fit(x_train, y_train.ravel())
# Predict on the test split and save the result
rfr_y_predict = rfr.predict(x_test)
# Predict on the full feature matrix
Y_predict = rfr.predict(x)
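# NOTE: x here is the raw, unscaled matrix, and the result stays in
# standardized-target units; see the discussion below the output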


# Evaluate the random forest regression model
print("Default score of the random forest regressor:", rfr.score(x_test, y_test))
print("R-squared of the random forest regressor:", r2_score(y_test, rfr_y_predict))
print("随机森林回归的均方误差为:", mean_squared_error(ss_y.inverse_transform(y_test),
                                          ss_y.inverse_transform(rfr_y_predict)))
print("随机森林回归的平均绝对误差为:", mean_absolute_error(ss_y.inverse_transform(y_test),
                                             ss_y.inverse_transform(rfr_y_predict)))
print(y)
print(Y_predict)

# Output
x.shape: (506, 13)
y.shape: (506,)
x_train.shape: (379, 13)
x_test.shape: (127, 13)
y_train.shape: (379,)
y_test.shape: (127,)
Default score of the random forest regressor: 0.8469322253577488
R-squared of the random forest regressor: 0.8469322253577488
Mean squared error of the random forest regressor: 11.869073401574813
Mean absolute error of the random forest regressor: 2.229212598425197
[24.  21.6 34.7 33.4 36.2 28.7 22.9 27.1 16.5 18.9 15.  18.9 21.7 20.4
 18.2 19.9 23.1 17.5 20.2 18.2 13.6 19.6 15.2 14.5 15.6 13.9 16.6 14.8
 18.4 21.  12.7 14.5 13.2 13.1 13.5 18.9 20.  21.  24.7 30.8 34.9 26.6
 25.3 24.7 21.2 19.3 20.  16.6 14.4 19.4 19.7 20.5 25.  23.4 18.9 35.4
 24.7 31.6 23.3 19.6 18.7 16.  22.2 25.  33.  23.5 19.4 22.  17.4 20.9
 24.2 21.7 22.8 23.4 24.1 21.4 20.  20.8 21.2 20.3 28.  23.9 24.8 22.9
 23.9 26.6 22.5 22.2 23.6 28.7 22.6 22.  22.9 25.  20.6 28.4 21.4 38.7
 43.8 33.2 27.5 26.5 18.6 19.3 20.1 19.5 19.5 20.4 19.8 19.4 21.7 22.8
 18.8 18.7 18.5 18.3 21.2 19.2 20.4 19.3 22.  20.3 20.5 17.3 18.8 21.4
 15.7 16.2 18.  14.3 19.2 19.6 23.  18.4 15.6 18.1 17.4 17.1 13.3 17.8
 14.  14.4 13.4 15.6 11.8 13.8 15.6 14.6 17.8 15.4 21.5 19.6 15.3 19.4
 17.  15.6 13.1 41.3 24.3 23.3 27.  50.  50.  50.  22.7 25.  50.  23.8
 23.8 22.3 17.4 19.1 23.1 23.6 22.6 29.4 23.2 24.6 29.9 37.2 39.8 36.2
 37.9 32.5 26.4 29.6 50.  32.  29.8 34.9 37.  30.5 36.4 31.1 29.1 50.
 33.3 30.3 34.6 34.9 32.9 24.1 42.3 48.5 50.  22.6 24.4 22.5 24.4 20.
 21.7 19.3 22.4 28.1 23.7 25.  23.3 28.7 21.5 23.  26.7 21.7 27.5 30.1
 44.8 50.  37.6 31.6 46.7 31.5 24.3 31.7 41.7 48.3 29.  24.  25.1 31.5
 23.7 23.3 22.  20.1 22.2 23.7 17.6 18.5 24.3 20.5 24.5 26.2 24.4 24.8
 29.6 42.8 21.9 20.9 44.  50.  36.  30.1 33.8 43.1 48.8 31.  36.5 22.8
 30.7 50.  43.5 20.7 21.1 25.2 24.4 35.2 32.4 32.  33.2 33.1 29.1 35.1
 45.4 35.4 46.  50.  32.2 22.  20.1 23.2 22.3 24.8 28.5 37.3 27.9 23.9
 21.7 28.6 27.1 20.3 22.5 29.  24.8 22.  26.4 33.1 36.1 28.4 33.4 28.2
 22.8 20.3 16.1 22.1 19.4 21.6 23.8 16.2 17.8 19.8 23.1 21.  23.8 23.1
 20.4 18.5 25.  24.6 23.  22.2 19.3 22.6 19.8 17.1 19.4 22.2 20.7 21.1
 19.5 18.5 20.6 19.  18.7 32.7 16.5 23.9 31.2 17.5 17.2 23.1 24.5 26.6
 22.9 24.1 18.6 30.1 18.2 20.6 17.8 21.7 22.7 22.6 25.  19.9 20.8 16.8
 21.9 27.5 21.9 23.1 50.  50.  50.  50.  50.  13.8 13.8 15.  13.9 13.3
 13.1 10.2 10.4 10.9 11.3 12.3  8.8  7.2 10.5  7.4 10.2 11.5 15.1 23.2
  9.7 13.8 12.7 13.1 12.5  8.5  5.   6.3  5.6  7.2 12.1  8.3  8.5  5.
 11.9 27.9 17.2 27.5 15.  17.2 17.9 16.3  7.   7.2  7.5 10.4  8.8  8.4
 16.7 14.2 20.8 13.4 11.7  8.3 10.2 10.9 11.   9.5 14.5 14.1 16.1 14.3
 11.7 13.4  9.6  8.7  8.4 12.8 10.5 17.1 18.4 15.4 10.8 11.8 14.9 12.6
 14.1 13.  13.4 15.2 16.1 17.8 14.9 14.1 12.7 13.5 14.9 20.  16.4 17.7
 19.5 20.2 21.4 19.9 19.  19.1 19.1 20.1 19.9 19.6 23.2 29.8 13.8 13.3
 16.7 12.  14.6 21.4 23.  23.7 25.  21.8 20.6 21.2 19.1 20.6 15.2  7.
  8.1 13.6 20.1 21.8 24.5 23.1 19.7 18.3 21.2 17.5 16.8 22.4 20.6 23.9
 22.  11.9]
[1.22397047 1.17989645 1.17989645 1.22246183 1.22246183 1.22246183
 1.18140509 1.18140509 1.18000421 1.18000421 1.18000421 1.18140509
 1.18140509 1.19993989 1.19993989 1.19993989 1.20845297 1.19530619
 1.19185786 1.19509067 1.19810796 1.19325874 1.19810796 1.20662104
 1.19519843 1.19325874 1.19993989 1.20026317 1.19519843 1.20651328
 1.19810796 1.19724588 1.19724588 1.19810796 1.18269822 1.17246098
 1.17246098 1.17246098 1.17106009 1.18884057 1.20640552 1.17989645
 1.17989645 1.17989645 1.17989645 1.17849556 1.17849556 1.17515499
 1.17515499 1.17849556 1.18884057 1.18884057 1.18884057 1.18884057
 1.18884057 1.22838865 1.22838865 1.22838865 1.18884057 1.18884057
 1.18884057 1.18743968 1.18884057 1.18884057 1.23140594 1.18884057
 1.18884057 1.18884057 1.18884057 1.18884057 1.17989645 1.17989645
 1.17989645 1.17849556 1.17989645 1.17989645 1.17989645 1.17989645
 1.17989645 1.17989645 1.18884057 1.18884057 1.18884057 1.18884057
 1.17989645 1.17989645 1.17989645 1.17989645 1.17246098 1.17246098
 1.17246098 1.17246098 1.18884057 1.18884057 1.18884057 1.17989645
 1.17989645 1.17989645 1.17989645 1.17989645 1.17246098 1.17246098
 1.16771952 1.17106009 1.17246098 1.17246098 1.17106009 1.17246098
 1.17246098 1.16771952 1.17246098 1.17246098 1.17246098 1.17106009
 1.17246098 1.17106009 1.17246098 1.17246098 1.17246098 1.17246098
 1.17246098 1.17246098 1.17246098 1.17246098 1.17246098 1.17106009
 1.17774124 1.16771952 1.17774124 1.19325874 1.17774124 1.19810796
 1.19993989 1.17774124 1.20662104 1.19993989 1.17774124 1.17774124
 1.16771952 1.19993989 1.16987473 1.18269822 1.16610311 1.14832264
 1.16610311 1.16610311 1.17353858 1.16610311 1.16610311 1.16610311
 1.17116785 1.18571551 1.18517671 1.17353858 1.18571551 1.15230977
 1.16610311 1.19810796 1.19724588 1.18571551 1.19810796 1.2026339
 1.18981041 1.19724588 1.17763348 1.17763348 1.18506895 1.18269822
 1.17763348 1.17763348 1.19810796 1.17763348 1.17246098 1.17246098
 1.17246098 1.17246098 1.17246098 1.17246098 1.17246098 1.17246098
 1.17246098 1.17246098 1.17246098 1.17246098 1.17246098 1.17246098
 1.17246098 1.18884057 1.18884057 1.18884057 1.18884057 1.18884057
 1.18884057 1.18884057 1.18884057 1.22849641 1.23140594 1.23140594
 1.23140594 1.23140594 1.23140594 1.23140594 1.23140594 1.18884057
 1.18884057 1.17246098 1.16771952 1.16771952 1.17246098 1.21017713
 1.17106009 1.21017713 1.17106009 1.17246098 1.20231062 1.17106009
 1.17246098 1.17246098 1.17246098 1.17246098 1.17774124 1.17774124
 1.19993989 1.19993989 1.21017713 1.23237579 1.21017713 1.21017713
 1.21017713 1.21017713 1.23237579 1.21017713 1.21114698 1.17774124
 1.17774124 1.17774124 1.19993989 1.19993989 1.18884057 1.18884057
 1.18884057 1.18884057 1.18884057 1.18884057 1.18743968 1.18743968
 1.22655672 1.18743968 1.18743968 1.18743968 1.18884057 1.18743968
 1.18884057 1.22655672 1.18884057 1.18884057 1.18884057 1.20888401
 1.20888401 1.20888401 1.20888401 1.20888401 1.20888401 1.20220286
 1.20888401 1.20414255 1.20241838 1.20888401 1.20888401 1.18884057
 1.22655672 1.18743968 1.18884057 1.18743968 1.18884057 1.18884057
 1.18884057 1.18884057 1.18884057 1.18743968 1.18884057 1.18884057
 1.18884057 1.22838865 1.18884057 1.23140594 1.23140594 1.18884057
 1.18884057 1.18884057 1.18884057 1.18884057 1.18884057 1.17989645
 1.17989645 1.17989645 1.17989645 1.17989645 1.23140594 1.23140594
 1.23140594 1.18884057 1.18884057 1.18884057 1.23140594 1.23140594
 1.23140594 1.23140594 1.17774124 1.17774124 1.17763348 1.19185786
 1.16771952 1.16771952 1.17774124 1.16771952 1.17774124 1.16771952
 1.17774124 1.17774124 1.17106009 1.17106009 1.21017713 1.18323702
 1.21017713 1.17106009 1.21017713 1.16771952 1.17989645 1.17989645
 1.17989645 1.18884057 1.18884057 1.17246098 1.17246098 1.17246098
 1.17246098 1.17246098 1.17246098 1.17246098 1.17246098 1.23140594
 1.21502635 1.18140509 1.18140509 1.17989645 1.17989645 1.18884057
 1.23140594 1.22838865 1.22838865 1.23140594 1.23140594 1.23140594
 1.23140594 1.23140594 1.16567207 1.16384014 1.159853   1.159853
 1.159853   1.16384014 1.16384014 1.159853   1.16384014 1.159853
 1.16384014 1.16567207 1.159853   1.159853   1.15058561 1.16125389
 1.15543483 1.1644867  1.16125389 1.16567207 1.16567207 1.16567207
 1.16567207 1.16567207 1.16567207 1.16567207 1.16567207 1.159853
 1.16567207 1.16567207 1.16567207 1.16567207 1.16567207 1.159853
 1.159853   1.159853   1.16567207 1.159853   1.16567207 1.159853
 1.159853   1.159853   1.16567207 1.16567207 1.16567207 1.16567207
 1.16567207 1.16567207 1.16567207 1.16567207 1.1644867  1.16567207
 1.159853   1.16567207 1.16567207 1.16567207 1.16567207 1.16567207
 1.16567207 1.16567207 1.16567207 1.16567207 1.16567207 1.16567207
 1.16567207 1.159853   1.16567207 1.159853   1.159853   1.16567207
 1.16567207 1.16567207 1.159853   1.16567207 1.159853   1.16567207
 1.159853   1.159853   1.16567207 1.16567207 1.16567207 1.16567207
 1.16567207 1.16567207 1.16567207 1.16567207 1.159853   1.16567207
 1.16567207 1.16567207 1.159853   1.16567207 1.16567207 1.159853
 1.26653585 1.159853   1.159853   1.159853   1.16567207 1.159853
 1.159853   1.159853   1.159853   1.159853   1.159853   1.16384014
 1.159853   1.159853   1.159853   1.17763348 1.16384014 1.159853
 1.16567207 1.16567207 1.159853   1.159853   1.16384014 1.159853
 1.159853   1.159853   1.159853   1.16567207 1.16567207 1.16567207
 1.159853   1.159853   1.159853   1.17763348 1.17763348 1.16384014
 1.159853   1.159853   1.17246098 1.17106009 1.17106009 1.17246098
 1.17246098 1.17106009 1.15080113 1.17106009 1.16987473 1.16771952
 1.16771952 1.17106009 1.17106009 1.17246098 1.17246098 1.17246098
 1.17246098 1.17246098]

Random forests do need hyperparameter tuning; they are not something you use straight out of the box. But the more immediate problem in the output above is a preprocessing mismatch: rfr was trained on standardized features and a standardized target, yet rfr.predict(x) is called on the raw, unscaled feature matrix, and the result is never inverse-transformed back to the original price scale. That is why every value in Y_predict hovers around 1.16-1.26 instead of looking like a house price. (Tree ensembles are actually insensitive to feature scaling, so standardization is not needed for a random forest in the first place; what breaks things is feeding the model inputs on a different scale than it was trained on.)
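A minimal sketch of the corrected whole-dataset prediction, reusing the rfr model and the ss_x / ss_y scalers fitted above (the intermediate variable names are illustrative):

# Apply the training-split scaling to the full feature matrix
x_scaled = ss_x.transform(x)
# Predict in standardized-target units, reshaped to 2-D for the scaler
y_pred_std = rfr.predict(x_scaled).reshape(-1, 1)
# Map the predictions back to the original house-price scale
Y_predict = ss_y.inverse_transform(y_pred_std)
print(Y_predict.ravel())

With this change the printed predictions land back in the 5-50 range of the original target and become directly comparable to y.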
