This post collects and walks through how to call and implement 10+ regression algorithms from the scikit-learn (sklearn) machine-learning library, including linear regression (LinearRegression), ridge regression (Ridge), Lasso regression (Lasso), elastic net (ElasticNet), K-nearest neighbors, SVM, and more. Use it as a reference if you need it:
##1.Import modules
import scipy
import mglearn
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn import preprocessing
#Method:1-4
from sklearn import linear_model
from sklearn.linear_model import LinearRegression,Ridge,Lasso,ElasticNet
#Method:5-10
from sklearn import neighbors,svm,tree,ensemble
#Method:11-12
from sklearn.ensemble import BaggingRegressor
from sklearn.tree import ExtraTreeRegressor
##2.Choose Regression Method
model = []
#2.1 LinearRegression
model_LinearRegression = linear_model.LinearRegression()
model.append(model_LinearRegression)
#2.2 RidgeRegression
model_Ridge = Ridge()
model.append(model_Ridge)
#2.3 Lasso
model_Lasso = Lasso()
model.append(model_Lasso)
#2.4 ElasticNet
model_ElasticNET = linear_model.ElasticNet(alpha=0.1,l1_ratio=0.5)
model.append(model_ElasticNET)
#2.5 KNN Regression
model_KNeighborsRegressor = neighbors.KNeighborsRegressor()
model.append(model_KNeighborsRegressor)
#2.6 SVM Regression
model_SVR = svm.SVR()
model.append(model_SVR)
#2.7 DecisionTreeRegression
model_DecisionTreeRegressor = tree.DecisionTreeRegressor()
model.append(model_DecisionTreeRegressor)
#2.8 RandomForestRegression
model_RandomForestRegressor = ensemble.RandomForestRegressor(n_estimators=20)
model.append(model_RandomForestRegressor)
#2.9 Adaboost Regression
model_AdaBoostRegressor = ensemble.AdaBoostRegressor(n_estimators=50)
model.append(model_AdaBoostRegressor)
#2.10 GBRT Regression
model_GradientBoostingRegressor = ensemble.GradientBoostingRegressor(n_estimators=100)
model.append(model_GradientBoostingRegressor)
#2.11 Bagging Regression
model_BaggingRegressor = BaggingRegressor()
model.append(model_BaggingRegressor)
#2.12 ExtraTree Regression
model_ExtraTreeRegressor = ExtraTreeRegressor()
model.append(model_ExtraTreeRegressor)
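One readability note: the list above is named model even though it holds many models, and the plots later can only label each run by the estimator's repr. If you prefer explicit labels, a possible variant (a sketch, not part of the original notes; named_models is a made-up name) keeps the estimators in a dict keyed by a readable name:
#Optional variant (sketch): same estimators, keyed by a readable name
named_models = {
    'LinearRegression': linear_model.LinearRegression(),
    'Ridge': Ridge(),
    'Lasso': Lasso(),
    'ElasticNet': linear_model.ElasticNet(alpha=0.1,l1_ratio=0.5),
    'KNN': neighbors.KNeighborsRegressor(),
    'SVR': svm.SVR(),
    'DecisionTree': tree.DecisionTreeRegressor(),
    'RandomForest': ensemble.RandomForestRegressor(n_estimators=20),
    'AdaBoost': ensemble.AdaBoostRegressor(n_estimators=50),
    'GBRT': ensemble.GradientBoostingRegressor(n_estimators=100),
    'Bagging': BaggingRegressor(),
    'ExtraTree': ExtraTreeRegressor(),
}
#for name,est in named_models.items(): fit est and use `name` in the plot title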
##3.Load data
path = '/home/Desktop/qqq.xlsx'
file = pd.read_excel(path,sheet_name='Sheet1')
data = file.copy()
#Data split
#X_data = data.iloc[:,1:]
#Min-max normalization: scale each feature to [0,1]
def regulait(df_X):
    df_scaler = preprocessing.MinMaxScaler()
    X_scaler = df_scaler.fit_transform(df_X)
    X_scaler = pd.DataFrame(X_scaler,columns=df_X.columns)
    return X_scaler
X_data = regulait(data.iloc[:,1:])
y_data = data.iloc[:,0]
X_train,X_test,y_train,y_test = train_test_split(X_data,y_data,test_size=0.2,random_state=0)
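One caveat about the code above: regulait() fits the MinMaxScaler on the whole dataset before the train/test split, so the scaling parameters already "see" the test rows. If you want to rule out that small leak, a minimal alternative (a sketch, not part of the original notes) is to split first and fit the scaler on the training portion only:
#Optional leakage-free variant (sketch): split first, then fit the scaler on the training rows only
X_raw = data.iloc[:,1:]
y_data = data.iloc[:,0]
X_train_raw,X_test_raw,y_train,y_test = train_test_split(X_raw,y_data,test_size=0.2,random_state=0)
scaler = preprocessing.MinMaxScaler().fit(X_train_raw)   #min/max learned from the training data only
X_train = pd.DataFrame(scaler.transform(X_train_raw),columns=X_raw.columns)
X_test = pd.DataFrame(scaler.transform(X_test_raw),columns=X_raw.columns)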
##4.Regression
def try_different_method(model):
    model.fit(X_train,y_train)
    score = model.score(X_test, y_test)   #R^2 on the test set
    result = model.predict(X_test)
    plt.figure(figsize=(10,8),dpi=200)
    plt.plot(np.arange(len(result)),y_test,'go-',label='true value')
    plt.plot(np.arange(len(result)),result,'ro-',label='predict value')
    plt.title("Model Score of {} is: {:.2f}".format(model,score))
    #Get the current axes and set the spine widths
    ax = plt.gca()
    ax.spines['bottom'].set_linewidth(1)
    ax.spines['left'].set_linewidth(1)
    ax.spines['right'].set_linewidth(1)
    ax.spines['top'].set_linewidth(1)
    plt.grid(True)
    plt.legend()
    plt.show()
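For regressors, model.score() returns the coefficient of determination R² on the test set, so the number in each plot title is an R² value. If you also want error metrics, a small add-on (a sketch, not in the original notes; report_metrics is a made-up helper name) could compute MSE and MAE alongside it:
#Optional metrics helper (sketch): R^2 from score(), plus MSE and MAE from sklearn.metrics
from sklearn.metrics import mean_squared_error,mean_absolute_error
def report_metrics(model,X_test,y_test):
    pred = model.predict(X_test)   #predictions on the held-out data
    print("R^2: {:.3f}  MSE: {:.3f}  MAE: {:.3f}".format(
        model.score(X_test,y_test),
        mean_squared_error(y_test,pred),
        mean_absolute_error(y_test,pred)))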
##5.Method call
for method in model:
    try_different_method(method)
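The loop above shows one plot per model; to compare all of them at a glance, a short extension (a sketch, not from the original notes; collect_scores is a made-up helper name) is to gather each model's test-set R² into a DataFrame and sort it:
#Optional comparison table (sketch): collect each model's test-set R^2 and sort descending
def collect_scores(models,X_train,X_test,y_train,y_test):
    rows = []
    for est in models:
        est.fit(X_train,y_train)
        rows.append({'model':type(est).__name__,'R2':est.score(X_test,y_test)})
    return pd.DataFrame(rows).sort_values('R2',ascending=False)
#print(collect_scores(model,X_train,X_test,y_train,y_test))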
Special note: this post is part of my Python study notes and is not written for profit. Typing it all up by hand is not easy, so if these notes are of any help to you, a like or a bookmark would be greatly appreciated! If any part of it constitutes infringement, please contact the author for removal. Thank you for your cooperation!