import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

from sklearn.linear_model import Ridge, RidgeCV
from statsmodels.stats.outliers_influence import variance_inflation_factor

sns.set_theme()  # set the plotting style
%config InlineBackend.figure_format = 'retina'  # sharper inline figures

df = pd.read_excel('D:/ml_data/auto-mpg.xlsx')  # load the data
# print(df)

# Standardize every column (zero mean, unit variance) so the ridge penalty
# treats all predictors on the same scale
df_scaled = (df - df.mean()) / df.std()

xs = df_scaled.iloc[:, 1:]  # predictors
ys = df_scaled.iloc[:, 0]   # response (mpg, the first column)

# Variance inflation factors to check for multicollinearity among the predictors
X = xs
vif = pd.DataFrame()
vif["VIF Factor"] = [variance_inflation_factor(X.values, i) for i in range(X.shape[1])]
vif["features"] = X.columns

print(vif.round(1))
VIF Factor features
0 10.7 Cylinders
1 21.8 Displacement
2 9.9 Horsepower
3 10.8 Weight
4 2.6 Acceleration
5 1.2 Model Year
6 1.8 Origin
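Several predictors (Cylinders, Displacement, Weight, Horsepower) have VIF values around or above 10, the usual threshold for serious multicollinearity, which is what motivates ridge regression here. A single VIF can be sanity-checked by hand, since VIF_j = 1 / (1 - R_j^2), where R_j^2 comes from regressing predictor j on the remaining predictors. A minimal sketch, assuming the same xs DataFrame as above (the LinearRegression import and the index j = 1 for Displacement are illustrative, not part of the original code):

from sklearn.linear_model import LinearRegression

j = 1  # index of 'Displacement' in xs, going by the table above
others = xs.drop(columns=[xs.columns[j]])
r2 = LinearRegression().fit(others, xs.iloc[:, j]).score(others, xs.iloc[:, j])
print(round(1.0 / (1.0 - r2), 1))  # should reproduce the 21.8 reported for Displacement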
# Trace how each ridge coefficient shrinks as the regularization strength alpha grows
coef = []
alphas = np.linspace(0.1, 300, 300)

for a in alphas:
    ridgereg = Ridge(alpha=a)
    ridgereg.fit(xs, ys)
    coef.append(ridgereg.coef_)

fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(alphas, coef)
ax.set_xlabel('Alpha (Regularization Parameter)')
ax.set_ylabel('Beta (Predictor Coefficients)')
ax.set_title('Ridge Coefficients vs Regularization Parameters')
ax.axis('tight')
ax.legend(list(xs.columns), loc='best')
plt.savefig("Ridge.png", dpi=600, bbox_inches='tight')
plt.show()
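The plot shows every coefficient being pulled toward zero as alpha increases. The same path can also be inspected numerically; a small sketch, not in the original post, that wraps the stored coefficients in a DataFrame indexed by alpha:

coef_path = pd.DataFrame(coef, index=alphas, columns=xs.columns)
print(coef_path.iloc[[0, 99, -1]].round(3))  # coefficients at a small, middle, and large alpha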
# Let cross-validation pick the regularization strength from the same grid
regr_cv = RidgeCV(alphas=alphas)
model_cv = regr_cv.fit(xs, ys)
print(model_cv.alpha_)
2.106020066889632
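RidgeCV refits on the full data at the selected alpha, so the shrunken coefficients can be read directly off the fitted object; a minimal sketch continuing with model_cv and xs from above:

print(pd.Series(model_cv.coef_, index=xs.columns).round(3))  # coefficients at alpha ≈ 2.11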