Three commonly used linear regression methods in Python

from sklearn.linear_model import LinearRegression
import numpy as np
import matplotlib.pyplot as plt
X = [[1],[4],[3]]  # input X
y = [3,5,3]        # input y
lr = LinearRegression().fit(X,y)
z = np.linspace(0,5,20) 
plt.scatter(X,y,s=80)
plt.plot(z, lr.predict(z.reshape(-1,1)),c='k')  # reshape turns z into a column vector (n rows, 1 column)
plt.title('Straight Line')
plt.show()
print('\n\nThe line equation is:')
print('==============')
print('y = {:.3f} x + {:.3f}'.format(lr.coef_[0], lr.intercept_))
print('==============')
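
For a quick check, the fitted model can also predict new points directly. A minimal sketch, assuming the lr object fitted above is still in scope (the value x = 2 is just an illustrative input):

new_x = np.array([[2]])   # scikit-learn expects a 2D array: one sample, one feature
print('Predicted y for x = 2: {:.3f}'.format(lr.predict(new_x)[0]))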

from sklearn.datasets import make_regression
from sklearn.linear_model import LinearRegression
import numpy as np
import matplotlib.pyplot as plt

X,y=make_regression(n_samples=50,n_features=1,n_informative=1,noise=50,random_state=1)
lr = LinearRegression().fit(X,y)
z = np.linspace(-3,3,100) 
plt.scatter(X,y,s=80)
plt.plot(z, lr.predict(z.reshape(-1,1)),c='k')  # reshape z into a column vector (n rows, 1 column)
plt.title('Straight Line')
plt.show()
print('\n\nThe line equation is:')
print('==============')
print('y = {:.3f} x + {:.3f}'.format(lr.coef_[0], lr.intercept_))
print('==============')
print('The slope of the line is: {:.2f}'.format(lr.coef_[0]))
print('==============')
print('The intercept of the line is: {:.2f}'.format(lr.intercept_))
print('==============')
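
Because the synthetic data is generated with noise=50, the line cannot pass through every point. A quick way to quantify the fit is the R^2 value returned by score(); a small sketch, assuming X, y, and lr from the code above are still in scope:

print('R^2 of the line on the noisy data: {:.2f}'.format(lr.score(X, y)))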

1. Ordinary least squares

from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression  # ordinary least squares
X,y=make_regression(n_samples=100,n_features=2,n_informative=2,random_state=38)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=8)
lr = LinearRegression().fit(X_train, y_train)

print("lr.coef_: {}".format(lr.coef_[:]))
print("lr.intercept_: {}".format(lr.intercept_))

print("训练数据集得分:{:.2f}".format(lr.score(X_train, y_train)))
print("测试数据集得分:{:.2f}".format(lr.score(X_test, y_test)))

2. Ridge regression

from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.linear_model import Ridge  # ridge regression
X,y=make_regression(n_samples=100,n_features=2,n_informative=2,random_state=38)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=8)
lr = Ridge().fit(X_train, y_train)

print("lr.coef_: {}".format(lr.coef_[:]))
print("lr.intercept_: {}".format(lr.intercept_))

print("训练数据集得分:{:.2f}".format(lr.score(X_train, y_train)))
print("测试数据集得分:{:.2f}".format(lr.score(X_test, y_test)))

3. Lasso regression

from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.linear_model import Lasso  # lasso regression
X,y=make_regression(n_samples=100,n_features=2,n_informative=2,random_state=38)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=8)
lr = Lasso().fit(X_train, y_train)

print("lr.coef_: {}".format(lr.coef_[:]))
print("lr.intercept_: {}".format(lr.intercept_))

print("训练数据集得分:{:.2f}".format(lr.score(X_train, y_train)))
print("测试数据集得分:{:.2f}".format(lr.score(X_test, y_test)))

Author: ChenBD
