from sklearn.linear_model import LinearRegression
import numpy as np
import matplotlib.pyplot as plt
X = [[1],[4],[3]]  # input features X
y = [3,5,3]        # target values y
lr = LinearRegression().fit(X,y)
z = np.linspace(0,5,20)
plt.scatter(X,y,s=80)
plt.plot(z, lr.predict(z.reshape(-1,1)),c='k')  # reshape(-1,1) turns z into a column vector (n rows, 1 column)
plt.title('Straight Line')
plt.show()
print('\n\nThe fitted line is:')
print('==============')
print('y = {:.3f}x + {:.3f}'.format(lr.coef_[0], lr.intercept_))
print('==============')
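As a quick sanity check (not part of the original post), the same slope and intercept can be recovered with NumPy's closed-form degree-1 least-squares fit:

# Sanity check: fit the same three points with np.polyfit (degree-1 least squares)
import numpy as np
slope, intercept = np.polyfit([1, 4, 3], [3, 5, 3], deg=1)
print('np.polyfit: slope = {:.3f}, intercept = {:.3f}'.format(slope, intercept))

The printed values should match lr.coef_[0] and lr.intercept_ from the sklearn model above.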
from sklearn.datasets import make_regression
from sklearn.linear_model import LinearRegression
import numpy as np
import matplotlib.pyplot as plt
X,y=make_regression(n_samples=50,n_features=1,n_informative=1,noise=50,random_state=1)
lr = LinearRegression().fit(X,y)
z = np.linspace(-3,3,100)
plt.scatter(X,y,s=80)
plt.plot(z, lr.predict(z.reshape(-1,1)),c='k')  # reshape(-1,1): n rows, 1 column
plt.title('Straight Line')
plt.show()
print('\n\nThe fitted line is:')
print('==============')
print('y = {:.3f}x + {:.3f}'.format(lr.coef_[0], lr.intercept_))
print('==============')
print('Line coefficient (slope): {:.2f}'.format(lr.coef_[0]))
print('==============')
print('Line intercept: {:.2f}'.format(lr.intercept_))
print('==============')
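A small aside (not in the original post): scikit-learn estimators expect a 2-D feature array of shape (n_samples, n_features), which is why z has to be reshaped before predict. The shapes make this concrete, and lr.score reports the R^2 of the line on the noisy training data:

# z starts out 1-D; sklearn needs a 2-D column of features
print(z.shape)                 # (100,)
print(z.reshape(-1, 1).shape)  # (100, 1)
print('R^2 on the training data: {:.2f}'.format(lr.score(X, y)))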
1. Ordinary least squares
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression  # ordinary least squares
X,y=make_regression(n_samples=100,n_features=2,n_informative=2,random_state=38)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=8)
lr = LinearRegression().fit(X_train, y_train)
print("lr.coef_: {}".format(lr.coef_[:]))
print("lr.intercept_: {}".format(lr.intercept_))
print("训练数据集得分:{:.2f}".format(lr.score(X_train, y_train)))
print("测试数据集得分:{:.2f}".format(lr.score(X_test, y_test)))
2. Ridge regression
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.linear_model import Ridge  # ridge regression (L2 regularization)
X,y=make_regression(n_samples=100,n_features=2,n_informative=2,random_state=38)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=8)
lr = Ridge().fit(X_train, y_train)
print("lr.coef_: {}".format(lr.coef_[:]))
print("lr.intercept_: {}".format(lr.intercept_))
print("训练数据集得分:{:.2f}".format(lr.score(X_train, y_train)))
print("测试数据集得分:{:.2f}".format(lr.score(X_test, y_test)))
3. Lasso regression
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.linear_model import Lasso  # lasso regression (L1 regularization)
X,y=make_regression(n_samples=100,n_features=2,n_informative=2,random_state=38)
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=8)
lr = Lasso().fit(X_train, y_train)
print("lr.coef_: {}".format(lr.coef_[:]))
print("lr.intercept_: {}".format(lr.intercept_))
print("训练数据集得分:{:.2f}".format(lr.score(X_train, y_train)))
print("测试数据集得分:{:.2f}".format(lr.score(X_test, y_test)))
Authors: ChenBD, 小游园
Source: CSDN
Link: https://blog.csdn.net/s0302017/article/details/103946897