Implementing linear regression by hand in Python (univariate, first-degree)
Import packages
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
from sklearn.linear_model import LinearRegression
Create the data
X = np.linspace(2, 10, 20).reshape(-1, 1)
# f(x) = wx + b
y = np.random.randint(1, 6, size=1)*X + np.random.randint(-5, 5, size=1)
# add noise ("salt" the data)
y += np.random.randn(20, 1)*0.8
plt.scatter(X, y, color='red')
Fitting with the existing LinearRegression
lr = LinearRegression()
lr.fit(X, y)
w = lr.coef_[0, 0]
b = lr.intercept_[0]
print(w, b)
plt.scatter(X, y)
x = np.linspace(1, 11, 50)
plt.plot(x, w*x + b, color='green')
Implementing linear regression ourselves (simplified version)
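For a single sample, the least-squares cost and its partial derivatives with respect to w and b (exactly what the loss method below computes) are:

cost = (y - (wx + b))**2
∂cost/∂w = 2(y - (wx + b))·(-x)
∂cost/∂b = 2(y - (wx + b))·(-1)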
# Solve the univariate first-degree linear problem with gradient descent: w, b
class LinearModel(object):
    def __init__(self):
        self.w = np.random.randn(1)[0]
        self.b = np.random.randn(1)[0]

    # Mathematical model: express the relation between X and the target, f(x) = wx + b
    def model(self, x):
        return self.w*x + self.b

    # Least squares: cost for one sample plus its gradients
    def loss(self, x, y):
        cost = (y - self.model(x))**2
        # the gradient is the partial derivative; two unknowns to solve for: w, b
        gradient_w = 2*(y - self.model(x))*(-x)
        gradient_b = 2*(y - self.model(x))*(-1)
        return cost, gradient_w, gradient_b

    # Gradient descent: update w, b
    def gradient_descent(self, gradient_w, gradient_b, learning_rate=0.1):
        self.w -= gradient_w*learning_rate
        self.b -= gradient_b*learning_rate

    # Training
    def fit(self, X, y):
        count = 0
        tol = 0.0001
        last_w = self.w + 0.1
        last_b = self.b + 0.1
        length = len(X)
        while True:
            if count > 3000:  # stop after 3000 optimization steps
                break
            # stop once the slope and intercept reach the required precision
            if (abs(last_w - self.w) < tol) and (abs(last_b - self.b) < tol):
                break
            cost = 0
            gradient_w = 0
            gradient_b = 0
            # average the cost and gradients over all samples
            for i in range(length):
                cost_, gradient_w_, gradient_b_ = self.loss(X[i, 0], y[i, 0])
                cost += cost_/length
                gradient_w += gradient_w_/length
                gradient_b += gradient_b_/length
            # print('--------------------- step: %d, loss: %0.2f' % (count, cost))
            last_w = self.w
            last_b = self.b
            # update the intercept and slope
            self.gradient_descent(gradient_w, gradient_b)
            count += 1

    def result(self):
        return self.w, self.b
Fitting with our own linear regression implementation
lm = LinearModel()
lm.fit(X, y)
w_, b_ = lm.result()
plt.scatter(X, y, c='red')
plt.plot(x, w_*x + b_, color='green')
plt.plot(x, w*x + b, color='blue')
plt.title('Fit from the hand-written algorithm')
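As a quick sanity check, we can print both parameter pairs side by side; after convergence the hand-written w_ and b_ should be close to sklearn's w and b:

print('hand-written: w=%.4f, b=%.4f' % (w_, b_))
print('sklearn:      w=%.4f, b=%.4f' % (w, b))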
Univariate quadratic
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
from sklearn.linear_model import LinearRegression

# univariate quadratic:
# f(x) = w1*x**2 + w2*x + b
# which has the same form as a bivariate linear function:
# f(x1, x2) = w1*x1 + w2*x2 + b
X = np.linspace(0, 10, num=500).reshape(-1, 1)
X = np.concatenate([X**2, X], axis=1)
X.shape
# (500, 2)
w = np.random.randint(1, 10, size=2)
b = np.random.randint(-5, 5, size=1)
# matrix multiplication
y = X.dot(w) + b
plt.plot(X[:, 1], y, color='r')
plt.title('w1:%d, w2:%d, b:%d' % (w[0], w[1], b[0]))
Predicting with sklearn's built-in algorithm
lr = LinearRegression()
lr.fit(X, y)
print(lr.coef_, lr.intercept_)
plt.scatter(X[:, 1], y, marker='*')
x = np.linspace(-2, 12, 100)
plt.plot(x, lr.coef_[0]*x**2 + lr.coef_[1]*x + lr.intercept_, color='green')
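As an aside, sklearn can build the [x**2, x] feature matrix for us. A minimal sketch using PolynomialFeatures (note it orders columns by increasing degree, i.e. [x, x**2], so the coefficients come back in the reverse order of w1, w2 above):

from sklearn.preprocessing import PolynomialFeatures

x_raw = np.linspace(0, 10, num=500).reshape(-1, 1)
# degree-2 features without the constant column: [x, x**2]
X_poly = PolynomialFeatures(degree=2, include_bias=False).fit_transform(x_raw)
lr2 = LinearRegression()
lr2.fit(X_poly, y)
print(lr2.coef_, lr2.intercept_)  # expect [w2, w1] and b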
Hand-written linear regression for multiple features (multivariate equations)
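The original post breaks off here. Below is a minimal vectorized sketch of how the hand-written model might generalize to multiple features; the class name, full-batch vectorized gradients, and hyperparameters are assumptions of mine rather than the original author's code, and with unscaled features such as x**2 a small learning rate (or feature scaling) is needed for the updates to converge:

# assumed implementation sketch, following the same least-squares gradients as above
class MultiLinearModel(object):
    def __init__(self, n_features):
        self.w = np.random.randn(n_features)
        self.b = np.random.randn(1)[0]

    # f(X) = X.dot(w) + b
    def model(self, X):
        return X.dot(self.w) + self.b

    def fit(self, X, y, learning_rate=1e-4, max_iter=30000, tol=0.0001):
        length = len(X)
        for count in range(max_iter):
            error = self.model(X) - y               # shape (n_samples,)
            gradient_w = 2*X.T.dot(error)/length    # mean d(cost)/dw
            gradient_b = 2*error.mean()             # mean d(cost)/db
            self.w -= learning_rate*gradient_w
            self.b -= learning_rate*gradient_b
            # stop once the parameter updates fall below the tolerance
            if np.abs(learning_rate*gradient_w).max() < tol and abs(learning_rate*gradient_b) < tol:
                break

    def result(self):
        return self.w, self.b

With the quadratic data above, usage would be, e.g., lm = MultiLinearModel(2); lm.fit(X, y); print(lm.result()).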