二分类线性分类器python实现(一)回归(Regression)与最小二乘法(Least squares)
用回归方法解决分类问题
Python代码
首先生成数据集
import numpy as np
import matplotlib.pyplot as plt
import pickle

# Build a two-class training set: two unit-variance Gaussian clusters
# in the plane, then persist it to disk for the training script.
N = 1000  # samples per class

# Class 1: unit Gaussian centered at (10, 10), labeled 1.
x_c1 = np.random.randn(N, 2)
x_c1 = np.add(x_c1, [10, 10])
y_c1 = np.ones((N, 1), dtype=np.double)

# Class 2: unit Gaussian centered at (2, 5), labeled 0.
x_c2 = np.random.randn(N, 2)
x_c2 = np.add(x_c2, [2, 5])
y_c2 = np.zeros((N, 1), dtype=np.double)

# Augment each sample with a trailing constant 1 so the bias term folds
# into the weight vector: x -> [x1, x2, 1].
ex_c1 = np.concatenate((x_c1, np.ones((N, 1))), 1)
ex_c2 = np.concatenate((x_c2, np.ones((N, 1))), 1)

# Stack both classes into one design matrix (2N, 3) and label vector (2N, 1).
data_x = np.concatenate((ex_c1, ex_c2), 0)
data_y = np.concatenate((y_c1, y_c2), 0)

# Scatter-plot the two classes as a visual sanity check.
x1 = x_c1[:, 0].T
y1 = x_c1[:, 1].T
x2 = x_c2[:, 0].T
y2 = x_c2[:, 1].T
plt.plot(x1, y1, "bo", markersize=2)
plt.plot(x2, y2, "r*", markersize=2)
plt.show()

# Persist the dataset; 'with' guarantees the files are closed even if
# pickling raises (the original left them open on error).
with open('data_x.pkl', 'wb') as pickle_file:
    pickle.dump(data_x, pickle_file)
with open('data_y.pkl', 'wb') as pickle_file2:
    pickle.dump(data_y, pickle_file2)
然后训练模型
from scipy.optimize import minimize
import numpy as np
import pickle
import matplotlib.pyplot as plt

# Reload the dataset produced by the generation script.
# NOTE(review): pickle.load executes arbitrary code on malicious input —
# only ever load these files from a trusted source.
with open('data_x.pkl', 'rb') as pickle_file:
    data_x = pickle.load(pickle_file)
with open('data_y.pkl', 'rb') as pickle_file2:
    data_y = pickle.load(pickle_file2)

# Total number of training samples across both classes.
N = np.size(data_y)
def fun(beta):
    """Negative log-likelihood of logistic regression over the training set.

    Objective minimized by scipy.optimize.minimize; reads the module-level
    globals ``data_x`` (augmented samples), ``data_y`` (0/1 labels) and ``N``.

    Parameters
    ----------
    beta : array-like, shape (3,)
        Augmented weight vector [w1, w2, b].

    Returns
    -------
    Scalar (shape-(1,) array): sum_j( -y_j * <beta, x_j> + log(1 + exp(<beta, x_j>)) ).
    """
    # Renamed from 'sum' — the original shadowed the builtin.
    total = 0
    for j in range(N):
        # Hoisted: the original evaluated np.dot(beta, data_x[j].T) twice.
        z = np.dot(beta, data_x[j].T)
        # logaddexp(0, z) == log(1 + exp(z)) but does not overflow for large z.
        total += -data_y[j] * z + np.logaddexp(0.0, z)
    return total
def fun_jac(beta):
    """Gradient of the logistic-regression negative log-likelihood.

    Reads the module-level globals ``data_x``, ``data_y`` and ``N``.

    Parameters
    ----------
    beta : np.ndarray, shape (3,)
        Augmented weight vector.

    Returns
    -------
    np.ndarray, same shape as beta
        -sum_j x_j * (y_j - p1_j), where p1_j = sigmoid(<beta, x_j>).
    """
    jac = np.zeros(np.shape(beta), dtype=np.double)
    p1 = np.zeros(N, dtype=np.double)
    for j in range(N):
        # Hoisted: the original computed np.exp(np.dot(...)) twice per step.
        # NOTE(review): exp can still overflow for large <beta, x_j>; the
        # original had the same behavior, so it is preserved here.
        e = np.exp(np.dot(beta, data_x[j].T))
        p1[j] = e / (1 + e)
        jac = jac - (data_x[j]) * (data_y[j] - p1[j])
    return jac
def fun_hess(beta):
hess = np.zeros((np.size(beta), np.size(beta)), dtype=np.double)
p1 = np.zeros(N, dtype=np.double)
for j in range(N):

版权声明:本站内容均来自互联网,仅供演示用,请勿用于商业和其他非法用途。如果侵犯了您的权益请与我们联系QQ:729038198,我们将在24小时内删除。