实现线性逻辑回归代码
时间: 2024-06-01 20:06:27 浏览: 14
线性逻辑回归是一种经典的分类算法,其基本思想是根据已知样本数据,建立分类模型,对新数据进行分类。下面是一个简单的Python实现示例:
```
import numpy as np
import matplotlib.pyplot as plt
def sigmoid(z):
    """Logistic function: squash any real-valued input into (0, 1).

    Works element-wise on scalars or numpy arrays.
    """
    return np.reciprocal(1.0 + np.exp(-z))
def cost(theta, X, y):
    """Cross-entropy cost and its gradient for logistic regression.

    theta: (n+1,) parameter vector; X: (m, n+1) design matrix whose first
    column is the bias; y: (m,) labels in {0, 1}.  Returns the pair
    (J, grad) so it can be handed to scipy.optimize.minimize with jac=True.
    """
    m = len(y)
    probs = sigmoid(X.dot(theta))
    # Negative mean Bernoulli log-likelihood, split into the two label terms.
    pos_term = y.T.dot(np.log(probs))
    neg_term = (1 - y).T.dot(np.log(1 - probs))
    J = -1/m * (pos_term + neg_term)
    grad = 1/m * X.T.dot(probs - y)
    return J, grad
def predict(theta, X):
    """Return hard 0/1 class labels: 1 where P(y=1 | x) >= 0.5, else 0."""
    probabilities = sigmoid(X.dot(theta))
    labels = probabilities >= 0.5
    return labels.astype('int')
def plotDecisionBoundary(theta, X, y):
    """Overlay the p=0.5 decision boundary of a fitted logistic model.

    theta: (3,) fitted parameters; X: (m, 3) design matrix whose column 0
    is the bias and columns 1-2 are the two features; y is unused here but
    kept for interface compatibility.  Draws onto the current axes.
    """
    x1_min, x1_max = X[:, 1].min(), X[:, 1].max()
    x2_min, x2_max = X[:, 2].min(), X[:, 2].max()
    xx1, xx2 = np.meshgrid(np.linspace(x1_min, x1_max),
                           np.linspace(x2_min, x2_max))
    # Bug fix: np.ones needs an integer row count; the original passed the
    # shape tuple itself (xx1.ravel().shape), which raises TypeError.
    grid = np.c_[np.ones((xx1.ravel().shape[0], 1)), xx1.ravel(), xx2.ravel()]
    h = sigmoid(grid.dot(theta))
    h = h.reshape(xx1.shape)
    # The 0.5 probability contour is the (linear) decision boundary.
    plt.contour(xx1, xx2, h, [0.5], linewidths=1, colors='g')
# ---- Driver: load data, fit the model, report accuracy, visualize ----
# Hoisted from mid-script: imports belong at the top of the code they serve.
from scipy.optimize import minimize

# Each row of data.txt: feature1,feature2,label (label expected in {0, 1}).
data = np.loadtxt('data.txt', delimiter=',')
X = data[:, :-1]
y = data[:, -1]
m, n = X.shape

# Prepend the bias column of ones so theta[0] acts as the intercept.
X = np.concatenate([np.ones((m, 1)), X], axis=1)
initial_theta = np.zeros(n + 1)

# jac=True because cost returns the (J, grad) pair.
# (The original also evaluated cost once at initial_theta and discarded
# the result; that dead computation has been removed.)
res = minimize(fun=cost, x0=initial_theta, args=(X, y), method='TNC', jac=True)
theta = res.x

# Training-set accuracy of the fitted classifier.
p = predict(theta, X)
accuracy = np.mean(p == y) * 100
print('accuracy = {:.2f}%'.format(accuracy))

# Scatter the two classes, then overlay the fitted decision boundary.
plt.scatter(X[y==0][:, 1], X[y==0][:, 2], marker='o', color='b', label='y=0')
plt.scatter(X[y==1][:, 1], X[y==1][:, 2], marker='+', color='r', label='y=1')
plt.xlabel('Exam 1 score')
plt.ylabel('Exam 2 score')
plt.legend(loc='upper right')
plotDecisionBoundary(theta, X, y)
plt.show()
```
在这个示例中,我们使用了Numpy、Scipy和Matplotlib库。首先定义了Sigmoid函数和代价函数(同时返回代价值和梯度),然后使用scipy.optimize.minimize(TNC方法,jac=True)求解模型参数,最后使用训练好的模型进行预测并可视化决策边界。你可以通过更改数据集和参数来进一步理解该算法的实现。