Softmax Regression Code
Sure, here is example code that uses softmax regression for multi-class classification:
```python
import numpy as np


def softmax(z):
    # Subtract the row-wise max before exponentiating for numerical stability
    exps = np.exp(z - np.max(z, axis=1, keepdims=True))
    return exps / np.sum(exps, axis=1, keepdims=True)


class SoftmaxRegression:
    def __init__(self, num_classes, num_features, learning_rate=0.01, num_iterations=1000):
        self.num_classes = num_classes
        self.num_features = num_features
        self.learning_rate = learning_rate
        self.num_iterations = num_iterations
        self.weights = np.zeros((self.num_features, self.num_classes))
        self.bias = np.zeros((1, self.num_classes))

    def train(self, X, y):
        # Accept integer class labels and convert them to one-hot vectors,
        # since the gradient below is written in terms of one-hot targets
        if y.ndim == 1:
            y = np.eye(self.num_classes)[y]
        for iteration in range(self.num_iterations):
            scores = np.dot(X, self.weights) + self.bias
            probabilities = softmax(scores)
            # Gradient of the average cross-entropy loss
            dW = (1 / X.shape[0]) * np.dot(X.T, (probabilities - y))
            db = (1 / X.shape[0]) * np.sum(probabilities - y, axis=0)
            # Gradient descent update of weights and bias
            self.weights -= self.learning_rate * dW
            self.bias -= self.learning_rate * db

    def predict(self, X):
        scores = np.dot(X, self.weights) + self.bias
        probabilities = softmax(scores)
        # Return the index of the most probable class for each sample
        return np.argmax(probabilities, axis=1)
```
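The `dW` and `db` expressions in `train` are the gradients of the average cross-entropy loss. If you want to monitor convergence, you can compute that loss during training; the helper below is a minimal sketch (the `cross_entropy_loss` function is not part of the original code) and assumes `y_one_hot` is already one-hot encoded:

```python
import numpy as np


def cross_entropy_loss(probabilities, y_one_hot, eps=1e-12):
    # Average negative log-likelihood of the true classes;
    # eps keeps log() away from zero probabilities
    clipped = np.clip(probabilities, eps, 1.0)
    return -np.mean(np.sum(y_one_hot * np.log(clipped), axis=1))
```

Printing this value every hundred iterations or so is a quick way to check that the learning rate is reasonable and the loss is actually decreasing.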
Usage example:
```python
# Prepare training data (y_train holds integer class labels)
X_train = np.array([[2.5, 1.2], [1.5, 2.5], [3.5, 1.9], [3.0, 2.8]])
y_train = np.array([0, 1, 1, 2])
# Create and train the model
model = SoftmaxRegression(num_classes=3, num_features=2)
model.train(X_train, y_train)
# Prepare test data
X_test = np.array([[2.0, 1.0], [3.0, 2.0]])
# Make predictions
predictions = model.predict(X_test)
print(predictions)
```
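On a toy dataset like this the outputs are easy to inspect directly, but as a sanity check you could also compare against scikit-learn's logistic regression, which fits the same multinomial model. This is a rough sketch assuming scikit-learn is installed; the predictions may differ slightly because scikit-learn applies L2 regularization and a different optimizer by default:

```python
from sklearn.linear_model import LogisticRegression

# scikit-learn's multiclass logistic regression fits the same softmax-style model
sk_model = LogisticRegression(max_iter=1000)
sk_model.fit(X_train, y_train)
print(sk_model.predict(X_test))
```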
Hope this code helps! Feel free to ask if you have any questions.