python写softmax训练权重分类三类平面上的二维点的代码
时间: 2024-04-29 19:25:13 浏览: 98
以下是使用Python实现三类平面上二维点的softmax训练权重分类代码:
```
import numpy as np
# Synthesize a 3-class toy dataset of 2-D points: 100 samples per class,
# each drawn from an isotropic Gaussian (std 0.5) around its class center.
_centers = [(0, 0), (2, 2), (-2, 2)]
X = np.vstack([np.random.randn(100, 2) * 0.5 + c for c in _centers])
# Class labels: 0 for the first 100 rows, 1 for the next 100, 2 for the last.
y = np.repeat(np.arange(3, dtype=float), 100)
# Convert integer class labels to a one-hot encoded matrix.
def to_one_hot(y):
    """Convert a 1-D array of class labels to a one-hot matrix.

    Args:
        y: array of shape (n,) holding integer-valued labels in 0..max(y)
           (the dtype may be float, as produced by the label array above).

    Returns:
        Array of shape (n, max(y) + 1) with exactly one 1 per row.
    """
    # np.eye requires an integer size; np.max(y) is a float when y has a
    # float dtype, which raises TypeError on modern NumPy — cast first.
    n_values = int(np.max(y)) + 1
    return np.eye(n_values)[y.astype(int)]
# One-hot targets for the cross-entropy loss below.
y_one_hot = to_one_hot(y)
# Small random initial weights, one column per class: shape (n_features, n_classes).
W = 0.01 * np.random.randn(X.shape[1], y_one_hot.shape[1])
# Row-wise softmax over class logits.
def softmax(x):
    """Row-wise softmax of a 2-D array of logits.

    Args:
        x: array of shape (n_samples, n_classes).

    Returns:
        Array of the same shape; each row is non-negative and sums to 1.
    """
    # Subtract the per-row max before exponentiating: softmax is
    # shift-invariant, and this prevents np.exp from overflowing to inf
    # (which would produce nan probabilities) for large logits.
    shifted = x - np.max(x, axis=1, keepdims=True)
    exp_x = np.exp(shifted)
    return exp_x / np.sum(exp_x, axis=1, keepdims=True)
# Mean cross-entropy between predicted probabilities and one-hot targets.
def cross_entropy_loss(y_pred, y_true):
    """Return the mean cross-entropy loss over a batch.

    Args:
        y_pred: (n_samples, n_classes) predicted class probabilities.
        y_true: (n_samples, n_classes) one-hot target matrix.

    Returns:
        Scalar mean loss.
    """
    # 1e-12 guards against log(0); averaging over the batch keeps the
    # value comparable across batch sizes.
    per_element = y_true * np.log(y_pred + 1e-12)
    return -per_element.sum() / len(y_pred)
# Full-batch gradient descent for the softmax-regression weights.
def gradient_descent(X, y_one_hot, W, learning_rate, n_iterations):
    """Train softmax weights with full-batch gradient descent.

    Args:
        X: (n_samples, n_features) input points.
        y_one_hot: (n_samples, n_classes) one-hot labels.
        W: (n_features, n_classes) weight matrix; updated in place.
        learning_rate: step size for each update.
        n_iterations: number of gradient steps.

    Returns:
        The trained weight matrix (the same array object as W).
    """
    for step in range(n_iterations):
        probs = softmax(X @ W)
        loss = cross_entropy_loss(probs, y_one_hot)
        # Gradient of the *summed* cross-entropy w.r.t. W; the printed
        # loss is the per-sample mean, so the step is effectively
        # learning_rate * n_samples relative to that mean.
        error = probs - y_one_hot
        W -= learning_rate * (X.T @ error)
        if step % 100 == 0:
            print(f"Iteration {step}: Loss = {loss}")
    return W
# Fit the weights on the generated dataset.
W = gradient_descent(X, y_one_hot, W, learning_rate=0.01, n_iterations=1000)
# Predicted class = index of the largest probability in each row.
scores = np.dot(X, W)
y_pred = softmax(scores).argmax(axis=1)
# Fraction of points whose predicted class matches the true label.
accuracy = (y_pred == y).mean()
print("Accuracy = {}".format(accuracy))
```
希望能够帮助你解决问题!
阅读全文