Give me code for a two-layer neural network, optimized with stochastic gradient descent, using the ReLU activation function, without using backpropagation
I can implement this two-layer network with the code below. The gradients are derived by hand with the chain rule and computed directly in NumPy rather than by an automatic-differentiation framework, and the loss is simply the mean of the network's output (a toy objective for demonstration). An example input X is assumed so the snippet runs end to end:

import numpy as np

# Example input data (assumed for a runnable demo): 100 samples, 2 features
X = np.random.randn(100, 2)

# Hyperparameters
learning_rate = 0.01
num_epochs = 2000

# Initialize weights and biases
w1 = np.random.randn(2, 3)
b1 = np.random.randn(3)
w2 = np.random.randn(3, 1)
b2 = np.random.randn(1)

# Training loop
for epoch in range(num_epochs):
    # Forward pass
    z1 = np.dot(X, w1) + b1
    a1 = np.maximum(z1, 0)    # ReLU
    z2 = np.dot(a1, w2) + b2
    a2 = np.maximum(z2, 0)    # ReLU

    # Loss: mean of the network output (toy objective)
    loss = np.mean(a2)
    if epoch % 500 == 0:
        print(f'epoch {epoch}: loss = {loss:.4f}')

    # Hand-derived gradients (no autodiff framework)
    a2_grad = np.ones_like(a2) / a2.shape[0]   # d(mean)/d(a2)
    z2_grad = a2_grad * (z2 > 0)               # ReLU derivative
    w2_grad = np.dot(a1.T, z2_grad)
    b2_grad = np.sum(z2_grad, axis=0)
    a1_grad = np.dot(z2_grad, w2.T)
    z1_grad = a1_grad * (z1 > 0)               # ReLU derivative
    w1_grad = np.dot(X.T, z1_grad)
    b1_grad = np.sum(z1_grad, axis=0)

    # Gradient-descent update
    w1 -= learning_rate * w1_grad
    b1 -= learning_rate * b1_grad
    w2 -= learning_rate * w2_grad
    b2 -= learning_rate * b2_grad

# Print the learned parameters
print('w1 = ', w1)
print('b1 = ', b1)
print('w2 = ', w2)
print('b2 = ', b2)
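One caveat on the optimizer: the loop above uses the full batch X on every update, which is plain (batch) gradient descent. True stochastic gradient descent updates on one sample or a small minibatch at a time. Below is a minimal sketch of the minibatch variant of the same training loop, reusing the arrays defined above; batch_size = 10 is an assumed value for illustration:

batch_size = 10  # assumed minibatch size for illustration

for epoch in range(num_epochs):
    # Shuffle the samples each epoch, then step through minibatches
    perm = np.random.permutation(X.shape[0])
    for start in range(0, X.shape[0], batch_size):
        xb = X[perm[start:start + batch_size]]
        # Forward pass on the minibatch only
        z1 = np.dot(xb, w1) + b1
        a1 = np.maximum(z1, 0)
        z2 = np.dot(a1, w2) + b2
        a2 = np.maximum(z2, 0)
        # Same hand-derived gradients, restricted to the minibatch
        a2_grad = np.ones_like(a2) / a2.shape[0]
        z2_grad = a2_grad * (z2 > 0)
        w2_grad = np.dot(a1.T, z2_grad)
        b2_grad = np.sum(z2_grad, axis=0)
        a1_grad = np.dot(z2_grad, w2.T)
        z1_grad = a1_grad * (z1 > 0)
        w1_grad = np.dot(xb.T, z1_grad)
        b1_grad = np.sum(z1_grad, axis=0)
        # One SGD update per minibatch
        w1 -= learning_rate * w1_grad
        b1 -= learning_rate * b1_grad
        w2 -= learning_rate * w2_grad
        b2 -= learning_rate * b2_grad

Because each update sees only a random subset of the data, the parameter trajectory is noisier but each step is cheaper, which is the usual trade-off that motivates SGD.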