BP Neural Network Multivariate Regression Prediction with a Single Hidden Layer and ReLU Activation (Code)
Below is a Python code example of multivariate regression prediction with a BP (backpropagation) neural network that uses a single hidden layer and the ReLU activation function:
```python
import numpy as np

# ReLU activation: max(0, x), applied element-wise
def relu(x):
    return np.maximum(0, x)

# BP neural network with a single hidden layer
class BPNeuralNetwork:
    def __init__(self, input_size, hidden_size, output_size):
        # Initialize weights with small random values and biases with zeros
        self.weights_input_hidden = np.random.randn(input_size, hidden_size) * 0.1
        self.bias_input_hidden = np.zeros((1, hidden_size))
        self.weights_hidden_output = np.random.randn(hidden_size, output_size) * 0.1
        self.bias_hidden_output = np.zeros((1, output_size))

    # Forward pass: ReLU on the hidden layer, linear output (regression)
    def forward(self, X):
        self.hidden_layer = relu(np.dot(X, self.weights_input_hidden) + self.bias_input_hidden)
        self.output_layer = np.dot(self.hidden_layer, self.weights_hidden_output) + self.bias_hidden_output
        return self.output_layer

    # Backward pass: gradient descent on the mean squared error
    def backward(self, X, y, learning_rate):
        n = X.shape[0]
        # Output-layer error, averaged over the batch so the update
        # step does not grow with the number of samples
        output_error = (self.output_layer - y) / n
        # Hidden-layer error; (hidden_layer > 0) is the ReLU derivative
        hidden_error = np.dot(output_error, self.weights_hidden_output.T) * (self.hidden_layer > 0)
        # Update weights and biases
        self.weights_hidden_output -= learning_rate * np.dot(self.hidden_layer.T, output_error)
        self.bias_hidden_output -= learning_rate * np.sum(output_error, axis=0, keepdims=True)
        self.weights_input_hidden -= learning_rate * np.dot(X.T, hidden_error)
        self.bias_input_hidden -= learning_rate * np.sum(hidden_error, axis=0, keepdims=True)

    # Train with full-batch gradient descent
    def train(self, X, y, epochs, learning_rate):
        for i in range(epochs):
            output = self.forward(X)
            self.backward(X, y, learning_rate)
            # Print the loss every 1000 epochs
            if i % 1000 == 0:
                loss = np.mean((output - y) ** 2)
                print("Epoch {0}: loss = {1}".format(i, loss))

# Demo
if __name__ == '__main__':
    # Generate random data: 100 samples, 5 input features, 1 output
    X = np.random.randn(100, 5)
    y = np.random.randn(100, 1)
    # Build the network: 5 inputs, 10 hidden units, 1 output
    nn = BPNeuralNetwork(5, 10, 1)
    # Train the model
    nn.train(X, y, 10000, 0.01)
    # Predict on new data
    test_X = np.random.randn(10, 5)
    preds = nn.forward(test_X)
    print("Predictions:\n", preds)
```
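One caveat about the demo: `X` and `y` above are independent random noise, so the printed loss can only plateau near the variance of `y`. As a minimal sanity check (a sketch of my own, not part of the original post; the linear target `true_w` below is an assumption chosen purely for illustration), train on data with real structure and standardize the inputs first, which typically helps plain gradient descent converge:

```python
import numpy as np

np.random.seed(0)  # reproducibility for this illustrative run

# Hypothetical synthetic target: y is a noisy linear function of X
X = np.random.randn(200, 5)
true_w = np.array([[1.5], [-2.0], [0.5], [0.0], [3.0]])  # assumed coefficients
y = X @ true_w + 0.1 * np.random.randn(200, 1)

# Standardize features column-wise (zero mean, unit variance)
X_std = (X - X.mean(axis=0)) / X.std(axis=0)

nn = BPNeuralNetwork(5, 10, 1)
nn.train(X_std, y, epochs=10000, learning_rate=0.1)

# Unlike the pure-noise demo, the printed loss should now drop well
# below the variance of y as the network fits the underlying function
```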
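For comparison, the same single-hidden-layer ReLU regressor can be built with scikit-learn's `MLPRegressor`. This is a cross-check sketch, not part of the original post: it assumes scikit-learn is installed, and its training defaults (regularization, stopping criteria) differ from the hand-rolled gradient descent above, so the fits will not match exactly:

```python
import numpy as np
from sklearn.neural_network import MLPRegressor

np.random.seed(0)
X = np.random.randn(200, 5)
true_w = np.array([1.5, -2.0, 0.5, 0.0, 3.0])  # assumed coefficients
y = X @ true_w + 0.1 * np.random.randn(200)

# One hidden layer of 10 ReLU units, mirroring BPNeuralNetwork(5, 10, 1)
model = MLPRegressor(hidden_layer_sizes=(10,), activation='relu',
                     solver='sgd', learning_rate_init=0.01,
                     max_iter=10000, random_state=0)
model.fit(X, y)
preds = model.predict(np.random.randn(10, 5))
print("Predictions:\n", preds)
```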