用代码写出一个BP神经网络
时间: 2023-03-26 07:01:38 浏览: 59
我可以回答这个问题。以下是一个简单的BP神经网络的代码示例:
```python
import numpy as np
class BPNeuralNetwork:
    """Feed-forward neural network with one hidden layer, trained with
    plain full-batch gradient descent via backpropagation.

    Fixes over the original snippet:
      * ``np.sum(..., axis=)`` was a SyntaxError; bias gradients are
        summed over the batch dimension (``axis=0``).
      * Indentation restored (the paste had none).
      * A backward-compatible ``learning_rate`` parameter is added;
        its default of 1.0 reproduces the original implicit step size.
    """

    def __init__(self, input_size, hidden_size, output_size, learning_rate=1.0):
        """Initialize weights and biases from a standard normal distribution.

        Args:
            input_size: number of input features.
            hidden_size: number of hidden units.
            output_size: number of output units.
            learning_rate: gradient-descent step size (default 1.0).
        """
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.learning_rate = learning_rate
        # Random initialization breaks symmetry between units.
        self.weights1 = np.random.randn(self.input_size, self.hidden_size)
        self.bias1 = np.random.randn(self.hidden_size)
        self.weights2 = np.random.randn(self.hidden_size, self.output_size)
        self.bias2 = np.random.randn(self.output_size)

    def sigmoid(self, x):
        """Element-wise logistic activation: 1 / (1 + e^-x)."""
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, x):
        """Sigmoid derivative expressed in terms of the sigmoid OUTPUT:
        if s = sigmoid(z), then ds/dz = s * (1 - s). Callers must pass
        the activated value, not the pre-activation."""
        return x * (1 - x)

    def forward(self, x):
        """Forward pass; caches layer activations for use in backward().

        Args:
            x: array of shape (n_samples, input_size).

        Returns:
            Network output of shape (n_samples, output_size), values in (0, 1).
        """
        self.hidden_layer = self.sigmoid(np.dot(x, self.weights1) + self.bias1)
        self.output_layer = self.sigmoid(np.dot(self.hidden_layer, self.weights2) + self.bias2)
        return self.output_layer

    def backward(self, x, y, output):
        """One backpropagation update on the batch (x, y).

        `output` must be the value returned by forward(x) so the cached
        hidden activations match.
        """
        # d(MSE)/d(output) up to a constant; adding the gradient of
        # (y - output) performs gradient DESCENT on the squared error.
        error = y - output
        output_delta = error * self.sigmoid_derivative(output)
        hidden_error = np.dot(output_delta, self.weights2.T)
        hidden_delta = hidden_error * self.sigmoid_derivative(self.hidden_layer)
        # Update weights and biases; bias gradients are summed over the
        # batch dimension (axis=0) — the original `axis=` was a SyntaxError.
        lr = self.learning_rate
        self.weights2 += lr * np.dot(self.hidden_layer.T, output_delta)
        self.bias2 += lr * np.sum(output_delta, axis=0)
        self.weights1 += lr * np.dot(x.T, hidden_delta)
        self.bias1 += lr * np.sum(hidden_delta, axis=0)

    def train(self, x, y, epochs):
        """Run `epochs` iterations of full-batch forward/backward passes."""
        for _ in range(epochs):
            output = self.forward(x)
            self.backward(x, y, output)

    def predict(self, x):
        """Return the network output for x (alias for forward())."""
        return self.forward(x)
```
这个代码实现了一个具有一个隐藏层的BP神经网络,可以用于分类和回归任务。