帮我写一个bp神经网络的python代码吗
时间: 2024-03-02 08:22:14 浏览: 83
当然可以,以下是一个简单的BP神经网络的Python代码示例:
```python
import numpy as np
class NeuralNetwork:
    """A minimal fully-connected feed-forward network with one hidden layer.

    Architecture: input_size -> hidden_size -> output_size, with sigmoid
    activations on both layers, trained by full-batch gradient descent on
    the squared error (classic backpropagation).
    """

    def __init__(self, input_size, hidden_size, output_size):
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        # Weights start from a standard normal draw; biases start at zero.
        self.weights1 = np.random.randn(self.input_size, self.hidden_size)
        self.bias1 = np.zeros((1, self.hidden_size))
        self.weights2 = np.random.randn(self.hidden_size, self.output_size)
        self.bias2 = np.zeros((1, self.output_size))

    def sigmoid(self, x):
        """Element-wise logistic function: 1 / (1 + e^-x)."""
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, x):
        """Sigmoid derivative, where ``x`` is the sigmoid OUTPUT, not the pre-activation."""
        return x * (1 - x)

    def feedforward(self, X):
        """Propagate ``X`` (n_samples, input_size) forward.

        Caches the layer activations on ``self`` for use by
        ``backpropagation`` and returns the (n_samples, output_size) output.
        """
        self.layer1 = self.sigmoid(np.dot(X, self.weights1) + self.bias1)
        self.layer2 = self.sigmoid(np.dot(self.layer1, self.weights2) + self.bias2)
        return self.layer2

    def backpropagation(self, X, y, output, learning_rate=1.0):
        """One gradient-descent step on the squared error for the batch (X, y).

        ``output`` must be the result of a ``feedforward(X)`` call, so the
        cached ``self.layer1`` matches. ``learning_rate`` defaults to 1.0,
        which reproduces the original behaviour of adding raw gradients;
        smaller values (e.g. 0.1) are usually more stable.
        """
        # Error signal at the output layer; computed once instead of three times.
        output_delta = (y - output) * self.sigmoid_derivative(output)
        d_weights2 = np.dot(self.layer1.T, output_delta)
        d_bias2 = np.sum(output_delta, axis=0, keepdims=True)
        # Propagate the error signal back through weights2 to the hidden layer.
        hidden_delta = np.dot(output_delta, self.weights2.T) * self.sigmoid_derivative(self.layer1)
        d_weights1 = np.dot(X.T, hidden_delta)
        d_bias1 = np.sum(hidden_delta, axis=0, keepdims=True)
        # Stepping along (y - output) descends the squared-error loss.
        self.weights1 += learning_rate * d_weights1
        self.bias1 += learning_rate * d_bias1
        self.weights2 += learning_rate * d_weights2
        self.bias2 += learning_rate * d_bias2

    def train(self, X, y, epochs, learning_rate=1.0):
        """Run ``epochs`` full-batch forward/backward passes over (X, y)."""
        for _ in range(epochs):
            output = self.feedforward(X)
            self.backpropagation(X, y, output, learning_rate)

    def predict(self, X):
        """Return the network output for ``X`` (a plain forward pass)."""
        return self.feedforward(X)
```
使用方法:
```python
# Build a 2-3-1 network for the XOR problem
network = NeuralNetwork(2, 3, 1)

# XOR training set: four input pairs and their expected outputs
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])

# Fit the network, then query it on a sample
network.train(X, y, 10000)
print(network.predict(np.array([[0, 1]])))  # close to 1; exact value varies with the random weight init
```
这个代码实现了一个2-3-1的神经网络,可以对二元异或问题进行预测。你可以根据需要修改输入、隐藏和输出层的大小,以及训练数据和预测数据。
阅读全文