多层bp神经网络python
时间: 2023-10-31 17:06:08 浏览: 38
好的,关于多层bp神经网络的Python实现,可以使用NumPy库来实现。以下是一个简单的示例代码:
```python
import numpy as np
class NeuralNetwork:
    """A simple fully-connected multilayer perceptron trained with
    backpropagation, using the logistic sigmoid as the activation of
    every layer and squared error as the loss.

    Parameters
    ----------
    layers : sequence of int
        Neuron count per layer, e.g. [2, 4, 1] for 2 inputs, one
        hidden layer of 4 units, and 1 output.
    """

    def __init__(self, layers):
        self.layers = layers
        # weights[i] maps layer i (rows) to layer i+1 (columns);
        # biases[i] belongs to layer i+1.
        self.weights = [np.random.randn(layers[i], layers[i + 1])
                        for i in range(len(layers) - 1)]
        self.biases = [np.random.randn(layers[i + 1])
                       for i in range(len(layers) - 1)]

    def sigmoid(self, x):
        """Element-wise logistic function 1 / (1 + e^-x)."""
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, x):
        """Derivative of the sigmoid expressed in terms of its OUTPUT
        (x is assumed to already be sigmoid(z)), avoiding a second exp."""
        return x * (1 - x)

    def feedforward(self, x):
        """Propagate x through all layers; return the final activation."""
        a = x
        for w, b in zip(self.weights, self.biases):
            a = self.sigmoid(np.dot(a, w) + b)
        return a

    def backpropagation(self, x, y, learning_rate):
        """One gradient-descent step on a single sample (or mini-batch).

        Fixes two defects of the naive version:
        * the gradient of weights[i] must pair the activations of layer
          i with the delta of layer i+1 (the layer the weights feed
          into); the original propagated delta backwards first, pairing
          every weight matrix with the wrong delta;
        * with 1-D inputs np.dot(a.T, delta) is an inner product that
          collapses the weight gradient to a scalar; inputs are promoted
          to 2-D so a[i].T @ delta is a proper outer product.
        """
        x = np.atleast_2d(x)
        y = np.atleast_2d(y)

        # Forward pass, keeping every layer's activation for the
        # backward pass.
        a = [x]
        for w, b in zip(self.weights, self.biases):
            a.append(self.sigmoid(np.dot(a[-1], w) + b))

        # Error term of the output layer for squared-error loss.
        delta = (a[-1] - y) * self.sigmoid_derivative(a[-1])

        # Walk backwards: take gradients with the CURRENT (layer i+1)
        # delta, then propagate delta down to layer i.
        for i in range(len(self.layers) - 2, -1, -1):
            grad_w = np.dot(a[i].T, delta)          # (layers[i], layers[i+1])
            grad_b = np.sum(delta, axis=0)
            if i > 0:  # the input layer needs no delta
                delta = np.dot(delta, self.weights[i].T) \
                    * self.sigmoid_derivative(a[i])
            self.weights[i] -= learning_rate * grad_w
            self.biases[i] -= learning_rate * grad_b

    def train(self, X, y, learning_rate, epochs):
        """Per-sample stochastic gradient descent for `epochs` passes."""
        for _ in range(epochs):
            for xi, yi in zip(X, y):
                self.backpropagation(xi, yi, learning_rate)

    def predict(self, X):
        """Return the network output for every row of X as one array."""
        return np.array([self.feedforward(xi) for xi in X])
# --- Example: learn the XOR truth table ---
# All four 2-bit input combinations, in lexicographic order.
X = np.array([(i, j) for i in (0, 1) for j in (0, 1)])
# Target for each row is the XOR of its two inputs.
y = np.array([[a ^ b] for a, b in X])
nn = NeuralNetwork([2, 4, 1])  # 2 inputs, 4 hidden units, 1 output
nn.train(X, y, 0.1, 10000)
print(nn.predict(X))
```
该代码实现了一个包含输入层（2 个神经元）、一个隐藏层（4 个神经元）和输出层（1 个神经元）的神经网络，使用反向传播算法进行训练，并以 sigmoid 函数作为激活函数。在示例中，我们使用该神经网络来解决异或（XOR）问题。