BP Neural Network: Python Implementation
A Python implementation of a BP (back-propagation) neural network is shown below:
```python
import numpy as np


def sigmoid(x):
    return 1 / (1 + np.exp(-x))


def sigmoid_derivative(x):
    # x is assumed to already be the sigmoid output, so the derivative is x * (1 - x)
    return x * (1 - x)


class NeuralNetwork:
    def __init__(self, layers):
        # layers, e.g. [2, 3, 1]: input size, hidden layer sizes, output size
        self.layers = layers
        self.weights = []
        for i in range(1, len(layers)):
            # +1 row for the previous layer's bias unit; weights initialised in [-1, 1)
            w = 2 * np.random.random((layers[i - 1] + 1, layers[i])) - 1
            self.weights.append(w)

    def forward_propagation(self, X):
        # prepend a bias column of ones to the input
        self.activations = [np.insert(X, 0, 1, axis=1)]
        for i in range(len(self.weights)):
            a = sigmoid(np.dot(self.activations[i], self.weights[i]))
            if i < len(self.weights) - 1:
                # hidden layers get a bias column; the output layer does not
                a = np.insert(a, 0, 1, axis=1)
            self.activations.append(a)
        return self.activations[-1]

    def backward_propagation(self, X, y, learning_rate):
        # delta of the output layer
        error = y - self.activations[-1]
        delta = error * sigmoid_derivative(self.activations[-1])
        deltas = [delta]
        # propagate the error back through the hidden layers, skipping the
        # bias row of the weights and the bias column of the activations
        for i in range(len(self.weights) - 1, 0, -1):
            delta = np.dot(deltas[0], self.weights[i][1:].T) * sigmoid_derivative(self.activations[i][:, 1:])
            deltas.insert(0, delta)
        # gradient-descent step on the squared error (error = y - output, hence +=)
        for i in range(len(self.weights)):
            self.weights[i] += learning_rate * np.dot(self.activations[i].T, deltas[i])

    def train(self, X, y, epochs, learning_rate):
        for _ in range(epochs):
            self.forward_propagation(X)
            self.backward_propagation(X, y, learning_rate)

    def predict(self, X):
        output = self.forward_propagation(X)
        return np.round(output)
```
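For reference, here is a minimal usage sketch. The XOR toy data, the layer sizes [2, 3, 1], and the learning rate and epoch count are illustrative assumptions, not part of the original snippet:

```python
import numpy as np

# XOR toy problem: 2 inputs, 1 binary output (data chosen for illustration)
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])

nn = NeuralNetwork([2, 3, 1])          # 2 inputs, 3 hidden units, 1 output
nn.train(X, y, epochs=10000, learning_rate=0.1)
print(nn.predict(X))                   # expected to approach [[0], [1], [1], [0]]
```

Because the weights are initialised randomly, an occasional run may settle in a poor local minimum; re-running or adjusting the hidden-layer size and learning rate usually resolves this.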