BP Neural Network Python Code
Below is an example of a BP (backpropagation) neural network implemented in Python:
```python
import numpy as np


class NeuralNetwork:
    def __init__(self, layers, activation='sigmoid', learning_rate=0.1):
        # layers: list of layer sizes, e.g. [2, 4, 1]
        self.layers = layers
        self.activation = activation
        self.learning_rate = learning_rate
        self.weights = []
        self.bias = []
        # Randomly initialize the weights and biases connecting each pair of layers
        for i in range(1, len(layers)):
            self.weights.append(np.random.randn(layers[i-1], layers[i]))
            self.bias.append(np.random.randn(layers[i]))

    def sigmoid(self, x):
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, a):
        # Derivative expressed in terms of the activation a = sigmoid(z)
        return a * (1 - a)

    def relu(self, x):
        return np.maximum(0, x)

    def relu_derivative(self, a):
        # Derivative expressed in terms of the activation a = relu(z)
        return np.greater(a, 0).astype(float)

    def _activate(self, z):
        if self.activation == 'relu':
            return self.relu(z)
        return self.sigmoid(z)

    def _activate_derivative(self, a):
        if self.activation == 'relu':
            return self.relu_derivative(a)
        return self.sigmoid_derivative(a)

    def forward_propagation(self, x):
        # x is a 2D array of shape (batch_size, layers[0])
        a = [x]
        z = []
        for i in range(len(self.layers) - 1):
            z_i = np.dot(a[i], self.weights[i]) + self.bias[i]
            a_i = self._activate(z_i)
            z.append(z_i)
            a.append(a_i)
        return a, z

    def backward_propagation(self, a, z, y):
        # Compute the per-layer error terms (deltas), then the weight/bias gradients
        delta = [None] * len(self.layers)
        delta[-1] = (a[-1] - y) * self._activate_derivative(a[-1])
        for i in range(len(self.layers) - 2, 0, -1):
            delta[i] = np.dot(delta[i+1], self.weights[i].T) * self._activate_derivative(a[i])
        dw = []
        db = []
        for i in range(len(self.layers) - 1):
            dw.append(np.dot(a[i].T, delta[i+1]))
            db.append(np.sum(delta[i+1], axis=0))
        return dw, db

    def update_weights(self, dw, db):
        # Plain gradient-descent step
        for i in range(len(self.layers) - 1):
            self.weights[i] -= self.learning_rate * dw[i]
            self.bias[i] -= self.learning_rate * db[i]

    def train(self, x_train, y_train, epochs):
        for i in range(epochs):
            loss = 0
            for j in range(len(x_train)):
                # Reshape each sample to a (1, n) row vector so the matrix
                # products in forward/backward propagation have the right shapes
                x = np.atleast_2d(x_train[j])
                y = np.atleast_2d(y_train[j])
                a, z = self.forward_propagation(x)
                dw, db = self.backward_propagation(a, z, y)
                self.update_weights(dw, db)
                loss += np.sum(np.square(a[-1] - y))
            print('Epoch %d - Loss: %.4f' % (i + 1, loss))

    def predict(self, x):
        a, _ = self.forward_propagation(np.atleast_2d(x))
        return a[-1]
```
This code implements a simple BP neural network. It supports sigmoid and ReLU activation functions, random initialization of the weights and biases, and a configurable learning rate. During training, the backpropagation algorithm computes gradients layer by layer and updates the weights and biases to minimize the squared-error loss.
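A minimal usage sketch follows, assuming the class above; the XOR dataset, the `[2, 4, 1]` network shape, and the epoch count are illustrative choices, not part of the original post (training prints the loss once per epoch):

```python
import numpy as np

# Toy XOR dataset (hypothetical example data)
x_train = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
y_train = np.array([[0], [1], [1], [0]], dtype=float)

# 2 inputs, one hidden layer of 4 units, 1 output; sigmoid keeps outputs in (0, 1)
nn = NeuralNetwork(layers=[2, 4, 1], activation='sigmoid', learning_rate=0.5)
nn.train(x_train, y_train, epochs=2000)

for x in x_train:
    print(x, '->', nn.predict(x))
```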