BP Algorithm Program for an Artificial Neural Network
Below is a Python implementation of the BP (backpropagation) algorithm for a multilayer feedforward neural network:
```python
import numpy as np


class BPNN:
    def __init__(self, input_size, hidden_size, output_size, learning_rate):
        # Initialize weights and biases
        self.weights1 = np.random.randn(input_size, hidden_size)
        self.bias1 = np.zeros((1, hidden_size))
        self.weights2 = np.random.randn(hidden_size, output_size)
        self.bias2 = np.zeros((1, output_size))
        # Set the learning rate
        self.learning_rate = learning_rate

    def sigmoid(self, x):
        # Sigmoid activation function
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, x):
        # Derivative of the sigmoid, expressed in terms of the
        # activated output: x is assumed to already be sigmoid(z)
        return x * (1 - x)

    def feedforward(self, X):
        # Forward pass: input -> hidden -> output
        self.hidden_layer = self.sigmoid(np.dot(X, self.weights1) + self.bias1)
        self.output_layer = self.sigmoid(np.dot(self.hidden_layer, self.weights2) + self.bias2)
        return self.output_layer

    def backpropagation(self, X, y, output):
        # Backward pass: propagate the error and compute the deltas
        self.output_error = y - output
        self.output_delta = self.output_error * self.sigmoid_derivative(output)
        self.hidden_error = np.dot(self.output_delta, self.weights2.T)
        self.hidden_delta = self.hidden_error * self.sigmoid_derivative(self.hidden_layer)
        # Update weights and biases; (y - output) already carries the
        # negative sign of the gradient, so += is a descent step
        self.weights2 += self.learning_rate * np.dot(self.hidden_layer.T, self.output_delta)
        self.bias2 += self.learning_rate * np.sum(self.output_delta, axis=0, keepdims=True)
        self.weights1 += self.learning_rate * np.dot(X.T, self.hidden_delta)
        self.bias1 += self.learning_rate * np.sum(self.hidden_delta, axis=0, keepdims=True)

    def train(self, X, y, epochs):
        # Full-batch training: one forward and one backward pass per epoch
        for i in range(epochs):
            output = self.feedforward(X)
            self.backpropagation(X, y, output)

    def predict(self, X):
        return self.feedforward(X)
```
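For reference (this derivation is not in the original post), the updates in `backpropagation` amount to gradient descent on the squared error $E = \frac{1}{2}\sum (y - \hat{y})^2$. Writing $h$ for the hidden activation, $\hat{y}$ for the network output, $\eta$ for the learning rate, and $\odot$ for elementwise multiplication:

$$
\begin{aligned}
\delta_{\text{out}} &= (y - \hat{y}) \odot \hat{y} \odot (1 - \hat{y}) \\
\delta_{\text{hid}} &= \left(\delta_{\text{out}} W_2^{\top}\right) \odot h \odot (1 - h) \\
W_2 &\leftarrow W_2 + \eta\, h^{\top} \delta_{\text{out}}, \qquad
W_1 \leftarrow W_1 + \eta\, X^{\top} \delta_{\text{hid}}
\end{aligned}
$$

The biases are updated with the column sums of the corresponding deltas, and the `+` sign appears because $(y - \hat{y})$ already carries the negative sign of the gradient.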
Here, `input_size`, `hidden_size`, and `output_size` are the numbers of neurons in the input, hidden, and output layers, and `learning_rate` is the learning rate. `sigmoid` is the sigmoid activation function, and `sigmoid_derivative` is its derivative; note that it takes the already-activated value, since the sigmoid's derivative can be written as `s * (1 - s)` where `s` is the sigmoid output. `feedforward` performs the forward pass, `backpropagation` performs the backward pass and updates the parameters, `train` runs the training loop, and `predict` returns the network's output for new inputs.
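As an illustration (not part of the original code), here is a minimal usage sketch that trains the network on the XOR problem; the hidden-layer size, learning rate, and epoch count are arbitrary choices:

```python
import numpy as np

# XOR inputs and targets (hypothetical example data)
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])

nn = BPNN(input_size=2, hidden_size=4, output_size=1, learning_rate=0.5)
nn.train(X, y, epochs=10000)
print(nn.predict(X))  # for most random initializations, close to [0, 1, 1, 0]
```

Because `train` runs full-batch updates with a fixed learning rate, results vary with the random weight initialization; rerunning or increasing `epochs` usually helps if the outputs have not converged.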