BP Neural Network Algorithm: A Python Example
BP (Back Propagation) neural networks are trained with a widely used supervised learning algorithm that repeatedly adjusts the network's weights (and, in fuller implementations, its bias values) to minimize the error between the predicted results and the actual labels. Below is a simple BP neural network implemented in Python with NumPy (to keep it minimal, this example omits bias terms):
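Concretely, each call to `train()` below performs one step of gradient descent on the squared error. For a layer with input activations A and backpropagated error term δ, the weight update is W ← W + η · Aᵀδ, where η is the learning rate; for the output layer, δ = (target − output) · output · (1 − output), i.e. the error scaled by the sigmoid derivative.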
```python
import numpy as np

class NeuralNetwork:
    def __init__(self, input_nodes, hidden_nodes, output_nodes, learning_rate):
        self.input_nodes = input_nodes
        self.hidden_nodes = hidden_nodes
        self.output_nodes = output_nodes
        self.learning_rate = learning_rate
        # Initialize the weights; centering them around 0 (np.random.rand
        # alone gives values in [0, 1)) helps the XOR example below converge
        self.weights_input_hidden = np.random.rand(input_nodes, hidden_nodes) - 0.5
        self.weights_hidden_output = np.random.rand(hidden_nodes, output_nodes) - 0.5

    def sigmoid(self, x):
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, x):
        # x is assumed to already be a sigmoid output,
        # so the derivative simplifies to x * (1 - x)
        return x * (1 - x)

    def train(self, inputs_list, targets_list):
        # One sample per row; no transpose, because the dot products
        # below treat samples as row vectors
        inputs = np.array(inputs_list, ndmin=2)
        targets = np.array(targets_list, ndmin=2)
        # Forward pass
        hidden_inputs = np.dot(inputs, self.weights_input_hidden)
        hidden_outputs = self.sigmoid(hidden_inputs)
        final_inputs = np.dot(hidden_outputs, self.weights_hidden_output)
        final_outputs = self.sigmoid(final_inputs)
        # Output-layer error
        output_errors = targets - final_outputs
        # Backpropagate the error
        output_delta = output_errors * self.sigmoid_derivative(final_outputs)
        d_weights_hidden_output = np.dot(hidden_outputs.T, output_delta)
        hidden_delta = np.dot(output_delta, self.weights_hidden_output.T) * self.sigmoid_derivative(hidden_outputs)
        d_weights_input_hidden = np.dot(inputs.T, hidden_delta)
        # Update the weights
        self.weights_hidden_output += self.learning_rate * d_weights_hidden_output
        self.weights_input_hidden += self.learning_rate * d_weights_input_hidden
# Usage example
nn = NeuralNetwork(2, 3, 1, 0.5)  # 2 input nodes, 3 hidden nodes, 1 output node, learning rate 0.5
inputs = [[0, 0], [0, 1], [1, 0], [1, 1]]  # input data (the XOR truth table)
targets = [[0], [1], [1], [0]]  # target outputs
for _ in range(10000):  # number of iterations; XOR usually needs several thousand passes
    nn.train(inputs, targets)
```
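The class above only trains the network; to inspect what it has learned you also need a plain forward pass. Below is a minimal sketch of such a method (named `query` here, which is not part of the original code) that reuses the same matrix operations as the first half of `train()`:

```python
def query(self, inputs_list):
    # Forward pass only: input -> hidden -> output
    inputs = np.array(inputs_list, ndmin=2)
    hidden_outputs = self.sigmoid(np.dot(inputs, self.weights_input_hidden))
    return self.sigmoid(np.dot(hidden_outputs, self.weights_hidden_output))

# Attach the method to the class and check the XOR predictions
NeuralNetwork.query = query
print(nn.query(inputs))  # should move toward [[0], [1], [1], [0]] as training progresses
```

Because this network has no bias terms, the XOR fit can be slow or sensitive to the random initialization; a rerun with a different seed or more iterations may be needed.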