python实现三层BP神经网络完整代码
时间: 2024-07-01 08:00:30 浏览: 62
在Python中,实现一个三层的BP(Back Propagation,反向传播)神经网络通常会用到一些机器学习库,如TensorFlow或Keras。这里我将给出一个简化版的代码示例,不包含复杂的优化和验证步骤,但基本结构是清晰的。假设你没有使用深度学习框架,我们将手动构建权重更新过程。
```python
import numpy as np
def sigmoid(x):
    """Logistic sigmoid activation: squashes any real input into (0, 1)."""
    denom = 1 + np.exp(-x)
    return 1 / denom
def sigmoid_derivative(output):
    """Derivative of the sigmoid, expressed in terms of the sigmoid's
    own output value: s'(x) = s(x) * (1 - s(x))."""
    return (1 - output) * output
class NeuralNetwork:
    """A minimal three-layer (input -> hidden -> output) feed-forward
    network trained by plain backpropagation with sigmoid activations.

    NOTE: the network has no bias terms, matching the original design.
    """

    def __init__(self, input_nodes, hidden_nodes, output_nodes):
        self.input_nodes = input_nodes
        self.hidden_nodes = hidden_nodes
        self.output_nodes = output_nodes
        # Zero-centered initialization: uniform in [-1, 1). The original
        # np.random.uniform(size=...) drew from [0, 1), making every weight
        # positive; with sigmoid units that pushes all hidden activations
        # toward the same saturated regime and stalls learning.
        self.weights1 = np.random.uniform(-1.0, 1.0, size=(input_nodes, hidden_nodes))
        self.weights2 = np.random.uniform(-1.0, 1.0, size=(hidden_nodes, output_nodes))

    def feedforward(self, inputs):
        """Run a forward pass and cache layer activations for backprop.

        inputs: array of shape (n_samples, input_nodes).
        Returns the output activations, shape (n_samples, output_nodes).
        """
        self.layer_1 = sigmoid(np.dot(inputs, self.weights1))
        self.layer_2 = sigmoid(np.dot(self.layer_1, self.weights2))
        return self.layer_2

    def backpropagation(self, inputs, targets, learning_rate):
        """Perform one gradient-descent step on the squared error.

        Recomputes the forward pass first, so this method is safe to call
        even when feedforward() was not invoked beforehand (the original
        raised AttributeError in that case). Recomputation is idempotent:
        activations depend only on the current weights.
        """
        self.feedforward(inputs)
        # Output-layer error signal (d(error)/d(pre-activation)).
        error = targets - self.layer_2
        delta2 = error * sigmoid_derivative(self.layer_2)
        # Propagate the error back through weights2 to the hidden layer.
        delta1 = np.dot(delta2, self.weights2.T) * sigmoid_derivative(self.layer_1)
        # Moving along (targets - output) gradient == descending squared error.
        self.weights2 += learning_rate * np.dot(self.layer_1.T, delta2)
        self.weights1 += learning_rate * np.dot(inputs.T, delta1)
# --- Demo: train the network on a tiny toy dataset ---
input_nodes, hidden_nodes, output_nodes = 3, 4, 2
nn = NeuralNetwork(input_nodes, hidden_nodes, output_nodes)

# Four 3-bit samples with 2-dimensional target vectors.
inputs = np.array([[0, 0, 1], [0, 1, 1], [1, 0, 1], [1, 1, 1]])
targets = np.array([[0, 1], [1, 0], [1, 1], [0, 1]])

# A fixed number of epochs with learning rate 0.1: forward pass, then
# one weight update per epoch.
for _ in range(1000):
    outputs = nn.feedforward(inputs)
    nn.backpropagation(inputs, targets, learning_rate=0.1)