Implement a prediction example with a BP (backpropagation) neural network and write the Python code
Below is the Python code for a prediction example based on a BP neural network:
```python
import numpy as np

class BPNeuralNetwork:
    def __init__(self, input_nodes, hidden_nodes, output_nodes, learning_rate):
        self.input_nodes = input_nodes
        self.hidden_nodes = hidden_nodes
        self.output_nodes = output_nodes
        self.learning_rate = learning_rate
        # Initialize the weight matrices; the extra column in each holds the bias term
        self.weights_input_hidden = np.random.normal(
            0.0, self.hidden_nodes ** -0.5, (self.hidden_nodes, self.input_nodes + 1))
        self.weights_hidden_output = np.random.normal(
            0.0, self.output_nodes ** -0.5, (self.output_nodes, self.hidden_nodes + 1))

    def sigmoid(self, x):
        return 1 / (1 + np.exp(-x))

    def train(self, inputs, targets):
        # Turn the sample into a column vector and append the bias input (1)
        inputs = np.append(inputs, 1.0).reshape(-1, 1)
        targets = np.array(targets, dtype=float).reshape(-1, 1)
        # Hidden layer: weighted sum followed by sigmoid activation
        hidden_outputs = self.sigmoid(np.dot(self.weights_input_hidden, inputs))
        hidden_with_bias = np.append(hidden_outputs, [[1.0]], axis=0)
        # Output layer: weighted sum followed by sigmoid activation
        final_outputs = self.sigmoid(np.dot(self.weights_hidden_output, hidden_with_bias))
        # Output error and the back-propagated deltas
        output_errors = targets - final_outputs
        output_deltas = output_errors * final_outputs * (1 - final_outputs)
        hidden_errors = np.dot(self.weights_hidden_output[:, :-1].T, output_deltas)
        hidden_deltas = hidden_errors * hidden_outputs * (1 - hidden_outputs)
        # Gradient-descent weight updates
        self.weights_hidden_output += self.learning_rate * np.dot(output_deltas, hidden_with_bias.T)
        self.weights_input_hidden += self.learning_rate * np.dot(hidden_deltas, inputs.T)

    def predict(self, inputs):
        # Same forward pass as in train(), without the weight updates
        inputs = np.append(inputs, 1.0).reshape(-1, 1)
        hidden_outputs = self.sigmoid(np.dot(self.weights_input_hidden, inputs))
        hidden_with_bias = np.append(hidden_outputs, [[1.0]], axis=0)
        return self.sigmoid(np.dot(self.weights_hidden_output, hidden_with_bias))
```
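For reference, the weight updates in train() follow the standard delta rule for sigmoid units trained with a squared-error loss. A sketch of the update equations (the symbols x, h, y, t, and η are notation introduced here rather than names from the code, and the bias columns are omitted for brevity):
```latex
% Backpropagation delta rule implemented in train() (bias columns omitted)
% x: input vector, h: hidden activations, y: network output, t: target, \eta: learning rate
\delta_o = (t - y)\, y\,(1 - y), \qquad
\delta_h = \bigl(W_{ho}^{\top}\, \delta_o\bigr) \odot h\,(1 - h)
\\
W_{ho} \leftarrow W_{ho} + \eta\, \delta_o\, h^{\top}, \qquad
W_{ih} \leftarrow W_{ih} + \eta\, \delta_h\, x^{\top}
```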
Below is an example that uses the BP neural network above for a prediction task:
```python
# Set the number of input, hidden, and output nodes, and the learning rate
input_nodes = 2
hidden_nodes = 3
output_nodes = 1
learning_rate = 0.1

# Create a BP neural network instance
nn = BPNeuralNetwork(input_nodes, hidden_nodes, output_nodes, learning_rate)

# Training data set (the XOR truth table)
train_inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
train_targets = np.array([[0], [1], [1], [0]])

# Train the network, one sample at a time
for i in range(10000):
    for inputs, targets in zip(train_inputs, train_targets):
        nn.train(inputs, targets)

# Predict on new data
test_inputs = np.array([[0, 1], [1, 0], [0, 0], [1, 1]])
for inputs in test_inputs:
    print(nn.predict(inputs))
```
The code above implements a simple prediction example for the XOR logic operation, trained and evaluated with a BP neural network.
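Since the raw network output is a sigmoid value in (0, 1), a common way to read it as an XOR prediction is to threshold it at 0.5. A minimal evaluation sketch (the test_targets array below is introduced here for illustration and is simply the XOR truth table for test_inputs):
```python
# Hypothetical helper data: XOR targets in the same order as test_inputs above
test_targets = np.array([[1], [1], [0], [0]])
for inputs, target in zip(test_inputs, test_targets):
    output = nn.predict(inputs)          # raw sigmoid output in (0, 1)
    predicted = int(output[0, 0] > 0.5)  # threshold at 0.5 to get a class label
    print(f"input={inputs}, output={output.ravel()}, predicted={predicted}, target={target[0]}")
```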