代码:用bp神经网络进行时间序列预测的代码?
时间: 2024-04-14 13:09:22 浏览: 98
以下是用bp神经网络进行时间序列预测的Python代码示例:
``` python
import numpy as np
import matplotlib.pyplot as plt
# Build a toy time series: sin(x) sampled every 0.1 over [0, 10).
x = np.arange(0, 10, 0.1)
y = np.sin(x)

# Chronological split: first 70% of the series for training,
# the remaining 30% held out for testing.
split = int(len(x) * 0.7)
train_X, test_X = x[:split], x[split:]
train_Y, test_Y = y[:split], y[split:]
# Back-propagation neural network used for the regression demo below.
class BPNN:
    """Minimal back-propagation network: one sigmoid hidden layer and a
    linear output layer, trained with full-batch gradient descent on a
    mean-squared-error loss.

    Parameters
    ----------
    input_size : int
        Number of input features per sample.
    hidden_size : int
        Number of hidden units.
    output_size : int
        Number of output units.
    """

    def __init__(self, input_size, hidden_size, output_size):
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        # Random weights break symmetry between hidden units;
        # biases start at zero, stored as (1, k) rows for broadcasting.
        self.W1 = np.random.randn(self.input_size, self.hidden_size)
        self.b1 = np.zeros((1, self.hidden_size))
        self.W2 = np.random.randn(self.hidden_size, self.output_size)
        self.b2 = np.zeros((1, self.output_size))

    def sigmoid(self, x):
        """Element-wise logistic function 1 / (1 + exp(-x))."""
        return 1 / (1 + np.exp(-x))

    def forward(self, X):
        """Forward pass for a batch X of shape (n_samples, input_size).

        Caches the hidden pre-activation ``A1`` and activation ``H1`` on
        ``self`` because ``train`` reuses them in the backward pass.
        Returns the linear output layer, shape (n_samples, output_size).
        """
        self.A1 = np.dot(X, self.W1) + self.b1
        self.H1 = self.sigmoid(self.A1)
        self.A2 = np.dot(self.H1, self.W2) + self.b2
        return self.A2

    def sigmoid_derivative(self, x):
        """Sigmoid derivative expressed in terms of the sigmoid OUTPUT x."""
        return x * (1 - x)

    def train(self, X, Y, epochs, learning_rate):
        """Train with full-batch gradient descent on mean squared error.

        Fix vs. the original: gradients are averaged over the batch
        (divided by n_samples) instead of summed, so the effective step
        size no longer grows with the dataset size (the summed form
        diverges easily). Bias gradients use ``keepdims=True`` so
        ``b1``/``b2`` keep their (1, k) shape across updates.
        """
        n_samples = X.shape[0]
        for epoch in range(epochs):
            # Forward pass (caches self.H1 for the backward pass).
            output = self.forward(X)
            # dL/dA2 for L = mean squared error; averaging here propagates
            # the 1/n factor into every downstream gradient.
            dA2 = (output - Y) / n_samples
            dW2 = np.dot(self.H1.T, dA2)
            db2 = np.sum(dA2, axis=0, keepdims=True)
            # Back-propagate through the hidden layer.
            dH1 = np.dot(dA2, self.W2.T)
            dA1 = dH1 * self.sigmoid_derivative(self.H1)
            dW1 = np.dot(X.T, dA1)
            db1 = np.sum(dA1, axis=0, keepdims=True)
            # Gradient-descent step.
            self.W1 -= learning_rate * dW1
            self.b1 -= learning_rate * db1
            self.W2 -= learning_rate * dW2
            self.b2 -= learning_rate * db2
# --- Train the network and forecast over the held-out segment ---
# Hyper-parameters: a 1-3-1 network, 1000 full-batch gradient steps.
input_size, hidden_size, output_size = 1, 3, 1
learning_rate = 0.01
epochs = 1000

model = BPNN(input_size, hidden_size, output_size)
model.train(train_X.reshape(-1, 1), train_Y.reshape(-1, 1), epochs, learning_rate)
predicted_Y = model.forward(test_X.reshape(-1, 1))

# --- Visualize the true curve against the test-range prediction ---
plt.plot(x, y, label="sin(x)")
plt.plot(test_X, predicted_Y, label="predicted sin(x)")
plt.legend()
plt.show()
```
请注意，此代码示例仅用于演示目的。实际使用神经网络进行时间序列预测时，通常还需要对输入做滑动窗口（滞后）特征构造和归一化，并根据具体问题调整模型架构和超参数。
阅读全文
相关推荐

















