Fitting y=x^2 with a BP Neural Network Optimized by a Genetic Algorithm
Thanks for your question. This is a programming question, and I can answer it. Below is code that uses a genetic algorithm (GA) to optimize a BP neural network to fit y=x^2: the GA searches for a good set of initial weights, and standard backpropagation then fine-tunes them.
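A quick bookkeeping note before the code: with 1 input, 10 hidden units, and 1 output, the network has 1*10 + 10 + 10*1 + 1 = 31 parameters, so each GA chromosome is a flat vector of 31 genes.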
```python
import numpy as np
import matplotlib.pyplot as plt

# Sigmoid activation function
def sigmoid(x):
    return 1 / (1 + np.exp(-x))

# BP (backpropagation) neural network: one sigmoid hidden layer, linear output
class BPNN:
    def __init__(self, input_size, hidden_size, output_size):
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.W1 = np.random.randn(self.input_size, self.hidden_size)
        self.b1 = np.random.randn(self.hidden_size)
        self.W2 = np.random.randn(self.hidden_size, self.output_size)
        self.b2 = np.random.randn(self.output_size)

    # Forward pass
    def forward(self, x):
        self.z1 = np.dot(x, self.W1) + self.b1
        self.a1 = sigmoid(self.z1)
        self.z2 = np.dot(self.a1, self.W2) + self.b2
        self.y = self.z2
        return self.y

    # Loss: half the sum of squared errors (the 1/2 keeps the gradient clean)
    def loss(self, x, t):
        self.forward(x)
        return 0.5 * np.sum((self.y - t) ** 2)

    # Backpropagation: gradients of the loss w.r.t. all parameters
    def gradient(self, x, t):
        self.loss(x, t)
        delta2 = self.y - t  # error signal at the linear output layer
        dW2 = np.dot(self.a1.T, delta2)
        db2 = np.sum(delta2, axis=0)
        delta1 = np.dot(delta2, self.W2.T) * self.a1 * (1 - self.a1)  # sigmoid derivative
        dW1 = np.dot(x.T, delta1)
        db1 = np.sum(delta1, axis=0)
        return dW1, db1, dW2, db2

    # One gradient-descent step on all weights and biases
    def update(self, x, t, learning_rate):
        dW1, db1, dW2, db2 = self.gradient(x, t)
        self.W1 -= learning_rate * dW1
        self.b1 -= learning_rate * db1
        self.W2 -= learning_rate * dW2
        self.b2 -= learning_rate * db2
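
# A quick standalone sanity check of BPNN on its own (hypothetical usage):
#   net = BPNN(1, 10, 1)
#   xs = np.linspace(-1, 1, 20).reshape(-1, 1)
#   for _ in range(500):
#       net.update(xs, xs ** 2, 0.01)
#   print(net.loss(xs, xs ** 2))  # should shrink as training proceeds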

# Genetic algorithm over real-valued chromosomes
class GA:
    def __init__(self, pop_size, gene_size, pc, pm, max_iter, fitness_fn):
        self.pop_size = pop_size      # population size (kept even for pairwise crossover)
        self.gene_size = gene_size    # chromosome length
        self.pc = pc                  # crossover probability
        self.pm = pm                  # mutation probability
        self.max_iter = max_iter
        self.fitness_fn = fitness_fn  # maps one chromosome to a scalar fitness
        self.pop = np.random.rand(self.pop_size, self.gene_size) * 10 - 5

    # Fitness of every individual in the current population
    def fitness(self):
        return np.array([self.fitness_fn(ind) for ind in self.pop])

    # Roulette-wheel selection; fitness is shifted so the probabilities are positive
    def selection(self, fitness):
        shifted = fitness - np.min(fitness) + 1e-8
        p = shifted / np.sum(shifted)
        idx = np.random.choice(np.arange(self.pop_size), size=self.pop_size, replace=True, p=p)
        return self.pop[idx]

    # Single-point crossover; children are copies so the parents stay intact
    def crossover(self, parent1, parent2):
        if np.random.rand() < self.pc:
            point = np.random.randint(1, self.gene_size)
            child1 = np.concatenate((parent1[:point], parent2[point:]))
            child2 = np.concatenate((parent2[:point], parent1[point:]))
            return child1, child2
        return parent1.copy(), parent2.copy()

    # Gaussian mutation of a single randomly chosen gene
    def mutation(self, child):
        if np.random.rand() < self.pm:
            point = np.random.randint(0, self.gene_size)
            child[point] += np.random.randn() * 0.1
        return child

    # Evolve for max_iter generations; return the best chromosome found
    def run(self):
        best_fitness = []
        for i in range(self.max_iter):
            fitness = self.fitness()
            best_fitness.append(np.max(fitness))
            parents = self.selection(fitness)
            offspring = []
            for j in range(0, self.pop_size, 2):
                child1, child2 = self.crossover(parents[j], parents[j + 1])
                offspring.append(self.mutation(child1))
                offspring.append(self.mutation(child2))
            self.pop = np.array(offspring)
        return self.pop[np.argmax(self.fitness())]

# Main: the GA finds good initial weights, then backpropagation fine-tunes them
def main():
    # Training data: y = x^2 on [-5, 5]
    x_train = np.linspace(-5, 5, 100).reshape(-1, 1)
    t_train = x_train ** 2
    # Network with 1 input, 10 hidden units, 1 output (31 parameters in total)
    bpnn = BPNN(1, 10, 1)

    # Unpack a flat 31-gene chromosome into the network's parameters
    def decode(w):
        bpnn.W1 = w[:10].reshape(1, 10)
        bpnn.b1 = w[10:20]
        bpnn.W2 = w[20:30].reshape(10, 1)
        bpnn.b2 = w[30:31]

    # Fitness: the lower the network's loss with these weights, the fitter
    def fitness_fn(w):
        decode(w)
        return -bpnn.loss(x_train, t_train)

    # Run the GA, then load the best chromosome into the network
    ga = GA(100, 31, 0.8, 0.1, 100, fitness_fn)
    decode(ga.run())
    # Fine-tune with gradient descent
    for i in range(2000):
        bpnn.update(x_train, t_train, 1e-4)

    # Test data
    x_test = np.linspace(-5, 5, 100).reshape(-1, 1)
    t_test = x_test ** 2
    # Evaluate the trained network
    y_test = bpnn.forward(x_test)
    # Plot the true curve against the network's prediction
    plt.plot(x_test, t_test, label='true')
    plt.plot(x_test, y_test, label='predict')
    plt.legend()
    plt.show()


if __name__ == '__main__':
    main()
```
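One subtlety worth isolating: roulette-wheel selection needs non-negative probabilities, but a fitness defined as a negative loss is always below zero, which is why `GA.selection` shifts the fitness values before normalizing. Here is a minimal standalone sketch of that trick, using made-up fitness values:
```python
import numpy as np

# Hypothetical fitness values (negative losses) for a population of four.
fitness = np.array([-3.2, -1.1, -0.4, -7.8])
shifted = fitness - np.min(fitness) + 1e-8  # shift so every value is strictly positive
probs = shifted / np.sum(shifted)           # now a valid probability distribution
# Fitter individuals (less negative fitness) are drawn more often.
idx = np.random.choice(len(fitness), size=len(fitness), replace=True, p=probs)
print(probs, idx)
```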
I hope this code helps you implement a GA-optimized BP neural network for y=x^2. If you have any questions, feel free to ask.