粒子群算法优化神经网络python代码
时间: 2023-10-29 07:03:14 浏览: 142
神经网络粒子群优化算法代码
粒子群算法(Particle Swarm Optimization, PSO)是一种启发式优化算法,可以用来优化神经网络的权重和阈值。下面是使用Python实现粒子群算法优化神经网络的代码示例:
```python
import numpy as np
# 定义神经网络类
class NeuralNetwork:
    """A minimal 2-layer (one hidden layer) feed-forward network.

    Weights and biases are initialized uniformly in [0, 1); training is
    expected to happen externally (e.g. via PSO) through
    ``update_weights`` / ``update_biases``.
    """

    def __init__(self, input_size, hidden_size, output_size):
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        # Layer parameters: (in, hidden) and (hidden, out) weight matrices
        # plus one bias vector per layer.
        self.weights_hidden = np.random.rand(input_size, hidden_size)
        self.weights_output = np.random.rand(hidden_size, output_size)
        self.bias_hidden = np.random.rand(hidden_size)
        self.bias_output = np.random.rand(output_size)

    @staticmethod
    def _sigmoid(x):
        # Self-contained activation: the original relied on a module-level
        # ``sigmoid`` defined further down the file.
        return 1 / (1 + np.exp(-x))

    def forward_propagation(self, X):
        """Return the network output for input batch ``X`` of shape (n, input_size)."""
        self.hidden_layer_output = self._sigmoid(np.dot(X, self.weights_hidden) + self.bias_hidden)
        self.output_layer_output = self._sigmoid(np.dot(self.hidden_layer_output, self.weights_output) + self.bias_output)
        return self.output_layer_output

    def calculate_loss(self, X, y):
        """Mean squared error (with the conventional 1/2 factor) against targets ``y``."""
        y_predicted = self.forward_propagation(X)
        return np.mean(0.5 * (y_predicted - y) ** 2)

    def update_weights(self, weights):
        """Load both layers' weights from one flat vector (hidden layer first).

        ``weights`` must have exactly input*hidden + hidden*output entries.
        """
        n_hidden = self.input_size * self.hidden_size
        flat = np.asarray(weights)
        self.weights_hidden = flat[:n_hidden].reshape(self.input_size, self.hidden_size)
        self.weights_output = flat[n_hidden:].reshape(self.hidden_size, self.output_size)

    def update_biases(self, biases):
        """Load both layers' biases from one flat vector (hidden layer first)."""
        flat = np.asarray(biases)
        self.bias_hidden = flat[:self.hidden_size]
        self.bias_output = flat[self.hidden_size:]
# 定义粒子群优化类
class PSO:
    """Minimize ``cost_function`` with particle swarm optimization.

    Positions and velocities start uniformly in [0, 1).  Each particle
    tracks its personal best; the swarm tracks the global best.
    """

    def __init__(self, num_particles, max_iterations, cost_function, num_dimensions):
        self.num_particles = num_particles
        self.max_iterations = max_iterations
        self.cost_function = cost_function
        self.num_dimensions = num_dimensions
        self.particles_position = np.random.rand(num_particles, num_dimensions)
        self.particles_velocity = np.random.rand(num_particles, num_dimensions)
        self.particles_best_position = self.particles_position.copy()
        # Cache personal-best costs so the stored best never has to be
        # re-evaluated (the original called cost_function twice per
        # particle per iteration).
        self.particles_best_cost = np.full(num_particles, np.inf)
        self.global_best_position = None
        self.global_best_cost = np.inf

    def optimize(self):
        """Run the swarm and return the best position found."""
        for _ in range(self.max_iterations):
            for i in range(self.num_particles):
                cost = self.cost_function(self.particles_position[i])
                if cost < self.particles_best_cost[i]:
                    self.particles_best_cost[i] = cost
                    self.particles_best_position[i] = self.particles_position[i]
                if cost < self.global_best_cost:
                    self.global_best_cost = cost
                    # copy(): the original stored a *view* of the position
                    # row, so the later in-place position update silently
                    # mutated the recorded global best.
                    self.global_best_position = self.particles_position[i].copy()
                # Random inertia in [0.5, 1) and acceleration coefficients
                # in [0, 2), as in the original formulation.
                w = 0.5 + np.random.rand() / 2
                c1 = 2 * np.random.rand()
                c2 = 2 * np.random.rand()
                self.particles_velocity[i] = (
                    w * self.particles_velocity[i]
                    + c1 * (self.particles_best_position[i] - self.particles_position[i])
                    + c2 * (self.global_best_position - self.particles_position[i])
                )
                self.particles_position[i] = self.particles_position[i] + self.particles_velocity[i]
        return self.global_best_position
# sigmoid 激活函数
def sigmoid(x):
    """Numerically stable logistic function 1 / (1 + exp(-x)).

    Computed from z = exp(-|x|) so neither branch can overflow for large
    |x| (the naive form overflows np.exp for large negative inputs).
    """
    z = np.exp(-np.abs(x))
    return np.where(x >= 0, 1.0 / (1.0 + z), z / (1.0 + z))
# 定义损失函数
def loss_function(weights):
    """PSO cost: MSE of a fresh 2-3-1 network loaded from ``weights``.

    ``weights`` holds the 9 flattened weight entries (2*3 hidden + 3*1
    output) and, optionally, the 4 bias entries after them.  The original
    passed the whole particle vector straight to ``update_weights`` (which
    crashes for any length other than 9) and left the biases at fresh
    random values on every call, making the cost non-deterministic;
    missing biases are now zeroed instead.
    """
    model = NeuralNetwork(2, 3, 1)
    n_weights = 2 * 3 + 3 * 1  # 9 weight parameters
    n_biases = 3 + 1           # 4 bias parameters
    params = np.asarray(weights)
    model.update_weights(params[:n_weights])
    rest = params[n_weights:]
    if rest.size >= n_biases:
        model.update_biases(rest[:n_biases])
    else:
        model.update_biases(np.zeros(n_biases))
    return model.calculate_loss(X, y)
# 设定训练数据集
# XOR training set: 2 inputs, 1 target output per sample.
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])
# Particle dimension must equal the number of parameters being optimized.
# A 2-3-1 network has 2*3 + 3*1 = 9 weights; the original passed 11,
# which matches neither the weight count (9) nor weights+biases (13) and
# makes update_weights' reshape raise ValueError.
pso = PSO(num_particles=10, max_iterations=100, cost_function=loss_function,
          num_dimensions=2 * 3 + 3 * 1)
pso.optimize()
# Report the best flattened weight vector found by the swarm.
print("Optimized Weights:")
print(pso.global_best_position)
```
以上代码实现了一个简单的两层神经网络,并使用粒子群算法对其参数进行优化。输入层有2个神经元,隐藏层有3个神经元,输出层有1个神经元;权重共 2×3+3×1=9 个,阈值共 3+1=4 个,粒子的维度数应与被优化的参数个数保持一致。神经网络的前向传播使用sigmoid激活函数。PSO算法使用10个粒子和100次迭代进行优化。
阅读全文