ga-bp神经网络python代码
时间: 2023-09-12 08:03:50 浏览: 87
以下是一个简单的基于Python的全连接神经网络(使用反向传播算法)。注意:该示例只实现了BP(反向传播)部分,并未包含用遗传算法(GA)优化初始权重的步骤,需要在此基础上自行加入GA编码、选择、交叉和变异等操作:
```
import numpy as np
def sigmoid(x):
    """Logistic activation: squashes any real input into the open interval (0, 1)."""
    return 1.0 / (1.0 + np.exp(-x))
def sigmoid_prime(x):
    """Derivative of the sigmoid: s'(x) = s(x) * (1 - s(x)).

    Evaluates ``sigmoid`` once and reuses the result instead of calling it
    twice per element as the original did — same values, half the work.
    """
    s = sigmoid(x)
    return s * (1.0 - s)
class NeuralNetwork:
    """A minimal fully-connected network with one hidden layer.

    Both layers use sigmoid activations and carry no bias terms; training
    is plain gradient descent on the squared error via backpropagation.
    """

    def __init__(self, input_size, hidden_size, output_size):
        # Small random weights (std 0.1) break symmetry between hidden units.
        self.weights_input_hidden = np.random.normal(scale=0.1, size=(input_size, hidden_size))
        self.weights_hidden_output = np.random.normal(scale=0.1, size=(hidden_size, output_size))

    def _pass(self, inputs):
        # Full forward pass, returning every intermediate needed by backprop:
        # (hidden pre-activation, hidden activation, output pre-activation, output).
        z_hidden = np.dot(inputs, self.weights_input_hidden)
        a_hidden = sigmoid(z_hidden)
        z_output = np.dot(a_hidden, self.weights_hidden_output)
        a_output = sigmoid(z_output)
        return z_hidden, a_hidden, z_output, a_output

    def forward(self, inputs):
        """Return the network's output for a batch of input rows."""
        return self._pass(inputs)[-1]

    def train(self, inputs, targets, learning_rate):
        """Perform one backpropagation update step on (inputs, targets)."""
        z_hidden, a_hidden, z_output, a_output = self._pass(inputs)
        # Output-layer delta: error times activation derivative.
        output_error = targets - a_output
        output_delta = output_error * sigmoid_prime(z_output)
        # Propagate the delta back through the hidden->output weights.
        hidden_error = np.dot(output_delta, self.weights_hidden_output.T)
        hidden_delta = hidden_error * sigmoid_prime(z_hidden)
        # Both deltas were computed before either weight matrix changes,
        # so the two updates are independent of each other.
        self.weights_hidden_output += learning_rate * np.dot(a_hidden.T, output_delta)
        self.weights_input_hidden += learning_rate * np.dot(inputs.T, hidden_delta)
# Demo: learn XOR — not linearly separable, so the hidden layer is essential.
# Seed the RNG so the random weight init (and thus the printed result) is
# reproducible across runs; the original was nondeterministic.
np.random.seed(0)
inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
targets = np.array([[0], [1], [1], [0]])
nn = NeuralNetwork(2, 3, 1)
for _ in range(10000):
    nn.train(inputs, targets, 0.1)
print(nn.forward(inputs))  # expected output close to [[0], [1], [1], [0]]
```