用python写一个关于神经网络的代码
时间: 2023-12-17 08:03:49 浏览: 86
以下是一个简单的神经网络代码,使用Python和NumPy库实现:
import numpy as np
# Neural network class definition
class NeuralNetwork:
    """A minimal 2-layer feed-forward network (one hidden layer).

    Uses sigmoid activations on both layers and trains with full-batch
    gradient descent on mean-squared error.
    """

    def __init__(self, input_size, hidden_size, output_size, learning_rate=1.0):
        """Initialize layer sizes and randomly initialized parameters.

        Args:
            input_size: number of input features.
            hidden_size: number of hidden units.
            output_size: number of output units.
            learning_rate: gradient-descent step size. Defaults to 1.0,
                which reproduces the original (unscaled) update rule.
        """
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.learning_rate = learning_rate
        # Initialize weights and biases from a standard normal distribution
        self.weights1 = np.random.randn(self.input_size, self.hidden_size)
        self.bias1 = np.random.randn(self.hidden_size)
        self.weights2 = np.random.randn(self.hidden_size, self.output_size)
        self.bias2 = np.random.randn(self.output_size)

    def sigmoid(self, x):
        """Element-wise logistic sigmoid: 1 / (1 + e^-x)."""
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, a):
        """Derivative of the sigmoid expressed in terms of its OUTPUT.

        If a = sigmoid(z), then d(sigmoid)/dz = a * (1 - a). The argument
        must therefore be the activated value (a1/a2), not the raw z.
        """
        return a * (1 - a)

    def forward(self, X):
        """Forward pass; caches intermediate activations for backprop.

        Args:
            X: input array of shape (n_samples, input_size).

        Returns:
            Network output of shape (n_samples, output_size).
        """
        self.z1 = np.dot(X, self.weights1) + self.bias1
        self.a1 = self.sigmoid(self.z1)
        self.z2 = np.dot(self.a1, self.weights2) + self.bias2
        self.a2 = self.sigmoid(self.z2)
        return self.a2

    def backward(self, X, y, output):
        """One gradient-descent step on MSE via backpropagation.

        Fixes vs. the original:
        - sigmoid_derivative is applied to the activations (a1/a2), not
          the pre-activations (z1/z2), matching the a*(1-a) formula.
        - delta1 is computed BEFORE weights2 is updated, so the gradient
          flows through the weights actually used in the forward pass.
        - Updates are scaled by the learning rate.
        """
        error = output - y
        delta2 = error * self.sigmoid_derivative(self.a2)
        # Backpropagate through the pre-update weights2
        delta1 = delta2.dot(self.weights2.T) * self.sigmoid_derivative(self.a1)
        self.weights2 -= self.learning_rate * self.a1.T.dot(delta2)
        self.bias2 -= self.learning_rate * np.sum(delta2, axis=0)
        self.weights1 -= self.learning_rate * X.T.dot(delta1)
        self.bias1 -= self.learning_rate * np.sum(delta1, axis=0)

    def train(self, X, y, epochs):
        """Run full-batch training for the given number of epochs.

        Prints the MSE loss every 1000 epochs.
        """
        for i in range(epochs):
            output = self.forward(X)
            self.backward(X, y, output)
            if i % 1000 == 0:
                print("Epoch:", i, "Loss:", np.mean(np.square(y - output)))
# Demo: learn the XOR truth table with a 2-4-1 network.
if __name__ == "__main__":
    inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    targets = np.array([[0], [1], [1], [0]])
    network = NeuralNetwork(2, 4, 1)
    network.train(inputs, targets, 10000)
    print(network.forward(inputs))
阅读全文