Python手写数据集全连接神经网络识别代码
时间: 2024-03-15 14:17:51 浏览: 17
以下是Python手写数据集全连接神经网络识别代码示例:
```
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_digits
# Load the scikit-learn 8x8 handwritten-digits dataset and show its description.
digits = load_digits()
print(digits.DESCR)

# Hold out the last 20% of samples as the test split (sequential, no shuffling).
X, y = digits.data, digits.target
train_ratio = 0.8
train_size = int(len(X) * train_ratio)
X_train, y_train = X[:train_size], y[:train_size]
X_test, y_test = X[train_size:], y[train_size:]

# Pixel intensities range over [0, 16]; rescale them into [0, 1].
X_train = X_train / 16.0
X_test = X_test / 16.0
# 神经网络模型
class NeuralNetwork:
    """A minimal two-layer (one hidden layer) fully connected network.

    Trains with full-batch gradient descent on a 0.5 * sum-of-squares loss,
    using sigmoid activations on both the hidden and the output layer.
    Targets passed to ``train`` / ``cost_function`` must be one-hot encoded
    arrays of shape (n_samples, output_size).
    """

    def __init__(self, input_size, hidden_size, output_size):
        """Store the layer sizes and randomly initialize both weight matrices."""
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        # Standard-normal initialization; this simple model has no bias terms.
        self.W1 = np.random.randn(self.input_size, self.hidden_size)
        self.W2 = np.random.randn(self.hidden_size, self.output_size)

    def forward(self, X):
        """Forward pass; returns predictions of shape (n_samples, output_size).

        Caches z2 / a2 / z3 as attributes for use by backpropagation.
        """
        self.z2 = np.dot(X, self.W1)
        self.a2 = self.sigmoid(self.z2)
        self.z3 = np.dot(self.a2, self.W2)
        y_hat = self.sigmoid(self.z3)
        return y_hat

    def sigmoid(self, z):
        """Numerically safe logistic function 1 / (1 + exp(-z))."""
        # Clip to avoid overflow in np.exp for large-magnitude negative z.
        return 1 / (1 + np.exp(-np.clip(z, -500, 500)))

    def sigmoid_prime(self, z):
        """Derivative of the sigmoid evaluated at z."""
        # Compute the sigmoid once instead of twice.
        s = self.sigmoid(z)
        return s * (1 - s)

    def cost_function(self, X, y):
        """Scalar 0.5 * sum-of-squared-errors loss over the whole batch.

        Fix: use np.sum so the loss is a true scalar; the builtin ``sum``
        reduced only axis 0 and returned a length-output_size array.
        """
        self.y_hat = self.forward(X)
        J = 0.5 * np.sum((y - self.y_hat) ** 2)
        return J

    def cost_function_prime(self, X, y):
        """Backpropagation: gradients of the loss w.r.t. W1 and W2."""
        self.y_hat = self.forward(X)
        # Output-layer error term (dJ/dz3).
        delta3 = np.multiply(-(y - self.y_hat), self.sigmoid_prime(self.z3))
        dJdW2 = np.dot(self.a2.T, delta3)
        # Propagate the error back through W2 to the hidden layer.
        delta2 = np.dot(delta3, self.W2.T) * self.sigmoid_prime(self.z2)
        dJdW1 = np.dot(X.T, delta2)
        return dJdW1, dJdW2

    def train(self, X, y, iterations=100, learning_rate=0.5):
        """Full-batch gradient descent; records the loss in self.J_history."""
        self.J_history = []
        for _ in range(iterations):
            dJdW1, dJdW2 = self.cost_function_prime(X, y)
            self.W1 -= learning_rate * dJdW1
            self.W2 -= learning_rate * dJdW2
            self.J_history.append(self.cost_function(X, y))
# Train the fully connected network: 64 input pixels -> 30 hidden -> 10 classes.
# Fix: the targets must be one-hot encoded to match the 10-unit sigmoid output;
# raw integer labels of shape (n,) do not broadcast against the (n, 10) output
# and raise a ValueError inside backpropagation.
y_train_onehot = np.eye(10)[y_train]
nn = NeuralNetwork(64, 30, 10)
nn.train(X_train, y_train_onehot, iterations=1000, learning_rate=0.1)

# Evaluate: the predicted class is the output unit with the highest activation.
y_pred = np.argmax(nn.forward(X_test), axis=1)
accuracy = np.mean(y_pred == y_test)
print("测试集准确率:", accuracy)

# Plot the training-loss curve.
plt.plot(nn.J_history)
plt.xlabel('Iterations')
plt.ylabel('Cost')
plt.show()
```
该代码将手写数据集划分为训练集和测试集,并使用全连接神经网络模型进行训练和测试。模型使用sigmoid函数作为激活函数,使用全批量梯度下降算法进行优化(每次迭代使用整个训练集计算梯度)。训练完成后,绘制损失函数下降曲线,评估测试集准确率。