Running an MLP Classifier Example
In Python, we can build a simple Multi-Layer Perceptron (MLP) classifier with a deep learning library such as PyTorch or TensorFlow. Using PyTorch as the example, the code below constructs a two-layer fully connected network for MNIST handwritten digit recognition:
```python
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
import torchvision.datasets as datasets
from torchvision.transforms import ToTensor

# Load the MNIST dataset (downloaded to ./data on first run)
train_dataset = datasets.MNIST(root='./data', train=True, transform=ToTensor(), download=True)
test_dataset = datasets.MNIST(root='./data', train=False, transform=ToTensor(), download=True)

# Create data loaders
train_loader = DataLoader(train_dataset, batch_size=64, shuffle=True)
test_loader = DataLoader(test_dataset, batch_size=64)

# Define the MLP model
class MLP(nn.Module):
    def __init__(self):
        super(MLP, self).__init__()
        self.fc1 = nn.Linear(784, 128)   # input layer -> hidden layer (28*28 = 784 pixels)
        self.relu = nn.ReLU()            # activation function
        self.fc2 = nn.Linear(128, 10)    # hidden layer -> output layer (10 digit classes)

    def forward(self, x):
        x = x.view(x.shape[0], -1)       # flatten each image into a 784-dim vector
        x = self.fc1(x)
        x = self.relu(x)
        out = self.fc2(x)
        return out

model = MLP()

# Define the loss function and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.001)

# Training loop
num_epochs = 5
for epoch in range(num_epochs):
    model.train()
    for images, labels in train_loader:
        optimizer.zero_grad()
        outputs = model(images)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()
    print(f"Epoch {epoch+1}/{num_epochs}, Loss: {loss.item():.4f}")

# Evaluate on the test set
model.eval()  # switch to evaluation mode
with torch.no_grad():
    correct = 0
    total = 0
    for images, labels in test_loader:
        outputs = model(images)
        _, predicted = torch.max(outputs, 1)
        total += labels.size(0)
        correct += (predicted == labels).sum().item()

accuracy = 100 * correct / total
print(f"Test Accuracy: {accuracy:.2f}%")
```
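Once training finishes, the model can be queried on individual samples. Below is a minimal sketch of single-image inference, assuming the script above has already run; the names `sample_image`, `sample_label`, and `predicted_digit` are illustrative and not part of the original example:

```python
# Minimal inference sketch (illustrative names, assumes model and test_dataset exist)
sample_image, sample_label = test_dataset[0]        # one MNIST image, shape (1, 28, 28)
with torch.no_grad():
    logits = model(sample_image.unsqueeze(0))       # add a batch dimension -> (1, 1, 28, 28)
    predicted_digit = logits.argmax(dim=1).item()
print(f"Predicted: {predicted_digit}, actual: {sample_label}")
```

One design note: plain SGD with `lr=0.001` converges slowly on MNIST, so the loss may only drop modestly within 5 epochs; swapping in `torch.optim.Adam(model.parameters(), lr=0.001)` is a common alternative if faster convergence is desired.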