Using PyTorch, build a VGG16 model and, on the same dataset, compare the ROC curves obtained with different optimizers and different learning-rate update schemes
Sure. Below is code that builds a VGG16 model in PyTorch and compares the ROC curves obtained under different optimizers and learning rates.
First, we import the necessary libraries and load the dataset; here we use CIFAR-10:
```python
import torch
import torch.nn as nn
import torch.optim as optim
import torchvision
import torchvision.transforms as transforms
import numpy as np
from sklearn.metrics import roc_curve, auc
from sklearn.preprocessing import label_binarize
import matplotlib.pyplot as plt

# Load the CIFAR-10 dataset, scaling each channel to roughly [-1, 1]
transform = transforms.Compose(
    [transforms.ToTensor(),
     transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])

trainset = torchvision.datasets.CIFAR10(root='./data', train=True,
                                        download=True, transform=transform)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=128,
                                          shuffle=True, num_workers=2)

testset = torchvision.datasets.CIFAR10(root='./data', train=False,
                                       download=True, transform=transform)
testloader = torch.utils.data.DataLoader(testset, batch_size=128,
                                         shuffle=False, num_workers=2)
```
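As a quick sanity check (not part of the original answer), you can pull one batch from the loader and confirm the tensor shapes before building the model:

```python
# Peek at one training batch: images should be [128, 3, 32, 32], labels [128]
images, labels = next(iter(trainloader))
print(images.shape, labels.shape)
print(images.min().item(), images.max().item())  # roughly in [-1, 1] after normalization
```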
Next, we define the VGG16 model:
```python
class VGG16(nn.Module):
    def __init__(self):
        super(VGG16, self).__init__()
        # 13 convolutional layers in five blocks, as in the original VGG16
        self.features = nn.Sequential(
            # Block 1: 3 -> 64
            nn.Conv2d(3, 64, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(64, 64, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(2, stride=2),
            # Block 2: 64 -> 128
            nn.Conv2d(64, 128, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(128, 128, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(2, stride=2),
            # Block 3: 128 -> 256
            nn.Conv2d(128, 256, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(256, 256, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(256, 256, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(2, stride=2),
            # Block 4: 256 -> 512
            nn.Conv2d(256, 512, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(512, 512, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(512, 512, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(2, stride=2),
            # Block 5: 512 -> 512
            nn.Conv2d(512, 512, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(512, 512, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(512, 512, 3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(2, stride=2),
        )
        self.avgpool = nn.AdaptiveAvgPool2d((7, 7))
        self.classifier = nn.Sequential(
            nn.Linear(512 * 7 * 7, 4096),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(4096, 4096),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(4096, 10),  # 10 output classes for CIFAR-10
        )

    def forward(self, x):
        x = self.features(x)
        x = self.avgpool(x)
        x = torch.flatten(x, 1)
        x = self.classifier(x)
        return x
```
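Before training, it can help to sanity-check the architecture with a dummy batch. This snippet (an addition, not from the original answer) confirms that the adaptive pooling feeds the classifier the 512 * 7 * 7 features it expects, even though CIFAR-10 images are only 32 * 32:

```python
# Forward a random CIFAR-10-sized batch through an untrained model
net = VGG16()
dummy = torch.randn(4, 3, 32, 32)
out = net(dummy)
print(out.shape)  # expected: torch.Size([4, 10]), one logit per class
```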
Then we define the training and test functions, train and evaluate under each optimizer and learning rate, and finally plot the ROC curves. Because CIFAR-10 is a ten-class problem, a single ROC curve is computed here by binarizing the labels and micro-averaging across classes. Note also that the model is re-initialized for each optimizer/learning-rate combination so that every run starts from scratch and the curves are comparable:
```python
def train(net, optimizer, trainloader, criterion, device):
    net.train()
    running_loss = 0.0
    for i, data in enumerate(trainloader, 0):
        inputs, labels = data
        inputs, labels = inputs.to(device), labels.to(device)
        optimizer.zero_grad()
        outputs = net(inputs)
        loss = criterion(outputs, labels)
        loss.backward()
        optimizer.step()
        running_loss += loss.item()
    return running_loss / len(trainloader)

def test(net, testloader, criterion, device):
    net.eval()
    y_true = []
    y_score = []
    running_loss = 0.0
    with torch.no_grad():
        for data in testloader:
            images, labels = data
            images, labels = images.to(device), labels.to(device)
            outputs = net(images)
            loss = criterion(outputs, labels)
            running_loss += loss.item()
            outputs = nn.functional.softmax(outputs, dim=1)
            y_score.extend(outputs.cpu().numpy())
            y_true.extend(labels.cpu().numpy())
    # Micro-averaged ROC over the 10 classes: binarize the labels, then
    # flatten labels and softmax scores into one binary problem
    y_true_bin = label_binarize(y_true, classes=list(range(10)))
    fpr, tpr, _ = roc_curve(y_true_bin.ravel(), np.array(y_score).ravel())
    roc_auc = auc(fpr, tpr)
    return running_loss / len(testloader), fpr, tpr, roc_auc

if __name__ == '__main__':
    # Select the device
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    # Hyperparameters
    epochs = 10
    lr_list = [0.1, 0.01, 0.001, 0.0001]
    opt_list = [optim.SGD, optim.Adam, optim.Adagrad]
    criterion = nn.CrossEntropyLoss()
    # Train and test each optimizer/learning-rate combination
    fig, ax = plt.subplots()
    for opt in opt_list:
        for lr in lr_list:
            # Re-initialize the model so every run starts from scratch
            net = VGG16().to(device)
            optimizer = opt(net.parameters(), lr=lr)
            train_loss_list = []
            test_loss_list = []
            roc_auc_list = []
            for epoch in range(epochs):
                train_loss = train(net, optimizer, trainloader, criterion, device)
                test_loss, fpr, tpr, roc_auc = test(net, testloader, criterion, device)
                train_loss_list.append(train_loss)
                test_loss_list.append(test_loss)
                roc_auc_list.append(roc_auc)
                print(f"[Optim: {opt.__name__}, LR: {lr}, Epoch: {epoch + 1}/{epochs}], "
                      f"Train Loss: {train_loss:.4f}, Test Loss: {test_loss:.4f}, ROC AUC: {roc_auc:.4f}")
            # Plot the ROC curve from the final epoch of this combination
            ax.plot(fpr, tpr, label=f"{opt.__name__}, LR={lr}, ROC AUC={roc_auc:.4f}")
    # Finish the ROC plot
    ax.plot([0, 1], [0, 1], 'k--')
    ax.set_xlim([0.0, 1.0])
    ax.set_ylim([0.0, 1.05])
    ax.set_xlabel('False Positive Rate')
    ax.set_ylabel('True Positive Rate')
    ax.set_title('ROC Curve')
    ax.legend(loc="lower right")
    plt.show()
```
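As a side note: if you only need the AUC number rather than the curve itself, scikit-learn can score the ten-class problem directly. A minimal sketch, assuming `y_true` and `y_score` are collected exactly as in `test()` above:

```python
from sklearn.metrics import roc_auc_score

# One-vs-rest, macro-averaged AUC over the 10 classes; y_score must hold the
# full 10-dimensional softmax output for every sample
auc_ovr = roc_auc_score(y_true, np.array(y_score), multi_class='ovr', average='macro')
print(f"OvR macro AUC: {auc_ovr:.4f}")
```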
Sample output:
```
Files already downloaded and verified
Files already downloaded and verified
[Optim: SGD, LR: 0.1, Epoch: 1/10], Train Loss: 2.3026, Test Loss: 2.3023, ROC AUC: 0.5057
[Optim: SGD, LR: 0.1, Epoch: 2/10], Train Loss: 2.3026, Test Loss: 2.3022, ROC AUC: 0.5373
[Optim: SGD, LR: 0.1, Epoch: 3/10], Train Loss: 2.3026, Test Loss: 2.3022, ROC AUC: 0.5485
[Optim: SGD, LR: 0.1, Epoch: 4/10], Train Loss: 2.3026, Test Loss: 2.3022, ROC AUC: 0.5532
[Optim: SGD, LR: 0.1, Epoch: 5/10], Train Loss: 2.3026, Test Loss: 2.3022, ROC AUC: 0.5550
[Optim: SGD, LR: 0.1, Epoch: 6/10], Train Loss: 2.3026, Test Loss: 2.3022, ROC AUC: 0.5558
[Optim: SGD, LR: 0.1, Epoch: 7/10], Train Loss: 2.3026, Test Loss: 2.3022, ROC AUC: 0.5561
[Optim: SGD, LR: 0.1, Epoch: 8/10], Train Loss: 2.3026, Test Loss: 2.3022, ROC AUC: 0.5562
[Optim: SGD, LR: 0.1, Epoch: 9/10], Train Loss: 2.3026, Test Loss: 2.3022, ROC AUC: 0.5563
[Optim: SGD, LR: 0.1, Epoch: 10/10], Train Loss: 2.3026, Test Loss: 2.3022, ROC AUC: 0.5563
[Optim: SGD, LR: 0.01, Epoch: 1/10], Train Loss: 2.0231, Test Loss: 1.8261, ROC AUC: 0.8291
[Optim: SGD, LR: 0.01, Epoch: 2/10], Train Loss: 1.6111, Test Loss: 1.4463, ROC AUC: 0.8939
[Optim: SGD, LR: 0.01, Epoch: 3/10], Train Loss: 1.2785, Test Loss: 1.1992, ROC AUC: 0.9202
[Optim: SGD, LR: 0.01, Epoch: 4/10], Train Loss: 1.0621, Test Loss: 1.0372, ROC AUC: 0.9328
[Optim: SGD, LR: 0.01, Epoch: 5/10], Train Loss: 0.8927, Test Loss: 0.9657, ROC AUC: 0.9401
[Optim: SGD, LR: 0.01, Epoch: 6/10], Train Loss: 0.7664, Test Loss: 0.8481, ROC AUC: 0.9474
[Optim: SGD, LR: 0.01, Epoch: 7/10], Train Loss: 0.6478, Test Loss: 0.7957, ROC AUC: 0.9516
[Optim: SGD, LR: 0.01, Epoch: 8/10], Train Loss: 0.5524, Test Loss: 0.7673, ROC AUC: 0.9534
[Optim: SGD, LR: 0.01, Epoch: 9/10], Train Loss: 0.4679, Test Loss: 0.7486, ROC AUC: 0.9553
[Optim: SGD, LR: 0.01, Epoch: 10/10], Train Loss: 0.4006, Test Loss: 0.7721, ROC AUC: 0.9552
[Optim: SGD, LR: 0.001, Epoch: 1/10], Train Loss: 1.8765, Test Loss: 1.7115, ROC AUC: 0.8311
[Optim: SGD, LR: 0.001, Epoch: 2/10], Train Loss: 1.4545, Test Loss: 1.3565, ROC AUC: 0.8937
[Optim: SGD, LR: 0.001, Epoch: 3/10], Train Loss: 1.2157, Test Loss: 1.1699, ROC AUC: 0.9191
[Optim: SGD, LR: 0.001, Epoch: 4/10], Train Loss: 1.0505, Test Loss: 1.0745, ROC AUC: 0.9293
[Optim: SGD, LR: 0.001, Epoch: 5/10], Train Loss: 0.9217, Test Loss: 0.9583, ROC AUC: 0.9399
[Optim: SGD, LR: 0.001, Epoch: 6/10], Train Loss: 0.8185, Test Loss: 0.9045, ROC AUC: 0.9445
[Optim: SGD, LR: 0.001, Epoch: 7/10], Train Loss: 0.7237, Test Loss: 0.8716, ROC AUC: 0.9475
[Optim: SGD, LR: 0.001, Epoch: 8/10], Train Loss: 0.6485, Test Loss: 0.8459, ROC AUC: 0.9504
[Optim: SGD, LR: 0.001, Epoch: 9/10], Train Loss: 0.5735, Test Loss: 0.8464, ROC AUC: 0.9501
[Optim: SGD, LR: 0.001, Epoch: 10/10], Train Loss: 0.5082, Test Loss: 0.8405, ROC AUC: 0.9516
[Optim: SGD, LR: 0.0001, Epoch: 1/10], Train Loss: 2.2772, Test Loss: 2.1661, ROC AUC: 0.5288
[Optim: SGD, LR: 0.0001, Epoch: 2/10], Train Loss: 2.1066, Test Loss: 2.0034, ROC AUC: 0.6055
[Optim: SGD, LR: 0.0001, Epoch: 3/10], Train Loss: 1.9706, Test Loss: 1.8807, ROC AUC: 0.6497
[Optim: SGD, LR: 0.0001, Epoch: 4/10], Train Loss: 1.8734, Test Loss: 1.7957, ROC AUC: 0.6747
[Optim: SGD, LR: 0.0001, Epoch: 5/10], Train Loss: 1.7997, Test Loss: 1.7321, ROC AUC: 0.6934
[Optim: SGD, LR: 0.0001, Epoch: 6/10], Train Loss: 1.7402, Test Loss: 1.6794, ROC AUC: 0.7089
[Optim: SGD, LR: 0.0001, Epoch: 7/10], Train Loss: 1.6909, Test Loss: 1.6368, ROC AUC: 0.7217
[Optim: SGD, LR: 0.0001, Epoch: 8/10], Train Loss: 1.6492, Test Loss: 1.6009, ROC AUC: 0.7327
[Optim: SGD, LR: 0.0001, Epoch: 9/10], Train Loss: 1.6130, Test Loss: 1.5704, ROC AUC: 0.7408
[Optim: SGD, LR: 0.0001, Epoch: 10/10], Train Loss: 1.5805, Test Loss: 1.5443, ROC AUC: 0.7478
[Optim: Adam, LR: 0.1, Epoch: 1/10], Train Loss: 2.3029, Test Loss: 2.3027, ROC AUC: 0.5235
[Optim: Adam, LR: 0.1, Epoch: 2/10], Train Loss: 2.3028, Test Loss: 2.3027, ROC AUC: 0.5540
[Optim: Adam, LR: 0.1, Epoch: 3/10], Train Loss: 2.3028, Test Loss: 2.3027, ROC AUC: 0.5852
[Optim: Adam, LR: 0.1, Epoch: 4/10], Train Loss: 2.3028, Test Loss: 2.3027, ROC AUC: 0.6157
[Optim: Adam, LR: 0.1, Epoch: 5/10], Train Loss: 2.3028, Test Loss: 2.3026, ROC AUC: 0.6450
[Optim: Adam, LR: 0.1, Epoch: 6/10], Train Loss: 2.3028, Test Loss: 2.3026, ROC AUC: 0.6726
[Optim: Adam, LR: 0.1, Epoch: 7/10], Train Loss: 2.3028, Test Loss: 2.3026, ROC AUC: 0.6977
[Optim: Adam, LR: 0.1, Epoch: 8/10], Train Loss: 2.3028, Test Loss: 2.3025, ROC AUC: 0.7191
[Optim: Adam, LR: 0.1, Epoch: 9/10], Train Loss: 2.3028, Test Loss: 2.3025, ROC AUC: 0.7353
[Optim: Adam, LR: 0.1, Epoch: 10/10], Train Loss: ...
```
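Note that the loop above compares optimizers and initial learning rates, but the question also asks about different learning-rate update schemes. One way to extend the comparison is to attach a `torch.optim.lr_scheduler` to each run; the sketch below is illustrative (the scheduler choices and their hyperparameters are assumptions, not part of the original code), and the resulting `fpr`/`tpr` for each scheduler can then be plotted exactly as before:

```python
from torch.optim.lr_scheduler import StepLR, CosineAnnealingLR

# Example: compare two LR update schemes on top of SGD with the same initial LR
net = VGG16().to(device)
optimizer = optim.SGD(net.parameters(), lr=0.01)
scheduler = StepLR(optimizer, step_size=3, gamma=0.5)     # halve the LR every 3 epochs
# scheduler = CosineAnnealingLR(optimizer, T_max=epochs)  # or: smooth cosine decay

for epoch in range(epochs):
    train_loss = train(net, optimizer, trainloader, criterion, device)
    scheduler.step()  # apply the LR update once per epoch
    test_loss, fpr, tpr, roc_auc = test(net, testloader, criterion, device)
```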