DenseNet source code
Here is a PyTorch implementation of DenseNet:
```
import torch
import torch.nn as nn
import torch.nn.functional as F


class Bottleneck(nn.Module):
    """Bottleneck layer: BN-ReLU-Conv1x1 followed by BN-ReLU-Conv3x3."""

    def __init__(self, in_channels, growth_rate):
        super().__init__()
        self.bn1 = nn.BatchNorm2d(in_channels)
        self.conv1 = nn.Conv2d(in_channels, 4 * growth_rate, kernel_size=1, bias=False)
        self.bn2 = nn.BatchNorm2d(4 * growth_rate)
        self.conv2 = nn.Conv2d(4 * growth_rate, growth_rate, kernel_size=3, padding=1, bias=False)

    def forward(self, x):
        out = self.conv1(F.relu(self.bn1(x)))
        out = self.conv2(F.relu(self.bn2(out)))
        # Concatenate the input and output feature maps along the channel dimension
        out = torch.cat([out, x], 1)
        return out


class DenseBlock(nn.Module):
    """A stack of bottleneck layers; each layer receives all preceding feature maps."""

    def __init__(self, in_channels, num_layers, growth_rate):
        super().__init__()
        layers = []
        for i in range(num_layers):
            # Each layer adds growth_rate channels, so the input width grows linearly
            layers.append(Bottleneck(in_channels + i * growth_rate, growth_rate))
        self.block = nn.Sequential(*layers)

    def forward(self, x):
        return self.block(x)


class Transition(nn.Module):
    """Transition layer: BN-ReLU-Conv1x1 to reduce channels, then 2x2 average pooling."""

    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.bn = nn.BatchNorm2d(in_channels)
        self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1, bias=False)
        self.pool = nn.AvgPool2d(kernel_size=2, stride=2)

    def forward(self, x):
        out = self.conv(F.relu(self.bn(x)))
        out = self.pool(out)
        return out


class DenseNet(nn.Module):
    def __init__(self, growth_rate=32, block_layers=[6, 12, 24, 16], num_classes=10):
        super().__init__()
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, padding=1, bias=False)
        self.dense1 = DenseBlock(64, block_layers[0], growth_rate)
        self.trans1 = Transition(64 + block_layers[0] * growth_rate, 128)
        self.dense2 = DenseBlock(128, block_layers[1], growth_rate)
        self.trans2 = Transition(128 + block_layers[1] * growth_rate, 256)
        self.dense3 = DenseBlock(256, block_layers[2], growth_rate)
        self.trans3 = Transition(256 + block_layers[2] * growth_rate, 512)
        self.dense4 = DenseBlock(512, block_layers[3], growth_rate)
        self.bn = nn.BatchNorm2d(512 + block_layers[3] * growth_rate)
        self.fc = nn.Linear(512 + block_layers[3] * growth_rate, num_classes)

    def forward(self, x):
        out = self.conv1(x)
        out = self.trans1(self.dense1(out))
        out = self.trans2(self.dense2(out))
        out = self.trans3(self.dense3(out))
        out = self.dense4(out)
        # Final BN-ReLU, global average pooling, then the classifier
        out = F.relu(self.bn(out))
        out = F.avg_pool2d(out, out.shape[2:])
        out = out.view(out.size(0), -1)
        out = self.fc(out)
        return out
```
This implements a DenseNet-121-style network: 121 weight layers, counting the stem convolution, 2 × (6 + 12 + 24 + 16) bottleneck convolutions, 3 transition convolutions, and the final fully connected layer. Note that it is a CIFAR-style variant (3×3 stem convolution, fixed transition widths, 10 classes) rather than the ImageNet model. Other DenseNet depths can be obtained by changing `block_layers`, e.g. [6, 12, 32, 32] for DenseNet-169, [6, 12, 48, 32] for DenseNet-201, and [6, 12, 64, 48] for DenseNet-264.
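As a quick sanity check, the minimal sketch below (assuming the class definitions above are in scope) builds the default model and a deeper 169-style variant, then runs a dummy CIFAR-sized batch through both:
```
import torch

# Default configuration: DenseNet-121-style
model = DenseNet(growth_rate=32, block_layers=[6, 12, 24, 16], num_classes=10)
x = torch.randn(2, 3, 32, 32)   # dummy batch of two 32x32 RGB images
logits = model(x)
print(logits.shape)             # torch.Size([2, 10])

# DenseNet-169-style block configuration
model169 = DenseNet(growth_rate=32, block_layers=[6, 12, 32, 32], num_classes=10)
print(model169(x).shape)        # torch.Size([2, 10])
```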