ShuffleNet的代码
时间: 2024-05-02 16:17:20 浏览: 61
以下是ShuffleNet的PyTorch实现代码:
```python
import torch.nn as nn
import torch.nn.functional as F
class ShuffleUnit(nn.Module):
    """Simplified ShuffleNet v1 unit.

    Main branch: 1x1 conv -> BN -> ReLU -> channel shuffle -> 3x3 grouped
    conv (stride) -> BN -> 1x1 conv -> BN -> ReLU.  The shortcut is identity
    when shape is preserved, otherwise a depthwise 3x3 (stride) + pointwise
    1x1 projection.  Branches are merged by element-wise addition.

    NOTE(review): the original paper uses grouped 1x1 convs and concatenation
    in downsampling units; this variant uses plain 1x1 convs and addition.

    Args:
        in_channels:  number of input feature channels.
        out_channels: number of output feature channels.
        groups:       group count for the 3x3 conv and the channel shuffle;
                      must divide ``out_channels // 4``.
        stride:       spatial stride of the unit (1 keeps size, 2 halves it).
    """

    def __init__(self, in_channels, out_channels, groups=3, stride=1):
        super(ShuffleUnit, self).__init__()
        # Bottleneck width, per the paper: 1/4 of the output channels.
        mid_channels = out_channels // 4
        self.groups = groups
        self.stride = stride
        if stride != 1 or in_channels != out_channels:
            # Projection shortcut: depthwise 3x3 (carries the stride) then
            # pointwise 1x1 to match out_channels.  This already downsamples,
            # so forward() must NOT pool the residual again.
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=stride,
                          padding=1, groups=in_channels, bias=False),
                nn.BatchNorm2d(in_channels),
                nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1,
                          padding=0, bias=False),
                nn.BatchNorm2d(out_channels),
            )
        else:
            # Identity shortcut.
            self.shortcut = nn.Sequential()
        self.conv1 = nn.Conv2d(in_channels, mid_channels, kernel_size=1,
                               stride=1, padding=0, bias=False)
        self.bn1 = nn.BatchNorm2d(mid_channels)
        self.conv2 = nn.Conv2d(mid_channels, mid_channels, kernel_size=3,
                               stride=stride, padding=1, groups=groups, bias=False)
        self.bn2 = nn.BatchNorm2d(mid_channels)
        self.conv3 = nn.Conv2d(mid_channels, out_channels, kernel_size=1,
                               stride=1, padding=0, bias=False)
        self.bn3 = nn.BatchNorm2d(out_channels)

    @staticmethod
    def _shuffle(x, groups):
        """Channel shuffle: interleave channels across groups.

        Reshape (N, C, H, W) -> (N, g, C//g, H, W), swap the group and
        per-group axes, and flatten back.  There is no ``F.shuffle_channel``
        in PyTorch, so this must be done manually.
        """
        n, c, h, w = x.size()
        return (x.view(n, groups, c // groups, h, w)
                 .transpose(1, 2)
                 .contiguous()
                 .view(n, c, h, w))

    def forward(self, x):
        # Shortcut already matches the main branch in channels AND stride
        # (the original code pooled it a second time, breaking the add).
        residual = self.shortcut(x)
        out = F.relu(self.bn1(self.conv1(x)))
        out = self._shuffle(out, self.groups)
        out = self.bn2(self.conv2(out))
        out = F.relu(self.bn3(self.conv3(out)))
        return F.relu(out + residual)
class ShuffleNet(nn.Module):
    """Simplified ShuffleNet v1 classifier.

    Stem (3x3 stride-2 conv + max-pool) followed by three ShuffleUnit stages,
    global average pooling, and a linear classifier.

    Args:
        num_classes: size of the final classification layer.
        input_size:  nominal input resolution; selects the per-stage channel
                     table.  Must be one of 224/192/160/128/96.
        groups:      shuffle-group count passed to every ShuffleUnit
                     (default 3, matching the original default; the old code
                     inconsistently mixed groups=3 and groups=4).

    NOTE(review): some channel tables (e.g. 200 for input_size=192) give a
    bottleneck width not divisible by 3 — those configs cannot build a valid
    grouped conv and were already broken; verify the tables against the
    intended paper variant.
    """

    def __init__(self, num_classes=1000, input_size=224, groups=3):
        super(ShuffleNet, self).__init__()
        # Network downsamples by 32x overall; validate explicitly rather
        # than with assert (assert is stripped under -O).
        if input_size % 32 != 0:
            raise ValueError("input_size must be a multiple of 32")
        self.stage_repeats = [3, 7, 3]
        # Channel table: [stem, stage2, stage3, stage4], chosen per resolution.
        if input_size == 224:
            self.out_channels = [24, 240, 480, 960]
        elif input_size == 192:
            self.out_channels = [24, 200, 400, 800]
        elif input_size == 160:
            self.out_channels = [24, 168, 336, 672]
        elif input_size == 128:
            self.out_channels = [24, 116, 232, 464]
        elif input_size == 96:
            self.out_channels = [24, 72, 144, 288]
        else:
            raise ValueError("Unsupported input size!")
        self.conv1 = nn.Conv2d(3, self.out_channels[0], kernel_size=3,
                               stride=2, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(self.out_channels[0])
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.stage2 = self._make_stage(self.out_channels[0], self.out_channels[1],
                                       self.stage_repeats[0], 2, groups)
        self.stage3 = self._make_stage(self.out_channels[1], self.out_channels[2],
                                       self.stage_repeats[1], 2, groups)
        self.stage4 = self._make_stage(self.out_channels[2], self.out_channels[3],
                                       self.stage_repeats[2], 2, groups)
        # Adaptive pooling works for every supported input_size; the old
        # fixed AvgPool2d(7) only matched the 224-pixel configuration.
        self.avgpool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Linear(self.out_channels[3], num_classes)

    def _make_stage(self, in_channels, out_channels, num_blocks, stride, groups=3):
        """Build one stage: a stride-`stride` unit then stride-1 units.

        All units share the same `groups` (the old code used 3 for the first
        unit and 4 for the rest, which crashes for several channel tables).
        """
        stage = [ShuffleUnit(in_channels, out_channels, groups=groups, stride=stride)]
        stage.extend(
            ShuffleUnit(out_channels, out_channels, groups=groups)
            for _ in range(num_blocks - 1)
        )
        return nn.Sequential(*stage)

    def forward(self, x):
        x = F.relu(self.bn1(self.conv1(x)))
        x = self.maxpool(x)
        x = self.stage2(x)
        x = self.stage3(x)
        x = self.stage4(x)
        x = self.avgpool(x)
        # Flatten (N, C, 1, 1) -> (N, C) for the classifier head.
        x = x.view(x.size(0), -1)
        return self.fc(x)
```
这里实现了一个简化版的 ShuffleNet v1,包括 ShuffleUnit 和 ShuffleNet 模块(与原论文不同,降采样单元使用逐元素相加而非通道拼接,1×1 卷积也未分组)。可以根据需要修改输入大小以及输出类别数。
阅读全文