Replace the 3×3 convolutions in the following code with 1×3 and 3×1 convolutions:
```
class ASPP(nn.Module):
    def __init__(self, dim_in, dim_out, rate=1, bn_mom=0.1):
        super(ASPP, self).__init__()
        self.branch1 = nn.Sequential(
            nn.Conv2d(dim_in, dim_out, 1, 1, padding=0, dilation=rate, bias=True),
            nn.BatchNorm2d(dim_out, momentum=bn_mom),
            nn.ReLU(inplace=True),
        )
        self.branch2 = nn.Sequential(
            nn.Conv2d(dim_in, dim_out, 3, 1, padding=4 * rate, dilation=4 * rate, bias=True),
            nn.BatchNorm2d(dim_out, momentum=bn_mom),
            nn.ReLU(inplace=True),
        )
        self.branch3 = nn.Sequential(
            nn.Conv2d(dim_in, dim_out, 3, 1, padding=8 * rate, dilation=8 * rate, bias=True),
            nn.BatchNorm2d(dim_out, momentum=bn_mom),
            nn.ReLU(inplace=True),
        )
        self.branch4 = nn.Sequential(
            nn.Conv2d(dim_in, dim_out, 3, 1, padding=12 * rate, dilation=12 * rate, bias=True),
            nn.BatchNorm2d(dim_out, momentum=bn_mom),
            nn.ReLU(inplace=True),
        )
        self.branch5 = nn.Sequential(
            nn.Conv2d(dim_in, dim_out, 3, 1, padding=16 * rate, dilation=16 * rate, bias=True),
            nn.BatchNorm2d(dim_out, momentum=bn_mom),
            nn.ReLU(inplace=True),
        )
        self.branch6 = nn.Sequential(
            nn.Conv2d(dim_in, dim_out, 3, 1, padding=20 * rate, dilation=20 * rate, bias=True),
            nn.BatchNorm2d(dim_out, momentum=bn_mom),
            nn.ReLU(inplace=True),
        )
        self.branch7 = nn.Sequential(
            nn.Conv2d(dim_in, dim_out, 3, 1, padding=24 * rate, dilation=24 * rate, bias=True),
            nn.BatchNorm2d(dim_out, momentum=bn_mom),
            nn.ReLU(inplace=True),
        )
        self.branch8_conv = nn.Conv2d(dim_in, dim_out, 1, 1, 0, bias=True)
        self.branch8_bn = nn.BatchNorm2d(dim_out, momentum=bn_mom)
        self.branch8_relu = nn.ReLU(inplace=True)
        self.conv_cat = nn.Sequential(
            nn.Conv2d(dim_out * 8, dim_out, 1, 1, padding=0, bias=True),
            nn.BatchNorm2d(dim_out, momentum=bn_mom),
            nn.ReLU(inplace=True),
        )
```
You can replace the 3×3 convolution in each of self.branch2 through self.branch7 with a combination of a 1×3 and a 3×1 convolution, as follows:
```
self.branch2 = nn.Sequential(
    nn.Conv2d(dim_in, dim_out, (1, 3), 1, padding=(0, 4 * rate), dilation=4 * rate, bias=True),
    nn.Conv2d(dim_out, dim_out, (3, 1), 1, padding=(4 * rate, 0), dilation=4 * rate, bias=True),
    nn.BatchNorm2d(dim_out, momentum=bn_mom),
    nn.ReLU(inplace=True),
)
self.branch3 = nn.Sequential(
    nn.Conv2d(dim_in, dim_out, (1, 3), 1, padding=(0, 8 * rate), dilation=8 * rate, bias=True),
    nn.Conv2d(dim_out, dim_out, (3, 1), 1, padding=(8 * rate, 0), dilation=8 * rate, bias=True),
    nn.BatchNorm2d(dim_out, momentum=bn_mom),
    nn.ReLU(inplace=True),
)
self.branch4 = nn.Sequential(
    nn.Conv2d(dim_in, dim_out, (1, 3), 1, padding=(0, 12 * rate), dilation=12 * rate, bias=True),
    nn.Conv2d(dim_out, dim_out, (3, 1), 1, padding=(12 * rate, 0), dilation=12 * rate, bias=True),
    nn.BatchNorm2d(dim_out, momentum=bn_mom),
    nn.ReLU(inplace=True),
)
self.branch5 = nn.Sequential(
    nn.Conv2d(dim_in, dim_out, (1, 3), 1, padding=(0, 16 * rate), dilation=16 * rate, bias=True),
    nn.Conv2d(dim_out, dim_out, (3, 1), 1, padding=(16 * rate, 0), dilation=16 * rate, bias=True),
    nn.BatchNorm2d(dim_out, momentum=bn_mom),
    nn.ReLU(inplace=True),
)
self.branch6 = nn.Sequential(
    nn.Conv2d(dim_in, dim_out, (1, 3), 1, padding=(0, 20 * rate), dilation=20 * rate, bias=True),
    nn.Conv2d(dim_out, dim_out, (3, 1), 1, padding=(20 * rate, 0), dilation=20 * rate, bias=True),
    nn.BatchNorm2d(dim_out, momentum=bn_mom),
    nn.ReLU(inplace=True),
)
self.branch7 = nn.Sequential(
    nn.Conv2d(dim_in, dim_out, (1, 3), 1, padding=(0, 24 * rate), dilation=24 * rate, bias=True),
    nn.Conv2d(dim_out, dim_out, (3, 1), 1, padding=(24 * rate, 0), dilation=24 * rate, bias=True),
    nn.BatchNorm2d(dim_out, momentum=bn_mom),
    nn.ReLU(inplace=True),
)
```
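One motivation for this factorization is the smaller weight count per branch: ignoring biases, a 3×3 convolution has 9 · dim_in · dim_out weights, while the 1×3 + 3×1 pair has 3 · dim_in · dim_out + 3 · dim_out · dim_out, which is fewer whenever dim_out < 2 · dim_in. A quick comparison for one branch (a minimal sketch; the dim_in = dim_out = 64 and rate = 1 values are chosen only for illustration):
```
import torch.nn as nn

dim_in, dim_out, rate = 64, 64, 1  # hypothetical sizes, for illustration only

# Original dilated 3x3 convolution from branch2
full = nn.Conv2d(dim_in, dim_out, 3, 1, padding=4 * rate, dilation=4 * rate, bias=True)

# Factorized 1x3 + 3x1 replacement
factored = nn.Sequential(
    nn.Conv2d(dim_in, dim_out, (1, 3), 1, padding=(0, 4 * rate), dilation=4 * rate, bias=True),
    nn.Conv2d(dim_out, dim_out, (3, 1), 1, padding=(4 * rate, 0), dilation=4 * rate, bias=True),
)

print(sum(p.numel() for p in full.parameters()))      # 36928 = 9*64*64 + 64
print(sum(p.numel() for p in factored.parameters()))  # 24704 = 2 * (3*64*64 + 64)
```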
In the modified branches, (1, 3) denotes a 1×3 kernel and (3, 1) a 3×1 kernel. The padding is applied only along the dimension whose kernel size is 3 and matches the dilation (e.g. padding=(0, 4 * rate) for the 1×3 convolution), so each factorized pair preserves the same spatial size as the original dilated 3×3 convolution. The integer dilation is applied to both dimensions, but it has no effect along the dimension whose kernel size is 1.
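As a quick sanity check that the padding keeps the output resolution unchanged, you can push a dummy tensor through one factorized branch (again a sketch with arbitrary sizes):
```
import torch
import torch.nn as nn

dim_in, dim_out, rate = 64, 64, 1  # hypothetical sizes, for illustration only

branch2 = nn.Sequential(
    nn.Conv2d(dim_in, dim_out, (1, 3), 1, padding=(0, 4 * rate), dilation=4 * rate, bias=True),
    nn.Conv2d(dim_out, dim_out, (3, 1), 1, padding=(4 * rate, 0), dilation=4 * rate, bias=True),
    nn.BatchNorm2d(dim_out),
    nn.ReLU(inplace=True),
)

x = torch.randn(2, dim_in, 32, 32)
print(branch2(x).shape)  # torch.Size([2, 64, 32, 32]) -- same H and W as the input
```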