```
class LeNet5_2(nn.Cell):
    # define operators
    def __init__(self, num_class=10, num_channel=3):
        super(LeNet5_2, self).__init__()
        # convolution layers
        self.conv1 = nn.Conv2d(num_channel, 64, 3, pad_mode='valid', weight_init=Normal(0.06))
        self.conv2 = nn.Conv2d(64, 64, 3, pad_mode='valid', weight_init=Normal(0.06))
        self.conv3 = nn.Conv2d(64, 128, 3, pad_mode='valid', weight_init=Normal(0.06))
        # fully connected layers
        self.fc1 = nn.Dense(128 * 2 * 2, 120, weight_init=Normal(0.06))
        self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.06))
        self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.06))
        # activation function
        self.relu = nn.ReLU()
        # max pooling layer
        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
        # flatten layer
        self.flatten = nn.Flatten()
        self.num_class = num_class

    # build the network
    def construct(self, x):
        x = self.conv1(x)
        x = self.relu(x)
        x = self.max_pool2d(x)
        x = self.conv2(x)
        x = self.relu(x)
        x = self.max_pool2d(x)
        x = self.conv3(x)
        x = self.relu(x)
        x = self.max_pool2d(x)
        x = self.flatten(x)
        x = self.fc1(x)
        x = self.relu(x)
        x = self.fc2(x)
        x = self.relu(x)
        x = self.fc3(x)
        return x
```
Add batch normalization layers to this network.
You can add batch normalization by inserting BatchNorm layers in the __init__ method of the LeNet5_2 class, as shown below:
```
import mindspore.nn as nn
from mindspore.common.initializer import Normal

class LeNet5_2(nn.Cell):
    def __init__(self, num_class=10, num_channel=3):
        super(LeNet5_2, self).__init__()
        # convolution layers, each followed by a BatchNorm2d layer
        self.conv1 = nn.Conv2d(num_channel, 64, 3, pad_mode='valid', weight_init=Normal(0.06))
        self.bn1 = nn.BatchNorm2d(64)
        self.conv2 = nn.Conv2d(64, 64, 3, pad_mode='valid', weight_init=Normal(0.06))
        self.bn2 = nn.BatchNorm2d(64)
        self.conv3 = nn.Conv2d(64, 128, 3, pad_mode='valid', weight_init=Normal(0.06))
        self.bn3 = nn.BatchNorm2d(128)
        # fully connected layers, the hidden ones followed by BatchNorm1d layers
        self.fc1 = nn.Dense(128 * 2 * 2, 120, weight_init=Normal(0.06))
        self.bn4 = nn.BatchNorm1d(120)
        self.fc2 = nn.Dense(120, 84, weight_init=Normal(0.06))
        self.bn5 = nn.BatchNorm1d(84)
        self.fc3 = nn.Dense(84, num_class, weight_init=Normal(0.06))
        # activation function
        self.relu = nn.ReLU()
        # max pooling layer
        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
        # flatten layer
        self.flatten = nn.Flatten()
        self.num_class = num_class

    # build the network: conv -> bn -> relu -> pool, then fc -> bn -> relu
    def construct(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.max_pool2d(x)
        x = self.conv2(x)
        x = self.bn2(x)
        x = self.relu(x)
        x = self.max_pool2d(x)
        x = self.conv3(x)
        x = self.bn3(x)
        x = self.relu(x)
        x = self.max_pool2d(x)
        x = self.flatten(x)
        x = self.fc1(x)
        x = self.bn4(x)
        x = self.relu(x)
        x = self.fc2(x)
        x = self.bn5(x)
        x = self.relu(x)
        x = self.fc3(x)
        return x
```
Here, BatchNorm2d performs batch normalization on the convolution outputs, while BatchNorm1d normalizes the fully connected outputs. When building the network in construct, each batch normalization layer is applied immediately after its corresponding convolution or fully connected layer, before the ReLU activation.
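As a quick sanity check (not part of the original answer), the sketch below instantiates the modified network and runs a forward pass on a random batch. The 3×32×32 input size is an assumption inferred from the 128*2*2 flatten size fed into fc1; adjust it to your actual data.

```
import numpy as np
from mindspore import Tensor

# Hypothetical smoke test: assumes 32x32 RGB inputs,
# consistent with the 128*2*2 feature size expected by fc1.
net = LeNet5_2(num_class=10, num_channel=3)
net.set_train(False)  # use BatchNorm moving statistics for inference
dummy = Tensor(np.random.randn(4, 3, 32, 32).astype(np.float32))
logits = net(dummy)
print(logits.shape)   # expected: (4, 10)
```

During training, call net.set_train(True) so the BatchNorm layers compute statistics from each mini-batch and update their moving mean and variance.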