self.SA_modules.append( nn.Sequential( PointnetSAModuleMSG( npoint=cfg.RPN.SA_CONFIG.NPOINTS[k], radii=cfg.RPN.SA_CONFIG.RADIUS[k], nsamples=cfg.RPN.SA_CONFIG.NSAMPLE[k], mlps=mlps, use_xyz=use_xyz, bn=cfg.RPN.USE_BN ), SelfAttention(channel_out) ) )这是SA_modules的定义代码块,而 for i in range(len(self.SA_modules)): li_xyz, li_features = self.SA_modules[i](l_xyz[i], l_features[i]) l_xyz.append(li_xyz) l_features.append(li_features)是SA_modules的调用代码块,而这是PointnetSAModuleMSG类的父类的代码:class _PointnetSAModuleBase(nn.Module): def init(self): super().init() self.npoint = None self.groupers = None self.mlps = None self.pool_method = 'max_pool' def forward(self, xyz: torch.Tensor, features: torch.Tensor = None, new_xyz=None) -> (torch.Tensor, torch.Tensor): """ :param xyz: (B, N, 3) tensor of the xyz coordinates of the features :param features: (B, N, C) tensor of the descriptors of the the features :param new_xyz: :return: new_xyz: (B, npoint, 3) tensor of the new features' xyz new_features: (B, npoint, \sum_k(mlps[k][-1])) tensor of the new_features descriptors """ new_features_list = [] xyz_flipped = xyz.transpose(1, 2).contiguous() if new_xyz is None: new_xyz = pointnet2_utils.gather_operation( xyz_flipped, pointnet2_utils.furthest_point_sample(xyz, self.npoint) ).transpose(1, 2).contiguous() if self.npoint is not None else None for i in range(len(self.groupers)): new_features = self.groupers[i](xyz, new_xyz, features) # (B, C, npoint, nsample) new_features = self.mlpsi # (B, mlp[-1], npoint, nsample) if self.pool_method == 'max_pool': new_features = F.max_pool2d( new_features, kernel_size=[1, new_features.size(3)] ) # (B, mlp[-1], npoint, 1) elif self.pool_method == 'avg_pool': new_features = F.avg_pool2d( new_features, kernel_size=[1, new_features.size(3)] ) # (B, mlp[-1], npoint, 1) else: raise NotImplementedError new_features = new_features.squeeze(-1) # (B, mlp[-1], npoint) new_features_list.append(new_features) return new_xyz, torch.cat(new_features_list, 
dim=1);运行时程序报错提示我在调用SA_modules时传递的三个参数,现在看来应该是多出了参数channel_out,我该怎么修改代码才能让SA_modules顺利接受三个参数并正常运行
时间: 2023-11-22 18:51:41 浏览: 170
浅谈keras通过model.fit_generator训练模型(节省内存)
首先,你需要在PointnetSAModuleMSG类中增加一个名为channel_out的参数,并在`__init__`函数中对其进行初始化:
```
class PointnetSAModuleMSG(_PointnetSAModuleBase):
    """Multi-scale-grouping (MSG) set-abstraction module.

    Builds one ``QueryAndGroup`` grouper plus one ``SharedMLP`` per radius
    scale. If ``channel_out`` is given, the output width of the *last*
    scale's MLP is overridden so the module's concatenated feature size is
    known to downstream layers (e.g. a SelfAttention block).

    :param npoint: number of points sampled by farthest-point sampling
    :param radii: list of grouping radii, one per scale
    :param nsamples: list of neighbor counts, one per scale
    :param mlps: list of per-scale MLP channel specs, e.g. [[C, 64, 128], ...]
    :param use_xyz: whether to append raw xyz to grouped features
    :param bn: whether SharedMLP layers use batch norm
    :param channel_out: optional override for the last MLP's output channels
    """

    def __init__(self, npoint, radii, nsamples, mlps, use_xyz=True, bn=True, channel_out=None):
        super().__init__()
        self.npoint = npoint
        self.groupers = nn.ModuleList()
        self.mlps = nn.ModuleList()
        self.use_xyz = use_xyz
        self.channel_out = channel_out

        # Deep-copy the channel specs: the original code wrote
        # ``mlps[i][-1] = channel_out`` directly into the caller's list,
        # silently mutating shared configuration (e.g. a cfg list reused
        # when building the next SA stage).
        mlp_specs = [list(spec) for spec in mlps]
        if channel_out is not None:
            # Only the last scale's final layer width is overridden,
            # matching the original ``i == len(mlps) - 1`` condition.
            mlp_specs[-1][-1] = channel_out
        for spec in mlp_specs:
            self.mlps.append(pt_utils.SharedMLP(spec, bn=bn))

        # Plain zip over the python lists — the np.array round-trip in the
        # original added nothing.
        for radius, nsample in zip(radii, nsamples):
            self.groupers.append(
                pt_utils.QueryAndGroup(radius, nsample, use_xyz=use_xyz)
            )
```
然后,在SA_modules的定义代码块中,把channel_out在**构造时**同时传给PointnetSAModuleMSG和SelfAttention(注意是构造参数,不是前向调用参数):
```
# Build one SA stage: multi-scale set abstraction followed by
# self-attention, then register it on the module list.
# channel_out is consumed here, at construction time only.
sa_layer = PointnetSAModuleMSG(
    npoint=cfg.RPN.SA_CONFIG.NPOINTS[k],
    radii=cfg.RPN.SA_CONFIG.RADIUS[k],
    nsamples=cfg.RPN.SA_CONFIG.NSAMPLE[k],
    mlps=mlps,
    use_xyz=use_xyz,
    bn=cfg.RPN.USE_BN,
    channel_out=channel_out,
)
attention = SelfAttention(channel_out=channel_out)
self.SA_modules.append(nn.Sequential(sa_layer, attention))
```
最后,调用SA_modules的代码块**不需要**再传递channel_out——它只是模块构造时的参数,`nn.Sequential`的前向调用并不接受这个关键字参数,前向时仍然只传坐标和特征即可:
```
# Run each SA stage on the previous stage's output.
# NOTE: channel_out must NOT be passed here — nn.Sequential.__call__ does
# not accept it as a keyword, and passing it reproduces the very
# "unexpected extra argument" error being debugged. channel_out is a
# construction-time parameter, already baked into each module.
# NOTE(review): nn.Sequential's forward normally takes a single input;
# calling it with (xyz, features) assumes a custom Sequential that unpacks
# multiple arguments — confirm against the project's definition.
for i in range(len(self.SA_modules)):
    li_xyz, li_features = self.SA_modules[i](l_xyz[i], l_features[i])
    l_xyz.append(li_xyz)
    l_features.append(li_features)
```
这样,channel_out只在模块构造阶段生效,前向调用保持原有的参数不变,SA_modules即可正常构建并运行。
阅读全文