-
Notifications
You must be signed in to change notification settings - Fork 11
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
92419eb
commit ed844ed
Showing
15 changed files
with
810 additions
and
285 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -7,7 +7,6 @@ data/ | |
venv/ | ||
*.idea/ | ||
*.so | ||
*.yaml | ||
*.sh | ||
*.pth | ||
*.pkl | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,7 +1,9 @@ | ||
from .base_bev_backbone import BaseBEVBackbone, BaseBEVBackboneV1, BaseBEVResBackbone | ||
from .bev_backbone_ded import CascadeDEDBackbone | ||
|
||
# Registry mapping config NAME strings to 2D BEV backbone classes.
# NOTE: __all__ is conventionally a list of exported names, but this
# codebase (OpenPCDet style) uses it as a lookup dict so backbones can be
# instantiated from a model config string.
__all__ = {
    'BaseBEVBackbone': BaseBEVBackbone,
    'BaseBEVBackboneV1': BaseBEVBackboneV1,
    'BaseBEVResBackbone': BaseBEVResBackbone,
    'CascadeDEDBackbone': CascadeDEDBackbone,
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,89 @@ | ||
import torch.nn as nn | ||
from .base_bev_backbone import BasicBlock | ||
|
||
|
||
class DEDBackbone(nn.Module):
    """Single dense encoder-decoder (DED) BEV backbone built from BasicBlock units.

    The encoder shrinks the BEV map level by level with strided BasicBlocks;
    the decoder restores resolution with transposed convolutions, adding the
    skip feature from the matching encoder level before a BatchNorm.
    """

    def __init__(self, model_cfg, input_channels):
        super().__init__()

        sbb_counts = model_cfg.NUM_SBB          # residual blocks per level
        strides = model_cfg.DOWN_STRIDES        # downsample factor per level
        dim = model_cfg.FEATURE_DIM             # constant channel width
        assert len(sbb_counts) == len(strides)

        n_levels = len(strides)

        # Stem level: project input channels to `dim` only when they differ
        # (note: strides[0] is applied only in that projection block), then
        # stack the configured number of basic blocks.
        stem = []
        if input_channels != dim:
            stem.append(BasicBlock(input_channels, dim, strides[0], 1, True))
        stem.extend(BasicBlock(dim, dim) for _ in range(sbb_counts[0]))
        self.encoder = nn.ModuleList([nn.Sequential(*stem)])

        # Deeper encoder levels: one strided block followed by the SBB stack.
        for lvl in range(1, n_levels):
            level_blocks = [BasicBlock(dim, dim, strides[lvl], 1, True)]
            level_blocks += [BasicBlock(dim, dim) for _ in range(sbb_counts[lvl])]
            self.encoder.append(nn.Sequential(*level_blocks))

        # Decoder mirrors the encoder from the deepest level back to the stem.
        self.decoder = nn.ModuleList()
        self.decoder_norm = nn.ModuleList()
        for lvl in reversed(range(1, n_levels)):
            self.decoder.append(
                nn.Sequential(
                    nn.ConvTranspose2d(dim, dim, strides[lvl], strides[lvl], bias=False),
                    nn.BatchNorm2d(dim, eps=1e-3, momentum=0.01),
                    nn.ReLU()
                )
            )
            self.decoder_norm.append(nn.BatchNorm2d(dim, eps=1e-3, momentum=0.01))

        self.num_bev_features = dim
        self.init_weights()

    def init_weights(self):
        """Kaiming-initialize every Conv2d; set BatchNorm2d to weight=1, bias=0."""
        for _, mod in self.named_modules():
            if isinstance(mod, nn.Conv2d):
                nn.init.kaiming_normal_(mod.weight, a=0, mode='fan_out', nonlinearity='relu')
                if getattr(mod, 'bias', None) is not None:
                    nn.init.constant_(mod.bias, 0)
            elif isinstance(mod, nn.BatchNorm2d):
                nn.init.constant_(mod.weight, 1)
                if getattr(mod, 'bias', None) is not None:
                    nn.init.constant_(mod.bias, 0)

    def forward(self, data_dict):
        """Run the encoder-decoder over data_dict['spatial_features'].

        Writes the fused map to both 'spatial_features' and
        'spatial_features_2d' so downstream modules can chain stages,
        and returns the (mutated) dict.
        """
        out = self.encoder[0](data_dict['spatial_features'])

        skips = [out]
        for stage in self.encoder[1:]:
            out = stage(out)
            skips.append(out)

        # Fuse deepest-to-shallowest; skips[:-1][::-1] pairs each deconv
        # with the encoder output one level up.
        for up, norm, skip in zip(self.decoder, self.decoder_norm, skips[:-1][::-1]):
            out = norm(up(out) + skip)

        data_dict['spatial_features_2d'] = out
        data_dict['spatial_features'] = out
        return data_dict
|
||
|
||
class CascadeDEDBackbone(nn.Module):
    """Cascade of NUM_LAYERS DEDBackbone stages applied back to back.

    The first stage consumes `input_channels`; every later stage consumes
    the shared FEATURE_DIM width produced by its predecessor.
    """

    def __init__(self, model_cfg, input_channels):
        super().__init__()

        self.layers = nn.ModuleList()
        for stage_idx in range(model_cfg.NUM_LAYERS):
            in_dim = model_cfg.FEATURE_DIM if stage_idx else input_channels
            self.layers.append(DEDBackbone(model_cfg, in_dim))

        self.num_bev_features = model_cfg.FEATURE_DIM

    def forward(self, data_dict):
        """Run every DED stage in order; expose the final map as 'spatial_features_2d'."""
        for stage in self.layers:
            data_dict = stage(data_dict)
        data_dict['spatial_features_2d'] = data_dict['spatial_features']
        return data_dict
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,139 @@ | ||
|
||
import torch.nn as nn | ||
from functools import partial | ||
from ...utils.spconv_utils import replace_feature, spconv | ||
from .spconv_backbone import post_act_block, SparseBasicBlock | ||
|
||
|
||
class SEDBlock(spconv.SparseModule):
    """One sparse encoder stage: optional strided downsample followed by
    `num_SBB` sparse residual blocks at constant width `dim`.
    """

    def __init__(self, dim, kernel_size, stride, num_SBB, norm_fn, indice_key):
        super(SEDBlock, self).__init__()

        # Built unconditionally (matching original construction order) but
        # only used when stride > 1; stride-1 stages keep resolution.
        down = post_act_block(
            dim, dim, kernel_size=kernel_size, stride=stride, padding=kernel_size // 2,
            norm_fn=norm_fn, indice_key=f'spconv_{indice_key}', conv_type='spconv')

        layers = [down if stride > 1 else nn.Identity()]
        layers += [
            SparseBasicBlock(dim, dim, norm_fn=norm_fn, indice_key=indice_key)
            for _ in range(num_SBB)
        ]
        self.blocks = spconv.SparseSequential(*layers)

    def forward(self, x):
        return self.blocks(x)
|
||
|
||
class SEDLayer(spconv.SparseModule):
    """Sparse encoder-decoder layer: a pyramid of SEDBlocks going down,
    then inverse convolutions fusing skip features on the way back up.
    """

    def __init__(self, dim: int, down_kernel_size: list, down_stride: list, num_SBB: list, norm_fn, indice_key):
        super().__init__()

        assert down_stride[0] == 1  # first level must keep resolution (hard code)
        assert len(down_kernel_size) == len(down_stride) == len(num_SBB)

        # One SEDBlock per pyramid level, each with a unique indice key.
        self.encoder = nn.ModuleList(
            SEDBlock(dim, k, s, n, norm_fn, f"{indice_key}_{i}")
            for i, (k, s, n) in enumerate(zip(down_kernel_size, down_stride, num_SBB))
        )

        # Decoder: one inverse conv per downsampling level, keyed so it
        # reuses the indices of the matching encoder spconv.
        n_down = len(down_stride[1:])
        self.decoder = nn.ModuleList()
        self.decoder_norm = nn.ModuleList()
        for i, k in enumerate(down_kernel_size[1:]):
            self.decoder.append(
                post_act_block(
                    dim, dim, k, norm_fn=norm_fn, conv_type='inverseconv',
                    indice_key=f'spconv_{indice_key}_{n_down - i}'))
            self.decoder_norm.append(norm_fn(dim))

    def forward(self, x):
        """Encode through every level, then fuse skips deepest-first."""
        skips = []
        for stage in self.encoder:
            x = stage(x)
            skips.append(x)

        x = skips[-1]
        # skips[:-1][::-1] pairs each inverse conv with the encoder output
        # one level shallower; addition happens on sparse feature tensors.
        for up, norm, skip in zip(self.decoder, self.decoder_norm, skips[:-1][::-1]):
            x = up(x)
            x = replace_feature(x, x.features + skip.features)
            x = replace_feature(x, norm(x.features))
        return x
|
||
|
||
class HEDNet(nn.Module):
    """Sparse 3D HEDNet backbone over voxelized point-cloud features.

    Stem convolutions downsample the sparse voxel grid, a stack of
    SEDLayer encoder-decoder stages refines features at constant width,
    and an output head compresses the height (z) axis before handing the
    tensor to a BEV head (stride reported as 8).
    """

    def __init__(self, model_cfg, input_channels, grid_size, **kwargs):
        """
        Args:
            model_cfg: config providing FEATURE_DIM, NUM_LAYERS, NUM_SBB,
                DOWN_KERNEL_SIZE and DOWN_STRIDE.
            input_channels: channel count of the incoming voxel features.
            grid_size: voxel grid extents; reversed into spconv's (z, y, x)
                order below. Presumably a numpy array, since `+ [1, 0, 0]`
                is relied on to add elementwise (pads z by 1) — TODO confirm.
        """
        super().__init__()

        # spconv wants spatial_shape as (z, y, x); z gets one extra slot.
        self.sparse_shape = grid_size[::-1] + [1, 0, 0]
        norm_fn = partial(nn.BatchNorm1d, eps=1e-3, momentum=0.01)

        dim = model_cfg.FEATURE_DIM
        num_layers = model_cfg.NUM_LAYERS
        num_SBB = model_cfg.NUM_SBB
        down_kernel_size = model_cfg.DOWN_KERNEL_SIZE
        down_stride = model_cfg.DOWN_STRIDE

        # Stem: submanifold conv + two residual blocks, then stride-2 spconv.
        # [1888, 1888, 41] -> [944, 944, 21]
        self.conv1 = spconv.SparseSequential(
            post_act_block(input_channels, 16, 3, norm_fn=norm_fn, padding=1, indice_key='subm1', conv_type='subm'),
            SparseBasicBlock(16, 16, norm_fn=norm_fn, indice_key='stem'),
            SparseBasicBlock(16, 16, norm_fn=norm_fn, indice_key='stem'),
            post_act_block(16, 32, 3, norm_fn=norm_fn, stride=2, padding=1, indice_key='spconv1', conv_type='spconv'),
        )

        # SED stage then stride-2 downsample.
        # [944, 944, 21] -> [472, 472, 11]
        self.conv2 = spconv.SparseSequential(
            SEDLayer(32, down_kernel_size, down_stride, num_SBB, norm_fn=norm_fn, indice_key='sedlayer2'),
            post_act_block(32, 64, 3, norm_fn=norm_fn, stride=2, padding=1, indice_key='spconv2', conv_type='spconv'),
        )

        # Stride (1, 2, 2): halves y/x only, keeps z.
        # [472, 472, 11] -> [236, 236, 11]
        self.conv3 = spconv.SparseSequential(
            SEDLayer(64, down_kernel_size, down_stride, num_SBB, norm_fn=norm_fn, indice_key='sedlayer3'),
            post_act_block(64, dim, 3, norm_fn=norm_fn, stride=(1, 2, 2), padding=1, indice_key='spconv3', conv_type='spconv'),
        )

        # Main trunk: NUM_LAYERS additional SED stages at constant `dim`.
        self.layers = nn.ModuleList()
        for idx in range(num_layers):
            conv = SEDLayer(dim, down_kernel_size, down_stride, num_SBB, norm_fn=norm_fn, indice_key=f'sedlayer{idx+4}')
            self.layers.append(conv)

        # Output head: two (3,1,1)/(2,1,1) convs squeeze the z axis only.
        # [236, 236, 11] -> [236, 236, 5] --> [236, 236, 2]
        self.conv_out = spconv.SparseSequential(
            spconv.SparseConv3d(dim, dim, (3, 1, 1), stride=(2, 1, 1), padding=0, bias=False, indice_key='spconv4'),
            norm_fn(dim),
            nn.ReLU(),
            spconv.SparseConv3d(dim, dim, (3, 1, 1), stride=(2, 1, 1), padding=0, bias=False, indice_key='spconv5'),
            norm_fn(dim),
            nn.ReLU(),
        )

        # Channel count consumed by downstream point-feature modules.
        self.num_point_features = dim

    def forward(self, batch_dict):
        """Encode batch voxels into a sparse tensor and run the trunk.

        Reads 'voxel_features', 'voxel_coords' and 'batch_size'; writes
        'encoded_spconv_tensor' (sparse output) and
        'encoded_spconv_tensor_stride' (hard-coded 8) into batch_dict.
        """
        voxel_features = batch_dict['voxel_features']
        voxel_coords = batch_dict['voxel_coords']
        batch_size = batch_dict['batch_size']

        # Coords are cast to int; presumably (batch_idx, z, y, x) per
        # spconv convention — verify against the voxelizer.
        x = spconv.SparseConvTensor(
            features=voxel_features,
            indices=voxel_coords.int(),
            spatial_shape=self.sparse_shape,
            batch_size=batch_size
        )

        x = self.conv1(x)
        x = self.conv2(x)
        x = self.conv3(x)
        for conv in self.layers:
            x = conv(x)
        x = self.conv_out(x)

        batch_dict.update({
            'encoded_spconv_tensor': x,
            'encoded_spconv_tensor_stride': 8
        })
        return batch_dict
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.