'''
SENet (Squeeze-and-Excitation Networks) in PyTorch.

SENet adaptively recalibrates channel-wise feature responses by introducing the
Squeeze-and-Excitation (SE) block, which explicitly models the interdependencies
between channels. The SE block can be integrated into existing network
architectures.

Key features:
1. Introduces the Squeeze-and-Excitation (SE) block to strengthen the
   representational power of features.
2. The SE block consists of a squeeze operation (global average pooling) and an
   excitation operation (two FC layers, implemented here as 1x1 convolutions).
3. An attention mechanism boosts the weights of important channels and
   suppresses unimportant ones.
4. SE blocks can be embedded into almost any existing network architecture.

Reference:
[1] Jie Hu, Li Shen, Samuel Albanie, Gang Sun, Enhua Wu.
    Squeeze-and-Excitation Networks. CVPR 2018.
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
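

# The module docstring above describes the SE block abstractly. The standalone
# module below is a minimal sketch of that idea in isolation; the name SEBlock
# and the `reduction` parameter are illustrative additions, not part of the
# original network, whose blocks inline the same two steps instead.
class SEBlock(nn.Module):
    """Minimal SE block: squeeze (GAP) -> excitation (FC-ReLU-FC-Sigmoid) -> rescale."""
    def __init__(self, channels, reduction=16):
        super(SEBlock, self).__init__()
        self.squeeze = nn.AdaptiveAvgPool2d(1)  # squeeze: [N,C,H,W] -> [N,C,1,1]
        self.excitation = nn.Sequential(
            nn.Conv2d(channels, channels // reduction, kernel_size=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(channels // reduction, channels, kernel_size=1),
            nn.Sigmoid()  # per-channel weights in (0, 1)
        )

    def forward(self, x):
        # Rescale each channel of x by its learned weight
        return x * self.excitation(self.squeeze(x))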


class BasicBlock(nn.Module):
    """Basic residual block with an SE module.

    Structure:
        x -> Conv -> BN -> ReLU -> Conv -> BN -> SE -> (+) -> ReLU
        |-----------------------------------------------|

    Args:
        in_channels: number of input channels
        channels: number of output channels
        stride: stride of the first convolution, used for downsampling (default 1)
    """
    def __init__(self, in_channels, channels, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_channels, channels, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(channels)
        self.conv2 = nn.Conv2d(channels, channels, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(channels)

        # Projection shortcut: match spatial size and channel count when they change
        self.shortcut = nn.Sequential()
        if stride != 1 or in_channels != channels:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_channels, channels, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(channels)
            )

        # SE module: squeeze (global average pooling) followed by excitation
        # (a 1x1-conv bottleneck with reduction ratio 16 and a sigmoid gate)
        self.squeeze = nn.AdaptiveAvgPool2d(1)
        self.excitation = nn.Sequential(
            nn.Conv2d(channels, channels//16, kernel_size=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(channels//16, channels, kernel_size=1),
            nn.Sigmoid()
        )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))

        # Squeeze-and-Excitation: compute per-channel weights and rescale
        w = self.squeeze(out)      # [N,C,H,W] -> [N,C,1,1]
        w = self.excitation(w)     # per-channel weights in (0, 1)
        out = out * w

        # Residual connection followed by the final activation
        out += self.shortcut(x)
        out = F.relu(out)
        return out


class PreActBlock(nn.Module):
    """Pre-activation basic block with an SE module.

    Structure:
        x -> BN -> ReLU -> Conv -> BN -> ReLU -> Conv -> SE -> (+)
        |------------------------------------------------------|

    Args:
        in_channels: number of input channels
        channels: number of output channels
        stride: stride of the first convolution, used for downsampling (default 1)
    """
    def __init__(self, in_channels, channels, stride=1):
        super(PreActBlock, self).__init__()
        self.bn1 = nn.BatchNorm2d(in_channels)
        self.conv1 = nn.Conv2d(in_channels, channels, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(channels)
        self.conv2 = nn.Conv2d(channels, channels, kernel_size=3, stride=1, padding=1, bias=False)

        # Projection shortcut (no BN, consistent with pre-activation); only
        # defined when the spatial size or channel count changes
        if stride != 1 or in_channels != channels:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_channels, channels, kernel_size=1, stride=stride, bias=False)
            )

        # SE module: squeeze (global average pooling) followed by excitation
        # (a 1x1-conv bottleneck with reduction ratio 16 and a sigmoid gate)
        self.squeeze = nn.AdaptiveAvgPool2d(1)
        self.excitation = nn.Sequential(
            nn.Conv2d(channels, channels//16, kernel_size=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(channels//16, channels, kernel_size=1),
            nn.Sigmoid()
        )

    def forward(self, x):
        out = F.relu(self.bn1(x))
        # The shortcut branches off after the first BN-ReLU when projecting
        shortcut = self.shortcut(out) if hasattr(self, 'shortcut') else x

        out = self.conv1(out)
        out = self.conv2(F.relu(self.bn2(out)))

        # Squeeze-and-Excitation: compute per-channel weights and rescale
        w = self.squeeze(out)
        w = self.excitation(w)
        out = out * w

        # Residual connection (no final ReLU in the pre-activation variant)
        out += shortcut
        return out


class SENet(nn.Module):
    """SENet model.

    Network structure:
    1. One convolutional layer for initial feature extraction
    2. Four residual stages, each containing several residual blocks with SE modules
    3. Average pooling and a fully connected layer for classification

    Args:
        block: residual block type (BasicBlock or PreActBlock)
        num_blocks: list with the number of residual blocks per stage
        num_classes: number of output classes (default 10)
    """
    def __init__(self, block, num_blocks, num_classes=10):
        super(SENet, self).__init__()
        self.in_channels = 64

        # Stem: a single 3x3 convolution (CIFAR-style, no max pooling)
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)

        # Four residual stages; stages 2-4 halve the spatial resolution
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)

        # Classification head
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.classifier = nn.Linear(512, num_classes)

        self._initialize_weights()

    def _make_layer(self, block, channels, num_blocks, stride):
        """Build one residual stage.

        Args:
            block: residual block type
            channels: number of output channels
            num_blocks: number of residual blocks in this stage
            stride: stride of the first block (used for downsampling)

        Returns:
            nn.Sequential: the residual stage
        """
        # Only the first block downsamples; the remaining blocks use stride 1
        strides = [stride] + [1]*(num_blocks-1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_channels, channels, stride))
            self.in_channels = channels
        return nn.Sequential(*layers)

    def forward(self, x):
        """Forward pass.

        Args:
            x: input tensor, [N,3,32,32]

        Returns:
            out: output tensor, [N,num_classes]
        """
        out = F.relu(self.bn1(self.conv1(x)))

        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)

        out = self.avg_pool(out)
        out = out.view(out.size(0), -1)   # flatten to [N,512]
        out = self.classifier(out)
        return out

    def _initialize_weights(self):
        """Initialize model weights.

        Uses Kaiming initialization:
        - convolution weights: kaiming_normal_
        - BN parameters: constant initialization
        - linear layers: normal distribution
        """
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)
                nn.init.constant_(m.bias, 0)


def SENet18():
    """SENet-18 model (pre-activation blocks in a ResNet-18 layout)."""
    return SENet(PreActBlock, [2,2,2,2])
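

# Deeper variants can be built the same way. The function below is an
# illustrative sketch: the name SENet34 and the [3,4,6,3] layout follow the
# standard ResNet-34 configuration and are an assumption, not part of the
# original file.
def SENet34():
    """SENet-34 model (hypothetical variant, ResNet-34 block layout)."""
    return SENet(PreActBlock, [3,4,6,3])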


def test():
    """Smoke test: build SENet-18 and run a forward pass on a CIFAR-sized input."""
    net = SENet18()
    print('Model Structure:')
    print(net)

    x = torch.randn(1,3,32,32)
    y = net(x)
    print('\nInput Shape:', x.shape)
    print('Output Shape:', y.shape)
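
    # A quick sanity check (an addition, not in the original test): count the
    # trainable parameters by summing element counts over all tensors that
    # require gradients.
    num_params = sum(p.numel() for p in net.parameters() if p.requires_grad)
    print('Trainable Parameters:', num_params)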
    # Detailed per-layer summary via torchinfo
    from torchinfo import summary
    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    net = net.to(device)
    summary(net, (1,3,32,32))


if __name__ == '__main__':
    test()