Group Convolution
In an ordinary convolution, every filter operates on all of the input channels and the per-channel results are summed.
Group convolution simply splits the channels into N equal parts (N groups); each group is independently connected to M/N channels of the previous layer's output (where M is the number of input channels), and the outputs of all groups are concatenated to form this layer's output channels.
Group convolution first appeared in AlexNet [1]: GPU memory was too small, so the network had to be split across two cards.
The depthwise convolution in MobileNet v1 is the special case where every channel forms its own group.
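In PyTorch this corresponds to the groups argument of nn.Conv2d. A minimal sketch (the channel counts here are arbitrary, chosen only for illustration):

import torch
import torch.nn as nn

x = torch.randn(1, 8, 32, 32)  # 8 input channels

# ordinary convolution: every filter sees all 8 input channels
conv = nn.Conv2d(8, 16, kernel_size=3, padding=1)

# group convolution: channels split into 4 groups of 2; each output
# channel is computed from only the 2 input channels of its group
gconv = nn.Conv2d(8, 16, kernel_size=3, padding=1, groups=4)

# depthwise convolution (MobileNet v1): one group per channel
dwconv = nn.Conv2d(8, 8, kernel_size=3, padding=1, groups=8)

print(conv(x).shape, gconv(x).shape, dwconv(x).shape)
# all three preserve the spatial size; gconv uses 1/4 the weights of conv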
ShuffleNet
Channel Shuffle
Between the two extremes of a separate filter for every channel (depthwise convolution) and a single convolution shared by all channels (an ordinary convolution),
ShuffleNet proposes splitting the channels into groups and performing the pointwise (1x1) convolution only within each group.
However, stacking several group convolutions on top of each other has a side effect: an output channel is derived from only a small fraction of the input channels, as shown in figure (a). Each group's output depends only on the inputs inside that group, which blocks information flow between channels.
If we allow a group convolution to take its input from different groups (figure (b)), the input and output channels become fully related.
ShuffleNet realizes this with a channel shuffle operation (figure (c)); the blocking effect of plain stacked group convolutions can also be verified directly, as in the sketch below.
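A quick check of the blocking effect (a hypothetical example, not from the original post): stack two group convolutions without a shuffle and inspect which input channels the gradient of one output channel reaches.

import torch
import torch.nn as nn

x = torch.randn(1, 8, 8, 8, requires_grad=True)
# two stacked group convolutions (groups=2), no shuffle in between
g1 = nn.Conv2d(8, 8, kernel_size=1, groups=2, bias=False)
g2 = nn.Conv2d(8, 8, kernel_size=1, groups=2, bias=False)
g2(g1(x))[0, 0].sum().backward()
# output channel 0 never sees input channels 4..7: their gradient is zero
print(x.grad[0, :4].abs().sum().item() > 0)   # True
print(x.grad[0, 4:].abs().sum().item() == 0)  # True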
具體做法:
假設(shè)一個(gè)卷積層有個(gè)輸出channel(g 個(gè)group),
- (1)先將輸出通道reshape維度為
- (2)transpose:將通道信息變?yōu)?img class="math-inline" src="https://math.jianshu.com/math?formula=(n%2Cg)" alt="(n,g)" mathimg="1">,通道信息隨機(jī)變換
- (3)reshape,將通道恢復(fù)原來的shape
在transpose過程中進(jìn)行了通道混亂
The PyTorch code:
def shuffle_channels(x, groups):
"""shuffle channels of a 4-D Tensor"""
batch_size, channels, height, width = x.size()
assert channels % groups == 0
channels_per_group = channels // groups
# split into groups
x = x.view(batch_size, groups, channels_per_group,
height, width)
    # transpose the group axis and the per-group channel axis;
    # this interleaves channels from different groups
    x = x.transpose(1, 2).contiguous()
    # reshape back into the original shape
    x = x.view(batch_size, channels, height, width)
return x
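A quick sanity check of the permutation (a hypothetical example, using the function above): with 6 channels and groups=2, the channel order [0, 1, 2, 3, 4, 5] becomes [0, 3, 1, 4, 2, 5]:

import torch

x = torch.arange(6, dtype=torch.float32).view(1, 6, 1, 1)
print(shuffle_channels(x, 2).flatten().tolist())
# [0.0, 3.0, 1.0, 4.0, 2.0, 5.0] -- the two groups are interleaved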
ShuffleNet v1 Bottleneck
The building blocks of ShuffleNet are shown in the figure:
(b) is the unit with stride 1,
(c) is the unit with stride 2.
A complete PyTorch implementation for 32x32 inputs (e.g. CIFAR-10) follows:
import torch
import torch.nn as nn
import torch.nn.functional as F
def shuffle_channels(x, groups):
"""shuffle channels of a 4-D Tensor"""
batch_size, channels, height, width = x.size()
assert channels % groups == 0
channels_per_group = channels // groups
# split into groups
x = x.view(batch_size, groups, channels_per_group,
height, width)
    # transpose the group axis and the per-group channel axis;
    # this interleaves channels from different groups
    x = x.transpose(1, 2).contiguous()
    # reshape back into the original shape
    x = x.view(batch_size, channels, height, width)
return x
class ShuffleBottleNeck(nn.Module):
def __init__(self,in_channels,out_channels,stride,groups):
super(ShuffleBottleNeck,self).__init__()
self.stride = stride
self.groups = groups
        # the bottleneck (middle) layer uses 1/4 of the output channels:
        # "we set the number of bottleneck channels to 1/4 of the output
        # channels for each ShuffleNet unit" (from the paper)
        mid_channels = out_channels // 4
        # the paper skips group convolution for the first pointwise layer
        # of stage 2, because its input has only 24 channels
        set_groups = groups if in_channels != 24 else 1
        # 1x1 grouped pointwise convolution (reduce)
        self.conv1 = nn.Conv2d(in_channels, mid_channels, kernel_size=1,
                               groups=set_groups, bias=False)
        self.bn1 = nn.BatchNorm2d(mid_channels)
        # 3x3 depthwise convolution (groups == channels)
        self.conv2 = nn.Conv2d(mid_channels, mid_channels, kernel_size=3,
                               groups=mid_channels, padding=1, stride=stride,
                               bias=False)
        self.bn2 = nn.BatchNorm2d(mid_channels)
        # 1x1 grouped pointwise convolution (expand)
        self.conv3 = nn.Conv2d(mid_channels, out_channels, kernel_size=1,
                               groups=groups, bias=False)
        self.bn3 = nn.BatchNorm2d(out_channels)
        # stride-1 units use an identity shortcut (add); stride-2 units use
        # a 3x3 average-pooling shortcut (concatenate)
        self.shortcut = nn.Sequential()
        if stride == 2:
            self.shortcut = nn.Sequential(nn.AvgPool2d(3, stride=2, padding=1))
    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        # shuffle channels between the two grouped pointwise convolutions
        out = shuffle_channels(out, self.groups)
        out = self.bn2(self.conv2(out))
        out = self.bn3(self.conv3(out))
        res = self.shortcut(x)
        # stride 2: concatenate the two paths; stride 1: element-wise add
        out = F.relu(torch.cat([out, res], 1)) if self.stride == 2 else F.relu(out + res)
return out
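# quick shape check for the stride-2 unit (illustrative numbers matching
# ShuffleNetG3's first stage: in=24, residual out=240-24=216, groups=3):
#   unit = ShuffleBottleNeck(24, 216, stride=2, groups=3)
#   unit(torch.randn(1, 24, 32, 32)).shape  # torch.Size([1, 240, 16, 16])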
class ShuffleNet(nn.Module):
def __init__(self, cfg):
super(ShuffleNet, self).__init__()
out_planes = cfg['out_planes']
num_blocks = cfg['num_blocks']
groups = cfg['groups']
        # 1x1 stem for 32x32 inputs; the paper's ImageNet model uses a
        # 3x3 stride-2 convolution followed by max pooling instead
        self.conv1 = nn.Conv2d(3, 24, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm2d(24)
self.in_planes = 24
self.layer1 = self._make_layer(out_planes[0], num_blocks[0], groups)
self.layer2 = self._make_layer(out_planes[1], num_blocks[1], groups)
self.layer3 = self._make_layer(out_planes[2], num_blocks[2], groups)
self.linear = nn.Linear(out_planes[2], 10)
    def _make_layer(self, out_planes, num_blocks, groups):
        layers = []
        for i in range(num_blocks):
            if i == 0:
                # the first unit of each stage has stride 2; its shortcut is
                # concatenated, so the residual path only needs to produce
                # out_planes - in_planes channels
                layers.append(ShuffleBottleNeck(self.in_planes,
                                                out_planes - self.in_planes,
                                                stride=2, groups=groups))
            else:
                layers.append(ShuffleBottleNeck(self.in_planes,
                                                out_planes,
                                                stride=1, groups=groups))
            self.in_planes = out_planes
        return nn.Sequential(*layers)
def forward(self, x):
out = F.relu(self.bn1(self.conv1(x)))
out = self.layer1(out)
out = self.layer2(out)
out = self.layer3(out)
out = F.avg_pool2d(out, 4)
out = out.view(out.size(0), -1)
out = self.linear(out)
return out
def ShuffleNetG2():
cfg = {
'out_planes': [200,400,800],
'num_blocks': [4,8,4],
'groups': 2
}
return ShuffleNet(cfg)
def ShuffleNetG3():
cfg = {
'out_planes': [240,480,960],
'num_blocks': [4,8,4],
'groups': 3
}
return ShuffleNet(cfg)
def test():
net = ShuffleNetG2()
x = torch.randn(1,3,32,32)
y = net(x)
print(y)
if __name__ == '__main__':
test()
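Running test() feeds a random 1x3x32x32 input through ShuffleNetG2 and prints a 1x10 tensor of class logits: the three stages of stride-2 units reduce 32x32 to 4x4, which the final 4x4 average pooling collapses before the linear classifier.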