import torch
import torch.nn as nn
# Conv -> BatchNorm -> LeakyReLU building block.
class ConvolutionalLayer(nn.Module):
    """Standard convolution block: Conv2d followed by BatchNorm2d and LeakyReLU.

    `bias` defaults to False because the following BatchNorm2d makes a
    convolution bias redundant.
    """

    def __init__(self, in_channels, out_channels, kernel_size, stride, padding, bias=False):
        super(ConvolutionalLayer, self).__init__()
        conv = nn.Conv2d(in_channels, out_channels, kernel_size, stride, padding, bias=bias)
        norm = nn.BatchNorm2d(out_channels)
        act = nn.LeakyReLU()
        self.sub_module = nn.Sequential(conv, norm, act)

    def forward(self, x):
        return self.sub_module(x)
# Bottleneck residual block: 1x1 -> 3x3 -> 1x1 plus identity skip.
class Residual(nn.Module):
    """Residual bottleneck: project with 1x1, process with 3x3, restore with
    1x1 back to `in_channels`, then add the input (identity skip)."""

    def __init__(self, in_channels, out_channels):
        super(Residual, self).__init__()
        layers = [
            ConvolutionalLayer(in_channels, out_channels, 1, 1, 0),
            ConvolutionalLayer(out_channels, out_channels, 3, 1, 1),
            ConvolutionalLayer(out_channels, in_channels, 1, 1, 0),
        ]
        self.sub_module = nn.Sequential(*layers)

    def forward(self, x):
        return x + self.sub_module(x)
# Stem: reduce spatial resolution to 1/4 of the input.
class Preprocessing(nn.Module):
    """Input stem: stride-2 conv (1/2), one residual block, then 2x2
    max-pool (another 1/2) for a total 1/4 spatial reduction."""

    def __init__(self, in_channels, out_channels):
        super(Preprocessing, self).__init__()
        stages = [
            ConvolutionalLayer(in_channels, out_channels, 3, 2, 1),  # halves H, W
            Residual(out_channels, out_channels),
            nn.MaxPool2d(2, 2),  # halves H, W again
        ]
        self.sub_module = nn.Sequential(*stages)

    def forward(self, x):
        return self.sub_module(x)
# Downsample spatial resolution by 1/2.
class DownSampling(nn.Module):
    """Halve spatial resolution with a stride-2 3x3 convolution + LeakyReLU."""

    def __init__(self, in_channels, out_channels):
        super(DownSampling, self).__init__()
        conv = nn.Conv2d(in_channels, out_channels, 3, 2, 1)
        self.layer = nn.Sequential(conv, nn.LeakyReLU())

    def forward(self, x):
        return self.layer(x)
# Upsample spatial resolution x2.
class UpSampling(nn.Module):
    """Double spatial resolution with a transposed 3x3 convolution
    (stride 2, padding 1, output_padding 1 => exactly 2x H and W)."""

    def __init__(self, in_channels, out_channels):
        super(UpSampling, self).__init__()
        deconv = nn.ConvTranspose2d(in_channels, out_channels, 3, 2, 1, 1)
        self.layer = nn.Sequential(deconv)

    def forward(self, x):
        return self.layer(x)
class HourGlass(nn.Module):
    """One recursive hourglass module.

    `nChannels` is a list indexed by recursion depth: nChannels[numReductions]
    is the channel count at the current (outer) resolution and
    nChannels[numReductions-1] the count one level further down. The module
    recurses with numReductions-1 until numReductions == 1, then bottoms out
    with a plain residual stack.

    Args:
        nChannels: list of channel counts, one per recursion level (len >= numReductions+1).
        numReductions: remaining recursion depth (how many more times to halve resolution).
        numModules: number of Residual blocks in each stage.
    """

    def __init__(self, nChannels, numReductions=4, numModules=2):
        super(HourGlass, self).__init__()
        self.nChannels = nChannels
        self.numReductions = numReductions
        self.numModules = numModules
        # Skip branch at the current resolution: 1x1 channel projection
        # followed by numModules residual blocks.
        skip = []
        skip.append(nn.Conv2d(self.nChannels[self.numReductions], self.nChannels[self.numReductions-1], 1, 1))
        for _ in range(self.numModules):
            skip.append(Residual(self.nChannels[self.numReductions-1], self.nChannels[self.numReductions-1]))
        self.skip = nn.Sequential(*skip)
        # Down branch: stride-2 downsampling, then residual blocks at half resolution.
        self.down = DownSampling(self.nChannels[self.numReductions], self.nChannels[self.numReductions-1])
        afterpool = []
        for _ in range(self.numModules):
            afterpool.append(Residual(self.nChannels[self.numReductions-1], self.nChannels[self.numReductions-1]))
        self.afterpool = nn.Sequential(*afterpool)
        if numReductions > 1:
            # Recurse one level deeper with the same channel list.
            self.hg = HourGlass(self.nChannels, self.numReductions - 1, self.numModules)
        else:
            # Bottom of the hourglass: plain residual stack instead of recursion.
            num1res = []
            for _ in range(self.numModules):
                num1res.append(Residual(self.nChannels[self.numReductions-1], self.nChannels[self.numReductions-1]))
            self.num1res = nn.Sequential(*num1res)
        # After the inner hourglass: 1x1 projection back to the outer channel
        # count, then residual blocks, then 2x upsampling.
        lowers = []
        lowers.append(nn.Conv2d(self.nChannels[self.numReductions-1], self.nChannels[self.numReductions], 1, 1))
        for _ in range(self.numModules):
            lowers.append(Residual(self.nChannels[self.numReductions], self.nChannels[self.numReductions]))
        self.lowers = nn.Sequential(*lowers)
        self.up = UpSampling(self.nChannels[self.numReductions], self.nChannels[self.numReductions-1])
        # Final 1x1 merge back to the channel count this level received.
        self.merge = nn.Conv2d(self.nChannels[self.numReductions-1], self.nChannels[self.numReductions], 1, 1)

    def forward(self, x):
        out1 = self.skip(x)  # skip branch: 1x1 projection + residual blocks
        out2 = self.down(x)  # downsample to 1/2 resolution
        out2 = self.afterpool(out2)  # residual blocks at the lower resolution
        if self.numReductions > 1:
            out2 = self.hg(out2)  # recurse into the inner hourglass
        else:
            out2 = self.num1res(out2)  # bottom level: residual stack only
        out2 = self.lowers(out2)  # project back + residual blocks
        out2 = self.up(out2)  # upsample x2 back to this level's resolution
        out2 = out1 + out2  # fuse skip and processed branches
        out3 = self.merge(out2)
        return out3
class StackHourGlass(nn.Module):
    """Apply an hourglass repeatedly with intermediate supervision outputs.

    Each stage runs the hourglass, projects its features to a 1-channel map
    (collected as the stage output), and — except after the last stage —
    feeds a re-projected version of that map back into the input.

    NOTE(review): a single `hg`/`res`/`joints` instance is reused for every
    stage, so all stages share weights — confirm this is intended.
    """

    def __init__(self, stack_num, nChannels):
        super(StackHourGlass, self).__init__()
        self.stack_num = stack_num
        self.nChannels = nChannels
        # Channel list per hourglass recursion level, deepest first.
        self.hg = HourGlass([nChannels*8, nChannels*4, nChannels*2, nChannels, nChannels])
        self.res = ConvolutionalLayer(self.nChannels, 1, 1, 1, 0)
        self.joints = ConvolutionalLayer(1, self.nChannels, 1, 1, 0)

    def forward(self, x):
        stage_outputs = []
        last_stage = self.stack_num - 1
        for stage in range(self.stack_num):
            features = self.hg(x)
            heatmap = self.res(features)
            stage_outputs.append(heatmap)
            if stage != last_stage:
                # Re-inject the stage output into the next stage's input.
                x = x + self.joints(heatmap)
        return stage_outputs
class Main(nn.Module):
    """Full network: 1/4-resolution stem followed by a stacked hourglass.

    Takes a 3-channel image batch; returns the list of per-stage heatmaps
    produced by StackHourGlass.
    """

    def __init__(self, stack_num=2, nChannels=8):
        super(Main, self).__init__()
        self.pre_process = Preprocessing(in_channels=3, out_channels=nChannels)
        self.hg = StackHourGlass(stack_num, nChannels)

    def forward(self, x):
        features = self.pre_process(x)
        return self.hg(features)
if __name__ == "__main__":
    # Smoke test: forward one random batch through the full network.
    # Fix: the original hard-coded .cuda(), crashing on CPU-only machines;
    # pick the device at runtime instead.
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    hg = Main().to(device)
    x = torch.randn((2, 3, 512, 512), device=device)
    out = hg(x)
    print(out)
StackHourGlass 的 PyTorch 實現
最后編輯于 :
?著作權歸作者所有,轉載或內(nèi)容合作請聯(lián)系作者
- 文/潘曉璐 我一進店門,熙熙樓的掌柜王于貴愁眉苦臉地迎上來柄粹,“玉大人喘鸟,你說我怎么就攤上這事×耄” “怎么了迷守?”我有些...
- 文/不壞的土叔 我叫張陵,是天一觀的道長旺入。 經(jīng)常有香客問我兑凿,道長凯力,這世上最難降的妖魔是什么? 我笑而不...
- 正文 為了忘掉前任礼华,我火速辦了婚禮咐鹤,結果婚禮上,老公的妹妹穿的比我還像新娘圣絮。我一直安慰自己祈惶,他們只是感情好,可當我...
- 文/花漫 我一把揭開白布扮匠。 她就那樣靜靜地躺著捧请,像睡著了一般。 火紅的嫁衣襯著肌膚如雪棒搜。 梳的紋絲不亂的頭發(fā)上疹蛉,一...
- 文/蒼蘭香墨 我猛地睜開眼埃叭,長吁一口氣:“原來是場噩夢啊……” “哼摸恍!你這毒婦竟也來了?” 一聲冷哼從身側響起游盲,我...
- 正文 年R本政府宣布蛙卤,位于F島的核電站狠半,受9級特大地震影響噩死,放射性物質發(fā)生泄漏。R本人自食惡果不足惜神年,卻給世界環(huán)境...
- 文/蒙蒙 一已维、第九天 我趴在偏房一處隱蔽的房頂上張望。 院中可真熱鬧已日,春花似錦垛耳、人聲如沸。這莊子的主人今日做“春日...
- 文/蒼蘭香墨 我抬頭看了看天上的太陽。三九已至护奈,卻和暖如春泡嘴,著一層夾襖步出監(jiān)牢的瞬間,已是汗流浹背逆济。 一陣腳步聲響...
推薦閱讀更多精彩內(nèi)容
- 作者:aiqiu_gogogo來源:CSDN原文:https://blog.csdn.net/aiqiu_gogo...
- 本篇內(nèi)容參考官方文檔自己總結而來僅供自學自查,詳細需求請查閱官方文檔岛马。 數(shù)據(jù)類型 張量(Tnsor) 什么是張量棉姐?...
- 在本教程中,您將使用PyTorch框架來介紹深度學習啦逆,并且根據(jù)其結論伞矩,您可以輕松地將其應用到您的深度學習模型中。臉...
- 原文鏈接 PyTorch由于使用了強大的GPU加速的Tensor計算(類似numpy)和基于tape的autogr...
- 萬里美景,沒有你沟蔑,那也不是我歸屬的城湿诊。 "明天周末,我們一起去看櫻花吧瘦材√耄” 你告訴我:“我明天要工作,沒時間去食棕±屎停”...