python實(shí)現(xiàn)maxpooling/avgpooling,及其反向傳播

參考Python和PyTorch對比實(shí)現(xiàn)池化層MaxPool函數(shù)及反向傳播

maxpooling

import numpy as np
import torch
class MaxPooling2D:
    """Naive 2-D max pooling over a single-channel (H, W) array, with backward.

    The forward pass records the flat argmax index of each pooling window so
    that `backward` can route the upstream gradient back to the max position.
    """

    def __init__(self, kernel_size=(2, 2), stride=2):
        self.kernel_size = kernel_size
        self.w_height = kernel_size[0]  # window height
        self.w_width = kernel_size[1]   # window width

        self.stride = stride

        # Filled in by the forward pass; backward() depends on them.
        self.x = None
        self.in_height = None
        self.in_width = None

        self.out_height = None
        self.out_width = None

        # Flat argmax index inside each window, shape (out_height, out_width).
        self.arg_max = None

    def __call__(self, x):
        """Forward pass.

        x: 2-D array (H, W). Returns the pooled array (H_out, W_out) using
        "valid" pooling (no padding).
        """
        self.x = x
        self.in_height = np.shape(x)[0]
        self.in_width = np.shape(x)[1]

        self.out_height = (self.in_height - self.w_height) // self.stride + 1
        self.out_width = (self.in_width - self.w_width) // self.stride + 1

        out = np.zeros((self.out_height, self.out_width))
        self.arg_max = np.zeros_like(out, dtype=np.int32)

        for i in range(self.out_height):
            for j in range(self.out_width):
                start_i = i * self.stride
                start_j = j * self.stride
                # Slice the window once (the original sliced it twice,
                # once for max and once for argmax).
                window = x[start_i: start_i + self.w_height,
                           start_j: start_j + self.w_width]
                out[i, j] = np.max(window)
                self.arg_max[i, j] = np.argmax(window)
        return out

    def backward(self, d_loss):
        """Backward pass.

        d_loss: upstream gradient, shape (out_height, out_width). Returns
        dx with the same shape as the forward input, zero everywhere except
        the max position of each window.

        Accumulates with `+=` so overlapping windows (stride < kernel size)
        sum their contributions, matching autograd; for non-overlapping
        windows this is identical to the original `=` assignment.
        """
        dx = np.zeros_like(self.x)
        for i in range(self.out_height):
            for j in range(self.out_width):
                start_i = i * self.stride
                start_j = j * self.stride
                # Convert the stored flat index back to (row, col) within
                # the window, then offset into dx.
                index = np.unravel_index(self.arg_max[i, j], self.kernel_size)
                dx[start_i: start_i + self.w_height,
                   start_j: start_j + self.w_width][index] += d_loss[i, j]
        return dx



# Sanity-check MaxPooling2D against torch.nn.MaxPool2d on random input.
np.set_printoptions(precision=8, suppress=True, linewidth=120)

x_numpy = np.random.rand(1, 1, 6, 9)
x_tensor = torch.tensor(x_numpy, requires_grad=True)

max_pool_numpy = MaxPooling2D((2, 2), stride=2)
max_pool_tensor = torch.nn.MaxPool2d((2, 2), 2)

# Forward: NumPy works on the bare (H, W) plane, torch on (N, C, H, W).
out_numpy = max_pool_numpy(x_numpy[0, 0])
out_tensor = max_pool_tensor(x_tensor)

# Backward with a shared random upstream gradient.
d_loss_numpy = np.random.rand(*out_tensor.shape)
d_loss_tensor = torch.tensor(d_loss_numpy, requires_grad=True)
out_tensor.backward(d_loss_tensor)

dx_numpy = max_pool_numpy.backward(d_loss_numpy[0, 0])
dx_tensor = x_tensor.grad

# print('input \n', x_numpy)
print("out_numpy \n", out_numpy)
print("out_tensor \n", out_tensor.data.numpy())

print("dx_numpy \n", dx_numpy)
print("dx_tensor \n", dx_tensor.data.numpy())

結(jié)果對比:

input 
 [[[[0.23154623 0.39067432 0.1935981  0.57404555 0.45836731 0.17431257 0.73531471 0.15065228 0.24519314]
   [0.03184393 0.23829775 0.56975363 0.35438115 0.33226485 0.4235955  0.20789183 0.6806974  0.38405406]
   [0.03310472 0.99881498 0.94234671 0.04932191 0.42538918 0.86215804 0.68977391 0.76811963 0.41257792]
   [0.57807187 0.14109779 0.34925223 0.9519405  0.82116835 0.04807382 0.8303489  0.81035534 0.95730942]
   [0.24868524 0.39256031 0.11658718 0.51279824 0.37514214 0.23667244 0.29587014 0.1269187  0.96846764]
   [0.95877717 0.64374607 0.05483185 0.08409835 0.99503626 0.57803468 0.29041625 0.15926311 0.16737524]]]]
out_numpy 
 [[0.39067432 0.57404555 0.45836731 0.73531471]
 [0.99881498 0.9519405  0.86215804 0.8303489 ]
 [0.95877717 0.51279824 0.99503626 0.29587014]]
out_tensor 
 [[[[0.39067432 0.57404555 0.45836731 0.73531471]
   [0.99881498 0.9519405  0.86215804 0.8303489 ]
   [0.95877717 0.51279824 0.99503626 0.29587014]]]]
dx_numpy 
 [[0.         0.9304535  0.         0.15515155 0.65241189 0.         0.45043355 0.         0.        ]
 [0.         0.         0.         0.         0.         0.         0.         0.         0.        ]
 [0.         0.26357898 0.         0.         0.         0.75194057 0.         0.         0.        ]
 [0.         0.         0.         0.65124641 0.         0.         0.84716348 0.         0.        ]
 [0.         0.         0.         0.49120861 0.         0.         0.01629701 0.         0.        ]
 [0.91338436 0.         0.         0.         0.31244633 0.         0.         0.         0.        ]]
dx_tensor 
 [[[[0.         0.9304535  0.         0.15515155 0.65241189 0.         0.45043355 0.         0.        ]
   [0.         0.         0.         0.         0.         0.         0.         0.         0.        ]
   [0.         0.26357898 0.         0.         0.         0.75194057 0.         0.         0.        ]
   [0.         0.         0.         0.65124641 0.         0.         0.84716348 0.         0.        ]
   [0.         0.         0.         0.49120861 0.         0.         0.01629701 0.         0.        ]
   [0.91338436 0.         0.         0.         0.31244633 0.         0.         0.         0.        ]]]]

avgpooling

import numpy as np
import torch

class AvgPooling2D:
    """Naive 2-D average pooling over a single-channel (H, W) array, with backward.

    The gradient of the mean distributes d_loss uniformly over each window,
    scaled by 1 / (window area).
    """

    def __init__(self, kernel_size=(2, 2), stride=2):
        self.stride = stride
        self.kernel_size = kernel_size
        self.w_height = kernel_size[0]  # window height
        self.w_width = kernel_size[1]   # window width

    def __call__(self, x):
        """Forward pass.

        x: 2-D array (H, W). Returns the pooled array (H_out, W_out) using
        "valid" pooling (no padding). Saves shape info for backward().
        """
        self.x = x
        self.in_height = x.shape[0]
        self.in_width = x.shape[1]

        self.out_height = (self.in_height - self.w_height) // self.stride + 1
        self.out_width = (self.in_width - self.w_width) // self.stride + 1
        out = np.zeros((self.out_height, self.out_width))

        for i in range(self.out_height):
            for j in range(self.out_width):
                start_i = i * self.stride
                start_j = j * self.stride
                out[i, j] = np.mean(x[start_i: start_i + self.w_height,
                                      start_j: start_j + self.w_width])
        return out

    def backward(self, d_loss):
        """Backward pass.

        d_loss: upstream gradient, shape (out_height, out_width). Returns dx
        with the forward input's shape.

        Uses a float buffer (not zeros_like) so integer inputs do not
        truncate the gradient, and accumulates with `+=` so overlapping
        windows (stride < kernel size) sum correctly; for the demo config
        (float input, non-overlapping windows) this matches the original.
        """
        dx = np.zeros(np.shape(self.x))
        scale = 1.0 / (self.w_width * self.w_height)

        for i in range(self.out_height):
            for j in range(self.out_width):
                start_i = i * self.stride
                start_j = j * self.stride
                dx[start_i: start_i + self.w_height,
                   start_j: start_j + self.w_width] += d_loss[i, j] * scale
        return dx

# Sanity-check AvgPooling2D against torch.nn.AvgPool2d on random input.
np.set_printoptions(precision=8, suppress=True, linewidth=120)

x_numpy = np.random.rand(1, 1, 6, 9)
x_tensor = torch.tensor(x_numpy, requires_grad=True)

avg_pool_numpy = AvgPooling2D((2, 2), stride=2)
avg_pool_tensor = torch.nn.AvgPool2d((2, 2), 2)

# Forward: NumPy works on the bare (H, W) plane, torch on (N, C, H, W).
out_numpy = avg_pool_numpy(x_numpy[0, 0])
out_tensor = avg_pool_tensor(x_tensor)

# Backward with a shared random upstream gradient.
d_loss_numpy = np.random.rand(*out_tensor.shape)
d_loss_tensor = torch.tensor(d_loss_numpy, requires_grad=True)
out_tensor.backward(d_loss_tensor)

dx_numpy = avg_pool_numpy.backward(d_loss_numpy[0, 0])
dx_tensor = x_tensor.grad

# print('input \n', x_numpy)
print("out_numpy \n", out_numpy)
print("out_tensor \n", out_tensor.data.numpy())

print("dx_numpy \n", dx_numpy)
print("dx_tensor \n", dx_tensor.data.numpy())

結(jié)果對比:

input 
 [[[[0.67762597 0.784904   0.1742022  0.77898319 0.51368794 0.08239002 0.81457734 0.02356839 0.22477326]
   [0.96088153 0.06949592 0.73221965 0.66271638 0.00440624 0.78853139 0.89342955 0.89005521 0.91831133]
   [0.29421028 0.8554962  0.05581848 0.71596048 0.70460538 0.63569891 0.01734993 0.13829778 0.30057628]
   [0.75630389 0.73970589 0.38891535 0.18060703 0.8896449  0.19123307 0.38337412 0.48160742 0.49614005]
   [0.36208588 0.13197825 0.110713   0.0618705  0.01305788 0.17365524 0.34396082 0.67473201 0.0877381 ]
   [0.13336738 0.88374808 0.49674372 0.62228087 0.73629159 0.04834309 0.99103707 0.93708058 0.39369763]]]]
out_numpy 
 [[0.62322686 0.58703035 0.3472539  0.65540762]
 [0.66142906 0.33532534 0.60529557 0.25515731]
 [0.3777949  0.32290202 0.24283695 0.73670262]]
out_tensor 
 [[[[0.62322686 0.58703035 0.3472539  0.65540762]
   [0.66142906 0.33532534 0.60529557 0.25515731]
   [0.3777949  0.32290202 0.24283695 0.73670262]]]]
dx_numpy 
 [[0.05336788 0.05336788 0.01867329 0.01867329 0.0331215  0.0331215  0.11753746 0.11753746 0.        ]
 [0.05336788 0.05336788 0.01867329 0.01867329 0.0331215  0.0331215  0.11753746 0.11753746 0.        ]
 [0.02911582 0.02911582 0.12474389 0.12474389 0.18454687 0.18454687 0.24717661 0.24717661 0.        ]
 [0.02911582 0.02911582 0.12474389 0.12474389 0.18454687 0.18454687 0.24717661 0.24717661 0.        ]
 [0.09239687 0.09239687 0.11108155 0.11108155 0.12044172 0.12044172 0.12191826 0.12191826 0.        ]
 [0.09239687 0.09239687 0.11108155 0.11108155 0.12044172 0.12044172 0.12191826 0.12191826 0.        ]]
dx_tensor 
 [[[[0.05336788 0.05336788 0.01867329 0.01867329 0.0331215  0.0331215  0.11753746 0.11753746 0.        ]
   [0.05336788 0.05336788 0.01867329 0.01867329 0.0331215  0.0331215  0.11753746 0.11753746 0.        ]
   [0.02911582 0.02911582 0.12474389 0.12474389 0.18454687 0.18454687 0.24717661 0.24717661 0.        ]
   [0.02911582 0.02911582 0.12474389 0.12474389 0.18454687 0.18454687 0.24717661 0.24717661 0.        ]
   [0.09239687 0.09239687 0.11108155 0.11108155 0.12044172 0.12044172 0.12191826 0.12191826 0.        ]
   [0.09239687 0.09239687 0.11108155 0.11108155 0.12044172 0.12044172 0.12191826 0.12191826 0.        ]]]]
?著作權(quán)歸作者所有,轉(zhuǎn)載或內(nèi)容合作請聯(lián)系作者
  • 序言:七十年代末,一起剝皮案震驚了整個(gè)濱河市,隨后出現(xiàn)的幾起案子钙勃,更是在濱河造成了極大的恐慌祖秒,老刑警劉巖,帶你破解...
    沈念sama閱讀 222,627評論 6 517
  • 序言:濱河連續(xù)發(fā)生了三起死亡事件田轧,死亡現(xiàn)場離奇詭異暴匠,居然都是意外死亡,警方通過查閱死者的電腦和手機(jī)傻粘,發(fā)現(xiàn)死者居然都...
    沈念sama閱讀 95,180評論 3 399
  • 文/潘曉璐 我一進(jìn)店門每窖,熙熙樓的掌柜王于貴愁眉苦臉地迎上來,“玉大人弦悉,你說我怎么就攤上這事窒典。” “怎么了稽莉?”我有些...
    開封第一講書人閱讀 169,346評論 0 362
  • 文/不壞的土叔 我叫張陵瀑志,是天一觀的道長。 經(jīng)常有香客問我污秆,道長劈猪,這世上最難降的妖魔是什么? 我笑而不...
    開封第一講書人閱讀 60,097評論 1 300
  • 正文 為了忘掉前任良拼,我火速辦了婚禮战得,結(jié)果婚禮上,老公的妹妹穿的比我還像新娘庸推。我一直安慰自己常侦,他們只是感情好,可當(dāng)我...
    茶點(diǎn)故事閱讀 69,100評論 6 398
  • 文/花漫 我一把揭開白布贬媒。 她就那樣靜靜地躺著聋亡,像睡著了一般。 火紅的嫁衣襯著肌膚如雪掖蛤。 梳的紋絲不亂的頭發(fā)上杀捻,一...
    開封第一講書人閱讀 52,696評論 1 312
  • 那天,我揣著相機(jī)與錄音,去河邊找鬼致讥。 笑死仅仆,一個(gè)胖子當(dāng)著我的面吹牛,可吹牛的內(nèi)容都是我干的垢袱。 我是一名探鬼主播墓拜,決...
    沈念sama閱讀 41,165評論 3 422
  • 文/蒼蘭香墨 我猛地睜開眼,長吁一口氣:“原來是場噩夢啊……” “哼请契!你這毒婦竟也來了咳榜?” 一聲冷哼從身側(cè)響起,我...
    開封第一講書人閱讀 40,108評論 0 277
  • 序言:老撾萬榮一對情侶失蹤爽锥,失蹤者是張志新(化名)和其女友劉穎涌韩,沒想到半個(gè)月后,有當(dāng)?shù)厝嗽跇淞掷锇l(fā)現(xiàn)了一具尸體氯夷,經(jīng)...
    沈念sama閱讀 46,646評論 1 319
  • 正文 獨(dú)居荒郊野嶺守林人離奇死亡臣樱,尸身上長有42處帶血的膿包…… 初始之章·張勛 以下內(nèi)容為張勛視角 年9月15日...
    茶點(diǎn)故事閱讀 38,709評論 3 342
  • 正文 我和宋清朗相戀三年,在試婚紗的時(shí)候發(fā)現(xiàn)自己被綠了腮考。 大學(xué)時(shí)的朋友給我發(fā)了我未婚夫和他白月光在一起吃飯的照片雇毫。...
    茶點(diǎn)故事閱讀 40,861評論 1 353
  • 序言:一個(gè)原本活蹦亂跳的男人離奇死亡,死狀恐怖踩蔚,靈堂內(nèi)的尸體忽然破棺而出棚放,到底是詐尸還是另有隱情,我是刑警寧澤馅闽,帶...
    沈念sama閱讀 36,527評論 5 351
  • 正文 年R本政府宣布飘蚯,位于F島的核電站,受9級特大地震影響捞蛋,放射性物質(zhì)發(fā)生泄漏孝冒。R本人自食惡果不足惜,卻給世界環(huán)境...
    茶點(diǎn)故事閱讀 42,196評論 3 336
  • 文/蒙蒙 一拟杉、第九天 我趴在偏房一處隱蔽的房頂上張望庄涡。 院中可真熱鬧,春花似錦搬设、人聲如沸穴店。這莊子的主人今日做“春日...
    開封第一講書人閱讀 32,698評論 0 25
  • 文/蒼蘭香墨 我抬頭看了看天上的太陽泣洞。三九已至,卻和暖如春默色,著一層夾襖步出監(jiān)牢的瞬間球凰,已是汗流浹背。 一陣腳步聲響...
    開封第一講書人閱讀 33,804評論 1 274
  • 我被黑心中介騙來泰國打工, 沒想到剛下飛機(jī)就差點(diǎn)兒被人妖公主榨干…… 1. 我叫王不留呕诉,地道東北人缘厢。 一個(gè)月前我還...
    沈念sama閱讀 49,287評論 3 379
  • 正文 我出身青樓,卻偏偏與公主長得像甩挫,于是被迫代替她去往敵國和親贴硫。 傳聞我的和親對象是個(gè)殘疾皇子,可洞房花燭夜當(dāng)晚...
    茶點(diǎn)故事閱讀 45,860評論 2 361