For the prerequisite math, see https://blog.csdn.net/cufewxy1/article/details/80445023
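As a brief recap of what that derivation gives (written in the notation of the code below): with a sigmoid activation and squared-error loss, the output-layer error term is delta_output_in = (output_out - targets) * output_out * (1 - output_out), the hidden-layer error term is delta_hidden_in = (who.T · delta_output_in) * hidden_out * (1 - hidden_out), and each weight and bias is then moved against its gradient, e.g. who -= learning_rate * (delta_output_in · hidden_out.T). The train() method is a direct transcription of these update rules.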
First comes bpnn.py, which implements the BPNeuralNetwork class.
import numpy
import scipy.special
import pickle
from datetime import datetime
class BPNeuralNetwork(object):
def __init__(self, input_size, hidden_size, output_size, activation_func = scipy.special.expit, learning_rate = 0.1):
self.input_size = input_size
self.hidden_size = hidden_size
self.output_size = output_size
self.activation_func = activation_func
self.learning_rate = learning_rate
# self.wih = numpy.random.sample([hidden_size, input_size]) - 0.5
self.wih = numpy.random.normal(0.0, pow(hidden_size, -0.5), [hidden_size, input_size])
# wih_{i,j} links input_j with hidden_i
self.bih = numpy.random.sample([hidden_size, 1]) - 0.5
# self.who = numpy.random.sample([output_size, hidden_size]) - 0.5
self.who = numpy.random.normal(0.0, pow(output_size, -0.5), [output_size, hidden_size])
# who_{i,j} links hidden_j with output_i
self.bho = numpy.random.sample([output_size, 1]) - 0.5
@staticmethod
def load(filename):
with open(filename, 'rb') as f:
return pickle.load(f)
    def train(self, inputs, targets):
        # reshape inputs and targets into column vectors
        inputs = numpy.array(inputs, ndmin = 2).T
        targets = numpy.array(targets, ndmin = 2).T
        # forward pass: input -> hidden -> output
        hidden_in = numpy.dot(self.wih, inputs) + self.bih
        hidden_out = self.activation_func(hidden_in)
        output_in = numpy.dot(self.who, hidden_out) + self.bho
        output_out = self.activation_func(output_in)
        # backward pass: propagate the error and compute the gradients
        delta_output_out = output_out - targets
        delta_output_in = delta_output_out * output_out * (1 - output_out)
        delta_bho = delta_output_in
        delta_who = numpy.dot(delta_output_in, hidden_out.T)
        delta_hidden_out = numpy.dot(self.who.T, delta_output_in)
        delta_hidden_in = delta_hidden_out * hidden_out * (1 - hidden_out)
        delta_bih = delta_hidden_in
        delta_wih = numpy.dot(delta_hidden_in, inputs.T)
        # gradient descent step
        self.who -= (self.learning_rate * delta_who)
        self.bho -= (self.learning_rate * delta_bho)
        self.wih -= (self.learning_rate * delta_wih)
        self.bih -= (self.learning_rate * delta_bih)
    def dump(self, filename = None):
        # build the default filename at call time (a default argument is evaluated only once)
        if filename is None:
            filename = 'bpnn_%s.dat' % datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
        with open(filename, 'wb') as f:
            pickle.dump(self, f)
    def query(self, inputs):
        # forward pass only; returns the output activations as a 1-D array
        inputs = numpy.array(inputs, ndmin = 2).T
        hidden_in = numpy.dot(self.wih, inputs) + self.bih
        hidden_out = self.activation_func(hidden_in)
        output_in = numpy.dot(self.who, hidden_out) + self.bho
        output_out = self.activation_func(output_in)
        return output_out.reshape(self.output_size)
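Before moving on to MNIST, here is a minimal usage sketch of the class above, assuming bpnn.py is on the import path; the tiny XOR data set is purely illustrative and not part of the original post:

from bpnn import BPNeuralNetwork

# a tiny 2-4-1 network trained on XOR, just to exercise the API
net = BPNeuralNetwork(2, 4, 1, learning_rate = 0.5)
samples = [([0, 0], [0]), ([0, 1], [1]), ([1, 0], [1]), ([1, 1], [0])]
for epoch in range(5000):
    for x, y in samples:
        net.train(x, y)
for x, y in samples:
    # outputs should move towards 0/1 (XOR convergence depends on the random init)
    print(x, net.query(x))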
Next is __init__.py, which tests our BPNN on the MNIST handwritten-digit dataset; the accuracy can exceed 97%.
import tensorflow as tf
from tensorflow import keras
import numpy as np
import matplotlib.pyplot as plt
from bpnn import BPNeuralNetwork
def main():
    # 784 input neurons (28x28 pixels), 500 hidden neurons, 10 output classes
    bp = BPNeuralNetwork(28*28, 500, 10)
    # mnist = keras.datasets.fashion_mnist
    mnist = keras.datasets.mnist
    (train_images, train_labels), (test_images, test_labels) = mnist.load_data()
    # scale pixel values to [0, 1]
    train_images = train_images / 255.0
    test_images = test_images / 255.0
    # train for 5 epochs over the full training set
    for i in range(5):
        for train_image, train_label in zip(train_images, train_labels):
            inputs = train_image.reshape((1, 28 * 28))
            # one-hot encode the label as the target vector
            targets = [0] * 10
            targets[train_label] = 1
            bp.train(inputs, targets)
    print('training complete!')
    # evaluate on the test set
    cnt = 0
    acc = 0
    for test_image, test_label in zip(test_images, test_labels):
        cnt += 1
        inputs = test_image.reshape((1, 28*28))
        result = bp.query(inputs)
        # the predicted digit is the index of the largest output activation
        if np.argmax(result) == test_label:
            acc += 1
    # print accuracy in percent and save the trained network
    print(acc*100.0/cnt)
    bp.dump()
if __name__ == '__main__':
main()
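For later inference without retraining, the pickled network written by bp.dump() can be restored via the static load() method. A rough sketch (the file name here is only a placeholder for whatever dump() actually produced):

from tensorflow import keras
from bpnn import BPNeuralNetwork

net = BPNeuralNetwork.load('bpnn_saved.dat')  # placeholder file name
(_, _), (test_images, test_labels) = keras.datasets.mnist.load_data()
sample = test_images[0] / 255.0
result = net.query(sample.reshape((1, 28 * 28)))
print('predicted:', result.argmax(), 'actual:', test_labels[0])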