# -*- coding: utf-8 -*-
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
# Load the data
mnist = input_data.read_data_sets("./", one_hot=True)
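# Note: read_data_sets downloads the MNIST files into "./" on the first run and
# reuses them afterwards; one_hot=True encodes each label as a 10-dimensional
# one-hot vector (e.g. the digit 3 becomes [0,0,0,1,0,0,0,0,0,0]).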
# Build the regression model
x = tf.placeholder(tf.float32, [None, 784])
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.matmul(x, W) + b  # predicted logits
# Define the loss function and optimizer
y_ = tf.placeholder(tf.float32, [None, 10])
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=y, labels=y_))
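# Note: softmax_cross_entropy_with_logits applies softmax to the logits internally,
# so y is fed in unnormalized. Computing the same loss by hand as
#   tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(tf.nn.softmax(y)), axis=1))
# gives an equivalent value, but the fused op is numerically more stable.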
# Use SGD as the optimizer
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
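# Each step applies the plain gradient-descent update
#   theta <- theta - 0.5 * grad(cross_entropy)
# on the current mini-batch, where 0.5 is the learning rate.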
# Train the model
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()
for _ in range(1000):
    batch_xs, batch_ys = mnist.train.next_batch(100)
    sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
# Evaluate the trained model
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))  # compare predicted labels with true labels
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))  # cast the booleans to floats and average them to get the accuracy
print(sess.run(accuracy, feed_dict={x: mnist.test.images, y_: mnist.test.labels}))  # accuracy of the model on the test set
0.9179
Convolutional Neural Network Implementation
# -*- coding: utf-8 -*-
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
# Load the data
mnist = input_data.read_data_sets("./", one_hot=True)
trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels
trX = trX.reshape(-1, 28, 28, 1)  # 28x28x1 input images
teX = teX.reshape(-1, 28, 28, 1)  # 28x28x1 input images
X = tf.placeholder("float", [None, 28, 28, 1])
Y = tf.placeholder("float", [None, 10])
def init_weights(shape):
    return tf.Variable(tf.random_normal(shape, stddev=0.01))
w = init_weights([3, 3, 1, 32])        # 3x3 patch, 1 input channel, 32 output channels
w2 = init_weights([3, 3, 32, 64])      # 3x3 patch, 32 input channels, 64 output channels
w3 = init_weights([3, 3, 64, 128])     # 3x3 patch, 64 input channels, 128 output channels
w4 = init_weights([128 * 4 * 4, 625])  # fully connected layer: flattens the 128x4x4 output of the last conv block into one dimension, 625 outputs
w_o = init_weights([625, 10])          # output layer: 625 inputs, 10 outputs (one per class label)
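# Why 128 * 4 * 4: each 'SAME' convolution below keeps the spatial size, and each
# 2x2 max-pool with stride 2 halves it (rounding up): 28x28 -> 14x14 -> 7x7 -> 4x4.
# With 128 channels from w3, the flattened feature vector has 128 * 4 * 4 = 2048 entries.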
def model(X, w, w2, w3, w4, w_o, p_keep_conv, p_keep_hidden):
    # First convolution + pooling block
    l1a = tf.nn.relu(tf.nn.conv2d(X, w, strides=[1, 1, 1, 1], padding='SAME'))
    l1 = tf.nn.max_pool(l1a, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
    l1 = tf.nn.dropout(l1, p_keep_conv)  # randomly drop some neurons
    # Second convolution + pooling block
    l2a = tf.nn.relu(tf.nn.conv2d(l1, w2, strides=[1, 1, 1, 1], padding='SAME'))
    l2 = tf.nn.max_pool(l2a, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
    l2 = tf.nn.dropout(l2, p_keep_conv)  # randomly drop some neurons
    # Third convolution + pooling block
    l3a = tf.nn.relu(tf.nn.conv2d(l2, w3, strides=[1, 1, 1, 1], padding='SAME'))
    l3 = tf.nn.max_pool(l3a, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')  # l3 shape=(?, 4, 4, 128)
    l3 = tf.reshape(l3, [-1, w4.get_shape().as_list()[0]])  # reshape to (?, 128 * 4 * 4 = 2048)
    l3 = tf.nn.dropout(l3, p_keep_conv)
    # Fully connected layer, followed by dropout
    l4 = tf.nn.relu(tf.matmul(l3, w4))
    l4 = tf.nn.dropout(l4, p_keep_hidden)
    # Output layer
    pyx = tf.matmul(l4, w_o)
    return pyx  # return the predicted logits
p_keep_conv = tf.placeholder("float")    # dropout keep probability for the conv layers
p_keep_hidden = tf.placeholder("float")  # dropout keep probability for the fully connected layer
py_x = model(X, w, w2, w3, w4, w_o, p_keep_conv, p_keep_hidden)
# Define the loss function
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y))
train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost)  # RMSProp, learning rate 0.001, decay 0.9
predict_op = tf.argmax(py_x, 1)  # predicted class = index of the largest logit
# Train and evaluate the model
batch_size = 128
test_size = 256
with tf.Session() as sess:
    tf.global_variables_initializer().run()
    for i in range(100):
        training_batch = zip(range(0, len(trX), batch_size),
                             range(batch_size, len(trX) + 1, batch_size))
        for start, end in training_batch:
            sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end],
                                          p_keep_conv: 0.8, p_keep_hidden: 0.5})
        # After each epoch, measure accuracy on a random sample of test_size test images
        test_indices = np.arange(len(teX))
        np.random.shuffle(test_indices)
        test_indices = test_indices[0:test_size]
        print(i, np.mean(np.argmax(teY[test_indices], axis=1) ==
                         sess.run(predict_op, feed_dict={X: teX[test_indices],
                                                         p_keep_conv: 1.0, p_keep_hidden: 1.0})))
0 0.953125
1 0.98046875
2 0.984375
3 0.9921875
4 0.98828125
5 0.9921875
6 1.0
7 0.99609375
8 0.9921875
9 0.99609375
10 0.99609375
11 0.984375
12 0.9921875
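The per-epoch accuracies above are measured on a random sample of only test_size = 256 test images, which is why they fluctuate (epoch 6 even reads 1.0). A minimal sketch of a full test-set evaluation, assuming it is placed inside the with tf.Session() block after the training loop:

# Sketch: evaluate on all 10,000 test images in batches
# (assumes sess, predict_op, teX, teY from the script above are in scope).
correct = 0
for start in range(0, len(teX), test_size):
    end = start + test_size
    pred = sess.run(predict_op, feed_dict={X: teX[start:end],
                                           p_keep_conv: 1.0, p_keep_hidden: 1.0})
    correct += np.sum(pred == np.argmax(teY[start:end], axis=1))
print("full test accuracy:", correct / len(teX))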