import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
# Load the MNIST dataset (downloads into ./MNIST_data on first run);
# one_hot=True makes each label a 10-dim one-hot vector.
mnist = input_data.read_data_sets("MNIST_data",one_hot = True)
# Number of examples per mini-batch.
batch_size = 100
# Number of complete batches per epoch (integer division drops the remainder).
n_batch = mnist.train.num_examples// batch_size
# Placeholders fed at run time: x holds flattened 28x28 images (784 floats),
# y holds the matching one-hot labels.
x = tf.placeholder(tf.float32,[None,784])
y = tf.placeholder(tf.float32,[None,10])
# Dropout keep-probability; fed as <1.0 during training, 1.0 at evaluation.
keep_prob = tf.placeholder(tf.float32)
# Fully-connected network: 784 -> 500 -> 500 -> 100 -> 10.
# Weights are drawn from a truncated normal (stddev 0.1) and biases start at
# 0.1 so the tanh units begin in their active region rather than the
# all-zero symmetric state.
W1 = tf.Variable(tf.truncated_normal([784, 500], stddev=0.1))
b1 = tf.Variable(tf.zeros([500]) + 0.1)
L1 = tf.nn.tanh(tf.matmul(x, W1) + b1)
# Dropout regularization; keep_prob is fed at run time (0.7 train, 1.0 test).
L1_drop = tf.nn.dropout(L1, keep_prob)

# BUG FIX: the original fed the un-dropped activations (L1, L2) into the next
# layer, so every dropout op after the first had no effect on the forward
# pass. Each layer now consumes the *_drop tensor of the previous one.
W2 = tf.Variable(tf.truncated_normal([500, 500], stddev=0.1))
b2 = tf.Variable(tf.zeros([500]) + 0.1)
L2 = tf.nn.tanh(tf.matmul(L1_drop, W2) + b2)
L2_drop = tf.nn.dropout(L2, keep_prob)

W3 = tf.Variable(tf.truncated_normal([500, 100], stddev=0.1))
b3 = tf.Variable(tf.zeros([100]) + 0.1)
L3 = tf.nn.tanh(tf.matmul(L2_drop, W3) + b3)
L3_drop = tf.nn.dropout(L3, keep_prob)

W4 = tf.Variable(tf.truncated_normal([100, 10], stddev=0.1))
b4 = tf.Variable(tf.zeros([10]) + 0.1)
# Keep the raw logits separate from the softmax output: the cross-entropy op
# below applies softmax internally and must receive logits.
logits = tf.matmul(L3_drop, W4) + b4
prediction = tf.nn.softmax(logits)

# BUG FIX: softmax_cross_entropy_with_logits expects raw logits; the original
# passed the already-softmaxed `prediction`, applying softmax twice, which
# flattens the gradients and cripples training.
loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=y, logits=logits))

# BUG FIX: the original assigned the bare AdamOptimizer object to train_step
# (no .minimize call), so sess.run(train_step) could never apply a gradient
# step. Learning rate kept at the original 1e-2 (the old "1e-3" comment was
# wrong); consider lowering it if training is unstable.
train_step = tf.train.AdamOptimizer(1e-2).minimize(loss)
# Op that initializes every tf.Variable; must be run once before training.
init = tf.global_variables_initializer()
# Per-example correctness: compare the predicted class (argmax over the 10
# output probabilities) with the class encoded by the one-hot label.
correct_prediction = tf.equal(tf.argmax(prediction, 1), tf.argmax(y, 1))
# Accuracy is the mean of the boolean hits cast to 0.0/1.0 floats.
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
# Run 21 epochs of mini-batch training, reporting test accuracy after each.
with tf.Session() as sess:
    sess.run(init)
    for epoch in range(21):
        for batch in range(n_batch):
            # batch_xs: flattened image pixels; batch_ys: one-hot labels.
            batch_xs, batch_ys = mnist.train.next_batch(batch_size)
            # keep_prob=0.7: drop 30% of hidden activations while training.
            sess.run(train_step,
                     feed_dict={x: batch_xs, y: batch_ys, keep_prob: 0.7})
        # Evaluate on the full test set with dropout disabled (keep_prob=1.0).
        test_acc = sess.run(accuracy,
                            feed_dict={x: mnist.test.images,
                                       y: mnist.test.labels,
                                       keep_prob: 1.0})
        # BUG FIX: the original printed the undefined name `acc` (NameError on
        # the first epoch) and misspelled "Testing".
        print("Iter " + str(epoch) + ", Testing accuracy " + str(test_acc))
# Note: freshly written script — confirmed runnable.