"""
FC neural-network
2 hidden-layer 每層256個神經(jīng)元
"""
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data

# Load the MNIST dataset with one-hot encoded labels
mnist = input_data.read_data_sets('/tmp/data/', one_hot=True)
# Training hyperparameters
learning_rate = 0.1
num_steps = 500
batch_size = 128
disp_step = 100   # print progress every disp_step steps

# Network architecture
h1_num = 256      # neurons in hidden layer 1
h2_num = 256      # neurons in hidden layer 2
num_input = 784   # each MNIST image is 28x28 pixels, flattened to 784
num_classes = 10  # digits 0-9

# Graph inputs
X = tf.placeholder("float", [None, num_input])
Y = tf.placeholder("float", [None, num_classes])
# Weight and bias variables, initialized from a standard normal distribution
weights = {
    'h1': tf.Variable(tf.random_normal([num_input, h1_num])),
    'h2': tf.Variable(tf.random_normal([h1_num, h2_num])),
    'out': tf.Variable(tf.random_normal([h2_num, num_classes]))
}
bias = {
    'b1': tf.Variable(tf.random_normal([h1_num])),
    'b2': tf.Variable(tf.random_normal([h2_num])),
    'out': tf.Variable(tf.random_normal([num_classes]))
}
def neural_net(x):
    # Hidden layer 1: num_input -> h1_num
    layer1 = tf.add(tf.matmul(x, weights['h1']), bias['b1'])
    # Hidden layer 2: h1_num -> h2_num
    layer2 = tf.add(tf.matmul(layer1, weights['h2']), bias['b2'])
    # Output layer: h2_num -> num_classes, returns raw logits
    # (note: no activation function is applied to the hidden layers in this example)
    out_layer = tf.add(tf.matmul(layer2, weights['out']), bias['out'])
    return out_layer
# Build the model: y_ holds the raw logits, prediction the softmax probabilities
y_ = neural_net(X)
prediction = tf.nn.softmax(y_)

# Loss (softmax cross-entropy on the logits) and Adam optimizer
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=y_, labels=Y))
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
train_op = optimizer.minimize(loss)

# Accuracy: fraction of samples whose predicted class matches the label
correct_pred = tf.equal(tf.argmax(prediction, 1), tf.argmax(Y, 1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
init = tf.global_variables_initializer()
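# For reference, the softmax cross-entropy loss defined above computes, for
# logits z and a one-hot label vector y:
#     softmax(z)_i = exp(z_i) / sum_j exp(z_j)
#     loss = -sum_i y_i * log(softmax(z)_i)
# and tf.reduce_mean averages this value over the mini-batch.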
with tf.Session() as sess:
    sess.run(init)
    for step in range(1, num_steps + 1):
        batch_x, batch_y = mnist.train.next_batch(batch_size)
        # Run one optimization step on the current mini-batch
        sess.run(train_op, feed_dict={X: batch_x, Y: batch_y})
        if step % disp_step == 0 or step == 1:
            # Calculate batch loss and accuracy
            loss_disp, acc = sess.run([loss, accuracy],
                                      feed_dict={X: batch_x, Y: batch_y})
            print("Step " + str(step) + ", Minibatch Loss= " +
                  "{:.4f}".format(loss_disp) + ", Training Accuracy= " +
                  "{:.3f}".format(acc))
    print("Optimization finished")
    # Evaluate on the full MNIST test set
    print("Test accuracy:",
          sess.run(accuracy, feed_dict={X: mnist.test.images, Y: mnist.test.labels}))
?著作權(quán)歸作者所有,轉(zhuǎn)載或內(nèi)容合作請聯(lián)系作者
- 文/潘曉璐 我一進店門,熙熙樓的掌柜王于貴愁眉苦臉地迎上來感凤,“玉大人悯周,你說我怎么就攤上這事∨愀停” “怎么了禽翼?”我有些...
- 文/不壞的土叔 我叫張陵,是天一觀的道長族跛。 經(jīng)常有香客問我闰挡,道長,這世上最難降的妖魔是什么礁哄? 我笑而不...
- 正文 為了忘掉前任长酗,我火速辦了婚禮,結(jié)果婚禮上桐绒,老公的妹妹穿的比我還像新娘夺脾。我一直安慰自己,他們只是感情好茉继,可當我...
- 文/花漫 我一把揭開白布咧叭。 她就那樣靜靜地躺著,像睡著了一般烁竭。 火紅的嫁衣襯著肌膚如雪佳簸。 梳的紋絲不亂的頭發(fā)上,一...
- 文/蒼蘭香墨 我猛地睜開眼佩脊,長吁一口氣:“原來是場噩夢啊……” “哼蛙粘!你這毒婦竟也來了?” 一聲冷哼從身側(cè)響起威彰,我...
- 正文 年R本政府宣布,位于F島的核電站妇汗,受9級特大地震影響帘不,放射性物質(zhì)發(fā)生泄漏。R本人自食惡果不足惜铛纬,卻給世界環(huán)境...
- 文/蒙蒙 一厌均、第九天 我趴在偏房一處隱蔽的房頂上張望唬滑。 院中可真熱鬧告唆,春花似錦、人聲如沸晶密。這莊子的主人今日做“春日...
- 文/蒼蘭香墨 我抬頭看了看天上的太陽稻艰。三九已至懂牧,卻和暖如春,著一層夾襖步出監(jiān)牢的瞬間尊勿,已是汗流浹背僧凤。 一陣腳步聲響...
推薦閱讀更多精彩內(nèi)容
- 這是MATLAB關(guān)于Deep Learning 的一個入門的簡單的例程 Step1加載并查看數(shù)據(jù) 然后隨機顯示其中...
- 摘要 cvpr2017 作品, 是級聯(lián)形狀回歸(Cascaded Shape Regressor)人臉對齊框架的...
- All code can be find here. Implementing Recurrent Neural ...
- 吳恩達deep_learning_week4_Deep_Neural_Network 標簽: 機器學習深度學習 本...