import tensorflow as tf
sess = tf.InteractiveSession()
# Placeholder for the input images, each flattened to a 784-dimensional vector
x = tf.placeholder(tf.float32, [None, 784])
# Model parameters: weights and biases, initialized to zeros
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
# Softmax regression: predicted class probabilities for the 10 digits
y = tf.nn.softmax(tf.matmul(x, W) + b)
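For reference, the graph above implements plain softmax regression; in math form (my notation, not part of the original code):

$$y = \mathrm{softmax}(xW + b), \qquad \mathrm{softmax}(z)_i = \frac{e^{z_i}}{\sum_j e^{z_j}}$$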
Define the cross-entropy (loss function)
# Placeholder for the true labels (one-hot encoded)
y_ = tf.placeholder(tf.float32, [None, 10])
# Loss function: sum the per-class terms (reduction_indices=[1]), then average over the batch
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))
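A side note on numerical stability: taking tf.log of a softmax output can produce -inf when a predicted probability underflows to 0. A safer sketch, assuming the same x, W, and y_ defined above (this fused op is part of TensorFlow 1.x but is not the tutorial's original code):

# Hedged alternative: keep the raw logits and let the fused op combine softmax and log
logits = tf.matmul(x, W) + b
cross_entropy_stable = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=logits))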
Optimization algorithm
# Define the optimization algorithm: Stochastic Gradient Descent (SGD)
# Each training step runs back propagation and applies one gradient-descent update
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
# Initialize all global variables (must run before training)
tf.global_variables_initializer().run()
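With the variables initialized, the train_step defined above would typically be run in a mini-batch loop. The sketch below is illustrative only: it assumes the MNIST data is loaded with the standard tensorflow.examples.tutorials.mnist helper (not shown in this section), and the step count and batch size are arbitrary choices:

# Minimal training-loop sketch (assumed setup: MNIST loaded via input_data; 1000 steps, batch size 100)
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
for _ in range(1000):
    batch_xs, batch_ys = mnist.train.next_batch(100)  # sample a random mini-batch
    train_step.run({x: batch_xs, y_: batch_ys})       # one SGD update via the InteractiveSession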