AI Learning Notes 3: TensorFlow

Hello World:

import tensorflow as tf

# Define constants and print them
node1 = tf.constant(3.0, dtype=tf.float32) 
node2 = tf.constant(4.0) # also tf.float32 implicitly
sess = tf.Session()
print(sess.run([node1, node2]))

# Addition
node3 = tf.add(node1, node2)
print "node3:", node3
print "sess.run(node3):", sess.run(node3)

# Log
# [3.0, 4.0]
# node3: Tensor("Add:0", shape=TensorShape([]), dtype=float32)
# sess.run(node3): 7.0

# Placeholders: feed in values at run time
a = tf.placeholder(tf.float32)
b = tf.placeholder(tf.float32)
adder_node = a + b  # + provides a shortcut for tf.add(a, b)

print(sess.run(adder_node, {a: 3, b: 4.5}))
print(sess.run(adder_node, {a: [1, 3], b: [2, 4]}))

# 7.5
# [ 3.  7.]
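
# An extra sketch (not in the original notes): graph nodes compose, so the same
# placeholders can feed a larger expression through a single sess.run call.
add_and_triple = adder_node * 3.
print(sess.run(add_and_triple, {a: 3, b: 4.5}))

# 22.5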

# Formula computation: a simple linear model
W = tf.Variable([.3], dtype=tf.float32)
b = tf.Variable([-.3], dtype=tf.float32)
x = tf.placeholder(tf.float32)
linear_model = W * x + b
init = tf.global_variables_initializer()
sess.run(init)
print(sess.run(linear_model, {x: [1, 2, 3, 4]}))

# [ 0.  0.30000001  0.60000002  0.90000004]

# Compute the loss (sum of squared errors)
y = tf.placeholder(tf.float32)
squared_deltas = tf.square(linear_model - y)
loss = tf.reduce_sum(squared_deltas)
print(sess.run(loss, {x: [1, 2, 3, 4], y: [0, -1, -2, -3]}))

# 23.66
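
# Checking the 23.66 by hand (a sketch, not part of the original notes): with
# W = 0.3 and b = -0.3 the predictions are [0, 0.3, 0.6, 0.9], so the loss is
# 0^2 + 1.3^2 + 2.6^2 + 3.9^2 = 0 + 1.69 + 6.76 + 15.21 = 23.66.
import numpy as np
y_hat = 0.3 * np.array([1., 2., 3., 4.]) - 0.3
print(np.sum((y_hat - np.array([0., -1., -2., -3.])) ** 2))  # ~23.66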

# Manually reassign the variables (W = -1, b = 1 fit the data exactly, so the loss drops to 0)
fixW = tf.assign(W, [-1.])
fixb = tf.assign(b, [1.])
sess.run([fixW, fixb])
print(sess.run(loss, {x: [1, 2, 3, 4], y: [0, -1, -2, -3]}))

# 0.0

# Training
optimizer = tf.train.GradientDescentOptimizer(0.01)
train = optimizer.minimize(loss)

x_train = [1, 2, 3, 4]
y_train = [0, -1, -2, -3]

for i in range(1000):
    sess.run(train, {x: x_train, y: y_train})
print(sess.run([W, b, loss], {x: x_train, y: y_train}))

# [array([-0.9999969], dtype=float32), array([ 0.99999082], dtype=float32), 5.6999738e-11]
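
# For reference (a sketch, not from the original notes): optimizer.minimize(loss)
# is roughly what you would get by computing the gradients yourself and applying
# the update rule W <- W - 0.01 * dL/dW (and likewise for b).
grad_W, grad_b = tf.gradients(loss, [W, b])
manual_train = tf.group(tf.assign_sub(W, 0.01 * grad_W),
                        tf.assign_sub(b, 0.01 * grad_b))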

Now let's train a bit with linear regression, this time using the tf.estimator API:

import tensorflow as tf
# NumPy is often used to load, manipulate and preprocess data.
import numpy as np

# Declare list of features. We only have one numeric feature. There are many
# other types of columns that are more complicated and useful.
feature_columns = [tf.feature_column.numeric_column("x", shape=[1])]

# An estimator is the front end to invoke training (fitting) and evaluation
# (inference). There are many predefined types like linear regression,
# linear classification, and many neural network classifiers and regressors.
# The following code provides an estimator that does linear regression.
estimator = tf.estimator.LinearRegressor(feature_columns=feature_columns)

# TensorFlow provides many helper methods to read and set up data sets.
# Here we use two data sets: one for training and one for evaluation
# We have to tell the function how many epochs of data (num_epochs) we want
# and how big each batch (batch_size) should be.
x_train = np.array([1., 2., 3., 4.])
y_train = np.array([0., -1., -2., -3.])
x_eval = np.array([2., 5., 8., 1.])
y_eval = np.array([-1.01, -4.1, -7, 0.])
input_fn = tf.estimator.inputs.numpy_input_fn(
    {"x": x_train}, y_train, batch_size=4, num_epochs=None, shuffle=True)
train_input_fn = tf.estimator.inputs.numpy_input_fn(
    {"x": x_train}, y_train, batch_size=4, num_epochs=1000, shuffle=False)
eval_input_fn = tf.estimator.inputs.numpy_input_fn(
    {"x": x_eval}, y_eval, batch_size=4, num_epochs=1000, shuffle=False)

# We can invoke 1000 training steps by calling the train method and passing the
# training data set.
estimator.train(input_fn=input_fn, steps=1000)

# Here we evaluate how well our model did.
train_metrics = estimator.evaluate(input_fn=train_input_fn)
eval_metrics = estimator.evaluate(input_fn=eval_input_fn)
print("train metrics: %r"% train_metrics)
print("eval metrics: %r"% eval_metrics)

# train metrics: {'average_loss': 1.7029566e-06, 'global_step': 1000, 'loss': 6.8118266e-06}
# eval metrics: {'average_loss': 0.0026467575, 'global_step': 1000, 'loss': 0.01058703}
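
A trained estimator can also generate predictions. A minimal sketch (my addition; it assumes the LinearRegressor's output dict uses the key "predictions"):

predict_input_fn = tf.estimator.inputs.numpy_input_fn(
    {"x": np.array([5., 6., 7.])}, num_epochs=1, shuffle=False)
for pred in estimator.predict(input_fn=predict_input_fn):
    print(pred["predictions"])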

Custom estimator:

import numpy as np
import tensorflow as tf

# Define the model function; we only have one real-valued feature.
def model_fn(features, labels, mode):
  # Build a linear model and predict values
  W = tf.get_variable("W", [1], dtype=tf.float64)
  b = tf.get_variable("b", [1], dtype=tf.float64)
  y = W * features['x'] + b
  # Loss sub-graph
  loss = tf.reduce_sum(tf.square(y - labels))
  # Training sub-graph
  global_step = tf.train.get_global_step()
  optimizer = tf.train.GradientDescentOptimizer(0.01)
  train = tf.group(optimizer.minimize(loss),
                   tf.assign_add(global_step, 1))
  # EstimatorSpec connects subgraphs we built to the
  # appropriate functionality.
  return tf.estimator.EstimatorSpec(
      mode=mode,
      predictions=y,
      loss=loss,
      train_op=train)

estimator = tf.estimator.Estimator(model_fn=model_fn)
# define our data sets
x_train = np.array([1., 2., 3., 4.])
y_train = np.array([0., -1., -2., -3.])
x_eval = np.array([2., 5., 8., 1.])
y_eval = np.array([-1.01, -4.1, -7, 0.])
input_fn = tf.estimator.inputs.numpy_input_fn(
    {"x": x_train}, y_train, batch_size=4, num_epochs=None, shuffle=True)
train_input_fn = tf.estimator.inputs.numpy_input_fn(
    {"x": x_train}, y_train, batch_size=4, num_epochs=1000, shuffle=False)
eval_input_fn = tf.estimator.inputs.numpy_input_fn(
    {"x": x_eval}, y_eval, batch_size=4, num_epochs=1000, shuffle=False)

# train
estimator.train(input_fn=input_fn, steps=1000)
# Here we evaluate how well our model did.
train_metrics = estimator.evaluate(input_fn=train_input_fn)
eval_metrics = estimator.evaluate(input_fn=eval_input_fn)
print("train metrics: %r"% train_metrics)
print("eval metrics: %r"% eval_metrics)