Print..Print
So far, the only way to check values has been to print them every time.
Let's view them nicely in a GUI instead.
5 Steps of using TensorBoard
1. From the TF graph, decide which tensors you want to log, and wrap each one in a summary op:
w2_hist = tf.summary.histogram("weight2", W2)
cost_sum = tf.summary.scalar("cost", cost)
2. Merge all the summaries into a single node:
summary = tf.summary.merge_all()
3. Create a writer node for the log files, and add the graph to it:
writer = tf.summary.FileWriter('./logs')
writer.add_graph(sess.graph)
4. Run the summary node and write the result to the file:
s, _ = sess.run([summary, optimizer], feed_dict=feed_dict)
writer.add_summary(s, global_step=global_step)
5. Launch TensorBoard:
$ tensorboard --logdir=./logs
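Putting the five steps together, here is a minimal end-to-end sketch, assuming TensorFlow 1.x; the tiny linear model is only a stand-in for whatever graph you actually want to log:

import tensorflow as tf

# Stand-in model: fit y = x with one weight
x_data = [[1.], [2.], [3.]]
y_data = [[1.], [2.], [3.]]
X = tf.placeholder(tf.float32, [None, 1])
Y = tf.placeholder(tf.float32, [None, 1])
W = tf.Variable(tf.random_normal([1, 1]), name="weight")
hypothesis = tf.matmul(X, W)
cost = tf.reduce_mean(tf.square(hypothesis - Y))
optimizer = tf.train.GradientDescentOptimizer(0.1).minimize(cost)

# Step 1: decide which tensors to log
tf.summary.histogram("weight", W)
tf.summary.scalar("cost", cost)
# Step 2: merge all summaries into one node
summary = tf.summary.merge_all()

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Step 3: create the writer node and add the graph
    writer = tf.summary.FileWriter("./logs")
    writer.add_graph(sess.graph)
    for step in range(100):
        # Step 4: run the summary node and write the result
        s, _ = sess.run([summary, optimizer], feed_dict={X: x_data, Y: y_data})
        writer.add_summary(s, global_step=step)
    writer.close()
# Step 5: $ tensorboard --logdir=./logs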
Scalar tensors
Use this for tensors whose output is a single scalar value, such as cost:
cost_summ = tf.summary.scalar("cost", cost)
Histogram (multi-dimensional tensors)
W1 = tf.Variable(tf.random_normal([2, 2]), name="weight_1")
b1 = tf.Variable(tf.random_normal([2]), name="bias_1")
layer1 = tf.sigmoid(tf.matmul(X, W1) + b1)
tf.summary.histogram("W1", W1)
tf.summary.histogram("b1", b1)
tf.summary.histogram("Layer1", layer1)
Scope (graph hierarchy)
with tf.name_scope("Layer1"):
    W1 = tf.Variable(tf.random_normal([2, 2]), name="weight_1")
    b1 = tf.Variable(tf.random_normal([2]), name="bias_1")
    layer1 = tf.sigmoid(tf.matmul(X, W1) + b1)

    tf.summary.histogram("W1", W1)
    tf.summary.histogram("b1", b1)
    tf.summary.histogram("Layer1", layer1)

with tf.name_scope("Layer2"):
    W2 = tf.Variable(tf.random_normal([2, 1]), name="weight_2")
    b2 = tf.Variable(tf.random_normal([1]), name="bias_2")
    hypothesis = tf.sigmoid(tf.matmul(layer1, W2) + b2)

    tf.summary.histogram("W2", W2)
    tf.summary.histogram("b2", b2)
    tf.summary.histogram("Hypothesis", hypothesis)
Since the whole graph is hard to take in at once, name scopes display it as a hierarchy.
Clicking on a layer expands it, showing the more detailed graph structure inside.
Merge summaries
merged_summary = tf.summary.merge_all()
Create the writer node
writer = tf.summary.FileWriter("./logs/xor_logs_r0_01")
writer.add_graph(sess.graph) # Show the graph
This writes the log files to the given path,
and add_graph attaches sess.graph so the graph shows up in TensorBoard.
Run merged summary and write
for step in range(10001):
    _, summary, cost_val = sess.run(
        [train, merged_summary, cost], feed_dict={X: x_data, Y: y_data}
    )
    writer.add_summary(summary, global_step=step)
    if step % 100 == 0:
        print(step, cost_val)
Write the merged summary to the log at every step.
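Once training is done, it is also worth closing the writer so any buffered events are flushed to disk (writer.close() is a standard FileWriter method, though the lecture code omits it):
writer.close()  # flush buffered events and release the log file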
Launch TensorBoard (local)
Point --logdir at the directory the writer logged to, then open http://127.0.0.1:6006 (the default port) in a browser:
$ tensorboard --logdir=./logs/xor_logs_r0_01
Launch TensorBoard (remote server)
$ ssh -L local_port:127.0.0.1:remote_port username@server.com
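For example, forwarding a hypothetical local port 7007 to TensorBoard's default port 6006 on the server (the port numbers and hostname here are placeholders):
$ ssh -L 7007:127.0.0.1:6006 username@server.com
Then launch $ tensorboard --logdir=./logs on the server as usual, and open http://127.0.0.1:7007 in a browser on the local machine.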
Needs more study: things to polish or look into further.
Full code
# Lab 9 XOR
import tensorflow as tf
import numpy as np
tf.set_random_seed(777) # for reproducibility
x_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
y_data = np.array([[0], [1], [1], [0]], dtype=np.float32)
X = tf.placeholder(tf.float32, [None, 2], name="x")
Y = tf.placeholder(tf.float32, [None, 1], name="y")
with tf.name_scope("Layer1"):
    W1 = tf.Variable(tf.random_normal([2, 2]), name="weight_1")
    b1 = tf.Variable(tf.random_normal([2]), name="bias_1")
    layer1 = tf.sigmoid(tf.matmul(X, W1) + b1)

    tf.summary.histogram("W1", W1)
    tf.summary.histogram("b1", b1)
    tf.summary.histogram("Layer1", layer1)

with tf.name_scope("Layer2"):
    W2 = tf.Variable(tf.random_normal([2, 1]), name="weight_2")
    b2 = tf.Variable(tf.random_normal([1]), name="bias_2")
    hypothesis = tf.sigmoid(tf.matmul(layer1, W2) + b2)

    tf.summary.histogram("W2", W2)
    tf.summary.histogram("b2", b2)
    tf.summary.histogram("Hypothesis", hypothesis)

# cost/loss function
with tf.name_scope("Cost"):
    cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) * tf.log(1 - hypothesis))
    tf.summary.scalar("Cost", cost)

with tf.name_scope("Train"):
    train = tf.train.AdamOptimizer(learning_rate=0.01).minimize(cost)

# Accuracy computation
# True if hypothesis > 0.5 else False
predicted = tf.cast(hypothesis > 0.5, dtype=tf.float32)
accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float32))
tf.summary.scalar("accuracy", accuracy)

# Launch graph
with tf.Session() as sess:
    # tensorboard --logdir=./logs/xor_logs
    merged_summary = tf.summary.merge_all()
    writer = tf.summary.FileWriter("./logs/xor_logs_r0_01")
    writer.add_graph(sess.graph)  # Show the graph

    # Initialize TensorFlow variables
    sess.run(tf.global_variables_initializer())

    for step in range(10001):
        _, summary, cost_val = sess.run(
            [train, merged_summary, cost], feed_dict={X: x_data, Y: y_data}
        )
        writer.add_summary(summary, global_step=step)

        if step % 100 == 0:
            print(step, cost_val)

    # Accuracy report
    h, p, a = sess.run(
        [hypothesis, predicted, accuracy], feed_dict={X: x_data, Y: y_data}
    )
    print(f"\nHypothesis:\n{h} \nPredicted:\n{p} \nAccuracy:\n{a}")
"""
Hypothesis:
[[6.1310326e-05]
[9.9993694e-01]
[9.9995077e-01]
[5.9751470e-05]]
Predicted:
[[0.]
[1.]
[1.]
[0.]]
Accuracy:
1.0
"""
Multiple runs (varying a hyperparameter)
Write several runs into subdirectories of /logs, changing a parameter each time.
For example, to compare two runs with different learning rates:
with tf.name_scope("Train"):
    train = tf.train.AdamOptimizer(learning_rate=0.01).minimize(cost)
...
writer = tf.summary.FileWriter("./logs/xor_logs_r0_01")

with tf.name_scope("Train"):
    train = tf.train.AdamOptimizer(learning_rate=0.1).minimize(cost)
...
writer = tf.summary.FileWriter("./logs/xor_logs")
After logging the two runs into different directories like this, launch TensorBoard on the parent folder from the terminal:
$ tensorboard --logdir=./logs
Both runs then show up as separate graphs and can be compared directly.
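As a sketch of how the two runs might come from a single script (assuming TensorFlow 1.x; the single-layer stand-in model and the xor_logs_lr_* directory names are placeholders, not the lecture's code):

import tensorflow as tf
import numpy as np

x_data = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=np.float32)
y_data = np.array([[0], [1], [1], [0]], dtype=np.float32)

for lr in (0.1, 0.01):
    tf.reset_default_graph()  # build a fresh graph for each run
    X = tf.placeholder(tf.float32, [None, 2])
    Y = tf.placeholder(tf.float32, [None, 1])
    # single-layer stand-in; the lecture's real model has two layers
    W = tf.Variable(tf.random_normal([2, 1]))
    b = tf.Variable(tf.random_normal([1]))
    hypothesis = tf.sigmoid(tf.matmul(X, W) + b)
    cost = -tf.reduce_mean(Y * tf.log(hypothesis) + (1 - Y) * tf.log(1 - hypothesis))
    train = tf.train.AdamOptimizer(learning_rate=lr).minimize(cost)
    tf.summary.scalar("cost", cost)
    merged = tf.summary.merge_all()
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        writer = tf.summary.FileWriter("./logs/xor_logs_lr_%g" % lr)  # one directory per run
        writer.add_graph(sess.graph)
        for step in range(1001):
            _, s = sess.run([train, merged], feed_dict={X: x_data, Y: y_data})
            writer.add_summary(s, global_step=step)
        writer.close()

Pointing tensorboard --logdir at ./logs then shows the cost curves of both runs side by side.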