# MNIST Dataset
# Lab 7 Learning rate and Evaluation
import tensorflow as tf
import matplotlib.pyplot as plt
import random
tf.set_random_seed(777) # for reproducibility
# Fetch the dataset through TensorFlow's bundled helper
from tensorflow.examples.tutorials.mnist import input_data
# Check out https://www.tensorflow.org/get_started/mnist/beginners for
# more information about the mnist dataset
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
nb_classes = 10  # digits 0-9 -> 10 output classes
# MNIST data image of shape 28 * 28 = 784
X = tf.placeholder(tf.float32, [None, 784])
# 0 - 9 digits recognition = 10 classes
Y = tf.placeholder(tf.float32, [None, nb_classes])
# Single dense layer: weights (784 x 10) and per-class bias, randomly initialized.
W = tf.Variable(tf.random_normal([784, nb_classes]))
b = tf.Variable(tf.random_normal([nb_classes]))
# Hypothesis (using softmax)
hypothesis = tf.nn.softmax(tf.matmul(X, W) + b)
# Cross-entropy loss averaged over the batch.
# NOTE(review): tf.log(hypothesis) can hit log(0) -> NaN for a saturated
# softmax; tf.nn.softmax_cross_entropy_with_logits would be numerically safer.
cost = tf.reduce_mean(-tf.reduce_sum(Y * tf.log(hypothesis), axis=1))
# Plain SGD; learning_rate=0.1 matches the logged results below.
train = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(cost)
# Test model
# Prediction is correct when the argmax class matches the one-hot label.
is_correct = tf.equal(tf.argmax(hypothesis, 1), tf.argmax(Y, 1))
# Calculate accuracy
accuracy = tf.reduce_mean(tf.cast(is_correct, tf.float32))
# parameters
num_epochs = 15
batch_size = 100
# Number of mini-batches needed to cover the training set once (one epoch).
num_iterations = int(mnist.train.num_examples / batch_size)
# Train the softmax classifier, then evaluate it on the held-out test set.
# (Re-indented: the original paste had lost all indentation inside this
# `with` block, which made it syntactically invalid.)
with tf.Session() as sess:
    # Initialize TensorFlow variables
    sess.run(tf.global_variables_initializer())

    # Training cycle
    for epoch in range(num_epochs):
        avg_cost = 0
        for i in range(num_iterations):
            batch_xs, batch_ys = mnist.train.next_batch(batch_size)
            _, cost_val = sess.run(
                [train, cost], feed_dict={X: batch_xs, Y: batch_ys}
            )
            # To get the average cost per epoch, accumulate each batch's
            # cost divided by the number of iterations.
            avg_cost += cost_val / num_iterations

        print("Epoch: {:04d}, Cost: {:.9f}".format(epoch + 1, avg_cost))

    print("Learning finished")

    # Test the model using test sets.
    # Besides sess.run(), a node can be executed by calling .eval() on it,
    # passing the session explicitly.
    print(
        "Accuracy: ",
        accuracy.eval(
            session=sess, feed_dict={X: mnist.test.images, Y: mnist.test.labels}
        ),
    )
'''
Epoch: 0001, Cost: 2.826302672
Epoch: 0002, Cost: 1.061668952
Epoch: 0003, Cost: 0.838061315
Epoch: 0004, Cost: 0.733232745
Epoch: 0005, Cost: 0.669279885
Epoch: 0006, Cost: 0.624611836
Epoch: 0007, Cost: 0.591160344
Epoch: 0008, Cost: 0.563868987
Epoch: 0009, Cost: 0.541745171
Epoch: 0010, Cost: 0.522673578
Epoch: 0011, Cost: 0.506782325
Epoch: 0012, Cost: 0.492447643
Epoch: 0013, Cost: 0.479955837
Epoch: 0014, Cost: 0.468893674
Epoch: 0015, Cost: 0.458703488
Learning finished
Accuracy: 0.8951
'''
# Training epoch/batch
# epoch:
#   one full pass of training over the entire dataset
# batch:
#   since the whole dataset is hard to train on in one shot in memory,
#   it is split into smaller units (mini-batches)
# Example:
#   the dataset has 1000 examples; with batch_size 500, one epoch
#   requires 2 iterations.