Created June 4, 2019 09:53
tensorboard
import tensorflow as tf
import numpy as np

# The CSV is assumed to hold two feature columns followed by three
# one-hot label columns (the output layer below has 3 units).
data = np.loadtxt('../data/data.csv', delimiter=',',
                  unpack=True, dtype='float32')

x_data = np.transpose(data[0:2])
y_data = np.transpose(data[2:])

#########
# Build the neural network model
######
global_step = tf.Variable(0, trainable=False, name='global_step')

X = tf.placeholder(tf.float32)
Y = tf.placeholder(tf.float32)

with tf.name_scope('layer1'):
    W1 = tf.Variable(tf.random_uniform([2, 10], -1., 1.), name='W1')
    L1 = tf.nn.relu(tf.matmul(X, W1))

    tf.summary.histogram("X", X)
    tf.summary.histogram("Weights", W1)

with tf.name_scope('layer2'):
    W2 = tf.Variable(tf.random_uniform([10, 20], -1., 1.), name='W2')
    L2 = tf.nn.relu(tf.matmul(L1, W2))

    tf.summary.histogram("Weights", W2)

with tf.name_scope('output'):
    W3 = tf.Variable(tf.random_uniform([20, 3], -1., 1.), name='W3')
    model = tf.matmul(L2, W3)

    tf.summary.histogram("Weights", W3)
    tf.summary.histogram("Model", model)

with tf.name_scope('optimizer'):
    cost = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits_v2(labels=Y, logits=model))

    optimizer = tf.train.AdamOptimizer(learning_rate=0.01)
    train_op = optimizer.minimize(cost, global_step=global_step)

    # Scalar summary so the cost curve shows up in TensorBoard.
    tf.summary.scalar('cost', cost)

#########
# Train the neural network model
######
sess = tf.Session()
saver = tf.train.Saver(tf.global_variables())

# Resume from an existing checkpoint if one is found, otherwise
# initialize all variables from scratch.
ckpt = tf.train.get_checkpoint_state('../model/chapter5/model')
if ckpt and tf.train.checkpoint_exists(ckpt.model_checkpoint_path):
    saver.restore(sess, ckpt.model_checkpoint_path)
else:
    sess.run(tf.global_variables_initializer())

# Merge all summary ops and write the graph plus per-step summaries
# to ./logs for TensorBoard.
merged = tf.summary.merge_all()
writer = tf.summary.FileWriter('./logs', sess.graph)

for step in range(100):
    sess.run(train_op, feed_dict={X: x_data, Y: y_data})

    print('Step: %d, ' % sess.run(global_step),
          'Cost: %.3f' % sess.run(cost, feed_dict={X: x_data, Y: y_data}))

    summary = sess.run(merged, feed_dict={X: x_data, Y: y_data})
    writer.add_summary(summary, global_step=sess.run(global_step))

saver.save(sess, '../model/chapter5/dnn.ckpt', global_step=global_step)

#########
# Check the results
######
prediction = tf.argmax(model, 1)
target = tf.argmax(Y, 1)
print('Predicted values:', sess.run(prediction, feed_dict={X: x_data}))
print('Actual values:', sess.run(target, feed_dict={Y: y_data}))

is_correct = tf.equal(prediction, target)
accuracy = tf.reduce_mean(tf.cast(is_correct, tf.float32))
print('Accuracy: %.2f' % sess.run(accuracy * 100, feed_dict={X: x_data, Y: y_data}))
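Once the script has run (a usage sketch, assuming the default TensorBoard install and the ./logs path used by the FileWriter above), the recorded summaries can be viewed by launching TensorBoard against that directory:

    tensorboard --logdir=./logs

Opening the URL it prints (typically http://localhost:6006) shows the cost curve from the scalar summary and the weight histograms, grouped in the graph view under the layer1, layer2, output, and optimizer name scopes.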