I am trying to get a summary of the training process for the neural net below.
import tensorflow as tf
import numpy as np
This may have to do with the InteractiveSession initialization.
I created the session at the beginning and then initialized the global variables within that session, and it worked.
I am unable to reproduce the error with the old code, which suggests the behavior is nondeterministic or that settings are being cached somewhere.
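For reference, here is a minimal sketch of the kind of ordering that could cause this (an assumption about what the old code did, since I cannot reproduce it): tf.global_variables_initializer() only covers variables that already exist when it is called, so a variable defined afterwards stays uninitialized.

import tensorflow as tf

sess = tf.InteractiveSession()
W = tf.Variable(tf.zeros([784, 10]))
init = tf.global_variables_initializer()  # only knows about W at this point
b = tf.Variable(tf.zeros([10]))           # b is not covered by init
sess.run(init)
# sess.run(b) would fail with FailedPreconditionError: uninitialized value b

The working version, with the variables initialized only after the whole graph is built, is below.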
import tensorflow as tf

# Create the InteractiveSession first, before building the graph.
sess = tf.InteractiveSession()

from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)

# Softmax regression: 784 input pixels -> 10 classes.
x = tf.placeholder(tf.float32, [None, 784])
W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
y = tf.nn.softmax(tf.matmul(x, W) + b)

# Cross-entropy loss against the one-hot labels.
y_ = tf.placeholder(tf.float32, [None, 10])
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.05).minimize(cross_entropy)

# Initialize the global variables within the session, after the whole graph is defined.
sess.run(tf.global_variables_initializer())

for _ in range(1000):
    batch_xs, batch_ys = mnist.train.next_batch(100)
    # print(batch_xs.shape, batch_ys.shape)
    sess.run(train_step, feed_dict={x: batch_xs, y_: batch_ys})
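Since the original goal is a summary of the training process, here is a minimal sketch of logging the loss and batch accuracy to TensorBoard (assuming "summary" means tf.summary ops; the "./logs" directory and the tag names are just illustrative). It replaces the plain training loop above so that each step also evaluates the merged summary op:

# Batch accuracy, for monitoring only.
correct = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))

# Scalar summaries and a writer for TensorBoard.
tf.summary.scalar("cross_entropy", cross_entropy)
tf.summary.scalar("accuracy", accuracy)
merged = tf.summary.merge_all()
writer = tf.summary.FileWriter("./logs", sess.graph)

for step in range(1000):
    batch_xs, batch_ys = mnist.train.next_batch(100)
    summary, _ = sess.run([merged, train_step],
                          feed_dict={x: batch_xs, y_: batch_ys})
    writer.add_summary(summary, step)
writer.close()

The curves can then be viewed with tensorboard --logdir=./logs.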