From b99e9453045d2698c628263495a1f154c5f69955 Mon Sep 17 00:00:00 2001
From: Sandro Kalbermatter
Date: Sun, 14 May 2017 10:58:02 +0200
Subject: [PATCH] Fixing summaries for new version of TensorFlow

Refer to http://stackoverflow.com/questions/41066244/tensorflow-module-object-has-no-attribute-scalar-summary
---
 exercises/ex8/ex1_solution.py | 6 +++---
 exercises/ex8/ex2_solution.py | 6 +++---
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/exercises/ex8/ex1_solution.py b/exercises/ex8/ex1_solution.py
index 47a18af..acf9045 100644
--- a/exercises/ex8/ex1_solution.py
+++ b/exercises/ex8/ex1_solution.py
@@ -22,7 +22,7 @@
 
 # define the loss
 loss = tf.reduce_mean(tf.square(y_hat - y_data))
-tf.scalar_summary('log loss', tf.log(1.0 + loss))
+tf.summary.scalar('log loss', tf.log(1.0 + loss))
 
 # define the optimizer
 step_size = 0.1
@@ -34,8 +34,8 @@
 with tf.Session() as sess:
     sess.run(init)
 
-    summary_op = tf.merge_all_summaries()
-    summary_writer = tf.train.SummaryWriter("train/ex1_{}".format(datetime.datetime.now().strftime("%s")), sess.graph)
+    summary_op = tf.summary.merge_all()
+    summary_writer = tf.summary.FileWriter("train/ex1_{}".format(datetime.datetime.now().strftime("%s")), sess.graph)
 
     # call the train_op many times, each time it will update the variables W and b according to their gradients
     for step in range(201):
diff --git a/exercises/ex8/ex2_solution.py b/exercises/ex8/ex2_solution.py
index f1b1913..1516a0c 100644
--- a/exercises/ex8/ex2_solution.py
+++ b/exercises/ex8/ex2_solution.py
@@ -27,7 +27,7 @@
 
 # define the (stochastic!) loss
 loss = tf.reduce_mean(tf.square(y_hat - y_ph))
-tf.scalar_summary('log loss', tf.log(1.0 + loss)) # attention: this is the stochastic loss, i.e. it will be noisy
+tf.summary.scalar('log loss', tf.log(1.0 + loss)) # attention: this is the stochastic loss, i.e. it will be noisy
 
 # define the optimizer
 step_size = 0.1
@@ -39,8 +39,8 @@
 with tf.Session() as sess:
     sess.run(init)
 
-    summary_op = tf.merge_all_summaries()
-    summary_writer = tf.train.SummaryWriter("train/ex2_{}".format(datetime.datetime.now().strftime("%s")), sess.graph)
+    summary_op = tf.summary.merge_all()
+    summary_writer = tf.summary.FileWriter("train/ex2_{}".format(datetime.datetime.now().strftime("%s")), sess.graph)
 
     # call the train_op many times, each time it will update the variables W and b according to their gradients
     for step in range(201):