
Refactor tensorboard_basic for TF1.0
Signed-off-by: Norman Heckscher <[email protected]>
normanheckscher committed Jan 14, 2017
1 parent 6985952 commit 5b9aef4
Showing 2 changed files with 43 additions and 54 deletions.
8 changes: 4 additions & 4 deletions examples/4_Utils/tensorboard_basic.py
@@ -52,18 +52,18 @@
 init = tf.initialize_all_variables()
 
 # Create a summary to monitor cost tensor
-tf.scalar_summary("loss", cost)
+tf.summary.scalar("loss", cost)
 # Create a summary to monitor accuracy tensor
-tf.scalar_summary("accuracy", acc)
+tf.summary.scalar("accuracy", acc)
 # Merge all summaries into a single op
-merged_summary_op = tf.merge_all_summaries()
+merged_summary_op = tf.summary.merge_all()
 
 # Launch the graph
 with tf.Session() as sess:
     sess.run(init)
 
     # op to write logs to Tensorboard
-    summary_writer = tf.train.SummaryWriter(logs_path, graph=tf.get_default_graph())
+    summary_writer = tf.summary.FileWriter(logs_path, graph=tf.get_default_graph())
 
     # Training cycle
     for epoch in range(training_epochs):
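For context, the pre-1.0 symbols removed above map one-to-one onto the TF 1.0 names this commit introduces: tf.scalar_summary becomes tf.summary.scalar, tf.merge_all_summaries becomes tf.summary.merge_all, and tf.train.SummaryWriter becomes tf.summary.FileWriter. The following is a minimal sketch of the renamed API in context, assuming TensorFlow 1.x; the tiny placeholder graph and the logs_path value are illustrative stand-ins, not code from this repository.

# Minimal TF 1.x sketch of the renamed summary API (illustrative only; the tiny
# graph and the logs_path value are stand-ins, not this repository's code).
import tensorflow as tf

logs_path = "/tmp/tensorflow_logs/example"  # assumed log directory

x = tf.placeholder(tf.float32, name="x")
w = tf.Variable(2.0, name="w")
loss = tf.square(w * x - 1.0, name="loss")

tf.summary.scalar("loss", loss)              # was tf.scalar_summary("loss", ...)
merged_summary_op = tf.summary.merge_all()   # was tf.merge_all_summaries()
init = tf.global_variables_initializer()     # was tf.initialize_all_variables()

with tf.Session() as sess:
    sess.run(init)
    # was tf.train.SummaryWriter(...)
    summary_writer = tf.summary.FileWriter(logs_path, graph=tf.get_default_graph())
    for step in range(3):
        summary = sess.run(merged_summary_op, feed_dict={x: float(step)})
        summary_writer.add_summary(summary, step)
    summary_writer.close()

Passing graph=tf.get_default_graph() is what populates TensorBoard's Graphs tab, and add_summary tags each serialized summary with a step so the scalars plot as time series.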
89 changes: 39 additions & 50 deletions notebooks/4_Utils/tensorboard_basic.ipynb
@@ -20,33 +20,22 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": null,
    "metadata": {
     "collapsed": false
    },
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Extracting /tmp/data/train-images-idx3-ubyte.gz\n",
-      "Extracting /tmp/data/train-labels-idx1-ubyte.gz\n",
-      "Extracting /tmp/data/t10k-images-idx3-ubyte.gz\n",
-      "Extracting /tmp/data/t10k-labels-idx1-ubyte.gz\n"
-     ]
-    }
-   ],
+   "outputs": [],
    "source": [
     "import tensorflow as tf\n",
     "\n",
     "# Import MINST data\n",
     "from tensorflow.examples.tutorials.mnist import input_data\n",
-    "mnist = input_data.read_data_sets(\"/tmp/data/\", one_hot=True)"
+    "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 6,
    "metadata": {
     "collapsed": true
    },
@@ -72,9 +61,9 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 9,
    "metadata": {
-    "collapsed": true
+    "collapsed": false
    },
    "outputs": [],
    "source": [
@@ -95,19 +84,19 @@
     "    acc = tf.reduce_mean(tf.cast(acc, tf.float32))\n",
     "\n",
     "# Initializing the variables\n",
-    "init = tf.initialize_all_variables()\n",
+    "init = tf.global_variables_initializer()\n",
     "\n",
     "# Create a summary to monitor cost tensor\n",
-    "tf.scalar_summary(\"loss\", cost)\n",
+    "tf.summary.scalar(\"loss\", cost)\n",
     "# Create a summary to monitor accuracy tensor\n",
-    "tf.scalar_summary(\"accuracy\", acc)\n",
+    "tf.summary.scalar(\"accuracy\", acc)\n",
    "# Merge all summaries into a single op\n",
-    "merged_summary_op = tf.merge_all_summaries()"
+    "merged_summary_op = tf.summary.merge_all()"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 11,
    "metadata": {
     "collapsed": false
    },
@@ -116,31 +105,31 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Epoch: 0001 cost= 1.182138957\n",
-      "Epoch: 0002 cost= 0.664735104\n",
-      "Epoch: 0003 cost= 0.552622685\n",
-      "Epoch: 0004 cost= 0.498596912\n",
-      "Epoch: 0005 cost= 0.465510372\n",
-      "Epoch: 0006 cost= 0.442504281\n",
-      "Epoch: 0007 cost= 0.425473650\n",
-      "Epoch: 0008 cost= 0.412175615\n",
-      "Epoch: 0009 cost= 0.401374554\n",
-      "Epoch: 0010 cost= 0.392403109\n",
-      "Epoch: 0011 cost= 0.384748503\n",
-      "Epoch: 0012 cost= 0.378154479\n",
-      "Epoch: 0013 cost= 0.372405099\n",
-      "Epoch: 0014 cost= 0.367272844\n",
-      "Epoch: 0015 cost= 0.362745077\n",
-      "Epoch: 0016 cost= 0.358575674\n",
-      "Epoch: 0017 cost= 0.354862829\n",
-      "Epoch: 0018 cost= 0.351437834\n",
-      "Epoch: 0019 cost= 0.348300697\n",
-      "Epoch: 0020 cost= 0.345401101\n",
-      "Epoch: 0021 cost= 0.342762216\n",
-      "Epoch: 0022 cost= 0.340199728\n",
-      "Epoch: 0023 cost= 0.337916089\n",
-      "Epoch: 0024 cost= 0.335764083\n",
-      "Epoch: 0025 cost= 0.333645939\n",
+      "Epoch: 0001 cost= 1.182138961\n",
+      "Epoch: 0002 cost= 0.664609327\n",
+      "Epoch: 0003 cost= 0.552565036\n",
+      "Epoch: 0004 cost= 0.498541865\n",
+      "Epoch: 0005 cost= 0.465393374\n",
+      "Epoch: 0006 cost= 0.442491178\n",
+      "Epoch: 0007 cost= 0.425474149\n",
+      "Epoch: 0008 cost= 0.412152022\n",
+      "Epoch: 0009 cost= 0.401320939\n",
+      "Epoch: 0010 cost= 0.392305281\n",
+      "Epoch: 0011 cost= 0.384732356\n",
+      "Epoch: 0012 cost= 0.378109478\n",
+      "Epoch: 0013 cost= 0.372409370\n",
+      "Epoch: 0014 cost= 0.367236996\n",
+      "Epoch: 0015 cost= 0.362727492\n",
+      "Epoch: 0016 cost= 0.358627345\n",
+      "Epoch: 0017 cost= 0.354815522\n",
+      "Epoch: 0018 cost= 0.351413656\n",
+      "Epoch: 0019 cost= 0.348314827\n",
+      "Epoch: 0020 cost= 0.345429416\n",
+      "Epoch: 0021 cost= 0.342749324\n",
+      "Epoch: 0022 cost= 0.340224642\n",
+      "Epoch: 0023 cost= 0.337897302\n",
+      "Epoch: 0024 cost= 0.335720168\n",
+      "Epoch: 0025 cost= 0.333691911\n",
       "Optimization Finished!\n",
       "Accuracy: 0.9143\n",
       "Run the command line:\n",
@@ -155,7 +144,7 @@
     "    sess.run(init)\n",
     "\n",
     "    # op to write logs to Tensorboard\n",
-    "    summary_writer = tf.train.SummaryWriter(logs_path, graph=tf.get_default_graph())\n",
+    "    summary_writer = tf.summary.FileWriter(logs_path, graph=tf.get_default_graph())\n",
     "\n",
     "    # Training cycle\n",
     "    for epoch in range(training_epochs):\n",
@@ -234,7 +223,7 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "IPython (Python 2.7)",
+   "display_name": "Python 2",
    "language": "python",
    "name": "python2"
   },
@@ -248,7 +237,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython2",
-   "version": "2.7.11"
+   "version": "2.7.13"
   }
  },
  "nbformat": 4,
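The truncated notebook output above ends with "Run the command line:", i.e. the events written by tf.summary.FileWriter are meant to be browsed by launching tensorboard with --logdir pointed at the log directory and opening it in a browser. As a quick programmatic sanity check that the renamed ops actually logged scalars, the event file can also be read back with tf.train.summary_iterator (a TF 1.x API). This is an assumed follow-up, not part of the commit, and the log directory below is a guess mirroring the example's logs_path.

# Assumed follow-up check, not part of the commit: read back the events that
# tf.summary.FileWriter wrote (TF 1.x API). The log directory is a guess that
# mirrors the example's logs_path; adjust it to wherever the run logged.
import glob
import tensorflow as tf

log_dir = "/tmp/tensorflow_logs/example"
event_file = sorted(glob.glob(log_dir + "/events.out.tfevents.*"))[-1]

for event in tf.train.summary_iterator(event_file):
    for value in event.summary.value:
        if value.tag in ("loss", "accuracy"):
            print(event.step, value.tag, value.simple_value)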

0 comments on commit 5b9aef4
