tensorflow学习020——标量和自定义标量的tensorboard显示
tensorboard通过读取tensorflow的事件文件来运行,tensorflow的事件文件包括了在tensorflow运行中涉及到的主要数据
点击查看代码
import tensorflow as tf
import datetime
import os
# Load MNIST and build batched, repeating tf.data pipelines for train/eval.
(train_image, train_labels), (test_image, test_labels) = tf.keras.datasets.mnist.load_data()

# Append a channel axis so each image is (28, 28, 1), as Conv2D expects.
train_image = tf.expand_dims(train_image, -1)
test_image = tf.expand_dims(test_image, -1)

# sparse_categorical_crossentropy wants integer class labels.
train_labels = tf.cast(train_labels, tf.int64)
test_labels = tf.cast(test_labels, tf.int64)

# Training pipeline: shuffle over the full 60k-example buffer, batch by 128.
dataset = (
    tf.data.Dataset.from_tensor_slices((train_image, train_labels))
    .repeat()
    .shuffle(60000)
    .batch(128)
)
# Evaluation pipeline: no shuffling needed, just repeat + batch.
test_dataset = tf.data.Dataset.from_tensor_slices((test_image, test_labels)).repeat().batch(128)
# Each run writes its event files to logs/<timestamp> so runs never collide.
run_stamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
log_dir = os.path.join('logs', run_stamp)
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir, histogram_freq=1)

# Custom scalars: a dedicated file writer under <log_dir>/lr records the
# learning-rate curve; making it the default writer lets tf.summary.scalar()
# calls elsewhere (e.g. inside the LR schedule) land in this directory.
file_writer = tf.summary.create_file_writer(log_dir + '/lr')
file_writer.set_as_default()
# 定义自定义学习率功能,将被传递给keas LearningRateScheduler回调
def lr_rate(epoch):
    """Step-decay learning-rate schedule for keras LearningRateScheduler.

    Returns 0.2 for epochs 0-5, 0.02 for 6-10, 0.01 for 11-20, and 0.005
    afterwards.  The chosen rate is also logged with tf.summary.scalar()
    so a custom 'learning_rate' curve appears in TensorBoard.
    """
    if epoch > 20:
        learning_rate = 0.005
    elif epoch > 10:
        learning_rate = 0.01
    elif epoch > 5:
        learning_rate = 0.02
    else:
        learning_rate = 0.2
    # Arguments: scalar name, y-axis value, x-axis step.
    tf.summary.scalar('learning_rate', data=learning_rate, step=epoch)
    return learning_rate

lr_callback = tf.keras.callbacks.LearningRateScheduler(lr_rate)
# Small convnet classifier; GlobalMaxPooling2D collapses spatial dims, which
# is why input_shape can leave height/width unspecified.
model = tf.keras.Sequential([
    tf.keras.layers.Conv2D(16, [3, 3], activation='relu', input_shape=(None, None, 1)),
    tf.keras.layers.Conv2D(32, [3, 3], activation='relu'),
    tf.keras.layers.GlobalMaxPooling2D(),
    tf.keras.layers.Dense(10, activation='softmax'),
])

model.compile(
    optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'],
)

# The datasets repeat forever, so steps per epoch are fixed explicitly:
# 60000/10000 examples at batch size 128.
model.fit(
    dataset,
    epochs=25,
    steps_per_epoch=60000 // 128,
    validation_data=test_dataset,
    validation_steps=10000 // 128,
    callbacks=[tensorboard_callback, lr_callback],
)
# Launch the dashboard from the command line: tensorboard --logdir logs
作者:孙建钊
出处:http://www.cnblogs.com/sunjianzhao/
本文版权归作者和博客园共有,欢迎转载,但未经作者同意必须保留此段声明,且在文章页面明显位置给出原文链接,否则保留追究法律责任的权利。