当我们恢复一个元模型(restore a meta checkpoint)时,实际上我们执行的操作是将恢复的图载入到当前的默认图中。所以当你完成模型恢复之后,你可以在默认图中访问载入的任何内容,比如一个张量,一个操作或者集合。
import tensorflow as tf
# Load a previously exported meta graph into the graph currently in use
# (usually the default graph).  import_meta_graph returns a Saver built
# from the stored graph definition, which can later restore the weights.
saver = tf.train.import_meta_graph('results/model.ckpt-1000.meta')

# All of the restored metadata now lives in the default graph.
graph = tf.get_default_graph()

# Look up tensors, operations and collections by the names they were
# saved under.
global_step_tensor = graph.get_tensor_by_name('loss/global_step:0')
train_op = graph.get_operation_by_name('loss/train_op')
hyperparameters = tf.get_collection('hyperparameters')
# Import the pretrained VGG-16 meta graph into the default graph.
# NOTE(review): `dir` shadows the Python builtin; presumably it is a
# directory path string defined earlier in the file — confirm upstream.
vgg_saver = tf.train.import_meta_graph(dir + '/vgg/results/vgg-16.meta')

# Grab the graph that now contains the VGG-16 definition.
vgg_graph = tf.get_default_graph()

# Pick the node your own graph will be attached to; the commented
# alternatives below select progressively deeper convolutional layers.
output_conv = vgg_graph.get_tensor_by_name('conv1_2:0')
# output_conv = vgg_graph.get_tensor_by_name('conv2_2:0')
# output_conv = vgg_graph.get_tensor_by_name('conv3_3:0')
# output_conv = vgg_graph.get_tensor_by_name('conv4_3:0')
# output_conv = vgg_graph.get_tensor_by_name('conv5_3:0')

# tf.stop_gradient is an identity op whose gradient is zero: it keeps
# the pretrained VGG layers frozen while only the layers added on top
# of output_conv_sg are trained (fine-tuning).
output_conv_sg = tf.stop_gradient(output_conv)
# Randomly perturb brightness and contrast (data augmentation).
# Because these operations are not commutative, consider randomizing
# the order in which they are applied.
distorted_image = tf.image.random_brightness(distorted_image, max_delta=63)
distorted_image = tf.image.random_contrast(distorted_image, lower=0.2, upper=1.8)