Tuesday, 25 August 2020

Correctly saving model to .pb

I used the tutorial "Classify Flowers with Transfer Learning" to retrain the model on my own classes. Unfortunately, the notebook gives no example of how to properly save the resulting model as a .pb file.
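For context, the unchanged part of the notebook builds the image features with a TF-Hub module, roughly like this (reconstructed from memory, so the module URL and helper names may differ slightly from the actual notebook):

import tensorflow as tf
import tensorflow_hub as hub

# Pretrained InceptionV3 feature extractor from TF-Hub
module = hub.Module("https://tfhub.dev/google/imagenet/inception_v3/feature_vector/1")
image_size = hub.get_expected_image_size(module)

# Placeholder for a batch of JPEG-encoded images, decoded and resized per image
encoded_images = tf.placeholder(tf.string, shape=[None])

def decode_and_resize(encoded):
  decoded = tf.image.decode_jpeg(encoded, channels=3)
  decoded = tf.image.convert_image_dtype(decoded, tf.float32)
  return tf.image.resize_images(decoded, image_size)

batch_images = tf.map_fn(decode_and_resize, encoded_images, dtype=tf.float32)
features = module(batch_images)

The encoded_images placeholder and the features tensor are the ones that appear again in my code below.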

I tried to put together a solution based on the various posts I could find. In the end I manage to save something as a .pb file, but I cannot reopen the model later. I get the following error:

ValueError: Input 0 of node AssignVariableOp was passed float from
module/InceptionV3/Conv2d_1a_3x3/BatchNorm/beta:0 incompatible with expected resource
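The error is raised when I import the frozen .pb back, which I do with the usual frozen-graph pattern, more or less like this (a simplified sketch, not my exact script; the path matches the export code further down):

import tensorflow as tf

# Read the frozen GraphDef from disk and import it into a fresh graph
with tf.gfile.GFile(BASE_DIR + "/simple_model.pb", "rb") as f:
  graph_def = tf.GraphDef()
  graph_def.ParseFromString(f.read())

with tf.Graph().as_default() as graph:
  tf.import_graph_def(graph_def, name="")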

Below, the parts between ** are the pieces of the above-mentioned Colab notebook that I modified to save the model:

def create_model(features):
  layer = tf.layers.dense(inputs=features, units=NUM_CLASSES, activation=None)
  return layer

logits = create_model(features)
labels = tf.placeholder(tf.float32, [None, NUM_CLASSES])

cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(logits=logits, labels=labels)
cross_entropy_mean = tf.reduce_mean(cross_entropy)

optimizer = tf.train.GradientDescentOptimizer(learning_rate=LEARNING_RATE)
train_op = optimizer.minimize(loss=cross_entropy_mean)

probabilities = tf.nn.softmax(logits)

prediction = tf.argmax(probabilities, 1)
correct_prediction = tf.equal(prediction, tf.argmax(labels, 1))

accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))

**saver = tf.train.Saver()**


with tf.Session() as sess:
  sess.run(tf.global_variables_initializer())
  for i in range(NUM_TRAIN_STEPS):
    # Get a random batch of training examples.
    train_batch = get_batch(batch_size=TRAIN_BATCH_SIZE)
    batch_images, batch_labels = get_images_and_labels(train_batch)
    # Run the train_op to train the model.
    train_loss, _, train_accuracy = sess.run(
        [cross_entropy_mean, train_op, accuracy],
        feed_dict={encoded_images: batch_images, labels: batch_labels})
    is_final_step = (i == (NUM_TRAIN_STEPS - 1))
    if i % EVAL_EVERY == 0 or is_final_step:
      # Get a batch of test examples.
      test_batch = get_batch(batch_size=None, test=True)
      batch_images, batch_labels = get_images_and_labels(test_batch)
      # Evaluate how well our model performs on the test set.
      test_loss, test_accuracy, test_prediction, correct_predicate = sess.run(
        [cross_entropy_mean, accuracy, prediction, correct_prediction],
        feed_dict={encoded_images: batch_images, labels: batch_labels})
      print('Test accuracy at step %s: %.2f%%' % (i, (test_accuracy * 100)))

      **saver.save(sess, BASE_DIR+'/simple_model')
      tf.io.write_graph(sess.graph, BASE_DIR+'/graph', 'graph.pbtxt')**
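To pick output_node_names for the freezing step further down, I look at graph.pbtxt; a small helper to dump the node names directly (not part of the notebook, just for inspection) is:

# List every node name in the current graph, as an alternative to reading graph.pbtxt
for node in tf.get_default_graph().as_graph_def().node:
  print(node.name)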





And this is the code I then run to restore the checkpoint and freeze the graph into a .pb file:

**from tensorflow.python.framework import graph_util
saver = tf.train.import_meta_graph(BASE_DIR+'/simple_model.meta', clear_devices=True)
graph = tf.get_default_graph()
input_graph_def = graph.as_graph_def()
sess = tf.Session()
saver.restore(sess, BASE_DIR+"/simple_model")
# output node name, picked by looking at the file graph.pbtxt
output_node_names = "init"
output_graph_def = graph_util.convert_variables_to_constants(
    sess,
    input_graph_def,
    output_node_names.split(","))
output_graph = BASE_DIR + "/simple_model.pb"
print("output_graph...", output_graph)
with tf.gfile.GFile(output_graph, "wb") as f:
    f.write(output_graph_def.SerializeToString())**

sess.close()
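For completeness, the other route I am aware of that ends with a .pb on disk is the SavedModel API. A minimal sketch, to be run inside the training session above (the export directory name is one I made up), would be:

# Alternative: export a SavedModel; this writes export_dir/saved_model.pb plus a variables/ folder
export_dir = BASE_DIR + '/saved_model'
tf.saved_model.simple_save(
    sess,
    export_dir,
    inputs={'encoded_images': encoded_images},
    outputs={'probabilities': probabilities, 'prediction': prediction})

Even so, I would like to understand what goes wrong in the frozen-graph code above.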


 

