TF Estimator: Can't Load *.pb from Saved Model

Submitted by 怎甘沉沦 on 2020-01-05 05:53:19

Question


I create a simple model using TF Estimator and save it with the export_savedmodel function. I use a simple Iris dataset, which has 4 features.

import os
import tensorflow as tf

# xtrain/ytrain and xtest/ytest are assumed to be loaded elsewhere (120/30 split)
num_epoch = 50
num_train = 120
num_test = 30

# 1 Define input function
def input_function(x, y, is_train):
    dict_x = {
        "thisisinput" : x,
    }

    dataset = tf.data.Dataset.from_tensor_slices((
        dict_x, y
    ))

    if is_train:
        dataset = dataset.shuffle(num_train).repeat(num_epoch).batch(num_train)
    else:   
        dataset = dataset.batch(num_test)

    return dataset

def my_serving_input_fn():
    input_data = {
        "thisisinput" : tf.placeholder(tf.float32, [None, 4], name='inputtensors')
    }
    return tf.estimator.export.ServingInputReceiver(input_data, input_data)

def main(argv):
    tf.set_random_seed(1103)  # fix the seed so runs are reproducible

    # 2 Define feature columns
    feature_columns = [
        tf.feature_column.numeric_column(key="thisisinput",shape=4),
    ]

    # 3 Define an estimator
    classifier = tf.estimator.DNNClassifier(
        feature_columns=feature_columns,
        hidden_units=[10],
        n_classes=3,
        optimizer=tf.train.GradientDescentOptimizer(0.001),
        activation_fn=tf.nn.relu,
        model_dir = 'modeliris2/'
    )

    # Train the model
    classifier.train(
        input_fn=lambda:input_function(xtrain, ytrain, True)
    )

    # Evaluate the model
    eval_result = classifier.evaluate(
        input_fn=lambda:input_function(xtest, ytest, False)
    )

    print('\nTest set accuracy: {accuracy:0.3f}\n'.format(**eval_result))
    print('\nSaving models...')
    classifier.export_savedmodel("modeliris2pb", my_serving_input_fn)

if __name__ == "__main__":
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' 
    tf.app.run(main)

After running the program, it produces a folder containing saved_model.pb. Many tutorials suggest using contrib.predictor to load saved_model.pb, but I can't get it to work. This is how I've used the contrib.predictor function to load the model:

import tensorflow as tf
from tensorflow.contrib import predictor

def main():
    with tf.Session() as sess:
        PB_PATH = "modeliris2pb/1536219836/"
        predict_fn = predictor.from_saved_model(PB_PATH)

if __name__ == "__main__":
    main()

But it yields an error:

ValueError: Got signature_def_key "serving_default". Available signatures are ['predict']. Original error: No SignatureDef with key 'serving_default' found in MetaGraphDef.

Is there a better way to load *.pb files? Why does this error happen? I suspect it is because of my_serving_input_fn(), but I don't know why.
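
A side note: the error message itself lists 'predict' as the only available signature, which suggests the raw-float placeholder in my_serving_input_fn() produced only a predict SignatureDef (the 'serving_default' classification signature is only built when the serving input is a single serialized tf.Example string tensor). One possible workaround, which I have not verified, is to request that signature explicitly, since from_saved_model accepts a signature_def_key argument:

predict_fn = predictor.from_saved_model(PB_PATH, signature_def_key="predict")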


回答1:


I was facing the same issue. I searched the web but found no explanation of this error, so I tried a different approach:

Saving

First you need to define the feature spec as a dict, giving the length of each feature:

feature_spec = {'x': tf.FixedLenFeature([4],tf.float32)}
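
As a quick sanity check, the spec tells tf.parse_example how to turn serialized tf.train.Example protos into dense tensors; a minimal round-trip looks like this:

import tensorflow as tf

feature_spec = {'x': tf.FixedLenFeature([4], tf.float32)}

# Serialize one Example and parse it back to see what the spec produces
example = tf.train.Example(features=tf.train.Features(feature={
    'x': tf.train.Feature(float_list=tf.train.FloatList(value=[1.0, 2.0, 3.0, 4.0]))
}))
parsed = tf.parse_example([example.SerializeToString()], feature_spec)

with tf.Session() as sess:
    print(sess.run(parsed['x']))  # [[1. 2. 3. 4.]], shape [batch, 4]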

Then you have to build a function that takes a placeholder for the serialized Examples, parses them into features of the right shape via the spec, and returns a tf.estimator.export.ServingInputReceiver:

def serving_input_receiver_fn():
    serialized_tf_example = tf.placeholder(dtype=tf.string,
                                         shape=[None],
                                         name='input_tensors')
    receiver_tensors = {'inputs': serialized_tf_example}

    features = tf.parse_example(serialized_tf_example, feature_spec)
    return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)
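
Note the two dicts passed to ServingInputReceiver: features is what the model_fn will see (the dense tensors produced by tf.parse_example), while receiver_tensors is what the client feeds at serving time (the raw serialized strings under the key 'inputs').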

Then just save with export_savedmodel:

classifier.export_savedmodel(dir_path, serving_input_receiver_fn)
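
As a small convenience (in TF 1.x this call returns the export path), you can capture the timestamped directory instead of hard-coding it later:

export_dir = classifier.export_savedmodel(dir_path, serving_input_receiver_fn)
print(export_dir)  # e.g. b'./1536315752', the timestamped SavedModel folder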

Full example code:

import os
from six.moves.urllib.request import urlopen

import numpy as np
import tensorflow as tf


dir_path = os.path.dirname('.')

IRIS_TRAINING = os.path.join(dir_path,  "iris_training.csv")
IRIS_TEST = os.path.join(dir_path,   "iris_test.csv") 

feature_spec = {'x': tf.FixedLenFeature([4],tf.float32)}

def serving_input_receiver_fn():
    serialized_tf_example = tf.placeholder(dtype=tf.string,
                                         shape=[None],
                                         name='input_tensors')
    receiver_tensors = {'inputs': serialized_tf_example}

    features = tf.parse_example(serialized_tf_example, feature_spec)
    return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)




def main():
    training_set = tf.contrib.learn.datasets.base.load_csv_with_header(
        filename=IRIS_TRAINING,
        target_dtype=np.int,
        features_dtype=np.float32)
    test_set = tf.contrib.learn.datasets.base.load_csv_with_header(
        filename=IRIS_TEST,
        target_dtype=np.int,
        features_dtype=np.float32)

    feature_columns = [tf.feature_column.numeric_column("x", shape=[4])]


    classifier = tf.estimator.DNNClassifier(feature_columns=feature_columns,
                                          hidden_units=[10, 20, 10],
                                          n_classes=3,
                                          model_dir=dir_path)
    # Define the training inputs
    train_input_fn = tf.estimator.inputs.numpy_input_fn(
        x={"x": np.array(training_set.data)},
        y=np.array(training_set.target),
        num_epochs=None,
        shuffle=True)

    # Train the model
    classifier.train(input_fn=train_input_fn, steps=200)


    classifier.export_savedmodel(dir_path, serving_input_receiver_fn)


if __name__ == "__main__":
    main()
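
To confirm which signatures actually got exported (the point the original question tripped on), here is a minimal inspection sketch; "1536315752" stands in for whatever timestamped folder export_savedmodel created:

import tensorflow as tf

export_dir = "1536315752"  # hypothetical timestamped export folder

with tf.Session(graph=tf.Graph()) as sess:
    meta_graph_def = tf.saved_model.loader.load(
        sess, [tf.saved_model.tag_constants.SERVING], export_dir)
    # With a serialized-Example receiver this should include 'serving_default'
    print(list(meta_graph_def.signature_def.keys()))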

Restoring

Now let's restore the model:

import tensorflow as tf 
import os

dir_path = os.path.dirname('.')  # current directory
exported_path = os.path.join(dir_path, "1536315752")  # timestamped folder created by export_savedmodel

def main():
    # Build a tf.train.Example carrying the four Iris features under the
    # key 'x', matching the feature_spec used at export time
    model_input = tf.train.Example(features=tf.train.Features(feature={
        'x': tf.train.Feature(float_list=tf.train.FloatList(value=[6.4, 3.2, 4.5, 1.5]))
    }))

    # from_saved_model builds its own graph and session, so no explicit
    # tf.Session or tf.saved_model.loader.load is needed here
    predictor = tf.contrib.predictor.from_saved_model(exported_path)

    # The serving graph expects serialized Example protos under the
    # 'inputs' key defined in serving_input_receiver_fn
    model_input = model_input.SerializeToString()

    output_dict = predictor({"inputs": [model_input]})

    print("prediction is", output_dict['scores'])


if __name__ == "__main__":
    main()
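
If you need predictions for several rows at once, a small hypothetical helper (reusing the predictor and the 'x'/'inputs' keys defined above) can serialize a batch of Examples:

import tensorflow as tf

def predict_rows(predictor_fn, rows):
    # Serialize each row of four Iris features into a tf.train.Example proto
    examples = []
    for row in rows:
        ex = tf.train.Example(features=tf.train.Features(feature={
            'x': tf.train.Feature(float_list=tf.train.FloatList(value=row))
        }))
        examples.append(ex.SerializeToString())
    return predictor_fn({"inputs": examples})

# output = predict_rows(predictor, [[6.4, 3.2, 4.5, 1.5], [5.1, 3.5, 1.4, 0.2]])
# print(output['scores'])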

There is also an IPython notebook demo with data and explanation.



Source: https://stackoverflow.com/questions/52200016/tf-estimator-cant-load-pb-from-saved-model
