Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
def serving_input_receiver_fn():
    """Build the serving-time input receiver for Estimator.export_savedmodel.

    Receives a batch of serialized tf.Example protos under the key
    "predictor_inputs" and parses out a single bytes feature "image".

    Returns:
        tf.estimator.export.ServingInputReceiver mapping the parsed
        features and the raw receiver tensors.
    """
    # NOTE(review): tensor name contains a typo ("exapmle"); kept as-is because
    # exported clients may already reference it — confirm before renaming.
    serialized_tf_example = tf.placeholder(
        dtype=tf.string, shape=[None], name='input_exapmle_tensor')
    receiver_tensors = {"predictor_inputs": serialized_tf_example}
    feature_spec = {"image": tf.FixedLenFeature((), tf.string)}
    # BUG FIX: the original passed example_names='input' (a scalar). The
    # `example_names` argument must be a rank-1 vector aligned with
    # `serialized`, or None — the scalar caused:
    #   ValueError: Shape must be rank 1 but is rank 0 for 'ParseExample'.
    # It is debug-label metadata only, so it is simply omitted.
    features = tf.parse_example(serialized_tf_example, feature_spec)
    return tf.estimator.export.ServingInputReceiver(features, receiver_tensors)
- predict_output = {
- 'pred_output_class': tf.argmax(logits, axis=1),
- 'pred_output_prob': tf.nn.softmax(logits, name='softmax_tensor')
- }
- export_output = {'predict_output': tf.estimator.export.PredictOutput(predict_output)}
- if mode == tf.estimator.ModeKeys.PREDICT:
- return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions, export_outputs=export_output)
- resnet_classifier.export_savedmodel(FLAGS.export_dir, serving_input_receiver_fn)
- ValueError: Shape must be rank 1 but is rank 0 for 'ParseExample/ParseExample' (op: 'ParseExample') with input shapes: [?], [], [], [0].
Add Comment
Please, Sign In to add comment