Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
import tensorflow as tf

tf.enable_eager_execution()

saved_model_dir = 'inference_graph/saved_model/'

# BUG FIX: the original paste passed `input_arrays` / `output_arrays` to
# from_saved_model() without ever defining them (NameError on a clean run).
# The names below come from the SavedModel's SignatureDef printed by the
# converter itself (tensors image_tensor:0, num_detections:0, etc.).
input_arrays = ["image_tensor"]
output_arrays = [
    "num_detections",
    "detection_boxes",
    "detection_scores",
    "detection_classes",
]

# Pin the dynamic (-1, -1, -1, 3) input to a concrete shape -- TOCO cannot
# convert graphs with unknown spatial dimensions.
converter = tf.contrib.lite.TFLiteConverter.from_saved_model(
    saved_model_dir,
    input_arrays=input_arrays,
    output_arrays=output_arrays,
    input_shapes={"image_tensor": [1, 832, 832, 3]},
)
converter.post_training_quantize = True
- INFO:tensorflow:Saver not created because there are no variables in the graph to restore
- INFO:tensorflow:The specified SavedModel has no variables; no checkpoints were restored.
- INFO:tensorflow:The given SavedModel MetaGraphDef contains SignatureDefs with the following keys: {'serving_default'}
- INFO:tensorflow:input tensors info:
- INFO:tensorflow:Tensor's key in saved_model's tensor_map: inputs
- INFO:tensorflow: tensor name: image_tensor:0, shape: (-1, -1, -1, 3), type: DT_UINT8
- INFO:tensorflow:output tensors info:
- INFO:tensorflow:Tensor's key in saved_model's tensor_map: num_detections
- INFO:tensorflow: tensor name: num_detections:0, shape: (-1), type: DT_FLOAT
- INFO:tensorflow:Tensor's key in saved_model's tensor_map: detection_boxes
- INFO:tensorflow: tensor name: detection_boxes:0, shape: (-1, 100, 4), type: DT_FLOAT
- INFO:tensorflow:Tensor's key in saved_model's tensor_map: detection_scores
- INFO:tensorflow: tensor name: detection_scores:0, shape: (-1, 100), type: DT_FLOAT
- INFO:tensorflow:Tensor's key in saved_model's tensor_map: detection_classes
- INFO:tensorflow: tensor name: detection_classes:0, shape: (-1, 100), type: DT_FLOAT
- INFO:tensorflow:Saver not created because there are no variables in the graph to restore
- INFO:tensorflow:The specified SavedModel has no variables; no checkpoints were restored.
- INFO:tensorflow:Froze 0 variables.
- INFO:tensorflow:Converted 0 variables to const ops.
# Run the TOCO conversion. NOTE(review): this is the call that raised
# "RuntimeError: TOCO failed" in the transcript below -- with a TF Object
# Detection API SavedModel the post-processing ops (NMS etc.) are typically
# unsupported by the TF 1.x converter; exporting via export_tflite_ssd_graph
# first is the usual remedy. TODO confirm against the model in use.
tflite_quantized_model = converter.convert()
- ---------------------------------------------------------------------------
- RuntimeError Traceback (most recent call last)
- <ipython-input-6-61a136476642> in <module>
- ----> 1 tflite_quantized_model = converter.convert()
- ~/.local/lib/python3.5/site-packages/tensorflow/contrib/lite/python/lite.py in convert(self)
- 451 input_tensors=self._input_tensors,
- 452 output_tensors=self._output_tensors,
- --> 453 **converter_kwargs)
- 454 else:
- 455 # Graphs without valid tensors cannot be loaded into tf.Session since they
- ~/.local/lib/python3.5/site-packages/tensorflow/contrib/lite/python/convert.py in toco_convert_impl(input_data, input_tensors, output_tensors, *args, **kwargs)
- 340 data = toco_convert_protos(model_flags.SerializeToString(),
- 341 toco_flags.SerializeToString(),
- --> 342 input_data.SerializeToString())
- 343 return data
- 344
- ~/.local/lib/python3.5/site-packages/tensorflow/contrib/lite/python/convert.py in toco_convert_protos(model_flags_str, toco_flags_str, input_data_str)
- 133 else:
- 134 raise RuntimeError("TOCO failed see console for info.\n%s\n%s\n" %
- --> 135 (stdout, stderr))
- 136
- 137
- RuntimeError: TOCO failed see console for info.
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement