# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Simple image classification with Inception.

Runs image classification with Inception trained on the ImageNet 2012
Challenge data set.

This program creates a graph from a saved GraphDef protocol buffer,
and runs inference on an input JPEG image. It outputs human readable
strings of the top 5 predictions along with their probabilities.

Change the --image_file argument to any jpg image to compute a
classification of that image.

Please see the tutorial and website for a detailed description of how
to use this script to perform image recognition.

https://tensorflow.org/tutorials/image_recognition/
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import argparse
import os.path
import re
import sys
import tarfile
from tkinter import filedialog
from tkinter import *

import numpy as np
from six.moves import urllib
import tensorflow as tf
import pymongo

FLAGS = None

# pylint: disable=line-too-long
DATA_URL = 'http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz'
# pylint: enable=line-too-long


LABELS_PREDICTED = ''
PERCENTAGE = ''


class NodeLookup(object):
  """Converts integer node ID's to human readable labels."""

  def __init__(self,
               label_lookup_path=None,
               uid_lookup_path=None):
    if not label_lookup_path:
      label_lookup_path = os.path.join(
          FLAGS.model_dir, 'imagenet_2012_challenge_label_map_proto.pbtxt')
    if not uid_lookup_path:
      uid_lookup_path = os.path.join(
          FLAGS.model_dir, 'imagenet_synset_to_human_label_map.txt')
    self.node_lookup = self.load(label_lookup_path, uid_lookup_path)

  def load(self, label_lookup_path, uid_lookup_path):
    """Loads a human readable English name for each softmax node.

    Args:
      label_lookup_path: string UID to integer node ID.
      uid_lookup_path: string UID to human-readable string.

    Returns:
      dict from integer node ID to human-readable string.
    """
    if not tf.gfile.Exists(uid_lookup_path):
      tf.logging.fatal('File does not exist %s', uid_lookup_path)
    if not tf.gfile.Exists(label_lookup_path):
      tf.logging.fatal('File does not exist %s', label_lookup_path)

    # Loads mapping from string UID to human-readable string.
    proto_as_ascii_lines = tf.gfile.GFile(uid_lookup_path).readlines()
    uid_to_human = {}
    p = re.compile(r'[n\d]*[ \S,]*')
    for line in proto_as_ascii_lines:
      parsed_items = p.findall(line)
      uid = parsed_items[0]
      human_string = parsed_items[2]
      uid_to_human[uid] = human_string

    # Loads mapping from string UID to integer node ID.
    node_id_to_uid = {}
    proto_as_ascii = tf.gfile.GFile(label_lookup_path).readlines()
    for line in proto_as_ascii:
      if line.startswith('  target_class:'):
        target_class = int(line.split(': ')[1])
      if line.startswith('  target_class_string:'):
        target_class_string = line.split(': ')[1]
        node_id_to_uid[target_class] = target_class_string[1:-2]

    # Loads the final mapping of integer node ID to human-readable string.
    node_id_to_name = {}
    for key, val in node_id_to_uid.items():
      if val not in uid_to_human:
        tf.logging.fatal('Failed to locate: %s', val)
      name = uid_to_human[val]
      node_id_to_name[key] = name

    return node_id_to_name

  def id_to_string(self, node_id):
    if node_id not in self.node_lookup:
      return ''
    return self.node_lookup[node_id]

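# For example (the node ID and label text below are purely illustrative; the
# real strings come from imagenet_synset_to_human_label_map.txt in model_dir):
#   lookup = NodeLookup()
#   lookup.id_to_string(top_node_id)  # e.g. 'giant panda, panda, panda bear, ...'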

def create_graph():
  """Creates a graph from the saved GraphDef file."""
  # Creates graph from saved graph_def.pb.
  with tf.gfile.FastGFile(os.path.join(
      FLAGS.model_dir, 'classify_image_graph_def.pb'), 'rb') as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())
    _ = tf.import_graph_def(graph_def, name='')


def run_inference_on_image(image):
  """Runs inference on an image.

  Args:
    image: Image file name.

  Returns:
    A list [label, score] for the top prediction.
  """
  if not tf.gfile.Exists(image):
    tf.logging.fatal('File does not exist %s', image)
  image_data = tf.gfile.FastGFile(image, 'rb').read()

  # Creates graph from saved GraphDef.
  create_graph()

  with tf.Session() as sess:
    # Some useful tensors:
    # 'softmax:0': A tensor containing the normalized prediction across
    #   1000 labels.
    # 'pool_3:0': A tensor containing the next-to-last layer containing 2048
    #   float description of the image.
    # 'DecodeJpeg/contents:0': A tensor containing a string providing JPEG
    #   encoding of the image.
    # Runs the softmax tensor by feeding the image_data as input to the graph.
    softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
    predictions = sess.run(softmax_tensor,
                           {'DecodeJpeg/contents:0': image_data})
    predictions = np.squeeze(predictions)

    # Creates node ID --> English string lookup.
    node_lookup = NodeLookup()

    top_k = predictions.argsort()[-FLAGS.num_top_predictions:][::-1]

    # Keep the label and score of the best prediction. These are local
    # variables; the module-level LABELS_PREDICTED / PERCENTAGE placeholders
    # are never modified.
    labels_predicted = node_lookup.id_to_string(top_k[0])
    percentage = predictions[top_k[0]]

    for node_id in top_k:
      human_string = node_lookup.id_to_string(node_id)
      score = predictions[node_id]
      print('%s (score = %.5f)' % (human_string, score))

    my_array = [labels_predicted, percentage]
    return my_array

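
# Example use (file path and numbers are purely illustrative):
#   label, score = run_inference_on_image('/tmp/imagenet/cropped_panda.jpg')
#   # label -> 'giant panda, panda, panda bear, ...'; score -> roughly 0.89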

def maybe_download_and_extract():
  """Download and extract model tar file."""
  dest_directory = FLAGS.model_dir
  if not os.path.exists(dest_directory):
    os.makedirs(dest_directory)
  filename = DATA_URL.split('/')[-1]
  filepath = os.path.join(dest_directory, filename)
  if not os.path.exists(filepath):
    def _progress(count, block_size, total_size):
      sys.stdout.write('\r>> Downloading %s %.1f%%' % (
          filename, float(count * block_size) / float(total_size) * 100.0))
      sys.stdout.flush()
    filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath, _progress)
    print()
    statinfo = os.stat(filepath)
    print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
  tarfile.open(filepath, 'r:gz').extractall(dest_directory)

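# After extraction, FLAGS.model_dir is expected to contain at least:
#   classify_image_graph_def.pb                     (frozen GraphDef)
#   imagenet_synset_to_human_label_map.txt          (synset UID -> label text)
#   imagenet_2012_challenge_label_map_proto.pbtxt   (node ID -> synset UID)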

def normal_execution(image):
  """Downloads the model if needed, then classifies the given image."""
  maybe_download_and_extract()
  # --image_file, when provided, takes precedence over the path passed in.
  # For an absolute path coming from the file dialog, os.path.join simply
  # returns that path unchanged.
  image = (FLAGS.image_file if FLAGS.image_file
           else os.path.join(FLAGS.model_dir, image))
  my_array = run_inference_on_image(image)
  return my_array


def main(_):
  def callback():
    # Let the user pick a JPEG, classify it, and show the result in the window.
    root.filename = filedialog.askopenfilename(
        initialdir='/', title='Select file',
        filetypes=(('jpeg files', '*.jpg'), ('all files', '*.*')))
    print(root.filename)

    my_array = normal_execution(root.filename)

    print(my_array[0])
    print(my_array[1])

    w = Label(root, text=my_array[0])
    w.pack()

    s = Label(root, text=my_array[1])
    s.pack()

    # Store the result in the local MongoDB instance.
    myclient = pymongo.MongoClient('mongodb://localhost:27017/')
    mydb = myclient['ia']
    mycol = mydb['collection_ia']

    mycol.insert_one({'CheminImage': root.filename,
                      'Label': my_array[0],
                      'Score': str(my_array[1])})
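
    # For reference, the stored document ends up looking roughly like this
    # (values are illustrative):
    #   {'CheminImage': '/home/user/images/cat.jpg',
    #    'Label': 'tabby, tabby cat',
    #    'Score': '0.71234'}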

  root = Tk()

  b = Button(root, text='Select an image', command=callback)
  b.pack()
  mainloop()


if __name__ == '__main__':
  parser = argparse.ArgumentParser()
  # classify_image_graph_def.pb:
  #   Binary representation of the GraphDef protocol buffer.
  # imagenet_synset_to_human_label_map.txt:
  #   Map from synset ID to a human readable string.
  # imagenet_2012_challenge_label_map_proto.pbtxt:
  #   Text representation of a protocol buffer mapping a label to synset ID.
  parser.add_argument(
      '--model_dir',
      type=str,
      default='/tmp/imagenet',
      help="""\
      Path to classify_image_graph_def.pb,
      imagenet_synset_to_human_label_map.txt, and
      imagenet_2012_challenge_label_map_proto.pbtxt.\
      """
  )
  parser.add_argument(
      '--image_file',
      type=str,
      default='',
      help='Absolute path to image file.'
  )
  parser.add_argument(
      '--num_top_predictions',
      type=int,
      default=5,
      help='Display this many predictions.'
  )
  FLAGS, unparsed = parser.parse_known_args()
  tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
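
# When the script runs end to end, the console output for each selected image
# resembles the following (labels and scores are illustrative and depend on
# the image):
#   giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca (score = 0.89107)
#   indri, indris, Indri indri, Indri brevicaudatus (score = 0.00779)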