Guest User

Untitled

a guest
Apr 23rd, 2024
46
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
text 8.25 KB | None | 0 0
  1. [gw3] linux -- Python 3.10.12 /usr/bin/python3.10
  2.  
  3. test_self = <test.OnnxBackendNodeModelTest testMethod=test_unique_sorted_with_axis_3d_cpu>
  4. device = 'CPU', kwargs = {}
  5. model_pb_path = '/usr/local/lib/python3.10/dist-packages/onnx/backend/test/data/node/test_unique_sorted_with_axis_3d/model.onnx'
  6. model_dir = '/usr/local/lib/python3.10/dist-packages/onnx/backend/test/data/node/test_unique_sorted_with_axis_3d'
  7. use_dummy = False
  8. model = ir_version: 6
  9. producer_name: "backend-test"
  10. graph {
  11. node {
  12. input: "X"
  13. output: "Y"
  14. output: "indices"
  15. ... dim {
  16. dim_value: 3
  17. }
  18. }
  19. }
  20. }
  21. }
  22. }
  23. opset_import {
  24. domain: ""
  25. version: 11
  26. }
  27.  
  28. prepared_model = <inference_backend.EndiannessAwareExecutionSession object at 0x7f49cdc6bb50>
  29.  
  30. def run(test_self: Any, device: str, **kwargs) -> None:
  31. if model_test.url is not None and model_test.url.startswith(
  32. "onnx/backend/test/data/light/"
  33. ):
  34. # testing local files
  35. model_pb_path = os.path.normpath(
  36. os.path.join(
  37. os.path.dirname(__file__),
  38. "..",
  39. "..",
  40. "..",
  41. "..",
  42. model_test.url,
  43. )
  44. )
  45. if not os.path.exists(model_pb_path):
  46. raise FileNotFoundError(f"Unable to find model {model_pb_path!r}.")
  47. onnx_home = os.path.expanduser(
  48. os.getenv("ONNX_HOME", os.path.join("~", ".onnx"))
  49. )
  50. models_dir = os.getenv(
  51. "ONNX_MODELS", os.path.join(onnx_home, "models", "light")
  52. )
  53. model_dir: str = os.path.join(models_dir, model_test.model_name)
  54. if not os.path.exists(model_dir):
  55. os.makedirs(model_dir)
  56. use_dummy = True
  57. else:
  58. if model_test.model_dir is None:
  59. model_dir = self.prepare_model_data(model_test)
  60. else:
  61. model_dir = model_test.model_dir
  62. model_pb_path = os.path.join(model_dir, "model.onnx")
  63. use_dummy = False
  64.  
  65. if not ONNX_ML and "ai_onnx_ml" in model_dir:
  66. return
  67.  
  68. model = onnx.load(model_pb_path)
  69. model_marker[0] = model
  70. if (
  71. hasattr(self.backend, "is_compatible")
  72. and callable(self.backend.is_compatible)
  73. and not self.backend.is_compatible(model)
  74. ):
  75. raise unittest.SkipTest("Not compatible with backend")
  76.  
  77. prepared_model = self.backend.prepare(model, device, **kwargs)
  78. assert prepared_model is not None
  79.  
  80. if use_dummy:
  81. # When the backend test goes through a test involving a
  82. # model stored in onnx/backend/test/data/light,
  83. # this function generates expected output coming
  84. # from ReferenceEvaluator run with random inputs.
  85. # A couple of models include many Conv operators and the
  86. # python implementation is slow (such as test_bvlc_alexnet).
  87. with open(model_pb_path, "rb") as f:
  88. onx = onnx.load(f)
  89.  
  90. test_data_set = os.path.join(model_dir, "test_data_set_0")
  91. if not os.path.exists(test_data_set):
  92. os.mkdir(test_data_set)
  93. feeds = {}
  94. inits = {i.name for i in onx.graph.initializer}
  95. n_input = 0
  96. inputs = []
  97. for i in range(len(onx.graph.input)):
  98. if onx.graph.input[i].name in inits:
  99. continue
  100. name = os.path.join(test_data_set, f"input_{n_input}.pb")
  101. inputs.append(name)
  102. n_input += 1
  103. x = onx.graph.input[i]
  104. value = self.generate_dummy_data(
  105. x, seed=0, name=model_test.model_name, random=False
  106. )
  107. feeds[x.name] = value
  108. with open(name, "wb") as f:
  109. f.write(onnx.numpy_helper.from_array(value).SerializeToString())
  110.  
  111. # loads expected output if any available
  112. prefix = os.path.splitext(model_pb_path)[0]
  113. expected_outputs = []
  114. for i in range(len(onx.graph.output)):
  115. name = f"{prefix}_output_{i}.pb"
  116. if os.path.exists(name):
  117. expected_outputs.append(name)
  118. continue
  119. expected_outputs = None
  120. break
  121.  
  122. if expected_outputs is None:
  123. ref = onnx.reference.ReferenceEvaluator(onx)
  124. outputs = ref.run(None, feeds)
  125. for i, o in enumerate(outputs):
  126. name = os.path.join(test_data_set, f"output_{i}.pb")
  127. with open(name, "wb") as f:
  128. f.write(onnx.numpy_helper.from_array(o).SerializeToString())
  129. else:
  130. for i, o in enumerate(expected_outputs):
  131. name = os.path.join(test_data_set, f"output_{i}.pb")
  132. shutil.copy(o, name)
  133. else:
  134. # TODO after converting all npz files to protobuf, we can delete this.
  135. for test_data_npz in glob.glob(
  136. os.path.join(model_dir, "test_data_*.npz")
  137. ):
  138. test_data = np.load(test_data_npz, encoding="bytes")
  139. inputs = list(test_data["inputs"])
  140. outputs = list(prepared_model.run(inputs))
  141. ref_outputs = test_data["outputs"]
  142. self.assert_similar_outputs(
  143. ref_outputs, outputs, rtol=model_test.rtol, atol=model_test.atol
  144. )
  145.  
  146. for test_data_dir in glob.glob(os.path.join(model_dir, "test_data_set*")):
  147. inputs = []
  148. inputs_num = len(glob.glob(os.path.join(test_data_dir, "input_*.pb")))
  149. for i in range(inputs_num):
  150. input_file = os.path.join(test_data_dir, f"input_{i}.pb")
  151. self._load_proto(input_file, inputs, model.graph.input[i].type)
  152. ref_outputs = []
  153. ref_outputs_num = len(
  154. glob.glob(os.path.join(test_data_dir, "output_*.pb"))
  155. )
  156. for i in range(ref_outputs_num):
  157. output_file = os.path.join(test_data_dir, f"output_{i}.pb")
  158. self._load_proto(
  159. output_file, ref_outputs, model.graph.output[i].type
  160. )
  161. > outputs = list(prepared_model.run(inputs))
  162.  
  163. /usr/local/lib/python3.10/dist-packages/onnx/backend/test/runner/__init__.py:460:
  164. _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
  165. Debug/inference_backend.py:3765: in run
  166. outputs = JniExecutionSession(self.exec_name, inputs)
  167. Debug/inference_backend.py:3579: in JniExecutionSession
  168. procStdout = json.loads(
  169. /usr/lib/python3.10/json/__init__.py:346: in loads
  170. return _default_decoder.decode(s)
  171. /usr/lib/python3.10/json/decoder.py:337: in decode
  172. obj, end = self.raw_decode(s, idx=_w(s, 0).end())
  173. _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
  174.  
  175. self = <json.decoder.JSONDecoder object at 0x7f49d5d58a00>, s = '', idx = 0
  176.  
  177. def raw_decode(self, s, idx=0):
  178. """Decode a JSON document from ``s`` (a ``str`` beginning with
  179. a JSON document) and return a 2-tuple of the Python
  180. representation and the index in ``s`` where the document ended.
  181.  
  182. This can be used to decode a JSON document from a string that may
  183. have extraneous data at the end.
  184.  
  185. """
  186. try:
  187. obj, end = self.scan_once(s, idx)
  188. except StopIteration as err:
  189. > raise JSONDecodeError("Expecting value", s, err.value) from None
  190. E json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)
  191.  
  192. /usr/lib/python3.10/json/decoder.py:355: JSONDecodeError
  193. ----------------------------- Captured stderr call -----------------------------
  194. ['java', '-cp', '/scratch/tmp/workspace/onnx-mlir/build/test/backend/Debug/check-onnx-backend-jni/test_unique_sorted_with_axis_3d/test_unique_sorted_with_axis_3d.jar:/usr/share/java/jsoniter-0.9.23.jar', 'com.ibm.onnxmlir.OMRunner']
  195. free(): double free detected in tcache 2
Advertisement
Add Comment
Please, Sign In to add comment