[gw3] linux -- Python 3.10.12 /usr/bin/python3.10

test_self = <test.OnnxBackendNodeModelTest testMethod=test_unique_sorted_with_axis_3d_cpu>
device = 'CPU', kwargs = {}
model_pb_path = '/usr/local/lib/python3.10/dist-packages/onnx/backend/test/data/node/test_unique_sorted_with_axis_3d/model.onnx'
model_dir = '/usr/local/lib/python3.10/dist-packages/onnx/backend/test/data/node/test_unique_sorted_with_axis_3d'
use_dummy = False
model = ir_version: 6
producer_name: "backend-test"
graph {
  node {
    input: "X"
    output: "Y"
    output: "indices"
          ... dim {
            dim_value: 3
          }
        }
      }
    }
  }
}
opset_import {
  domain: ""
  version: 11
}
prepared_model = <inference_backend.EndiannessAwareExecutionSession object at 0x7f49cdc6bb50>
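For context: test_unique_sorted_with_axis_3d_cpu is not a hand-written test; it is generated by onnx's backend-test framework, which a backend registers roughly as in the sketch below (my_backend is a placeholder for whatever Backend class onnx-mlir's inference_backend exposes):

    import onnx.backend.test

    # Minimal registration sketch: BackendTest enumerates the node tests
    # shipped under onnx/backend/test/data and turns each one into a
    # unittest case named test_<model>_<device>, e.g.
    # test_unique_sorted_with_axis_3d_cpu. my_backend is a placeholder.
    backend_test = onnx.backend.test.BackendTest(my_backend, __name__)
    globals().update(backend_test.enable_report().test_cases)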
def run(test_self: Any, device: str, **kwargs) -> None:
    if model_test.url is not None and model_test.url.startswith(
        "onnx/backend/test/data/light/"
    ):
        # testing local files
        model_pb_path = os.path.normpath(
            os.path.join(
                os.path.dirname(__file__),
                "..",
                "..",
                "..",
                "..",
                model_test.url,
            )
        )
        if not os.path.exists(model_pb_path):
            raise FileNotFoundError(f"Unable to find model {model_pb_path!r}.")
        onnx_home = os.path.expanduser(
            os.getenv("ONNX_HOME", os.path.join("~", ".onnx"))
        )
        models_dir = os.getenv(
            "ONNX_MODELS", os.path.join(onnx_home, "models", "light")
        )
        model_dir: str = os.path.join(models_dir, model_test.model_name)
        if not os.path.exists(model_dir):
            os.makedirs(model_dir)
        use_dummy = True
    else:
        if model_test.model_dir is None:
            model_dir = self.prepare_model_data(model_test)
        else:
            model_dir = model_test.model_dir
        model_pb_path = os.path.join(model_dir, "model.onnx")
        use_dummy = False
    if not ONNX_ML and "ai_onnx_ml" in model_dir:
        return
    model = onnx.load(model_pb_path)
    model_marker[0] = model
    if (
        hasattr(self.backend, "is_compatible")
        and callable(self.backend.is_compatible)
        and not self.backend.is_compatible(model)
    ):
        raise unittest.SkipTest("Not compatible with backend")
    prepared_model = self.backend.prepare(model, device, **kwargs)
    assert prepared_model is not None
    if use_dummy:
        # When the backend test goes through a test involving a
        # model stored in onnx/backend/test/data/light,
        # this function generates the expected output by running
        # ReferenceEvaluator with random inputs.
        # A couple of models include many Conv operators and the
        # Python implementation is slow (such as test_bvlc_alexnet).
        with open(model_pb_path, "rb") as f:
            onx = onnx.load(f)
        test_data_set = os.path.join(model_dir, "test_data_set_0")
        if not os.path.exists(test_data_set):
            os.mkdir(test_data_set)
        feeds = {}
        inits = {i.name for i in onx.graph.initializer}
        n_input = 0
        inputs = []
        for i in range(len(onx.graph.input)):
            if onx.graph.input[i].name in inits:
                continue
            name = os.path.join(test_data_set, f"input_{n_input}.pb")
            inputs.append(name)
            n_input += 1
            x = onx.graph.input[i]
            value = self.generate_dummy_data(
                x, seed=0, name=model_test.model_name, random=False
            )
            feeds[x.name] = value
            with open(name, "wb") as f:
                f.write(onnx.numpy_helper.from_array(value).SerializeToString())
        # loads expected output if any available
        prefix = os.path.splitext(model_pb_path)[0]
        expected_outputs = []
        for i in range(len(onx.graph.output)):
            name = f"{prefix}_output_{i}.pb"
            if os.path.exists(name):
                expected_outputs.append(name)
                continue
            expected_outputs = None
            break
        if expected_outputs is None:
            ref = onnx.reference.ReferenceEvaluator(onx)
            outputs = ref.run(None, feeds)
            for i, o in enumerate(outputs):
                name = os.path.join(test_data_set, f"output_{i}.pb")
                with open(name, "wb") as f:
                    f.write(onnx.numpy_helper.from_array(o).SerializeToString())
        else:
            for i, o in enumerate(expected_outputs):
                name = os.path.join(test_data_set, f"output_{i}.pb")
                shutil.copy(o, name)
    else:
        # TODO after converting all npz files to protobuf, we can delete this.
        for test_data_npz in glob.glob(
            os.path.join(model_dir, "test_data_*.npz")
        ):
            test_data = np.load(test_data_npz, encoding="bytes")
            inputs = list(test_data["inputs"])
            outputs = list(prepared_model.run(inputs))
            ref_outputs = test_data["outputs"]
            self.assert_similar_outputs(
                ref_outputs, outputs, rtol=model_test.rtol, atol=model_test.atol
            )
    for test_data_dir in glob.glob(os.path.join(model_dir, "test_data_set*")):
        inputs = []
        inputs_num = len(glob.glob(os.path.join(test_data_dir, "input_*.pb")))
        for i in range(inputs_num):
            input_file = os.path.join(test_data_dir, f"input_{i}.pb")
            self._load_proto(input_file, inputs, model.graph.input[i].type)
        ref_outputs = []
        ref_outputs_num = len(
            glob.glob(os.path.join(test_data_dir, "output_*.pb"))
        )
        for i in range(ref_outputs_num):
            output_file = os.path.join(test_data_dir, f"output_{i}.pb")
            self._load_proto(
                output_file, ref_outputs, model.graph.output[i].type
            )
>           outputs = list(prepared_model.run(inputs))
/usr/local/lib/python3.10/dist-packages/onnx/backend/test/runner/__init__.py:460:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
Debug/inference_backend.py:3765: in run
    outputs = JniExecutionSession(self.exec_name, inputs)
Debug/inference_backend.py:3579: in JniExecutionSession
    procStdout = json.loads(
/usr/lib/python3.10/json/__init__.py:346: in loads
    return _default_decoder.decode(s)
/usr/lib/python3.10/json/decoder.py:337: in decode
    obj, end = self.raw_decode(s, idx=_w(s, 0).end())
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <json.decoder.JSONDecoder object at 0x7f49d5d58a00>, s = '', idx = 0

    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` beginning with
        a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration as err:
>           raise JSONDecodeError("Expecting value", s, err.value) from None
E           json.decoder.JSONDecodeError: Expecting value: line 1 column 1 (char 0)

/usr/lib/python3.10/json/decoder.py:355: JSONDecodeError
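This JSONDecodeError is exactly what json.loads raises for an empty string (note s = '' in the frame above): the Java subprocess emitted nothing on stdout before the runner tried to parse it. A one-line reproduction:

    import json

    # Parsing an empty string reproduces the error verbatim: the decoder
    # finds no JSON value at line 1, column 1.
    try:
        json.loads("")
    except json.JSONDecodeError as e:
        print(e)  # Expecting value: line 1 column 1 (char 0)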
----------------------------- Captured stderr call -----------------------------
['java', '-cp', '/scratch/tmp/workspace/onnx-mlir/build/test/backend/Debug/check-onnx-backend-jni/test_unique_sorted_with_axis_3d/test_unique_sorted_with_axis_3d.jar:/usr/share/java/jsoniter-0.9.23.jar', 'com.ibm.onnxmlir.OMRunner']
free(): double free detected in tcache 2
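So the actual failure is the double free inside the native JNI runner (com.ibm.onnxmlir.OMRunner); the empty stdout and the JSONDecodeError are downstream symptoms. A defensive sketch (hypothetical helper; the real JniExecutionSession in Debug/inference_backend.py may differ) that would report the crash directly instead of failing inside the JSON decoder:

    import json
    import subprocess

    def jni_execution_session(cmd):
        # Hypothetical guard: run the java command, and fail loudly if the
        # process died (e.g. from the double free) or printed no JSON,
        # rather than handing an empty string to json.loads.
        proc = subprocess.run(cmd, capture_output=True, text=True)
        if proc.returncode != 0 or not proc.stdout.strip():
            raise RuntimeError(
                f"JNI runner exited with code {proc.returncode}; "
                f"stderr: {proc.stderr.strip()!r}"
            )
        return json.loads(proc.stdout)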