mlprodict
mlprodict copied to clipboard
Error on mlprodict runtime vs onnxruntime
I have a model that runs properly on onnxruntime v1.15.1 and fails on mlprodict (version 0.9.1887) runtime. Stacktrace:
AttributeError Traceback (most recent call last) Cell In[20], line 1 ----> 1 res = ses.mlp_ses.run2onnx(inputs=input_dict) 2 res
File c:\Users\aviy\AppData\Local\Programs\Python\Python311\Lib\site-packages\mlprodict\onnxrt\onnx_inference.py:897, in OnnxInference.run2onnx(self, inputs, verbose, fLOG, as_parameter, suffix, param_name, node_type, domain, domain_opset, attributes) 843 def run2onnx(self, inputs, verbose=0, fLOG=None, 844 as_parameter=True, suffix='_DBG', 845 param_name=None, node_type='DEBUG', 846 domain='DEBUG', domain_opset=1, 847 attributes=None): 848 """ 849 Executes the graphs with the given inputs, then adds the intermediate 850 results into ONNX nodes in the original graph. Once saved, it can be (...) 895 .. versionadded:: 0.7 896 """ --> 897 intermediate = self.run(inputs, verbose=verbose, fLOG=fLOG, 898 intermediate=True, attributes=attributes) 899 for name in self.input_names: 900 del intermediate[name]
File c:\Users\aviy\AppData\Local\Programs\Python\Python311\Lib\site-packages\mlprodict\onnxrt\onnx_inference.py:828, in OnnxInference.run(self, inputs, clean_right_away, intermediate, verbose, node_time, overwrite_types, yield_ops, fLOG, context, attributes) 824 if self.inplace: 825 raise RuntimeError( # pragma: no cover 826 "inplace must be False if intermediate is True, a container " 827 "might be used by several nodes.") --> 828 return self._run(inputs, clean_right_away=False, # pylint: disable=E1123 829 intermediate=intermediate, 830 verbose=verbose, node_time=node_time, 831 overwrite_types=overwrite_types, 832 yield_ops=yield_ops, fLOG=fLOG, 833 context=context, attributes=attributes) 834 if overwrite_types is not None: 835 raise RuntimeError( # pragma: no cover 836 "overwrite_types is not used if intermediate is False.")
File c:\Users\aviy\AppData\Local\Programs\Python\Python311\Lib\site-packages\mlprodict\onnxrt\onnx_inference.py:1004, in OnnxInference.run_sequence_runtime(self, inputs, clean_right_away, intermediate, verbose, node_time, overwrite_types, yield_ops, fLOG, context, attributes) 1002 else: 1003 for node in self.sequence: -> 1004 node.run(values, attributes=attributes) 1005 else: 1006 def dispsimple(arr):
File c:\Users\aviy\AppData\Local\Programs\Python\Python311\Lib\site-packages\mlprodict\onnxrt\onnx_inference_node.py:436, in OnnxInferenceNode.run(self, values, attributes, verbose, fLOG) 432 res = self.ops_.run(*args, context=context, 433 attributes=attributes, 434 verbose=verbose, fLOG=fLOG) 435 else: --> 436 res = self.ops_.run( 437 *args, attributes=attributes, 438 verbose=verbose, fLOG=fLOG) 439 except (ValueError, TypeError) as e: 440 raise RuntimeError( # pragma: no cover 441 "Unable to run operator %r, inputs=%r." 442 "" % (type(self.ops_), self.inputs)) from e
File c:\Users\aviy\AppData\Local\Programs\Python\Python311\Lib\site-packages\mlprodict\onnxrt\ops_cpu\op_min.py:19, in Min.run(self, attributes, verbose, fLOG, *data) 17 def run(self, *data, attributes=None, verbose=0, fLOG=None): # pylint: disable=W0221 18 if len(data) == 2: ---> 19 return OpRunBinaryNumpy.run(self, *data, verbose=verbose, fLOG=fLOG) 20 if len(data) == 1: 21 if self.inplaces.get(0, False):
File c:\Users\aviy\AppData\Local\Programs\Python\Python311\Lib\site-packages\mlprodict\onnxrt\ops_cpu_op.py:542, in OpRunBinaryNum.run(self, x, y, attributes, verbose, fLOG)
538 def run(self, x, y, attributes=None, verbose=0, fLOG=None): # pylint: disable=E0202
539 """
540 Calls method _run
.
541 """
--> 542 res = OpRunBinary.run(
543 self, x, y, attributes=attributes, verbose=verbose, fLOG=fLOG)
544 if res[0].dtype != x.dtype:
545 raise RuntimeTypeError(
546 "Output type mismatch: {} != {} or {} (operator '{}')"
547 " type(x)={} type(y)={}".format(
548 x.dtype, res[0].dtype, y.dtype,
549 self.class.name, type(x), type(y)))
File c:\Users\aviy\AppData\Local\Programs\Python\Python311\Lib\site-packages\mlprodict\onnxrt\ops_cpu_op.py:483, in OpRunBinary.run(self, x, y, attributes, verbose, fLOG) 480 if x is None or y is None: 481 raise RuntimeError( # pragma: no cover 482 f"x and y have different dtype: {type(x)} != {type(y)} ({type(self)})") --> 483 if x.dtype != y.dtype: 484 raise RuntimeTypeError( 485 "Input type mismatch: {} != {} (operator '{}', shapes {}, {})".format( 486 x.dtype, y.dtype, self.class.name, 487 x.shape, y.shape)) 488 try:
AttributeError: 'list' object has no attribute 'dtype'
I used mlprodict as a base to implement the class ReferenceEvaluator in the onnx package (https://onnx.ai/onnx/api/reference.html). Could you tell me if it works better with the Python runtime?
from onnx.reference import ReferenceEvaluator
ref = ReferenceEvaluator(your model)
ref.run(None, { ... })
If it fails, could you change the second line into ref = ReferenceEvaluator(your model, verbose=10)
and share the output if it can be shared?
Output with verbose=10: +I f0: float32:(1,):[202302.0] +I f1: <U9:(1,):['UP TO 15K'] +I f2: <U7:(1,):['PRIVATE'] +I f3: <U3:(1,):['QLD'] +I f4: float32:(1,):[0.0] +I f5: float32:(1,):[-0.3496617376804352] +I f6: <U10:(1,):['NO FINANCE'] +I f7: <U4:(1,):['MALE'] +I f8: <U2:(1,):['RB'] +I f9: <U3:(1,):['FWD'] +I f10: <U6:(1,):['AUS_US'] +I f11: float32:(1,):[202303.0] +I f12: float32:(1,):[202204.0] +I f13: float32:(1,):[129.08804321289062] +I f14: float32:(1,):[14.0] +I f15: float32:(1,):[0.0010000000474974513] +I f16: float32:(1,):[0.17630000412464142] +I f17: float32:(1,):[2775.0] +I f18: float32:(1,):[49.0] +I f19: float32:(1,):[0.09279999881982803] +I f20: float32:(1,):[1820.0] +I f21: float32:(1,):[0.2930999994277954] +I f22: float32:(1,):[0.0] +I f23: float32:(1,):[0.05920000001788139] Constant() -> constant_100
- constant_100: float32:(1,):[100.0] Constant() -> const_12
- const_12: float32:(1,):[12.0] Constant() -> const_05
- const_05: float32:(1,):[0.5] Min(F0, f12) -> o01
And error: AttributeError Traceback (most recent call last) Cell In[21], line 4 2 from onnx.reference import ReferenceEvaluator 3 ref = ReferenceEvaluator(onnx.load('inf_merged.onnx'), verbose=10) ----> 4 ref.run(None, input_dict)
File c:\Users\aviy\AppData\Local\Programs\Python\Python311\Lib\site-packages\onnx\reference\reference_evaluator.py:462, in ReferenceEvaluator.run(self, output_names, feed_inputs, attributes) 460 outputs = node.run(*inputs, context=results, **linked_attributes) 461 else: --> 462 outputs = node.run(*inputs, **linked_attributes) 463 for name, value in zip(node.output, outputs): 464 if isinstance(value, tuple):
File c:\Users\aviy\AppData\Local\Programs\Python\Python311\Lib\site-packages\onnx\reference\ops\op_min.py:17, in Min.run(self, *data) 15 def run(self, *data): # type: ignore 16 if len(data) == 2: ---> 17 return OpRunBinaryNumpy.run(self, *data) 18 if len(data) == 1: 19 return (data[0].copy(),)
File c:\Users\aviy\AppData\Local\Programs\Python\Python311\Lib\site-packages\onnx\reference\ops_op.py:128, in OpRunBinaryNum.run(self, x, y)
123 def run(self, x, y): # type: ignore # pylint: disable=W0221
124 """
125 Calls method OpRunBinary.run
, catches exceptions,
126 displays a longer error message.
127 """
--> 128 res = OpRunBinary.run(self, x, y)
129 if res[0].dtype != x.dtype:
130 raise RuntimeTypeError(
131 f"Output type mismatch: {x.dtype} != {res[0].dtype} or {y.dtype} "
132 f"(operator {self.class.name!r})"
133 f" type(x)={type(x)} type(y)={type(y)}"
134 )
File c:\Users\aviy\AppData\Local\Programs\Python\Python311\Lib\site-packages\onnx\reference\ops_op.py:87, in OpRunBinary.run(self, x, y) 83 if x is None or y is None: 84 raise RuntimeError( 85 f"x and y have different dtype: {type(x)} != {type(y)} ({type(self)})" 86 ) ---> 87 if x.dtype != y.dtype: 88 raise RuntimeTypeError( 89 f"Input type mismatch: {x.dtype} != {y.dtype} " 90 f"(operator '{self.class.name!r}', " 91 f"shapes {x.shape}, {y.shape})." 92 ) 93 try:
AttributeError: 'list' object has no attribute 'dtype'
Could you raise an issue on onnx repository? Most of the python runtime implemented in onnx comes from this package but it is maintained by the community as opposed to this one. So it is much more robust. I'm surprised by this line Min(F0, f12) -> o01
. This operator fails, but it should be Min(f0, f12)
as F0
does not exist as an initializer or a result yet. The error says f0 or f12 is not a numpy array, but that does not seem to be the case.