Python: repeated inference runs produce inconsistent results
Platform (include target platform as well if cross-compiling):
- OS: macOS 11.7
- Python version: 3.10.2
- MNN version: 2.8.3 from pypi
The script is as follows:
# Copyright @ 2019 Alibaba. All rights reserved.
# Created by ruhuan on 2019.09.09
""" python demo usage about MNN API """
from __future__ import print_function
import numpy as np
import MNN
import sys
# %%
""" inference mobilenet_v1 using a specific picture """
interpreter = MNN.Interpreter("./model.mnn")
# interpreter.setCacheFile('.tempcache')
# %%
def getOutput(blobName):
    config = {}
    config['precision'] = 'low'
    config['saveTensors'] = (blobName,)
    # create runtime and session
    runtimeinfo, exists = MNN.Interpreter.createRuntime((config,))
    print(runtimeinfo, exists)
    print("===")
    session = interpreter.createSession(config, runtimeinfo)
    # show session info
    # print('memory_info: %fMB' % interpreter.getSessionInfo(session, 0))
    # print('flops_info: %fM' % interpreter.getSessionInfo(session, 1))
    # print('backend_info: %d' % interpreter.getSessionInfo(session, 2))
    in0 = interpreter.getSessionInput(session, "source_img")
    in1 = interpreter.getSessionInput(session, "ref_img")
    in2 = interpreter.getSessionInput(session, "audio_feature")
    # print(in0.getDimensionType() == MNN.Tensor_DimensionType_Caffe)
    # print(in1.getDimensionType() == MNN.Tensor_DimensionType_Caffe)
    # print(in2.getDimensionType() == MNN.Tensor_DimensionType_Caffe)
    # resize the inputs to the expected shapes
    interpreter.resizeTensor(in0, (1, 3, 208, 160))
    interpreter.resizeTensor(in1, (1, 15, 208, 160))
    interpreter.resizeTensor(in2, (1, 29, 5))
    interpreter.resizeSession(session)
    # fill the inputs with all-ones dummy data
    oin0 = np.ones((1, 3, 208, 160), dtype=np.float32)
    oin1 = np.ones((1, 15, 208, 160), dtype=np.float32)
    oin2 = np.ones((1, 29, 5), dtype=np.float32)
    tmp_input0 = MNN.Tensor((1, 3, 208, 160), MNN.Halide_Type_Float, oin0, MNN.Tensor_DimensionType_Caffe)
    tmp_input1 = MNN.Tensor((1, 15, 208, 160), MNN.Halide_Type_Float, oin1, MNN.Tensor_DimensionType_Caffe)
    tmp_input2 = MNN.Tensor((1, 29, 5), MNN.Halide_Type_Float, oin2, MNN.Tensor_DimensionType_Caffe)
    in0.copyFrom(tmp_input0)
    in1.copyFrom(tmp_input1)
    in2.copyFrom(tmp_input2)
    # run inference and copy the requested blob back to the host
    interpreter.runSession(session)
    o_tensor = interpreter.getSessionOutput(session, blobName)
    o_shape = o_tensor.getShape()
    print(o_shape)
    output = MNN.Tensor(o_shape, MNN.Halide_Type_Float, np.ones(o_shape).astype(np.float32), MNN.Tensor_DimensionType_Caffe)
    o_tensor.copyToHostTensor(output)
    return output.getNumpyData()
def analysis(arr):
print(f"arr has nan value: {np.isnan(arr).any()}")
print(f"arr max value: {max(arr.flatten())}")
print(f"arr min value: {min(arr.flatten())}")
return arr
#%%
o1 = analysis(getOutput('/trans_conv/trans_conv.8/relu/Relu_output_0')) # nan
Abnormal behavior
Running o1 = analysis(getOutput('/trans_conv/trans_conv.8/relu/Relu_output_0')) multiple times produces inconsistent inference results; it can be reproduced within about ten runs.
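For reference, a minimal reproduction sketch that reuses the getOutput helper from the script above; the loop count and the comparison against the first run are assumptions of mine, not part of the original report:

# Hypothetical reproduction loop: call getOutput several times with the same
# all-ones inputs and compare each run against the first one. Any non-zero
# difference (or NaN) indicates non-deterministic inference.
blob = '/trans_conv/trans_conv.8/relu/Relu_output_0'
baseline = getOutput(blob)
for i in range(10):
    out = getOutput(blob)
    print(f"run {i}: max |diff| vs first run = {np.abs(out - baseline).max()}, "
          f"has nan = {np.isnan(out).any()}")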
Model file
This is my first attempt at deploying a mobile-side model with MNN. If anything in the Python script above is unreasonable, please point that out as well. Thanks.
Is this Python running on the mobile device?
After converting numpy data to MNN, it is recommended to make a copy. Also, it is recommended not to use the session API; switch to the module API instead, see pymnn/examples/MNNExpr/mnn_numpy_cv_demo.py for reference.
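For illustration, a minimal sketch of what the copy suggestion might look like for one input of the session-API script above; the exact placement of the copy is an assumption, not a confirmed fix:

# Sketch of the "copy the numpy data before handing it to MNN" suggestion:
# pass a contiguous, copied float32 buffer into MNN.Tensor so MNN does not
# end up holding a view into memory that Python may later reuse or free.
oin0 = np.ones((1, 3, 208, 160), dtype=np.float32)
tmp_input0 = MNN.Tensor((1, 3, 208, 160), MNN.Halide_Type_Float,
                        np.ascontiguousarray(oin0).copy(),
                        MNN.Tensor_DimensionType_Caffe)
in0.copyFrom(tmp_input0)  # in0 = interpreter.getSessionInput(session, "source_img")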
> Is this Python running on the mobile device?
No, it is Python running on macOS 11.7 Big Sur. I am currently at the stage of aligning the ONNX and MNN inference results.
> After converting numpy data to MNN, it is recommended to make a copy. Also, it is recommended not to use the session API; switch to the module API instead, see pymnn/examples/MNNExpr/mnn_numpy_cv_demo.py for reference.
OK, I will try that first.
After switching to the module API, inference crashes.
Model conversion command:
~/code/3rd_party/aarch64/MNN/build/MNNConvert -f ONNX --modelFile model.onnx --MNNModel model.mnn --bizCode MNN --debug
Module API code:
# Copyright @ 2019 Alibaba. All rights reserved.
# Created by MNN on 2021.11.24
""" python demo usage about MNN API """
from __future__ import print_function
import MNN.numpy as np
import MNN
import MNN.cv as cv2
import sys
def inference(blobName):
    """ run model.mnn via the module API and fetch the given output blob """
    net = MNN.nn.load_module_from_file("./model.mnn", ["source_img", "ref_img", "audio_feature"], [blobName])
    # all-ones dummy inputs
    oin0 = np.ones((1, 208, 160, 3), dtype=np.float32)
    oin1 = np.ones((1, 208, 160, 15), dtype=np.float32)
    oin2 = np.ones((1, 5, 29), dtype=np.float32)
    in0 = MNN.expr.convert(oin0, MNN.expr.NC4HW4)
    in1 = MNN.expr.convert(oin1, MNN.expr.NC4HW4)
    in2 = MNN.expr.convert(oin2, MNN.expr.NC4HW4)
    # inference
    output_var = net.forward((in0, in1, in2))
    # the output from net may be NC4HW4, turn to linear layout
    output_var = MNN.expr.convert(output_var, MNN.expr.NHWC)

if __name__ == "__main__":
    # inference('/trans_conv/trans_conv.10/relu/Relu_output_0')
    inference('1002')