How to export the insightface MXNet model to an ONNX model that matches the ONNX model provided for download?

flazerain opened this issue 5 years ago · 6 comments

I exported the insightface MXNet model with MXNet 1.5 and ONNX 1.3 (with the spatial=1 attribute commented out), but I cannot build a TensorRT engine with TensorRT 6. The errors are:

[TensorRT] ERROR: (Unnamed Layer* 7) [Parametric ReLU]: slope tensor must be unidirectional broadcastable to input tensor
[TensorRT] ERROR: Network must have at least one output

I also found that the exported ONNX network structure is different from the ONNX model provided for download.

How can I export the ArcFace MXNet model so that it can be converted to a TensorRT engine?

flazerain avatar Nov 14 '19 03:11 flazerain
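
A minimal sketch for spotting the structural difference mentioned above: it loads both models and reports, for each PRelu node, whether its slope input is produced by a Reshape. The file names r100_exported.onnx and r100_downloaded.onnx are placeholders.

import onnx

for path in ['r100_exported.onnx', 'r100_downloaded.onnx']:
    graph = onnx.load(path).graph
    # map every tensor name to the op that produces it
    producers = {out: node.op_type for node in graph.node for out in node.output}
    print(path)
    for node in graph.node:
        if node.op_type == 'PRelu':
            slope_src = producers.get(node.input[1], 'initializer')
            print('  PRelu slope comes from:', slope_src)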

Hi @flazerain, you were able to convert the ArcFace model to ONNX, right? I converted it, but the output of the PReLU layer is wrong.

sky186 avatar Dec 17 '19 02:12 sky186

Hi @flazerain, you were able to convert the ArcFace model to ONNX, right? I converted it, but the output of the PReLU layer is wrong.

Yes, but it is different from the model zoo's model (the downloaded model has a Reshape before the PReLU).

flazerain avatar Dec 19 '19 10:12 flazerain

Hi @sky186 @flazerain, have you fixed the problem above? I also tried to convert the ArcFace LResNet100E-IR MXNet model to ONNX using convert_onnx.py, and I get an error with PReLU when I deploy the model.

onnx runtime error 1: Non-zero status code returned while running PRelu node. Name:'relu0' Status Message: relu0: right operand cannot broadcast on dim 0 LeftShape: {1,64,112,112}, RightShape: {64}

Can you guide me on how to fix it? Thank you all.

HoangTienDuc avatar Mar 18 '20 08:03 HoangTienDuc
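
One possible workaround, sketched here under the assumption that the exported model lives at r100.onnx and its PRelu slopes are plain 1-D initializers, is to patch the model in place: reshape each slope to (1, C, 1, 1) so it broadcasts over the NCHW input.

import onnx
from onnx import numpy_helper

model = onnx.load('r100.onnx')
initializers = {init.name: init for init in model.graph.initializer}

for node in model.graph.node:
    if node.op_type == 'PRelu' and node.input[1] in initializers:
        init = initializers[node.input[1]]
        slope = numpy_helper.to_array(init)
        if slope.ndim == 1:
            # e.g. (64,) -> (1, 64, 1, 1) so PRelu's unidirectional broadcast succeeds
            init.CopyFrom(numpy_helper.from_array(slope.reshape(1, -1, 1, 1), init.name))

onnx.save(model, 'r100_fixed.onnx')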

Has anyone resolved this issue?

theVmagnificient avatar Jul 09 '20 11:07 theVmagnificient

I converted the MXNet model (model-r100-ii) to ONNX using insightface/deploy/convert_onnx.py and got r100.onnx; this step works fine. However, when I run ../onnx-tensorrt/build/onnx2trt r100.onnx -o r100_engine.trt, it fails (screenshot of the error attached). I am doing this inside nvidia-docker.

bruceche11 avatar Feb 02 '21 12:02 bruceche11
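
Before retrying onnx2trt, it may help to confirm that the exported model runs at all. A quick sanity check with onnxruntime, assuming the standard 1x3x112x112 input:

import numpy as np
import onnxruntime as ort

sess = ort.InferenceSession('r100.onnx', providers=['CPUExecutionProvider'])
input_name = sess.get_inputs()[0].name
dummy = np.random.rand(1, 3, 112, 112).astype(np.float32)
embedding = sess.run(None, {input_name: dummy})[0]
print(embedding.shape)  # typically (1, 512) for the r100 ArcFace model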

Try the export script from this issue: https://github.com/deepinsight/insightface/issues/1350

import argparse
import onnx
import mxnet as mx
import numpy as np
from mxnet.contrib import onnx as onnx_mxnet
import mxnet.contrib.onnx.mx2onnx.export_onnx as mx_op
from mxnet.contrib.onnx.mx2onnx._op_translations import get_inputs

print('mxnet version:', mx.__version__)
print('onnx version:', onnx.__version__)
# assert onnx.__version__ == '1.3.0'


def create_helper_tensor_node(input_vals, output_name, kwargs):
    """create extra tensor node from numpy values"""
    data_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[input_vals.dtype]
    tensor_node = onnx.helper.make_tensor_value_info(
        name=output_name,
        elem_type=data_type,
        shape=input_vals.shape
    )
    kwargs["initializer"].append(
        onnx.helper.make_tensor(
            name=output_name,
            data_type=data_type,
            dims=input_vals.shape,
            vals=input_vals.flatten().tolist(),
            raw=False,
        )
    )
    return tensor_node


@mx_op.MXNetGraph.register("BatchNorm")
def convert_batchnorm(node, **kwargs):
    """
    Map MXNet's BatchNorm operator attributes to onnx's BatchNormalization operator and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)
    momentum = float(attrs.get("momentum", 0.9))
    eps = float(attrs.get("eps", 0.001))
    bn_node = onnx.helper.make_node(
        "BatchNormalization",
        input_nodes,
        [name],
        name=name,
        epsilon=eps,
        momentum=momentum,
        # MXNet computes mean and variance per feature for batchnorm
        # Default for onnx is across all spatial features. So disabling the parameter.
        # spatial=0
    )
    return [bn_node]


@mx_op.MXNetGraph.register("LeakyReLU")
def convert_leakyrelu(node, **kwargs):
    """Map MXNet's LeakyReLU operator attributes to onnx's Elu/LeakyRelu/PRelu operators
    based on the input node's attributes and return the created node.
    """
    name, input_nodes, attrs = get_inputs(node, kwargs)
    initializer = kwargs["initializer"]
    act_type = attrs.get("act_type", "leaky")
    alpha = float(attrs.get("slope", 0.25))
    act_name = {"elu": "Elu", "leaky": "LeakyRelu", "prelu": "PRelu",
                "selu": "Selu"}
    # Constant shape tensor [1, -1, 1, 1]: the 1-D PReLU slope is reshaped with it so
    # that it broadcasts over the NCHW input (as in the downloaded model zoo ONNX).
    reshape_val_name = 'reshape' + str(kwargs["idx"])
    input_type = onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[np.dtype('int64')]
    reshape_value = np.array([1, -1, 1, 1], dtype='int64')
    dims = np.shape(reshape_value)
    shape_node = onnx.helper.make_tensor_value_info(reshape_val_name, input_type, dims)
    initializer.append(
        onnx.helper.make_tensor(
            name=reshape_val_name,
            data_type=input_type,
            dims=dims,
            vals=reshape_value,
            raw=False,
        )
    )
    slope_op_name = 'slope' + str(kwargs["idx"])
    lr_node = []
    if act_type == "prelu" or act_type == "selu":
        reshape_slope_node = onnx.helper.make_node(
            'Reshape',
            inputs=[input_nodes[1], reshape_val_name],
            outputs=[slope_op_name],
            name=slope_op_name
        )
        node = onnx.helper.make_node(
            act_name[act_type],
            inputs=[input_nodes[0], slope_op_name],
            outputs=[name],
            name=name)
        lr_node.append(shape_node)
        lr_node.append(reshape_slope_node)
        lr_node.append(node)
    else:
        node = onnx.helper.make_node(
            act_name[act_type],
            inputs=input_nodes,
            outputs=[name],
            name=name,
            alpha=alpha)
        lr_node.append(node)
    return lr_node


parser = argparse.ArgumentParser(description='convert arcface models to onnx')
# general
parser.add_argument('--prefix', default='./model', help='prefix to load model.')
parser.add_argument('--epoch', default=0, type=int, help='epoch number to load model.')
parser.add_argument('--input_shape', nargs='+', default=[1, 3, 112, 112], type=int, help='input shape.')
parser.add_argument('--output_onnx', default='./arcface_r100.onnx', help='path to write onnx model.')
args = parser.parse_args()

input_shape = args.input_shape
print('input-shape:', input_shape)

sym_file = f'{args.prefix}-symbol.json'
params_file = f'{args.prefix}-{args.epoch:04d}.params'

converted_model_path = onnx_mxnet.export_model(sym_file, params_file, [input_shape], np.float32, args.output_onnx,
                                               verbose=True)
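
A typical invocation, assuming the script above is saved as export_onnx.py next to the model-r100-ii checkpoint (model-symbol.json / model-0000.params):

python export_onnx.py --prefix ./model-r100-ii/model --epoch 0 --output_onnx ./arcface_r100.onnx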

cyrusbehr avatar Apr 07 '23 02:04 cyrusbehr