'those operators will be assigned node.out_shape==None, '
'refer to https://github.com/onnx/onnx/blob/master/docs/ShapeInference.md'
)
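# Validate the exported ONNX model by running it once through onnxruntime
# on randomly generated inputs matching each graph input's declared shape
# and dtype; any failure here means the model itself cannot be executed.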
try:
    # Map ONNX tensor element types to the corresponding numpy dtype names.
    datatype_map = {
        'tensor(int64)': 'int64',
        'tensor(float)': 'float32',
        'tensor(int32)': 'int32'
    }
    input_dict = {}
    sess = rt.InferenceSession(model_path)
    # Build a random feed for every graph input with its declared shape.
    for ipt in sess.get_inputs():
        datatype = datatype_map[ipt.type]
        input_dict[ipt.name] = np.random.random(
            ipt.shape).astype(datatype)
    res = sess.run(None, input_feed=input_dict)
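    # Reaching this point means onnxruntime executed the whole graph on the
    # random inputs without raising, so the exported model is runnable.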
except Exception:
    raise Exception(
        "onnxruntime failed to run inference on the ONNX model. Please confirm "
        "the correctness of the ONNX model with onnxruntime; if the model is "
        "correct, please submit an issue on GitHub."