Open
Description
Hello, I have constructed a model with a single operator, trilu, but I am unable to run inference on it using ONNXRuntime. Here is my environment setup:
ort-nightly 1.19.0.dev20240716003
onnx 1.16.0
onnxsim 0.4.36
here is the code to reproduce the problem
import onnx
from onnx import helper, TensorProto
import numpy as np
import onnxruntime
input_1 = helper.make_tensor_value_info('input_1', TensorProto.INT32, [1,1,1024,1024])
input_2 = helper.make_tensor_value_info('input_2', TensorProto.INT64, [1])
output = helper.make_tensor_value_info('output', TensorProto.INT32,[1,1,1024,1024])
def build_model():
node_def = helper.make_node(
'Trilu',
['input_1', 'input_2'],
['output'],
name="Trilu"
)
graph_def = helper.make_graph(
nodes=[node_def],
name='SingleOperatorGraph',
inputs=[input_1, input_2],
outputs=[output],
initializer=[]
)
model_def = helper.make_model(
graph_def,
producer_name='MyModel',
opset_imports=[helper.make_opsetid("", 14)],
)
return model_def
def run_model(model_path):
input_data_1 = np.random.randint(10, size=(1,1,1024, 1024)).astype(np.int64)
input_data_2 = np.array(-1).astype(np.int64)
np.save("trilu_i1",input_data_1)
np.save("trilu_i2",input_data_2)
session = onnxruntime.InferenceSession(model_path,providers=['CPUExecutionProvider'])
input_names = [input.name for input in session.get_inputs()]
inputs = {input_names[0]: input_data_1, input_names[1]: input_data_2}
output = session.run(None, inputs)
print(output)
model_def = build_model()
onnx.checker.check_model(model_def)
save_path = "single_operator_model.onnx"
onnx.save_model(model_def, "single_operator_model.onnx")
print(f"{onnxruntime.__version__=}")
run_model(save_path)