import onnx
from onnxruntime import InferenceSession
import numpy as np
import torch


def execute_onnx_model(x: torch.Tensor, onnx_model: onnx.ModelProto) -> np.ndarray:
    # Build an inference session directly from the serialized model bytes.
    sess = InferenceSession(
        onnx_model.SerializeToString(),
        providers=['AzureExecutionProvider', 'CPUExecutionProvider'],
    )
    # Feed the tensor as float32 under the exported input name and return
    # the first (and only) output.
    out = sess.run(None, {'input.1': x.numpy().astype(np.float32)})[0]
    return out
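

# Minimal usage sketch (illustrative, not part of the original module):
# export a toy torch model to ONNX, reload it, and run it through
# execute_onnx_model. The model, file name, and shapes below are assumptions
# chosen so the exported input name matches the 'input.1' key hard-coded above.
if __name__ == '__main__':
    model = torch.nn.Linear(4, 2)   # toy model for demonstration
    x = torch.randn(1, 4)

    torch.onnx.export(
        model, x, 'toy_model.onnx',
        input_names=['input.1'],    # keep the input name expected by execute_onnx_model
    )

    onnx_model = onnx.load('toy_model.onnx')
    out = execute_onnx_model(x, onnx_model)
    print(out.shape)                # expected: (1, 2)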