import * as ort from "onnxruntime-react-native";
import { loadOnnxSession } from "./loadModel";

/**
 * Runs the leaf-classification ONNX model on a preprocessed image tensor.
 *
 * @param inputTensor - Float32Array laid out as NCHW [1, 3, 224, 224]
 * @returns the model's first output buffer as a Float32Array
 *          (presumably class scores/logits — confirm against the model)
 * @throws rethrows any failure from session loading or `session.run`
 */
export async function runLeafInference(
  inputTensor: Float32Array
): Promise<Float32Array> {
  const session = await loadOnnxSession();

  // Read I/O names from the session itself so this keeps working even if
  // the exported model renames its input/output nodes.
  const inputName = session.inputNames[0];
  const outputName = session.outputNames[0];

  // FIX: the original `Record` had no type arguments — `Record<K, V>` is a
  // generic utility type and bare `Record` does not compile (TS2314).
  // The feed map goes from input name to its tensor.
  const feeds: Record<string, ort.Tensor> = {
    [inputName]: new ort.Tensor("float32", inputTensor, [1, 3, 224, 224]),
  };

  const results = await session.run(feeds);

  // `data` is typed as a union of typed-array kinds; we created a float32
  // input and the model emits float32, so narrow with a cast here.
  return results[outputName].data as Float32Array;
}