No description.

runInference.ts 611B

1234567891011121314151617181920212223242526
  1. import * as ort from "onnxruntime-react-native";
  2. import { loadOnnxSession } from "./loadModel";
  3. /**
  4. * inputTensor:
  5. * Float32Array with shape [1, 3, 224, 224]
  6. */
  7. export async function runLeafInference(
  8. inputTensor: Float32Array
  9. ) {
  10. const session = await loadOnnxSession();
  11. const inputName = session.inputNames[0];
  12. const outputName = session.outputNames[0];
  13. const feeds: Record<string, ort.Tensor> = {};
  14. feeds[inputName] = new ort.Tensor(
  15. "float32",
  16. inputTensor,
  17. [1, 3, 224, 224]
  18. );
  19. const results = await session.run(feeds);
  20. return results[outputName].data as Float32Array;
  21. }