failed to inference ONNX model: TypeError: Cannot read properties of undefined (reading 'InferenceSession')
I tried to replicate the example found here: https://github.com/microsoft/onnxruntime-inference-examples/tree/main/js/quick-start_onnxruntime-web-bundler:
import * as React from 'react';
import ort from 'onnxruntime-web'
import regeneratorRuntime from 'regenerator-runtime'
// React component reproducing the onnxruntime-web quick-start bundler example.
// NOTE(review): the runtime error below ("Cannot read properties of undefined
// (reading 'InferenceSession')") means `ort` is undefined when this runs —
// presumably because `onnxruntime-web` exposes no default export, so the
// `import ort from 'onnxruntime-web'` form above yields undefined; the answer
// suggests `import * as ort` or named imports instead. TODO confirm against
// the package's entry point.
function App_F() {
// https://github.com/microsoft/onnxruntime-inference-examples/blob/main/js/quick-start_onnxruntime-web-bundler/main.js
// Async helper that loads the model; note it is defined here but never
// invoked in the visible code — presumably meant to be wired to an effect
// or event handler; verify against the full component.
const onnxFunct = async () => {
try {
// create a new session and load the specific model
// the model in this example contains a single MatMul node
// it has 2 inputs: 'a'(float32, 3x4) and 'b'(float32, 4x3)
// it has 1 output: 'c'(float32, 3x3)
// `session` is created but unused below while the inference steps stay
// commented out; the path is relative to the bundled asset layout set up
// by the CopyPlugin config further down.
const session = await ort.InferenceSession.create('../../assets/onnx/model.onnx');
//const dataA = Float32Array.from([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12])
//const dataB = Float32Array.from([10, 20, 30, 40, 50, 60, 70, 80, 90, 100, 110, 120])
//const tensorA = new ort.Tensor('float32', dataA, [3, 4])
//const tensorB = new ort.Tensor('float32', dataB, [4, 3])
// prepare feeds. use model input names as keys
//const feeds = { a: tensorA, b: tensorB }
// feed inputs and run
//const results = await session.run(feeds)
// read from results
//const dataC = results.c.data
//console.log(`data of result tensor 'c': ${dataC}`)
} catch (e) {
// Any failure (including the undefined-`ort` TypeError) lands here and is
// logged rather than rethrown — producing the message quoted in the question.
console.log(`failed to inference ONNX model: ${e}. `)
}
}
// Render is static; the ONNX logic above does not affect the output.
return (
<div className='container'>
<h1 className='heading'>
F
</h1>
</div>
);
}
export default App_F;
In webpack.config.js:
new CopyPlugin({
patterns: [
{
from: path.resolve(__dirname, "./node_modules/onnxruntime-web/dist/*.wasm"),
to: path.resolve(__dirname, "[name][ext]")
},
{
from: path.resolve(__dirname, "./src/assets/onnx"),
to: path.resolve(__dirname, "./dist/assets/onnx")
}
In tsconfig.json:
{
"compilerOptions": {
"target": "ESNext",
"module": "commonjs",
"lib": ["dom", "es2015", "ESNext"],
"outDir": "dist",
//"jsx": "react",
"jsx": "react-jsx",
"baseUrl": "./src",
"paths": {
"@sections/*": ["app/sections/*"],
"@app/*": ["app/*"]
},
"strict": true,
"sourceMap": true,
"skipLibCheck": true,
"noImplicitAny": false,
"noImplicitThis": false,
"moduleResolution": "node",
"esModuleInterop": true,
"experimentalDecorators": true,
"allowSyntheticDefaultImports": true,
"resolveJsonModule": true,
"allowJs": true
},
"include": ["src/**/*"],
"exclude": [
"src/index.js",
"dist",
]
}
I get this error:
failed to inference ONNX model: TypeError: Cannot read properties of undefined (reading 'InferenceSession').
Other info:
"onnxruntime": "^1.7.0",
"onnxruntime-web": "^1.10.0"
node: v16.13.0
O.S.: Ubuntu 20.04 Desktop
How to solve the problem?
Solution 1:[1]
Multiple import methods work for onnxruntime-web:
- Method 1, in a JS script. Good for bundling or Node.js:
import { InferenceSession, Tensor } from "onnxruntime-web";
or
const ort = require('onnxruntime-web');
- Method 2, In an HTML file. Good for browser app :
<script src="https://cdn.jsdelivr.net/npm/onnxruntime-web/dist/ort.min.js"></script>
or
<script src="path/to/ort.min.js"></script>
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow
Solution | Source |
---|---|
Solution 1 | ffletcherr |