Following is my test code
<!DOCTYPE html>
<html>
<head>
<title>ONNX Runtime Web Example</title>
<script src="https://cdn.jsdelivr.net/npm/onnxruntime-web/dist/ort.min.js"></script>
</head>
<body>
<h1>ONNX Runtime Web Example</h1>
<script>
// Load the ONNX model
async function loadModel() {
// Replace 'model.onnx' with the path to your ONNX model file
const modelPath = '../src/RealESRGAN_x4plus_anime_6B.onnx';
const session = await ort.InferenceSession.create(modelPath, {
executionProviders: ["webgpu", "webgl"], // use webgpu and webgl cannot use dynamic input shape, so tmp don't use them and wait onnx update
});
// Example input tensor with dynamic shape
const inputTensor = new ort.Tensor('float32', new Float32Array(3 * 28 * 28), [1, 3, 28, 28]); // Example: 28x28 input shape
// Run inference with dynamic input shape
const output = await session.run({input: inputTensor});
// Handle the output
console.log('Output:', output);
}
// Call loadModel function to load and run the model
loadModel();
</script>
</body>
</html>
when enable executionProviders: ["webgpu", "webgl"], it raise err
Uncaught (in promise) Error: input tensor[0] check failed: expected shape '[,,,]' but got [1,3,28,28]
validateInputTensorDims
normalizeAndValidateInputs
(anonymous function)
event
run
run
run
loadModel
when disable gpu(remove executionProviders: ["webgpu", "webgl"]), it use cpu, can run onnx work well
how to use dynamic input shape when enable gpu