Kernel not found
Opened this issue · 1 comment
gabrielemidulla commented
node_modules\onnxruntime-node\dist\backend.js:24
__classPrivateFieldGet(this, _OnnxruntimeSessionHandler_inferenceSession, "f").loadModel(pathOrBuffer, options);
^
Error: Failed to find kernel for BiasSplitGelu(1) (node BiasSplitGelu_0). Kernel not found
at new OnnxruntimeSessionHandler (C:\Users\gabri\Documents\GitHub\glow-backend-js\node_modules\onnxruntime-node\dist\backend.js:24:92)
at C:\Users\gabri\Documents\GitHub\glow-backend-js\node_modules\onnxruntime-node\dist\backend.js:64:29
at process.processTicksAndRejections (node:internal/process/task_queues:77:11)
import { DiffusionPipeline } from '@aislamov/diffusers.js'
import { PNG } from 'pngjs'
const pipe = DiffusionPipeline.fromPretrained('aislamov/stable-diffusion-2-1-base-onnx');
const images = pipe.run({
prompt: "an astronaut running a horse",
numInferenceSteps: 30,
})
const data = await images[0].mul(255).round().clipByValue(0, 255).transpose(0, 2, 3, 1)
const p = new PNG({ width: 512, height: 512, inputColorType: 2 })
p.data = Buffer.from(data.data)
p.pack().pipe(fs.createWriteStream('output.png')).on('finish', () => {
console.log('Image saved as output.png');
})
dakenf commented
Hi. It's because with Node.js it runs on the CPU, and the CPU backend does not support these operations. There's a version with CPU-only ops: `const pipe = DiffusionPipeline.fromPretrained('aislamov/stable-diffusion-2-1-base-onnx', { revision: 'cpu' });`
I'm waiting for the ONNX team to publish my changes, after which you'll be able to use DirectML or CUDA with Node.js.