onnxruntime-node test
nodejs/onnxruntime.js +29 -0
nodejs/onnxruntime.js
ADDED
@@ -0,0 +1,29 @@
+const fs = require('fs');
+const sharp = require('sharp');
+const ort = require('onnxruntime-node');
+
+(async () => {
+  try {
+    // Step 1: Load and preprocess the image
+    const imageBuffer = await sharp('./training_images/shirt/00e745c9-97d9-429d-8c3f-d3db7a2d2991.jpg')
+      .resize(128, 128) // Resize to 128x128
+      .raw()            // Get raw pixel data
+      .toBuffer();
+
+    // Convert to Float32 and normalize pixel values to [0, 1]
+    const imgArray = Float32Array.from(imageBuffer).map(value => value / 255.0);
+
+    // Add batch dimension [1, 128, 128, 3]
+    const inputTensor = new ort.Tensor('float32', imgArray, [1, 128, 128, 3]);
+
+    // Step 2: Load ONNX model
+    const session = await ort.InferenceSession.create('./saved-model/model.onnx');
+
+    // Step 3: Run inference
+    const results = await session.run({ [session.inputNames[0]]: inputTensor });
+
+    console.log('Inference outputs:', results[session.outputNames[0]]);
+  } catch (err) {
+    console.error('Error:', err);
+  }
+})();
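A possible follow-up, not part of this commit: if the model emits one score per class (an assumption, since the commit does not show the model's output shape), the predicted class index can be read off the output tensor's data with an argmax, e.g.

    // Hypothetical sketch, assuming the output tensor holds per-class scores
    const output = results[session.outputNames[0]];
    const scores = Array.from(output.data);          // typed array of raw scores
    const predicted = scores.indexOf(Math.max(...scores)); // index of the highest score
    console.log('Predicted class index:', predicted);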