Commit 85aec33

[WIP] WASM deno support
1 parent a5847c9 commit 85aec33

File tree

4 files changed: +32 -5 lines changed

    src/backends/onnx.js
    src/env.js
    src/models.js
    webpack.config.js

src/backends/onnx.js

Lines changed: 14 additions & 2 deletions

@@ -56,12 +56,15 @@ let defaultDevices;
 let ONNX;
 const ORT_SYMBOL = Symbol.for('onnxruntime');
 
+/** @type {"custom"|"node"|"web"} */
+let ort;
 if (ORT_SYMBOL in globalThis) {
     // If the JS runtime exposes their own ONNX runtime, use it
     ONNX = globalThis[ORT_SYMBOL];
+    ort = 'custom';
 
-} else if (apis.IS_NODE_ENV) {
-    ONNX = ONNX_NODE.default ?? ONNX_NODE;
+} else if (apis.IS_NODE_ENV && (ONNX = ONNX_NODE.default ?? ONNX_NODE)?.InferenceSession) {
+    ort = 'node';
 
     // Updated as of ONNX Runtime 1.20.1
     // The following table lists the supported versions of ONNX Runtime Node.js binding provided with pre-built binaries.
@@ -87,6 +90,7 @@ if (ORT_SYMBOL in globalThis) {
     defaultDevices = ['cpu'];
 } else {
     ONNX = ONNX_WEB;
+    ort = 'web';
 
     if (apis.IS_WEBNN_AVAILABLE) {
         // TODO: Only push supported providers (depending on available hardware)
@@ -169,6 +173,14 @@ export function isONNXTensor(x) {
     return x instanceof ONNX.Tensor;
 }
 
+/**
+ * The type of ONNX runtime being used.
+ * - 'node' for `onnxruntime-node`
+ * - 'web' for `onnxruntime-web`
+ * - 'custom' for a custom ONNX runtime
+ */
+export const runtime = ort;
+
 /** @type {import('onnxruntime-common').Env} */
 // @ts-ignore
 const ONNX_ENV = ONNX?.env;
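
The new `runtime` export records which ONNX backend was selected at module load time ('custom', 'node', or 'web'). The sketch below is an illustration, not part of the commit; the import path assumes the same relative layout used by src/models.js.

    // Branch on the newly exported runtime flag (illustrative only).
    import { runtime } from './backends/onnx.js';

    if (runtime === 'web') {
        // onnxruntime-web was selected: WASM (and browser accelerators where available) apply.
    } else if (runtime === 'node') {
        // onnxruntime-node was selected: native bindings, 'cpu' provider by default.
    } else if (runtime === 'custom') {
        // The host exposed its own ONNX runtime via Symbol.for('onnxruntime').
    }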

src/env.js

Lines changed: 1 addition & 1 deletion

@@ -142,7 +142,7 @@ export const env = {
     remoteHost: 'https://huggingface.co/',
     remotePathTemplate: '{model}/resolve/{revision}/',
 
-    allowLocalModels: !(IS_BROWSER_ENV || IS_WEBWORKER_ENV),
+    allowLocalModels: !(IS_BROWSER_ENV || IS_WEBWORKER_ENV || IS_DENO_RUNTIME),
     localModelPath: localModelPath,
     useFS: IS_FS_AVAILABLE,
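
This hunk references an IS_DENO_RUNTIME flag whose definition is not shown in the diff. A minimal sketch of how such a flag is commonly derived (an assumption, not necessarily how env.js defines it):

    // Hypothetical Deno detection flag; the real definition lives outside this diff.
    const IS_DENO_RUNTIME = typeof globalThis.Deno !== 'undefined'
        && typeof globalThis.Deno.version?.deno === 'string';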

src/models.js

Lines changed: 2 additions & 1 deletion

@@ -48,6 +48,7 @@ import {
     createInferenceSession,
     isONNXTensor,
     isONNXProxy,
+    runtime,
 } from './backends/onnx.js';
 import {
     DATA_TYPES,
@@ -172,7 +173,7 @@ async function getSession(pretrained_model_name_or_path, fileName, options) {
 
     // If the device is not specified, we use the default (supported) execution providers.
     const selectedDevice = /** @type {import("./utils/devices.js").DeviceType} */(
-        device ?? (apis.IS_NODE_ENV ? 'cpu' : 'wasm')
+        device ?? (runtime === "web" ? 'wasm' : 'cpu')
    );
 
     const executionProviders = deviceToExecutionProviders(selectedDevice);
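
The practical effect of this hunk is that the default device now follows the selected ONNX runtime rather than apis.IS_NODE_ENV, so a host that resolves onnxruntime-node or injects a custom runtime defaults to 'cpu'. Callers can still pin the device explicitly; a usage sketch (the model id is only an example):

    // Pin the execution device explicitly instead of relying on the runtime-dependent default.
    import { pipeline } from '@huggingface/transformers';

    const classifier = await pipeline(
        'sentiment-analysis',
        'Xenova/distilbert-base-uncased-finetuned-sst-2-english', // example model id
        { device: 'wasm' }, // force the WASM execution provider
    );
    console.log(await classifier('Deno support is coming along.'));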

webpack.config.js

Lines changed: 15 additions & 1 deletion

@@ -54,7 +54,21 @@ class PostBuildPlugin {
         {
             const src = path.join(__dirname, 'node_modules/onnxruntime-web/dist', ORT_JSEP_FILE);
             const dest = path.join(dist, ORT_JSEP_FILE);
-            fs.copyFileSync(src, dest);
+
+            // Transformers.js uses both onnxruntime-web and onnxruntime-node in the same package,
+            // and the runtime we use depends on the environment (onnxruntime-web for web, onnxruntime-node for Node.js).
+            // This means that we don't currently support using the WASM backend in Node.js, so we disable this behaviour in the JSEP file.
+            const content = fs.readFileSync(src, 'utf8');
+            const updatedContent = content
+                .replace(
+                    `"object"==typeof process&&"object"==typeof process.versions&&"string"==typeof process.versions.node&&"renderer"!=process.type`,
+                    "false",
+                )
+                .replace(
+                    `typeof globalThis.process?.versions?.node == 'string'`,
+                    "false",
+                )
+            fs.writeFileSync(dest, updatedContent);
         }
     });
 }
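
The two replaced strings are the Node-environment probes in onnxruntime-web's bundled JSEP loader; hard-coding them to "false" keeps the copied file on the web/WASM code path. A post-build sanity check one could run is sketched below; the dist filename is an assumption based on onnxruntime-web's published layout and is what the config refers to as ORT_JSEP_FILE.

    // Illustrative check: the patched JSEP file should no longer contain the Node runtime probe.
    import fs from 'fs';
    import path from 'path';

    const ORT_JSEP_FILE = 'ort-wasm-simd-threaded.jsep.mjs'; // assumed name from onnxruntime-web/dist
    const patched = fs.readFileSync(path.join('dist', ORT_JSEP_FILE), 'utf8');
    if (patched.includes(`typeof globalThis.process?.versions?.node == 'string'`)) {
        throw new Error('JSEP file still contains the Node runtime probe');
    }
    console.log('JSEP file patched for web/WASM-only runtime detection.');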
