diff --git a/examples/extension/package.json b/examples/extension/package.json
index 9adcd617e..8d33266a1 100644
--- a/examples/extension/package.json
+++ b/examples/extension/package.json
@@ -15,6 +15,6 @@
     "webpack": "^5.79.0"
   },
   "dependencies": {
-    "@xenova/transformers": "^2.0.0"
+    "@huggingface/transformers": "^3.4.0"
   }
 }
diff --git a/examples/extension/src/background.js b/examples/extension/src/background.js
index db3ffee35..ec85e1cf4 100644
--- a/examples/extension/src/background.js
+++ b/examples/extension/src/background.js
@@ -1,14 +1,6 @@
 // background.js - Handles requests from the UI, runs the model, then sends back a response
 
-import { pipeline, env } from '@xenova/transformers';
-
-// Skip initial check for local models, since we are not loading any local models.
-env.allowLocalModels = false;
-
-// Due to a bug in onnxruntime-web, we must disable multithreading for now.
-// See https://github.com/microsoft/onnxruntime/issues/14445 for more information.
-env.backends.onnx.wasm.numThreads = 1;
-
+import { pipeline } from '@huggingface/transformers';
 
 class PipelineSingleton {
     static task = 'text-classification';
@@ -16,9 +8,7 @@ class PipelineSingleton {
     static instance = null;
 
     static async getInstance(progress_callback = null) {
-        if (this.instance === null) {
-            this.instance = pipeline(this.task, this.model, { progress_callback });
-        }
+        this.instance ??= pipeline(this.task, this.model, { progress_callback });
 
         return this.instance;
     }
diff --git a/examples/extension/webpack.config.js b/examples/extension/webpack.config.js
index 8e96f29d6..ae71c9b11 100644
--- a/examples/extension/webpack.config.js
+++ b/examples/extension/webpack.config.js
@@ -11,7 +11,10 @@ const config = {
     mode: 'development',
     devtool: 'inline-source-map',
     entry: {
-        background: './src/background.js',
+        background: {
+            import: './src/background.js',
+            chunkLoading: `import-scripts`,
+        },
         popup: './src/popup.js',
         content: './src/content.js',
     },