diff --git a/examples/objectDetector-dimLight/dim.jpg b/examples/objectDetector-dimLight/dim.jpg
new file mode 100644
index 00000000..31c45c1d
Binary files /dev/null and b/examples/objectDetector-dimLight/dim.jpg differ
diff --git a/examples/objectDetector-dimLight/dimmy.jpg b/examples/objectDetector-dimLight/dimmy.jpg
new file mode 100644
index 00000000..b2c258c8
Binary files /dev/null and b/examples/objectDetector-dimLight/dimmy.jpg differ
diff --git a/examples/objectDetector-dimLight/index.html b/examples/objectDetector-dimLight/index.html
new file mode 100644
index 00000000..d5f731db
--- /dev/null
+++ b/examples/objectDetector-dimLight/index.html
@@ -0,0 +1,20 @@
+
+
+
+
+
+
+ Object Detection in Dim Light
+
+
+
+
+
+
+
+
+
diff --git a/examples/objectDetector-dimLight/sketch.js b/examples/objectDetector-dimLight/sketch.js
new file mode 100644
index 00000000..142f57f4
--- /dev/null
+++ b/examples/objectDetector-dimLight/sketch.js
@@ -0,0 +1,53 @@
+let canvas;
+let ctx;
+let imageElement;
+let objectDetector;
+
+async function setup() {
+ canvas = document.getElementById("canvas");
+ ctx = canvas.getContext("2d");
+
+ imageElement = new Image();
+ imageElement.src = "dimmy.jpg";
+ imageElement.onload = async () => {
+ canvas.width = imageElement.width;
+ canvas.height = imageElement.height;
+ ctx.drawImage(imageElement, 0, 0);
+
+ objectDetector = await cocoSsd.load();
+ console.log("Object Detector Loaded");
+
+ detectObjects();
+ };
+}
+
+async function detectObjects() {
+ const results = await objectDetector.detect(canvas);
+ console.log(results);
+
+ drawResults(results);
+}
+
+function drawResults(objects) {
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
+
+ ctx.drawImage(imageElement, 0, 0);
+
+ objects.forEach((object) => {
+ ctx.beginPath();
+ ctx.rect(object.bbox[0], object.bbox[1], object.bbox[2], object.bbox[3]);
+ ctx.lineWidth = 2;
+ ctx.strokeStyle = "red";
+ ctx.stroke();
+
+ ctx.font = "16px Arial";
+ ctx.fillStyle = "red";
+ ctx.fillText(
+ object.class,
+ object.bbox[0],
+ object.bbox[1] > 10 ? object.bbox[1] - 5 : 10
+ );
+ });
+}
+
+setup();
diff --git a/examples/objectDetector-single-image/index.html b/examples/objectDetector-single-image/index.html
new file mode 100644
index 00000000..31e16d08
--- /dev/null
+++ b/examples/objectDetector-single-image/index.html
@@ -0,0 +1,22 @@
+
+
+
+
+
+
+
+
+ ml5.js ObjectDetector Image Example
+
+
+
+
+
+
+
diff --git a/examples/objectDetector-single-image/objects.jpg b/examples/objectDetector-single-image/objects.jpg
new file mode 100644
index 00000000..d147c687
Binary files /dev/null and b/examples/objectDetector-single-image/objects.jpg differ
diff --git a/examples/objectDetector-single-image/sketch.js b/examples/objectDetector-single-image/sketch.js
new file mode 100644
index 00000000..6e547868
--- /dev/null
+++ b/examples/objectDetector-single-image/sketch.js
@@ -0,0 +1,73 @@
+/*
+ * 👋 Hello! This is an ml5.js example made and shared with ❤️.
+ * Learn more about the ml5.js project: https://ml5js.org/
+ * ml5.js license and Code of Conduct: https://github.com/ml5js/ml5-next-gen/blob/main/LICENSE.md
+ *
+ * This example demonstrates object detection on an image through ml5.objectDetector.
+ */
+
+let objectDetector;
+let img;
+let objects = [];
+
+function preload() {
+ // Load the image to be detected
+ img = loadImage("objects.jpg");
+
+ // trying to work around "WebGPU readSync is only available for CPU-resident tensors."
+ // see https://github.com/ml5js/ml5-next-gen/issues/117
+ ml5.setBackend("webgl");
+
+ // Load the objectDetector model
+ objectDetector = ml5.objectDetector();
+}
+
+function setup() {
+ createCanvas(800, 800);
+ // Draw the image
+ image(img, 0, 0);
+ // Detect objects in the image
+ objectDetector.detect(img, gotObjects);
+}
+
+function draw() {
+ // Draw the image
+ image(img, 0, 0);
+
+ // Loop through all the detected objects and draw bounding boxes with labels
+ for (let i = 0; i < objects.length; i++) {
+ let object = objects[i];
+ let x = object.bbox[0];
+ let y = object.bbox[1];
+ let w = object.bbox[2];
+ let h = object.bbox[3];
+
+ stroke(object.color.r, object.color.g, object.color.b);
+ noFill();
+
+ // Draw the bounding box
+ rect(x, y, w, h);
+
+ // Draw the label with the class name
+ noStroke();
+ fill(object.color.r, object.color.g, object.color.b);
+ textSize(16);
+ text(object.class, x + 5, y + 15);
+ }
+}
+
+// Callback function for when objectDetector outputs data
+function gotObjects(results) {
+ // Save the output to the objects variable and assign a random color to each object
+ objects = results.map((object) => {
+ object.color = {
+ r: random(255),
+ g: random(255),
+ b: random(255),
+ };
+ return object;
+ });
+
+ // Redraw canvas to update the bounding boxes
+ redraw();
+}
diff --git a/examples/objectDetector-webcam/index.html b/examples/objectDetector-webcam/index.html
new file mode 100644
index 00000000..fc2704d3
--- /dev/null
+++ b/examples/objectDetector-webcam/index.html
@@ -0,0 +1,22 @@
+
+
+
+
+
+
+
+
+ ml5.js ObjectDetector Image Example
+
+
+
+
+
+
+
diff --git a/examples/objectDetector-webcam/objects.jpg b/examples/objectDetector-webcam/objects.jpg
new file mode 100644
index 00000000..d147c687
Binary files /dev/null and b/examples/objectDetector-webcam/objects.jpg differ
diff --git a/examples/objectDetector-webcam/sketch.js b/examples/objectDetector-webcam/sketch.js
new file mode 100644
index 00000000..7ad52f51
--- /dev/null
+++ b/examples/objectDetector-webcam/sketch.js
@@ -0,0 +1,94 @@
+/*
+ * 👋 Hello! This is an ml5.js example made and shared with ❤️.
+ * Learn more about the ml5.js project: https://ml5js.org/
+ * ml5.js license and Code of Conduct: https://github.com/ml5js/ml5-next-gen/blob/main/LICENSE.md
+ *
+ * This example demonstrates object detection on a video through ml5.objectDetector.
+ */
+
+let objectDetector;
+let video;
+let objects = [];
+let isModelLoaded = false;
+let isVideoReady = false;
+let detectionInterval = 30; // Number of frames between each detection
+let frameCount = 0;
+
+function preload() {
+ // Set the backend to "webgl"
+ ml5.setBackend("webgl");
+
+ // Load the objectDetector model
+ objectDetector = ml5.objectDetector("cocossd", modelReady);
+}
+
+function setup() {
+ createCanvas(800, 800);
+
+ // Create a video capture element
+ video = createCapture(VIDEO, videoReady);
+ video.size(800, 800);
+ video.hide(); // Hide the video element since we'll draw it on the canvas
+}
+
+function draw() {
+ if (isVideoReady && isModelLoaded) {
+ // Draw the video frame to the canvas
+ image(video, 0, 0);
+
+ frameCount++;
+
+ // Run object detection at specified intervals
+ if (frameCount % detectionInterval === 0) {
+ objectDetector.detect(video, gotObjects);
+ }
+
+ // Loop through all the detected objects and draw bounding boxes with labels
+ for (let i = 0; i < objects.length; i++) {
+ let object = objects[i];
+ let x = object.bbox[0];
+ let y = object.bbox[1];
+ let w = object.bbox[2];
+ let h = object.bbox[3];
+
+ // Draw the bounding box
+ stroke(object.color.r, object.color.g, object.color.b);
+ noFill();
+ rect(x, y, w, h);
+
+ // Draw the label with the class name
+ noStroke();
+ fill(object.color.r, object.color.g, object.color.b);
+ textSize(16);
+ text(object.class, x + 5, y + 15);
+ }
+ }
+}
+
+// Callback when the model is ready
+function modelReady() {
+ console.log("Model Loaded!");
+ isModelLoaded = true;
+}
+
+// Callback when the video is ready
+function videoReady() {
+ console.log("Video Ready!");
+ isVideoReady = true;
+}
+
+// Callback function for when objectDetector outputs data
+function gotObjects(results) {
+ // Save the output to the objects variable and assign a random color to each object
+ objects = results.map((object) => {
+ object.color = {
+ r: random(255),
+ g: random(255),
+ b: random(255),
+ };
+ return object;
+ });
+
+ // Redraw canvas to update the boxes
+ redraw();
+}
diff --git a/package.json b/package.json
index 63b88db7..469578eb 100644
--- a/package.json
+++ b/package.json
@@ -56,12 +56,13 @@
"@mediapipe/pose": "^0.5.1675469404",
"@mediapipe/selfie_segmentation": "~0.1.0",
"@tensorflow-models/body-segmentation": "^1.0.1",
+ "@tensorflow-models/coco-ssd": "^2.2.3",
"@tensorflow-models/face-landmarks-detection": "1.0.5",
"@tensorflow-models/hand-pose-detection": "^2.0.0",
"@tensorflow-models/mobilenet": "^2.1.0",
"@tensorflow-models/pose-detection": "^2.1.0",
"@tensorflow-models/speech-commands": "^0.5.4",
- "@tensorflow/tfjs": "^4.2.0",
+ "@tensorflow/tfjs": "^4.20.0",
"@tensorflow/tfjs-vis": "^1.5.1",
"axios": "^1.3.4",
"webpack-merge": "^5.9.0"
diff --git a/src/ObjectDetector/index.js b/src/ObjectDetector/index.js
new file mode 100644
index 00000000..a7abdacb
--- /dev/null
+++ b/src/ObjectDetector/index.js
@@ -0,0 +1,158 @@
+// Copyright (c) 2020-2024 ml5
+//
+// This software is released under the MIT License.
+// https://opensource.org/licenses/MIT
+
+/*
+ * ObjectDetector: Object detection using cocoSsd in the browser
+ */
+
+import * as tf from "@tensorflow/tfjs";
+import * as cocoSsd from "@tensorflow-models/coco-ssd";
+import callCallback from "../utils/callcallback";
+import handleArguments from "../utils/handleArguments";
+import handleOptions from "../utils/handleOptions";
+import { handleModelName } from "../utils/handleOptions";
+import { mediaReady } from "../utils/imageUtilities";
+
+class ObjectDetector {
+ /**
+ * An object for configuring ObjectDetector options.
+ * @typedef {Object} configOptions
+   * @property {string} modelType - Optional. Currently not passed through to cocoSsd.load() (see loadModel()); reserved for future model variants.
+ */
+
+ /**
+ * Creates ObjectDetector.
+ * @param {configOptions} options - An object containing ObjectDetector configuration options.
+ * @param {function} callback - A callback to be called when the model is ready.
+ * @private
+ */
+ constructor(modelName, options, callback) {
+ this.modelName = handleModelName(
+ modelName,
+ ["cocoSsd"],
+ "cocoSsd",
+ "objectDetector"
+ );
+ this.model = null;
+ this.config = options;
+ this.runtimeConfig = {};
+ this.detectMedia = null;
+ this.detectCallback = null;
+
+ // flags for detectStart() and detectStop()
+ this.detecting = false; // True when detection loop is running
+ this.signalStop = false; // Signal to stop the loop
+ this.prevCall = ""; // Track previous call to detectStart() or detectStop()
+
+ this.ready = callCallback(this.loadModel(), callback);
+ }
+
+ /**
+   * Loads the cocoSsd model.
+   * @return {Promise<ObjectDetector>} This ObjectDetector instance, once the model has loaded.
+ * @private
+ */
+ async loadModel() {
+ await tf.ready();
+ this.model = await cocoSsd.load();
+ console.log("Finished loading cocoSsd");
+ return this;
+ }
+
+ /**
+ * Asynchronously outputs a single object detection result when called.
+ * Supports both callback and promise.
+   * @param {*} [media] - An HTML or p5.js image, video, or canvas element to run the detection on.
+ * @param {function} [callback] - Optional. A callback to handle the detection result.
+ * @returns {Promise} The detection result.
+ */
+ async detect(...inputs) {
+ const argumentObject = handleArguments(...inputs);
+ argumentObject.require(
+ "image",
+ "An html or p5.js image, video, or canvas element argument is required for detect()."
+ );
+ const { image, callback } = argumentObject;
+
+    await mediaReady(image, false);
+
+    // Wait for the model to finish loading before running detection.
+    await this.ready;
+    const predictions = await this.model.detect(image);
+    const result = predictions;
+
+ if (typeof callback === "function") callback(result);
+ return result;
+ }
+
+ /**
+ * Repeatedly outputs object predictions through a callback function.
+   * @param {*} [media] - An HTML or p5.js image, video, or canvas element to run the prediction on.
+ * @param {function} [callback] - A callback to handle the object detection results.
+ */
+ detectStart(...inputs) {
+ const argumentObject = handleArguments(...inputs);
+ argumentObject.require(
+ "image",
+ "An html or p5.js image, video, or canvas element argument is required for detectStart()."
+ );
+ argumentObject.require(
+ "callback",
+ "A callback function argument is required for detectStart()."
+ );
+ this.detectMedia = argumentObject.image;
+ this.detectCallback = argumentObject.callback;
+
+ this.signalStop = false;
+ if (!this.detecting) {
+ this.detecting = true;
+ this.detectLoop();
+ }
+ if (this.prevCall === "start") {
+ console.warn(
+ "detectStart() was called more than once without calling detectStop(). Only the latest detectStart() call will take effect."
+ );
+ }
+ this.prevCall = "start";
+ }
+
+ /**
+ * Stops the detection loop before next detection loop runs.
+ */
+ detectStop() {
+ if (this.detecting) this.signalStop = true;
+ this.prevCall = "stop";
+ }
+
+ /**
+ * Calls detect in a loop.
+ * Can be started by detectStart() and terminated by detectStop().
+ * @private
+ */
+ async detectLoop() {
+ await mediaReady(this.detectMedia, false);
+ while (!this.signalStop) {
+ const predictions = await this.model.detect(this.detectMedia);
+ const result = predictions;
+
+ this.detectCallback(result);
+ await tf.nextFrame();
+ }
+ this.detecting = false;
+ this.signalStop = false;
+ }
+}
+
+/**
+ * Factory function that returns a new ObjectDetector instance.
+ * @returns {ObjectDetector} A new objectDetector instance.
+ */
+const objectDetector = (...inputs) => {
+ const { string, options = {}, callback } = handleArguments(...inputs);
+ const instance = new ObjectDetector(string, options, callback);
+ return instance;
+};
+
+export default objectDetector;
diff --git a/src/index.js b/src/index.js
index 09416c1c..3e327b56 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,5 +1,6 @@
import neuralNetwork from "./NeuralNetwork";
import handPose from "./HandPose";
+import objectDetector from "./ObjectDetector";
import sentiment from "./Sentiment";
import faceMesh from "./FaceMesh";
import bodyPose from "./BodyPose";
@@ -22,6 +23,7 @@ const withPreload = {
neuralNetwork,
sentiment,
soundClassifier,
+ objectDetector,
};
const ml5 = Object.assign({ p5Utils }, withPreload, {
diff --git a/yarn.lock b/yarn.lock
index 83e5ed70..c75e9895 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1186,6 +1186,11 @@
dependencies:
rimraf "^3.0.2"
+"@tensorflow-models/coco-ssd@^2.2.3":
+ version "2.2.3"
+ resolved "https://registry.yarnpkg.com/@tensorflow-models/coco-ssd/-/coco-ssd-2.2.3.tgz#3825286569076d6788199c9cb89fb2fa31f7d2f2"
+ integrity sha512-iCLGktG/XhHbP6h2FWxqCKMp/Px0lCp6MZU1fjNhjDHeaWEC9G7S7cZrnPXsfH+NewCM53YShlrHnknxU3SQig==
+
"@tensorflow-models/face-landmarks-detection@1.0.5":
version "1.0.5"
resolved "https://registry.yarnpkg.com/@tensorflow-models/face-landmarks-detection/-/face-landmarks-detection-1.0.5.tgz#ef2e6f03e329d9457afbe7fdbf76fa8bd1e54fc1"
@@ -1221,21 +1226,22 @@
"@types/seedrandom" "^2.4.28"
seedrandom "^3.0.5"
-"@tensorflow/tfjs-backend-cpu@4.8.0":
- version "4.8.0"
- resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-backend-cpu/-/tfjs-backend-cpu-4.8.0.tgz#6281ab0a93400f2b5c7b2efa07b0befb895a0260"
+"@tensorflow/tfjs-backend-cpu@4.20.0":
+ version "4.20.0"
+ resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-backend-cpu/-/tfjs-backend-cpu-4.20.0.tgz#7cc7a31fd91950a0d4d1e922c8e9a8f7e7a3e2cc"
+ integrity sha512-1QRQ6AqAa/VB8JOArf5nY3Dc/QQHXbfuxgdIdQhKrABEHgvlaWt2Vv696UhIlVl75YoNY+vWlCwBdGQIKYfFGw==
dependencies:
"@types/seedrandom" "^2.4.28"
seedrandom "^3.0.5"
-"@tensorflow/tfjs-backend-webgl@4.8.0":
- version "4.8.0"
- resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-backend-webgl/-/tfjs-backend-webgl-4.8.0.tgz#b361c28660aee3d33554b0b50032a8d78bcd268c"
+"@tensorflow/tfjs-backend-webgl@4.20.0":
+ version "4.20.0"
+ resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-backend-webgl/-/tfjs-backend-webgl-4.20.0.tgz#920bdda8cce920c183a48d917e34e80cea2e0c43"
+ integrity sha512-M03fJonJGxm2u3SCzRNA2JLh0gxaAye64SEmGAXOehizowxy42l+lMsPWU8xU7r7mN6PEilBNkuKAf5YJ7Xumg==
dependencies:
- "@tensorflow/tfjs-backend-cpu" "4.8.0"
+ "@tensorflow/tfjs-backend-cpu" "4.20.0"
"@types/offscreencanvas" "~2019.3.0"
"@types/seedrandom" "^2.4.28"
- "@types/webgl-ext" "0.0.30"
seedrandom "^3.0.5"
"@tensorflow/tfjs-backend-webgpu@^4.17.0":
@@ -1244,34 +1250,37 @@
dependencies:
"@tensorflow/tfjs-backend-cpu" "4.17.0"
-"@tensorflow/tfjs-converter@4.8.0":
- version "4.8.0"
- resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-converter/-/tfjs-converter-4.8.0.tgz#be38e9708eb52a2dcd7e78df44bea831da5970d6"
+"@tensorflow/tfjs-converter@4.20.0":
+ version "4.20.0"
+ resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-converter/-/tfjs-converter-4.20.0.tgz#e23549330e0bb693db9373efa892e695fd48186b"
+ integrity sha512-UJ2ntQ1TNtVHB5qGMwB0j306bs3KH1E1HKJ9Dxvrc6PUaivOV+CPKqmbidOFG5LylXeRC36JBdhe+gVT2nFHNw==
-"@tensorflow/tfjs-core@4.8.0":
- version "4.8.0"
- resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-core/-/tfjs-core-4.8.0.tgz#a238007edd3643fc4bcf7ec726c1cea67a29b894"
+"@tensorflow/tfjs-core@4.20.0":
+ version "4.20.0"
+ resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-core/-/tfjs-core-4.20.0.tgz#6f40757d33e53489a4a165e59ef4768502db12a9"
+ integrity sha512-m/cc9qDc63al9UhdbXRUYTLGfJJlhuN5tylAX/2pJMLj32c8a6ThGDJYoKzpf32n5g3MQGYLchjClDxeGdXMPQ==
dependencies:
"@types/long" "^4.0.1"
"@types/offscreencanvas" "~2019.7.0"
"@types/seedrandom" "^2.4.28"
- "@types/webgl-ext" "0.0.30"
- "@webgpu/types" "0.1.30"
+ "@webgpu/types" "0.1.38"
long "4.0.0"
node-fetch "~2.6.1"
seedrandom "^3.0.5"
-"@tensorflow/tfjs-data@4.8.0":
- version "4.8.0"
- resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-data/-/tfjs-data-4.8.0.tgz#6969b90f41ff53ab0dd3b0c933183a5cbe4c52e1"
+"@tensorflow/tfjs-data@4.20.0":
+ version "4.20.0"
+ resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-data/-/tfjs-data-4.20.0.tgz#4c0e84a0e12e07392c3c854efd5f2a147548776e"
+ integrity sha512-k6S8joXhoXkatcoT6mYCxBzRCsnrLfnl6xjLe46SnXO0oEEy4Vuzbmp5Ydl1uU2hHr73zL91EdAC1k8Hng/+oA==
dependencies:
"@types/node-fetch" "^2.1.2"
node-fetch "~2.6.1"
string_decoder "^1.3.0"
-"@tensorflow/tfjs-layers@4.8.0":
- version "4.8.0"
- resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-layers/-/tfjs-layers-4.8.0.tgz#23b09daa67cd05d732bca67922723368bb7dba81"
+"@tensorflow/tfjs-layers@4.20.0":
+ version "4.20.0"
+ resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-layers/-/tfjs-layers-4.20.0.tgz#7d00dc3c3cea1ce8e7df67480e2738ab71a0016f"
+ integrity sha512-SCHZH29Vyw+Y9eoaJHiaNo6yqM9vD3XCKncoczonRRywejm3FFqddg1AuWAfSE9XoNPE21o9PsknvKLl/Uh+Cg==
"@tensorflow/tfjs-vis@^1.5.1":
version "1.5.1"
@@ -1285,16 +1294,17 @@
vega-embed "6.17.0"
vega-lite "4.13.1"
-"@tensorflow/tfjs@^4.2.0":
- version "4.8.0"
- resolved "https://registry.yarnpkg.com/@tensorflow/tfjs/-/tfjs-4.8.0.tgz#58b2b8d1bcf4249da7bb2ebecdaf7e8be87f01fd"
- dependencies:
- "@tensorflow/tfjs-backend-cpu" "4.8.0"
- "@tensorflow/tfjs-backend-webgl" "4.8.0"
- "@tensorflow/tfjs-converter" "4.8.0"
- "@tensorflow/tfjs-core" "4.8.0"
- "@tensorflow/tfjs-data" "4.8.0"
- "@tensorflow/tfjs-layers" "4.8.0"
+"@tensorflow/tfjs@^4.20.0":
+ version "4.20.0"
+ resolved "https://registry.yarnpkg.com/@tensorflow/tfjs/-/tfjs-4.20.0.tgz#7835d14d1661d459817595196a4bc7bb0d949b75"
+ integrity sha512-+ZLfJq2jyIOE2/+yKPoyD/gfy3RZypbfMrlzvBDgodTK5jnexprihhX38hxilh9HPWvWQXJqiUjKJP5ECCikrw==
+ dependencies:
+ "@tensorflow/tfjs-backend-cpu" "4.20.0"
+ "@tensorflow/tfjs-backend-webgl" "4.20.0"
+ "@tensorflow/tfjs-converter" "4.20.0"
+ "@tensorflow/tfjs-core" "4.20.0"
+ "@tensorflow/tfjs-data" "4.20.0"
+ "@tensorflow/tfjs-layers" "4.20.0"
argparse "^1.0.10"
chalk "^4.1.0"
core-js "3.29.1"
@@ -1536,10 +1546,6 @@
version "4.0.5"
resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.5.tgz#cb6e2a691b70cb177c6e3ae9c1d2e8b2ea8cd304"
-"@types/webgl-ext@0.0.30":
- version "0.0.30"
- resolved "https://registry.yarnpkg.com/@types/webgl-ext/-/webgl-ext-0.0.30.tgz#0ce498c16a41a23d15289e0b844d945b25f0fb9d"
-
"@types/ws@^8.5.5":
version "8.5.5"
resolved "https://registry.yarnpkg.com/@types/ws/-/ws-8.5.5.tgz#af587964aa06682702ee6dcbc7be41a80e4b28eb"
@@ -1662,9 +1668,10 @@
"@webassemblyjs/ast" "1.11.6"
"@xtuc/long" "4.2.2"
-"@webgpu/types@0.1.30":
- version "0.1.30"
- resolved "https://registry.yarnpkg.com/@webgpu/types/-/types-0.1.30.tgz#b6406dc4a1c1e0d469028ceb30ddffbbd2fa706c"
+"@webgpu/types@0.1.38":
+ version "0.1.38"
+ resolved "https://registry.yarnpkg.com/@webgpu/types/-/types-0.1.38.tgz#6fda4b410edc753d3213c648320ebcf319669020"
+ integrity sha512-7LrhVKz2PRh+DD7+S+PVaFd5HxaWQvoMqBbsV9fNJO1pjUs1P8bM2vQVNfk+3URTqbuTI7gkXi0rfsN0IadoBA==
"@webpack-cli/configtest@^2.1.1":
version "2.1.1"
@@ -4955,7 +4962,15 @@ string-length@^4.0.1:
char-regex "^1.0.2"
strip-ansi "^6.0.0"
-"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3:
+"string-width-cjs@npm:string-width@^4.2.0":
+ version "4.2.3"
+ resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
+ dependencies:
+ emoji-regex "^8.0.0"
+ is-fullwidth-code-point "^3.0.0"
+ strip-ansi "^6.0.1"
+
+string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3:
version "4.2.3"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
dependencies:
@@ -4983,7 +4998,13 @@ string_decoder@~1.1.1:
dependencies:
safe-buffer "~5.1.0"
-"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1:
+"strip-ansi-cjs@npm:strip-ansi@^6.0.1":
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
+ dependencies:
+ ansi-regex "^5.0.1"
+
+strip-ansi@^6.0.0, strip-ansi@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
dependencies:
@@ -5880,7 +5901,7 @@ wildcard@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/wildcard/-/wildcard-2.0.1.tgz#5ab10d02487198954836b6349f74fff961e10f67"
-"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0:
+"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0":
version "7.0.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
dependencies:
@@ -5896,6 +5917,14 @@ wrap-ansi@^6.2.0:
string-width "^4.1.0"
strip-ansi "^6.0.0"
+wrap-ansi@^7.0.0:
+ version "7.0.0"
+ resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43"
+ dependencies:
+ ansi-styles "^4.0.0"
+ string-width "^4.1.0"
+ strip-ansi "^6.0.0"
+
wrap-ansi@^8.1.0:
version "8.1.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-8.1.0.tgz#56dc22368ee570face1b49819975d9b9a5ead214"