diff --git a/examples/bodyPose-blazePose-keypoints/index.html b/examples/bodyPose-blazePose-keypoints/index.html
index dbefaf17..27b875d2 100644
--- a/examples/bodyPose-blazePose-keypoints/index.html
+++ b/examples/bodyPose-blazePose-keypoints/index.html
@@ -11,7 +11,7 @@
ml5.js bodyPose BlazePose Detection Example
-
+
diff --git a/examples/bodyPose-blazePose-skeleton/index.html b/examples/bodyPose-blazePose-skeleton/index.html
index 1f2ca3b0..a46b7663 100644
--- a/examples/bodyPose-blazePose-skeleton/index.html
+++ b/examples/bodyPose-blazePose-skeleton/index.html
@@ -11,7 +11,7 @@
ml5.js bodyPose BlazePose Skeleton Example
-
+
diff --git a/examples/bodyPose-keypoints/index.html b/examples/bodyPose-keypoints/index.html
index 811ee61f..0f5dcfc7 100644
--- a/examples/bodyPose-keypoints/index.html
+++ b/examples/bodyPose-keypoints/index.html
@@ -11,7 +11,7 @@
ml5.js bodyPose Detection Example
-
+
diff --git a/examples/bodyPose-skeletal-connections/index.html b/examples/bodyPose-skeletal-connections/index.html
index 08a1fb65..0e5b9199 100644
--- a/examples/bodyPose-skeletal-connections/index.html
+++ b/examples/bodyPose-skeletal-connections/index.html
@@ -11,7 +11,7 @@
ml5.js bodyPose Skeleton Example
-
+
diff --git a/examples/bodySegmentation-mask-background/index.html b/examples/bodySegmentation-mask-background/index.html
index 5e9ed6a5..49a759df 100644
--- a/examples/bodySegmentation-mask-background/index.html
+++ b/examples/bodySegmentation-mask-background/index.html
@@ -14,7 +14,7 @@
ml5.js bodySegmentation Mask Background Example
-
+
diff --git a/examples/bodySegmentation-mask-body-parts/index.html b/examples/bodySegmentation-mask-body-parts/index.html
index 54111ef3..4696cd21 100644
--- a/examples/bodySegmentation-mask-body-parts/index.html
+++ b/examples/bodySegmentation-mask-body-parts/index.html
@@ -14,7 +14,7 @@
ml5.js bodySegmentation Parts Example
-
+
diff --git a/examples/bodySegmentation-mask-person/index.html b/examples/bodySegmentation-mask-person/index.html
index d7932242..f2ee161b 100644
--- a/examples/bodySegmentation-mask-person/index.html
+++ b/examples/bodySegmentation-mask-person/index.html
@@ -14,7 +14,7 @@
ml5.js bodySegmentation Mask Person Example
-
+
diff --git a/examples/bodySegmentation-select-body-parts/index.html b/examples/bodySegmentation-select-body-parts/index.html
index 54111ef3..4696cd21 100644
--- a/examples/bodySegmentation-select-body-parts/index.html
+++ b/examples/bodySegmentation-select-body-parts/index.html
@@ -14,7 +14,7 @@
ml5.js bodySegmentation Parts Example
-
+
diff --git a/examples/faceMesh-bounding-box/index.html b/examples/faceMesh-bounding-box/index.html
index 0f44ca8f..b64e1b5f 100644
--- a/examples/faceMesh-bounding-box/index.html
+++ b/examples/faceMesh-bounding-box/index.html
@@ -14,7 +14,7 @@
ml5.js faceMesh Bounding Box Example
-
+
diff --git a/examples/faceMesh-keypoints-from-parts/index.html b/examples/faceMesh-keypoints-from-parts/index.html
index 4bd649e7..753c2acd 100644
--- a/examples/faceMesh-keypoints-from-parts/index.html
+++ b/examples/faceMesh-keypoints-from-parts/index.html
@@ -14,7 +14,7 @@
ml5.js faceMesh Keypoints from Parts Example
-
+
diff --git a/examples/faceMesh-keypoints/index.html b/examples/faceMesh-keypoints/index.html
index 0a07e429..fc27bb24 100644
--- a/examples/faceMesh-keypoints/index.html
+++ b/examples/faceMesh-keypoints/index.html
@@ -14,7 +14,7 @@
ml5.js faceMesh Webcam Example
-
+
diff --git a/examples/faceMesh-parts-bounding-box/index.html b/examples/faceMesh-parts-bounding-box/index.html
index a61060fc..cd8f9ce3 100644
--- a/examples/faceMesh-parts-bounding-box/index.html
+++ b/examples/faceMesh-parts-bounding-box/index.html
@@ -14,7 +14,7 @@
ml5.js faceMesh Parts Bounding Box Example
-
+
diff --git a/examples/faceMesh-parts/index.html b/examples/faceMesh-parts/index.html
index c6f9e6c2..4b93c076 100644
--- a/examples/faceMesh-parts/index.html
+++ b/examples/faceMesh-parts/index.html
@@ -14,7 +14,7 @@
ml5.js faceMesh p5 Webcam Example
-
+
diff --git a/examples/faceMesh-shapes-from-parts/index.html b/examples/faceMesh-shapes-from-parts/index.html
index fe57e1a7..4caba2ff 100644
--- a/examples/faceMesh-shapes-from-parts/index.html
+++ b/examples/faceMesh-shapes-from-parts/index.html
@@ -14,7 +14,7 @@
ml5.js faceMesh Shapes from Parts Example
-
+
diff --git a/examples/faceMesh-single-image/index.html b/examples/faceMesh-single-image/index.html
index 4eb134a5..e79bf6c0 100644
--- a/examples/faceMesh-single-image/index.html
+++ b/examples/faceMesh-single-image/index.html
@@ -14,7 +14,7 @@
ml5.js faceMesh Image Example
-
+
diff --git a/examples/faceMesh-triangle-mesh/index.html b/examples/faceMesh-triangle-mesh/index.html
index 966408c8..754a3d50 100644
--- a/examples/faceMesh-triangle-mesh/index.html
+++ b/examples/faceMesh-triangle-mesh/index.html
@@ -14,7 +14,7 @@
ml5.js faceMesh Triangle Mesh Example
-
+
diff --git a/examples/faceMesh-uv-map/index.html b/examples/faceMesh-uv-map/index.html
index c42b8b9b..33820f32 100644
--- a/examples/faceMesh-uv-map/index.html
+++ b/examples/faceMesh-uv-map/index.html
@@ -14,7 +14,7 @@
ml5.js faceMesh Webcam Example
-
+
diff --git a/examples/handPose-detect-start-stop/index.html b/examples/handPose-detect-start-stop/index.html
index f9a10ddd..27f59f77 100644
--- a/examples/handPose-detect-start-stop/index.html
+++ b/examples/handPose-detect-start-stop/index.html
@@ -14,7 +14,7 @@
ml5.js handPose Start and Stop Example
-
+
diff --git a/examples/handPose-keypoints/index.html b/examples/handPose-keypoints/index.html
index 9277dfa0..c8f5ebc9 100644
--- a/examples/handPose-keypoints/index.html
+++ b/examples/handPose-keypoints/index.html
@@ -14,7 +14,7 @@
ml5.js handPose Webcam Example
-
+
diff --git a/examples/handPose-parts/index.html b/examples/handPose-parts/index.html
index 0309b2ac..0957ae41 100644
--- a/examples/handPose-parts/index.html
+++ b/examples/handPose-parts/index.html
@@ -14,7 +14,7 @@
ml5.js handPose p5 Webcam Example
-
+
diff --git a/examples/handPose-single-image/index.html b/examples/handPose-single-image/index.html
index 35c47592..9ba78f49 100644
--- a/examples/handPose-single-image/index.html
+++ b/examples/handPose-single-image/index.html
@@ -14,7 +14,7 @@
ml5.js handPose Image Example
-
+
diff --git a/examples/handPose-skeletal-connections/index.html b/examples/handPose-skeletal-connections/index.html
index 1559cd99..31f6909d 100644
--- a/examples/handPose-skeletal-connections/index.html
+++ b/examples/handPose-skeletal-connections/index.html
@@ -14,7 +14,7 @@
ml5.js handPose Skeletal Connection Example
-
+
diff --git a/examples/imageClassifier-single-image/index.html b/examples/imageClassifier-single-image/index.html
index 1d7d3724..a574c079 100644
--- a/examples/imageClassifier-single-image/index.html
+++ b/examples/imageClassifier-single-image/index.html
@@ -14,7 +14,7 @@
ml5.js imageClassifier Example
-
+
diff --git a/examples/imageClassifier-teachable-machine/index.html b/examples/imageClassifier-teachable-machine/index.html
index 6d4d1448..9f568319 100644
--- a/examples/imageClassifier-teachable-machine/index.html
+++ b/examples/imageClassifier-teachable-machine/index.html
@@ -14,7 +14,7 @@
ml5.js imageClassifier + Teachable Machine Example
-
+
diff --git a/examples/imageClassifier-webcam/index.html b/examples/imageClassifier-webcam/index.html
index 4900210c..97e6fd45 100644
--- a/examples/imageClassifier-webcam/index.html
+++ b/examples/imageClassifier-webcam/index.html
@@ -14,7 +14,7 @@
ml5.js imageClassifier Webcam Example
-
+
diff --git a/examples/ml5-bodyPose/index.html b/examples/ml5-bodyPose/index.html
new file mode 100644
index 00000000..27b875d2
--- /dev/null
+++ b/examples/ml5-bodyPose/index.html
@@ -0,0 +1,20 @@
+
+
+
+
+
+ ml5.js bodyPose BlazePose Detection Example
+
+
+
+
+
+
+
+
diff --git a/examples/ml5-bodyPose/sketch.js b/examples/ml5-bodyPose/sketch.js
new file mode 100644
index 00000000..194a7b17
--- /dev/null
+++ b/examples/ml5-bodyPose/sketch.js
@@ -0,0 +1,50 @@
+/*
+ * 👋 Hello! This is an ml5.js example made and shared with ❤️.
+ * Learn more about the ml5.js project: https://ml5js.org/
+ * ml5.js license and Code of Conduct: https://github.com/ml5js/ml5-next-gen/blob/main/LICENSE.md
+ *
+ * This example demonstrates pose tracking on live video through ml5.bodyPose with the BlazePose model.
+ */
+
+let video;
+let bodyPose;
+let poses = [];
+
+function preload() {
+ // Load the bodyPose model
+ bodyPose = ml5.bodyPose("BlazePose");
+}
+
+function setup() {
+ createCanvas(640, 480);
+
+ // Create the video and hide it
+ video = createCapture(VIDEO);
+ video.size(640, 480);
+ video.hide();
+
+ // Start detecting poses in the webcam video
+ bodyPose.detectStart(video, gotPoses);
+}
+
+function draw() {
+ // Draw the webcam video
+ image(video, 0, 0, width, height);
+
+ // Draw all the tracked landmark points
+ for (let i = 0; i < poses.length; i++) {
+ let pose = poses[i];
+ for (let j = 0; j < pose.keypoints.length; j++) {
+ let keypoint = pose.keypoints[j];
+ fill(0, 255, 0);
+ noStroke();
+ circle(keypoint.x, keypoint.y, 10);
+ }
+ }
+}
+
+// Callback function for when bodyPose outputs data
+function gotPoses(results) {
+ // Save the output to the poses variable
+ poses = results;
+}
diff --git a/examples/neuralNetwork-color-classifier/index.html b/examples/neuralNetwork-color-classifier/index.html
index 369514dd..f90e7a06 100644
--- a/examples/neuralNetwork-color-classifier/index.html
+++ b/examples/neuralNetwork-color-classifier/index.html
@@ -14,7 +14,7 @@
ml5.js neuralNetwork Color Classifier Example
-
+
diff --git a/examples/neuralNetwork-load-model/index.html b/examples/neuralNetwork-load-model/index.html
index a2c4a5fd..2ca63ff2 100644
--- a/examples/neuralNetwork-load-model/index.html
+++ b/examples/neuralNetwork-load-model/index.html
@@ -14,7 +14,7 @@
ml5.js neuralNetwork Loading Pre-trained Model Example
-
+
diff --git a/examples/neuralNetwork-mouse-gesture/index.html b/examples/neuralNetwork-mouse-gesture/index.html
index c406a501..55f60bcf 100644
--- a/examples/neuralNetwork-mouse-gesture/index.html
+++ b/examples/neuralNetwork-mouse-gesture/index.html
@@ -14,7 +14,7 @@
ml5.js neuralNetwork Gesture Classifier Example
-
+
diff --git a/examples/neuralNetwork-train-and-save/index.html b/examples/neuralNetwork-train-and-save/index.html
index a8a1c9c0..4385318d 100644
--- a/examples/neuralNetwork-train-and-save/index.html
+++ b/examples/neuralNetwork-train-and-save/index.html
@@ -14,7 +14,7 @@
ml5.js neuralNetwork Training and Saving Example
-
+
diff --git a/examples/neuroEvolution-flappy-bird/index.html b/examples/neuroEvolution-flappy-bird/index.html
index ab498dcf..53095e78 100644
--- a/examples/neuroEvolution-flappy-bird/index.html
+++ b/examples/neuroEvolution-flappy-bird/index.html
@@ -14,7 +14,7 @@
ml5.js neuralNetwork Neuroevolution Flappy Bird
-
+
diff --git a/examples/neuroEvolution-sensors/index.html b/examples/neuroEvolution-sensors/index.html
index 2f5765e1..bea0ef72 100644
--- a/examples/neuroEvolution-sensors/index.html
+++ b/examples/neuroEvolution-sensors/index.html
@@ -14,7 +14,7 @@
ml5.js neuralNetwork Neuroevolution Sensors
-
+
diff --git a/examples/neuroEvolution-steering/index.html b/examples/neuroEvolution-steering/index.html
index b90a565f..c4be1a58 100644
--- a/examples/neuroEvolution-steering/index.html
+++ b/examples/neuroEvolution-steering/index.html
@@ -14,7 +14,7 @@
ml5.js neuralNetwork Neuroevolution Steering
-
+
diff --git a/examples/sentiment/index.html b/examples/sentiment/index.html
index d6a99363..8606eb50 100644
--- a/examples/sentiment/index.html
+++ b/examples/sentiment/index.html
@@ -10,7 +10,7 @@
ml5.js Sentiment Analysis Example
-
+
diff --git a/examples/soundClassifier-speech-command/index.html b/examples/soundClassifier-speech-command/index.html
index 3ae14989..020a5253 100644
--- a/examples/soundClassifier-speech-command/index.html
+++ b/examples/soundClassifier-speech-command/index.html
@@ -13,7 +13,7 @@
Sound classification using SpeechCommands18w and p5.js
-
+
diff --git a/examples/soundClassifier-teachable-machine/index.html b/examples/soundClassifier-teachable-machine/index.html
index 149929cf..1e53a0b0 100644
--- a/examples/soundClassifier-teachable-machine/index.html
+++ b/examples/soundClassifier-teachable-machine/index.html
@@ -14,7 +14,7 @@
Sound classification using pre-trained custom model
-
+
diff --git a/package.json b/package.json
index 2d303aae..27386187 100644
--- a/package.json
+++ b/package.json
@@ -12,7 +12,8 @@
"test": "jest --config tests/jest.config.js",
"upload-examples": "node scripts/uploadExamples.js",
"update-p5-version": "node scripts/updateP5Version.js",
- "update-readme": "node scripts/updateReadme.js"
+ "update-readme": "node scripts/updateReadme.js",
+ "analyze": "ANALYZE=true webpack --config webpack.config.js --mode production"
},
"files": [
"dist"
@@ -20,6 +21,9 @@
"keywords": [
"ML5"
],
+ "sideEffects": [
+ "src/index.js"
+ ],
"author": "ml5",
"license": "MIT",
"repository": {
@@ -46,6 +50,7 @@
"rimraf": "^5.0.5",
"terser-webpack-plugin": "^5.3.10",
"webpack": "^5.76.1",
+ "webpack-bundle-analyzer": "^4.10.2",
"webpack-cli": "^5.0.1",
"webpack-dev-server": "^4.15.1"
},
diff --git a/src/BodyPose/index.js b/src/BodyPose/index.js
index e1efe833..f5769d87 100644
--- a/src/BodyPose/index.js
+++ b/src/BodyPose/index.js
@@ -27,6 +27,7 @@ import handleOptions from "../utils/handleOptions";
import { handleModelName } from "../utils/handleOptions";
import objectRenameKey from "../utils/objectRenameKey";
import { isVideo } from "../utils/handleArguments";
+import p5Utils from "../utils/p5Utils";
/**
* User provided options object for BodyPose with MoveNet model.
@@ -547,10 +548,7 @@ class BodyPose {
* @param {function} callback - A callback function that is called once the model has been loaded.
* @returns {BodyPose} A BodyPose instance.
*/
-const bodyPose = (...inputs) => {
+export const bodyPose = p5Utils.maybeRegisterPreload((...inputs) => {
const { string, options = {}, callback } = handleArguments(...inputs);
- const instance = new BodyPose(string, options, callback);
- return instance;
-};
-
-export default bodyPose;
+ return new BodyPose(string, options, callback);
+});
diff --git a/src/BodySegmentation/index.js b/src/BodySegmentation/index.js
index dea0fbd2..28ddbdaf 100644
--- a/src/BodySegmentation/index.js
+++ b/src/BodySegmentation/index.js
@@ -16,6 +16,7 @@ import BODYPIX_PALETTE from "./BODYPIX_PALETTE";
import { mediaReady } from "../utils/imageUtilities";
import handleOptions from "../utils/handleOptions";
import { handleModelName } from "../utils/handleOptions";
+import p5Utils from "../utils/p5Utils";
class BodySegmentation {
/**
@@ -411,10 +412,7 @@ class BodySegmentation {
* Factory function that returns a Facemesh instance
* @returns {Object} A new bodySegmentation instance
*/
-const bodySegmentation = (...inputs) => {
+export const bodySegmentation = p5Utils.maybeRegisterPreload((...inputs) => {
const { string, options = {}, callback } = handleArguments(...inputs);
- const instance = new BodySegmentation(string, options, callback);
- return instance;
-};
-
-export default bodySegmentation;
+ return new BodySegmentation(string, options, callback);
+});
diff --git a/src/FaceMesh/index.js b/src/FaceMesh/index.js
index 2209e612..f22cda67 100644
--- a/src/FaceMesh/index.js
+++ b/src/FaceMesh/index.js
@@ -26,6 +26,7 @@ import { mediaReady } from "../utils/imageUtilities";
import handleOptions from "../utils/handleOptions";
import { handleModelName } from "../utils/handleOptions";
import { UV_COORDS } from "./uv_coords";
+import p5Utils from "../utils/p5Utils";
/**
* User provided options object for FaceMesh. See config schema below for default and available values.
@@ -375,10 +376,7 @@ class FaceMesh {
* @param {function} [callback] - A callback to be called when the model is ready.
* @returns {Object} A new faceMesh instance.
*/
-const faceMesh = (...inputs) => {
+export const faceMesh = p5Utils.maybeRegisterPreload((...inputs) => {
const { string, options = {}, callback } = handleArguments(...inputs);
- const instance = new FaceMesh(string, options, callback);
- return instance;
-};
-
-export default faceMesh;
+ return new FaceMesh(string, options, callback);
+});
diff --git a/src/HandPose/index.js b/src/HandPose/index.js
index db445794..6b3c86a4 100644
--- a/src/HandPose/index.js
+++ b/src/HandPose/index.js
@@ -27,6 +27,7 @@ import handleOptions from "../utils/handleOptions";
import { handleModelName } from "../utils/handleOptions";
import { mediaReady } from "../utils/imageUtilities";
import objectRenameKey from "../utils/objectRenameKey";
+import p5Utils from "../utils/p5Utils";
/**
* User provided options object for HandPose. See config schema below for default and available values.
@@ -311,10 +312,7 @@ class HandPose {
* @param {function} [callback] - A callback function that is called once the model has been loaded.
* @returns {HandPose} A new handPose instance.
*/
-const handPose = (...inputs) => {
+export const handPose = p5Utils.maybeRegisterPreload((...inputs) => {
const { string, options = {}, callback } = handleArguments(...inputs);
- const instance = new HandPose(string, options, callback);
- return instance;
-};
-
-export default handPose;
+ return new HandPose(string, options, callback);
+});
diff --git a/src/ImageClassifier/index.js b/src/ImageClassifier/index.js
index ff1c6f62..d06bc089 100644
--- a/src/ImageClassifier/index.js
+++ b/src/ImageClassifier/index.js
@@ -18,6 +18,7 @@ import callCallback from "../utils/callcallback";
import { imgToTensor, mediaReady } from "../utils/imageUtilities";
import handleOptions from "../utils/handleOptions";
import { handleModelName } from "../utils/handleOptions";
+import p5Utils from "../utils/p5Utils";
const IMAGE_SIZE = 224;
const MODEL_OPTIONS = ["mobilenet", "darknet", "darknet-tiny", "doodlenet"];
@@ -248,7 +249,10 @@ class ImageClassifier {
"image",
"No input image provided. If you want to classify a video, use classifyStart."
);
- return callCallback(this.classifyInternal(image, number || this.topk), callback);
+ return callCallback(
+ this.classifyInternal(image, number || this.topk),
+ callback
+ );
}
/**
@@ -270,8 +274,11 @@ class ImageClassifier {
const classifyFrame = async () => {
await mediaReady(image, true);
// call the callback function
- await callCallback(this.classifyInternal(image, number || this.topk), callback);
-
+ await callCallback(
+ this.classifyInternal(image, number || this.topk),
+ callback
+ );
+
// call recursively for continuous classification
if (!this.signalStop) {
requestAnimationFrame(classifyFrame);
@@ -305,13 +312,10 @@ class ImageClassifier {
}
}
-const imageClassifier = (modelName, optionsOrCallback, cb) => {
- const args = handleArguments(modelName, optionsOrCallback, cb);
-
- const { string, options = {}, callback } = args;
-
- const instance = new ImageClassifier(string, options, callback);
- return instance;
-};
-
-export default imageClassifier;
+export const imageClassifier = p5Utils.maybeRegisterPreload(
+ (modelName, optionsOrCallback, cb) => {
+ const args = handleArguments(modelName, optionsOrCallback, cb);
+ const { string, options = {}, callback } = args;
+ return new ImageClassifier(string, options, callback);
+ }
+);
diff --git a/src/NeuralNetwork/index.js b/src/NeuralNetwork/index.js
index fa439452..507a780b 100644
--- a/src/NeuralNetwork/index.js
+++ b/src/NeuralNetwork/index.js
@@ -10,6 +10,7 @@ import NeuralNetworkData from "./NeuralNetworkData";
import nnUtils from "./NeuralNetworkUtils";
import NeuralNetworkVis from "./NeuralNetworkVis";
+import p5Utils from "../utils/p5Utils";
const DEFAULTS = {
inputs: [],
@@ -1249,23 +1250,22 @@ class DiyNeuralNetwork {
}
}
-const neuralNetwork = (inputsOrOptions, outputsOrCallback, callback) => {
- let options;
- let cb;
-
- if (inputsOrOptions instanceof Object) {
- options = inputsOrOptions;
- cb = outputsOrCallback;
- } else {
- options = {
- inputs: inputsOrOptions,
- outputs: outputsOrCallback,
- };
- cb = callback;
- }
+export const neuralNetwork = p5Utils.maybeRegisterPreload(
+ (inputsOrOptions, outputsOrCallback, callback) => {
+ let options;
+ let cb;
- const instance = new DiyNeuralNetwork(options, cb);
- return instance;
-};
+ if (inputsOrOptions instanceof Object) {
+ options = inputsOrOptions;
+ cb = outputsOrCallback;
+ } else {
+ options = {
+ inputs: inputsOrOptions,
+ outputs: outputsOrCallback,
+ };
+ cb = callback;
+ }
-export default neuralNetwork;
+ return new DiyNeuralNetwork(options, cb);
+ }
+);
diff --git a/src/Sentiment/index.js b/src/Sentiment/index.js
index 502a4ba8..42762c0b 100644
--- a/src/Sentiment/index.js
+++ b/src/Sentiment/index.js
@@ -3,6 +3,8 @@ import callCallback from "../utils/callcallback";
import modelLoader from "../utils/modelLoader";
import handleArguments from "../utils/handleArguments";
import { handleModelName } from "../utils/handleOptions";
+import p5Utils from "../utils/p5Utils";
+
/**
* Initializes the Sentiment demo.
*/
@@ -143,10 +145,7 @@ class Sentiment {
}
}
-const sentiment = (...inputs) => {
+export const sentiment = p5Utils.maybeRegisterPreload((...inputs) => {
const { string, callback } = handleArguments(...inputs);
- const instance = new Sentiment(string, callback);
- return instance;
-};
-
-export default sentiment;
+ return new Sentiment(string, callback);
+});
diff --git a/src/SoundClassifier/index.js b/src/SoundClassifier/index.js
index 7cbffd31..bc7af8fa 100644
--- a/src/SoundClassifier/index.js
+++ b/src/SoundClassifier/index.js
@@ -11,6 +11,7 @@ import * as tf from "@tensorflow/tfjs";
import handleArguments from "../utils/handleArguments";
import * as speechCommands from "./speechcommands";
import callCallback from "../utils/callcallback";
+import p5Utils from "../utils/p5Utils";
const MODEL_OPTIONS = ["speechcommands18w"];
// exporting the sound classifier instance so that the stop/start flags regarding classification state are accessible to speechcommands.js to use
@@ -98,7 +99,7 @@ class SoundClassifier {
/**
* Used to stop the continuous classification of a video
*/
- classifyStop() {
+ classifyStop() {
if (this.isClassifying) {
this.signalStop = true;
}
@@ -106,24 +107,23 @@ class SoundClassifier {
}
}
-const soundClassifier = (modelName, optionsOrCallback, cb) => {
- const {
- string,
- options = {},
- callback,
- } = handleArguments(modelName, optionsOrCallback, cb).require(
- "string",
- 'Please specify a model to use. E.g: "SpeechCommands18w"'
- );
-
- let model = string;
+export const soundClassifier = p5Utils.maybeRegisterPreload(
+ (modelName, optionsOrCallback, cb) => {
+ const {
+ string,
+ options = {},
+ callback,
+ } = handleArguments(modelName, optionsOrCallback, cb).require(
+ "string",
+ 'Please specify a model to use. E.g: "SpeechCommands18w"'
+ );
+
+ let model = string;
+
+ if (model.indexOf("http") === -1) {
+ model = model.toLowerCase();
+ }
- if (model.indexOf("http") === -1) {
- model = model.toLowerCase();
+ return new SoundClassifier(model, options, callback);
}
-
- instance = new SoundClassifier(model, options, callback);
- return instance;
-};
-
-export default soundClassifier;
+);
diff --git a/src/index.js b/src/index.js
index 09416c1c..75fc46d6 100644
--- a/src/index.js
+++ b/src/index.js
@@ -1,19 +1,24 @@
-import neuralNetwork from "./NeuralNetwork";
-import handPose from "./HandPose";
-import sentiment from "./Sentiment";
-import faceMesh from "./FaceMesh";
-import bodyPose from "./BodyPose";
-import imageClassifier from "./ImageClassifier";
-import soundClassifier from "./SoundClassifier";
+import { neuralNetwork } from "./NeuralNetwork";
+import { handPose } from "./HandPose";
+import { sentiment } from "./Sentiment";
+import { faceMesh } from "./FaceMesh";
+import { bodyPose } from "./BodyPose";
+import { imageClassifier } from "./ImageClassifier";
+import { soundClassifier } from "./SoundClassifier";
+import { bodySegmentation } from "./BodySegmentation";
import setBackend from "./utils/setBackend";
-import bodySegmentation from "./BodySegmentation";
import communityStatement from "./utils/communityStatement";
import * as tf from "@tensorflow/tfjs";
import * as tfvis from "@tensorflow/tfjs-vis";
import p5Utils from "./utils/p5Utils";
import packageInfo from "../package.json";
-const withPreload = {
+communityStatement();
+
+const version = packageInfo.version;
+const setP5 = p5Utils.setP5.bind(p5Utils);
+
+export {
bodyPose,
bodySegmentation,
faceMesh,
@@ -22,18 +27,10 @@ const withPreload = {
neuralNetwork,
sentiment,
soundClassifier,
-};
-
-const ml5 = Object.assign({ p5Utils }, withPreload, {
+ p5Utils,
tf,
tfvis,
setBackend,
- version: packageInfo.version,
- setP5: p5Utils.setP5.bind(p5Utils),
-});
-
-p5Utils.shouldPreload(ml5, Object.keys(withPreload));
-
-communityStatement();
-
-export default ml5;
+ version,
+ setP5,
+};
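For orientation, the reworked src/index.js above re-exports each model factory by name instead of assembling a single default-export object. A minimal consumer sketch, assuming the published package name is "ml5" and it is consumed as an ES module (the same names land on the `ml5` UMD global in the browser); this is hypothetical usage, not part of the diff:

// Hypothetical consumer code, not part of this diff.
import { imageClassifier, setBackend, version } from "ml5";

console.log("ml5 version:", version);
setBackend("webgl"); // same helper previously attached to the default export

// The factory returns an instance immediately; the callback fires once the model has loaded.
const classifier = imageClassifier("MobileNet", () => {
  console.log("MobileNet ready");
});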
diff --git a/src/utils/p5Utils.js b/src/utils/p5Utils.js
index fceed4e0..6a03539d 100644
--- a/src/utils/p5Utils.js
+++ b/src/utils/p5Utils.js
@@ -22,6 +22,7 @@ class P5Util {
* @type {boolean}
*/
this.didSetupPreload = false;
+
/**
* The `p5` variable, which can be instantiated via `new p5()` and has a `.prototype` property.
* In browser environments this is `window.p5`.
@@ -62,7 +63,6 @@ class P5Util {
if (isP5Constructor(source.p5)) {
this.p5Constructor = source.p5;
- this.registerPreloads();
}
if (isP5Extensions(source)) {
this.p5Extensions = source;
@@ -88,7 +88,6 @@ class P5Util {
if (isP5Constructor(p5)) {
this.p5Constructor = p5;
this.p5Extensions = p5.prototype;
- this.registerPreloads();
} else {
console.warn("Invalid p5 object provided to ml5.setP5().");
}
@@ -96,70 +95,42 @@ class P5Util {
/**
* @internal
- * Pass in the ml5 methods which require p5 preload behavior.
- * Preload functions must return an object with a property `ready` which is a `Promise`.
- * Preloading will be set up immediately if p5 is available on the window.
- * Store the references in case p5 is added later.
- *
- * @param {*} ml5Library - the `ml5` variable.
- * @param {Array} methodNames - an array of ml5 functions to preload.
+ * Register p5 preload behavior for a model factory. If p5 is not loaded or no sketch has been
+ * created, the factory is returned as is. Otherwise, it is wrapped so that calling it increments
+ * the p5 preload counter and decrements it once the model is ready, allowing the model to be
+ * created inside the p5 preload() function.
+ * @param {Function} model - The model factory function to wrap.
+ * @returns {Function} The wrapped factory function.
*/
- shouldPreload(ml5Library, methodNames) {
- this.methodsToPreload = methodNames;
- this.ml5Library = ml5Library;
- if (this.checkP5()) {
- this.registerPreloads();
- }
- }
-
- /**
- * @private
- * Execute the p5 preload setup using the stored references, provided by shouldPreload().
- * Won't do anything if `shouldPreload()` has not been called or if p5 is not found.
- */
- registerPreloads() {
- if (this.didSetupPreload) return;
+ maybeRegisterPreload(model) {
const p5 = this.p5Constructor;
- const ml5 = this.ml5Library;
- const preloadMethods = this.methodsToPreload;
- if (!p5 || !ml5) return;
-
- // Must shallow copy so that it doesn't reference the replaced method.
- const original = { ...ml5 };
- // Must alias `this` so that it can be used inside functions with their own `this` context.
- const self = this;
-
- // Function to be called when a sketch is created, either in global or instance mode.
- p5.prototype.ml5Init = function () {
- // Bind to this specific p5 instance.
- const increment = this._incrementPreload.bind(this);
- const decrement = this._decrementPreload.bind(this);
- // Replace each preloaded on the ml5 object with a wrapped version which
- // increments and decrements the p5 preload counter when called.
- preloadMethods.forEach((method) => {
- ml5[method] = function (...args) {
- increment();
- const result = original[method](...args);
- result.ready.then(() => {
- decrement();
- });
- return result;
- };
- });
- self.didSetupPreload = true;
- };
+ if (!p5) return model;
+
+ let sketchCreated = false;
+ let p5Instance;
+ let loaded = false;
+
+ // Function to be called when a sketch is created.
+ p5.prototype.registerMethod("init", function () {
+ p5Instance = this;
+ sketchCreated = true;
+ });
// Function to be called when a sketch is destroyed.
- p5.prototype.ml5Remove = function () {
- // Resets each ml5 method back to its original version.
- preloadMethods.forEach((method) => {
- ml5[method] = original[method];
- });
- self.didSetupPreload = false;
- };
+ p5.prototype.registerMethod("remove", function () {
+ sketchCreated = false;
+ p5Instance = undefined;
+ });
- p5.prototype.registerMethod("init", p5.prototype.ml5Init);
- p5.prototype.registerMethod("remove", p5.prototype.ml5Remove);
+ return function (...args) {
+ if (!sketchCreated || loaded) return model(...args);
+ loaded = true;
+
+ p5Instance._incrementPreload();
+ const result = model(...args);
+ result.ready.then(() => p5Instance._decrementPreload());
+ return result;
+ };
}
/**
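To make the new wrapper concrete, here is a minimal p5 sketch, assuming the UMD bundle is on the page and using the MoveNet model name from the BodyPose docs above. Because the factory returned by maybeRegisterPreload increments the preload counter and decrements it when the instance's `ready` promise resolves, the model can be created inside preload() and setup() waits for it. Hypothetical sketch, not part of the diff:

// Hypothetical p5 sketch, not part of this diff.
let bodyPose;

function preload() {
  // The wrapped factory calls _incrementPreload() here and
  // _decrementPreload() once bodyPose.ready resolves.
  bodyPose = ml5.bodyPose("MoveNet");
}

function setup() {
  createCanvas(640, 480); // only runs after the model has finished loading
}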
diff --git a/webpack.config.js b/webpack.config.js
index ff90e03d..574304b9 100644
--- a/webpack.config.js
+++ b/webpack.config.js
@@ -2,6 +2,8 @@ const { resolve } = require("path");
const HtmlWebpackPlugin = require("html-webpack-plugin");
const { merge } = require("webpack-merge");
const TerserPlugin = require("terser-webpack-plugin");
+const BundleAnalyzerPlugin =
+ require("webpack-bundle-analyzer").BundleAnalyzerPlugin;
const commonConfig = {
context: __dirname,
@@ -12,16 +14,31 @@ const commonConfig = {
library: {
name: "ml5",
type: "umd",
- export: "default",
},
},
};
const developmentConfig = {
mode: "development",
+ entry: {
+ ml5: "./src/index.js",
+ "ml5-body-pose": "./src/BodyPose/index.js",
+ "ml5-body-segmentation": "./src/BodySegmentation/index.js",
+ "ml5-face-mesh": "./src/FaceMesh/index.js",
+ "ml5-hand-pose": "./src/HandPose/index.js",
+ "ml5-image-classifier": "./src/ImageClassifier/index.js",
+ "ml5-neural-network": "./src/NeuralNetwork/index.js",
+ "ml5-sentiment": "./src/Sentiment/index.js",
+ "ml5-sound-classifier": "./src/SoundClassifier/index.js",
+ },
devtool: "inline-source-map",
output: {
publicPath: "/dist/",
+ filename: "[name].js",
+ library: {
+ name: ["ml5"],
+ type: "umd",
+ },
},
devServer: {
port: 8080,
@@ -44,13 +61,14 @@ const developmentConfig = {
new HtmlWebpackPlugin({
title: "ml5",
}),
+ ...(process.env.ANALYZE ? [new BundleAnalyzerPlugin()] : []),
],
resolve: {
fallback: {
fs: false,
- util: false
+ util: false,
},
- }
+ },
};
const productionConfig = {
@@ -58,18 +76,38 @@ const productionConfig = {
entry: {
ml5: "./src/index.js",
"ml5.min": "./src/index.js",
+ "ml5-body-pose": "./src/BodyPose/index.js",
+ "ml5-body-pose.min": "./src/BodyPose/index.js",
+ "ml5-body-segmentation": "./src/BodySegmentation/index.js",
+ "ml5-body-segmentation.min": "./src/BodySegmentation/index.js",
+ "ml5-face-mesh": "./src/FaceMesh/index.js",
+ "ml5-face-mesh.min": "./src/FaceMesh/index.js",
+ "ml5-hand-pose": "./src/HandPose/index.js",
+ "ml5-hand-pose.min": "./src/HandPose/index.js",
+ "ml5-image-classifier": "./src/ImageClassifier/index.js",
+ "ml5-image-classifier.min": "./src/ImageClassifier/index.js",
+ "ml5-neural-network": "./src/NeuralNetwork/index.js",
+ "ml5-neural-network.min": "./src/NeuralNetwork/index.js",
+ "ml5-sentiment": "./src/Sentiment/index.js",
+ "ml5-sentiment.min": "./src/Sentiment/index.js",
+ "ml5-sound-classifier": "./src/SoundClassifier/index.js",
+ "ml5-sound-classifier.min": "./src/SoundClassifier/index.js",
},
devtool: "source-map",
output: {
publicPath: "/",
filename: "[name].js",
+ library: {
+ name: ["ml5"],
+ type: "umd",
+ },
},
optimization: {
minimize: true,
minimizer: [
new TerserPlugin({
- include: "ml5.min.js",
- exclude: "ml5.js",
+ include: /\.min\.js$/,
+ exclude: /^(?!.*\.min\.js$).*\.js$/,
extractComments: false,
}),
],
@@ -77,9 +115,10 @@ const productionConfig = {
resolve: {
fallback: {
fs: false,
- util: false
+ util: false,
},
- }
+ },
+ plugins: [...(process.env.ANALYZE ? [new BundleAnalyzerPlugin()] : [])],
};
module.exports = function (env, args) {
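Each of the new per-model entries builds a standalone UMD bundle that still registers under the `ml5` name, so a page can load only the model it needs. A rough usage sketch, assuming dist/ml5-body-pose.js has been added to the page with a script tag; hypothetical, not part of the diff:

// Hypothetical usage, not part of this diff.
// Only the BodyPose export exists on this bundle's `ml5` global.
const bodyPose = ml5.bodyPose("MoveNet", () => console.log("bodyPose ready"));
console.log(typeof ml5.faceMesh); // "undefined"; faceMesh ships in ml5-face-mesh.js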
diff --git a/yarn.lock b/yarn.lock
index 783da7b4..68faad75 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1305,7 +1305,7 @@ __metadata:
languageName: node
linkType: hard
-"@discoveryjs/json-ext@npm:^0.5.0":
+"@discoveryjs/json-ext@npm:0.5.7, @discoveryjs/json-ext@npm:^0.5.0":
version: 0.5.7
resolution: "@discoveryjs/json-ext@npm:0.5.7"
checksum: 10c0/e10f1b02b78e4812646ddf289b7d9f2cb567d336c363b266bd50cd223cf3de7c2c74018d91cd2613041568397ef3a4a2b500aba588c6e5bd78c38374ba68f38c
@@ -1692,6 +1692,13 @@ __metadata:
languageName: node
linkType: hard
+"@polka/url@npm:^1.0.0-next.24":
+ version: 1.0.0-next.29
+ resolution: "@polka/url@npm:1.0.0-next.29"
+ checksum: 10c0/0d58e081844095cb029d3c19a659bfefd09d5d51a2f791bc61eba7ea826f13d6ee204a8a448c2f5a855c17df07b37517373ff916dd05801063c0568ae9937684
+ languageName: node
+ linkType: hard
+
"@sinclair/typebox@npm:^0.27.8":
version: 0.27.8
resolution: "@sinclair/typebox@npm:0.27.8"
@@ -2579,7 +2586,7 @@ __metadata:
languageName: node
linkType: hard
-"acorn-walk@npm:^8.0.2":
+"acorn-walk@npm:^8.0.0, acorn-walk@npm:^8.0.2":
version: 8.3.4
resolution: "acorn-walk@npm:8.3.4"
dependencies:
@@ -2588,6 +2595,15 @@ __metadata:
languageName: node
linkType: hard
+"acorn@npm:^8.0.4":
+ version: 8.15.0
+ resolution: "acorn@npm:8.15.0"
+ bin:
+ acorn: bin/acorn
+ checksum: 10c0/dec73ff59b7d6628a01eebaece7f2bdb8bb62b9b5926dcad0f8931f2b8b79c2be21f6c68ac095592adb5adb15831a3635d9343e6a91d028bbe85d564875ec3ec
+ languageName: node
+ linkType: hard
+
"acorn@npm:^8.1.0, acorn@npm:^8.11.0, acorn@npm:^8.14.0, acorn@npm:^8.8.1, acorn@npm:^8.8.2":
version: 8.14.0
resolution: "acorn@npm:8.14.0"
@@ -3364,7 +3380,7 @@ __metadata:
languageName: node
linkType: hard
-"commander@npm:7":
+"commander@npm:7, commander@npm:^7.2.0":
version: 7.2.0
resolution: "commander@npm:7.2.0"
checksum: 10c0/8d690ff13b0356df7e0ebbe6c59b4712f754f4b724d4f473d3cc5b3fdcf978e3a5dc3078717858a2ceb50b0f84d0660a7f22a96cdc50fb877d0c9bb31593d23a
@@ -3916,6 +3932,13 @@ __metadata:
languageName: node
linkType: hard
+"debounce@npm:^1.2.1":
+ version: 1.2.1
+ resolution: "debounce@npm:1.2.1"
+ checksum: 10c0/6c9320aa0973fc42050814621a7a8a78146c1975799b5b3cc1becf1f77ba9a5aa583987884230da0842a03f385def452fad5d60db97c3d1c8b824e38a8edf500
+ languageName: node
+ linkType: hard
+
"debug@npm:2.6.9":
version: 2.6.9
resolution: "debug@npm:2.6.9"
@@ -4142,6 +4165,13 @@ __metadata:
languageName: node
linkType: hard
+"duplexer@npm:^0.1.2":
+ version: 0.1.2
+ resolution: "duplexer@npm:0.1.2"
+ checksum: 10c0/c57bcd4bdf7e623abab2df43a7b5b23d18152154529d166c1e0da6bee341d84c432d157d7e97b32fecb1bf3a8b8857dd85ed81a915789f550637ed25b8e64fc2
+ languageName: node
+ linkType: hard
+
"eastasianwidth@npm:^0.2.0":
version: 0.2.0
resolution: "eastasianwidth@npm:0.2.0"
@@ -4314,6 +4344,13 @@ __metadata:
languageName: node
linkType: hard
+"escape-string-regexp@npm:^4.0.0":
+ version: 4.0.0
+ resolution: "escape-string-regexp@npm:4.0.0"
+ checksum: 10c0/9497d4dd307d845bd7f75180d8188bb17ea8c151c1edbf6b6717c100e104d629dc2dfb687686181b0f4b7d732c7dfdc4d5e7a8ff72de1b0ca283a75bbb3a9cd9
+ languageName: node
+ linkType: hard
+
"escodegen@npm:^2.0.0":
version: 2.1.0
resolution: "escodegen@npm:2.1.0"
@@ -4864,6 +4901,15 @@ __metadata:
languageName: node
linkType: hard
+"gzip-size@npm:^6.0.0":
+ version: 6.0.0
+ resolution: "gzip-size@npm:6.0.0"
+ dependencies:
+ duplexer: "npm:^0.1.2"
+ checksum: 10c0/4ccb924626c82125897a997d1c84f2377846a6ef57fbee38f7c0e6b41387fba4d00422274440747b58008b5d60114bac2349c2908e9aba55188345281af40a3f
+ languageName: node
+ linkType: hard
+
"handle-thing@npm:^2.0.0":
version: 2.0.1
resolution: "handle-thing@npm:2.0.1"
@@ -4947,7 +4993,7 @@ __metadata:
languageName: node
linkType: hard
-"html-escaper@npm:^2.0.0":
+"html-escaper@npm:^2.0.0, html-escaper@npm:^2.0.2":
version: 2.0.2
resolution: "html-escaper@npm:2.0.2"
checksum: 10c0/208e8a12de1a6569edbb14544f4567e6ce8ecc30b9394fcaa4e7bb1e60c12a7c9a1ed27e31290817157e8626f3a4f29e76c8747030822eb84a6abb15c255f0a0
@@ -6560,12 +6606,20 @@ __metadata:
rimraf: "npm:^5.0.5"
terser-webpack-plugin: "npm:^5.3.10"
webpack: "npm:^5.76.1"
+ webpack-bundle-analyzer: "npm:^4.10.2"
webpack-cli: "npm:^5.0.1"
webpack-dev-server: "npm:^4.15.1"
webpack-merge: "npm:^5.9.0"
languageName: unknown
linkType: soft
+"mrmime@npm:^2.0.0":
+ version: 2.0.1
+ resolution: "mrmime@npm:2.0.1"
+ checksum: 10c0/af05afd95af202fdd620422f976ad67dc18e6ee29beb03dd1ce950ea6ef664de378e44197246df4c7cdd73d47f2e7143a6e26e473084b9e4aa2095c0ad1e1761
+ languageName: node
+ linkType: hard
+
"ms@npm:2.0.0":
version: 2.0.0
resolution: "ms@npm:2.0.0"
@@ -6842,6 +6896,15 @@ __metadata:
languageName: node
linkType: hard
+"opener@npm:^1.5.2":
+ version: 1.5.2
+ resolution: "opener@npm:1.5.2"
+ bin:
+ opener: bin/opener-bin.js
+ checksum: 10c0/dd56256ab0cf796585617bc28e06e058adf09211781e70b264c76a1dbe16e90f868c974e5bf5309c93469157c7d14b89c35dc53fe7293b0e40b4d2f92073bc79
+ languageName: node
+ linkType: hard
+
"os-tmpdir@npm:~1.0.2":
version: 1.0.2
resolution: "os-tmpdir@npm:1.0.2"
@@ -7813,6 +7876,17 @@ __metadata:
languageName: node
linkType: hard
+"sirv@npm:^2.0.3":
+ version: 2.0.4
+ resolution: "sirv@npm:2.0.4"
+ dependencies:
+ "@polka/url": "npm:^1.0.0-next.24"
+ mrmime: "npm:^2.0.0"
+ totalist: "npm:^3.0.0"
+ checksum: 10c0/68f8ee857f6a9415e9c07a1f31c7c561df8d5f1b1ba79bee3de583fa37da8718def5309f6b1c6e2c3ef77de45d74f5e49efc7959214443aa92d42e9c99180a4e
+ languageName: node
+ linkType: hard
+
"sisteransi@npm:^1.0.5":
version: 1.0.5
resolution: "sisteransi@npm:1.0.5"
@@ -8221,6 +8295,13 @@ __metadata:
languageName: node
linkType: hard
+"totalist@npm:^3.0.0":
+ version: 3.0.1
+ resolution: "totalist@npm:3.0.1"
+ checksum: 10c0/4bb1fadb69c3edbef91c73ebef9d25b33bbf69afe1e37ce544d5f7d13854cda15e47132f3e0dc4cafe300ddb8578c77c50a65004d8b6e97e77934a69aa924863
+ languageName: node
+ linkType: hard
+
"tough-cookie@npm:^4.1.2":
version: 4.1.4
resolution: "tough-cookie@npm:4.1.4"
@@ -9114,6 +9195,28 @@ __metadata:
languageName: node
linkType: hard
+"webpack-bundle-analyzer@npm:^4.10.2":
+ version: 4.10.2
+ resolution: "webpack-bundle-analyzer@npm:4.10.2"
+ dependencies:
+ "@discoveryjs/json-ext": "npm:0.5.7"
+ acorn: "npm:^8.0.4"
+ acorn-walk: "npm:^8.0.0"
+ commander: "npm:^7.2.0"
+ debounce: "npm:^1.2.1"
+ escape-string-regexp: "npm:^4.0.0"
+ gzip-size: "npm:^6.0.0"
+ html-escaper: "npm:^2.0.2"
+ opener: "npm:^1.5.2"
+ picocolors: "npm:^1.0.0"
+ sirv: "npm:^2.0.3"
+ ws: "npm:^7.3.1"
+ bin:
+ webpack-bundle-analyzer: lib/bin/analyzer.js
+ checksum: 10c0/00603040e244ead15b2d92981f0559fa14216381349412a30070a7358eb3994cd61a8221d34a3b3fb8202dc3d1c5ee1fbbe94c5c52da536e5b410aa1cf279a48
+ languageName: node
+ linkType: hard
+
"webpack-cli@npm:^5.0.1":
version: 5.1.4
resolution: "webpack-cli@npm:5.1.4"
@@ -9409,6 +9512,21 @@ __metadata:
languageName: node
linkType: hard
+"ws@npm:^7.3.1":
+ version: 7.5.10
+ resolution: "ws@npm:7.5.10"
+ peerDependencies:
+ bufferutil: ^4.0.1
+ utf-8-validate: ^5.0.2
+ peerDependenciesMeta:
+ bufferutil:
+ optional: true
+ utf-8-validate:
+ optional: true
+ checksum: 10c0/bd7d5f4aaf04fae7960c23dcb6c6375d525e00f795dd20b9385902bd008c40a94d3db3ce97d878acc7573df852056ca546328b27b39f47609f80fb22a0a9b61d
+ languageName: node
+ linkType: hard
+
"ws@npm:^8.11.0, ws@npm:^8.13.0":
version: 8.18.0
resolution: "ws@npm:8.18.0"