Skip to content

Commit 282fc6a

Browse files
committed
refactor and add comments
1 parent 4afabac commit 282fc6a

File tree

2 files changed

+15
-16
lines changed

2 files changed

+15
-16
lines changed

examples/Handpose/script.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ function setup() {
88
video.size(width, height);
99

1010
const options = { maxHands: 2 };
11-
handpose = ml5.handpose(video, options, modelReady);
11+
handpose = ml5.handpose(video, options);
1212

1313
// This sets up an event that fills the global variable "predictions"
1414
// with an array every time new hand poses are detected

src/Handpose/index.js

Lines changed: 14 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55

66
/*
77
* Handpose: Palm detector and hand-skeleton finger tracking in the browser
8-
* Ported and integrated from all the hard work by: https://github.com/tensorflow/tfjs-models/tree/master/handpose
8+
* Ported and integrated from all the hard work by: https://github.com/tensorflow/tfjs-models/tree/master/hand-pose-detection
99
*/
1010
import * as tf from "@tensorflow/tfjs";
1111
import * as handPoseDetection from "@tensorflow-models/hand-pose-detection";
@@ -18,16 +18,13 @@ class Handpose extends EventEmitter {
1818
/**
1919
* Create Handpose.
2020
* @param {HTMLVideoElement} [video] - An HTMLVideoElement.
21-
* @param {object} [options] - An object with options.
21+
* @param {Object} [options] - An object with options.
2222
* @param {function} [callback] - A callback to be called when the model is ready.
2323
*/
2424
constructor(video, options, callback) {
2525
super();
2626

2727
this.video = video;
28-
/**
29-
* @type {null|handposeCore.HandPose}
30-
*/
3128
this.model = null;
3229
this.modelReady = false;
3330
this.config = options;
@@ -40,17 +37,15 @@ class Handpose extends EventEmitter {
4037
* @return {this} the Handpose model.
4138
*/
4239
async loadModel() {
43-
const mediaPipeHands = handPoseDetection.SupportedModels.MediaPipeHands;
40+
const pipeline = handPoseDetection.SupportedModels.MediaPipeHands;
4441
const modelConfig = {
42+
runtime: "mediapipe", // use MediaPipe runtime by default
43+
solutionPath: "https://cdn.jsdelivr.net/npm/@mediapipe/hands", // fetch model from mediapipe server
4544
...this.config,
46-
runtime: "mediapipe",
47-
solutionPath: "https://cdn.jsdelivr.net/npm/@mediapipe/hands",
4845
};
4946

50-
this.model = await handPoseDetection.createDetector(
51-
mediaPipeHands,
52-
modelConfig
53-
);
47+
this.model = await handPoseDetection.createDetector(pipeline, modelConfig);
48+
5449
this.modelReady = true;
5550

5651
if (this.video) {
@@ -61,6 +56,8 @@ class Handpose extends EventEmitter {
6156
}
6257

6358
/**
59+
* @param {*} [inputOr] - An HTML or p5.js image, video, or canvas element to run the prediction on.
60+
* @param {function} [cb] - A callback function to handle the predictions.
6461
* @return {Promise<handposeCore.AnnotatedPrediction[]>} an array of predictions.
6562
*/
6663
async predict(inputOr, cb) {
@@ -72,9 +69,7 @@ class Handpose extends EventEmitter {
7269
const { flipHorizontal } = this.config;
7370
const predictions = await this.model.estimateHands(image, flipHorizontal);
7471
const result = predictions;
75-
// Soon, we will remove the 'predict' event and prefer the 'hand' event. During
76-
// the interim period, we will both events.
77-
this.emit("predict", result);
72+
7873
this.emit("hand", result);
7974

8075
if (this.video) {
@@ -89,6 +84,10 @@ class Handpose extends EventEmitter {
8984
}
9085
}
9186

87+
/**
88+
* Exposes the Handpose class through a factory function.
89+
* @returns {Object|Promise<Boolean>} A new handpose instance
90+
*/
9291
const handpose = (...inputs) => {
9392
const { video, options = {}, callback } = handleArguments(...inputs);
9493
const instance = new Handpose(video, options, callback);

0 commit comments

Comments
 (0)