This repository was archived by the owner on Apr 4, 2023. It is now read-only.

Commit b006682

Add support for ML Kit Object Detection and Tracking #1274
1 parent: 72a40f0

3 files changed (+6 -9 lines)

demo-ng/app/tabs/mlkit/objectdetection/objectdetection.component.html

Lines changed: 1 addition & 0 deletions
@@ -8,6 +8,7 @@
         height="100%"
         classify="true"
         multiple="false"
+        processEveryNthFrame="20"
         [torchOn]="torchOn"
         (scanResult)="onObjectDetectionResult($event)">
     </MLKitObjectDetection>
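
For readers of the demo markup above, a minimal sketch (not part of this commit) of the Angular component that could sit behind it; the handler name onObjectDetectionResult and the torchOn binding come from the template, while the selector, template path, and event payload shape are assumptions.

// Hypothetical demo component sketch; only the handler name and the torchOn
// binding are taken from the template above, everything else is assumed.
import { Component } from "@angular/core";

@Component({
  selector: "ObjectDetection",                      // assumed selector
  templateUrl: "./objectdetection.component.html"
})
export class ObjectDetectionComponent {
  torchOn: boolean = false;

  onObjectDetectionResult(event: any): void {
    // The exact payload shape is not shown in this commit; log it to inspect.
    console.log("Object detection result: " + JSON.stringify(event.value));
  }
}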

src/mlkit/mlkit-cameraview.android.ts

Lines changed: 0 additions & 4 deletions
@@ -143,9 +143,6 @@ export abstract class MLKitCameraView extends MLKitCameraViewBase {
     const pictureSize = sizePair.pictureSize;
     const previewSize = sizePair.previewSize;

-    console.log("sizePair.pictureSize: " + pictureSize.width + "x" + pictureSize.height);
-    console.log("sizePair.previewSize: " + previewSize.width + "x" + previewSize.height);
-
     const parameters = this.camera.getParameters();

     if (pictureSize) {

@@ -203,7 +200,6 @@ export abstract class MLKitCameraView extends MLKitCameraViewBase {
     }

     let data = this.pendingFrameData;
-    // pendingFrameData = null;

     if (this.detector.processImage) {
       this.lastVisionImage = com.google.firebase.ml.vision.common.FirebaseVisionImage.fromByteBuffer(data, metadata);
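
The second hunk ends at FirebaseVisionImage.fromByteBuffer(data, metadata); for context, a sketch of how that metadata object is typically built with the legacy Firebase ML Kit Builder API. The previewSize fields and the rotation constant used here are placeholders, not taken from this file.

// Sketch of building the metadata argument for fromByteBuffer (standard
// legacy ML Kit Builder API); previewSize and the rotation value are placeholders.
const metadata = new com.google.firebase.ml.vision.common.FirebaseVisionImageMetadata.Builder()
    .setFormat(com.google.firebase.ml.vision.common.FirebaseVisionImageMetadata.IMAGE_FORMAT_NV21)
    .setWidth(previewSize.width)
    .setHeight(previewSize.height)
    .setRotation(com.google.firebase.ml.vision.common.FirebaseVisionImageMetadata.ROTATION_0)
    .build();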

src/mlkit/objectdetection/index.android.ts

Lines changed: 5 additions & 5 deletions
@@ -8,13 +8,13 @@ declare const com: any;
 export class MLKitObjectDetection extends MLKitObjectDetectionBase {

   protected createDetector(): any {
-    return getDetector(true, this.classify, this.multiple);
+    return getDetector(this.classify, this.multiple);
   }

   protected createSuccessListener(): any {
     return new com.google.android.gms.tasks.OnSuccessListener({
       onSuccess: objects => {
-        console.log(">> onSuccess @ " + new Date().getTime() + ", objects: " + objects);
+        console.log(">> onSuccess @ " + new Date().getTime() + ", objects: " + objects.size());

         if (objects.size() === 0) return;

@@ -37,9 +37,9 @@ export class MLKitObjectDetection extends MLKitObjectDetectionBase {
   }
 }

-function getDetector(stream: boolean, classify: boolean, multiple: boolean): com.google.firebase.ml.vision.objects.FirebaseVisionObjectDetector {
+function getDetector(classify: boolean, multiple: boolean): com.google.firebase.ml.vision.objects.FirebaseVisionObjectDetector {
   const builder = new com.google.firebase.ml.vision.objects.FirebaseVisionObjectDetectorOptions.Builder()
-    .setDetectorMode(stream ? com.google.firebase.ml.vision.objects.FirebaseVisionObjectDetectorOptions.STREAM_MODE : com.google.firebase.ml.vision.objects.FirebaseVisionObjectDetectorOptions.SINGLE_IMAGE_MODE);
+    .setDetectorMode(com.google.firebase.ml.vision.objects.FirebaseVisionObjectDetectorOptions.SINGLE_IMAGE_MODE);

   if (classify) {
     builder.enableClassification();

@@ -55,7 +55,7 @@ function getDetector(stream: boolean, classify: boolean, multiple: boolean): com
 export function detectObjects(options: MLKitObjectDetectionOptions): Promise<MLKitObjectDetectionResult> {
   return new Promise((resolve, reject) => {
     try {
-      const firebaseObjectDetector = getDetector(false, options.classify, options.multiple);
+      const firebaseObjectDetector = getDetector(options.classify, options.multiple);

       const onSuccessListener = new com.google.android.gms.tasks.OnSuccessListener({
         onSuccess: objects => {
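
For completeness, a hedged sketch of calling the still-image detectObjects export touched in the last hunk; the classify and multiple options are visible in the diff, while the import path, the image option, and the resolved result shape are assumptions about MLKitObjectDetectionOptions and MLKitObjectDetectionResult.

// Hypothetical app-side usage of the still-image API (not part of this commit).
import { detectObjects } from "nativescript-plugin-firebase/mlkit/objectdetection";

declare const someImageSource: any; // placeholder for an image obtained elsewhere

detectObjects({
  image: someImageSource, // assumption: an image option exists on MLKitObjectDetectionOptions
  classify: true,         // forwarded to getDetector() -> builder.enableClassification()
  multiple: true          // forwarded to getDetector() as the 'multiple' flag
}).then(
  result => console.log("Detected objects: " + JSON.stringify(result)),
  error => console.log("Object detection error: " + error)
);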
