
Commit ae354c9

AutoML: Add support for MLKit's AutoML #1500
1 parent d31c10a commit ae354c9

12 files changed: 85 additions, 11 deletions


.travis.yml

Lines changed: 2 additions & 2 deletions

@@ -42,8 +42,8 @@ android:
   - sys-img-armeabi-v7a-android-21

 before_install:
-  # - sudo pip install --upgrade pip
-  # - sudo pip install six
+  - sudo pip install --upgrade pip
+  - sudo pip install six

 install:
   - echo no | npm install -g nativescript
Lines changed: 2 additions & 0 deletions

@@ -0,0 +1,2 @@
+Left
+Right
Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+{
+  "modelFile": "model.tflite",
+  "labelsFile": "dict.txt",
+  "modelType": "IMAGE_LABELING"
+}
Binary file not shown.
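
The two new text files above, together with the unshown binary model, make up the demo's local AutoML model bundle referenced as "leftright" elsewhere in this commit: a labels file with one label per line and a JSON manifest pointing ML Kit at the model and label files. As a rough, purely illustrative sketch, the manifest fields map onto a shape like the following (the interface name is not part of the plugin):

```typescript
// Illustrative only: approximate shape of the local AutoML manifest added above.
// Field names are taken from the manifest hunk; this interface is not defined by the plugin.
interface AutoMLLocalModelManifest {
  modelFile: string;            // bundled TensorFlow Lite model, e.g. "model.tflite"
  labelsFile: string;           // plain-text labels, one per line, e.g. "dict.txt" ("Left", "Right")
  modelType: "IMAGE_LABELING";  // AutoML Vision Edge image labeling model
}
```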

demo-ng/app/tabs/mlkit/automl/automl.component.html

Lines changed: 1 addition & 0 deletions

@@ -8,6 +8,7 @@
     width="100%"
     height="100%"
     opacity="0.8"
+    localModelResourceFolder="leftright"
     processEveryNthFrame="60"
     confidenceThreshold="0.4"
     (scanResult)="onAutoMLResult($event)">
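
The template change above wires the scanResult event to onAutoMLResult, whose implementation is not part of this diff. A minimal sketch of such a handler, assuming the event's value carries the MLKitAutoMLResult and that its labels expose text and confidence like the plugin's other labeling results, could look like:

```typescript
import { MLKitAutoMLResult } from "nativescript-plugin-firebase/mlkit/automl";

export class AutoMLComponent {

  // Hypothetical handler for the (scanResult) binding above; the payload shape
  // (event.value, label.text, label.confidence) is an assumption, not taken from this commit.
  onAutoMLResult(event: any): void {
    const result: MLKitAutoMLResult = event.value;
    result.labels.forEach(label =>
        console.log(`label: ${label.text}, confidence: ${label.confidence}`));
  }
}
```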

demo-ng/app/tabs/mlkit/mlkit.component.ts

Lines changed: 1 addition & 0 deletions

@@ -459,6 +459,7 @@ export class MLKitComponent {
   private autoML(imageSource: ImageSource): void {
     firebase.mlkit.automl.labelImage({
       image: imageSource,
+      localModelResourceFolder: "leftright",
       confidenceThreshold: 0.3
     }).then(
         (result: MLKitAutoMLResult) => {
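
For context, the options object passed to firebase.mlkit.automl.labelImage here roughly matches the shape below. This is inferred from the calls in this commit (and the 0.5 fallback applied in index.android.ts), not copied from the plugin's typings:

```typescript
import { ImageSource } from "tns-core-modules/image-source";

// Approximate options shape, inferred from this commit's usage; the real
// MLKitAutoMLOptions interface lives in the plugin's mlkit/automl typings.
interface MLKitAutoMLOptionsSketch {
  image: ImageSource;
  localModelResourceFolder: string; // folder holding manifest.json, model.tflite and dict.txt
  confidenceThreshold?: number;     // index.android.ts falls back to 0.5 when omitted
}
```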

docs/ML_KIT.md

Lines changed: 41 additions & 0 deletions

@@ -90,6 +90,7 @@ To be able to use Cloud features you need to do two things:
 |[Natural language identification](#natural-language-identification)|✅|
 |[Translate text](#translate-text)|✅|
 |[Smart reply](#smart-reply)|✅|
+|[AutoML Vision Edge](#automl-vision-edge)|✅|✅
 |[Custom model inference](#custom-model-inference)|✅|✅

 ### Text recognition
@@ -513,6 +514,46 @@ firebase.mlkit.smartreply.suggestReplies({
   .catch(errorMessage => console.log("ML Kit error: " + errorMessage));
 ```

+### AutoML Vision Edge
+<img src="https://raw.githubusercontent.com/EddyVerbruggen/nativescript-plugin-firebase/master/docs/images/features/mlkit_automl.png" height="153px" alt="ML Kit - AutoML Vision Edge"/>
+
+[Firebase documentation 🌎](https://firebase.google.com/docs/ml-kit/automl-image-labeling)
+
+> NOTE: currently only local models are supported (not cloud models), but it's fairly easy to add those, so open an issue if you need it. See the demo-ng folder for an example.
+
+#### Still image (on-device)
+
+```typescript
+import { MLKitAutoMLResult } from "nativescript-plugin-firebase/mlkit/automl";
+const firebase = require("nativescript-plugin-firebase");
+
+firebase.mlkit.automl.labelImage({
+  localModelResourceFolder: "leftright",
+  image: imageSource,
+  confidenceThreshold: 0.6 // only return labels with at least 0.6 (60%) confidence; the default is 0.5
+})
+.then((result: MLKitAutoMLResult) => console.log(JSON.stringify(result.labels)))
+.catch(errorMessage => console.log("ML Kit error: " + errorMessage));
+```
+
+#### Live camera feed
+The basics are explained above for 'Text recognition', so we're only showing the differences here.
+
+```typescript
+import { registerElement } from "nativescript-angular/element-registry";
+registerElement("MLKitAutoML", () => require("nativescript-plugin-firebase/mlkit/automl").MLKitAutoML);
+```
+
+```html
+<MLKitAutoML
+  width="260"
+  height="380"
+  localModelResourceFolder="leftright"
+  confidenceThreshold="0.6"
+  (scanResult)="onAutoMLResult($event)">
+</MLKitAutoML>
+```
+
 ### Custom model inference
 <img src="https://raw.githubusercontent.com/EddyVerbruggen/nativescript-plugin-firebase/master/docs/images/features/mlkit_custom_model_tflite.png" height="153px" alt="ML Kit - Custom Model (TensorFlow Lite)"/>
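
The still-image example in the docs above assumes an imageSource variable is already in scope. One way to obtain it in a NativeScript app, sketched here with a placeholder file name that is not part of this commit, is:

```typescript
import { fromFile, ImageSource } from "tns-core-modules/image-source";
import { knownFolders, path } from "tns-core-modules/file-system";
const firebase = require("nativescript-plugin-firebase");

// Load an image bundled with the app; "some-image.jpg" is a placeholder, not part of this commit.
const imagePath: string = path.join(knownFolders.currentApp().path, "images", "some-image.jpg");
const imageSource: ImageSource = fromFile(imagePath);

firebase.mlkit.automl.labelImage({
  localModelResourceFolder: "leftright",
  image: imageSource,
  confidenceThreshold: 0.6
})
.then(result => console.log(JSON.stringify(result.labels)))
.catch(errorMessage => console.log("ML Kit error: " + errorMessage));
```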

src/mlkit/automl/automl-common.ts

Lines changed: 12 additions & 0 deletions

@@ -1,6 +1,11 @@
 import { Property } from "tns-core-modules/ui/core/properties";
 import { MLKitCameraView } from "../mlkit-cameraview";

+export const localModelResourceFolderProperty = new Property<MLKitAutoML, string>({
+  name: "localModelResourceFolder",
+  defaultValue: null,
+});
+
 export const confidenceThresholdProperty = new Property<MLKitAutoML, number>({
   name: "confidenceThreshold",
   defaultValue: 0.5,
@@ -9,11 +14,18 @@ export const confidenceThresholdProperty = new Property<MLKitAutoML, number>({
 export abstract class MLKitAutoML extends MLKitCameraView {
   static scanResultEvent: string = "scanResult";

+  protected localModelResourceFolder: string;
   protected confidenceThreshold: number;

+
+  [localModelResourceFolderProperty.setNative](value: string) {
+    this.localModelResourceFolder = value;
+  }
+
   [confidenceThresholdProperty.setNative](value: any) {
     this.confidenceThreshold = parseFloat(value);
   }
 }

+localModelResourceFolderProperty.register(MLKitAutoML);
 confidenceThresholdProperty.register(MLKitAutoML);

src/mlkit/automl/index.android.ts

Lines changed: 12 additions & 5 deletions

@@ -8,7 +8,7 @@ declare const com: any;
 export class MLKitAutoML extends MLKitAutoMLBase {

   protected createDetector(): any {
-    return getDetector(this.confidenceThreshold);
+    return getDetector(this.localModelResourceFolder, this.confidenceThreshold);
   }

   protected createSuccessListener(): any {
@@ -44,19 +44,26 @@ export class MLKitAutoML extends MLKitAutoMLBase {
   }
 }

-function getDetector(confidenceThreshold: number): com.google.firebase.ml.vision.label.FirebaseVisionImageLabeler {
+function getDetector(localModelResourceFolder: string, confidenceThreshold: number): com.google.firebase.ml.vision.label.FirebaseVisionImageLabeler {
+  // TODO also support cloud hosted models
+  const model = new com.google.firebase.ml.vision.automl.FirebaseAutoMLLocalModel.Builder()
+      .setAssetFilePath(localModelResourceFolder + "/manifest.json") // TODO this..
+      // .setFilePath() // .. or this
+      .build();
+
   const labelDetectorOptions =
-      new com.google.firebase.ml.vision.label.FirebaseVisionOnDeviceAutoMLImageLabelerOptions.Builder()
+      new com.google.firebase.ml.vision.label.FirebaseVisionOnDeviceAutoMLImageLabelerOptions.Builder(model)
           .setConfidenceThreshold(confidenceThreshold)
           .build();

-  return com.google.firebase.ml.vision.FirebaseVision.getInstance().getOnDeviceAutoMLImageLabeler(labelDetectorOptions);
+  return com.google.firebase.ml.vision.FirebaseVision.getInstance()
+      .getOnDeviceAutoMLImageLabeler(labelDetectorOptions);
 }

 export function labelImage(options: MLKitAutoMLOptions): Promise<MLKitAutoMLResult> {
   return new Promise((resolve, reject) => {
     try {
-      const firebaseVisionAutoMLImageLabeler = getDetector(options.confidenceThreshold || 0.5);
+      const firebaseVisionAutoMLImageLabeler = getDetector(options.localModelResourceFolder, options.confidenceThreshold || 0.5);

       const onSuccessListener = new com.google.android.gms.tasks.OnSuccessListener({
         onSuccess: labels => {
