This repository was archived by the owner on Apr 4, 2023. It is now read-only.

Commit 5869baf: #699 Add ML Kit support (cloud text recognition)
Parent commit: 5d9133d

File tree: 9 files changed (+157, -60 lines)
Lines changed: 4 additions & 2 deletions

@@ -1,4 +1,4 @@
-<GridLayout rows="auto, auto, *" class="tab-content">
+<GridLayout rows="auto, auto, *, auto" class="tab-content">
 
   <Label row="0" text="Pick the image processing source:" textWrap="true"></Label>
 
@@ -8,5 +8,7 @@
     <Button text="Cam feed" (tap)="fromCameraFeed()" class="button"></Button>
   </StackLayout>
 
-  <Image row="2" [src]="pickedImage" width="240" horizontalAlignment="center"></Image>
+  <Image row="2" [src]="pickedImage" width="240" horizontalAlignment="center" (tap)="reusePickedImage()"></Image>
+
+  <Label row="3" text="⬆️ Tap the image to use it again" textWrap="true" horizontalAlignment="center" *ngIf="pickedImage"></Label>
 </GridLayout>

demo-ng/app/tabs/mlkit/mlkit.component.ts
Lines changed: 41 additions & 18 deletions

@@ -2,7 +2,7 @@ import { Component, NgZone } from "@angular/core";
 import { ImageSource } from "tns-core-modules/image-source";
 
 import { BarcodeFormat, MLKitScanBarcodesResult } from "nativescript-plugin-firebase/mlkit/barcodescanning";
-import { MLKitRecognizeTextResult } from "nativescript-plugin-firebase/mlkit/textrecognition";
+import { MLKitRecognizeTextLocalResult, MLKitRecognizeTextCloudResult } from "nativescript-plugin-firebase/mlkit/textrecognition";
 import { MLKitLandmarkRecognitionResult } from "nativescript-plugin-firebase/mlkit/landmarkrecognition";
 import { MLKitDetectFacesResult } from "nativescript-plugin-firebase/mlkit/facedetection";
 import { action } from "tns-core-modules/ui/dialogs";
@@ -24,24 +24,31 @@ export class MLKitComponent {
 
   pickedImage: ImageSource;
 
-  // TODO once more ML plugin features support cloud, add those as (cloud) options to this list
   private mlkitFeatures: Array<string> = [
-    "Text recognition",
+    "Text recognition (local)",
+    "Text recognition (cloud)",
     "Barcode scanning",
     "Face detection",
     "Image labeling",
     "Landmark recognition (cloud)"
   ];
 
+  private mlkitLocalFeatures: Array<string> = [
+    "Text recognition",
+    "Barcode scanning",
+    "Face detection",
+    "Image labeling"
+  ];
+
   constructor(private routerExtensions: RouterExtensions,
               private zone: NgZone) {
   }
 
   fromCameraFeed(): void {
     action(
-        "Test which ML Kit feature?",
+        "Test which ML Kit feature? No cloud processing will be used.",
         "Cancel",
-        this.mlkitFeatures
+        this.mlkitLocalFeatures
     ).then((pickedItem: string) => {
       let to;
       if (pickedItem === "Text recognition") {
@@ -52,14 +59,8 @@ export class MLKitComponent {
         to = "/tabs/mlkit/facedetection";
       } else if (pickedItem === "Image labeling") {
         to = "/tabs/mlkit/imagelabeling";
-      } else if (pickedItem === "Landmark recognition (cloud)") {
-        alert({
-          title: `Not available`,
-          message: `Landmark recognition is currently cloud-only, so that would be a bit too taxing on your dataplan.`,
-          okButtonText: "Gotcha!"
-        });
-        return;
       }
+
       if (to !== undefined) {
         this.routerExtensions.navigate([to],
             {
@@ -135,15 +136,21 @@ export class MLKitComponent {
     });
   }
 
-  selectMLKitFeature(imageSource: ImageSource): void {
+  reusePickedImage(): void {
+    this.selectMLKitFeature(this.pickedImage);
+  }
+
+  private selectMLKitFeature(imageSource: ImageSource): void {
     action(
         "Use which ML Kit feature?",
         "Cancel",
         this.mlkitFeatures
     ).then((pickedItem: string) => {
       let pickedItemIndex = this.mlkitFeatures.indexOf(pickedItem);
-      if (pickedItem === "Text recognition") {
-        this.recognizeText(imageSource);
+      if (pickedItem === "Text recognition (local)") {
+        this.recognizeTextLocal(imageSource);
+      } else if (pickedItem === "Text recognition (cloud)") {
+        this.recognizeTextCloud(imageSource);
       } else if (pickedItem === "Barcode scanning") {
        this.scanBarcode(imageSource);
      } else if (pickedItem === "Face detection") {
@@ -156,11 +163,11 @@ export class MLKitComponent {
     });
   }
 
-  private recognizeText(imageSource: ImageSource): void {
-    firebase.mlkit.textrecognition.recognizeText({
+  private recognizeTextLocal(imageSource: ImageSource): void {
+    firebase.mlkit.textrecognition.recognizeTextLocal({
       image: imageSource
     }).then(
-        (result: MLKitRecognizeTextResult) => {
+        (result: MLKitRecognizeTextLocalResult) => {
          alert({
            title: `Result`,
            message: result.features.map(feature => feature.text).join(""),
@@ -170,6 +177,22 @@ export class MLKitComponent {
         .catch(errorMessage => console.log("ML Kit error: " + errorMessage));
   }
 
+  private recognizeTextCloud(imageSource: ImageSource): void {
+    firebase.mlkit.textrecognition.recognizeTextCloud({
+      image: imageSource,
+      modelType: "latest",
+      maxResults: 15
+    }).then(
+        (result: MLKitRecognizeTextCloudResult) => {
+          alert({
+            title: `Result`,
+            message: result.text,
+            okButtonText: "OK"
+          });
+        })
+        .catch(errorMessage => console.log("ML Kit error: " + errorMessage));
+  }
+
   private recognizeLandmark(imageSource: ImageSource): void {
     firebase.mlkit.landmarkrecognition.recognizeLandmark({
       image: imageSource
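
A minimal usage sketch of the two calls this commit introduces, outside the Angular demo. This assumes the functions can be imported directly from the textrecognition submodule, as its typings further down declare; the sample image path is hypothetical:

import { ImageSource, fromFile } from "tns-core-modules/image-source";
import {
  MLKitRecognizeTextLocalResult,
  MLKitRecognizeTextCloudResult,
  recognizeTextLocal,
  recognizeTextCloud
} from "nativescript-plugin-firebase/mlkit/textrecognition";

// Hypothetical sample image bundled with the app.
const image: ImageSource = fromFile("~/images/sample.png");

// On-device recognition: the result is a list of recognized text features (blocks).
recognizeTextLocal({ image })
    .then((result: MLKitRecognizeTextLocalResult) =>
        console.log(result.features.map(feature => feature.text).join("\n")))
    .catch(errorMessage => console.log("ML Kit error: " + errorMessage));

// Cloud recognition: the result is a single concatenated text string.
// modelType defaults to "stable" and maxResults to 10 when omitted.
recognizeTextCloud({ image, modelType: "latest", maxResults: 15 })
    .then((result: MLKitRecognizeTextCloudResult) => console.log(result.text))
    .catch(errorMessage => console.log("ML Kit error: " + errorMessage));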

demo-ng/app/tabs/mlkit/textrecognition/textrecognition.component.ts
Lines changed: 2 additions & 2 deletions

@@ -1,5 +1,5 @@
 import { Component } from "@angular/core";
-import { MLKitRecognizeTextResult } from "nativescript-plugin-firebase/mlkit/textrecognition";
+import { MLKitRecognizeTextLocalResult } from "nativescript-plugin-firebase/mlkit/textrecognition";
 
 @Component({
   selector: "mlkit-textrecognition",
@@ -12,7 +12,7 @@ export class TextRecognitionComponent {
   }>;
 
   onTextRecognitionResult(scanResult: any): void {
-    const value: MLKitRecognizeTextResult = scanResult.value;
+    const value: MLKitRecognizeTextLocalResult = scanResult.value;
     this.features = value.features;
   }
 }

demo-ng/package.json
Lines changed: 1 addition & 1 deletion

@@ -25,7 +25,7 @@
     "nativescript-angular": "~5.3.0",
     "nativescript-camera": "^4.0.2",
     "nativescript-imagepicker": "^6.0.1",
-    "nativescript-plugin-firebase": "^5.0.0",
+    "nativescript-plugin-firebase": "^6.0.0",
     "nativescript-theme-core": "~1.0.4",
     "reflect-metadata": "~0.1.10",
     "rxjs": "~5.5.0",

src/mlkit/imagelabeling/index.d.ts
Lines changed: 2 additions & 2 deletions

@@ -1,4 +1,4 @@
-import { MLKitMultiEngineOptions } from "../";
+import { MLKitOptions } from "../";
 import { MLKitCameraView, MLKitResult } from "../index";
 
 export interface MLKitImageLabelingResult extends MLKitResult {
@@ -9,7 +9,7 @@ export interface MLKitImageLabelingResult extends MLKitResult {
   }>;
 }
 
-export interface MLKitImageLabelingOptions extends MLKitMultiEngineOptions {
+export interface MLKitImageLabelingOptions extends MLKitOptions {
   /**
    * 0.5 by default
    */

src/mlkit/index.ts
Lines changed: 11 additions & 4 deletions

@@ -11,8 +11,15 @@ export interface MLKitOptions {
   image: Image | ImageSource;
 }
 
-export interface MLKitMultiEngineOptions extends MLKitOptions {
-  engine?: "device" | "cloud"; // defaults to local, as cloud requires an additional plan
+export interface MLKitCloudOptions extends MLKitOptions {
+  /**
+   * Defaults to "stable".
+   */
+  modelType?: "stable" | "latest";
+  /**
+   * Defaults to 10.
+   */
+  maxResults?: number
 }
 
 export interface MLKitResult {
@@ -26,8 +33,8 @@ export interface MLKitResult {
 export declare class MLKitCameraView {
 }
 
-export declare class MLKitBarcodeScanner {
-}
+// export declare class MLKitBarcodeScanner {
+// }
 
 export {
   textrecognition,
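
Feature-specific cloud options (such as the text recognition ones further down) extend this new MLKitCloudOptions interface, so every cloud call shares the same two tuning knobs. A small sketch of a conforming options object, assuming the interface is importable from the mlkit entry point as declared here; the image value is a placeholder:

import { ImageSource } from "tns-core-modules/image-source";
import { MLKitCloudOptions } from "nativescript-plugin-firebase/mlkit";

// Placeholder image; in the demo this comes from the camera or the image picker.
const image = new ImageSource();

// Leaving out modelType / maxResults falls back to "stable" and 10.
const cloudOptions: MLKitCloudOptions = {
  image,
  modelType: "latest",
  maxResults: 5
};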

src/mlkit/textrecognition/index.android.ts
Lines changed: 48 additions & 9 deletions

@@ -1,8 +1,12 @@
 import { ImageSource } from "tns-core-modules/image-source";
 import { MLKitOptions, } from "../";
-import { MLKitRecognizeTextOptions, MLKitRecognizeTextResult } from "./";
+import { MLKitRecognizeTextLocalOptions, MLKitRecognizeTextLocalResult } from "./";
 import { MLKitTextRecognition as MLKitTextRecognitionBase } from "./textrecognition-common";
-import { MLKitRecognizeTextResultFeature } from "./index";
+import {
+  MLKitRecognizeTextCloudOptions,
+  MLKitRecognizeTextCloudResult,
+  MLKitRecognizeTextResultFeature
+} from "./index";
 
 declare const com: any;
 
@@ -19,16 +23,16 @@ export class MLKitTextRecognition extends MLKitTextRecognitionBase {
           this.notify({
             eventName: MLKitTextRecognition.scanResultEvent,
             object: this,
-            value: getResult(textBlocks.getBlocks())
+            value: getLocalResult(textBlocks.getBlocks())
           });
         }
       }
     });
   }
 }
 
-function getResult(blocks: any): MLKitRecognizeTextResult {
-  const result = <MLKitRecognizeTextResult>{
+function getLocalResult(blocks: any): MLKitRecognizeTextLocalResult {
+  const result = <MLKitRecognizeTextLocalResult>{
     features: []
   };
 
@@ -68,14 +72,14 @@ function getResult(blocks: any): MLKitRecognizeTextResult {
   return result;
 }
 
-export function recognizeText(options: MLKitRecognizeTextOptions): Promise<MLKitRecognizeTextResult> {
+export function recognizeTextLocal(options: MLKitRecognizeTextLocalOptions): Promise<MLKitRecognizeTextLocalResult> {
   return new Promise((resolve, reject) => {
     try {
       const firebaseVisionTextDetector = com.google.firebase.ml.vision.FirebaseVision.getInstance().getVisionTextDetector();
 
       const onSuccessListener = new com.google.android.gms.tasks.OnSuccessListener({
         onSuccess: textBlocks => {
-          resolve(getResult(textBlocks.getBlocks()));
+          resolve(getLocalResult(textBlocks.getBlocks()));
           firebaseVisionTextDetector.close();
         }
       });
@@ -90,13 +94,48 @@ export function recognizeText(options: MLKitRecognizeTextOptions): Promise<MLKitRecognizeTextResult> {
           .addOnFailureListener(onFailureListener);
 
     } catch (ex) {
-      console.log("Error in firebase.mlkit.recognizeText: " + ex);
+      console.log("Error in firebase.mlkit.recognizeTextLocal: " + ex);
+      reject(ex);
+    }
+  });
+}
+
+export function recognizeTextCloud(options: MLKitRecognizeTextCloudOptions): Promise<MLKitRecognizeTextCloudResult> {
+  return new Promise((resolve, reject) => {
+    try {
+      const cloudDetectorOptions =
+          new com.google.firebase.ml.vision.cloud.FirebaseVisionCloudDetectorOptions.Builder()
+              .setModelType(options.modelType === "latest" ? com.google.firebase.ml.vision.cloud.FirebaseVisionCloudDetectorOptions.LATEST_MODEL : com.google.firebase.ml.vision.cloud.FirebaseVisionCloudDetectorOptions.STABLE_MODEL)
+              .setMaxResults(options.maxResults || 10)
+              .build();
+
+      const firebaseVisionCloudTextDetector = com.google.firebase.ml.vision.FirebaseVision.getInstance().getVisionCloudTextDetector(cloudDetectorOptions);
+
+      const onSuccessListener = new com.google.android.gms.tasks.OnSuccessListener({
+        onSuccess: firebaseVisionCloudText => {
+          resolve({
+            text: firebaseVisionCloudText.getText()
+          });
+          firebaseVisionCloudTextDetector.close();
+        }
+      });
+
+      const onFailureListener = new com.google.android.gms.tasks.OnFailureListener({
+        onFailure: exception => reject(exception.getMessage())
+      });
+
+      firebaseVisionCloudTextDetector
+          .detectInImage(getImage(options))
+          .addOnSuccessListener(onSuccessListener)
+          .addOnFailureListener(onFailureListener);
+
+    } catch (ex) {
+      console.log("Error in firebase.mlkit.recognizeTextCloud: " + ex);
       reject(ex);
     }
   });
 }
 
-// TODO move
 function getImage(options: MLKitOptions): any /* com.google.firebase.ml.vision.common.FirebaseVisionImage */ {
   const image: android.graphics.Bitmap = options.image instanceof ImageSource ? options.image.android : options.image.imageSource.android;
   return com.google.firebase.ml.vision.common.FirebaseVisionImage.fromBitmap(image);
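
The cloud path only resolves once Google's OnSuccessListener fires, so a slow or absent network can leave the promise pending for a while. A generic guard a caller could wrap around recognizeTextCloud, plain TypeScript and not part of this commit:

// Illustrative helper: reject if a cloud call takes longer than `ms` milliseconds.
function withTimeout<T>(call: Promise<T>, ms: number): Promise<T> {
  const timeout = new Promise<T>((resolve, reject) =>
      setTimeout(() => reject(new Error("ML Kit cloud call timed out")), ms));
  return Promise.race([call, timeout]);
}

// Usage (assuming the imports shown in the earlier sketch):
// withTimeout(recognizeTextCloud({ image }), 10000).then(...).catch(...);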

src/mlkit/textrecognition/index.d.ts
Lines changed: 15 additions & 6 deletions

@@ -1,5 +1,4 @@
-import { MLKitMultiEngineOptions } from "../";
-import { MLKitCameraView, MLKitResult } from "../index";
+import { MLKitCameraView, MLKitCloudOptions, MLKitOptions, MLKitResult } from "../index";
 
 export interface MLKitRecognizeTextResultFeature {
   text: string;
@@ -18,13 +17,23 @@ export interface MLKitRecognizeTextResultFeature {
   }>
 }
 
-export interface MLKitRecognizeTextResult extends MLKitResult {
+export interface MLKitRecognizeTextLocalResult extends MLKitResult {
   features: Array<MLKitRecognizeTextResultFeature>;
 }
 
-export interface MLKitRecognizeTextOptions extends MLKitMultiEngineOptions {
+export interface MLKitRecognizeTextCloudResult extends MLKitResult {
+  text: string;
+}
+
+export interface MLKitRecognizeTextLocalOptions extends MLKitOptions {
 }
 
-export declare function recognizeText(options: MLKitRecognizeTextOptions): Promise<MLKitRecognizeTextResult>;
+export interface MLKitRecognizeTextCloudOptions extends MLKitCloudOptions {
+}
 
-export declare class MLKitTextRecognition extends MLKitCameraView {}
+export declare function recognizeTextLocal(options: MLKitRecognizeTextLocalOptions): Promise<MLKitRecognizeTextLocalResult>;
+
+export declare function recognizeTextCloud(options: MLKitRecognizeTextCloudOptions): Promise<MLKitRecognizeTextCloudResult>;
+
+export declare class MLKitTextRecognition extends MLKitCameraView {
+}
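
Because the local and cloud results now have different shapes (a features array versus a single text string), code that handles either one has to branch on the shape. A small sketch against the declarations above:

import {
  MLKitRecognizeTextLocalResult,
  MLKitRecognizeTextCloudResult
} from "nativescript-plugin-firebase/mlkit/textrecognition";

// Flatten either result type to plain text.
function toPlainText(result: MLKitRecognizeTextLocalResult | MLKitRecognizeTextCloudResult): string {
  return "features" in result
      ? result.features.map(feature => feature.text).join("\n")
      : result.text;
}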
