Skip to content
This repository was archived by the owner on Apr 4, 2023. It is now read-only.

Commit a3c45fb

Browse files
Add support for ML Kit's Smart Reply feature #1219
1 parent d7a4e67 commit a3c45fb

File tree

9 files changed

+201
-18
lines changed

9 files changed

+201
-18
lines changed

demo-ng/app/tabs/mlkit/mlkit.component.ts

Lines changed: 41 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ import {
1111
} from "nativescript-plugin-firebase/mlkit/imagelabeling";
1212
import { MLKitLandmarkRecognitionCloudResult } from "nativescript-plugin-firebase/mlkit/landmarkrecognition";
1313
import { MLKitNaturalLanguageIdentificationResult } from "nativescript-plugin-firebase/mlkit/naturallanguageidentification";
14+
import { MLKitSmartReplyConversationMessage } from "nativescript-plugin-firebase/mlkit/smartreply";
1415
import { MLKitRecognizeTextResult } from "nativescript-plugin-firebase/mlkit/textrecognition";
1516
import * as fileSystemModule from "tns-core-modules/file-system";
1617
import { ImageAsset } from "tns-core-modules/image-asset";
@@ -38,7 +39,8 @@ export class MLKitComponent {
3839
"Image labeling (cloud)",
3940
"Custom model",
4041
"Landmark recognition (cloud)",
41-
"Language identification (on device)"
42+
"Language identification",
43+
"Smart Reply"
4244
];
4345

4446
private mlkitOnDeviceFeatures: Array<string> = [
@@ -191,8 +193,10 @@ export class MLKitComponent {
191193
this.recognizeLandmarkCloud(imageSource);
192194
} else if (pickedItem === "Custom model") {
193195
this.customModel(imageSource);
194-
} else if (pickedItem === "Language identification (on device)") {
196+
} else if (pickedItem === "Language identification") {
195197
this.languageIdentification(imageSource);
198+
} else if (pickedItem === "Smart Reply") {
199+
this.smartReply(imageSource);
196200
}
197201
});
198202
}
@@ -246,21 +250,41 @@ export class MLKitComponent {
246250
// First recognize text, then get its language
247251
firebase.mlkit.textrecognition.recognizeTextOnDevice({
248252
image: imageSource
249-
}).then(
250-
(result: MLKitRecognizeTextResult) => {
251-
firebase.mlkit.naturallanguageidentification.identifyNaturalLanguage({
252-
text: result.text
253-
}).then(
254-
(languageIdResult: MLKitNaturalLanguageIdentificationResult) => {
255-
alert({
256-
title: `Result`,
257-
message: `Language code: ${languageIdResult.languageCode}`,
258-
okButtonText: "OK"
259-
});
260-
})
261-
.catch(errorMessage => console.log("ML Kit error: " + errorMessage));
262-
})
263-
.catch(errorMessage => console.log("ML Kit error: " + errorMessage));
253+
}).then((result: MLKitRecognizeTextResult) => {
254+
firebase.mlkit.naturallanguageidentification.identifyNaturalLanguage({
255+
text: result.text
256+
}).then((languageIdResult: MLKitNaturalLanguageIdentificationResult) => {
257+
alert({
258+
title: `Result`,
259+
message: `Language code: ${languageIdResult.languageCode}`,
260+
okButtonText: "OK"
261+
});
262+
}).catch(errorMessage => console.log("ML Kit error: " + errorMessage));
263+
}).catch(errorMessage => console.log("ML Kit error: " + errorMessage));
264+
}
265+
266+
// It would be easier to hardcode the conversation, but this fits better with the other image-based examples:
// we OCR the picked image and feed every recognized text block into Smart Reply as a remote-user message.
private smartReply(imageSource: ImageSource): void {
  firebase.mlkit.textrecognition.recognizeTextOnDevice({
    image: imageSource
  }).then((result: MLKitRecognizeTextResult) => {
    // Build the conversation history in chronological order; every block is
    // attributed to a remote user so ML Kit will suggest replies for the local user.
    const conversation: Array<MLKitSmartReplyConversationMessage> = [];
    result.blocks.forEach(block => conversation.push({
      text: block.text,
      userId: "abc",
      localUser: false,
      timestamp: new Date().getTime()
    }));

    firebase.mlkit.smartreply.suggestReplies({
      // The option is named 'conversation' (see MLKitSmartReplySuggestRepliesOptions);
      // passing it as 'messages' would leave options.conversation undefined.
      conversation
    }).then((suggestions: Array<string>) => {
      alert({
        title: `Suggestions`,
        message: JSON.stringify(suggestions),
        okButtonText: "OK"
      });
    }).catch(errorMessage => console.log("ML Kit error: " + errorMessage));
  }).catch(errorMessage => console.log("ML Kit error: " + errorMessage));
}
265289

266290
private customModel(imageSource: ImageSource): void {

docs/ML_KIT.md

Lines changed: 55 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -84,6 +84,8 @@ To be able to use Cloud features you need to do two things:
8484
|[Barcode scanning](#barcode-scanning)|✅|
8585
|[Image labeling](#image-labeling)|✅|✅
8686
|[Landmark recognition](#landmark-recognition)||✅
87+
|[Natural language identification](#natural-language-identification)|✅|
88+
|[Smart reply](#smart-reply)|✅|
8789
|[Custom model inference](#custom-model-inference)|✅|✅
8890

8991
### Text recognition
@@ -384,6 +386,59 @@ firebase.mlkit.landmarkrecognition.recognizeLandmarksCloud({
384386
.catch(errorMessage => console.log("ML Kit error: " + errorMessage));
385387
```
386388
389+
### Natural language identification
390+
<img src="https://raw.githubusercontent.com/EddyVerbruggen/nativescript-plugin-firebase/master/docs/images/features/mlkit_language_detection.png" height="153px" alt="ML Kit - Natural language identification"/>
391+
392+
[Firebase documentation 🌎](https://firebase.google.com/docs/ml-kit/identify-languages)
393+
394+
#### Still image
395+
Note that you could hook this up to text recognition running from a live camera stream, to get realtime results.
396+
Check [demo-ng](demo-ng) if you're interested in such a solution.
397+
398+
```typescript
399+
import { MLKitNaturalLanguageIdentificationResult } from "nativescript-plugin-firebase/mlkit/naturallanguageidentification";
400+
const firebase = require("nativescript-plugin-firebase");
401+
402+
firebase.mlkit.naturallanguageidentification.identifyNaturalLanguage({
403+
text: "Some text to detect the language for"
404+
})
405+
.then((languageIdResult: MLKitNaturalLanguageIdentificationResult) => console.log(`Language code: ${languageIdResult.languageCode}`))
406+
.catch(errorMessage => console.log("ML Kit error: " + errorMessage));
407+
```
408+
409+
### Smart reply
410+
<img src="https://raw.githubusercontent.com/EddyVerbruggen/nativescript-plugin-firebase/master/docs/images/features/mlkit_smart_reply.png" height="153px" alt="ML Kit - Smart reply"/>
411+
412+
[Firebase documentation 🌎](https://firebase.google.com/docs/ml-kit/generate-smart-replies)
413+
414+
#### Still image
415+
416+
```typescript
417+
import { MLKitSmartReplyConversationMessage } from "nativescript-plugin-firebase/mlkit/smartreply";
418+
const firebase = require("nativescript-plugin-firebase");
419+
420+
// build a conversation history ML Kit can create suggestions for, in chronological order
421+
const conversation: Array<MLKitSmartReplyConversationMessage> = [];
422+
conversation.push({
423+
text: "some text",
424+
userId: "abc",
425+
localUser: false,
426+
timestamp: new Date().getTime()
427+
});
428+
429+
conversation.push({
430+
text: "some other text",
431+
userId: "def",
432+
localUser: true,
433+
timestamp: new Date().getTime()
434+
});
435+
436+
firebase.mlkit.smartreply.suggestReplies({
437+
conversation
438+
}).then((suggestions: Array<string>) => console.log(JSON.stringify(suggestions)))
439+
.catch(errorMessage => console.log("ML Kit error: " + errorMessage));
440+
```
441+
387442
### Custom model inference
388443
<img src="https://raw.githubusercontent.com/EddyVerbruggen/nativescript-plugin-firebase/master/docs/images/features/mlkit_custom_model_tflite.png" height="153px" alt="ML Kit - Custom Model (TensorFlow Lite)"/>
389444
23.2 KB
Loading
112 KB
Loading

src/mlkit/index.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@ import * as imagelabeling from "./imagelabeling";
55
import * as landmarkrecognition from "./landmarkrecognition";
66
import * as custommodel from "./custommodel";
77
import * as naturallanguageidentification from "./naturallanguageidentification";
8+
import * as smartreply from "./smartreply";
89

910
import { ImageSource } from "tns-core-modules/image-source";
1011
import { Image } from "tns-core-modules/ui/image";
@@ -44,6 +45,7 @@ export {
4445
imagelabeling,
4546
landmarkrecognition,
4647
custommodel,
47-
naturallanguageidentification
48+
naturallanguageidentification,
49+
smartreply
4850
};
4951

Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
import { MLKitSmartReplySuggestRepliesOptions, } from "./index";
2+
3+
export function suggestReplies(options: MLKitSmartReplySuggestRepliesOptions): Promise<Array<string>> {
4+
return new Promise((resolve, reject) => {
5+
try {
6+
const conversation: java.util.List<com.google.firebase.ml.naturallanguage.smartreply.FirebaseTextMessage> = new java.util.ArrayList();
7+
8+
options.conversation.forEach(m => {
9+
if (m.localUser) {
10+
conversation.add(com.google.firebase.ml.naturallanguage.smartreply.FirebaseTextMessage.createForLocalUser(m.text, m.timestamp))
11+
} else {
12+
conversation.add(com.google.firebase.ml.naturallanguage.smartreply.FirebaseTextMessage.createForRemoteUser(m.text, m.timestamp, m.userId))
13+
}
14+
});
15+
16+
const smartReply: com.google.firebase.ml.naturallanguage.smartreply.FirebaseSmartReply =
17+
com.google.firebase.ml.naturallanguage.FirebaseNaturalLanguage.getInstance().getSmartReply();
18+
19+
smartReply.suggestReplies(conversation)
20+
.addOnSuccessListener(new (<any>com.google.android.gms).tasks.OnSuccessListener({
21+
onSuccess: (result: com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult) => {
22+
if (result.getStatus() == com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult.STATUS_NOT_SUPPORTED_LANGUAGE) {
23+
reject("Unsupported language");
24+
} else if (result.getStatus() == com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult.STATUS_NO_REPLY) {
25+
reject("No reply");
26+
} else if (result.getStatus() == com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult.STATUS_SUCCESS) {
27+
const suggestions = [];
28+
for (let i = 0; i < result.getSuggestions().size(); i++) {
29+
const s = result.getSuggestions().get(i);
30+
suggestions.push(s.getText());
31+
}
32+
resolve(suggestions);
33+
} else {
34+
reject();
35+
}
36+
}
37+
}))
38+
.addOnFailureListener(new (<any>com.google.android.gms).tasks.OnFailureListener({
39+
onFailure: exception => reject(exception.getMessage())
40+
}));
41+
} catch (ex) {
42+
console.log("Error in firebase.mlkit.suggestReplies: " + ex);
43+
reject(ex);
44+
}
45+
});
46+
}

src/mlkit/smartreply/index.d.ts

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
/**
 * A single message in the conversation history passed to {@link suggestReplies}.
 */
export interface MLKitSmartReplyConversationMessage {
  /** The message body. */
  text: string;
  /** When the message was sent, in epoch milliseconds (e.g. new Date().getTime()). */
  timestamp: number;
  /** Identifier of the sender; used to group messages from the same remote user. */
  userId: string;
  /** true if the message was written by the local (current) user, false for remote users. */
  localUser: boolean;
}

/**
 * Options for {@link suggestReplies}.
 */
export interface MLKitSmartReplySuggestRepliesOptions {
  /** The conversation history, in chronological order (oldest first). */
  conversation: Array<MLKitSmartReplyConversationMessage>;
}

/**
 * Generates reply suggestions (on-device) for the last message of the given conversation.
 * Resolves with the suggested reply texts; rejects when the language is unsupported
 * or no reply could be generated.
 */
export declare function suggestReplies(options: MLKitSmartReplySuggestRepliesOptions): Promise<Array<string>>;

src/mlkit/smartreply/index.ios.ts

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
import { MLKitSmartReplySuggestRepliesOptions, } from "./index";
2+
3+
export function suggestReplies(options: MLKitSmartReplySuggestRepliesOptions): Promise<Array<string>> {
4+
return new Promise((resolve, reject) => {
5+
try {
6+
const naturalLanguage = FIRNaturalLanguage.naturalLanguage();
7+
const smartReply = naturalLanguage.smartReply();
8+
const conversation: Array<FIRTextMessage> = [];
9+
options.conversation.forEach(m => conversation.push(new FIRTextMessage({
10+
text: m.text,
11+
isLocalUser: m.localUser,
12+
timestamp: m.timestamp,
13+
userID: m.userId
14+
})));
15+
16+
smartReply.suggestRepliesForMessagesCompletion(conversation, (result: FIRSmartReplySuggestionResult, error: NSError) => {
17+
if (error) {
18+
reject(error.localizedDescription);
19+
} else if (!result) {
20+
reject("No results");
21+
} else if (result.status === FIRSmartReplyResultStatus.NotSupportedLanguage) {
22+
reject("Unsupported language");
23+
} else if (result.status === FIRSmartReplyResultStatus.NoReply) {
24+
reject("No reply");
25+
} else if (result.status === FIRSmartReplyResultStatus.Success) {
26+
const suggestions = [];
27+
for (let i = 0; i < result.suggestions.count; i++) {
28+
const s = result.suggestions.objectAtIndex(i);
29+
suggestions.push(s.text);
30+
}
31+
resolve(suggestions);
32+
} else {
33+
reject();
34+
}
35+
});
36+
} catch (ex) {
37+
console.log("Error in firebase.mlkit.suggestReplies: " + ex);
38+
reject(ex);
39+
}
40+
});
41+
}

src/scripts/postinstall.js

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4184,6 +4184,9 @@ const supportsIOSModernBuildSystem = tnsVersionFull.indexOf("5.2.0-") > -1 || se
41844184
// Custom gradle buildscripts are supported from NativeScript-Android version 5.3.0 (TODO this actually checks the CLI version)
41854185
const supportsGradleBuildscripts = tnsVersionFull.indexOf("5.3.0-") > -1 || semver.gte(tnsVersionFull, "5.3.0");
41864186

4187+
// TODO use this when writing stuff like 'com.android.support' below, because {N} 6 will have to use AndroidX
4188+
const supportsAndroidX = semver.gte(tnsVersionFull, "6.0.0");
4189+
41874190
if (!supportsIOSModernBuildSystem) {
41884191
console.log(`You're using NativeScript ${tnsVersionFull}.. which doesn't support the latest Firestore and in-app-messaging SDKs. Upgrade NativeScript to at least 5.2.0 if you need those!\n\n`);
41894192
}

0 commit comments

Comments
 (0)