This repository was archived by the owner on Apr 4, 2023. It is now read-only.

Commit a556314

Upgrade Firebase iOS SDK to 5.5.0 #833
1 parent: a86f96d

4 files changed: +26 −23 lines

docs/ML_KIT.md

Lines changed: 0 additions & 2 deletions

@@ -109,8 +109,6 @@ const firebase = require("nativescript-plugin-firebase");

 firebase.mlkit.textrecognition.recognizeTextCloud({
   image: imageSource, // a NativeScript Image or ImageSource, see the demo for examples
-  modelType: "latest", // either "latest" or "stable" (default "stable")
-  maxResults: 15 // default 10
 })
   .then((result: MLKitRecognizeTextCloudResult) => console.log(result.text))
   .catch(errorMessage => console.log("ML Kit error: " + errorMessage));

src/mlkit/custommodel/index.ios.ts

Lines changed: 1 addition & 1 deletion

@@ -37,7 +37,7 @@ export class MLKitCustomModel extends MLKitCustomModelBase {
 }

 function getInterpreter(): FIRModelInterpreter {
-  const fIRModelDownloadConditions = FIRModelDownloadConditions.alloc().initWithWiFiRequiredIdleRequired(false, false);
+  const fIRModelDownloadConditions = FIRModelDownloadConditions.alloc().initWithIsWiFiRequiredCanDownloadInBackground(false, true);

   const fIRCloudModelSource = FIRCloudModelSource.alloc().initWithModelNameEnableModelUpdatesInitialConditionsUpdateConditions(
       "my-custom-model",

src/mlkit/textrecognition/index.d.ts

Lines changed: 2 additions & 0 deletions

@@ -38,6 +38,8 @@ export interface MLKitRecognizeTextCloudResult extends MLKitResult {
   text: string;
 }

+// TODO see 'setLanguageHints' at https://firebase.google.com/docs/ml-kit/android/recognize-text
+
 export interface MLKitRecognizeTextOnDeviceOptions extends MLKitOptions {
 }

src/mlkit/textrecognition/index.ios.ts

Lines changed: 23 additions & 20 deletions

@@ -7,18 +7,18 @@ import { MLKitRecognizeTextResultBlock, MLKitRecognizeTextResultLine } from "./i
 export class MLKitTextRecognition extends MLKitTextRecognitionBase {
   protected createDetector(): any {
     const firVision: FIRVision = FIRVision.vision();
-    return firVision.textDetector();
+    return firVision.onDeviceTextRecognizer();
   }

   protected createSuccessListener(): any {
-    return (features: NSArray<FIRVisionText>, error: NSError) => {
+    return (visionText: FIRVisionText, error: NSError) => {
       if (error !== null) {
         console.log(error.localizedDescription);
-      } else if (features !== null && features.count > 0) {
+      } else if (visionText !== null) {
         this.notify({
           eventName: MLKitTextRecognition.scanResultEvent,
           object: this,
-          value: getOnDeviceResult(features)
+          value: getOnDeviceResult(visionText)
         });
       }
     };
@@ -30,13 +30,14 @@ export class MLKitTextRecognition extends MLKitTextRecognitionBase {

 }

-function getOnDeviceResult(features: NSArray<FIRVisionText>): MLKitRecognizeTextOnDeviceResult {
+function getOnDeviceResult(visionText: FIRVisionText): MLKitRecognizeTextOnDeviceResult {
   const result = <MLKitRecognizeTextOnDeviceResult>{
+    text: visionText.text,
     blocks: []
   };

-  for (let i = 0, l = features.count; i < l; i++) {
-    const feature = features.objectAtIndex(i);
+  for (let i = 0, l = visionText.blocks.count; i < l; i++) {
+    const feature: FIRVisionTextBlock = visionText.blocks.objectAtIndex(i);
     const resultFeature = <MLKitRecognizeTextResultBlock>{
       text: feature.text,
       bounds: feature.frame,
@@ -59,13 +60,15 @@ function getOnDeviceResult(features: NSArray<FIRVisionText>): MLKitRecognizeText
       resultFeature.lines.push(resultLine);
     };

+    // TODO
     if (feature instanceof FIRVisionTextBlock) {
       const textBlock = <FIRVisionTextBlock>feature;
       for (let j = 0, k = textBlock.lines.count; j < k; j++) {
         addLineToResult(textBlock.lines.objectAtIndex(j));
       }
     }

+    // TODO
     if (feature instanceof FIRVisionTextLine) {
       addLineToResult(feature);
     }
@@ -79,13 +82,13 @@ export function recognizeTextOnDevice(options: MLKitRecognizeTextOnDeviceOptions
   return new Promise((resolve, reject) => {
     try {
       const firVision: FIRVision = FIRVision.vision();
-      const textDetector: FIRVisionTextDetector = firVision.textDetector();
+      const textDetector: FIRVisionTextRecognizer = firVision.onDeviceTextRecognizer();

-      textDetector.detectInImageCompletion(getImage(options), (features: NSArray<FIRVisionText>, error: NSError) => {
+      textDetector.processImageCompletion(getImage(options), (visionText: FIRVisionText, error: NSError) => {
         if (error !== null) {
           reject(error.localizedDescription);
-        } else if (features !== null) {
-          resolve(getOnDeviceResult(features));
+        } else if (visionText !== null) {
+          resolve(getOnDeviceResult(visionText));
         }
       });
     } catch (ex) {
@@ -98,21 +101,21 @@ export function recognizeTextOnDevice(options: MLKitRecognizeTextOnDeviceOptions
 export function recognizeTextCloud(options: MLKitRecognizeTextCloudOptions): Promise<MLKitRecognizeTextCloudResult> {
   return new Promise((resolve, reject) => {
     try {
-      const fIRVisionCloudDetectorOptions = FIRVisionCloudDetectorOptions.new();
-      fIRVisionCloudDetectorOptions.modelType = options.modelType === "latest" ? FIRVisionCloudModelType.Latest : FIRVisionCloudModelType.Stable;
-      fIRVisionCloudDetectorOptions.maxResults = options.maxResults || 10;
+      const fIRVisionCloudDetectorOptions = FIRVisionCloudTextRecognizerOptions.new();
+      fIRVisionCloudDetectorOptions.modelType = FIRVisionCloudTextModelType.Sparse;
+      // fIRVisionCloudDetectorOptions.modelType = options.modelType === "latest" ? FIRVisionCloudModelType.Latest : FIRVisionCloudModelType.Stable;
+      // fIRVisionCloudDetectorOptions.maxResults = options.maxResults || 10;

       const firVision: FIRVision = FIRVision.vision();
-      const textDetector = firVision.cloudTextDetectorWithOptions(fIRVisionCloudDetectorOptions);
+      const textDetector = firVision.cloudTextRecognizerWithOptions(fIRVisionCloudDetectorOptions);

-      textDetector.detectInImageCompletion(getImage(options), (cloudText: FIRVisionCloudText, error: NSError) => {
-        console.log(">>> recognizeTextCloud error? " + error + ", cloudText? " + cloudText);
+      textDetector.processImageCompletion(getImage(options), (visionText: FIRVisionText, error: NSError) => {
+        console.log(">>> recognizeTextCloud error? " + error + ", visionText? " + visionText);
         if (error !== null) {
           reject(error.localizedDescription);
-        } else if (cloudText !== null) {
-          console.log(">>> recognizeTextCloud result: " + cloudText);
+        } else if (visionText !== null) {
           resolve({
-            text: cloudText.text
+            text: visionText.text
           });
         } else {
           reject("Unknown error :'(");
