diff --git a/mlkit/android-snippets/README.md b/mlkit/android-snippets/README.md
new file mode 100755
index 0000000..11a92ac
--- /dev/null
+++ b/mlkit/android-snippets/README.md
@@ -0,0 +1,22 @@
+# ML Kit Android Snippets
+
+This is a collection of the Android code snippets shown on https://developers.google.com/ml-kit. The project is not meant to be run as a standalone app demoing the ML Kit APIs.
+
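+The snippets are organized per ML Kit feature, with matching Java and Kotlin
+versions of each file. For example, the barcode-scanning snippets assume the
+corresponding ML Kit dependency declared in `app/build.gradle`:
+
+```groovy
+dependencies {
+    // Barcode model, bundled with the app
+    implementation 'com.google.mlkit:barcode-scanning:17.0.2'
+}
+```
+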
+## License
+
+Copyright 2020 Google, Inc.
+
+Licensed to the Apache Software Foundation (ASF) under one or more contributor
+license agreements. See the NOTICE file distributed with this work for
+additional information regarding copyright ownership. The ASF licenses this
+file to you under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
diff --git a/mlkit/android-snippets/app/build.gradle b/mlkit/android-snippets/app/build.gradle
new file mode 100644
index 0000000..14905ff
--- /dev/null
+++ b/mlkit/android-snippets/app/build.gradle
@@ -0,0 +1,62 @@
+apply plugin: 'com.android.application'
+apply plugin: 'kotlin-android'
+apply plugin: 'kotlin-android-extensions'
+
+android {
+ compileSdkVersion 31
+
+ defaultConfig {
+ applicationId "com.google.example.mlkit"
+ minSdkVersion 19
+ targetSdkVersion 31
+ versionCode 1
+ versionName "1.0"
+ testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+ multiDexEnabled true
+ }
+ buildTypes {
+ release {
+ minifyEnabled false
+ proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+ }
+ }
+ packagingOptions {
+ exclude 'META-INF/androidx.exifinterface_exifinterface.version'
+ exclude 'META-INF/proguard/androidx-annotations.pro'
+ }
+}
+
+dependencies {
+ implementation 'androidx.legacy:legacy-support-v4:1.0.0'
+ implementation 'androidx.appcompat:appcompat:1.4.2'
+ implementation 'androidx.exifinterface:exifinterface:1.3.3'
+ implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
+
+ // Add this dependency and multiDexEnabled = true in your defaultConfig to solve the error:
+ // Cannot fit requested classes in a single dex file
+ implementation 'androidx.multidex:multidex:2.0.1'
+
+ // ML Kit related dependencies
+
+ // Barcode model
+ implementation 'com.google.mlkit:barcode-scanning:17.0.2'
+
+ // Object detection and tracking features
+ implementation 'com.google.mlkit:object-detection:17.0.0'
+ implementation 'com.google.mlkit:object-detection-custom:17.0.0'
+
+ // Face features
+ implementation 'com.google.android.gms:play-services-mlkit-face-detection:17.0.1'
+
+ // Text features
+ implementation 'com.google.android.gms:play-services-mlkit-text-recognition:18.0.0'
+
+ // Image labeling
+ implementation 'com.google.mlkit:image-labeling:17.0.7'
+
+ // Language Identification
+ implementation 'com.google.mlkit:language-id:17.0.3'
+
+ implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
+}
diff --git a/mlkit/android-snippets/app/proguard-rules.pro b/mlkit/android-snippets/app/proguard-rules.pro
new file mode 100644
index 0000000..af6097f
--- /dev/null
+++ b/mlkit/android-snippets/app/proguard-rules.pro
@@ -0,0 +1,17 @@
+# Add project specific ProGuard rules here.
+# By default, the flags in this file are appended to flags specified
+# in the Android SDK's tools/proguard/proguard-android.txt
+# You can edit the include path and order by changing the proguardFiles
+# directive in build.gradle.
+#
+# For more details, see
+# http://developer.android.com/guide/developing/tools/proguard.html
+
+# Add any project specific keep options here:
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+# public *;
+#}
diff --git a/mlkit/android-snippets/app/src/main/AndroidManifest.xml b/mlkit/android-snippets/app/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..d5ee86d
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/AndroidManifest.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="com.google.example.mlkit">
+
+ <application
+ android:allowBackup="true"
+ android:icon="@mipmap/ic_launcher"
+ android:label="@string/app_name"
+ android:roundIcon="@mipmap/ic_launcher_round"
+ android:supportsRtl="true"
+ android:theme="@style/AppTheme">
+
+ <activity android:name=".MainActivity">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+
+ <category android:name="android.intent.category.LAUNCHER" />
+ </intent-filter>
+ </activity>
+
+ </application>
+
+</manifest>
\ No newline at end of file
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/BarcodeScanningActivity.java b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/BarcodeScanningActivity.java
new file mode 100644
index 0000000..b8ef2f4
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/BarcodeScanningActivity.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit;
+
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.os.Bundle;
+import androidx.annotation.NonNull;
+import androidx.appcompat.app.AppCompatActivity;
+
+import com.google.android.gms.tasks.OnFailureListener;
+import com.google.android.gms.tasks.OnSuccessListener;
+import com.google.android.gms.tasks.Task;
+import com.google.mlkit.vision.barcode.common.Barcode;
+import com.google.mlkit.vision.barcode.BarcodeScanner;
+import com.google.mlkit.vision.barcode.BarcodeScannerOptions;
+import com.google.mlkit.vision.barcode.BarcodeScanning;
+import com.google.mlkit.vision.common.InputImage;
+
+import java.util.List;
+
+public class BarcodeScanningActivity extends AppCompatActivity {
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ }
+
+ private void scanBarcodes(InputImage image) {
+ // [START set_detector_options]
+ BarcodeScannerOptions options =
+ new BarcodeScannerOptions.Builder()
+ .setBarcodeFormats(
+ Barcode.FORMAT_QR_CODE,
+ Barcode.FORMAT_AZTEC)
+ .build();
+ // [END set_detector_options]
+
+ // [START get_detector]
+ BarcodeScanner scanner = BarcodeScanning.getClient();
+ // Or, to specify the formats to recognize:
+ // BarcodeScanner scanner = BarcodeScanning.getClient(options);
+ // [END get_detector]
+
+ // [START run_detector]
+ Task<List<Barcode>> result = scanner.process(image)
+ .addOnSuccessListener(new OnSuccessListener<List<Barcode>>() {
+ @Override
+ public void onSuccess(List<Barcode> barcodes) {
+ // Task completed successfully
+ // [START_EXCLUDE]
+ // [START get_barcodes]
+ for (Barcode barcode: barcodes) {
+ Rect bounds = barcode.getBoundingBox();
+ Point[] corners = barcode.getCornerPoints();
+
+ String rawValue = barcode.getRawValue();
+
+ int valueType = barcode.getValueType();
+ // See API reference for complete list of supported types
+ switch (valueType) {
+ case Barcode.TYPE_WIFI:
+ String ssid = barcode.getWifi().getSsid();
+ String password = barcode.getWifi().getPassword();
+ int type = barcode.getWifi().getEncryptionType();
+ break;
+ case Barcode.TYPE_URL:
+ String title = barcode.getUrl().getTitle();
+ String url = barcode.getUrl().getUrl();
+ break;
+ }
+ }
+ // [END get_barcodes]
+ // [END_EXCLUDE]
+ }
+ })
+ .addOnFailureListener(new OnFailureListener() {
+ @Override
+ public void onFailure(@NonNull Exception e) {
+ // Task failed with an exception
+ // ...
+ }
+ });
+ // [END run_detector]
+ }
+
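+ // Cleanup sketch (not part of the snippet above): BarcodeScanner implements
+ // Closeable, so release its native resources once the scanner is no longer
+ // needed, for example from onDestroy().
+ private void closeScanner(BarcodeScanner scanner) {
+ scanner.close();
+ }
+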
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/FaceDetectionActivity.java b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/FaceDetectionActivity.java
new file mode 100644
index 0000000..86ebf6e
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/FaceDetectionActivity.java
@@ -0,0 +1,166 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit;
+
+import android.graphics.PointF;
+import android.graphics.Rect;
+import android.os.Bundle;
+import androidx.annotation.NonNull;
+import androidx.appcompat.app.AppCompatActivity;
+
+import com.google.android.gms.tasks.OnFailureListener;
+import com.google.android.gms.tasks.OnSuccessListener;
+import com.google.android.gms.tasks.Task;
+import com.google.mlkit.vision.common.InputImage;
+import com.google.mlkit.vision.face.Face;
+import com.google.mlkit.vision.face.FaceContour;
+import com.google.mlkit.vision.face.FaceDetection;
+import com.google.mlkit.vision.face.FaceDetector;
+import com.google.mlkit.vision.face.FaceDetectorOptions;
+import com.google.mlkit.vision.face.FaceLandmark;
+
+import java.util.List;
+
+public class FaceDetectionActivity extends AppCompatActivity {
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ }
+
+ private void detectFaces(InputImage image) {
+ // [START set_detector_options]
+ FaceDetectorOptions options =
+ new FaceDetectorOptions.Builder()
+ .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE)
+ .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
+ .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
+ .setMinFaceSize(0.15f)
+ .enableTracking()
+ .build();
+ // [END set_detector_options]
+
+ // [START get_detector]
+ FaceDetector detector = FaceDetection.getClient(options);
+ // Or use the default options:
+ // FaceDetector detector = FaceDetection.getClient();
+ // [END get_detector]
+
+ // [START run_detector]
+ Task<List<Face>> result =
+ detector.process(image)
+ .addOnSuccessListener(
+ new OnSuccessListener<List<Face>>() {
+ @Override
+ public void onSuccess(List<Face> faces) {
+ // Task completed successfully
+ // [START_EXCLUDE]
+ // [START get_face_info]
+ for (Face face : faces) {
+ Rect bounds = face.getBoundingBox();
+ float rotY = face.getHeadEulerAngleY(); // Head is rotated to the right rotY degrees
+ float rotZ = face.getHeadEulerAngleZ(); // Head is tilted sideways rotZ degrees
+
+ // If landmark detection was enabled (mouth, ears, eyes, cheeks, and
+ // nose available):
+ FaceLandmark leftEar = face.getLandmark(FaceLandmark.LEFT_EAR);
+ if (leftEar != null) {
+ PointF leftEarPos = leftEar.getPosition();
+ }
+
+ // If classification was enabled:
+ if (face.getSmilingProbability() != null) {
+ float smileProb = face.getSmilingProbability();
+ }
+ if (face.getRightEyeOpenProbability() != null) {
+ float rightEyeOpenProb = face.getRightEyeOpenProbability();
+ }
+
+ // If face tracking was enabled:
+ if (face.getTrackingId() != null) {
+ int id = face.getTrackingId();
+ }
+ }
+ // [END get_face_info]
+ // [END_EXCLUDE]
+ }
+ })
+ .addOnFailureListener(
+ new OnFailureListener() {
+ @Override
+ public void onFailure(@NonNull Exception e) {
+ // Task failed with an exception
+ // ...
+ }
+ });
+ // [END run_detector]
+ }
+
+ private void faceOptionsExamples() {
+ // [START mlkit_face_options_examples]
+ // High-accuracy landmark detection and face classification
+ FaceDetectorOptions highAccuracyOpts =
+ new FaceDetectorOptions.Builder()
+ .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE)
+ .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
+ .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
+ .build();
+
+ // Real-time contour detection
+ FaceDetectorOptions realTimeOpts =
+ new FaceDetectorOptions.Builder()
+ .setContourMode(FaceDetectorOptions.CONTOUR_MODE_ALL)
+ .build();
+ // [END mlkit_face_options_examples]
+ }
+
+ private void processFaceList(List<Face> faces) {
+ // [START mlkit_face_list]
+ for (Face face : faces) {
+ Rect bounds = face.getBoundingBox();
+ float rotY = face.getHeadEulerAngleY(); // Head is rotated to the right rotY degrees
+ float rotZ = face.getHeadEulerAngleZ(); // Head is tilted sideways rotZ degrees
+
+ // If landmark detection was enabled (mouth, ears, eyes, cheeks, and
+ // nose available):
+ FaceLandmark leftEar = face.getLandmark(FaceLandmark.LEFT_EAR);
+ if (leftEar != null) {
+ PointF leftEarPos = leftEar.getPosition();
+ }
+
+ // If contour detection was enabled:
+ List<PointF> leftEyeContour =
+ face.getContour(FaceContour.LEFT_EYE).getPoints();
+ List<PointF> upperLipBottomContour =
+ face.getContour(FaceContour.UPPER_LIP_BOTTOM).getPoints();
+
+ // If classification was enabled:
+ if (face.getSmilingProbability() != null) {
+ float smileProb = face.getSmilingProbability();
+ }
+ if (face.getRightEyeOpenProbability() != null) {
+ float rightEyeOpenProb = face.getRightEyeOpenProbability();
+ }
+
+ // If face tracking was enabled:
+ if (face.getTrackingId() != null) {
+ int id = face.getTrackingId();
+ }
+ }
+ // [END mlkit_face_list]
+ }
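+
+ // A hypothetical helper, not from the ML Kit docs: pick the face with the
+ // largest bounding box, a common heuristic when only one face matters.
+ private Face getLargestFace(List<Face> faces) {
+ Face largest = null;
+ for (Face face : faces) {
+ if (largest == null
+ || face.getBoundingBox().width() * face.getBoundingBox().height()
+ > largest.getBoundingBox().width() * largest.getBoundingBox().height()) {
+ largest = face;
+ }
+ }
+ return largest;
+ }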
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/ImageLabelingActivity.java b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/ImageLabelingActivity.java
new file mode 100644
index 0000000..1b42538
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/ImageLabelingActivity.java
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit;
+
+import android.os.Bundle;
+
+import androidx.annotation.NonNull;
+import androidx.appcompat.app.AppCompatActivity;
+
+import com.google.android.gms.tasks.OnFailureListener;
+import com.google.android.gms.tasks.OnSuccessListener;
+import com.google.android.gms.tasks.Task;
+import com.google.mlkit.vision.common.InputImage;
+import com.google.mlkit.vision.label.ImageLabel;
+import com.google.mlkit.vision.label.ImageLabeler;
+import com.google.mlkit.vision.label.ImageLabeling;
+import com.google.mlkit.vision.label.defaults.ImageLabelerOptions;
+
+import java.util.List;
+
+public class ImageLabelingActivity extends AppCompatActivity {
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ }
+
+ private void labelImages(InputImage image) {
+ ImageLabelerOptions options =
+ new ImageLabelerOptions.Builder()
+ .setConfidenceThreshold(0.8f)
+ .build();
+
+ // [START get_detector_options]
+ ImageLabeler labeler = ImageLabeling.getClient(options);
+ // [END get_detector_options]
+
+ /*
+ // [START get_detector_default]
+ // Or use the default options:
+ ImageLabeler detector = ImageLabeling.getClient(ImageLabelerOptions.DEFAULT_OPTIONS);
+ // [END get_detector_default]
+ */
+
+ // [START run_detector]
+ Task<List<ImageLabel>> result =
+ labeler.process(image)
+ .addOnSuccessListener(
+ new OnSuccessListener<List<ImageLabel>>() {
+ @Override
+ public void onSuccess(List<ImageLabel> labels) {
+ // Task completed successfully
+ // [START_EXCLUDE]
+ // [START get_labels]
+ for (ImageLabel label : labels) {
+ String text = label.getText();
+ float confidence = label.getConfidence();
+ }
+ // [END get_labels]
+ // [END_EXCLUDE]
+ }
+ })
+ .addOnFailureListener(
+ new OnFailureListener() {
+ @Override
+ public void onFailure(@NonNull Exception e) {
+ // Task failed with an exception
+ // ...
+ }
+ });
+ // [END run_detector]
+ }
+
+ private void configureAndRunImageLabeler(InputImage image) {
+ // [START on_device_image_labeler]
+ // To use default options:
+ ImageLabeler labeler = ImageLabeling.getClient(ImageLabelerOptions.DEFAULT_OPTIONS);
+
+ // Or, to set the minimum confidence required:
+ // ImageLabelerOptions options =
+ // new ImageLabelerOptions.Builder()
+ // .setConfidenceThreshold(0.7f)
+ // .build();
+ // ImageLabeler labeler = ImageLabeling.getClient(options);
+
+ // [END on_device_image_labeler]
+
+ // Process image with custom onSuccess() example
+ // [START process_image]
+ labeler.process(image)
+ .addOnSuccessListener(new OnSuccessListener<List<ImageLabel>>() {
+ @Override
+ public void onSuccess(List<ImageLabel> labels) {
+ // Task completed successfully
+ // ...
+ }
+ })
+ .addOnFailureListener(new OnFailureListener() {
+ @Override
+ public void onFailure(@NonNull Exception e) {
+ // Task failed with an exception
+ // ...
+ }
+ });
+ // [END process_image]
+
+ // Process image with example onSuccess()
+ labeler.process(image)
+ .addOnSuccessListener(new OnSuccessListener<List<ImageLabel>>() {
+ @Override
+ public void onSuccess(List<ImageLabel> labels) {
+ // [START get_image_label_info]
+ for (ImageLabel label : labels) {
+ String text = label.getText();
+ float confidence = label.getConfidence();
+ int index = label.getIndex();
+ }
+ // [END get_image_label_info]
+ }
+ });
+ }
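+
+ // A hypothetical convenience, not part of the API: return the label with the
+ // highest confidence, or null if nothing met the confidence threshold.
+ private ImageLabel getBestLabel(List<ImageLabel> labels) {
+ ImageLabel best = null;
+ for (ImageLabel label : labels) {
+ if (best == null || label.getConfidence() > best.getConfidence()) {
+ best = label;
+ }
+ }
+ return best;
+ }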
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/LanguageIdentificationActivity.java b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/LanguageIdentificationActivity.java
new file mode 100644
index 0000000..d342481
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/LanguageIdentificationActivity.java
@@ -0,0 +1,101 @@
+package com.google.example.mlkit;
+
+import android.os.Bundle;
+import android.util.Log;
+
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.appcompat.app.AppCompatActivity;
+
+import com.google.android.gms.tasks.OnFailureListener;
+import com.google.android.gms.tasks.OnSuccessListener;
+import com.google.mlkit.nl.languageid.IdentifiedLanguage;
+import com.google.mlkit.nl.languageid.LanguageIdentification;
+import com.google.mlkit.nl.languageid.LanguageIdentificationOptions;
+import com.google.mlkit.nl.languageid.LanguageIdentifier;
+
+import java.util.List;
+
+
+public class LanguageIdentificationActivity extends AppCompatActivity {
+
+ private static final String TAG = "LangID";
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ }
+
+ private void identifyLanguageWithStringInput(String text) {
+ // [START identify_languages]
+ LanguageIdentifier languageIdentifier =
+ LanguageIdentification.getClient();
+ languageIdentifier.identifyLanguage(text)
+ .addOnSuccessListener(
+ new OnSuccessListener<String>() {
+ @Override
+ public void onSuccess(@Nullable String languageCode) {
+ if ("und".equals(languageCode)) {
+ Log.i(TAG, "Can't identify language.");
+ } else {
+ Log.i(TAG, "Language: " + languageCode);
+ }
+ }
+ })
+ .addOnFailureListener(
+ new OnFailureListener() {
+ @Override
+ public void onFailure(@NonNull Exception e) {
+ // Model couldn’t be loaded or other internal error.
+ // ...
+ }
+ });
+ // [END identify_languages]
+ }
+
+ private void setConfidence() {
+ // [START set_confidence]
+ LanguageIdentifier languageIdentifier = LanguageIdentification.getClient(
+ new LanguageIdentificationOptions.Builder()
+ .setConfidenceThreshold(0.34f)
+ .build());
+ // [END set_confidence]
+ }
+
+ private void getPossibleLanguages(String text) {
+ // [START get_possible_languages]
+ LanguageIdentifier languageIdentifier =
+ LanguageIdentification.getClient();
+ languageIdentifier.identifyPossibleLanguages(text)
+ .addOnSuccessListener(new OnSuccessListener<List<IdentifiedLanguage>>() {
+ @Override
+ public void onSuccess(List<IdentifiedLanguage> identifiedLanguages) {
+ for (IdentifiedLanguage identifiedLanguage : identifiedLanguages) {
+ String language = identifiedLanguage.getLanguageTag();
+ float confidence = identifiedLanguage.getConfidence();
+ Log.i(TAG, language + " (" + confidence + ")");
+ }
+ }
+ })
+ .addOnFailureListener(
+ new OnFailureListener() {
+ @Override
+ public void onFailure(@NonNull Exception e) {
+ // Model couldn’t be loaded or other internal error.
+ // ...
+ }
+ });
+ // [END get_possible_languages]
+ }
+
+ private void setConfidenceThreshold() {
+ // [START set_confidence_threshold]
+ LanguageIdentificationOptions identifierOptions =
+ new LanguageIdentificationOptions.Builder()
+ .setConfidenceThreshold(0.5f)
+ .build();
+ LanguageIdentifier languageIdentifier = LanguageIdentification
+ .getClient(identifierOptions);
+ // [END set_confidence_threshold]
+ }
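+
+ // Sketch combining the snippets above: identify a language with a custom
+ // confidence threshold in a single call. The 0.5f threshold is illustrative.
+ private void identifyWithThreshold(String text) {
+ LanguageIdentifier languageIdentifier = LanguageIdentification.getClient(
+ new LanguageIdentificationOptions.Builder()
+ .setConfidenceThreshold(0.5f)
+ .build());
+ languageIdentifier.identifyLanguage(text)
+ .addOnSuccessListener(new OnSuccessListener<String>() {
+ @Override
+ public void onSuccess(@Nullable String languageCode) {
+ Log.i(TAG, "Language: " + languageCode);
+ }
+ });
+ }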
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/MLKitVisionImage.java b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/MLKitVisionImage.java
new file mode 100644
index 0000000..3d94697
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/MLKitVisionImage.java
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit;
+
+import android.app.Activity;
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.media.Image;
+import android.net.Uri;
+import android.os.Build;
+
+import androidx.annotation.RequiresApi;
+
+import android.util.SparseIntArray;
+import android.view.Surface;
+
+import com.google.mlkit.vision.common.InputImage;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import static android.content.Context.CAMERA_SERVICE;
+
+public class MLKitVisionImage {
+
+ private static final String TAG = "MLKIT";
+ private static final String MY_CAMERA_ID = "my_camera_id";
+
+ private void imageFromBitmap(Bitmap bitmap) {
+ int rotationDegree = 0;
+ // [START image_from_bitmap]
+ InputImage image = InputImage.fromBitmap(bitmap, rotationDegree);
+ // [END image_from_bitmap]
+ }
+
+ @RequiresApi(api = Build.VERSION_CODES.KITKAT)
+ private void imageFromMediaImage(Image mediaImage, int rotation) {
+ // [START image_from_media_image]
+ InputImage image = InputImage.fromMediaImage(mediaImage, rotation);
+ // [END image_from_media_image]
+ }
+
+ private void imageFromBuffer(ByteBuffer byteBuffer, int rotationDegrees) {
+ // [START set_metadata]
+ // TODO How do we document the FrameMetadata developers need to implement?
+ // [END set_metadata]
+
+ // [START image_from_buffer]
+ InputImage image = InputImage.fromByteBuffer(byteBuffer,
+ /* image width */ 480,
+ /* image height */ 360,
+ rotationDegrees,
+ InputImage.IMAGE_FORMAT_NV21 // or IMAGE_FORMAT_YV12
+ );
+ // [END image_from_buffer]
+ }
+
+ private void imageFromArray(byte[] byteArray, int rotation) {
+ // [START image_from_array]
+ InputImage image = InputImage.fromByteArray(
+ byteArray,
+ /* image width */480,
+ /* image height */360,
+ rotation,
+ InputImage.IMAGE_FORMAT_NV21 // or IMAGE_FORMAT_YV12
+ );
+ // [END image_from_array]
+ }
+
+ private void imageFromPath(Context context, Uri uri) {
+ // [START image_from_path]
+ InputImage image;
+ try {
+ image = InputImage.fromFilePath(context, uri);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ // [END image_from_path]
+ }
+
+ // [START get_rotation]
+ private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
+ static {
+ ORIENTATIONS.append(Surface.ROTATION_0, 0);
+ ORIENTATIONS.append(Surface.ROTATION_90, 90);
+ ORIENTATIONS.append(Surface.ROTATION_180, 180);
+ ORIENTATIONS.append(Surface.ROTATION_270, 270);
+ }
+
+ /**
+ * Get the angle by which an image must be rotated given the device's current
+ * orientation.
+ */
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+ private int getRotationCompensation(String cameraId, Activity activity, boolean isFrontFacing)
+ throws CameraAccessException {
+ // Get the device's current rotation relative to its "native" orientation.
+ // Then, from the ORIENTATIONS table, look up the angle the image must be
+ // rotated to compensate for the device's rotation.
+ int deviceRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
+ int rotationCompensation = ORIENTATIONS.get(deviceRotation);
+
+ // Get the device's sensor orientation.
+ CameraManager cameraManager = (CameraManager) activity.getSystemService(CAMERA_SERVICE);
+ int sensorOrientation = cameraManager
+ .getCameraCharacteristics(cameraId)
+ .get(CameraCharacteristics.SENSOR_ORIENTATION);
+
+ if (isFrontFacing) {
+ rotationCompensation = (sensorOrientation + rotationCompensation) % 360;
+ } else { // back-facing
+ rotationCompensation = (sensorOrientation - rotationCompensation + 360) % 360;
+ }
+ return rotationCompensation;
+ }
+ // [END get_rotation]
+
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+ private void getCompensation(Activity activity, boolean isFrontFacing) throws CameraAccessException {
+ // Get the ID of the camera using CameraManager. Then:
+ int rotation = getRotationCompensation(MY_CAMERA_ID, activity, isFrontFacing);
+ }
+
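+ // Sketch of the step the comment above elides, assuming any back-facing
+ // camera will do: look up a camera ID with CameraManager before calling
+ // getRotationCompensation().
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+ private String getBackCameraId(Activity activity) throws CameraAccessException {
+ CameraManager cameraManager = (CameraManager) activity.getSystemService(CAMERA_SERVICE);
+ for (String id : cameraManager.getCameraIdList()) {
+ Integer facing = cameraManager.getCameraCharacteristics(id)
+ .get(CameraCharacteristics.LENS_FACING);
+ if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
+ return id;
+ }
+ }
+ return null;
+ }
+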
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/MainActivity.java b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/MainActivity.java
new file mode 100644
index 0000000..6402976
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/MainActivity.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit;
+
+import androidx.appcompat.app.AppCompatActivity;
+
+public class MainActivity extends AppCompatActivity {
+
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/ObjectDetectionActivity.java b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/ObjectDetectionActivity.java
new file mode 100644
index 0000000..23d8d0d
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/ObjectDetectionActivity.java
@@ -0,0 +1,166 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit;
+
+import android.graphics.Bitmap;
+import android.graphics.Rect;
+import android.os.Bundle;
+
+import androidx.annotation.NonNull;
+import androidx.appcompat.app.AppCompatActivity;
+
+import com.google.android.gms.tasks.OnFailureListener;
+import com.google.android.gms.tasks.OnSuccessListener;
+import com.google.mlkit.common.model.LocalModel;
+import com.google.mlkit.vision.common.InputImage;
+import com.google.mlkit.vision.objects.DetectedObject;
+import com.google.mlkit.vision.objects.ObjectDetection;
+import com.google.mlkit.vision.objects.ObjectDetector;
+import com.google.mlkit.vision.objects.custom.CustomObjectDetectorOptions;
+import com.google.mlkit.vision.objects.defaults.ObjectDetectorOptions;
+import com.google.mlkit.vision.objects.defaults.PredefinedCategory;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class ObjectDetectionActivity extends AppCompatActivity {
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ }
+
+ private void useDefaultObjectDetector() {
+ // [START create_default_options]
+ // Live detection and tracking
+ ObjectDetectorOptions options =
+ new ObjectDetectorOptions.Builder()
+ .setDetectorMode(ObjectDetectorOptions.STREAM_MODE)
+ .enableClassification() // Optional
+ .build();
+
+ // Multiple object detection in static images
+ options =
+ new ObjectDetectorOptions.Builder()
+ .setDetectorMode(ObjectDetectorOptions.SINGLE_IMAGE_MODE)
+ .enableMultipleObjects()
+ .enableClassification() // Optional
+ .build();
+ // [END create_default_options]
+
+ // [START create_detector]
+ ObjectDetector objectDetector = ObjectDetection.getClient(options);
+ // [END create_detector]
+
+ InputImage image =
+ InputImage.fromBitmap(
+ Bitmap.createBitmap(new int[100 * 100], 100, 100, Bitmap.Config.ARGB_8888),
+ 0);
+
+ // [START process_image]
+ objectDetector.process(image)
+ .addOnSuccessListener(
+ new OnSuccessListener<List<DetectedObject>>() {
+ @Override
+ public void onSuccess(List<DetectedObject> detectedObjects) {
+ // Task completed successfully
+ // ...
+ }
+ })
+ .addOnFailureListener(
+ new OnFailureListener() {
+ @Override
+ public void onFailure(@NonNull Exception e) {
+ // Task failed with an exception
+ // ...
+ }
+ });
+ // [END process_image]
+
+ List<DetectedObject> results = new ArrayList<>();
+ // [START read_results_default]
+ // The list of detected objects contains one item if multiple
+ // object detection wasn't enabled.
+ for (DetectedObject detectedObject : results) {
+ Rect boundingBox = detectedObject.getBoundingBox();
+ Integer trackingId = detectedObject.getTrackingId();
+ for (DetectedObject.Label label : detectedObject.getLabels()) {
+ String text = label.getText();
+ if (PredefinedCategory.FOOD.equals(text)) {
+ // ...
+ }
+ int index = label.getIndex();
+ if (PredefinedCategory.FOOD_INDEX == index) {
+ // ...
+ }
+ float confidence = label.getConfidence();
+ }
+ }
+ // [END read_results_default]
+ }
+
+ private void useCustomObjectDetector() {
+ InputImage image =
+ InputImage.fromBitmap(
+ Bitmap.createBitmap(new int[100 * 100], 100, 100, Bitmap.Config.ARGB_8888),
+ 0);
+
+ // [START create_local_model]
+ LocalModel localModel =
+ new LocalModel.Builder()
+ .setAssetFilePath("asset_file_path_to_tflite_model")
+ // or .setAbsoluteFilePath("absolute_file_path_to_tflite_model")
+ .build();
+ // [END create_local_model]
+
+ // [START create_custom_options]
+ // Live detection and tracking
+ CustomObjectDetectorOptions options =
+ new CustomObjectDetectorOptions.Builder(localModel)
+ .setDetectorMode(CustomObjectDetectorOptions.STREAM_MODE)
+ .enableClassification()
+ .setClassificationConfidenceThreshold(0.5f)
+ .setMaxPerObjectLabelCount(3)
+ .build();
+
+ // Multiple object detection in static images
+ options =
+ new CustomObjectDetectorOptions.Builder(localModel)
+ .setDetectorMode(CustomObjectDetectorOptions.SINGLE_IMAGE_MODE)
+ .enableMultipleObjects()
+ .enableClassification()
+ .setClassificationConfidenceThreshold(0.5f)
+ .setMaxPerObjectLabelCount(3)
+ .build();
+ // [END create_custom_options]
+
+ List<DetectedObject> results = new ArrayList<>();
+ // [START read_results_custom]
+ // The list of detected objects contains one item if multiple
+ // object detection wasn't enabled.
+ for (DetectedObject detectedObject : results) {
+ Rect boundingBox = detectedObject.getBoundingBox();
+ Integer trackingId = detectedObject.getTrackingId();
+ for (DetectedObject.Label label : detectedObject.getLabels()) {
+ String text = label.getText();
+ int index = label.getIndex();
+ float confidence = label.getConfidence();
+ }
+ }
+ // [END read_results_custom]
+ }
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/TextRecognitionActivity.java b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/TextRecognitionActivity.java
new file mode 100644
index 0000000..6ed2b09
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/TextRecognitionActivity.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit;
+
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.os.Bundle;
+import androidx.annotation.NonNull;
+import androidx.appcompat.app.AppCompatActivity;
+
+import com.google.android.gms.tasks.OnFailureListener;
+import com.google.android.gms.tasks.OnSuccessListener;
+import com.google.android.gms.tasks.Task;
+import com.google.mlkit.vision.common.InputImage;
+import com.google.mlkit.vision.text.Text;
+import com.google.mlkit.vision.text.TextRecognition;
+import com.google.mlkit.vision.text.TextRecognizer;
+import com.google.mlkit.vision.text.latin.TextRecognizerOptions;
+
+public class TextRecognitionActivity extends AppCompatActivity {
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ }
+
+ private void recognizeText(InputImage image) {
+
+ // [START get_detector_default]
+ TextRecognizer recognizer = TextRecognition.getClient(TextRecognizerOptions.DEFAULT_OPTIONS);
+ // [END get_detector_default]
+
+ // [START run_detector]
+ Task<Text> result =
+ recognizer.process(image)
+ .addOnSuccessListener(new OnSuccessListener<Text>() {
+ @Override
+ public void onSuccess(Text visionText) {
+ // Task completed successfully
+ // [START_EXCLUDE]
+ // [START get_text]
+ for (Text.TextBlock block : visionText.getTextBlocks()) {
+ Rect boundingBox = block.getBoundingBox();
+ Point[] cornerPoints = block.getCornerPoints();
+ String text = block.getText();
+
+ for (Text.Line line: block.getLines()) {
+ // ...
+ for (Text.Element element: line.getElements()) {
+ // ...
+ for (Text.Symbol symbol: element.getSymbols()) {
+ // ...
+ }
+ }
+ }
+ }
+ // [END get_text]
+ // [END_EXCLUDE]
+ }
+ })
+ .addOnFailureListener(
+ new OnFailureListener() {
+ @Override
+ public void onFailure(@NonNull Exception e) {
+ // Task failed with an exception
+ // ...
+ }
+ });
+ // [END run_detector]
+ }
+
+ private void processTextBlock(Text result) {
+ // [START mlkit_process_text_block]
+ String resultText = result.getText();
+ for (Text.TextBlock block : result.getTextBlocks()) {
+ String blockText = block.getText();
+ Point[] blockCornerPoints = block.getCornerPoints();
+ Rect blockFrame = block.getBoundingBox();
+ for (Text.Line line : block.getLines()) {
+ String lineText = line.getText();
+ Point[] lineCornerPoints = line.getCornerPoints();
+ Rect lineFrame = line.getBoundingBox();
+ for (Text.Element element : line.getElements()) {
+ String elementText = element.getText();
+ Point[] elementCornerPoints = element.getCornerPoints();
+ Rect elementFrame = element.getBoundingBox();
+ for (Text.Symbol symbol : element.getSymbols()) {
+ String symbolText = symbol.getText();
+ Point[] symbolCornerPoints = symbol.getCornerPoints();
+ Rect symbolFrame = symbol.getBoundingBox();
+ }
+ }
+ }
+ }
+ // [END mlkit_process_text_block]
+ }
+
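+ // A hypothetical convenience built on the hierarchy above: concatenate every
+ // recognized line into a single newline-separated string.
+ private String getAllLineText(Text result) {
+ StringBuilder sb = new StringBuilder();
+ for (Text.TextBlock block : result.getTextBlocks()) {
+ for (Text.Line line : block.getLines()) {
+ sb.append(line.getText()).append('\n');
+ }
+ }
+ return sb.toString();
+ }
+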
+ private TextRecognizer getTextRecognizer() {
+ // [START mlkit_local_doc_recognizer]
+ TextRecognizer detector = TextRecognition.getClient(TextRecognizerOptions.DEFAULT_OPTIONS);
+ // [END mlkit_local_doc_recognizer]
+
+ return detector;
+ }
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/BarcodeScanningActivity.kt b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/BarcodeScanningActivity.kt
new file mode 100644
index 0000000..71de6d0
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/BarcodeScanningActivity.kt
@@ -0,0 +1,77 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit.kotlin
+
+import androidx.appcompat.app.AppCompatActivity
+import com.google.mlkit.vision.barcode.BarcodeScannerOptions
+import com.google.mlkit.vision.barcode.BarcodeScanning
+import com.google.mlkit.vision.barcode.common.Barcode
+import com.google.mlkit.vision.common.InputImage
+
+class BarcodeScanningActivity : AppCompatActivity() {
+
+ private fun scanBarcodes(image: InputImage) {
+ // [START set_detector_options]
+ val options = BarcodeScannerOptions.Builder()
+ .setBarcodeFormats(
+ Barcode.FORMAT_QR_CODE,
+ Barcode.FORMAT_AZTEC)
+ .build()
+ // [END set_detector_options]
+
+ // [START get_detector]
+ val scanner = BarcodeScanning.getClient()
+ // Or, to specify the formats to recognize:
+ // val scanner = BarcodeScanning.getClient(options)
+ // [END get_detector]
+
+ // [START run_detector]
+ val result = scanner.process(image)
+ .addOnSuccessListener { barcodes ->
+ // Task completed successfully
+ // [START_EXCLUDE]
+ // [START get_barcodes]
+ for (barcode in barcodes) {
+ val bounds = barcode.boundingBox
+ val corners = barcode.cornerPoints
+
+ val rawValue = barcode.rawValue
+
+ val valueType = barcode.valueType
+ // See API reference for complete list of supported types
+ when (valueType) {
+ Barcode.TYPE_WIFI -> {
+ val ssid = barcode.wifi!!.ssid
+ val password = barcode.wifi!!.password
+ val type = barcode.wifi!!.encryptionType
+ }
+ Barcode.TYPE_URL -> {
+ val title = barcode.url!!.title
+ val url = barcode.url!!.url
+ }
+ }
+ }
+ // [END get_barcodes]
+ // [END_EXCLUDE]
+ }
+ .addOnFailureListener {
+ // Task failed with an exception
+ // ...
+ }
+ // [END run_detector]
+ }
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/FaceDetectionActivity.kt b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/FaceDetectionActivity.kt
new file mode 100644
index 0000000..d69cb27
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/FaceDetectionActivity.kt
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit.kotlin
+
+import androidx.appcompat.app.AppCompatActivity
+import com.google.mlkit.vision.common.InputImage
+import com.google.mlkit.vision.face.*
+
+class FaceDetectionActivity : AppCompatActivity() {
+
+ private fun detectFaces(image: InputImage) {
+ // [START set_detector_options]
+ val options = FaceDetectorOptions.Builder()
+ .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE)
+ .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
+ .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
+ .setMinFaceSize(0.15f)
+ .enableTracking()
+ .build()
+ // [END set_detector_options]
+
+ // [START get_detector]
+ val detector = FaceDetection.getClient(options)
+ // Or, to use the default option:
+ // val detector = FaceDetection.getClient();
+ // [END get_detector]
+
+ // [START run_detector]
+ val result = detector.process(image)
+ .addOnSuccessListener { faces ->
+ // Task completed successfully
+ // [START_EXCLUDE]
+ // [START get_face_info]
+ for (face in faces) {
+ val bounds = face.boundingBox
+ val rotY = face.headEulerAngleY // Head is rotated to the right rotY degrees
+ val rotZ = face.headEulerAngleZ // Head is tilted sideways rotZ degrees
+
+ // If landmark detection was enabled (mouth, ears, eyes, cheeks, and
+ // nose available):
+ val leftEar = face.getLandmark(FaceLandmark.LEFT_EAR)
+ leftEar?.let {
+ val leftEarPos = leftEar.position
+ }
+
+ // If classification was enabled:
+ if (face.smilingProbability != null) {
+ val smileProb = face.smilingProbability
+ }
+ if (face.rightEyeOpenProbability != null) {
+ val rightEyeOpenProb = face.rightEyeOpenProbability
+ }
+
+ // If face tracking was enabled:
+ if (face.trackingId != null) {
+ val id = face.trackingId
+ }
+ }
+ // [END get_face_info]
+ // [END_EXCLUDE]
+ }
+ .addOnFailureListener { e ->
+ // Task failed with an exception
+ // ...
+ }
+ // [END run_detector]
+ }
+
+ private fun faceOptionsExamples() {
+ // [START mlkit_face_options_examples]
+ // High-accuracy landmark detection and face classification
+ val highAccuracyOpts = FaceDetectorOptions.Builder()
+ .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE)
+ .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
+ .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
+ .build()
+
+ // Real-time contour detection
+ val realTimeOpts = FaceDetectorOptions.Builder()
+ .setContourMode(FaceDetectorOptions.CONTOUR_MODE_ALL)
+ .build()
+ // [END mlkit_face_options_examples]
+ }
+
+ private fun processFaceList(faces: List<Face>) {
+ // [START mlkit_face_list]
+ for (face in faces) {
+ val bounds = face.boundingBox
+ val rotY = face.headEulerAngleY // Head is rotated to the right rotY degrees
+ val rotZ = face.headEulerAngleZ // Head is tilted sideways rotZ degrees
+
+ // If landmark detection was enabled (mouth, ears, eyes, cheeks, and
+ // nose available):
+ val leftEar = face.getLandmark(FaceLandmark.LEFT_EAR)
+ leftEar?.let {
+ val leftEarPos = leftEar.position
+ }
+
+ // If contour detection was enabled:
+ val leftEyeContour = face.getContour(FaceContour.LEFT_EYE)?.points
+ val upperLipBottomContour = face.getContour(FaceContour.UPPER_LIP_BOTTOM)?.points
+
+ // If classification was enabled:
+ if (face.smilingProbability != null) {
+ val smileProb = face.smilingProbability
+ }
+ if (face.rightEyeOpenProbability != null) {
+ val rightEyeOpenProb = face.rightEyeOpenProbability
+ }
+
+ // If face tracking was enabled:
+ if (face.trackingId != null) {
+ val id = face.trackingId
+ }
+ }
+ // [END mlkit_face_list]
+ }
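+
+ // A hypothetical helper, not from the ML Kit docs: pick the face with the
+ // largest bounding box, a common heuristic when only one face matters.
+ // Assumes Kotlin 1.4+ for maxByOrNull.
+ private fun getLargestFace(faces: List<Face>): Face? =
+ faces.maxByOrNull { it.boundingBox.width() * it.boundingBox.height() }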
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/ImageLabelingActivity.kt b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/ImageLabelingActivity.kt
new file mode 100644
index 0000000..9e49fc1
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/ImageLabelingActivity.kt
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit.kotlin
+
+import androidx.appcompat.app.AppCompatActivity
+import com.google.mlkit.vision.common.InputImage
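+import com.google.mlkit.vision.label.ImageLabel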
+import com.google.mlkit.vision.label.ImageLabeler
+import com.google.mlkit.vision.label.ImageLabeling
+import com.google.mlkit.vision.label.defaults.ImageLabelerOptions
+
+class ImageLabelingActivity : AppCompatActivity() {
+
+ private fun labelImages(image: InputImage) {
+ val options = ImageLabelerOptions.Builder()
+ .setConfidenceThreshold(0.8f)
+ .build()
+
+ val labeler = ImageLabeling.getClient(options)
+
+ // [START run_detector]
+ val result = labeler.process(image)
+ .addOnSuccessListener { labels ->
+ // Task completed successfully
+ // [START_EXCLUDE]
+ // [START get_labels]
+ for (label in labels) {
+ val text = label.text
+ val confidence = label.confidence
+ }
+ // [END get_labels]
+ // [END_EXCLUDE]
+ }
+ .addOnFailureListener { e ->
+ // Task failed with an exception
+ // ...
+ }
+ // [END run_detector]
+ }
+
+ private fun configureAndRunImageLabeler(image: InputImage) {
+ // [START on_device_image_labeler]
+ // To use default options:
+ val labeler = ImageLabeling.getClient(ImageLabelerOptions.DEFAULT_OPTIONS)
+
+ // Or, to set the minimum confidence required:
+ // val options = ImageLabelerOptions.Builder()
+ // .setConfidenceThreshold(0.7f)
+ // .build()
+ // val labeler = ImageLabeling.getClient(options)
+
+ // [END on_device_image_labeler]
+
+ // Process image with custom onSuccess() example
+ // [START process_image]
+ labeler.process(image)
+ .addOnSuccessListener { labels ->
+ // Task completed successfully
+ // ...
+ }
+ .addOnFailureListener { e ->
+ // Task failed with an exception
+ // ...
+ }
+ // [END process_image]
+
+ // Process image with example onSuccess()
+ labeler.process(image)
+ .addOnSuccessListener { labels ->
+ // [START get_image_label_info]
+ for (label in labels) {
+ val text = label.text
+ val confidence = label.confidence
+ val index = label.index
+ }
+ // [END get_image_label_info]
+ }
+ }
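+
+ // A hypothetical convenience, not part of the API: the label with the highest
+ // confidence, or null if nothing met the threshold. Assumes Kotlin 1.4+
+ // for maxByOrNull.
+ private fun getBestLabel(labels: List<ImageLabel>): ImageLabel? =
+ labels.maxByOrNull { it.confidence }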
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/LanguageIdentificationActivity.kt b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/LanguageIdentificationActivity.kt
new file mode 100644
index 0000000..ddc45d1
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/LanguageIdentificationActivity.kt
@@ -0,0 +1,73 @@
+package com.google.example.mlkit.kotlin
+
+import android.os.Bundle
+import android.util.Log
+import androidx.appcompat.app.AppCompatActivity
+import com.google.mlkit.nl.languageid.LanguageIdentification
+import com.google.mlkit.nl.languageid.LanguageIdentificationOptions
+
+private const val TAG = "LangIDActivity"
+
+class LanguageIdentificationActivity : AppCompatActivity() {
+
+ override fun onCreate(savedInstanceState: Bundle?) {
+ super.onCreate(savedInstanceState)
+ }
+
+ fun identifyLanguageWithStringInput(text: String) {
+ // [START identify_languages]
+ val languageIdentifier = LanguageIdentification.getClient()
+ languageIdentifier.identifyLanguage(text)
+ .addOnSuccessListener { languageCode ->
+ if (languageCode == "und") {
+ Log.i(TAG, "Can't identify language.")
+ } else {
+ Log.i(TAG, "Language: $languageCode")
+ }
+ }
+ .addOnFailureListener {
+ // Model couldn’t be loaded or other internal error.
+ // ...
+ }
+ // [END identify_languages]
+ }
+
+ fun setConfidence() {
+ // [START set_confidence]
+ val languageIdentifier = LanguageIdentification
+ .getClient(LanguageIdentificationOptions.Builder()
+ .setConfidenceThreshold(0.34f)
+ .build())
+ // [END set_confidence]
+ }
+
+ fun getPossibleLanguages(text: String) {
+ // [START get_possible_languages]
+ val languageIdentifier = LanguageIdentification.getClient()
+ languageIdentifier.identifyPossibleLanguages(text)
+ .addOnSuccessListener { identifiedLanguages ->
+ for (identifiedLanguage in identifiedLanguages) {
+ val language = identifiedLanguage.languageTag
+ val confidence = identifiedLanguage.confidence
+ Log.i(TAG, "$language $confidence")
+ }
+ }
+ .addOnFailureListener {
+ // Model couldn’t be loaded or other internal error.
+ // ...
+ }
+ // [END get_possible_languages]
+ }
+
+ private fun setConfidenceThreshold() {
+ // [START set_confidence_threshold]
+ val identifierOptions = LanguageIdentificationOptions.Builder()
+ .setConfidenceThreshold(0.5f)
+ .build()
+ val languageIdentifier = LanguageIdentification
+ .getClient(identifierOptions)
+ // [END set_confidence_threshold]
+ }
+
+
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/MLKitVisionImage.kt b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/MLKitVisionImage.kt
new file mode 100644
index 0000000..b967572
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/MLKitVisionImage.kt
@@ -0,0 +1,144 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit.kotlin
+
+import android.app.Activity
+import android.content.Context
+import android.content.Context.CAMERA_SERVICE
+import android.graphics.Bitmap
+import android.hardware.camera2.CameraAccessException
+import android.hardware.camera2.CameraCharacteristics
+import android.hardware.camera2.CameraManager
+import android.media.Image
+import android.net.Uri
+import android.os.Build
+import android.util.SparseIntArray
+import android.view.Surface
+import androidx.annotation.RequiresApi
+import com.google.mlkit.vision.common.InputImage
+import java.io.IOException
+import java.nio.ByteBuffer
+
+
+class MLKitVisionImage {
+
+ private fun imageFromBitmap(bitmap: Bitmap) {
+ val rotationDegrees = 0
+ // [START image_from_bitmap]
+ val image = InputImage.fromBitmap(bitmap, rotationDegrees)
+ // [END image_from_bitmap]
+ }
+
+ @RequiresApi(api = Build.VERSION_CODES.KITKAT)
+ private fun imageFromMediaImage(mediaImage: Image, rotation: Int) {
+ // [START image_from_media_image]
+ val image = InputImage.fromMediaImage(mediaImage, rotation)
+ // [END image_from_media_image]
+ }
+
+ private fun imageFromBuffer(byteBuffer: ByteBuffer, rotationDegrees: Int) {
+ // [START set_metadata]
+ // TODO How do we document the FrameMetadata developers need to implement?
+ // [END set_metadata]
+ // [START image_from_buffer]
+ val image = InputImage.fromByteBuffer(
+ byteBuffer,
+ /* image width */ 480,
+ /* image height */ 360,
+ rotationDegrees,
+ InputImage.IMAGE_FORMAT_NV21 // or IMAGE_FORMAT_YV12
+ )
+ // [END image_from_buffer]
+ }
+
+ private fun imageFromArray(byteArray: ByteArray, rotationDegrees: Int) {
+ // [START image_from_array]
+ val image = InputImage.fromByteArray(
+ byteArray,
+ /* image width */ 480,
+ /* image height */ 360,
+ rotationDegrees,
+ InputImage.IMAGE_FORMAT_NV21 // or IMAGE_FORMAT_YV12
+ )
+
+ // [END image_from_array]
+ }
+
+ private fun imageFromPath(context: Context, uri: Uri) {
+ // [START image_from_path]
+ val image: InputImage
+ try {
+ image = InputImage.fromFilePath(context, uri)
+ } catch (e: IOException) {
+ e.printStackTrace()
+ }
+ // [END image_from_path]
+ }
+
+ // [START get_rotation]
+ /**
+ * Get the angle by which an image must be rotated given the device's current
+ * orientation.
+ */
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+ @Throws(CameraAccessException::class)
+ private fun getRotationCompensation(cameraId: String, activity: Activity, isFrontFacing: Boolean): Int {
+ // Get the device's current rotation relative to its "native" orientation.
+ // Then, from the ORIENTATIONS table, look up the angle the image must be
+ // rotated to compensate for the device's rotation.
+ val deviceRotation = activity.windowManager.defaultDisplay.rotation
+ var rotationCompensation = ORIENTATIONS.get(deviceRotation)
+
+ // Get the device's sensor orientation.
+ val cameraManager = activity.getSystemService(CAMERA_SERVICE) as CameraManager
+ val sensorOrientation = cameraManager
+ .getCameraCharacteristics(cameraId)
+ .get(CameraCharacteristics.SENSOR_ORIENTATION)!!
+
+ if (isFrontFacing) {
+ rotationCompensation = (sensorOrientation + rotationCompensation) % 360
+ } else { // back-facing
+ rotationCompensation = (sensorOrientation - rotationCompensation + 360) % 360
+ }
+ return rotationCompensation
+ }
+ // [END get_rotation]
+
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+ @Throws(CameraAccessException::class)
+ private fun getCompensation(activity: Activity, context: Context, isFrontFacing: Boolean) {
+ // Get the ID of the camera using CameraManager. Then:
+ val rotation = getRotationCompensation(MY_CAMERA_ID, activity, isFrontFacing)
+ }
+
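+ // Sketch of the step the comment above elides, assuming any back-facing
+ // camera will do: look up a camera ID with CameraManager before calling
+ // getRotationCompensation().
+ @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+ @Throws(CameraAccessException::class)
+ private fun getBackCameraId(activity: Activity): String? {
+ val cameraManager = activity.getSystemService(CAMERA_SERVICE) as CameraManager
+ return cameraManager.cameraIdList.firstOrNull { id ->
+ cameraManager.getCameraCharacteristics(id)
+ .get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK
+ }
+ }
+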
+ companion object {
+
+ private val TAG = "MLKIT"
+ private val MY_CAMERA_ID = "my_camera_id"
+
+ // [START camera_orientations]
+ private val ORIENTATIONS = SparseIntArray()
+
+ init {
+ ORIENTATIONS.append(Surface.ROTATION_0, 0)
+ ORIENTATIONS.append(Surface.ROTATION_90, 90)
+ ORIENTATIONS.append(Surface.ROTATION_180, 180)
+ ORIENTATIONS.append(Surface.ROTATION_270, 270)
+ }
+ // [END camera_orientations]
+ }
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/MainActivity.kt b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/MainActivity.kt
new file mode 100644
index 0000000..3b608af
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/MainActivity.kt
@@ -0,0 +1,21 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit.kotlin
+
+import androidx.appcompat.app.AppCompatActivity
+
+class MainActivity : AppCompatActivity()
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/ObjectDetectionActivity.kt b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/ObjectDetectionActivity.kt
new file mode 100644
index 0000000..27308f5
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/ObjectDetectionActivity.kt
@@ -0,0 +1,134 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit.kotlin
+
+import android.graphics.Bitmap
+import androidx.appcompat.app.AppCompatActivity
+import com.google.mlkit.common.model.LocalModel
+import com.google.mlkit.vision.common.InputImage
+import com.google.mlkit.vision.objects.DetectedObject
+import com.google.mlkit.vision.objects.ObjectDetection
+import com.google.mlkit.vision.objects.custom.CustomObjectDetectorOptions
+import com.google.mlkit.vision.objects.defaults.ObjectDetectorOptions
+import com.google.mlkit.vision.objects.defaults.PredefinedCategory
+
+class ObjectDetectionActivity : AppCompatActivity() {
+
+ private fun useDefaultObjectDetector() {
+ // [START create_default_options]
+ // Live detection and tracking
+ var options = ObjectDetectorOptions.Builder()
+ .setDetectorMode(ObjectDetectorOptions.STREAM_MODE)
+ .enableClassification() // Optional
+ .build()
+
+ // Multiple object detection in static images
+ options = ObjectDetectorOptions.Builder()
+ .setDetectorMode(ObjectDetectorOptions.SINGLE_IMAGE_MODE)
+ .enableMultipleObjects()
+ .enableClassification() // Optional
+ .build()
+ // [END create_default_options]
+
+ // [START create_detector]
+ val objectDetector = ObjectDetection.getClient(options)
+ // [END create_detector]
+
+ val image = InputImage.fromBitmap(
+ Bitmap.createBitmap(IntArray(100 * 100), 100, 100, Bitmap.Config.ARGB_8888),
+ 0)
+
+ // [START process_image]
+ objectDetector.process(image)
+ .addOnSuccessListener { results ->
+ // Task completed successfully
+ // ...
+ }
+ .addOnFailureListener { e ->
+ // Task failed with an exception
+ // ...
+ }
+ // [END process_image]
+
+ val results = listOf<DetectedObject>()
+ // [START read_results_default]
+ for (detectedObject in results) {
+ val boundingBox = detectedObject.boundingBox
+ val trackingId = detectedObject.trackingId
+ for (label in detectedObject.labels) {
+ val text = label.text
+ if (PredefinedCategory.FOOD == text) {
+ // ...
+ }
+ val index = label.index
+ if (PredefinedCategory.FOOD_INDEX == index) {
+ // ...
+ }
+ val confidence = label.confidence
+ }
+ }
+ // [END read_results_default]
+ }
+
+ private fun useCustomObjectDetector() {
+ val image = InputImage.fromBitmap(
+ Bitmap.createBitmap(IntArray(100 * 100), 100, 100, Bitmap.Config.ARGB_8888),
+ 0)
+
+ // [START create_local_model]
+ val localModel =
+ LocalModel.Builder()
+ .setAssetFilePath("asset_file_path_to_tflite_model")
+ // or .setAbsoluteFilePath("absolute_file_path_to_tflite_model")
+ .build()
+ // [END create_local_model]
+
+ // [START create_custom_options]
+ // Live detection and tracking
+ var options =
+ CustomObjectDetectorOptions.Builder(localModel)
+ .setDetectorMode(CustomObjectDetectorOptions.STREAM_MODE)
+ .enableClassification()
+ .setClassificationConfidenceThreshold(0.5f)
+ .setMaxPerObjectLabelCount(3)
+ .build()
+
+ // Multiple object detection in static images
+ options =
+ CustomObjectDetectorOptions.Builder(localModel)
+ .setDetectorMode(CustomObjectDetectorOptions.SINGLE_IMAGE_MODE)
+ .enableMultipleObjects()
+ .enableClassification()
+ .setClassificationConfidenceThreshold(0.5f)
+ .setMaxPerObjectLabelCount(3)
+ .build()
+ // [END create_custom_options]
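+
+        // Not part of the original snippet: a minimal sketch of running the
+        // custom detector, mirroring the default-detector flow shown above.
+        val objectDetector = ObjectDetection.getClient(options)
+        objectDetector.process(image)
+            .addOnSuccessListener { detectedObjects ->
+                // Task completed successfully
+            }
+            .addOnFailureListener { e ->
+                // Task failed with an exception
+            }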
+
+ val results = listOf<DetectedObject>()
+ // [START read_results_custom]
+ for (detectedObject in results) {
+ val boundingBox = detectedObject.boundingBox
+ val trackingId = detectedObject.trackingId
+ for (label in detectedObject.labels) {
+ val text = label.text
+ val index = label.index
+ val confidence = label.confidence
+ }
+ }
+ // [END read_results_custom]
+ }
+}
diff --git a/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/TextRecognitionActivity.kt b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/TextRecognitionActivity.kt
new file mode 100644
index 0000000..726bbe3
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/java/com/google/example/mlkit/kotlin/TextRecognitionActivity.kt
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.example.mlkit.kotlin
+
+import androidx.appcompat.app.AppCompatActivity
+import com.google.mlkit.vision.common.InputImage
+import com.google.mlkit.vision.text.Text
+import com.google.mlkit.vision.text.TextRecognition
+import com.google.mlkit.vision.text.TextRecognizer
+import com.google.mlkit.vision.text.latin.TextRecognizerOptions
+
+class TextRecognitionActivity : AppCompatActivity() {
+
+ private fun recognizeText(image: InputImage) {
+
+ // [START get_detector_default]
+ val recognizer = TextRecognition.getClient(TextRecognizerOptions.DEFAULT_OPTIONS)
+ // [END get_detector_default]
+
+ // [START run_detector]
+ val result = recognizer.process(image)
+ .addOnSuccessListener { visionText ->
+ // Task completed successfully
+ // [START_EXCLUDE]
+ // [START get_text]
+ for (block in visionText.textBlocks) {
+ val boundingBox = block.boundingBox
+ val cornerPoints = block.cornerPoints
+ val text = block.text
+
+ for (line in block.lines) {
+ // ...
+ for (element in line.elements) {
+ // ...
+ }
+ }
+ }
+ // [END get_text]
+ // [END_EXCLUDE]
+ }
+ .addOnFailureListener { e ->
+ // Task failed with an exception
+ // ...
+ }
+ // [END run_detector]
+ }
+
+ private fun processTextBlock(result: Text) {
+ // [START mlkit_process_text_block]
+ val resultText = result.text
+ for (block in result.textBlocks) {
+ val blockText = block.text
+ val blockCornerPoints = block.cornerPoints
+ val blockFrame = block.boundingBox
+ for (line in block.lines) {
+ val lineText = line.text
+ val lineCornerPoints = line.cornerPoints
+ val lineFrame = line.boundingBox
+ for (element in line.elements) {
+ val elementText = element.text
+ val elementCornerPoints = element.cornerPoints
+ val elementFrame = element.boundingBox
+ }
+ }
+ }
+ // [END mlkit_process_text_block]
+ }
+
+ private fun getTextRecognizer(): TextRecognizer {
+ // [START mlkit_local_doc_recognizer]
+ return TextRecognition.getClient(TextRecognizerOptions.DEFAULT_OPTIONS)
+ // [END mlkit_local_doc_recognizer]
+ }
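+
+    // Not part of the original snippet: example wiring that feeds a Bitmap
+    // into recognizeText() by wrapping it in an InputImage first.
+    private fun recognizeTextFromBitmap(bitmap: android.graphics.Bitmap) {
+        // 0 is the image's rotation in degrees; use the real rotation in practice.
+        val image = InputImage.fromBitmap(bitmap, 0)
+        recognizeText(image)
+    }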
+}
diff --git a/mlkit/android-snippets/app/src/main/res/layout/activity_main.xml b/mlkit/android-snippets/app/src/main/res/layout/activity_main.xml
new file mode 100644
index 0000000..3e3fc69
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/res/layout/activity_main.xml
@@ -0,0 +1,18 @@
+
+
+
+
+
diff --git a/mlkit/android-snippets/app/src/main/res/mipmap-hdpi/ic_launcher.png b/mlkit/android-snippets/app/src/main/res/mipmap-hdpi/ic_launcher.png
new file mode 100644
index 0000000..cde69bc
Binary files /dev/null and b/mlkit/android-snippets/app/src/main/res/mipmap-hdpi/ic_launcher.png differ
diff --git a/mlkit/android-snippets/app/src/main/res/mipmap-mdpi/ic_launcher.png b/mlkit/android-snippets/app/src/main/res/mipmap-mdpi/ic_launcher.png
new file mode 100644
index 0000000..c133a0c
Binary files /dev/null and b/mlkit/android-snippets/app/src/main/res/mipmap-mdpi/ic_launcher.png differ
diff --git a/mlkit/android-snippets/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/mlkit/android-snippets/app/src/main/res/mipmap-xhdpi/ic_launcher.png
new file mode 100644
index 0000000..bfa42f0
Binary files /dev/null and b/mlkit/android-snippets/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ
diff --git a/mlkit/android-snippets/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/mlkit/android-snippets/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
new file mode 100644
index 0000000..324e72c
Binary files /dev/null and b/mlkit/android-snippets/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ
diff --git a/mlkit/android-snippets/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/mlkit/android-snippets/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
new file mode 100644
index 0000000..aee44e1
Binary files /dev/null and b/mlkit/android-snippets/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ
diff --git a/mlkit/android-snippets/app/src/main/res/values-w820dp/dimens.xml b/mlkit/android-snippets/app/src/main/res/values-w820dp/dimens.xml
new file mode 100644
index 0000000..63fc816
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/res/values-w820dp/dimens.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <!-- Example customization of dimensions originally defined in res/values/dimens.xml
+         (such as screen margins) for screens with more than 820dp of available width. -->
+    <dimen name="activity_horizontal_margin">64dp</dimen>
+</resources>
diff --git a/mlkit/android-snippets/app/src/main/res/values/colors.xml b/mlkit/android-snippets/app/src/main/res/values/colors.xml
new file mode 100644
index 0000000..3ab3e9c
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/res/values/colors.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <color name="colorPrimary">#3F51B5</color>
+    <color name="colorPrimaryDark">#303F9F</color>
+    <color name="colorAccent">#FF4081</color>
+</resources>
diff --git a/mlkit/android-snippets/app/src/main/res/values/dimens.xml b/mlkit/android-snippets/app/src/main/res/values/dimens.xml
new file mode 100644
index 0000000..47c8224
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/res/values/dimens.xml
@@ -0,0 +1,5 @@
+<resources>
+    <!-- Default screen margins, per the Android Design guidelines. -->
+    <dimen name="activity_horizontal_margin">16dp</dimen>
+    <dimen name="activity_vertical_margin">16dp</dimen>
+</resources>
diff --git a/mlkit/android-snippets/app/src/main/res/values/strings.xml b/mlkit/android-snippets/app/src/main/res/values/strings.xml
new file mode 100644
index 0000000..4ecbba2
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/res/values/strings.xml
@@ -0,0 +1,3 @@
+<resources>
+    <string name="app_name">MLKit Code Snippets</string>
+</resources>
diff --git a/mlkit/android-snippets/app/src/main/res/values/styles.xml b/mlkit/android-snippets/app/src/main/res/values/styles.xml
new file mode 100644
index 0000000..5885930
--- /dev/null
+++ b/mlkit/android-snippets/app/src/main/res/values/styles.xml
@@ -0,0 +1,11 @@
+<resources>
+
+    <!-- Base application theme. -->
+    <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
+        <!-- Customize your theme here. -->
+        <item name="colorPrimary">@color/colorPrimary</item>
+        <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
+        <item name="colorAccent">@color/colorAccent</item>
+    </style>
+
+</resources>
diff --git a/mlkit/android-snippets/build.gradle b/mlkit/android-snippets/build.gradle
new file mode 100644
index 0000000..fcb9724
--- /dev/null
+++ b/mlkit/android-snippets/build.gradle
@@ -0,0 +1,26 @@
+// Top-level build file where you can add configuration options common to all sub-projects/modules.
+
+buildscript {
+ ext.kotlin_version = '1.4.31'
+ repositories {
+ mavenLocal()
+ google()
+ jcenter()
+ }
+ dependencies {
+ classpath 'com.android.tools.build:gradle:7.2.1'
+ classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
+ }
+}
+
+allprojects {
+ repositories {
+ google()
+ mavenLocal()
+ jcenter()
+ }
+}
+
+task clean(type: Delete) {
+ delete rootProject.buildDir
+}
diff --git a/mlkit/android-snippets/gradle.properties b/mlkit/android-snippets/gradle.properties
new file mode 100644
index 0000000..9e6fce1
--- /dev/null
+++ b/mlkit/android-snippets/gradle.properties
@@ -0,0 +1,19 @@
+# Project-wide Gradle settings.
+
+# IDE (e.g. Android Studio) users:
+# Gradle settings configured through the IDE *will override*
+# any settings specified in this file.
+
+# For more details on how to configure your build environment visit
+# http://www.gradle.org/docs/current/userguide/build_environment.html
+
+# Specifies the JVM arguments used for the daemon process.
+# The setting is particularly useful for tweaking memory settings.
+org.gradle.jvmargs=-Xmx1536m
+
+android.enableJetifier=true
+android.useAndroidX=true
+
+# When configured, Gradle will run in incubating parallel mode.
+# This option should only be used with decoupled projects. More details, visit
+# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
+# org.gradle.parallel=true
diff --git a/mlkit/android-snippets/gradle/wrapper/gradle-wrapper.jar b/mlkit/android-snippets/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..5c2d1cf
Binary files /dev/null and b/mlkit/android-snippets/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/mlkit/android-snippets/gradle/wrapper/gradle-wrapper.properties b/mlkit/android-snippets/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..fac6ed0
--- /dev/null
+++ b/mlkit/android-snippets/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Mon Aug 24 22:29:37 PDT 2020
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-all.zip
diff --git a/mlkit/android-snippets/gradlew b/mlkit/android-snippets/gradlew
new file mode 100755
index 0000000..b0d6d0a
--- /dev/null
+++ b/mlkit/android-snippets/gradlew
@@ -0,0 +1,188 @@
+#!/usr/bin/env sh
+
+#
+# Copyright 2015 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+##############################################################################
+##
+## Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`"/$link"
+ fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+ echo "$*"
+}
+
+die () {
+ echo
+ echo "$*"
+ echo
+ exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+ CYGWIN* )
+ cygwin=true
+ ;;
+ Darwin* )
+ darwin=true
+ ;;
+ MINGW* )
+ msys=true
+ ;;
+ NONSTOP* )
+ nonstop=true
+ ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD="java"
+ which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+ MAX_FD_LIMIT=`ulimit -H -n`
+ if [ $? -eq 0 ] ; then
+ if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+ MAX_FD="$MAX_FD_LIMIT"
+ fi
+ ulimit -n $MAX_FD
+ if [ $? -ne 0 ] ; then
+ warn "Could not set maximum file descriptor limit: $MAX_FD"
+ fi
+ else
+ warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+ fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+ GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+ APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+ CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+ JAVACMD=`cygpath --unix "$JAVACMD"`
+
+ # We build the pattern for arguments to be converted via cygpath
+ ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+ SEP=""
+ for dir in $ROOTDIRSRAW ; do
+ ROOTDIRS="$ROOTDIRS$SEP$dir"
+ SEP="|"
+ done
+ OURCYGPATTERN="(^($ROOTDIRS))"
+ # Add a user-defined pattern to the cygpath arguments
+ if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+ OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+ fi
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ i=0
+ for arg in "$@" ; do
+ CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+ CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
+
+ if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
+ eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+ else
+ eval `echo args$i`="\"$arg\""
+ fi
+ i=$((i+1))
+ done
+ case $i in
+ (0) set -- ;;
+ (1) set -- "$args0" ;;
+ (2) set -- "$args0" "$args1" ;;
+ (3) set -- "$args0" "$args1" "$args2" ;;
+ (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+ (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+ (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+ (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+ (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+ (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+ esac
+fi
+
+# Escape application args
+save () {
+ for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+ echo " "
+}
+APP_ARGS=$(save "$@")
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
+ cd "$(dirname "$0")"
+fi
+
+exec "$JAVACMD" "$@"
diff --git a/mlkit/android-snippets/gradlew.bat b/mlkit/android-snippets/gradlew.bat
new file mode 100644
index 0000000..15e1ee3
--- /dev/null
+++ b/mlkit/android-snippets/gradlew.bat
@@ -0,0 +1,100 @@
+@rem
+@rem Copyright 2015 the original author or authors.
+@rem
+@rem Licensed under the Apache License, Version 2.0 (the "License");
+@rem you may not use this file except in compliance with the License.
+@rem You may obtain a copy of the License at
+@rem
+@rem http://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+@rem
+
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windows variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/mlkit/android-snippets/settings.gradle b/mlkit/android-snippets/settings.gradle
new file mode 100644
index 0000000..e7b4def
--- /dev/null
+++ b/mlkit/android-snippets/settings.gradle
@@ -0,0 +1 @@
+include ':app'
diff --git a/mlkit/automl/LICENSE b/mlkit/automl/LICENSE
new file mode 100644
index 0000000..973b3b7
--- /dev/null
+++ b/mlkit/automl/LICENSE
@@ -0,0 +1,191 @@
+ Copyright 2020 Google LLC
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/mlkit/automl/README.md b/mlkit/automl/README.md
new file mode 100644
index 0000000..32951a0
--- /dev/null
+++ b/mlkit/automl/README.md
@@ -0,0 +1,77 @@
+# ML Kit AutoML Remote Model Quickstart Sample App
+
+* [Read more about training an image classification model with AutoML Vision Edge](https://firebase.google.com/docs/ml/automl-image-labeling)
+* [Read more about hosting models with Firebase Model deployment](https://firebase.google.com/docs/ml/manage-hosted-models)
+
+## Introduction
+
+This ML Kit Quickstart App demonstrates how to label images and objects with an
+AutoML Vision Edge trained model, and also how to download models that are
+hosted within Firebase.
+
+## Feature List
+Features that are included in this demo app:
+
+* [Custom Image labeling](https://developers.google.com/ml-kit/vision/image-labeling/custom-models/android) - Label images using a custom AutoML Vision Edge-trained model that is downloaded from Firebase (see the sketch after this list).
+* [Custom Object Detection and Tracking](https://developers.google.com/ml-kit/vision/object-detection/custom-models/android) - Detect and label objects using a custom AutoML Vision Edge-trained model that is downloaded from Firebase.
+
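+As a rough illustration, a labeler backed by a Firebase-hosted AutoML model
+can be created as sketched below (a sketch only; the model name is a
+placeholder and the confidence threshold is an arbitrary choice):
+
+```kotlin
+import com.google.mlkit.common.model.CustomRemoteModel
+import com.google.mlkit.linkfirebase.FirebaseModelSource
+import com.google.mlkit.vision.label.ImageLabeling
+import com.google.mlkit.vision.label.custom.CustomImageLabelerOptions
+
+// Reference the model hosted in the Firebase console by name.
+val remoteModel = CustomRemoteModel.Builder(
+    FirebaseModelSource.Builder("your_automl_model_name").build()
+).build()
+
+val options = CustomImageLabelerOptions.Builder(remoteModel)
+    .setConfidenceThreshold(0.5f)
+    .build()
+val labeler = ImageLabeling.getClient(options)
+```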
+
+
+## Getting Started
+
+* To set up this app, first create a Firebase project as
+outlined [here](https://firebase.google.com/docs/android/setup).
+* Run the sample code on your Android device or emulator
+* Try extending the code to add new features and functionality
+* Ensure your device is connected to the Internet to download the remote model
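+
+Because the model is fetched on demand, the download can also be triggered
+explicitly. A minimal sketch, reusing the `remoteModel` from the labeling
+sketch above:
+
+```kotlin
+import com.google.mlkit.common.model.DownloadConditions
+import com.google.mlkit.common.model.RemoteModelManager
+
+// Restrict the (potentially large) model download to Wi-Fi.
+val conditions = DownloadConditions.Builder().requireWifi().build()
+RemoteModelManager.getInstance()
+    .download(remoteModel, conditions)
+    .addOnSuccessListener { /* the model is ready for on-device inference */ }
+    .addOnFailureListener { e -> /* handle the failed download */ }
+```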
+
+## How to use the app
+This app supports three usage scenarios: Live Camera, Static Image, and
+CameraX-enabled live camera.
+
+### Live Camera Scenario
+This mode uses the camera preview as input and allows you to label images using
+a remotely downloaded AutoML Vision Edge model. There is also a settings page
+that allows you to configure several options:
+
+* Camera
+  * Preview Size -- Specify the preview size of the rear camera manually (the
+    default size is chosen appropriately based on screen size)
+  * Enable live viewport -- Prevent the live camera preview from being blocked
+    by slow API rendering.
+* AutoML Image Labeling
+  * AutoML Remote Model Name -- Allows you to specify an AutoML Vision Edge
+    model to download remotely from the Firebase console
+
+### Static Image Scenario
+The static image scenario is identical to the live camera scenario, but
+operates on images selected from the device's gallery.
+
+### CameraX Live Preview scenario
+The CameraX live preview scenario is very similar to the native live camera
+scenario, but uses the CameraX live preview instead of the Camera2 live
+preview. Note: CameraX is only supported on API level 21+.
+
+
+## Support
+
+* [Stack Overflow](https://stackoverflow.com/questions/tagged/google-mlkit)
+
+## License
+
+Copyright 2020 Google, Inc.
+
+Licensed to the Apache Software Foundation (ASF) under one or more contributor
+license agreements. See the NOTICE file distributed with this work for
+additional information regarding copyright ownership. The ASF licenses this
+file to you under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
diff --git a/mlkit/automl/app/build.gradle b/mlkit/automl/app/build.gradle
new file mode 100644
index 0000000..6402b1c
--- /dev/null
+++ b/mlkit/automl/app/build.gradle
@@ -0,0 +1,91 @@
+apply plugin: 'com.android.application'
+
+android {
+ compileSdkVersion 31
+
+ defaultConfig {
+ applicationId "com.google.mlkit.vision.automl.demo"
+ minSdkVersion 19
+ multiDexEnabled true
+ targetSdkVersion 31
+ // Version code for next release (version 9 is now in Play Store)
+ versionCode 11
+ versionName "1.11"
+ vectorDrawables.useSupportLibrary = true
+ setProperty("archivesBaseName", "vision-automl-remote-model-quickstart")
+ }
+ buildTypes {
+ proguard {
+ debuggable false
+ minifyEnabled true
+ shrinkResources true
+ proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro', 'proguard.cfg'
+ testProguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguardTest-rules.pro', 'proguard.cfg'
+ }
+ testBuildType "proguard"
+ release {
+ minifyEnabled true
+ proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+ }
+ debug {
+ minifyEnabled false
+ proguardFiles 'proguard-rules.pro'
+ }
+ }
+
+ compileOptions {
+ sourceCompatibility JavaVersion.VERSION_1_8
+ targetCompatibility JavaVersion.VERSION_1_8
+ }
+
+ // Do NOT compress tflite model files (important to call out to developers!)
+ aaptOptions {
+ noCompress "tflite"
+ }
+
+ dexOptions {
+ incremental true
+ javaMaxHeapSize "4g"
+ }
+}
+
+repositories {
+ // Depending on AndroidX Snapshot Builds to get the latest CameraX libs.
+ maven { url 'https://androidx.dev/snapshots/builds/6787662/artifacts/repository/' }
+}
+
+dependencies {
+ // Image labeling with AutoML model support
+ implementation 'com.google.mlkit:image-labeling-custom:17.0.1'
+ // Or comment out the dependency above and uncomment the dependency below to
+ // use the unbundled model, which depends on Google Play services
+ // implementation 'com.google.android.gms:play-services-mlkit-image-labeling-custom:16.0.0-beta3'
+
+ implementation 'com.google.mlkit:object-detection-custom:17.0.0'
+ implementation 'com.google.mlkit:linkfirebase:17.0.0'
+
+ implementation 'androidx.multidex:multidex:2.0.1'
+ // -------------------------------------------------------
+
+ implementation 'com.google.code.gson:gson:2.8.6'
+ implementation 'com.google.guava:guava:27.1-android'
+
+ // ViewModel and LiveData
+ implementation "androidx.lifecycle:lifecycle-livedata:2.3.1"
+ implementation "androidx.lifecycle:lifecycle-viewmodel:2.3.1"
+
+ implementation 'androidx.appcompat:appcompat:1.2.0'
+ implementation 'androidx.annotation:annotation:1.2.0'
+ implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
+
+ // CameraX
+ implementation "androidx.camera:camera-camera2:1.0.0-SNAPSHOT"
+ implementation "androidx.camera:camera-lifecycle:1.0.0-SNAPSHOT"
+ implementation "androidx.camera:camera-view:1.0.0-SNAPSHOT"
+}
+
+configurations {
+ // Resolves a dependency conflict caused by some dependencies using
+ // com.google.guava:guava and com.google.guava:listenablefuture together.
+ all*.exclude group: 'com.google.guava', module: 'listenablefuture'
+}
diff --git a/mlkit/automl/app/proguard-rules.pro b/mlkit/automl/app/proguard-rules.pro
new file mode 100644
index 0000000..f1b4245
--- /dev/null
+++ b/mlkit/automl/app/proguard-rules.pro
@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+# http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+# public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
diff --git a/mlkit/automl/app/src/main/AndroidManifest.xml b/mlkit/automl/app/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..7c24f99
--- /dev/null
+++ b/mlkit/automl/app/src/main/AndroidManifest.xml
@@ -0,0 +1,76 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/BitmapUtils.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/BitmapUtils.java
new file mode 100644
index 0000000..f26b06c
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/BitmapUtils.java
@@ -0,0 +1,401 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import static java.lang.Math.min;
+
+import android.annotation.TargetApi;
+import android.content.ContentResolver;
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.ImageFormat;
+import android.graphics.Matrix;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.media.Image;
+import android.media.Image.Plane;
+import android.net.Uri;
+import android.os.Build.VERSION_CODES;
+import android.provider.MediaStore;
+import android.util.Log;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.camera.core.ExperimentalGetImage;
+import androidx.camera.core.ImageProxy;
+import androidx.exifinterface.media.ExifInterface;
+import java.io.ByteArrayOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+
+/** Utility functions for bitmap conversions. */
+public final class BitmapUtils {
+ private static final String TAG = "BitmapUtils";
+
+ /** Converts NV21 format byte buffer to bitmap. */
+ @Nullable
+ public static Bitmap getBitmap(ByteBuffer data, FrameMetadata metadata) {
+ data.rewind();
+ byte[] imageInBuffer = new byte[data.limit()];
+ data.get(imageInBuffer, 0, imageInBuffer.length);
+ try {
+ YuvImage image =
+ new YuvImage(
+ imageInBuffer, ImageFormat.NV21, metadata.getWidth(), metadata.getHeight(), null);
+ ByteArrayOutputStream stream = new ByteArrayOutputStream();
+ image.compressToJpeg(new Rect(0, 0, metadata.getWidth(), metadata.getHeight()), 80, stream);
+
+ Bitmap bmp = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
+
+ stream.close();
+ return rotateBitmap(bmp, metadata.getRotation(), false, false);
+ } catch (Exception e) {
+ Log.e("VisionProcessorBase", "Error: " + e.getMessage());
+ }
+ return null;
+ }
+
+ /** Converts a YUV_420_888 image from CameraX API to a bitmap. */
+ @RequiresApi(VERSION_CODES.LOLLIPOP)
+ @Nullable
+ @ExperimentalGetImage
+ public static Bitmap getBitmap(ImageProxy image) {
+ FrameMetadata frameMetadata =
+ new FrameMetadata.Builder()
+ .setWidth(image.getWidth())
+ .setHeight(image.getHeight())
+ .setRotation(image.getImageInfo().getRotationDegrees())
+ .build();
+
+ ByteBuffer nv21Buffer =
+ yuv420ThreePlanesToNV21(image.getImage().getPlanes(), image.getWidth(), image.getHeight());
+ return getBitmap(nv21Buffer, frameMetadata);
+ }
+
+ /** Rotates a bitmap if it is converted from a bytebuffer. */
+ private static Bitmap rotateBitmap(
+ Bitmap bitmap, int rotationDegrees, boolean flipX, boolean flipY) {
+ Matrix matrix = new Matrix();
+
+ // Rotate the image back to straight.
+ matrix.postRotate(rotationDegrees);
+
+ // Mirror the image along the X or Y axis.
+ matrix.postScale(flipX ? -1.0f : 1.0f, flipY ? -1.0f : 1.0f);
+ Bitmap rotatedBitmap =
+ Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true);
+
+ // Recycle the old bitmap if it has changed.
+ if (rotatedBitmap != bitmap) {
+ bitmap.recycle();
+ }
+ return rotatedBitmap;
+ }
+
+ @Nullable
+ public static Bitmap getBitmapFromAsset(Context context, String fileName) {
+ InputStream inputStream = null;
+ try {
+ inputStream = context.getAssets().open(fileName);
+ return BitmapFactory.decodeStream(inputStream);
+ } catch (IOException e) {
+ Log.e(TAG, "Error reading asset: " + fileName, e);
+ } finally {
+ if (inputStream != null) {
+ try {
+ inputStream.close();
+ } catch (IOException e) {
+ Log.e(TAG, "Failed to close input stream: ", e);
+ }
+ }
+ }
+
+ return null;
+ }
+
+ @Nullable
+ public static Bitmap getBitmapFromContentUri(ContentResolver contentResolver, Uri imageUri)
+ throws IOException {
+ Bitmap decodedBitmap = MediaStore.Images.Media.getBitmap(contentResolver, imageUri);
+ if (decodedBitmap == null) {
+ return null;
+ }
+ int orientation = getExifOrientationTag(contentResolver, imageUri);
+
+ int rotationDegrees = 0;
+ boolean flipX = false;
+ boolean flipY = false;
+ // See e.g. https://magnushoff.com/articles/jpeg-orientation/ for a detailed explanation on each
+ // orientation.
+ switch (orientation) {
+ case ExifInterface.ORIENTATION_FLIP_HORIZONTAL:
+ flipX = true;
+ break;
+ case ExifInterface.ORIENTATION_ROTATE_90:
+ rotationDegrees = 90;
+ break;
+ case ExifInterface.ORIENTATION_TRANSPOSE:
+ rotationDegrees = 90;
+ flipX = true;
+ break;
+ case ExifInterface.ORIENTATION_ROTATE_180:
+ rotationDegrees = 180;
+ break;
+ case ExifInterface.ORIENTATION_FLIP_VERTICAL:
+ flipY = true;
+ break;
+ case ExifInterface.ORIENTATION_ROTATE_270:
+ rotationDegrees = -90;
+ break;
+ case ExifInterface.ORIENTATION_TRANSVERSE:
+ rotationDegrees = -90;
+ flipX = true;
+ break;
+ case ExifInterface.ORIENTATION_UNDEFINED:
+ case ExifInterface.ORIENTATION_NORMAL:
+ default:
+ // No transformations necessary in this case.
+ }
+
+ return rotateBitmap(decodedBitmap, rotationDegrees, flipX, flipY);
+ }
+
+ private static int getExifOrientationTag(ContentResolver resolver, Uri imageUri) {
+ // We only support parsing EXIF orientation tag from local file on the device.
+ // See also:
+ // https://android-developers.googleblog.com/2016/12/introducing-the-exifinterface-support-library.html
+ if (!ContentResolver.SCHEME_CONTENT.equals(imageUri.getScheme())
+ && !ContentResolver.SCHEME_FILE.equals(imageUri.getScheme())) {
+ return 0;
+ }
+
+ ExifInterface exif;
+ try (InputStream inputStream = resolver.openInputStream(imageUri)) {
+ if (inputStream == null) {
+ return 0;
+ }
+
+ exif = new ExifInterface(inputStream);
+ } catch (IOException e) {
+ Log.e(TAG, "failed to open file to read rotation meta data: " + imageUri, e);
+ return 0;
+ }
+
+ return exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL);
+ }
+
+ public static ByteBuffer convertBitmapToNv21Buffer(Bitmap bitmap) {
+ return ByteBuffer.wrap(convertBitmapToNv21Bytes(bitmap));
+ }
+
+ public static byte[] convertBitmapToNv21Bytes(Bitmap bitmap) {
+ int inputWidth = bitmap.getWidth();
+ int inputHeight = bitmap.getHeight();
+ int[] argb = new int[inputWidth * inputHeight];
+
+ bitmap.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
+
+ byte[] nv21Bytes =
+ new byte
+ [inputHeight * inputWidth
+ + 2 * (int) Math.ceil(inputHeight / 2.0) * (int) Math.ceil(inputWidth / 2.0)];
+ encodeToNv21(nv21Bytes, argb, inputWidth, inputHeight);
+ return nv21Bytes;
+ }
+
+ private static void encodeToNv21(byte[] nv21Bytes, int[] argb, int width, int height) {
+ int frameSize = width * height;
+
+ int yIndex = 0;
+ int uvIndex = frameSize;
+
+ int red;
+ int green;
+ int blue;
+ int y;
+ int u;
+ int v;
+ int index = 0;
+ for (int j = 0; j < height; j++) {
+ for (int i = 0; i < width; i++) {
+
+ // first byte is alpha, but is unused
+ red = (argb[index] & 0xff0000) >> 16;
+ green = (argb[index] & 0xff00) >> 8;
+ blue = (argb[index] & 0xff) >> 0;
+
+ // well known RGB to YUV algorithm
+ y = ((66 * red + 129 * green + 25 * blue + 128) >> 8) + 16;
+ u = ((-38 * red - 74 * green + 112 * blue + 128) >> 8) + 128;
+ v = ((112 * red - 94 * green - 18 * blue + 128) >> 8) + 128;
+
+ // NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2
+ // meaning for every 4 Y pixels there are 1 V and 1 U. Note the sampling is every other
+ // pixel AND every other scanline.
+ nv21Bytes[yIndex++] = (byte) ((y < 0) ? 0 : min(255, y));
+ if (j % 2 == 0 && index % 2 == 0) {
+ nv21Bytes[uvIndex++] = (byte) ((v < 0) ? 0 : min(255, v));
+ nv21Bytes[uvIndex++] = (byte) ((u < 0) ? 0 : min(255, u));
+ }
+
+ index++;
+ }
+ }
+ }
+
+ public static ByteBuffer convertBitmapToYv12Buffer(Bitmap bitmap) {
+ return ByteBuffer.wrap(convertBitmapToYv12Bytes(bitmap));
+ }
+
+ public static byte[] convertBitmapToYv12Bytes(Bitmap bitmap) {
+ byte[] nv21Bytes = convertBitmapToNv21Bytes(bitmap);
+ return nv21Toyv12(nv21Bytes);
+ }
+
+ /**
+ * Converts nv21 byte[] to yv12 byte[].
+ *
+ *
+ * <p>NV21 (4:2:0) Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y V U V U V U V U V U V U
+ *
+ * <p>YV12 (4:2:0) Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y V V V V V V U U U U U U
+ */
+ private static byte[] nv21Toyv12(byte[] nv21Bytes) {
+ int totalBytes = nv21Bytes.length;
+ int rowSize = totalBytes / 6; // 4+2+0
+ byte[] yv12Bytes = new byte[totalBytes];
+ System.arraycopy(nv21Bytes, 0, yv12Bytes, 0, rowSize * 4);
+ int offSet = totalBytes / 6 * 4;
+ for (int i = 0; i < rowSize; i++) {
+ yv12Bytes[offSet + i] = nv21Bytes[offSet + 2 * i]; // V
+ yv12Bytes[offSet + rowSize + i] = nv21Bytes[offSet + 2 * i + 1]; // U
+ }
+
+ return yv12Bytes;
+ }
+
+ /**
+ * Converts YUV_420_888 to NV21 bytebuffer.
+ *
+ *
+ * <p>The NV21 format consists of a single byte array containing the Y, U and V values. For an
+ * image of size S, the first S positions of the array contain all the Y values. The remaining
+ * positions contain interleaved V and U values. U and V are subsampled by a factor of 2 in both
+ * dimensions, so there are S/4 U values and S/4 V values. In summary, the NV21 array will contain
+ * S Y values followed by S/4 VU values: YYYYYYYYYYYYYY(...)YVUVUVUVU(...)VU
+ *
+ *
+ * <p>YUV_420_888 is a generic format that can describe any YUV image where U and V are subsampled
+ * by a factor of 2 in both dimensions. {@link Image#getPlanes} returns an array with the Y, U and
+ * V planes. The Y plane is guaranteed not to be interleaved, so we can just copy its values into
+ * the first part of the NV21 array. The U and V planes may already have the representation in the
+ * NV21 format. This happens if the planes share the same buffer, the V buffer is one position
+ * before the U buffer and the planes have a pixelStride of 2. If this is case, we can just copy
+ * them to the NV21 array.
+ */
+ @RequiresApi(VERSION_CODES.KITKAT)
+ private static ByteBuffer yuv420ThreePlanesToNV21(
+ Plane[] yuv420888planes, int width, int height) {
+ int imageSize = width * height;
+ byte[] out = new byte[imageSize + 2 * (imageSize / 4)];
+
+ if (areUVPlanesNV21(yuv420888planes, width, height)) {
+ // Copy the Y values.
+ yuv420888planes[0].getBuffer().get(out, 0, imageSize);
+
+ ByteBuffer uBuffer = yuv420888planes[1].getBuffer();
+ ByteBuffer vBuffer = yuv420888planes[2].getBuffer();
+ // Get the first V value from the V buffer, since the U buffer does not contain it.
+ vBuffer.get(out, imageSize, 1);
+ // Copy the first U value and the remaining VU values from the U buffer.
+ uBuffer.get(out, imageSize + 1, 2 * imageSize / 4 - 1);
+ } else {
+ // Fallback to copying the UV values one by one, which is slower but also works.
+ // Unpack Y.
+ unpackPlane(yuv420888planes[0], width, height, out, 0, 1);
+ // Unpack U.
+ unpackPlane(yuv420888planes[1], width, height, out, imageSize + 1, 2);
+ // Unpack V.
+ unpackPlane(yuv420888planes[2], width, height, out, imageSize, 2);
+ }
+
+ return ByteBuffer.wrap(out);
+ }
+
+ /** Checks if the UV plane buffers of a YUV_420_888 image are in the NV21 format. */
+ @RequiresApi(VERSION_CODES.KITKAT)
+ private static boolean areUVPlanesNV21(Plane[] planes, int width, int height) {
+ int imageSize = width * height;
+
+ ByteBuffer uBuffer = planes[1].getBuffer();
+ ByteBuffer vBuffer = planes[2].getBuffer();
+
+ // Backup buffer properties.
+ int vBufferPosition = vBuffer.position();
+ int uBufferLimit = uBuffer.limit();
+
+ // Advance the V buffer by 1 byte, since the U buffer will not contain the first V value.
+ vBuffer.position(vBufferPosition + 1);
+ // Chop off the last byte of the U buffer, since the V buffer will not contain the last U value.
+ uBuffer.limit(uBufferLimit - 1);
+
+ // Check that the buffers are equal and have the expected number of elements.
+ boolean areNV21 =
+ (vBuffer.remaining() == (2 * imageSize / 4 - 2)) && (vBuffer.compareTo(uBuffer) == 0);
+
+ // Restore buffers to their initial state.
+ vBuffer.position(vBufferPosition);
+ uBuffer.limit(uBufferLimit);
+
+ return areNV21;
+ }
+
+ /**
+ * Unpack an image plane into a byte array.
+ *
+ *
+ * <p>The input plane data will be copied in 'out', starting at 'offset' and every pixel will be
+ * spaced by 'pixelStride'. Note that there is no row padding on the output.
+ */
+ @TargetApi(VERSION_CODES.KITKAT)
+ private static void unpackPlane(
+ Plane plane, int width, int height, byte[] out, int offset, int pixelStride) {
+ ByteBuffer buffer = plane.getBuffer();
+ buffer.rewind();
+
+ // Compute the size of the current plane.
+ // We assume that it has the same aspect ratio as the original image.
+ int numRow = (buffer.limit() + plane.getRowStride() - 1) / plane.getRowStride();
+ if (numRow == 0) {
+ return;
+ }
+ int scaleFactor = height / numRow;
+ int numCol = width / scaleFactor;
+
+ // Extract the data in the output buffer.
+ int outputPos = offset;
+ int rowStart = 0;
+ for (int row = 0; row < numRow; row++) {
+ int inputPos = rowStart;
+ for (int col = 0; col < numCol; col++) {
+ out[outputPos] = buffer.get(inputPos);
+ outputPos += pixelStride;
+ inputPos += plane.getPixelStride();
+ }
+ rowStart += plane.getRowStride();
+ }
+ }
+
+ private BitmapUtils() {}
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraImageGraphic.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraImageGraphic.java
new file mode 100644
index 0000000..0d2f509
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraImageGraphic.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import com.google.mlkit.vision.automl.demo.GraphicOverlay.Graphic;
+
+/** Draw camera image to background. */
+public class CameraImageGraphic extends Graphic {
+
+ private final Bitmap bitmap;
+
+ public CameraImageGraphic(GraphicOverlay overlay, Bitmap bitmap) {
+ super(overlay);
+ this.bitmap = bitmap;
+ }
+
+ @Override
+ public void draw(Canvas canvas) {
+ canvas.drawBitmap(bitmap, getTransformationMatrix(), null);
+ }
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraSource.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraSource.java
new file mode 100644
index 0000000..aea8740
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraSource.java
@@ -0,0 +1,722 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import android.Manifest;
+import android.annotation.SuppressLint;
+import android.app.Activity;
+import android.content.Context;
+import android.graphics.ImageFormat;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+import android.util.Log;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.WindowManager;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresPermission;
+import com.google.android.gms.common.images.Size;
+import com.google.mlkit.vision.automl.demo.preference.PreferenceUtils;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.IdentityHashMap;
+import java.util.List;
+
+/**
+ * Manages the camera and allows UI updates on top of it (e.g. overlaying extra Graphics or
+ * displaying extra information). This receives preview frames from the camera at a specified rate,
+ * sending those frames to child classes' detectors / classifiers as fast as they can be processed.
+ */
+public class CameraSource {
+ @SuppressLint("InlinedApi")
+ public static final int CAMERA_FACING_BACK = CameraInfo.CAMERA_FACING_BACK;
+
+ @SuppressLint("InlinedApi")
+ public static final int CAMERA_FACING_FRONT = CameraInfo.CAMERA_FACING_FRONT;
+
+ public static final int IMAGE_FORMAT = ImageFormat.NV21;
+ public static final int DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH = 480;
+ public static final int DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT = 360;
+
+ private static final String TAG = "MIDemoApp:CameraSource";
+
+ /**
+ * The dummy surface texture must be assigned a chosen name. Since we never use an OpenGL context,
+ * we can choose any ID we want here. The dummy surface texture is not a crazy hack - it is
+ * actually how the camera team recommends using the camera without a preview.
+ */
+ private static final int DUMMY_TEXTURE_NAME = 100;
+
+ /**
+ * If the absolute difference between a preview size aspect ratio and a picture size aspect ratio
+ * is less than this tolerance, they are considered to be the same aspect ratio.
+ */
+ private static final float ASPECT_RATIO_TOLERANCE = 0.01f;
+
+ private static final float REQUESTED_FPS = 30.0f;
+ private static final boolean REQUESTED_AUTO_FOCUS = true;
+
+ protected Activity activity;
+
+ private Camera camera;
+
+ private int facing = CAMERA_FACING_BACK;
+
+ /** Rotation of the device, and thus the associated preview images captured from the device. */
+ private int rotationDegrees;
+
+ private Size previewSize;
+
+ // These instances need to be held onto to avoid GC of their underlying resources. Even though
+ // these aren't used outside of the method that creates them, they still must have hard
+ // references maintained to them.
+ private SurfaceTexture dummySurfaceTexture;
+ private final GraphicOverlay graphicOverlay;
+
+ // True if a SurfaceTexture is being used for the preview, false if a SurfaceHolder is being
+ // used for the preview. We want to be compatible back to Gingerbread, but SurfaceTexture
+ // wasn't introduced until Honeycomb. Since the interface cannot use a SurfaceTexture, if the
+ // developer wants to display a preview we must use a SurfaceHolder. If the developer doesn't
+ // want to display a preview we use a SurfaceTexture if we are running at least Honeycomb.
+ private boolean usingSurfaceTexture;
+
+ /**
+ * Dedicated thread and associated runnable for calling into the detector with frames, as the
+ * frames become available from the camera.
+ */
+ private Thread processingThread;
+
+ private final FrameProcessingRunnable processingRunnable;
+
+ private final Object processorLock = new Object();
+ // TODO(b/74400062) Re-enable the annotation
+ // @GuardedBy("processorLock")
+ private VisionImageProcessor frameProcessor;
+
+ /**
+ * Map to convert between a byte array, received from the camera, and its associated byte buffer.
+ * We use byte buffers internally because this is a more efficient way to call into native code
+ * later (avoids a potential copy).
+ *
+ * <p>Note: uses IdentityHashMap here instead of HashMap because the behavior of an array's
+ * equals, hashCode and toString methods is both useless and unexpected. IdentityHashMap enforces
+ * identity ('==') check on the keys.
+ */
+ private final IdentityHashMap<byte[], ByteBuffer> bytesToByteBuffer = new IdentityHashMap<>();
+
+ public CameraSource(Activity activity, GraphicOverlay overlay) {
+ this.activity = activity;
+ graphicOverlay = overlay;
+ graphicOverlay.clear();
+ processingRunnable = new FrameProcessingRunnable();
+ }
+
+ // ==============================================================================================
+ // Public
+ // ==============================================================================================
+
+ /** Stops the camera and releases the resources of the camera and underlying detector. */
+ public void release() {
+ synchronized (processorLock) {
+ stop();
+ cleanScreen();
+
+ if (frameProcessor != null) {
+ frameProcessor.stop();
+ }
+ }
+ }
+
+ /**
+ * Opens the camera and starts sending preview frames to the underlying detector. The preview
+ * frames are not displayed.
+ *
+ * @throws IOException if the camera's preview texture or display could not be initialized
+ */
+ @RequiresPermission(Manifest.permission.CAMERA)
+ public synchronized CameraSource start() throws IOException {
+ if (camera != null) {
+ return this;
+ }
+
+ camera = createCamera();
+ dummySurfaceTexture = new SurfaceTexture(DUMMY_TEXTURE_NAME);
+ camera.setPreviewTexture(dummySurfaceTexture);
+ usingSurfaceTexture = true;
+ camera.startPreview();
+
+ processingThread = new Thread(processingRunnable);
+ processingRunnable.setActive(true);
+ processingThread.start();
+ return this;
+ }
+
+ /**
+ * Opens the camera and starts sending preview frames to the underlying detector. The supplied
+ * surface holder is used for the preview so frames can be displayed to the user.
+ *
+ * @param surfaceHolder the surface holder to use for the preview frames
+ * @throws IOException if the supplied surface holder could not be used as the preview display
+ */
+ @RequiresPermission(Manifest.permission.CAMERA)
+ public synchronized CameraSource start(SurfaceHolder surfaceHolder) throws IOException {
+ if (camera != null) {
+ return this;
+ }
+
+ camera = createCamera();
+ camera.setPreviewDisplay(surfaceHolder);
+ camera.startPreview();
+
+ processingThread = new Thread(processingRunnable);
+ processingRunnable.setActive(true);
+ processingThread.start();
+
+ usingSurfaceTexture = false;
+ return this;
+ }
+
+ /**
+ * Closes the camera and stops sending frames to the underlying frame detector.
+ *
+ * <p>This camera source may be restarted again by calling {@link #start()} or {@link
+ * #start(SurfaceHolder)}.
+ *
+ * <p>Call {@link #release()} instead to completely shut down this camera source and release the
+ * resources of the underlying detector.
+ */
+ public synchronized void stop() {
+ processingRunnable.setActive(false);
+ if (processingThread != null) {
+ try {
+ // Wait for the thread to complete to ensure that we can't have multiple threads
+ // executing at the same time (i.e., which would happen if we called start too
+ // quickly after stop).
+ processingThread.join();
+ } catch (InterruptedException e) {
+ Log.d(TAG, "Frame processing thread interrupted on release.");
+ }
+ processingThread = null;
+ }
+
+ if (camera != null) {
+ camera.stopPreview();
+ camera.setPreviewCallbackWithBuffer(null);
+ try {
+ if (usingSurfaceTexture) {
+ camera.setPreviewTexture(null);
+ } else {
+ camera.setPreviewDisplay(null);
+ }
+ } catch (Exception e) {
+ Log.e(TAG, "Failed to clear camera preview: " + e);
+ }
+ camera.release();
+ camera = null;
+ }
+
+ // Release the reference to any image buffers, since these will no longer be in use.
+ bytesToByteBuffer.clear();
+ }
+
+ /** Changes the facing of the camera. */
+ public synchronized void setFacing(int facing) {
+ if ((facing != CAMERA_FACING_BACK) && (facing != CAMERA_FACING_FRONT)) {
+ throw new IllegalArgumentException("Invalid camera: " + facing);
+ }
+ this.facing = facing;
+ }
+
+ /** Returns the preview size that is currently in use by the underlying camera. */
+ public Size getPreviewSize() {
+ return previewSize;
+ }
+
+ /**
+ * Returns the selected camera; one of {@link #CAMERA_FACING_BACK} or {@link
+ * #CAMERA_FACING_FRONT}.
+ */
+ public int getCameraFacing() {
+ return facing;
+ }
+
+ /**
+ * Opens the camera and applies the user settings.
+ *
+ * @throws IOException if camera cannot be found or preview cannot be processed
+ */
+ @SuppressLint("InlinedApi")
+ private Camera createCamera() throws IOException {
+ int requestedCameraId = getIdForRequestedCamera(facing);
+ if (requestedCameraId == -1) {
+ throw new IOException("Could not find requested camera.");
+ }
+ Camera camera = Camera.open(requestedCameraId);
+
+ SizePair sizePair = PreferenceUtils.getCameraPreviewSizePair(activity, requestedCameraId);
+ if (sizePair == null) {
+ sizePair =
+ selectSizePair(
+ camera,
+ DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH,
+ DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT);
+ }
+
+ if (sizePair == null) {
+ throw new IOException("Could not find suitable preview size.");
+ }
+
+ previewSize = sizePair.preview;
+ Log.v(TAG, "Camera preview size: " + previewSize);
+
+ int[] previewFpsRange = selectPreviewFpsRange(camera, REQUESTED_FPS);
+ if (previewFpsRange == null) {
+ throw new IOException("Could not find suitable preview frames per second range.");
+ }
+
+ Camera.Parameters parameters = camera.getParameters();
+
+ Size pictureSize = sizePair.picture;
+ if (pictureSize != null) {
+ Log.v(TAG, "Camera picture size: " + pictureSize);
+ parameters.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
+ }
+ parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
+ parameters.setPreviewFpsRange(
+ previewFpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+ previewFpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+ // Request NV21 preview frames to match the IMAGE_FORMAT expected by the frame processors.
+ parameters.setPreviewFormat(IMAGE_FORMAT);
+
+ setRotation(camera, parameters, requestedCameraId);
+
+ if (REQUESTED_AUTO_FOCUS) {
+ if (parameters
+ .getSupportedFocusModes()
+ .contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+ } else {
+ Log.i(TAG, "Camera auto focus is not supported on this device.");
+ }
+ }
+
+ camera.setParameters(parameters);
+
+ // Four frame buffers are needed for working with the camera:
+ //
+ // one for the frame that is currently being executed upon in doing detection
+ // one for the next pending frame to process immediately upon completing detection
+ // two for the frames that the camera uses to populate future preview images
+ //
+ // Through trial and error it appears that two free buffers, in addition to the two buffers
+ // used in this code, are needed for the camera to work properly. Perhaps the camera has
+ // one thread for acquiring images, and another thread for calling into user code. If only
+ // three buffers are used, then the camera will spew thousands of warning messages when
+ // detection takes a non-trivial amount of time.
+ camera.setPreviewCallbackWithBuffer(new CameraPreviewCallback());
+ camera.addCallbackBuffer(createPreviewBuffer(previewSize));
+ camera.addCallbackBuffer(createPreviewBuffer(previewSize));
+ camera.addCallbackBuffer(createPreviewBuffer(previewSize));
+ camera.addCallbackBuffer(createPreviewBuffer(previewSize));
+
+ return camera;
+ }
+
+ /**
+ * Gets the id for the camera specified by the direction it is facing. Returns -1 if no such
+ * camera was found.
+ *
+ * @param facing the desired camera (front-facing or rear-facing)
+ */
+ private static int getIdForRequestedCamera(int facing) {
+ CameraInfo cameraInfo = new CameraInfo();
+ for (int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+ Camera.getCameraInfo(i, cameraInfo);
+ if (cameraInfo.facing == facing) {
+ return i;
+ }
+ }
+ return -1;
+ }
+
+ /**
+ * Selects the most suitable preview and picture size, given the desired width and height.
+ *
+ * <p>Even though we only need to find the preview size, it's necessary to find both the preview
+ * size and the picture size of the camera together, because these need to have the same aspect
+ * ratio. On some hardware, if you set only the preview size, you will get a distorted
+ * image.
+ *
+ * @param camera the camera to select a preview size from
+ * @param desiredWidth the desired width of the camera preview frames
+ * @param desiredHeight the desired height of the camera preview frames
+ * @return the selected preview and picture size pair
+ */
+ public static SizePair selectSizePair(Camera camera, int desiredWidth, int desiredHeight) {
+ List<SizePair> validPreviewSizes = generateValidPreviewSizeList(camera);
+
+ // The method for selecting the best size is to minimize the sum of the differences between
+ // the desired values and the actual values for width and height. This is certainly not the
+ // only way to select the best size, but it provides a decent tradeoff between using the
+ // closest aspect ratio vs. using the closest pixel area.
+ SizePair selectedPair = null;
+ int minDiff = Integer.MAX_VALUE;
+ for (SizePair sizePair : validPreviewSizes) {
+ Size size = sizePair.preview;
+ int diff =
+ Math.abs(size.getWidth() - desiredWidth) + Math.abs(size.getHeight() - desiredHeight);
+ if (diff < minDiff) {
+ selectedPair = sizePair;
+ minDiff = diff;
+ }
+ }
+
+ return selectedPair;
+ }
+
+ /**
+ * Stores a preview size and a corresponding same-aspect-ratio picture size. To avoid distorted
+ * preview images on some devices, the picture size must be set to a size that is the same aspect
+ * ratio as the preview size or the preview may end up being distorted. If the picture size is
+ * null, then there is no picture size with the same aspect ratio as the preview size.
+ */
+ public static class SizePair {
+ public final Size preview;
+ @Nullable public final Size picture;
+
+ SizePair(
+ Camera.Size previewSize,
+ @Nullable Camera.Size pictureSize) {
+ preview = new Size(previewSize.width, previewSize.height);
+ picture = pictureSize != null ? new Size(pictureSize.width, pictureSize.height) : null;
+ }
+
+ public SizePair(Size previewSize, @Nullable Size pictureSize) {
+ preview = previewSize;
+ picture = pictureSize;
+ }
+ }
+
+ /**
+ * Generates a list of acceptable preview sizes. Preview sizes are not acceptable if there is not
+ * a corresponding picture size of the same aspect ratio. If there is a corresponding picture size
+ * of the same aspect ratio, the picture size is paired up with the preview size.
+ *
+ * <p>This is necessary because even if we don't use still pictures, the still picture size must
+ * be set to a size that is the same aspect ratio as the preview size we choose. Otherwise, the
+ * preview images may be distorted on some devices.
+ */
+ public static List<SizePair> generateValidPreviewSizeList(Camera camera) {
+ Camera.Parameters parameters = camera.getParameters();
+ List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
+ List<Camera.Size> supportedPictureSizes = parameters.getSupportedPictureSizes();
+ List<SizePair> validPreviewSizes = new ArrayList<>();
+ for (Camera.Size previewSize : supportedPreviewSizes) {
+ float previewAspectRatio = (float) previewSize.width / (float) previewSize.height;
+
+ // By looping through the picture sizes in order, we favor the higher resolutions.
+ // We choose the highest resolution in order to support taking the full resolution
+ // picture later.
+ for (Camera.Size pictureSize : supportedPictureSizes) {
+ float pictureAspectRatio = (float) pictureSize.width / (float) pictureSize.height;
+ if (Math.abs(previewAspectRatio - pictureAspectRatio) < ASPECT_RATIO_TOLERANCE) {
+ validPreviewSizes.add(new SizePair(previewSize, pictureSize));
+ break;
+ }
+ }
+ }
+
+ // If there are no picture sizes with the same aspect ratio as any preview sizes, allow all
+ // of the preview sizes and hope that the camera can handle it. Probably unlikely, but we
+ // still account for it.
+ if (validPreviewSizes.isEmpty()) {
+ Log.w(TAG, "No preview sizes have a corresponding same-aspect-ratio picture size");
+ for (Camera.Size previewSize : supportedPreviewSizes) {
+ // The null picture size will let us know that we shouldn't set a picture size.
+ validPreviewSizes.add(new SizePair(previewSize, null));
+ }
+ }
+
+ return validPreviewSizes;
+ }
+
+ /**
+ * Selects the most suitable preview frames per second range, given the desired frames per second.
+ *
+ * @param camera the camera to select a frames per second range from
+ * @param desiredPreviewFps the desired frames per second for the camera preview frames
+ * @return the selected preview frames per second range
+ */
+ @SuppressLint("InlinedApi")
+ private static int[] selectPreviewFpsRange(Camera camera, float desiredPreviewFps) {
+ // The camera API uses integers scaled by a factor of 1000 instead of floating-point frame
+ // rates.
+ int desiredPreviewFpsScaled = (int) (desiredPreviewFps * 1000.0f);
+
+ // The method for selecting the best range is to minimize the sum of the differences between
+ // the desired value and the upper and lower bounds of the range. This may select a range
+ // that the desired value is outside of, but this is often preferred. For example, if the
+ // desired frame rate is 29.97, the range (30, 30) is probably more desirable than the
+ // range (15, 30).
+ int[] selectedFpsRange = null;
+ int minDiff = Integer.MAX_VALUE;
+ List<int[]> previewFpsRangeList = camera.getParameters().getSupportedPreviewFpsRange();
+ for (int[] range : previewFpsRangeList) {
+ int deltaMin = desiredPreviewFpsScaled - range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+ int deltaMax = desiredPreviewFpsScaled - range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+ int diff = Math.abs(deltaMin) + Math.abs(deltaMax);
+ if (diff < minDiff) {
+ selectedFpsRange = range;
+ minDiff = diff;
+ }
+ }
+ return selectedFpsRange;
+ }
+
+ /**
+ * Calculates the correct rotation for the given camera id and sets the rotation in the
+ * parameters. It also sets the camera's display orientation and rotation.
+ *
+ * @param parameters the camera parameters for which to set the rotation
+ * @param cameraId the camera id to set rotation based on
+ */
+ private void setRotation(Camera camera, Camera.Parameters parameters, int cameraId) {
+ WindowManager windowManager = (WindowManager) activity.getSystemService(Context.WINDOW_SERVICE);
+ int degrees = 0;
+ int rotation = windowManager.getDefaultDisplay().getRotation();
+ switch (rotation) {
+ case Surface.ROTATION_0:
+ degrees = 0;
+ break;
+ case Surface.ROTATION_90:
+ degrees = 90;
+ break;
+ case Surface.ROTATION_180:
+ degrees = 180;
+ break;
+ case Surface.ROTATION_270:
+ degrees = 270;
+ break;
+ default:
+ Log.e(TAG, "Bad rotation value: " + rotation);
+ }
+
+ CameraInfo cameraInfo = new CameraInfo();
+ Camera.getCameraInfo(cameraId, cameraInfo);
+
+ int displayAngle;
+ if (cameraInfo.facing == CameraInfo.CAMERA_FACING_FRONT) {
+ this.rotationDegrees = (cameraInfo.orientation + degrees) % 360;
+ displayAngle = (360 - this.rotationDegrees) % 360; // compensate for it being mirrored
+ } else { // back-facing
+ this.rotationDegrees = (cameraInfo.orientation - degrees + 360) % 360;
+ displayAngle = this.rotationDegrees;
+ }
+ Log.d(TAG, "Display rotation is: " + rotation);
+ Log.d(TAG, "Camera face is: " + cameraInfo.facing);
+ Log.d(TAG, "Camera rotation is: " + cameraInfo.orientation);
+ // This value should be one of the degrees that ImageMetadata accepts: 0, 90, 180 or 270.
+ Log.d(TAG, "RotationDegrees is: " + this.rotationDegrees);
+
+ camera.setDisplayOrientation(displayAngle);
+ parameters.setRotation(this.rotationDegrees);
+ }
+
+ /**
+ * Creates one buffer for the camera preview callback. The size of the buffer is based off of the
+ * camera preview size and the format of the camera image.
+ *
+ * @return a new preview buffer of the appropriate size for the current camera settings
+ */
+ @SuppressLint("InlinedApi")
+ private byte[] createPreviewBuffer(Size previewSize) {
+ int bitsPerPixel = ImageFormat.getBitsPerPixel(IMAGE_FORMAT);
+ long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel;
+ int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1;
+
+ // Creating the byte array this way and wrapping it, as opposed to using .allocate(),
+ // should guarantee that there will be an array to work with.
+ byte[] byteArray = new byte[bufferSize];
+ ByteBuffer buffer = ByteBuffer.wrap(byteArray);
+ if (!buffer.hasArray() || (buffer.array() != byteArray)) {
+ // I don't think that this will ever happen. But if it does, then we wouldn't be
+ // passing the preview content to the underlying detector later.
+ throw new IllegalStateException("Failed to create valid buffer for camera source.");
+ }
+
+ bytesToByteBuffer.put(byteArray, buffer);
+ return byteArray;
+ }
+
+ // ==============================================================================================
+ // Frame processing
+ // ==============================================================================================
+
+ /** Called when the camera has a new preview frame. */
+ private class CameraPreviewCallback implements Camera.PreviewCallback {
+ @Override
+ public void onPreviewFrame(byte[] data, Camera camera) {
+ processingRunnable.setNextFrame(data, camera);
+ }
+ }
+
+ void setMachineLearningFrameProcessor(VisionImageProcessor processor) {
+ synchronized (processorLock) {
+ cleanScreen();
+ if (frameProcessor != null) {
+ frameProcessor.stop();
+ }
+ frameProcessor = processor;
+ }
+ }
+
+ /**
+ * This runnable controls access to the underlying receiver, calling it to process frames when
+ * available from the camera. This is designed to run detection on frames as fast as possible
+ * (i.e., without unnecessary context switching or waiting on the next frame).
+ *
+ * <p>While detection is running on a frame, new frames may be received from the camera. As these
+ * frames come in, the most recent frame is held onto as pending. As soon as detection and its
+ * associated processing is done for the previous frame, detection on the most recently received
+ * frame will immediately start on the same thread.
+ */
+ private class FrameProcessingRunnable implements Runnable {
+
+ // This lock guards all of the member variables below.
+ private final Object lock = new Object();
+ private boolean active = true;
+
+ // These pending variables hold the state associated with the new frame awaiting processing.
+ private ByteBuffer pendingFrameData;
+
+ FrameProcessingRunnable() {}
+
+ /** Marks the runnable as active/not active. Signals any blocked threads to continue. */
+ void setActive(boolean active) {
+ synchronized (lock) {
+ this.active = active;
+ lock.notifyAll();
+ }
+ }
+
+ /**
+ * Sets the frame data received from the camera. This adds the previous unused frame buffer (if
+ * present) back to the camera, and keeps a pending reference to the frame data for future use.
+ */
+ @SuppressWarnings("ByteBufferBackingArray")
+ void setNextFrame(byte[] data, Camera camera) {
+ synchronized (lock) {
+ if (pendingFrameData != null) {
+ camera.addCallbackBuffer(pendingFrameData.array());
+ pendingFrameData = null;
+ }
+
+ if (!bytesToByteBuffer.containsKey(data)) {
+ Log.d(
+ TAG,
+ "Skipping frame. Could not find ByteBuffer associated with the image "
+ + "data from the camera.");
+ return;
+ }
+
+ pendingFrameData = bytesToByteBuffer.get(data);
+
+ // Notify the processor thread if it is waiting on the next frame (see below).
+ lock.notifyAll();
+ }
+ }
+
+ /**
+ * As long as the processing thread is active, this executes detection on frames continuously.
+ * The next pending frame is either immediately available or hasn't been received yet. Once it
+ * is available, we transfer the frame info to local variables and run detection on that frame.
+ * It immediately loops back for the next frame without pausing.
+ *
+ * <p>If detection takes longer than the time in between new frames from the camera, this will
+ * mean that this loop will run without ever waiting on a frame, avoiding any context switching
+ * or frame acquisition time latency.
+ *
+ * <p>If you find that this is using more CPU than you'd like, you should probably decrease the
+ * FPS setting above to allow for some idle time in between frames.
+ */
+ @SuppressLint("InlinedApi")
+ @SuppressWarnings({"GuardedBy", "ByteBufferBackingArray"})
+ @Override
+ public void run() {
+ ByteBuffer data;
+
+ while (true) {
+ synchronized (lock) {
+ while (active && (pendingFrameData == null)) {
+ try {
+ // Wait for the next frame to be received from the camera, since we
+ // don't have it yet.
+ lock.wait();
+ } catch (InterruptedException e) {
+ Log.d(TAG, "Frame processing loop terminated.", e);
+ return;
+ }
+ }
+
+ if (!active) {
+ // Exit the loop once this camera source is stopped or released. We check
+ // this here, immediately after the wait() above, to handle the case where
+ // setActive(false) had been called, triggering the termination of this
+ // loop.
+ return;
+ }
+
+ // Hold onto the frame data locally, so that we can use this for detection
+ // below. We need to clear pendingFrameData to ensure that this buffer isn't
+ // recycled back to the camera before we are done using that data.
+ data = pendingFrameData;
+ pendingFrameData = null;
+ }
+
+ // The code below needs to run outside of synchronization, because this will allow
+ // the camera to add pending frame(s) while we are running detection on the current
+ // frame.
+
+ try {
+ synchronized (processorLock) {
+ frameProcessor.processByteBuffer(
+ data,
+ new FrameMetadata.Builder()
+ .setWidth(previewSize.getWidth())
+ .setHeight(previewSize.getHeight())
+ .setRotation(rotationDegrees)
+ .build(),
+ graphicOverlay);
+ }
+ } catch (Exception e) {
+ Log.e(TAG, "Exception thrown from receiver.", e);
+ } finally {
+ camera.addCallbackBuffer(data.array());
+ }
+ }
+ }
+ }
+
+ /** Cleans up the graphicOverlay; child classes can do their own cleanup as well. */
+ private void cleanScreen() {
+ graphicOverlay.clear();
+ }
+}
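
To summarize the lifecycle contract documented in the class above, here is a hedged usage sketch; `myProcessor` and `surfaceHolder` are placeholders supplied by the caller.

```java
// Hedged sketch of the start/stop/release contract.
CameraSource cameraSource = new CameraSource(activity, graphicOverlay);
cameraSource.setFacing(CameraSource.CAMERA_FACING_BACK);
cameraSource.setMachineLearningFrameProcessor(myProcessor); // placeholder processor

try {
  cameraSource.start(surfaceHolder); // or start() for the preview-less dummy texture
} catch (IOException e) {
  cameraSource.release(); // release on failure so the camera is not left held
}

// stop() keeps the source restartable; release() also shuts down the detector.
cameraSource.stop();
cameraSource.release();
```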
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraSourcePreview.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraSourcePreview.java
new file mode 100644
index 0000000..fd89383
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraSourcePreview.java
@@ -0,0 +1,178 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import android.content.Context;
+import android.content.res.Configuration;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.ViewGroup;
+import com.google.android.gms.common.images.Size;
+import com.google.mlkit.vision.automl.demo.preference.PreferenceUtils;
+import java.io.IOException;
+
+/** Preview the camera image in the screen. */
+public class CameraSourcePreview extends ViewGroup {
+ private static final String TAG = "MIDemoApp:Preview";
+
+ private final Context context;
+ private final SurfaceView surfaceView;
+ private boolean startRequested;
+ private boolean surfaceAvailable;
+ private CameraSource cameraSource;
+
+ private GraphicOverlay overlay;
+
+ public CameraSourcePreview(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ this.context = context;
+ startRequested = false;
+ surfaceAvailable = false;
+
+ surfaceView = new SurfaceView(context);
+ surfaceView.getHolder().addCallback(new SurfaceCallback());
+ addView(surfaceView);
+ }
+
+ private void start(CameraSource cameraSource) throws IOException {
+ this.cameraSource = cameraSource;
+
+ if (this.cameraSource != null) {
+ startRequested = true;
+ startIfReady();
+ }
+ }
+
+ public void start(CameraSource cameraSource, GraphicOverlay overlay) throws IOException {
+ this.overlay = overlay;
+ start(cameraSource);
+ }
+
+ public void stop() {
+ if (cameraSource != null) {
+ cameraSource.stop();
+ }
+ }
+
+ public void release() {
+ if (cameraSource != null) {
+ cameraSource.release();
+ cameraSource = null;
+ }
+ surfaceView.getHolder().getSurface().release();
+ }
+
+ private void startIfReady() throws IOException, SecurityException {
+ if (startRequested && surfaceAvailable) {
+ if (PreferenceUtils.isCameraLiveViewportEnabled(context)) {
+ cameraSource.start(surfaceView.getHolder());
+ } else {
+ cameraSource.start();
+ }
+ requestLayout();
+
+ if (overlay != null) {
+ Size size = cameraSource.getPreviewSize();
+ int min = Math.min(size.getWidth(), size.getHeight());
+ int max = Math.max(size.getWidth(), size.getHeight());
+ boolean isImageFlipped = cameraSource.getCameraFacing() == CameraSource.CAMERA_FACING_FRONT;
+ if (isPortraitMode()) {
+ // Swap width and height sizes when in portrait, since it will be rotated by 90 degrees.
+ // The camera preview and the image being processed have the same size.
+ overlay.setImageSourceInfo(min, max, isImageFlipped);
+ } else {
+ overlay.setImageSourceInfo(max, min, isImageFlipped);
+ }
+ overlay.clear();
+ }
+ startRequested = false;
+ }
+ }
+
+ private class SurfaceCallback implements SurfaceHolder.Callback {
+ @Override
+ public void surfaceCreated(SurfaceHolder surface) {
+ surfaceAvailable = true;
+ try {
+ startIfReady();
+ } catch (IOException e) {
+ Log.e(TAG, "Could not start camera source.", e);
+ }
+ }
+
+ @Override
+ public void surfaceDestroyed(SurfaceHolder surface) {
+ surfaceAvailable = false;
+ }
+
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}
+ }
+
+ @Override
+ protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+ int width = 320;
+ int height = 240;
+ if (cameraSource != null) {
+ Size size = cameraSource.getPreviewSize();
+ if (size != null) {
+ width = size.getWidth();
+ height = size.getHeight();
+ }
+ }
+
+ // Swap width and height sizes when in portrait, since it will be rotated 90 degrees
+ if (isPortraitMode()) {
+ int tmp = width;
+ width = height;
+ height = tmp;
+ }
+
+ final int layoutWidth = right - left;
+ final int layoutHeight = bottom - top;
+
+ // Computes height and width for potentially doing fit width.
+ int childWidth = layoutWidth;
+ int childHeight = (int) (((float) layoutWidth / (float) width) * height);
+
+ // If height is too tall using fit width, does fit height instead.
+ if (childHeight > layoutHeight) {
+ childHeight = layoutHeight;
+ childWidth = (int) (((float) layoutHeight / (float) height) * width);
+ }
+
+ for (int i = 0; i < getChildCount(); ++i) {
+ getChildAt(i).layout(0, 0, childWidth, childHeight);
+ Log.d(TAG, "Assigned view: " + i);
+ }
+ }
+
+ private boolean isPortraitMode() {
+ int orientation = context.getResources().getConfiguration().orientation;
+ if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
+ return false;
+ }
+ if (orientation == Configuration.ORIENTATION_PORTRAIT) {
+ return true;
+ }
+
+ Log.d(TAG, "isPortraitMode returning false by default");
+ return false;
+ }
+}
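
As a hedged sketch, an Activity typically pairs this preview with the overlay in its lifecycle callbacks; `preview`, `graphicOverlay`, and `cameraSource` are assumed fields bound elsewhere.

```java
@Override
protected void onResume() {
  super.onResume();
  try {
    preview.start(cameraSource, graphicOverlay); // starts once the surface is ready
  } catch (IOException e) {
    Log.e(TAG, "Unable to start camera source.", e);
    cameraSource.release();
    cameraSource = null;
  }
}

@Override
protected void onPause() {
  super.onPause();
  preview.stop(); // stopped here; onResume restarts it
}
```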
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraXLivePreviewActivity.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraXLivePreviewActivity.java
new file mode 100644
index 0000000..7e37109
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraXLivePreviewActivity.java
@@ -0,0 +1,408 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import android.content.Context;
+import android.content.Intent;
+import android.content.pm.PackageInfo;
+import android.content.pm.PackageManager;
+import android.os.Build.VERSION_CODES;
+import android.os.Bundle;
+import androidx.appcompat.app.AppCompatActivity;
+import android.util.Log;
+import android.util.Size;
+import android.view.View;
+import android.widget.AdapterView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.ArrayAdapter;
+import android.widget.CompoundButton;
+import android.widget.ImageView;
+import android.widget.Spinner;
+import android.widget.Toast;
+import android.widget.ToggleButton;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.camera.core.CameraInfoUnavailableException;
+import androidx.camera.core.CameraSelector;
+import androidx.camera.core.ImageAnalysis;
+import androidx.camera.core.Preview;
+import androidx.camera.lifecycle.ProcessCameraProvider;
+import androidx.camera.view.PreviewView;
+import androidx.core.app.ActivityCompat;
+import androidx.core.app.ActivityCompat.OnRequestPermissionsResultCallback;
+import androidx.core.content.ContextCompat;
+import androidx.lifecycle.ViewModelProvider;
+import androidx.lifecycle.ViewModelProvider.AndroidViewModelFactory;
+import com.google.android.gms.common.annotation.KeepName;
+import com.google.mlkit.common.MlKitException;
+import com.google.mlkit.common.model.CustomRemoteModel;
+import com.google.mlkit.linkfirebase.FirebaseModelSource;
+import com.google.mlkit.vision.automl.demo.automl.AutoMLImageLabelerProcessor;
+import com.google.mlkit.vision.automl.demo.automl.AutoMLImageLabelerProcessor.Mode;
+import com.google.mlkit.vision.automl.demo.object.ObjectDetectorProcessor;
+import com.google.mlkit.vision.automl.demo.preference.PreferenceUtils;
+import com.google.mlkit.vision.automl.demo.preference.SettingsActivity;
+import com.google.mlkit.vision.label.custom.CustomImageLabelerOptions;
+import com.google.mlkit.vision.objects.custom.CustomObjectDetectorOptions;
+import java.util.ArrayList;
+import java.util.List;
+
+/** Live preview demo app for ML Kit APIs using CameraX. */
+@KeepName
+@RequiresApi(VERSION_CODES.LOLLIPOP)
+public final class CameraXLivePreviewActivity extends AppCompatActivity
+ implements OnRequestPermissionsResultCallback,
+ OnItemSelectedListener,
+ CompoundButton.OnCheckedChangeListener {
+ private static final String TAG = "CameraXLivePreview";
+ private static final int PERMISSION_REQUESTS = 1;
+ private static final String CUSTOM_AUTOML_LABELING = "Custom AutoML Image Labeling";
+ private static final String CUSTOM_AUTOML_OBJECT_DETECTION = "Custom AutoML Object Detection";
+
+ private static final String STATE_SELECTED_MODEL = "selected_model";
+ private static final String STATE_LENS_FACING = "lens_facing";
+
+ private PreviewView previewView;
+ private GraphicOverlay graphicOverlay;
+
+ @Nullable private ProcessCameraProvider cameraProvider;
+ @Nullable private Preview previewUseCase;
+ @Nullable private ImageAnalysis analysisUseCase;
+ @Nullable private VisionImageProcessor imageProcessor;
+ private boolean needUpdateGraphicOverlayImageSourceInfo;
+
+ private String selectedModel = CUSTOM_AUTOML_LABELING;
+ private int lensFacing = CameraSelector.LENS_FACING_BACK;
+ private CameraSelector cameraSelector;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ Log.d(TAG, "onCreate");
+
+ if (savedInstanceState != null) {
+ selectedModel = savedInstanceState.getString(STATE_SELECTED_MODEL, CUSTOM_AUTOML_LABELING);
+ lensFacing = savedInstanceState.getInt(STATE_LENS_FACING, CameraSelector.LENS_FACING_BACK);
+ }
+ cameraSelector = new CameraSelector.Builder().requireLensFacing(lensFacing).build();
+
+ setContentView(R.layout.activity_camerax_live_preview);
+ previewView = findViewById(R.id.preview_view);
+ if (previewView == null) {
+ Log.d(TAG, "previewView is null");
+ }
+ graphicOverlay = findViewById(R.id.graphic_overlay);
+ if (graphicOverlay == null) {
+ Log.d(TAG, "graphicOverlay is null");
+ }
+
+ Spinner spinner = findViewById(R.id.spinner);
+ List<String> options = new ArrayList<>();
+ options.add(CUSTOM_AUTOML_LABELING);
+ options.add(CUSTOM_AUTOML_OBJECT_DETECTION);
+
+ // Creating adapter for spinner
+ ArrayAdapter<String> dataAdapter = new ArrayAdapter<>(this, R.layout.spinner_style, options);
+ // Drop down layout style - list view with radio button
+ dataAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+ // attaching data adapter to spinner
+ spinner.setAdapter(dataAdapter);
+ spinner.setOnItemSelectedListener(this);
+
+ ToggleButton facingSwitch = findViewById(R.id.facing_switch);
+ facingSwitch.setOnCheckedChangeListener(this);
+
+ new ViewModelProvider(this, AndroidViewModelFactory.getInstance(getApplication()))
+ .get(CameraXViewModel.class)
+ .getProcessCameraProvider()
+ .observe(
+ this,
+ provider -> {
+ cameraProvider = provider;
+ if (allPermissionsGranted()) {
+ bindAllCameraUseCases();
+ }
+ });
+
+ ImageView settingsButton = findViewById(R.id.settings_button);
+ settingsButton.setOnClickListener(
+ v -> {
+ Intent intent = new Intent(getApplicationContext(), SettingsActivity.class);
+ intent.putExtra(
+ SettingsActivity.EXTRA_LAUNCH_SOURCE,
+ SettingsActivity.LaunchSource.CAMERAX_LIVE_PREVIEW);
+ startActivity(intent);
+ });
+
+ if (!allPermissionsGranted()) {
+ getRuntimePermissions();
+ }
+ }
+
+ @Override
+ protected void onSaveInstanceState(@NonNull Bundle bundle) {
+ super.onSaveInstanceState(bundle);
+ bundle.putString(STATE_SELECTED_MODEL, selectedModel);
+ bundle.putInt(STATE_LENS_FACING, lensFacing);
+ }
+
+ @Override
+ public synchronized void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
+ // An item was selected. You can retrieve the selected item using
+ // parent.getItemAtPosition(pos)
+ selectedModel = parent.getItemAtPosition(pos).toString();
+ Log.d(TAG, "Selected model: " + selectedModel);
+ bindAnalysisUseCase();
+ }
+
+ @Override
+ public void onNothingSelected(AdapterView<?> parent) {
+ // Do nothing.
+ }
+
+ @Override
+ public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
+ Log.d(TAG, "Set facing");
+ if (cameraProvider == null) {
+ return;
+ }
+
+ int newLensFacing =
+ lensFacing == CameraSelector.LENS_FACING_FRONT
+ ? CameraSelector.LENS_FACING_BACK
+ : CameraSelector.LENS_FACING_FRONT;
+ CameraSelector newCameraSelector =
+ new CameraSelector.Builder().requireLensFacing(newLensFacing).build();
+ try {
+ if (cameraProvider.hasCamera(newCameraSelector)) {
+ lensFacing = newLensFacing;
+ cameraSelector = newCameraSelector;
+ bindAllCameraUseCases();
+ return;
+ }
+ } catch (CameraInfoUnavailableException e) {
+ // Falls through
+ }
+ Toast.makeText(
+ getApplicationContext(),
+ "This device does not have lens with facing: " + newLensFacing,
+ Toast.LENGTH_SHORT)
+ .show();
+ }
+
+ @Override
+ public void onResume() {
+ super.onResume();
+ bindAllCameraUseCases();
+ }
+
+ @Override
+ protected void onPause() {
+ super.onPause();
+ if (imageProcessor != null) {
+ imageProcessor.stop();
+ }
+ }
+
+ @Override
+ public void onDestroy() {
+ super.onDestroy();
+ if (imageProcessor != null) {
+ imageProcessor.stop();
+ }
+ }
+
+ private void bindAllCameraUseCases() {
+ bindPreviewUseCase();
+ bindAnalysisUseCase();
+ }
+
+ private void bindPreviewUseCase() {
+ if (!PreferenceUtils.isCameraLiveViewportEnabled(this)) {
+ return;
+ }
+ if (cameraProvider == null) {
+ return;
+ }
+ if (previewUseCase != null) {
+ cameraProvider.unbind(previewUseCase);
+ }
+
+ Preview.Builder builder = new Preview.Builder();
+ Size targetResolution = PreferenceUtils.getCameraXTargetResolution(this, lensFacing);
+ if (targetResolution != null) {
+ builder.setTargetResolution(targetResolution);
+ }
+ previewUseCase = builder.build();
+ previewUseCase.setSurfaceProvider(previewView.getSurfaceProvider());
+ cameraProvider.bindToLifecycle(/* lifecycleOwner= */ this, cameraSelector, previewUseCase);
+ }
+
+ private void bindAnalysisUseCase() {
+ if (cameraProvider == null) {
+ return;
+ }
+ if (analysisUseCase != null) {
+ cameraProvider.unbind(analysisUseCase);
+ }
+ if (imageProcessor != null) {
+ imageProcessor.stop();
+ }
+
+ try {
+ String autoMLRemoteModelName = PreferenceUtils.getAutoMLRemoteModelName(this);
+ switch (selectedModel) {
+ case CUSTOM_AUTOML_LABELING:
+ Log.i(TAG, "Create Custom AutoML Image Label Processor");
+ CustomRemoteModel customRemoteModel =
+ new CustomRemoteModel.Builder(
+ new FirebaseModelSource.Builder(autoMLRemoteModelName).build())
+ .build();
+ CustomImageLabelerOptions customImageLabelerOptions =
+ new CustomImageLabelerOptions.Builder(customRemoteModel)
+ .setConfidenceThreshold(0)
+ .build();
+ imageProcessor =
+ new AutoMLImageLabelerProcessor(
+ this, customRemoteModel, customImageLabelerOptions, Mode.LIVE_PREVIEW);
+ break;
+ case CUSTOM_AUTOML_OBJECT_DETECTION:
+ Log.i(TAG, "Using Custom AutoML Object Detector Processor");
+ CustomRemoteModel customODTRemoteModel =
+ new CustomRemoteModel.Builder(
+ new FirebaseModelSource.Builder(autoMLRemoteModelName).build())
+ .build();
+ CustomObjectDetectorOptions customAutoMLODTOptions =
+ new CustomObjectDetectorOptions.Builder(customODTRemoteModel)
+ .setDetectorMode(CustomObjectDetectorOptions.STREAM_MODE)
+ .enableClassification()
+ .setClassificationConfidenceThreshold(0)
+ .setMaxPerObjectLabelCount(1)
+ .build();
+ imageProcessor =
+ new ObjectDetectorProcessor(this, customODTRemoteModel, customAutoMLODTOptions);
+ break;
+ default:
+ throw new IllegalStateException("Invalid model name");
+ }
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Can not create image processor: " + selectedModel, e);
+ Toast.makeText(
+ getApplicationContext(),
+ "Can not create image processor: " + e.getLocalizedMessage(),
+ Toast.LENGTH_LONG)
+ .show();
+ return;
+ }
+
+ ImageAnalysis.Builder builder = new ImageAnalysis.Builder();
+ Size targetResolution = PreferenceUtils.getCameraXTargetResolution(this, lensFacing);
+ if (targetResolution != null) {
+ builder.setTargetResolution(targetResolution);
+ }
+ analysisUseCase = builder.build();
+
+ needUpdateGraphicOverlayImageSourceInfo = true;
+ analysisUseCase.setAnalyzer(
+ // imageProcessor.processImageProxy will use another thread to run the detection underneath,
+ // thus we can just run the analyzer itself on the main thread.
+ ContextCompat.getMainExecutor(this),
+ imageProxy -> {
+ if (needUpdateGraphicOverlayImageSourceInfo) {
+ boolean isImageFlipped = lensFacing == CameraSelector.LENS_FACING_FRONT;
+ int rotationDegrees = imageProxy.getImageInfo().getRotationDegrees();
+ if (rotationDegrees == 0 || rotationDegrees == 180) {
+ graphicOverlay.setImageSourceInfo(
+ imageProxy.getWidth(), imageProxy.getHeight(), isImageFlipped);
+ } else {
+ graphicOverlay.setImageSourceInfo(
+ imageProxy.getHeight(), imageProxy.getWidth(), isImageFlipped);
+ }
+ needUpdateGraphicOverlayImageSourceInfo = false;
+ }
+ try {
+ imageProcessor.processImageProxy(imageProxy, graphicOverlay);
+ } catch (MlKitException e) {
+ Log.e(TAG, "Failed to process image. Error: " + e.getLocalizedMessage());
+ Toast.makeText(getApplicationContext(), e.getLocalizedMessage(), Toast.LENGTH_SHORT)
+ .show();
+ }
+ });
+
+ cameraProvider.bindToLifecycle(/* lifecycleOwner= */ this, cameraSelector, analysisUseCase);
+ }
+
+ private String[] getRequiredPermissions() {
+ try {
+ PackageInfo info =
+ this.getPackageManager()
+ .getPackageInfo(this.getPackageName(), PackageManager.GET_PERMISSIONS);
+ String[] ps = info.requestedPermissions;
+ if (ps != null && ps.length > 0) {
+ return ps;
+ } else {
+ return new String[0];
+ }
+ } catch (Exception e) {
+ return new String[0];
+ }
+ }
+
+ private boolean allPermissionsGranted() {
+ for (String permission : getRequiredPermissions()) {
+ if (!isPermissionGranted(this, permission)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private void getRuntimePermissions() {
+ List<String> allNeededPermissions = new ArrayList<>();
+ for (String permission : getRequiredPermissions()) {
+ if (!isPermissionGranted(this, permission)) {
+ allNeededPermissions.add(permission);
+ }
+ }
+
+ if (!allNeededPermissions.isEmpty()) {
+ ActivityCompat.requestPermissions(
+ this, allNeededPermissions.toArray(new String[0]), PERMISSION_REQUESTS);
+ }
+ }
+
+ @Override
+ public void onRequestPermissionsResult(
+ int requestCode, String[] permissions, int[] grantResults) {
+ Log.i(TAG, "Permission granted!");
+ if (allPermissionsGranted()) {
+ bindAllCameraUseCases();
+ }
+ super.onRequestPermissionsResult(requestCode, permissions, grantResults);
+ }
+
+ private static boolean isPermissionGranted(Context context, String permission) {
+ if (ContextCompat.checkSelfPermission(context, permission)
+ == PackageManager.PERMISSION_GRANTED) {
+ Log.i(TAG, "Permission granted: " + permission);
+ return true;
+ }
+ Log.i(TAG, "Permission NOT granted: " + permission);
+ return false;
+ }
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraXViewModel.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraXViewModel.java
new file mode 100644
index 0000000..d932118
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/CameraXViewModel.java
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import android.app.Application;
+import android.os.Build.VERSION_CODES;
+import android.util.Log;
+import androidx.annotation.NonNull;
+import androidx.annotation.RequiresApi;
+import androidx.camera.lifecycle.ProcessCameraProvider;
+import androidx.core.content.ContextCompat;
+import androidx.lifecycle.AndroidViewModel;
+import androidx.lifecycle.LiveData;
+import androidx.lifecycle.MutableLiveData;
+import com.google.common.util.concurrent.ListenableFuture;
+import java.util.concurrent.ExecutionException;
+
+/** View model for interacting with CameraX. */
+@RequiresApi(VERSION_CODES.LOLLIPOP)
+public final class CameraXViewModel extends AndroidViewModel {
+
+ private static final String TAG = "CameraXViewModel";
+ private MutableLiveData<ProcessCameraProvider> cameraProviderLiveData;
+
+ /**
+ * Create an instance which interacts with the camera service via the given application context.
+ */
+ public CameraXViewModel(@NonNull Application application) {
+ super(application);
+ }
+
+ LiveData<ProcessCameraProvider> getProcessCameraProvider() {
+ if (cameraProviderLiveData == null) {
+ cameraProviderLiveData = new MutableLiveData<>();
+
+ ListenableFuture<ProcessCameraProvider> cameraProviderFuture =
+ ProcessCameraProvider.getInstance(getApplication());
+ cameraProviderFuture.addListener(
+ () -> {
+ try {
+ cameraProviderLiveData.setValue(cameraProviderFuture.get());
+ } catch (ExecutionException | InterruptedException e) {
+ // Handle any errors (including cancellation) here.
+ Log.e(TAG, "Unhandled exception", e);
+ }
+ },
+ ContextCompat.getMainExecutor(getApplication()));
+ }
+
+ return cameraProviderLiveData;
+ }
+}
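
For reference, a minimal sketch of consuming this view model from an Activity, mirroring the `observe` call in CameraXLivePreviewActivity above; the preview binding shown is a simplified assumption.

```java
// Hedged sketch: bind a Preview use case once the provider becomes available.
new ViewModelProvider(this, AndroidViewModelFactory.getInstance(getApplication()))
    .get(CameraXViewModel.class)
    .getProcessCameraProvider()
    .observe(this, provider -> {
      Preview preview = new Preview.Builder().build();
      preview.setSurfaceProvider(previewView.getSurfaceProvider());
      provider.bindToLifecycle(this, CameraSelector.DEFAULT_BACK_CAMERA, preview);
    });
```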
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/ChooserActivity.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/ChooserActivity.java
new file mode 100644
index 0000000..ee81e60
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/ChooserActivity.java
@@ -0,0 +1,193 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import android.content.Context;
+import android.content.Intent;
+import android.content.pm.PackageInfo;
+import android.content.pm.PackageManager;
+import android.os.Build.VERSION;
+import android.os.Build.VERSION_CODES;
+import android.os.Bundle;
+import android.os.StrictMode;
+import androidx.appcompat.app.AppCompatActivity;
+import android.util.Log;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
+import android.widget.ListView;
+import android.widget.TextView;
+import androidx.core.app.ActivityCompat;
+import androidx.core.app.ActivityCompat.OnRequestPermissionsResultCallback;
+import androidx.core.content.ContextCompat;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Demo app chooser which takes care of runtime permission requesting and allows you to pick from
+ * all available testing Activities.
+ */
+public final class ChooserActivity extends AppCompatActivity
+ implements OnRequestPermissionsResultCallback, AdapterView.OnItemClickListener {
+ private static final String TAG = "ChooserActivity";
+ private static final int PERMISSION_REQUESTS = 1;
+
+ @SuppressWarnings("NewApi") // CameraX is only available on API 21+
+ private static final Class<?>[] CLASSES =
+ VERSION.SDK_INT < VERSION_CODES.LOLLIPOP
+ ? new Class<?>[] {
+ LivePreviewActivity.class, StillImageActivity.class,
+ }
+ : new Class<?>[] {
+ LivePreviewActivity.class, StillImageActivity.class, CameraXLivePreviewActivity.class,
+ };
+
+ private static final int[] DESCRIPTION_IDS =
+ VERSION.SDK_INT < VERSION_CODES.LOLLIPOP
+ ? new int[] {
+ R.string.desc_camera_source_activity, R.string.desc_still_image_activity,
+ }
+ : new int[] {
+ R.string.desc_camera_source_activity,
+ R.string.desc_still_image_activity,
+ R.string.desc_camerax_live_preview_activity,
+ };
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+
+ if (BuildConfig.DEBUG) {
+ StrictMode.setThreadPolicy(
+ new StrictMode.ThreadPolicy.Builder().detectAll().penaltyLog().build());
+ StrictMode.setVmPolicy(
+ new StrictMode.VmPolicy.Builder()
+ .detectLeakedSqlLiteObjects()
+ .detectLeakedClosableObjects()
+ .penaltyLog()
+ .penaltyDeath()
+ .build());
+ }
+ super.onCreate(savedInstanceState);
+ Log.d(TAG, "onCreate");
+
+ setContentView(R.layout.activity_chooser);
+
+ // Set up ListView and Adapter
+ ListView listView = findViewById(R.id.test_activity_list_view);
+
+ MyArrayAdapter adapter = new MyArrayAdapter(this, android.R.layout.simple_list_item_2, CLASSES);
+ adapter.setDescriptionIds(DESCRIPTION_IDS);
+
+ listView.setAdapter(adapter);
+ listView.setOnItemClickListener(this);
+
+ if (!allPermissionsGranted()) {
+ getRuntimePermissions();
+ }
+ }
+
+ @Override
+ public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
+ Class<?> clicked = CLASSES[position];
+ startActivity(new Intent(this, clicked));
+ }
+
+ private String[] getRequiredPermissions() {
+ try {
+ PackageInfo info =
+ this.getPackageManager()
+ .getPackageInfo(this.getPackageName(), PackageManager.GET_PERMISSIONS);
+ String[] ps = info.requestedPermissions;
+ if (ps != null && ps.length > 0) {
+ return ps;
+ } else {
+ return new String[0];
+ }
+ } catch (Exception e) {
+ return new String[0];
+ }
+ }
+
+ private boolean allPermissionsGranted() {
+ for (String permission : getRequiredPermissions()) {
+ if (!isPermissionGranted(this, permission)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private void getRuntimePermissions() {
+ List<String> allNeededPermissions = new ArrayList<>();
+ for (String permission : getRequiredPermissions()) {
+ if (!isPermissionGranted(this, permission)) {
+ allNeededPermissions.add(permission);
+ }
+ }
+
+ if (!allNeededPermissions.isEmpty()) {
+ ActivityCompat.requestPermissions(
+ this, allNeededPermissions.toArray(new String[0]), PERMISSION_REQUESTS);
+ }
+ }
+
+ private static boolean isPermissionGranted(Context context, String permission) {
+ if (ContextCompat.checkSelfPermission(context, permission)
+ == PackageManager.PERMISSION_GRANTED) {
+ Log.i(TAG, "Permission granted: " + permission);
+ return true;
+ }
+ Log.i(TAG, "Permission NOT granted: " + permission);
+ return false;
+ }
+
+ private static class MyArrayAdapter extends ArrayAdapter<Class<?>> {
+
+ private final Context context;
+ private final Class<?>[] classes;
+ private int[] descriptionIds;
+
+ MyArrayAdapter(Context context, int resource, Class<?>[] objects) {
+ super(context, resource, objects);
+
+ this.context = context;
+ classes = objects;
+ }
+
+ @Override
+ public View getView(int position, View convertView, ViewGroup parent) {
+ View view = convertView;
+
+ if (convertView == null) {
+ LayoutInflater inflater =
+ (LayoutInflater) context.getSystemService(LAYOUT_INFLATER_SERVICE);
+ view = inflater.inflate(android.R.layout.simple_list_item_2, null);
+ }
+
+ ((TextView) view.findViewById(android.R.id.text1)).setText(classes[position].getSimpleName());
+ ((TextView) view.findViewById(android.R.id.text2)).setText(descriptionIds[position]);
+
+ return view;
+ }
+
+ void setDescriptionIds(int[] descriptionIds) {
+ this.descriptionIds = descriptionIds;
+ }
+ }
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/FrameMetadata.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/FrameMetadata.java
new file mode 100644
index 0000000..b3caaae
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/FrameMetadata.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+/** Describes the metadata of a camera frame. */
+public class FrameMetadata {
+
+ private final int width;
+ private final int height;
+ private final int rotation;
+
+ public int getWidth() {
+ return width;
+ }
+
+ public int getHeight() {
+ return height;
+ }
+
+ public int getRotation() {
+ return rotation;
+ }
+
+ private FrameMetadata(int width, int height, int rotation) {
+ this.width = width;
+ this.height = height;
+ this.rotation = rotation;
+ }
+
+ /** Builder of {@link FrameMetadata}. */
+ public static class Builder {
+
+ private int width;
+ private int height;
+ private int rotation;
+
+ public Builder setWidth(int width) {
+ this.width = width;
+ return this;
+ }
+
+ public Builder setHeight(int height) {
+ this.height = height;
+ return this;
+ }
+
+ public Builder setRotation(int rotation) {
+ this.rotation = rotation;
+ return this;
+ }
+
+ public FrameMetadata build() {
+ return new FrameMetadata(width, height, rotation);
+ }
+ }
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/GraphicOverlay.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/GraphicOverlay.java
new file mode 100644
index 0000000..422e154
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/GraphicOverlay.java
@@ -0,0 +1,241 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.Matrix;
+import android.util.AttributeSet;
+import android.view.View;
+import com.google.common.base.Preconditions;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A view which renders a series of custom graphics to be overlaid on top of an associated preview
+ * (i.e., the camera preview). The creator can add graphics objects, update the objects, and remove
+ * them, triggering the appropriate drawing and invalidation within the view.
+ *
+ * <p>Supports scaling and mirroring of the graphics relative to the camera's preview properties.
+ * The idea is that detection items are expressed in terms of an image size, but need to be scaled
+ * up to the full view size, and also mirrored in the case of the front-facing camera.
+ *
+ * <p>Associated {@link Graphic} items should use the following methods to convert to view
+ * coordinates for the graphics that are drawn:
+ *
+ * <ol>
+ *   <li>{@link Graphic#scale(float)} adjusts the size of the supplied value from the image scale
+ *       to the view scale.
+ *   <li>{@link Graphic#translateX(float)} and {@link Graphic#translateY(float)} adjust the
+ *       coordinate from the image's coordinate system to the view coordinate system.
+ * </ol>
+ */
+public class GraphicOverlay extends View {
+ private final Object lock = new Object();
+ private final List<Graphic> graphics = new ArrayList<>();
+ // Matrix for transforming from image coordinates to overlay view coordinates.
+ private final Matrix transformationMatrix = new Matrix();
+
+ private int imageWidth;
+ private int imageHeight;
+ // The factor of overlay View size to image size. Anything in the image coordinates needs to be
+ // scaled by this amount to fit within the area of the overlay View.
+ private float scaleFactor = 1.0f;
+ // The number of horizontal pixels that need to be cropped on each side to fit the image within
+ // the overlay View after scaling.
+ private float postScaleWidthOffset;
+ // The number of vertical pixels that need to be cropped on each side to fit the image within
+ // the overlay View after scaling.
+ private float postScaleHeightOffset;
+ private boolean isImageFlipped;
+ private boolean needUpdateTransformation = true;
+
+ /**
+ * Base class for a custom graphics object to be rendered within the graphic overlay. Subclass
+ * this and implement the {@link Graphic#draw(Canvas)} method to define the graphics element. Add
+ * instances to the overlay using {@link GraphicOverlay#add(Graphic)}.
+ */
+ public abstract static class Graphic {
+ private GraphicOverlay overlay;
+
+ public Graphic(GraphicOverlay overlay) {
+ this.overlay = overlay;
+ }
+
+ /**
+ * Draw the graphic on the supplied canvas. Drawing should use the following methods to convert
+ * to view coordinates for the graphics that are drawn:
+ *
+ * <ol>
+ *   <li>{@link Graphic#scale(float)} adjusts the size of the supplied value from the image
+ *       scale to the view scale.
+ *   <li>{@link Graphic#translateX(float)} and {@link Graphic#translateY(float)} adjust the
+ *       coordinate from the image's coordinate system to the view coordinate system.
+ * </ol>
+ *
+ * @param canvas drawing canvas
+ */
+ public abstract void draw(Canvas canvas);
+
+ /**
+ * Adjusts the supplied value from the image scale to the view scale.
+ */
+ public float scale(float imagePixel) {
+ return imagePixel * overlay.scaleFactor;
+ }
+
+ /** Returns the application context of the app. */
+ public Context getApplicationContext() {
+ return overlay.getContext().getApplicationContext();
+ }
+
+ public boolean isImageFlipped() {
+ return overlay.isImageFlipped;
+ }
+
+ /**
+ * Adjusts the x coordinate from the image's coordinate system to the view coordinate system.
+ */
+ public float translateX(float x) {
+ if (overlay.isImageFlipped) {
+ return overlay.getWidth() - (scale(x) - overlay.postScaleWidthOffset);
+ } else {
+ return scale(x) - overlay.postScaleWidthOffset;
+ }
+ }
+
+ /**
+ * Adjusts the y coordinate from the image's coordinate system to the view coordinate system.
+ */
+ public float translateY(float y) {
+ return scale(y) - overlay.postScaleHeightOffset;
+ }
+
+ /**
+ * Returns a {@link Matrix} for transforming from image coordinates to overlay view coordinates.
+ */
+ public Matrix getTransformationMatrix() {
+ return overlay.transformationMatrix;
+ }
+
+ public void postInvalidate() {
+ overlay.postInvalidate();
+ }
+ }
+
+ public GraphicOverlay(Context context, AttributeSet attrs) {
+ super(context, attrs);
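+ // Recompute the image-to-view transformation whenever this view is laid out again (e.g. on
+ // rotation or resize), since the scale and crop offsets depend on the view's dimensions.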
+ addOnLayoutChangeListener(
+ (view, left, top, right, bottom, oldLeft, oldTop, oldRight, oldBottom) ->
+ needUpdateTransformation = true);
+ }
+
+ /** Removes all graphics from the overlay. */
+ public void clear() {
+ synchronized (lock) {
+ graphics.clear();
+ }
+ postInvalidate();
+ }
+
+ /** Adds a graphic to the overlay. */
+ public void add(Graphic graphic) {
+ synchronized (lock) {
+ graphics.add(graphic);
+ }
+ }
+
+ /** Removes a graphic from the overlay. */
+ public void remove(Graphic graphic) {
+ synchronized (lock) {
+ graphics.remove(graphic);
+ }
+ postInvalidate();
+ }
+
+ /**
+ * Sets the source information of the image being processed by detectors, including size and
+ * whether it is flipped, which informs how to transform image coordinates later.
+ *
+ * @param imageWidth the width of the image sent to ML Kit detectors
+ * @param imageHeight the height of the image sent to ML Kit detectors
+ * @param isFlipped whether the image is flipped. Set this to true when the image is from the
+ *     front camera.
+ */
+ public void setImageSourceInfo(int imageWidth, int imageHeight, boolean isFlipped) {
+ Preconditions.checkState(imageWidth > 0, "image width must be positive");
+ Preconditions.checkState(imageHeight > 0, "image height must be positive");
+ synchronized (lock) {
+ this.imageWidth = imageWidth;
+ this.imageHeight = imageHeight;
+ this.isImageFlipped = isFlipped;
+ needUpdateTransformation = true;
+ }
+ postInvalidate();
+ }
+
+ public int getImageWidth() {
+ return imageWidth;
+ }
+
+ public int getImageHeight() {
+ return imageHeight;
+ }
+
+ private void updateTransformationIfNeeded() {
+ if (!needUpdateTransformation || imageWidth <= 0 || imageHeight <= 0) {
+ return;
+ }
+ float viewAspectRatio = (float) getWidth() / getHeight();
+ float imageAspectRatio = (float) imageWidth / imageHeight;
+ postScaleWidthOffset = 0;
+ postScaleHeightOffset = 0;
+ if (viewAspectRatio > imageAspectRatio) {
+ // The image needs to be vertically cropped to be displayed in this view.
+ scaleFactor = (float) getWidth() / imageWidth;
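+ // The scaled image height is getWidth() / imageAspectRatio; half of the part that extends
+ // beyond the view height is cropped from the top and half from the bottom.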
+ postScaleHeightOffset = ((float) getWidth() / imageAspectRatio - getHeight()) / 2;
+ } else {
+ // The image needs to be horizontally cropped to be displayed in this view.
+ scaleFactor = (float) getHeight() / imageHeight;
+ postScaleWidthOffset = ((float) getHeight() * imageAspectRatio - getWidth()) / 2;
+ }
+
+ transformationMatrix.reset();
+ transformationMatrix.setScale(scaleFactor, scaleFactor);
+ transformationMatrix.postTranslate(-postScaleWidthOffset, -postScaleHeightOffset);
+
+ if (isImageFlipped) {
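+ // Mirror the drawing horizontally around the view's vertical center line for front-camera
+ // (flipped) images.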
+ transformationMatrix.postScale(-1f, 1f, getWidth() / 2f, getHeight() / 2f);
+ }
+
+ needUpdateTransformation = false;
+ }
+
+ /** Draws the overlay with its associated graphic objects. */
+ @Override
+ protected void onDraw(Canvas canvas) {
+ super.onDraw(canvas);
+
+ synchronized (lock) {
+ updateTransformationIfNeeded();
+
+ for (Graphic graphic : graphics) {
+ graphic.draw(canvas);
+ }
+ }
+ }
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/InferenceInfoGraphic.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/InferenceInfoGraphic.java
new file mode 100644
index 0000000..08f0624
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/InferenceInfoGraphic.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import androidx.annotation.Nullable;
+
+/** Graphic instance for rendering inference info (latency, FPS, resolution) in an overlay view. */
+public class InferenceInfoGraphic extends GraphicOverlay.Graphic {
+
+ private static final int TEXT_COLOR = Color.WHITE;
+ private static final float TEXT_SIZE = 60.0f;
+
+ private final Paint textPaint;
+ private final GraphicOverlay overlay;
+ private final double latency;
+
+ // Only valid when a stream of input images is being processed. Null for single image mode.
+ @Nullable private final Integer framesPerSecond;
+
+ public InferenceInfoGraphic(
+ GraphicOverlay overlay, double latency, @Nullable Integer framesPerSecond) {
+ super(overlay);
+ this.overlay = overlay;
+ this.latency = latency;
+ this.framesPerSecond = framesPerSecond;
+ textPaint = new Paint();
+ textPaint.setColor(TEXT_COLOR);
+ textPaint.setTextSize(TEXT_SIZE);
+ postInvalidate();
+ }
+
+ @Override
+ public synchronized void draw(Canvas canvas) {
+ float x = TEXT_SIZE * 0.5f;
+ float y = TEXT_SIZE * 1.5f;
+
+ canvas.drawText(
+ "InputImage size: " + overlay.getImageHeight() + "x" + overlay.getImageWidth(),
+ x,
+ y,
+ textPaint);
+
+ // Draw FPS (if valid) and inference latency
+ if (framesPerSecond != null) {
+ canvas.drawText(
+ "FPS: " + framesPerSecond + ", latency: " + latency + " ms", x, y + TEXT_SIZE, textPaint);
+ } else {
+ canvas.drawText("Latency: " + latency + " ms", x, y + TEXT_SIZE, textPaint);
+ }
+ }
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/LivePreviewActivity.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/LivePreviewActivity.java
new file mode 100644
index 0000000..c2c21a3
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/LivePreviewActivity.java
@@ -0,0 +1,307 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import android.content.Context;
+import android.content.Intent;
+import android.content.pm.PackageInfo;
+import android.content.pm.PackageManager;
+import android.os.Bundle;
+import androidx.appcompat.app.AppCompatActivity;
+import android.util.Log;
+import android.view.View;
+import android.widget.AdapterView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.ArrayAdapter;
+import android.widget.CompoundButton;
+import android.widget.ImageView;
+import android.widget.Spinner;
+import android.widget.Toast;
+import android.widget.ToggleButton;
+import androidx.core.app.ActivityCompat;
+import androidx.core.app.ActivityCompat.OnRequestPermissionsResultCallback;
+import androidx.core.content.ContextCompat;
+import com.google.android.gms.common.annotation.KeepName;
+import com.google.mlkit.common.model.CustomRemoteModel;
+import com.google.mlkit.linkfirebase.FirebaseModelSource;
+import com.google.mlkit.vision.automl.demo.automl.AutoMLImageLabelerProcessor;
+import com.google.mlkit.vision.automl.demo.automl.AutoMLImageLabelerProcessor.Mode;
+import com.google.mlkit.vision.automl.demo.object.ObjectDetectorProcessor;
+import com.google.mlkit.vision.automl.demo.preference.PreferenceUtils;
+import com.google.mlkit.vision.automl.demo.preference.SettingsActivity;
+import com.google.mlkit.vision.label.custom.CustomImageLabelerOptions;
+import com.google.mlkit.vision.objects.custom.CustomObjectDetectorOptions;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/** Live preview demo for ML Kit APIs. */
+@KeepName
+public final class LivePreviewActivity extends AppCompatActivity
+ implements OnRequestPermissionsResultCallback,
+ OnItemSelectedListener,
+ CompoundButton.OnCheckedChangeListener {
+ private static final String CUSTOM_AUTOML_LABELING = "Custom AutoML Image Labeling";
+ private static final String CUSTOM_AUTOML_OBJECT_DETECTION = "Custom AutoML Object Detection";
+
+ private static final String TAG = "LivePreviewActivity";
+ private static final int PERMISSION_REQUESTS = 1;
+
+ private CameraSource cameraSource = null;
+ private CameraSourcePreview preview;
+ private GraphicOverlay graphicOverlay;
+ private String selectedModel = CUSTOM_AUTOML_LABELING;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ Log.d(TAG, "onCreate");
+
+ setContentView(R.layout.activity_live_preview);
+
+ preview = findViewById(R.id.preview);
+ if (preview == null) {
+ Log.d(TAG, "Preview is null");
+ }
+ graphicOverlay = findViewById(R.id.graphic_overlay);
+ if (graphicOverlay == null) {
+ Log.d(TAG, "graphicOverlay is null");
+ }
+
+ Spinner spinner = findViewById(R.id.spinner);
+ List<String> options = new ArrayList<>();
+ options.add(CUSTOM_AUTOML_LABELING);
+ options.add(CUSTOM_AUTOML_OBJECT_DETECTION);
+
+ // Creating adapter for spinner
+ ArrayAdapter<String> dataAdapter = new ArrayAdapter<>(this, R.layout.spinner_style, options);
+ // Drop down layout style - list view with radio button
+ dataAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+ // attaching data adapter to spinner
+ spinner.setAdapter(dataAdapter);
+ spinner.setOnItemSelectedListener(this);
+
+ ToggleButton facingSwitch = findViewById(R.id.facing_switch);
+ facingSwitch.setOnCheckedChangeListener(this);
+
+ ImageView settingsButton = findViewById(R.id.settings_button);
+ settingsButton.setOnClickListener(
+ v -> {
+ Intent intent = new Intent(getApplicationContext(), SettingsActivity.class);
+ intent.putExtra(SettingsActivity.EXTRA_LAUNCH_SOURCE,
+ SettingsActivity.LaunchSource.LIVE_PREVIEW);
+ startActivity(intent);
+ });
+
+ if (allPermissionsGranted()) {
+ createCameraSource(selectedModel);
+ } else {
+ getRuntimePermissions();
+ }
+ }
+
+ @Override
+ public synchronized void onItemSelected(AdapterView<?> parent, View view, int pos, long id) {
+ // An item was selected. You can retrieve the selected item using
+ // parent.getItemAtPosition(pos)
+ selectedModel = parent.getItemAtPosition(pos).toString();
+ Log.d(TAG, "Selected model: " + selectedModel);
+ preview.stop();
+ if (allPermissionsGranted()) {
+ createCameraSource(selectedModel);
+ startCameraSource();
+ } else {
+ getRuntimePermissions();
+ }
+ }
+
+ @Override
+ public void onNothingSelected(AdapterView<?> parent) {
+ // Do nothing.
+ }
+
+ @Override
+ public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
+ Log.d(TAG, "Set facing");
+ if (cameraSource != null) {
+ if (isChecked) {
+ cameraSource.setFacing(CameraSource.CAMERA_FACING_FRONT);
+ } else {
+ cameraSource.setFacing(CameraSource.CAMERA_FACING_BACK);
+ }
+ }
+ preview.stop();
+ startCameraSource();
+ }
+
+ private void createCameraSource(String model) {
+ // If there's no existing cameraSource, create one.
+ if (cameraSource == null) {
+ cameraSource = new CameraSource(this, graphicOverlay);
+ }
+
+ String autoMLRemoteModelName = PreferenceUtils.getAutoMLRemoteModelName(this);
+ try {
+ switch (model) {
+ case CUSTOM_AUTOML_LABELING:
+ Log.i(TAG, "Create Custom AutoML Image Label Processor");
+ CustomRemoteModel customRemoteModel =
+ new CustomRemoteModel.Builder(
+ new FirebaseModelSource.Builder(autoMLRemoteModelName).build())
+ .build();
+ CustomImageLabelerOptions customImageLabelerOptions =
+ new CustomImageLabelerOptions.Builder(customRemoteModel)
+ .setConfidenceThreshold(0)
+ .build();
+ cameraSource.setMachineLearningFrameProcessor(
+ new AutoMLImageLabelerProcessor(
+ this, customRemoteModel, customImageLabelerOptions, Mode.LIVE_PREVIEW));
+ break;
+ case CUSTOM_AUTOML_OBJECT_DETECTION:
+ Log.i(TAG, "Using Custom AutoML Object Detector Processor");
+ CustomRemoteModel customODTRemoteModel =
+ new CustomRemoteModel.Builder(
+ new FirebaseModelSource.Builder(autoMLRemoteModelName).build())
+ .build();
+ CustomObjectDetectorOptions customAutoMLODTOptions =
+ new CustomObjectDetectorOptions.Builder(customODTRemoteModel)
+ .setDetectorMode(CustomObjectDetectorOptions.STREAM_MODE)
+ .enableClassification()
+ .setClassificationConfidenceThreshold(0)
+ .setMaxPerObjectLabelCount(1)
+ .build();
+ cameraSource.setMachineLearningFrameProcessor(
+ new ObjectDetectorProcessor(this, customODTRemoteModel, customAutoMLODTOptions));
+ break;
+ default:
+ Log.e(TAG, "Unknown model: " + model);
+ }
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Can not create image processor: " + model, e);
+ Toast.makeText(
+ getApplicationContext(),
+ "Can not create image processor: " + e.getMessage(),
+ Toast.LENGTH_LONG)
+ .show();
+ }
+ }
+
+ /**
+ * Starts or restarts the camera source, if it exists. If the camera source doesn't exist yet
+ * (e.g., because onResume was called before the camera source was created), this will be called
+ * again when the camera source is created.
+ */
+ private void startCameraSource() {
+ if (cameraSource != null) {
+ try {
+ if (preview == null) {
+ Log.d(TAG, "resume: Preview is null");
+ }
+ if (graphicOverlay == null) {
+ Log.d(TAG, "resume: graphOverlay is null");
+ }
+ preview.start(cameraSource, graphicOverlay);
+ } catch (IOException e) {
+ Log.e(TAG, "Unable to start camera source.", e);
+ cameraSource.release();
+ cameraSource = null;
+ }
+ }
+ }
+
+ @Override
+ public void onResume() {
+ super.onResume();
+ Log.d(TAG, "onResume");
+ createCameraSource(selectedModel);
+ startCameraSource();
+ }
+
+ /** Stops the camera. */
+ @Override
+ protected void onPause() {
+ super.onPause();
+ preview.stop();
+ }
+
+ @Override
+ public void onDestroy() {
+ super.onDestroy();
+ if (cameraSource != null) {
+ cameraSource.release();
+ }
+ }
+
+ private String[] getRequiredPermissions() {
+ try {
+ PackageInfo info =
+ this.getPackageManager()
+ .getPackageInfo(this.getPackageName(), PackageManager.GET_PERMISSIONS);
+ String[] ps = info.requestedPermissions;
+ if (ps != null && ps.length > 0) {
+ return ps;
+ } else {
+ return new String[0];
+ }
+ } catch (Exception e) {
+ return new String[0];
+ }
+ }
+
+ private boolean allPermissionsGranted() {
+ for (String permission : getRequiredPermissions()) {
+ if (!isPermissionGranted(this, permission)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private void getRuntimePermissions() {
+ List<String> allNeededPermissions = new ArrayList<>();
+ for (String permission : getRequiredPermissions()) {
+ if (!isPermissionGranted(this, permission)) {
+ allNeededPermissions.add(permission);
+ }
+ }
+
+ if (!allNeededPermissions.isEmpty()) {
+ ActivityCompat.requestPermissions(
+ this, allNeededPermissions.toArray(new String[0]), PERMISSION_REQUESTS);
+ }
+ }
+
+ @Override
+ public void onRequestPermissionsResult(
+ int requestCode, String[] permissions, int[] grantResults) {
+ Log.i(TAG, "Permission granted!");
+ if (allPermissionsGranted()) {
+ createCameraSource(selectedModel);
+ }
+ super.onRequestPermissionsResult(requestCode, permissions, grantResults);
+ }
+
+ private static boolean isPermissionGranted(Context context, String permission) {
+ if (ContextCompat.checkSelfPermission(context, permission)
+ == PackageManager.PERMISSION_GRANTED) {
+ Log.i(TAG, "Permission granted: " + permission);
+ return true;
+ }
+ Log.i(TAG, "Permission NOT granted: " + permission);
+ return false;
+ }
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/ScopedExecutor.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/ScopedExecutor.java
new file mode 100644
index 0000000..e1810fd
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/ScopedExecutor.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import androidx.annotation.NonNull;
+import java.util.concurrent.Executor;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * Wraps an existing executor to provide a {@link #shutdown} method that allows subsequent
+ * cancellation of submitted runnables.
+ */
+public class ScopedExecutor implements Executor {
+
+ private final Executor executor;
+ private final AtomicBoolean shutdown = new AtomicBoolean();
+
+ public ScopedExecutor(@NonNull Executor executor) {
+ this.executor = executor;
+ }
+
+ @Override
+ public void execute(@NonNull Runnable command) {
+ // Return early if this object has been shut down.
+ if (shutdown.get()) {
+ return;
+ }
+ executor.execute(
+ () -> {
+ // Check again in case it has been shut down in the meantime.
+ if (shutdown.get()) {
+ return;
+ }
+ command.run();
+ });
+ }
+
+ /**
+ * After this method is called, no runnables that have been submitted or are subsequently
+ * submitted will start to execute, turning this executor into a no-op.
+ *
+ * <p>Runnables that have already started to execute will continue.
+ */
+ public void shutdown() {
+ shutdown.set(true);
+ }
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/StillImageActivity.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/StillImageActivity.java
new file mode 100644
index 0000000..6cd349f
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/StillImageActivity.java
@@ -0,0 +1,380 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import static java.lang.Math.max;
+
+import android.content.ContentValues;
+import android.content.Intent;
+import android.content.res.Configuration;
+import android.graphics.Bitmap;
+import android.net.Uri;
+import android.os.Bundle;
+import android.provider.MediaStore;
+import androidx.appcompat.app.AppCompatActivity;
+import android.util.Log;
+import android.util.Pair;
+import android.view.MenuInflater;
+import android.view.View;
+import android.view.ViewTreeObserver.OnGlobalLayoutListener;
+import android.widget.AdapterView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.ArrayAdapter;
+import android.widget.ImageView;
+import android.widget.PopupMenu;
+import android.widget.Spinner;
+import android.widget.Toast;
+import com.google.android.gms.common.annotation.KeepName;
+import com.google.mlkit.common.model.CustomRemoteModel;
+import com.google.mlkit.linkfirebase.FirebaseModelSource;
+import com.google.mlkit.vision.automl.demo.automl.AutoMLImageLabelerProcessor;
+import com.google.mlkit.vision.automl.demo.automl.AutoMLImageLabelerProcessor.Mode;
+import com.google.mlkit.vision.automl.demo.object.ObjectDetectorProcessor;
+import com.google.mlkit.vision.automl.demo.preference.PreferenceUtils;
+import com.google.mlkit.vision.automl.demo.preference.SettingsActivity;
+import com.google.mlkit.vision.automl.demo.preference.SettingsActivity.LaunchSource;
+import com.google.mlkit.vision.label.custom.CustomImageLabelerOptions;
+import com.google.mlkit.vision.objects.custom.CustomObjectDetectorOptions;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/** Activity demonstrating different image detector features with a still image from camera. */
+@KeepName
+public final class StillImageActivity extends AppCompatActivity {
+
+ private static final String TAG = "StillImageActivity";
+
+ private static final String CUSTOM_AUTOML_LABELING = "Custom AutoML Image Labeling";
+ private static final String CUSTOM_AUTOML_OBJECT_DETECTION = "Custom AutoML Object Detection";
+
+ private static final String SIZE_SCREEN = "w:screen"; // Match screen width
+ private static final String SIZE_1024_768 = "w:1024"; // ~1024*768 in a normal ratio
+ private static final String SIZE_640_480 = "w:640"; // ~640*480 in a normal ratio
+
+ private static final String KEY_IMAGE_URI = "com.google.mlkit.vision.automl.demo.KEY_IMAGE_URI";
+ private static final String KEY_SELECTED_SIZE =
+ "com.google.mlkit.vision.automl.demo.KEY_SELECTED_SIZE";
+
+ private static final int REQUEST_IMAGE_CAPTURE = 1001;
+ private static final int REQUEST_CHOOSE_IMAGE = 1002;
+
+ private ImageView preview;
+ private GraphicOverlay graphicOverlay;
+ private String selectedMode = CUSTOM_AUTOML_LABELING;
+ private String selectedSize = SIZE_SCREEN;
+
+ boolean isLandScape;
+
+ private Uri imageUri;
+ private int imageMaxWidth;
+ private int imageMaxHeight;
+ private VisionImageProcessor imageProcessor;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ setContentView(R.layout.activity_still_image);
+
+ findViewById(R.id.select_image_button)
+ .setOnClickListener(
+ view -> {
+ // Menu for selecting either: a) take new photo b) select from existing
+ PopupMenu popup = new PopupMenu(StillImageActivity.this, view);
+ popup.setOnMenuItemClickListener(
+ menuItem -> {
+ int itemId = menuItem.getItemId();
+ if (itemId == R.id.select_images_from_local) {
+ startChooseImageIntentForResult();
+ return true;
+ } else if (itemId == R.id.take_photo_using_camera) {
+ startCameraIntentForResult();
+ return true;
+ }
+ return false;
+ });
+ MenuInflater inflater = popup.getMenuInflater();
+ inflater.inflate(R.menu.camera_button_menu, popup.getMenu());
+ popup.show();
+ });
+ preview = findViewById(R.id.preview);
+ graphicOverlay = findViewById(R.id.graphic_overlay);
+
+ populateFeatureSelector();
+ populateSizeSelector();
+
+ isLandScape =
+ (getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE);
+
+ if (savedInstanceState != null) {
+ imageUri = savedInstanceState.getParcelable(KEY_IMAGE_URI);
+ selectedSize = savedInstanceState.getString(KEY_SELECTED_SIZE);
+ }
+
+ View rootView = findViewById(R.id.root);
+ rootView.getViewTreeObserver().addOnGlobalLayoutListener(new OnGlobalLayoutListener() {
+ @Override
+ public void onGlobalLayout() {
+ rootView.getViewTreeObserver().removeOnGlobalLayoutListener(this);
+ imageMaxWidth = rootView.getWidth();
+ imageMaxHeight = rootView.getHeight() - findViewById(R.id.control).getHeight();
+ if (SIZE_SCREEN.equals(selectedSize)) {
+ tryReloadAndDetectInImage();
+ }
+ }
+ });
+
+ ImageView settingsButton = findViewById(R.id.settings_button);
+ settingsButton.setOnClickListener(
+ v -> {
+ Intent intent = new Intent(getApplicationContext(), SettingsActivity.class);
+ intent.putExtra(SettingsActivity.EXTRA_LAUNCH_SOURCE, LaunchSource.STILL_IMAGE);
+ startActivity(intent);
+ });
+ }
+
+ @Override
+ public void onResume() {
+ super.onResume();
+ Log.d(TAG, "onResume");
+ createImageProcessor();
+ tryReloadAndDetectInImage();
+ }
+
+ private void populateFeatureSelector() {
+ Spinner featureSpinner = findViewById(R.id.feature_selector);
+ List<String> options = new ArrayList<>();
+ options.add(CUSTOM_AUTOML_LABELING);
+ options.add(CUSTOM_AUTOML_OBJECT_DETECTION);
+
+ // Creating adapter for featureSpinner
+ ArrayAdapter<String> dataAdapter = new ArrayAdapter<>(this, R.layout.spinner_style, options);
+ // Drop down layout style - list view with radio button
+ dataAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+ // attaching data adapter to spinner
+ featureSpinner.setAdapter(dataAdapter);
+ featureSpinner.setOnItemSelectedListener(
+ new OnItemSelectedListener() {
+
+ @Override
+ public void onItemSelected(
+ AdapterView<?> parentView, View selectedItemView, int pos, long id) {
+ selectedMode = parentView.getItemAtPosition(pos).toString();
+ createImageProcessor();
+ tryReloadAndDetectInImage();
+ }
+
+ @Override
+ public void onNothingSelected(AdapterView<?> arg0) {}
+ });
+ }
+
+ private void populateSizeSelector() {
+ Spinner sizeSpinner = findViewById(R.id.size_selector);
+ List<String> options = new ArrayList<>();
+ options.add(SIZE_SCREEN);
+ options.add(SIZE_1024_768);
+ options.add(SIZE_640_480);
+
+ // Creating adapter for sizeSpinner
+ ArrayAdapter<String> dataAdapter = new ArrayAdapter<>(this, R.layout.spinner_style, options);
+ // Drop down layout style - list view with radio button
+ dataAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+ // attaching data adapter to spinner
+ sizeSpinner.setAdapter(dataAdapter);
+ sizeSpinner.setOnItemSelectedListener(
+ new OnItemSelectedListener() {
+
+ @Override
+ public void onItemSelected(
+ AdapterView<?> parentView, View selectedItemView, int pos, long id) {
+ selectedSize = parentView.getItemAtPosition(pos).toString();
+ createImageProcessor();
+ tryReloadAndDetectInImage();
+ }
+
+ @Override
+ public void onNothingSelected(AdapterView<?> arg0) {}
+ });
+ }
+
+ @Override
+ public void onSaveInstanceState(Bundle outState) {
+ super.onSaveInstanceState(outState);
+ outState.putParcelable(KEY_IMAGE_URI, imageUri);
+ outState.putString(KEY_SELECTED_SIZE, selectedSize);
+ }
+
+ private void startCameraIntentForResult() {
+ // Clean up the previously captured image
+ imageUri = null;
+ preview.setImageBitmap(null);
+
+ Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
+ if (takePictureIntent.resolveActivity(getPackageManager()) != null) {
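+ // Insert a MediaStore record up front so the camera app has a content URI to write the
+ // full-size photo to (passed via EXTRA_OUTPUT below).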
+ ContentValues values = new ContentValues();
+ values.put(MediaStore.Images.Media.TITLE, "New Picture");
+ values.put(MediaStore.Images.Media.DESCRIPTION, "From Camera");
+ imageUri = getContentResolver().insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
+ takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, imageUri);
+ startActivityForResult(takePictureIntent, REQUEST_IMAGE_CAPTURE);
+ }
+ }
+
+ private void startChooseImageIntentForResult() {
+ Intent intent = new Intent();
+ intent.setType("image/*");
+ intent.setAction(Intent.ACTION_GET_CONTENT);
+ startActivityForResult(Intent.createChooser(intent, "Select Picture"), REQUEST_CHOOSE_IMAGE);
+ }
+
+ @Override
+ protected void onActivityResult(int requestCode, int resultCode, Intent data) {
+ if (requestCode == REQUEST_IMAGE_CAPTURE && resultCode == RESULT_OK) {
+ tryReloadAndDetectInImage();
+ } else if (requestCode == REQUEST_CHOOSE_IMAGE && resultCode == RESULT_OK) {
+ // In this case, imageUri is returned by the chooser; save it.
+ imageUri = data.getData();
+ tryReloadAndDetectInImage();
+ } else {
+ super.onActivityResult(requestCode, resultCode, data);
+ }
+ }
+
+ private void tryReloadAndDetectInImage() {
+ Log.d(TAG, "Try reload and detect image");
+ try {
+ if (imageUri == null) {
+ return;
+ }
+
+ if (SIZE_SCREEN.equals(selectedSize) && imageMaxWidth == 0) {
+ // UI layout has not finished yet, will reload once it's ready.
+ return;
+ }
+
+ Bitmap imageBitmap = BitmapUtils.getBitmapFromContentUri(getContentResolver(), imageUri);
+ if (imageBitmap == null) {
+ return;
+ }
+
+ // Clear the overlay first
+ graphicOverlay.clear();
+
+ // Get the dimensions of the image view
+ Pair<Integer, Integer> targetedSize = getTargetedWidthHeight();
+
+ // Determine how much to scale down the image
+ float scaleFactor =
+ max(
+ (float) imageBitmap.getWidth() / (float) targetedSize.first,
+ (float) imageBitmap.getHeight() / (float) targetedSize.second);
+
+ Bitmap resizedBitmap =
+ Bitmap.createScaledBitmap(
+ imageBitmap,
+ (int) (imageBitmap.getWidth() / scaleFactor),
+ (int) (imageBitmap.getHeight() / scaleFactor),
+ true);
+
+ preview.setImageBitmap(resizedBitmap);
+
+ if (imageProcessor != null) {
+ graphicOverlay.setImageSourceInfo(
+ resizedBitmap.getWidth(), resizedBitmap.getHeight(), /* isFlipped= */ false);
+ imageProcessor.processBitmap(resizedBitmap, graphicOverlay);
+ } else {
+ Log.e(TAG, "Null imageProcessor, please check adb logs for imageProcessor creation error");
+ }
+ } catch (IOException e) {
+ Log.e(TAG, "Error retrieving saved image");
+ imageUri = null;
+ }
+ }
+
+ private Pair<Integer, Integer> getTargetedWidthHeight() {
+ int targetWidth;
+ int targetHeight;
+
+ switch (selectedSize) {
+ case SIZE_SCREEN:
+ targetWidth = imageMaxWidth;
+ targetHeight = imageMaxHeight;
+ break;
+ case SIZE_640_480:
+ targetWidth = isLandScape ? 640 : 480;
+ targetHeight = isLandScape ? 480 : 640;
+ break;
+ case SIZE_1024_768:
+ targetWidth = isLandScape ? 1024 : 768;
+ targetHeight = isLandScape ? 768 : 1024;
+ break;
+ default:
+ throw new IllegalStateException("Unknown size");
+ }
+
+ return new Pair<>(targetWidth, targetHeight);
+ }
+
+ private void createImageProcessor() {
+ try {
+ String autoMLRemoteModelName = PreferenceUtils.getAutoMLRemoteModelName(this);
+ switch (selectedMode) {
+ case CUSTOM_AUTOML_LABELING:
+ Log.i(TAG, "Create Custom AutoML Image Label Processor");
+ CustomRemoteModel customRemoteModel =
+ new CustomRemoteModel.Builder(
+ new FirebaseModelSource.Builder(autoMLRemoteModelName).build())
+ .build();
+ CustomImageLabelerOptions customImageLabelerOptions =
+ new CustomImageLabelerOptions.Builder(customRemoteModel)
+ .setConfidenceThreshold(0)
+ .build();
+ imageProcessor =
+ new AutoMLImageLabelerProcessor(
+ this, customRemoteModel, customImageLabelerOptions, Mode.STILL_IMAGE);
+ break;
+ case CUSTOM_AUTOML_OBJECT_DETECTION:
+ Log.i(TAG, "Using Custom AutoML Object Detector Processor");
+ CustomRemoteModel customODTRemoteModel =
+ new CustomRemoteModel.Builder(
+ new FirebaseModelSource.Builder(autoMLRemoteModelName).build())
+ .build();
+ CustomObjectDetectorOptions customAutoMLODTOptions =
+ new CustomObjectDetectorOptions.Builder(customODTRemoteModel)
+ .setDetectorMode(CustomObjectDetectorOptions.SINGLE_IMAGE_MODE)
+ .enableClassification()
+ .setClassificationConfidenceThreshold(0)
+ .setMaxPerObjectLabelCount(1)
+ .build();
+ imageProcessor =
+ new ObjectDetectorProcessor(this, customODTRemoteModel, customAutoMLODTOptions);
+ break;
+ default:
+ Log.e(TAG, "Unknown selectedMode: " + selectedMode);
+ }
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Can not create image processor: " + selectedMode, e);
+ Toast.makeText(
+ getApplicationContext(),
+ "Can not create image processor: " + e.getMessage(),
+ Toast.LENGTH_LONG)
+ .show();
+ }
+ }
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/VisionImageProcessor.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/VisionImageProcessor.java
new file mode 100644
index 0000000..e305ded
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/VisionImageProcessor.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import android.graphics.Bitmap;
+import android.os.Build.VERSION_CODES;
+import androidx.annotation.RequiresApi;
+import androidx.camera.core.ImageProxy;
+import com.google.mlkit.common.MlKitException;
+import java.nio.ByteBuffer;
+
+/** An interface to process the images with different vision detectors and custom image models. */
+public interface VisionImageProcessor {
+
+ /** Processes a bitmap image. */
+ void processBitmap(Bitmap bitmap, GraphicOverlay graphicOverlay);
+
+ /** Processes ByteBuffer image data, e.g. used for Camera1 live preview case. */
+ void processByteBuffer(
+ ByteBuffer data, FrameMetadata frameMetadata, GraphicOverlay graphicOverlay)
+ throws MlKitException;
+
+ /** Processes ImageProxy image data, e.g. used for CameraX live preview case. */
+ @RequiresApi(VERSION_CODES.KITKAT)
+ void processImageProxy(ImageProxy image, GraphicOverlay graphicOverlay)
+ throws MlKitException;
+
+ /** Stops the underlying machine learning model and releases resources. */
+ void stop();
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/VisionProcessorBase.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/VisionProcessorBase.java
new file mode 100644
index 0000000..4716c31
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/VisionProcessorBase.java
@@ -0,0 +1,247 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo;
+
+import android.app.ActivityManager;
+import android.app.ActivityManager.MemoryInfo;
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.os.Build.VERSION_CODES;
+import android.os.SystemClock;
+import android.util.Log;
+import android.widget.Toast;
+import androidx.annotation.GuardedBy;
+import androidx.annotation.NonNull;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.camera.core.ExperimentalGetImage;
+import androidx.camera.core.ImageProxy;
+import com.google.android.gms.tasks.Task;
+import com.google.android.gms.tasks.TaskExecutors;
+import com.google.mlkit.vision.automl.demo.preference.PreferenceUtils;
+import com.google.mlkit.vision.common.InputImage;
+import java.nio.ByteBuffer;
+import java.util.Timer;
+import java.util.TimerTask;
+
+/**
+ * Abstract base class for vision frame processors. Subclasses need to implement {@link
+ * #onSuccess(Object, GraphicOverlay)} to define what they want to do with the detection results
+ * and {@link #detectInImage(InputImage)} to specify the detector object.
+ *
+ * @param <T> The type of the detected feature.
+ */
+public abstract class VisionProcessorBase<T> implements VisionImageProcessor {
+
+ protected static final String MANUAL_TESTING_LOG = "LogTagForTest";
+ private static final String TAG = "VisionProcessorBase";
+
+ private final ActivityManager activityManager;
+ private final Timer fpsTimer = new Timer();
+ private final ScopedExecutor executor;
+ private final Toast toast;
+
+ // Whether this processor is already shut down
+ private boolean isShutdown;
+
+ // Used to calculate latency, running in the same thread, no sync needed.
+ private int numRuns = 0;
+ private long totalRunMs = 0;
+ private long maxRunMs = 0;
+ private long minRunMs = Long.MAX_VALUE;
+
+ // Number of frames processed so far in the current one-second interval, used to calculate FPS.
+ private int frameProcessedInOneSecondInterval = 0;
+ private int framesPerSecond = 0;
+
+ // To keep the latest image and its metadata.
+ @GuardedBy("this")
+ private ByteBuffer latestImage;
+
+ @GuardedBy("this")
+ private FrameMetadata latestImageMetaData;
+ // To keep the images and metadata in process.
+ @GuardedBy("this")
+ private ByteBuffer processingImage;
+
+ @GuardedBy("this")
+ private FrameMetadata processingMetaData;
+
+ protected VisionProcessorBase(Context context) {
+ activityManager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
+ executor = new ScopedExecutor(TaskExecutors.MAIN_THREAD);
+ fpsTimer.scheduleAtFixedRate(
+ new TimerTask() {
+ @Override
+ public void run() {
+ framesPerSecond = frameProcessedInOneSecondInterval;
+ frameProcessedInOneSecondInterval = 0;
+ }
+ },
+ /* delay= */ 0,
+ /* period= */ 1000);
+ toast = Toast.makeText(context, "", Toast.LENGTH_LONG);
+ }
+
+ // -----------------Code for processing single still image----------------------------------------
+ @Override
+ public void processBitmap(Bitmap bitmap, final GraphicOverlay graphicOverlay) {
+ requestDetectInImage(
+ InputImage.fromBitmap(bitmap, 0),
+ graphicOverlay,
+ /* originalCameraImage= */ null,
+ /* shouldShowFps= */ false);
+ }
+
+ // -----------------Code for processing live preview frame from Camera1 API-----------------------
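+ // The two methods below implement a simple frame-dropping scheme: only the most recent frame
+ // is kept in latestImage, and a new detection is started only when no other frame is being
+ // processed. Frames that arrive while the detector is busy overwrite the pending one, so the
+ // preview never falls behind the camera.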
+ @Override
+ public synchronized void processByteBuffer(
+ ByteBuffer data, final FrameMetadata frameMetadata, final GraphicOverlay graphicOverlay) {
+ latestImage = data;
+ latestImageMetaData = frameMetadata;
+ if (processingImage == null && processingMetaData == null) {
+ processLatestImage(graphicOverlay);
+ }
+ }
+
+ private synchronized void processLatestImage(final GraphicOverlay graphicOverlay) {
+ processingImage = latestImage;
+ processingMetaData = latestImageMetaData;
+ latestImage = null;
+ latestImageMetaData = null;
+ if (processingImage != null && processingMetaData != null && !isShutdown) {
+ processImage(processingImage, processingMetaData, graphicOverlay);
+ }
+ }
+
+ private void processImage(
+ ByteBuffer data, final FrameMetadata frameMetadata, final GraphicOverlay graphicOverlay) {
+ // If live viewport is on (that is, the underlying surface view takes care of drawing the camera
+ // preview), skip the unnecessary bitmap creation that is used for the manual preview drawing.
+ Bitmap bitmap =
+ PreferenceUtils.isCameraLiveViewportEnabled(graphicOverlay.getContext())
+ ? null
+ : BitmapUtils.getBitmap(data, frameMetadata);
+
+ requestDetectInImage(
+ InputImage.fromByteBuffer(
+ data,
+ frameMetadata.getWidth(),
+ frameMetadata.getHeight(),
+ frameMetadata.getRotation(),
+ InputImage.IMAGE_FORMAT_NV21),
+ graphicOverlay,
+ bitmap,
+ /* shouldShowFps= */ true)
+ .addOnSuccessListener(executor, results -> processLatestImage(graphicOverlay));
+ }
+
+ // -----------------Code for processing live preview frame from CameraX API-----------------------
+ @Override
+ @RequiresApi(VERSION_CODES.LOLLIPOP)
+ @ExperimentalGetImage
+ public void processImageProxy(ImageProxy image, GraphicOverlay graphicOverlay) {
+ if (isShutdown) {
+ image.close();
+ return;
+ }
+
+ Bitmap bitmap = null;
+ if (!PreferenceUtils.isCameraLiveViewportEnabled(graphicOverlay.getContext())) {
+ bitmap = BitmapUtils.getBitmap(image);
+ }
+
+ requestDetectInImage(
+ InputImage.fromMediaImage(image.getImage(), image.getImageInfo().getRotationDegrees()),
+ graphicOverlay,
+ /* originalCameraImage= */ bitmap,
+ /* shouldShowFps= */ true)
+ // When the image is from CameraX analysis use case, must call image.close() on received
+ // images when finished using them. Otherwise, new images may not be received or the camera
+ // may stall.
+ .addOnCompleteListener(results -> image.close());
+ }
+
+ // -----------------Common processing logic-------------------------------------------------------
+ private Task<T> requestDetectInImage(
+ final InputImage image,
+ final GraphicOverlay graphicOverlay,
+ @Nullable final Bitmap originalCameraImage,
+ boolean shouldShowFps) {
+ final long startMs = SystemClock.elapsedRealtime();
+ return detectInImage(image)
+ .addOnSuccessListener(
+ executor,
+ results -> {
+ long currentLatencyMs = SystemClock.elapsedRealtime() - startMs;
+ numRuns++;
+ frameProcessedInOneSecondInterval++;
+ totalRunMs += currentLatencyMs;
+ maxRunMs = Math.max(currentLatencyMs, maxRunMs);
+ minRunMs = Math.min(currentLatencyMs, minRunMs);
+
+ // Only log inference info once per second. When frameProcessedInOneSecondInterval is
+ // equal to 1, it means this is the first frame processed during the current second.
+ if (frameProcessedInOneSecondInterval == 1) {
+ Log.d(TAG, "Max latency is: " + maxRunMs);
+ Log.d(TAG, "Min latency is: " + minRunMs);
+ Log.d(TAG, "Num of Runs: " + numRuns + ", Avg latency is: " + totalRunMs / numRuns);
+ MemoryInfo mi = new MemoryInfo();
+ activityManager.getMemoryInfo(mi);
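+ // 0x100000 bytes = 1024 * 1024, i.e. this converts bytes to megabytes.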
+ long availableMegs = mi.availMem / 0x100000L;
+ Log.d(TAG, "Memory available in system: " + availableMegs + " MB");
+ }
+
+ graphicOverlay.clear();
+ if (originalCameraImage != null) {
+ graphicOverlay.add(new CameraImageGraphic(graphicOverlay, originalCameraImage));
+ }
+ VisionProcessorBase.this.onSuccess(results, graphicOverlay);
+ graphicOverlay.add(
+ new InferenceInfoGraphic(
+ graphicOverlay, currentLatencyMs, shouldShowFps ? framesPerSecond : null));
+ graphicOverlay.postInvalidate();
+ })
+ .addOnFailureListener(
+ executor,
+ e -> {
+ graphicOverlay.clear();
+ graphicOverlay.postInvalidate();
+ String error = "Failed to process. Error: " + e.getLocalizedMessage();
+ toast.setText(error + "\nCause: " + e.getCause());
+ toast.show();
+ Log.d(TAG, error);
+ e.printStackTrace();
+ VisionProcessorBase.this.onFailure(e);
+ });
+ }
+
+ @Override
+ public void stop() {
+ executor.shutdown();
+ isShutdown = true;
+ numRuns = 0;
+ totalRunMs = 0;
+ fpsTimer.cancel();
+ }
+
+ protected abstract Task<T> detectInImage(InputImage image);
+
+ protected abstract void onSuccess(@NonNull T results, @NonNull GraphicOverlay graphicOverlay);
+
+ protected abstract void onFailure(@NonNull Exception e);
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/automl/AutoMLImageLabelerProcessor.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/automl/AutoMLImageLabelerProcessor.java
new file mode 100644
index 0000000..ca23a9e
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/automl/AutoMLImageLabelerProcessor.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo.automl;
+
+import android.content.Context;
+import android.util.Log;
+import android.widget.Toast;
+import androidx.annotation.NonNull;
+import com.google.android.gms.tasks.Task;
+import com.google.android.gms.tasks.Tasks;
+import com.google.mlkit.common.model.DownloadConditions;
+import com.google.mlkit.common.model.RemoteModel;
+import com.google.mlkit.common.model.RemoteModelManager;
+import com.google.mlkit.vision.automl.demo.GraphicOverlay;
+import com.google.mlkit.vision.automl.demo.VisionProcessorBase;
+import com.google.mlkit.vision.common.InputImage;
+import com.google.mlkit.vision.label.ImageLabel;
+import com.google.mlkit.vision.label.ImageLabeler;
+import com.google.mlkit.vision.label.ImageLabelerOptionsBase;
+import com.google.mlkit.vision.label.ImageLabeling;
+import java.util.ArrayList;
+import java.util.List;
+
+/** AutoML image labeler demo. */
+public class AutoMLImageLabelerProcessor extends VisionProcessorBase<List<ImageLabel>> {
+
+ private static final String TAG = "AutoMLProcessor";
+ private final ImageLabeler imageLabeler;
+ private final Context context;
+ private final Task<Void> modelDownloadingTask;
+
+ private final Mode mode;
+
+ public AutoMLImageLabelerProcessor(
+ Context context, RemoteModel remoteModel, ImageLabelerOptionsBase options, Mode mode) {
+ super(context);
+ this.mode = mode;
+ this.context = context;
+ imageLabeler = ImageLabeling.getClient(options);
+
+ DownloadConditions downloadConditions = new DownloadConditions.Builder().requireWifi().build();
+ modelDownloadingTask =
+ RemoteModelManager.getInstance()
+ .download(remoteModel, downloadConditions)
+ .addOnFailureListener(
+ ignored ->
+ Toast.makeText(
+ context,
+ "Model download failed for AutoMLImageLabelerImpl,"
+ + " please check your connection.",
+ Toast.LENGTH_LONG)
+ .show());
+ }
+
+ @Override
+ public void stop() {
+ super.stop();
+ imageLabeler.close();
+ }
+
+ @Override
+ protected Task<List<ImageLabel>> detectInImage(InputImage image) {
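+ // While the remote model is still downloading, live preview returns an empty result so the
+ // camera stream stays responsive; still-image mode instead waits for the download to finish.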
+ if (!modelDownloadingTask.isComplete()) {
+ if (mode == Mode.LIVE_PREVIEW) {
+ Log.i(TAG, "Model download is in progress. Skip detecting image.");
+ return Tasks.forResult(new ArrayList<>());
+ } else {
+ Log.i(TAG, "Model download is in progress. Waiting...");
+ return modelDownloadingTask.continueWithTask(task -> processImageOnDownloadComplete(image));
+ }
+ } else {
+ return processImageOnDownloadComplete(image);
+ }
+ }
+
+ private Task<List<ImageLabel>> processImageOnDownloadComplete(InputImage image) {
+ if (modelDownloadingTask != null && modelDownloadingTask.isSuccessful()) {
+ if (imageLabeler == null) {
+ Log.e(TAG, "Image labeler has not been initialized; skipping detection.");
+ Toast.makeText(context, "Image labeler is not initialized.", Toast.LENGTH_SHORT).show();
+ return Tasks.forResult(new ArrayList<>());
+ }
+ return imageLabeler.process(image);
+ } else {
+ String downloadingError = "Error downloading remote model.";
+ Log.e(TAG, downloadingError, modelDownloadingTask.getException());
+ Toast.makeText(context, downloadingError, Toast.LENGTH_SHORT).show();
+ return Tasks.forException(
+ new Exception("Failed to download remote model.", modelDownloadingTask.getException()));
+ }
+ }
+
+ @Override
+ protected void onSuccess(
+ @NonNull List<ImageLabel> labels, @NonNull GraphicOverlay graphicOverlay) {
+ graphicOverlay.add(new LabelGraphic(graphicOverlay, labels));
+ }
+
+ @Override
+ protected void onFailure(@NonNull Exception e) {
+ Log.w(TAG, "Label detection failed.", e);
+ }
+
+ /**
+ * The detection mode of the processor. Different modes differ in whether they wait for the
+ * model download to complete before processing.
+ */
+ public enum Mode {
+ STILL_IMAGE,
+ LIVE_PREVIEW
+ }
+}
+
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/automl/LabelGraphic.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/automl/LabelGraphic.java
new file mode 100644
index 0000000..e0ddedc
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/automl/LabelGraphic.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo.automl;
+
+import static com.google.common.primitives.Floats.max;
+import static java.lang.Math.max;
+
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import com.google.mlkit.vision.automl.demo.GraphicOverlay;
+import com.google.mlkit.vision.label.ImageLabel;
+import java.util.List;
+import java.util.Locale;
+
+/** Graphic instance for rendering a label within an associated graphic overlay view. */
+public class LabelGraphic extends GraphicOverlay.Graphic {
+
+ private static final float TEXT_SIZE = 70.0f;
+ private static final String LABEL_FORMAT = "%.2f%% confidence (index: %d)";
+
+ private final Paint textPaint;
+ private final Paint labelPaint;
+ private final GraphicOverlay overlay;
+
+ private final List<ImageLabel> labels;
+
+ public LabelGraphic(GraphicOverlay overlay, List<ImageLabel> labels) {
+ super(overlay);
+ this.overlay = overlay;
+ this.labels = labels;
+ textPaint = new Paint();
+ textPaint.setColor(Color.WHITE);
+ textPaint.setTextSize(TEXT_SIZE);
+
+ labelPaint = new Paint();
+ labelPaint.setColor(Color.BLACK);
+ labelPaint.setStyle(Paint.Style.FILL);
+ labelPaint.setAlpha(200);
+ }
+
+ @Override
+ public synchronized void draw(Canvas canvas) {
+ // First try to find maxWidth and totalHeight in order to draw to the center of the screen.
+ float maxWidth = 0;
+ float totalHeight = labels.size() * 2 * TEXT_SIZE;
+ for (ImageLabel label : labels) {
+ float line1Width = textPaint.measureText(label.getText());
+ float line2Width = textPaint.measureText(
+ String.format(Locale.US, LABEL_FORMAT, label.getConfidence() * 100, label.getIndex()));
+ maxWidth = max(maxWidth, line1Width, line2Width);
+ }
+ float x = max(0, overlay.getWidth() / 2.0f - maxWidth / 2.0f);
+ float y = max(200, overlay.getHeight() / 2.0f - totalHeight / 2.0f);
+
+ if (!labels.isEmpty()) {
+ float padding = 20;
+ canvas.drawRect(x - padding,
+ y - padding,
+ x + maxWidth + padding,
+ y + totalHeight + padding,
+ labelPaint);
+ }
+
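+ // Each label takes two lines: the label text, then its confidence and index. Stop early when
+ // the next pair of lines would run off the bottom of the overlay.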
+ for (ImageLabel label : labels) {
+ if (y + TEXT_SIZE * 2 > overlay.getHeight()) {
+ break;
+ }
+ canvas.drawText(label.getText(), x, y + TEXT_SIZE, textPaint);
+ y += TEXT_SIZE;
+ canvas.drawText(
+ String.format(Locale.US, LABEL_FORMAT, label.getConfidence() * 100, label.getIndex()),
+ x, y + TEXT_SIZE, textPaint);
+ y += TEXT_SIZE;
+ }
+ }
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/object/ObjectDetectorProcessor.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/object/ObjectDetectorProcessor.java
new file mode 100755
index 0000000..d79f5bf
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/object/ObjectDetectorProcessor.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo.object;
+
+import android.content.Context;
+import android.util.Log;
+import android.widget.Toast;
+import androidx.annotation.NonNull;
+import com.google.android.gms.tasks.Task;
+import com.google.android.gms.tasks.Tasks;
+import com.google.mlkit.common.model.DownloadConditions;
+import com.google.mlkit.common.model.RemoteModel;
+import com.google.mlkit.common.model.RemoteModelManager;
+import com.google.mlkit.vision.automl.demo.GraphicOverlay;
+import com.google.mlkit.vision.automl.demo.VisionProcessorBase;
+import com.google.mlkit.vision.common.InputImage;
+import com.google.mlkit.vision.objects.DetectedObject;
+import com.google.mlkit.vision.objects.ObjectDetection;
+import com.google.mlkit.vision.objects.ObjectDetector;
+import com.google.mlkit.vision.objects.ObjectDetectorOptionsBase;
+import java.util.ArrayList;
+import java.util.List;
+
+/** A processor that runs the object detector on input images. */
+public class ObjectDetectorProcessor extends VisionProcessorBase<List<DetectedObject>> {
+
+ private static final String TAG = "ObjectDetectorProcessor";
+ private final ObjectDetector detector;
+ private final Context context;
+ private final Task<Void> modelDownloadingTask;
+ private final int detectorMode;
+
+ public ObjectDetectorProcessor(
+ Context context, RemoteModel remoteModel, ObjectDetectorOptionsBase options) {
+ super(context);
+ this.detectorMode = options.getDetectorMode();
+ this.context = context;
+ detector = ObjectDetection.getClient(options);
+
+ DownloadConditions downloadConditions = new DownloadConditions.Builder().requireWifi().build();
+ modelDownloadingTask =
+ RemoteModelManager.getInstance()
+ .download(remoteModel, downloadConditions)
+ .addOnFailureListener(
+ ignored ->
+ Toast.makeText(
+ context,
+ "Model download failed, please check your connection.",
+ Toast.LENGTH_LONG)
+ .show());
+ }
+
+ @Override
+ public void stop() {
+ super.stop();
+ detector.close();
+ }
+
+ @Override
+ protected Task<List<DetectedObject>> detectInImage(InputImage image) {
+ if (!modelDownloadingTask.isComplete()) {
+ if (detectorMode == ObjectDetectorOptionsBase.STREAM_MODE) {
+ Log.i(TAG, "Model download is in progress. Skip detecting image.");
+ return Tasks.forResult(new ArrayList<>());
+ } else {
+ Log.i(TAG, "Model download is in progress. Waiting...");
+ return modelDownloadingTask.continueWithTask(task -> processImageOnDownloadComplete(image));
+ }
+ } else {
+ return processImageOnDownloadComplete(image);
+ }
+ }
+
+ private Task<List<DetectedObject>> processImageOnDownloadComplete(InputImage image) {
+ if (modelDownloadingTask != null && modelDownloadingTask.isSuccessful()) {
+ if (detector == null) {
+ Log.e(TAG, "object detector has not been initialized; Skipped.");
+ Toast.makeText(context, "no initialized Detector.", Toast.LENGTH_SHORT).show();
+ }
+ return detector.process(image);
+ } else {
+ String downloadingError = "Error downloading remote model.";
+ Log.e(TAG, downloadingError, modelDownloadingTask.getException());
+ Toast.makeText(context, downloadingError, Toast.LENGTH_SHORT).show();
+ return Tasks.forException(
+ new Exception("Failed to download remote model.", modelDownloadingTask.getException()));
+ }
+ }
+
+ @Override
+ protected void onSuccess(
+ @NonNull List<DetectedObject> results, @NonNull GraphicOverlay graphicOverlay) {
+ for (DetectedObject object : results) {
+ graphicOverlay.add(new ObjectGraphic(graphicOverlay, object));
+ }
+ }
+
+ @Override
+ protected void onFailure(@NonNull Exception e) {
+ Log.e(TAG, "Object detection failed!", e);
+ }
+}
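
The RemoteModel handed to this constructor is created elsewhere in the demo. As a hedged sketch of one way to build it for a Firebase-hosted AutoML model (this assumes the com.google.mlkit:linkfirebase artifact for FirebaseModelSource, and reuses the "mlkit_flowers" default that PreferenceUtils falls back to below):

    // Sketch: remote custom model plus stream-mode detector options.
    CustomRemoteModel remoteModel =
        new CustomRemoteModel.Builder(
                new FirebaseModelSource.Builder("mlkit_flowers").build())
            .build();
    CustomObjectDetectorOptions options =
        new CustomObjectDetectorOptions.Builder(remoteModel)
            .setDetectorMode(ObjectDetectorOptionsBase.STREAM_MODE)
            .enableClassification()
            .setMaxPerObjectLabelCount(3)
            .build();
    ObjectDetectorProcessor processor =
        new ObjectDetectorProcessor(context, remoteModel, options);

STREAM_MODE keeps the processor responsive on live frames: as the code above shows, detection requests are simply skipped until the model download completes.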
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/object/ObjectGraphic.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/object/ObjectGraphic.java
new file mode 100755
index 0000000..09ed5d8
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/object/ObjectGraphic.java
@@ -0,0 +1,140 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo.object;
+
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.RectF;
+import com.google.mlkit.vision.automl.demo.GraphicOverlay;
+import com.google.mlkit.vision.automl.demo.GraphicOverlay.Graphic;
+import com.google.mlkit.vision.objects.DetectedObject;
+import com.google.mlkit.vision.objects.DetectedObject.Label;
+import java.util.Locale;
+
+/** Draw the detected object info in preview. */
+public class ObjectGraphic extends Graphic {
+
+ private static final float TEXT_SIZE = 54.0f;
+ private static final float STROKE_WIDTH = 4.0f;
+ private static final int NUM_COLORS = 10;
+ private static final int[][] COLORS =
+ new int[][] {
+ // {Text color, background color}
+ {Color.BLACK, Color.WHITE},
+ {Color.WHITE, Color.MAGENTA},
+ {Color.BLACK, Color.LTGRAY},
+ {Color.WHITE, Color.RED},
+ {Color.WHITE, Color.BLUE},
+ {Color.WHITE, Color.DKGRAY},
+ {Color.BLACK, Color.CYAN},
+ {Color.BLACK, Color.YELLOW},
+ {Color.WHITE, Color.BLACK},
+ {Color.BLACK, Color.GREEN}
+ };
+ private static final String LABEL_FORMAT = "%.2f%% confidence (index: %d)";
+
+ private final DetectedObject object;
+ private final Paint[] boxPaints;
+ private final Paint[] textPaints;
+ private final Paint[] labelPaints;
+
+ ObjectGraphic(GraphicOverlay overlay, DetectedObject object) {
+ super(overlay);
+
+ this.object = object;
+
+ int numColors = COLORS.length;
+ textPaints = new Paint[numColors];
+ boxPaints = new Paint[numColors];
+ labelPaints = new Paint[numColors];
+ for (int i = 0; i < numColors; i++) {
+ textPaints[i] = new Paint();
+ textPaints[i].setColor(COLORS[i][0] /* text color */);
+ textPaints[i].setTextSize(TEXT_SIZE);
+
+ boxPaints[i] = new Paint();
+ boxPaints[i].setColor(COLORS[i][1] /* background color */);
+ boxPaints[i].setStyle(Paint.Style.STROKE);
+ boxPaints[i].setStrokeWidth(STROKE_WIDTH);
+
+ labelPaints[i] = new Paint();
+ labelPaints[i].setColor(COLORS[i][1] /* background color */);
+ labelPaints[i].setStyle(Paint.Style.FILL);
+ }
+ }
+
+ @Override
+ public void draw(Canvas canvas) {
+ // Decide color based on object tracking ID
+ int colorID =
+ object.getTrackingId() == null ? 0 : Math.abs(object.getTrackingId() % NUM_COLORS);
+ float textWidth = textPaints[colorID].measureText("Tracking ID: " + object.getTrackingId());
+ float lineHeight = TEXT_SIZE + STROKE_WIDTH;
+ float yLabelOffset = -lineHeight;
+
+ // Calculate width and height of label box
+ for (Label label : object.getLabels()) {
+ textWidth = Math.max(textWidth, textPaints[colorID].measureText(label.getText()));
+ textWidth =
+ Math.max(
+ textWidth,
+ textPaints[colorID].measureText(
+ String.format(
+ Locale.US, LABEL_FORMAT, label.getConfidence() * 100, label.getIndex())));
+ yLabelOffset -= 2 * lineHeight;
+ }
+
+ // Draws the bounding box.
+ RectF rect = new RectF(object.getBoundingBox());
+ // If the image is flipped, the left will be translated to right, and the right to left.
+ float x0 = translateX(rect.left);
+ float x1 = translateX(rect.right);
+ rect.left = Math.min(x0, x1);
+ rect.right = Math.max(x0, x1);
+ rect.top = translateY(rect.top);
+ rect.bottom = translateY(rect.bottom);
+ canvas.drawRect(rect, boxPaints[colorID]);
+
+ // Draws other object info.
+ canvas.drawRect(
+ rect.left - STROKE_WIDTH,
+ rect.top + yLabelOffset,
+ rect.left + textWidth + (2 * STROKE_WIDTH),
+ rect.top,
+ labelPaints[colorID]);
+ yLabelOffset += TEXT_SIZE;
+ canvas.drawText(
+ "Tracking ID: " + object.getTrackingId(),
+ rect.left,
+ rect.top + yLabelOffset,
+ textPaints[colorID]);
+ yLabelOffset += lineHeight;
+
+ for (Label label : object.getLabels()) {
+ canvas.drawText(label.getText(), rect.left, rect.top + yLabelOffset, textPaints[colorID]);
+ yLabelOffset += lineHeight;
+ canvas.drawText(
+ String.format(Locale.US, LABEL_FORMAT, label.getConfidence() * 100, label.getIndex()),
+ rect.left,
+ rect.top + yLabelOffset,
+ textPaints[colorID]);
+
+ yLabelOffset += lineHeight;
+ }
+ }
+}
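
Note how draw() picks its paints: the tracking ID is reduced modulo NUM_COLORS, so an object keeps one stable color for as long as it keeps its ID. A tiny illustration with an assumed ID:

    Integer trackingId = 17;  // hypothetical value from DetectedObject.getTrackingId()
    int colorID = (trackingId == null) ? 0 : Math.abs(trackingId % NUM_COLORS);
    // colorID == 7 on every frame; untracked objects (null ID) always use palette slot 0.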
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/CameraXLivePreviewPreferenceFragment.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/CameraXLivePreviewPreferenceFragment.java
new file mode 100644
index 0000000..67aaf58
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/CameraXLivePreviewPreferenceFragment.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo.preference;
+
+import android.content.Context;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build.VERSION_CODES;
+import android.preference.ListPreference;
+import android.preference.PreferenceCategory;
+import androidx.annotation.Nullable;
+import android.util.Size;
+import androidx.annotation.RequiresApi;
+import androidx.annotation.StringRes;
+import androidx.camera.core.CameraSelector;
+import com.google.mlkit.vision.automl.demo.R;
+import java.util.Arrays;
+import java.util.List;
+
+/** Configures CameraX live preview demo settings. */
+@RequiresApi(VERSION_CODES.LOLLIPOP)
+public class CameraXLivePreviewPreferenceFragment extends LivePreviewPreferenceFragment {
+
+ @Override
+ void setUpCameraPreferences() {
+ PreferenceCategory cameraPreference =
+ (PreferenceCategory) findPreference(getString(R.string.pref_category_key_camera));
+
+ cameraPreference.removePreference(
+ findPreference(getString(R.string.pref_key_rear_camera_preview_size)));
+ cameraPreference.removePreference(
+ findPreference(getString(R.string.pref_key_front_camera_preview_size)));
+ setUpCameraXTargetAnalysisSizePreference(
+ R.string.pref_key_camerax_rear_camera_target_resolution, CameraSelector.LENS_FACING_BACK);
+ setUpCameraXTargetAnalysisSizePreference(
+ R.string.pref_key_camerax_front_camera_target_resolution, CameraSelector.LENS_FACING_FRONT);
+ }
+
+ private void setUpCameraXTargetAnalysisSizePreference(
+ @StringRes int previewSizePrefKeyId, int lensFacing) {
+ ListPreference pref = (ListPreference) findPreference(getString(previewSizePrefKeyId));
+ CameraCharacteristics cameraCharacteristics =
+ getCameraCharacteristics(getActivity(), lensFacing);
+ String[] entries;
+ if (cameraCharacteristics != null) {
+ StreamConfigurationMap map =
+ cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ Size[] outputSizes = map.getOutputSizes(SurfaceTexture.class);
+ entries = new String[outputSizes.length];
+ for (int i = 0; i < outputSizes.length; i++) {
+ entries[i] = outputSizes[i].toString();
+ }
+ } else {
+ entries =
+ new String[] {
+ "2000x2000",
+ "1600x1600",
+ "1200x1200",
+ "1000x1000",
+ "800x800",
+ "600x600",
+ "400x400",
+ "200x200",
+ "100x100",
+ };
+ }
+ pref.setEntries(entries);
+ pref.setEntryValues(entries);
+ pref.setSummary(pref.getEntry() == null ? "Default" : pref.getEntry());
+ pref.setOnPreferenceChangeListener(
+ (preference, newValue) -> {
+ String newStringValue = (String) newValue;
+ pref.setSummary(newStringValue);
+ PreferenceUtils.saveString(getActivity(), previewSizePrefKeyId, newStringValue);
+ return true;
+ });
+ }
+
+ @Nullable
+ public static CameraCharacteristics getCameraCharacteristics(
+ Context context, Integer lensFacing) {
+ CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ try {
+ List<String> cameraList = Arrays.asList(cameraManager.getCameraIdList());
+ for (String availableCameraId : cameraList) {
+ CameraCharacteristics availableCameraCharacteristics =
+ cameraManager.getCameraCharacteristics(availableCameraId);
+ Integer availableLensFacing =
+ availableCameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
+ if (availableLensFacing == null) {
+ continue;
+ }
+ if (availableLensFacing.equals(lensFacing)) {
+ return availableCameraCharacteristics;
+ }
+ }
+ } catch (CameraAccessException e) {
+ // Unable to access camera characteristics; fall through and return null.
+ }
+ return null;
+ }
+}
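
The static helper above is also usable outside this fragment; a hedged usage sketch (the context variable and log tag are assumptions) for listing the rear camera's supported output sizes:

    // Sketch: enumerate SurfaceTexture output sizes for the rear camera.
    CameraCharacteristics characteristics =
        CameraXLivePreviewPreferenceFragment.getCameraCharacteristics(
            context, CameraSelector.LENS_FACING_BACK);
    if (characteristics != null) {
      StreamConfigurationMap map =
          characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
      for (Size size : map.getOutputSizes(SurfaceTexture.class)) {
        Log.d("CameraSizes", size.toString());
      }
    }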
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/LivePreviewPreferenceFragment.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/LivePreviewPreferenceFragment.java
new file mode 100644
index 0000000..b50e1cf
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/LivePreviewPreferenceFragment.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo.preference;
+
+import android.hardware.Camera;
+import android.os.Bundle;
+import android.preference.ListPreference;
+import android.preference.PreferenceCategory;
+import android.preference.PreferenceFragment;
+import androidx.annotation.StringRes;
+import com.google.mlkit.vision.automl.demo.CameraSource;
+import com.google.mlkit.vision.automl.demo.CameraSource.SizePair;
+import com.google.mlkit.vision.automl.demo.R;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/** Configures live preview demo settings. */
+public class LivePreviewPreferenceFragment extends PreferenceFragment {
+
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ addPreferencesFromResource(R.xml.preference_live_preview_automl);
+ setUpCameraPreferences();
+ PreferenceUtils.setUpRemoteModelNamePreferences(this);
+ }
+
+ void setUpCameraPreferences() {
+ PreferenceCategory cameraPreference =
+ (PreferenceCategory) findPreference(getString(R.string.pref_category_key_camera));
+ cameraPreference.removePreference(
+ findPreference(getString(R.string.pref_key_camerax_rear_camera_target_resolution)));
+ cameraPreference.removePreference(
+ findPreference(getString(R.string.pref_key_camerax_front_camera_target_resolution)));
+ setUpCameraPreviewSizePreference(
+ R.string.pref_key_rear_camera_preview_size,
+ R.string.pref_key_rear_camera_picture_size,
+ CameraSource.CAMERA_FACING_BACK);
+ setUpCameraPreviewSizePreference(
+ R.string.pref_key_front_camera_preview_size,
+ R.string.pref_key_front_camera_picture_size,
+ CameraSource.CAMERA_FACING_FRONT);
+ }
+
+ private void setUpCameraPreviewSizePreference(
+ @StringRes int previewSizePrefKeyId, @StringRes int pictureSizePrefKeyId, int cameraId) {
+ ListPreference previewSizePreference =
+ (ListPreference) findPreference(getString(previewSizePrefKeyId));
+
+ Camera camera = null;
+ try {
+ camera = Camera.open(cameraId);
+
+ List<SizePair> previewSizeList = CameraSource.generateValidPreviewSizeList(camera);
+ String[] previewSizeStringValues = new String[previewSizeList.size()];
+ Map<String, String> previewToPictureSizeStringMap = new HashMap<>();
+ for (int i = 0; i < previewSizeList.size(); i++) {
+ SizePair sizePair = previewSizeList.get(i);
+ previewSizeStringValues[i] = sizePair.preview.toString();
+ if (sizePair.picture != null) {
+ previewToPictureSizeStringMap.put(
+ sizePair.preview.toString(), sizePair.picture.toString());
+ }
+ }
+ previewSizePreference.setEntries(previewSizeStringValues);
+ previewSizePreference.setEntryValues(previewSizeStringValues);
+
+ if (previewSizePreference.getEntry() == null) {
+ // First time opening the Settings page: select and persist a default preview size.
+ SizePair sizePair =
+ CameraSource.selectSizePair(
+ camera,
+ CameraSource.DEFAULT_REQUESTED_CAMERA_PREVIEW_WIDTH,
+ CameraSource.DEFAULT_REQUESTED_CAMERA_PREVIEW_HEIGHT);
+ String previewSizeString = sizePair.preview.toString();
+ previewSizePreference.setValue(previewSizeString);
+ previewSizePreference.setSummary(previewSizeString);
+ PreferenceUtils.saveString(
+ getActivity(),
+ pictureSizePrefKeyId,
+ sizePair.picture != null ? sizePair.picture.toString() : null);
+ } else {
+ previewSizePreference.setSummary(previewSizePreference.getEntry());
+ }
+
+ previewSizePreference.setOnPreferenceChangeListener(
+ (preference, newValue) -> {
+ String newPreviewSizeStringValue = (String) newValue;
+ previewSizePreference.setSummary(newPreviewSizeStringValue);
+ PreferenceUtils.saveString(
+ getActivity(),
+ pictureSizePrefKeyId,
+ previewToPictureSizeStringMap.get(newPreviewSizeStringValue));
+ return true;
+ });
+
+ } catch (RuntimeException e) {
+ // If there's no camera for the given camera id, hide the corresponding preference.
+ ((PreferenceCategory) findPreference(getString(R.string.pref_category_key_camera)))
+ .removePreference(previewSizePreference);
+ } finally {
+ if (camera != null) {
+ camera.release();
+ }
+ }
+ }
+}
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/PreferenceUtils.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/PreferenceUtils.java
new file mode 100644
index 0000000..f33c48f
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/PreferenceUtils.java
@@ -0,0 +1,130 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo.preference;
+
+import android.content.Context;
+import android.content.SharedPreferences;
+import android.os.Build.VERSION_CODES;
+import android.preference.EditTextPreference;
+import android.preference.PreferenceFragment;
+import android.preference.PreferenceManager;
+import android.widget.Toast;
+import androidx.annotation.Nullable;
+import androidx.annotation.RequiresApi;
+import androidx.annotation.StringRes;
+import androidx.camera.core.CameraSelector;
+import com.google.android.gms.common.images.Size;
+import com.google.common.base.Preconditions;
+import com.google.mlkit.vision.automl.demo.CameraSource;
+import com.google.mlkit.vision.automl.demo.CameraSource.SizePair;
+import com.google.mlkit.vision.automl.demo.R;
+
+/** Utility class to retrieve shared preferences. */
+public final class PreferenceUtils {
+
+ static void saveString(Context context, @StringRes int prefKeyId, @Nullable String value) {
+ PreferenceManager.getDefaultSharedPreferences(context)
+ .edit()
+ .putString(context.getString(prefKeyId), value)
+ .apply();
+ }
+
+ @Nullable
+ public static SizePair getCameraPreviewSizePair(Context context, int cameraId) {
+ Preconditions.checkArgument(
+ cameraId == CameraSource.CAMERA_FACING_BACK
+ || cameraId == CameraSource.CAMERA_FACING_FRONT);
+ String previewSizePrefKey;
+ String pictureSizePrefKey;
+ if (cameraId == CameraSource.CAMERA_FACING_BACK) {
+ previewSizePrefKey = context.getString(R.string.pref_key_rear_camera_preview_size);
+ pictureSizePrefKey = context.getString(R.string.pref_key_rear_camera_picture_size);
+ } else {
+ previewSizePrefKey = context.getString(R.string.pref_key_front_camera_preview_size);
+ pictureSizePrefKey = context.getString(R.string.pref_key_front_camera_picture_size);
+ }
+
+ try {
+ SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
+ return new SizePair(
+ Size.parseSize(sharedPreferences.getString(previewSizePrefKey, null)),
+ Size.parseSize(sharedPreferences.getString(pictureSizePrefKey, null)));
+ } catch (RuntimeException e) {
+ return null;
+ }
+ }
+
+ @RequiresApi(VERSION_CODES.LOLLIPOP)
+ @Nullable
+ public static android.util.Size getCameraXTargetResolution(Context context, int lensfacing) {
+ Preconditions.checkArgument(
+ lensfacing == CameraSelector.LENS_FACING_BACK
+ || lensfacing == CameraSelector.LENS_FACING_FRONT);
+ String prefKey =
+ lensfacing == CameraSelector.LENS_FACING_BACK
+ ? context.getString(R.string.pref_key_camerax_rear_camera_target_resolution)
+ : context.getString(R.string.pref_key_camerax_front_camera_target_resolution);
+ SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
+ try {
+ return android.util.Size.parseSize(sharedPreferences.getString(prefKey, null));
+ } catch (RuntimeException e) {
+ return null;
+ }
+ }
+
+ public static boolean isCameraLiveViewportEnabled(Context context) {
+ SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
+ String prefKey = context.getString(R.string.pref_key_camera_live_viewport);
+ return sharedPreferences.getBoolean(prefKey, false);
+ }
+
+ public static String getAutoMLRemoteModelName(Context context) {
+ SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(context);
+ String modelNamePrefKey = context.getString(R.string.pref_key_automl_remote_model_name);
+ String defaultModelName = "mlkit_flowers";
+ String remoteModelName = sharedPreferences.getString(modelNamePrefKey, defaultModelName);
+ if (remoteModelName.isEmpty()) {
+ remoteModelName = defaultModelName;
+ }
+ return remoteModelName;
+ }
+
+ public static void setUpRemoteModelNamePreferences(PreferenceFragment preferenceFragment) {
+ EditTextPreference autoMLRemoteModelNamePref =
+ (EditTextPreference)
+ preferenceFragment.findPreference(
+ preferenceFragment.getString(R.string.pref_key_automl_remote_model_name));
+ autoMLRemoteModelNamePref.setSummary(autoMLRemoteModelNamePref.getText());
+ autoMLRemoteModelNamePref.setOnPreferenceChangeListener(
+ (preference, newValue) -> {
+ String modelName = (String) newValue;
+ if (!modelName.isEmpty()) {
+ autoMLRemoteModelNamePref.setSummary((String) newValue);
+ return true;
+ }
+
+ Toast.makeText(
+ preferenceFragment.getActivity(),
+ R.string.pref_key_automl_remote_model_name,
+ Toast.LENGTH_LONG)
+ .show();
+ return false;
+ });
+ }
+
+ private PreferenceUtils() {}
+}
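
These getters are consumed when the camera pipeline is configured. A hedged sketch of the CameraX side (ImageAnalysis and its setTargetResolution builder method are CameraX APIs; the surrounding variable names are assumptions):

    // Sketch: honor the saved target resolution when building an analysis use case.
    android.util.Size targetResolution =
        PreferenceUtils.getCameraXTargetResolution(context, CameraSelector.LENS_FACING_BACK);
    ImageAnalysis.Builder builder = new ImageAnalysis.Builder();
    if (targetResolution != null) {
      builder.setTargetResolution(targetResolution);  // null means let CameraX choose
    }
    ImageAnalysis analysisUseCase = builder.build();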
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/SettingsActivity.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/SettingsActivity.java
new file mode 100644
index 0000000..8ddd117
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/SettingsActivity.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo.preference;
+
+import android.os.Bundle;
+import android.preference.PreferenceFragment;
+import androidx.appcompat.app.ActionBar;
+import androidx.appcompat.app.AppCompatActivity;
+import com.google.mlkit.vision.automl.demo.R;
+
+/**
+ * Hosts the preference fragment that configures settings for the demo activity specified by the
+ * {@link LaunchSource}.
+ */
+public class SettingsActivity extends AppCompatActivity {
+
+ public static final String EXTRA_LAUNCH_SOURCE = "extra_launch_source";
+
+ /** Specifies where this activity is launched from. */
+ @SuppressWarnings("NewApi") // CameraX is only available on API 21+
+ public enum LaunchSource {
+ LIVE_PREVIEW(R.string.pref_screen_title_live_preview, LivePreviewPreferenceFragment.class),
+ STILL_IMAGE(R.string.pref_screen_title_still_image, StillImagePreferenceFragment.class),
+ CAMERAX_LIVE_PREVIEW(
+ R.string.pref_screen_title_camerax_live_preview,
+ CameraXLivePreviewPreferenceFragment.class);
+
+ private final int titleResId;
+ private final Class<? extends PreferenceFragment> prefFragmentClass;
+
+ LaunchSource(int titleResId, Class<? extends PreferenceFragment> prefFragmentClass) {
+ this.titleResId = titleResId;
+ this.prefFragmentClass = prefFragmentClass;
+ }
+ }
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ setContentView(R.layout.activity_settings);
+
+ LaunchSource launchSource =
+ (LaunchSource) getIntent().getSerializableExtra(EXTRA_LAUNCH_SOURCE);
+ ActionBar actionBar = getSupportActionBar();
+ if (actionBar != null) {
+ actionBar.setTitle(launchSource.titleResId);
+ }
+
+ try {
+ getFragmentManager()
+ .beginTransaction()
+ .replace(
+ R.id.settings_container,
+ launchSource.prefFragmentClass.getDeclaredConstructor().newInstance())
+ .commit();
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
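
Callers open this screen by naming the fragment they want through the intent extra; a minimal launch sketch from any of the demo activities:

    // Sketch: open the settings for the live preview demo.
    Intent intent = new Intent(activity, SettingsActivity.class);
    intent.putExtra(
        SettingsActivity.EXTRA_LAUNCH_SOURCE, SettingsActivity.LaunchSource.LIVE_PREVIEW);
    activity.startActivity(intent);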
diff --git a/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/StillImagePreferenceFragment.java b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/StillImagePreferenceFragment.java
new file mode 100644
index 0000000..c8f1a9f
--- /dev/null
+++ b/mlkit/automl/app/src/main/java/com/google/mlkit/vision/automl/demo/preference/StillImagePreferenceFragment.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2020 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.vision.automl.demo.preference;
+
+import android.os.Bundle;
+import android.preference.PreferenceFragment;
+import com.google.mlkit.vision.automl.demo.R;
+
+/** Configures still image demo settings. */
+public class StillImagePreferenceFragment extends PreferenceFragment {
+
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ addPreferencesFromResource(R.xml.preference_still_image);
+ PreferenceUtils.setUpRemoteModelNamePreferences(this);
+ }
+}
diff --git a/mlkit/automl/app/src/main/res/drawable-hdpi/ic_settings_white_24dp.png b/mlkit/automl/app/src/main/res/drawable-hdpi/ic_settings_white_24dp.png
new file mode 100644
index 0000000..a61b8a9
Binary files /dev/null and b/mlkit/automl/app/src/main/res/drawable-hdpi/ic_settings_white_24dp.png differ
diff --git a/mlkit/automl/app/src/main/res/drawable-hdpi/ic_switch_camera_white_48dp.xml b/mlkit/automl/app/src/main/res/drawable-hdpi/ic_switch_camera_white_48dp.xml
new file mode 100644
index 0000000..63266d6
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/drawable-hdpi/ic_switch_camera_white_48dp.xml
@@ -0,0 +1,9 @@
+<!-- "switch camera" icon drawable (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/drawable-hdpi/ic_switch_camera_white_48dp_inset.png b/mlkit/automl/app/src/main/res/drawable-hdpi/ic_switch_camera_white_48dp_inset.png
new file mode 100644
index 0000000..a621627
Binary files /dev/null and b/mlkit/automl/app/src/main/res/drawable-hdpi/ic_switch_camera_white_48dp_inset.png differ
diff --git a/mlkit/automl/app/src/main/res/drawable-mdpi/ic_settings_white_24dp.png b/mlkit/automl/app/src/main/res/drawable-mdpi/ic_settings_white_24dp.png
new file mode 100644
index 0000000..9cc8f31
Binary files /dev/null and b/mlkit/automl/app/src/main/res/drawable-mdpi/ic_settings_white_24dp.png differ
diff --git a/mlkit/automl/app/src/main/res/drawable-mdpi/ic_switch_camera_white_48dp.xml b/mlkit/automl/app/src/main/res/drawable-mdpi/ic_switch_camera_white_48dp.xml
new file mode 100644
index 0000000..38c8412
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/drawable-mdpi/ic_switch_camera_white_48dp.xml
@@ -0,0 +1,9 @@
+<!-- "switch camera" icon drawable (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/drawable-mdpi/ic_switch_camera_white_48dp_inset.png b/mlkit/automl/app/src/main/res/drawable-mdpi/ic_switch_camera_white_48dp_inset.png
new file mode 100644
index 0000000..74b7917
Binary files /dev/null and b/mlkit/automl/app/src/main/res/drawable-mdpi/ic_switch_camera_white_48dp_inset.png differ
diff --git a/mlkit/automl/app/src/main/res/drawable-xhdpi/ic_settings_white_24dp.png b/mlkit/automl/app/src/main/res/drawable-xhdpi/ic_settings_white_24dp.png
new file mode 100644
index 0000000..43f5763
Binary files /dev/null and b/mlkit/automl/app/src/main/res/drawable-xhdpi/ic_settings_white_24dp.png differ
diff --git a/mlkit/automl/app/src/main/res/drawable-xhdpi/ic_switch_camera_white_48dp.xml b/mlkit/automl/app/src/main/res/drawable-xhdpi/ic_switch_camera_white_48dp.xml
new file mode 100644
index 0000000..fb06b0c
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/drawable-xhdpi/ic_switch_camera_white_48dp.xml
@@ -0,0 +1,9 @@
+<!-- "switch camera" icon drawable (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/drawable-xhdpi/ic_switch_camera_white_48dp_inset.png b/mlkit/automl/app/src/main/res/drawable-xhdpi/ic_switch_camera_white_48dp_inset.png
new file mode 100644
index 0000000..8d7cb37
Binary files /dev/null and b/mlkit/automl/app/src/main/res/drawable-xhdpi/ic_switch_camera_white_48dp_inset.png differ
diff --git a/mlkit/automl/app/src/main/res/drawable-xxhdpi/ic_settings_white_24dp.png b/mlkit/automl/app/src/main/res/drawable-xxhdpi/ic_settings_white_24dp.png
new file mode 100644
index 0000000..e3603e9
Binary files /dev/null and b/mlkit/automl/app/src/main/res/drawable-xxhdpi/ic_settings_white_24dp.png differ
diff --git a/mlkit/automl/app/src/main/res/drawable-xxhdpi/ic_switch_camera_white_48dp.xml b/mlkit/automl/app/src/main/res/drawable-xxhdpi/ic_switch_camera_white_48dp.xml
new file mode 100644
index 0000000..a814bfe
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/drawable-xxhdpi/ic_switch_camera_white_48dp.xml
@@ -0,0 +1,9 @@
+<!-- "switch camera" icon drawable (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/drawable-xxhdpi/ic_switch_camera_white_48dp_inset.png b/mlkit/automl/app/src/main/res/drawable-xxhdpi/ic_switch_camera_white_48dp_inset.png
new file mode 100644
index 0000000..74b9f0a
Binary files /dev/null and b/mlkit/automl/app/src/main/res/drawable-xxhdpi/ic_switch_camera_white_48dp_inset.png differ
diff --git a/mlkit/automl/app/src/main/res/drawable-xxxhdpi/ic_settings_white_24dp.png b/mlkit/automl/app/src/main/res/drawable-xxxhdpi/ic_settings_white_24dp.png
new file mode 100644
index 0000000..180470c
Binary files /dev/null and b/mlkit/automl/app/src/main/res/drawable-xxxhdpi/ic_settings_white_24dp.png differ
diff --git a/mlkit/automl/app/src/main/res/drawable-xxxhdpi/ic_switch_camera_white_48dp.xml b/mlkit/automl/app/src/main/res/drawable-xxxhdpi/ic_switch_camera_white_48dp.xml
new file mode 100644
index 0000000..e3c887f
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/drawable-xxxhdpi/ic_switch_camera_white_48dp.xml
@@ -0,0 +1,9 @@
+<!-- "switch camera" icon drawable (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/drawable-xxxhdpi/ic_switch_camera_white_48dp_inset.png b/mlkit/automl/app/src/main/res/drawable-xxxhdpi/ic_switch_camera_white_48dp_inset.png
new file mode 100644
index 0000000..f8ffc75
Binary files /dev/null and b/mlkit/automl/app/src/main/res/drawable-xxxhdpi/ic_switch_camera_white_48dp_inset.png differ
diff --git a/mlkit/automl/app/src/main/res/drawable/logo_mlkit.xml b/mlkit/automl/app/src/main/res/drawable/logo_mlkit.xml
new file mode 100644
index 0000000..1d256e0
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/drawable/logo_mlkit.xml
@@ -0,0 +1,42 @@
+<!-- ML Kit logo drawable (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/layout-land/activity_camerax_live_preview.xml b/mlkit/automl/app/src/main/res/layout-land/activity_camerax_live_preview.xml
new file mode 100644
index 0000000..93eb5d8
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/layout-land/activity_camerax_live_preview.xml
@@ -0,0 +1,57 @@
+<!-- landscape layout for the CameraX live preview screen (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/layout-land/activity_live_preview.xml b/mlkit/automl/app/src/main/res/layout-land/activity_live_preview.xml
new file mode 100644
index 0000000..706e7c4
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/layout-land/activity_live_preview.xml
@@ -0,0 +1,55 @@
+<!-- landscape layout for the live preview screen (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/layout/activity_camerax_live_preview.xml b/mlkit/automl/app/src/main/res/layout/activity_camerax_live_preview.xml
new file mode 100644
index 0000000..499c674
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/layout/activity_camerax_live_preview.xml
@@ -0,0 +1,61 @@
+<!-- portrait layout for the CameraX live preview screen (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/layout/activity_chooser.xml b/mlkit/automl/app/src/main/res/layout/activity_chooser.xml
new file mode 100644
index 0000000..01e0d79
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/layout/activity_chooser.xml
@@ -0,0 +1,35 @@
+<!-- layout for the entry-point chooser screen (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/layout/activity_live_preview.xml b/mlkit/automl/app/src/main/res/layout/activity_live_preview.xml
new file mode 100644
index 0000000..bca6c9c
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/layout/activity_live_preview.xml
@@ -0,0 +1,65 @@
+<!-- portrait layout for the live preview screen (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/layout/activity_settings.xml b/mlkit/automl/app/src/main/res/layout/activity_settings.xml
new file mode 100644
index 0000000..9c37b46
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/layout/activity_settings.xml
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="utf-8"?>
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+    android:id="@+id/settings_container"
+    android:layout_width="match_parent"
+    android:layout_height="match_parent"/>
diff --git a/mlkit/automl/app/src/main/res/layout/activity_still_image.xml b/mlkit/automl/app/src/main/res/layout/activity_still_image.xml
new file mode 100644
index 0000000..803270e
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/layout/activity_still_image.xml
@@ -0,0 +1,77 @@
+<!-- layout for the still image screen (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/layout/settings_style.xml b/mlkit/automl/app/src/main/res/layout/settings_style.xml
new file mode 100644
index 0000000..8505ae1
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/layout/settings_style.xml
@@ -0,0 +1,8 @@
+<!-- settings button style (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/layout/spinner_style.xml b/mlkit/automl/app/src/main/res/layout/spinner_style.xml
new file mode 100644
index 0000000..40949dc
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/layout/spinner_style.xml
@@ -0,0 +1,10 @@
+<!-- spinner item style (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/layout/toggle_style.xml b/mlkit/automl/app/src/main/res/layout/toggle_style.xml
new file mode 100644
index 0000000..9a0c782
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/layout/toggle_style.xml
@@ -0,0 +1,9 @@
+<!-- toggle button style (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/menu/camera_button_menu.xml b/mlkit/automl/app/src/main/res/menu/camera_button_menu.xml
new file mode 100644
index 0000000..8caf7e3
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/menu/camera_button_menu.xml
@@ -0,0 +1,12 @@
+<!-- camera activity options menu with the settings item (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/mipmap-hdpi/ic_launcher.png b/mlkit/automl/app/src/main/res/mipmap-hdpi/ic_launcher.png
new file mode 100644
index 0000000..cde69bc
Binary files /dev/null and b/mlkit/automl/app/src/main/res/mipmap-hdpi/ic_launcher.png differ
diff --git a/mlkit/automl/app/src/main/res/mipmap-mdpi/ic_launcher.png b/mlkit/automl/app/src/main/res/mipmap-mdpi/ic_launcher.png
new file mode 100644
index 0000000..c133a0c
Binary files /dev/null and b/mlkit/automl/app/src/main/res/mipmap-mdpi/ic_launcher.png differ
diff --git a/mlkit/automl/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/mlkit/automl/app/src/main/res/mipmap-xhdpi/ic_launcher.png
new file mode 100644
index 0000000..bfa42f0
Binary files /dev/null and b/mlkit/automl/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ
diff --git a/mlkit/automl/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/mlkit/automl/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
new file mode 100644
index 0000000..324e72c
Binary files /dev/null and b/mlkit/automl/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ
diff --git a/mlkit/automl/app/src/main/res/values/colors.xml b/mlkit/automl/app/src/main/res/values/colors.xml
new file mode 100644
index 0000000..899acde
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/values/colors.xml
@@ -0,0 +1,14 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- color resources; element names were not preserved. Values, in original order:
+     #4CAF50, #388E3C, #7C4DFF, #78909C, #E6E6E6, #689F38, #BFBFBF, #FFFFFF, #4286f4, #f44242 -->
diff --git a/mlkit/automl/app/src/main/res/values/dimens.xml b/mlkit/automl/app/src/main/res/values/dimens.xml
new file mode 100644
index 0000000..b62b22a
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/values/dimens.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- dimension resources; element names were not preserved. Values, in original order: 16dp, 16dp, 10dp -->
diff --git a/mlkit/automl/app/src/main/res/values/strings.xml b/mlkit/automl/app/src/main/res/values/strings.xml
new file mode 100644
index 0000000..4dbd76a
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/values/strings.xml
@@ -0,0 +1,49 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+  <string name="app_name" translatable="false">MLKit-AutoML-Remote</string>
+
+  <!-- General UI strings; their resource names were not preserved:
+       "OK", "Access to the camera is needed for detection",
+       "This application cannot run because it does not have the camera permission. The application will now exit.",
+       "Face detector dependencies cannot be downloaded due to low device storage",
+       "Front", "Back",
+       "Vision detectors demo with live camera preview",
+       "Vision detectors demo with a still image",
+       "Vision detectors demo with live preview using CameraX. Note that CameraX is only supported on API 21+",
+       "Download error", "Start over", "Settings", "Select image" -->
+
+  <!-- Settings screen titles; these names are referenced from SettingsActivity. -->
+  <string name="pref_screen_title_live_preview" translatable="false">Live preview settings</string>
+  <string name="pref_screen_title_still_image" translatable="false">Still image settings</string>
+  <string name="pref_screen_title_camerax_live_preview" translatable="false">CameraX live preview settings</string>
+
+  <!-- Feature titles; resource names not preserved: "Face Detection",
+       "Object Detection / Custom Object Detection", "AutoML Image Labeling", "Pose Detection" -->
+
+  <!-- Preference keys; these names are referenced from the preference fragments. -->
+  <string name="pref_category_key_camera" translatable="false">pckc</string>
+  <!-- Camera category title; resource name not preserved: "Camera" -->
+  <string name="pref_key_rear_camera_preview_size" translatable="false">rcpvs</string>
+  <string name="pref_key_rear_camera_picture_size" translatable="false">rcpts</string>
+  <string name="pref_key_front_camera_preview_size" translatable="false">fcpvs</string>
+  <string name="pref_key_front_camera_picture_size" translatable="false">fcpts</string>
+  <string name="pref_key_camerax_rear_camera_target_resolution" translatable="false">crctas</string>
+  <string name="pref_key_camerax_front_camera_target_resolution" translatable="false">cfctas</string>
+  <string name="pref_key_camera_live_viewport" translatable="false">clv</string>
+  <!-- Preference titles and summaries; resource names not preserved:
+       "Rear camera preview size", "Front camera preview size",
+       "CameraX rear camera target resolution", "CameraX front camera target resolution",
+       "Enable live viewport", "Do not block camera preview drawing on detection" -->
+
+  <!-- AutoML remote model preference; the key name is referenced from PreferenceUtils.
+       Title and summary (names not preserved): "AutoML Remote Model Name",
+       "Remote model name in the Firebase Console." -->
+  <string name="pref_key_automl_remote_model_name" translatable="false">armn</string>
+</resources>
diff --git a/mlkit/automl/app/src/main/res/values/styles.xml b/mlkit/automl/app/src/main/res/values/styles.xml
new file mode 100644
index 0000000..f20b21b
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/values/styles.xml
@@ -0,0 +1,3 @@
+<!-- app style resources (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/xml/preference_live_preview_automl.xml b/mlkit/automl/app/src/main/res/xml/preference_live_preview_automl.xml
new file mode 100644
index 0000000..ee3c393
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/xml/preference_live_preview_automl.xml
@@ -0,0 +1,49 @@
+<!-- preference screen for the live preview activities: camera settings plus the AutoML remote model name (original markup not preserved) -->
diff --git a/mlkit/automl/app/src/main/res/xml/preference_still_image.xml b/mlkit/automl/app/src/main/res/xml/preference_still_image.xml
new file mode 100644
index 0000000..1468896
--- /dev/null
+++ b/mlkit/automl/app/src/main/res/xml/preference_still_image.xml
@@ -0,0 +1,14 @@
+<!-- preference screen for the still image activity: the AutoML remote model name (original markup not preserved) -->
diff --git a/mlkit/automl/build.gradle b/mlkit/automl/build.gradle
new file mode 100644
index 0000000..696cea8
--- /dev/null
+++ b/mlkit/automl/build.gradle
@@ -0,0 +1,28 @@
+// Top-level build file where you can add configuration options common to all sub-projects/modules.
+
+buildscript {
+
+ repositories {
+ mavenLocal()
+ google()
+ jcenter()
+ }
+ dependencies {
+ classpath 'com.android.tools.build:gradle:4.2.0'
+
+ // NOTE: Do not place your application dependencies here; they belong
+ // in the individual module build.gradle files
+ }
+}
+
+allprojects {
+ repositories {
+ mavenLocal()
+ google()
+ jcenter()
+ }
+}
+
+task clean(type: Delete) {
+ delete rootProject.buildDir
+}
diff --git a/mlkit/automl/gradle.properties b/mlkit/automl/gradle.properties
new file mode 100644
index 0000000..682745a
--- /dev/null
+++ b/mlkit/automl/gradle.properties
@@ -0,0 +1,19 @@
+# Project-wide Gradle settings.
+# IDE (e.g. Android Studio) users:
+# Gradle settings configured through the IDE *will override*
+# any settings specified in this file.
+# For more details on how to configure your build environment visit
+# http://www.gradle.org/docs/current/userguide/build_environment.html
+# Specifies the JVM arguments used for the daemon process.
+# The setting is particularly useful for tweaking memory settings.
+org.gradle.jvmargs=-Xmx4096m -XX:MaxPermSize=1024m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
+# When configured, Gradle will run in incubating parallel mode.
+# This option should only be used with decoupled projects. More details, visit
+# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
+# org.gradle.parallel=true
+# AndroidX package structure to make it clearer which packages are bundled with the
+# Android operating system, and which are packaged with your app's APK
+# https://developer.android.com/topic/libraries/support-library/androidx-rn
+android.useAndroidX=true
+# Automatically convert third-party libraries to use AndroidX
+android.enableJetifier=true
diff --git a/mlkit/automl/gradle/wrapper/gradle-wrapper.jar b/mlkit/automl/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..f6b961f
Binary files /dev/null and b/mlkit/automl/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/mlkit/automl/gradle/wrapper/gradle-wrapper.properties b/mlkit/automl/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..13de492
--- /dev/null
+++ b/mlkit/automl/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Mon Jan 24 22:56:49 UTC 2022
+distributionBase=GRADLE_USER_HOME
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.7.1-bin.zip
+distributionPath=wrapper/dists
+zipStorePath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
diff --git a/mlkit/automl/gradlew b/mlkit/automl/gradlew
new file mode 100755
index 0000000..cccdd3d
--- /dev/null
+++ b/mlkit/automl/gradlew
@@ -0,0 +1,172 @@
+#!/usr/bin/env sh
+
+##############################################################################
+##
+## Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`"/$link"
+ fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+ echo "$*"
+}
+
+die () {
+ echo
+ echo "$*"
+ echo
+ exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+ CYGWIN* )
+ cygwin=true
+ ;;
+ Darwin* )
+ darwin=true
+ ;;
+ MINGW* )
+ msys=true
+ ;;
+ NONSTOP* )
+ nonstop=true
+ ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD="java"
+ which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+ MAX_FD_LIMIT=`ulimit -H -n`
+ if [ $? -eq 0 ] ; then
+ if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+ MAX_FD="$MAX_FD_LIMIT"
+ fi
+ ulimit -n $MAX_FD
+ if [ $? -ne 0 ] ; then
+ warn "Could not set maximum file descriptor limit: $MAX_FD"
+ fi
+ else
+ warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+ fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+ GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+ APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+ CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+ JAVACMD=`cygpath --unix "$JAVACMD"`
+
+ # We build the pattern for arguments to be converted via cygpath
+ ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+ SEP=""
+ for dir in $ROOTDIRSRAW ; do
+ ROOTDIRS="$ROOTDIRS$SEP$dir"
+ SEP="|"
+ done
+ OURCYGPATTERN="(^($ROOTDIRS))"
+ # Add a user-defined pattern to the cygpath arguments
+ if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+ OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+ fi
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ i=0
+ for arg in "$@" ; do
+ CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+ CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
+
+ if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
+ eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+ else
+ eval `echo args$i`="\"$arg\""
+ fi
+ i=$((i+1))
+ done
+ case $i in
+ (0) set -- ;;
+ (1) set -- "$args0" ;;
+ (2) set -- "$args0" "$args1" ;;
+ (3) set -- "$args0" "$args1" "$args2" ;;
+ (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+ (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+ (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+ (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+ (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+ (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+ esac
+fi
+
+# Escape application args
+save () {
+ for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+ echo " "
+}
+APP_ARGS=$(save "$@")
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
+ cd "$(dirname "$0")"
+fi
+
+exec "$JAVACMD" "$@"
diff --git a/mlkit/automl/gradlew.bat b/mlkit/automl/gradlew.bat
new file mode 100644
index 0000000..e95643d
--- /dev/null
+++ b/mlkit/automl/gradlew.bat
@@ -0,0 +1,84 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windows variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/mlkit/automl/settings.gradle b/mlkit/automl/settings.gradle
new file mode 100644
index 0000000..bd7bf32
--- /dev/null
+++ b/mlkit/automl/settings.gradle
@@ -0,0 +1,2 @@
+rootProject.name='ML Kit AutoML'
+include ':app'
diff --git a/mlkit/codescanner/LICENSE b/mlkit/codescanner/LICENSE
new file mode 100644
index 0000000..8de1006
--- /dev/null
+++ b/mlkit/codescanner/LICENSE
@@ -0,0 +1,191 @@
+ Copyright 2022 Google LLC
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/mlkit/codescanner/README.md b/mlkit/codescanner/README.md
new file mode 100644
index 0000000..6b68e28
--- /dev/null
+++ b/mlkit/codescanner/README.md
@@ -0,0 +1,37 @@
+# Google Code Scanner API Quickstart
+
+* [Read more about the Google Code Scanner API](https://developers.google.com/ml-kit/code-scanner)
+
+## Introduction
+
+The Google Code Scanner API Quickstart app demonstrates how to use the code
+scanner feature to scan codes without having to request camera permission.
+
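+At its core, the flow this sample wires up in `MainActivity` is a one-shot
+`startScan()` call. A minimal Kotlin sketch (`handleResult` and `showError`
+are hypothetical helpers; `context` is your Activity):
+
+```kotlin
+import com.google.mlkit.vision.codescanner.GmsBarcodeScanning
+
+// The scanner UI is hosted by Google Play services, so the app itself
+// needs no camera permission.
+val scanner = GmsBarcodeScanning.getClient(context)
+scanner.startScan()
+    .addOnSuccessListener { barcode -> handleResult(barcode.rawValue) }
+    .addOnCanceledListener { showError("Scan cancelled") }
+    .addOnFailureListener { e -> showError(e.message) }
+```
+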
+## Getting Started
+
+* Run the sample code on your Android device or emulator.
+* Tap the SCAN BARCODE button.
+
+## Support
+
+* [Documentation](https://developers.google.com/ml-kit/code-scanner)
+* [Stack Overflow](https://stackoverflow.com/questions/tagged/mlkit)
+
+## License
+
+Copyright 2022 Google, Inc.
+
+Licensed to the Apache Software Foundation (ASF) under one or more contributor
+license agreements. See the NOTICE file distributed with this work for
+additional information regarding copyright ownership. The ASF licenses this
+file to you under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
diff --git a/mlkit/codescanner/app/build.gradle b/mlkit/codescanner/app/build.gradle
new file mode 100644
index 0000000..098594e
--- /dev/null
+++ b/mlkit/codescanner/app/build.gradle
@@ -0,0 +1,52 @@
+apply plugin: 'com.android.application'
+apply plugin: 'kotlin-android'
+apply plugin: 'kotlin-android-extensions'
+
+android {
+ compileSdkVersion 31
+ defaultConfig {
+ applicationId "com.google.mlkit.samples.codescanner"
+ minSdkVersion 21
+ targetSdkVersion 31
+ versionCode 1
+ versionName "1.0"
+ setProperty("archivesBaseName", "code-scanner-sample")
+ testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+ }
+ buildTypes {
+ debug {
+ isDefault true
+ minifyEnabled false
+ proguardFiles 'proguard-rules.pro'
+ }
+ proguard {
+ debuggable false
+ minifyEnabled true
+ shrinkResources true
+ proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro', 'proguard.cfg'
+ testProguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguardTest-rules.pro', 'proguard.cfg'
+ }
+ release {
+ minifyEnabled true
+ proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro', 'proguard.cfg'
+ }
+ testBuildType "proguard"
+ }
+ compileOptions {
+ sourceCompatibility JavaVersion.VERSION_1_8
+ targetCompatibility JavaVersion.VERSION_1_8
+ }
+}
+
+dependencies {
+ implementation project(':internal:chooserx')
+ implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
+ implementation 'com.google.android.material:material:1.5.0'
+
+ implementation 'com.google.android.gms:play-services-code-scanner:16.0.0-beta3'
+
+ androidTestImplementation 'androidx.test:rules:1.4.0'
+ androidTestImplementation 'androidx.test:runner:1.4.0'
+ androidTestImplementation 'androidx.test.ext:junit:1.1.3'
+ androidTestImplementation 'com.google.truth:truth:1.0.1'
+}
diff --git a/mlkit/codescanner/app/src/main/AndroidManifest.xml b/mlkit/codescanner/app/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..569e872
--- /dev/null
+++ b/mlkit/codescanner/app/src/main/AndroidManifest.xml
@@ -0,0 +1,42 @@
+<!-- Declares the launcher EntryChoiceActivity and the Java and Kotlin
+     MainActivity screens (original markup not preserved). -->
diff --git a/mlkit/codescanner/app/src/main/java/com/google/mlkit/samples/codescanner/EntryChoiceActivity.kt b/mlkit/codescanner/app/src/main/java/com/google/mlkit/samples/codescanner/EntryChoiceActivity.kt
new file mode 100644
index 0000000..92d47e1
--- /dev/null
+++ b/mlkit/codescanner/app/src/main/java/com/google/mlkit/samples/codescanner/EntryChoiceActivity.kt
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2022 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.samples.codescanner
+
+import android.content.Intent
+import com.google.mlkit.samples.codescanner.kotlin.MainActivity
+import com.mlkit.example.internal.BaseEntryChoiceActivity
+import com.mlkit.example.internal.Choice
+
+class EntryChoiceActivity : BaseEntryChoiceActivity() {
+
+  override fun getChoices(): List<Choice> {
+ return listOf(
+ Choice(
+ "Java",
+ "Run the Google Code Scanner demo written in Java.",
+ Intent(this, com.google.mlkit.samples.codescanner.java.MainActivity::class.java)
+ ),
+ Choice(
+ "Kotlin",
+ "Run the Google Code Scanner demo written in Kotlin.",
+ Intent(this, com.google.mlkit.samples.codescanner.kotlin.MainActivity::class.java)
+ )
+ )
+ }
+}
diff --git a/mlkit/codescanner/app/src/main/java/com/google/mlkit/samples/codescanner/java/MainActivity.java b/mlkit/codescanner/app/src/main/java/com/google/mlkit/samples/codescanner/java/MainActivity.java
new file mode 100644
index 0000000..55d90a9
--- /dev/null
+++ b/mlkit/codescanner/app/src/main/java/com/google/mlkit/samples/codescanner/java/MainActivity.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2022 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.samples.codescanner.java;
+
+import android.annotation.SuppressLint;
+import android.os.Bundle;
+import androidx.appcompat.app.AppCompatActivity;
+import android.view.View;
+import android.widget.CheckBox;
+import android.widget.TextView;
+import com.google.mlkit.common.MlKitException;
+import com.google.mlkit.samples.codescanner.R;
+import com.google.mlkit.vision.barcode.common.Barcode;
+import com.google.mlkit.vision.codescanner.GmsBarcodeScanner;
+import com.google.mlkit.vision.codescanner.GmsBarcodeScannerOptions;
+import com.google.mlkit.vision.codescanner.GmsBarcodeScanning;
+import java.util.Locale;
+
+/** Demonstrates the code scanner powered by Google Play Services. */
+public class MainActivity extends AppCompatActivity {
+
+ private static final String KEY_ALLOW_MANUAL_INPUT = "allow_manual_input";
+
+ private boolean allowManualInput;
+ private TextView barcodeResultView;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.activity_main);
+
+ barcodeResultView = findViewById(R.id.barcode_result_view);
+ }
+
+ public void onAllowManualInputCheckboxClicked(View view) {
+ allowManualInput = ((CheckBox) view).isChecked();
+ }
+
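+  // Launches the scanner UI provided by Google Play services; the app itself
+  // never needs to request camera permission (see README).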
+ public void onScanButtonClicked(View view) {
+ GmsBarcodeScannerOptions.Builder optionsBuilder = new GmsBarcodeScannerOptions.Builder();
+ if (allowManualInput) {
+ optionsBuilder.allowManualInput();
+ }
+ GmsBarcodeScanner gmsBarcodeScanner =
+ GmsBarcodeScanning.getClient(this, optionsBuilder.build());
+ gmsBarcodeScanner
+ .startScan()
+ .addOnSuccessListener(barcode -> barcodeResultView.setText(getSuccessfulMessage(barcode)))
+ .addOnFailureListener(
+ e -> barcodeResultView.setText(getErrorMessage(e)))
+ .addOnCanceledListener(
+ () -> barcodeResultView.setText(getString(R.string.error_scanner_cancelled)));
+ }
+
+ @Override
+ protected void onSaveInstanceState(Bundle savedInstanceState) {
+ savedInstanceState.putBoolean(KEY_ALLOW_MANUAL_INPUT, allowManualInput);
+ super.onSaveInstanceState(savedInstanceState);
+ }
+
+ @Override
+ protected void onRestoreInstanceState(Bundle savedInstanceState) {
+ super.onRestoreInstanceState(savedInstanceState);
+ allowManualInput = savedInstanceState.getBoolean(KEY_ALLOW_MANUAL_INPUT);
+ }
+
+ private String getSuccessfulMessage(Barcode barcode) {
+ String barcodeValue =
+ String.format(
+ Locale.US,
+ "Display Value: %s\nRaw Value: %s\nFormat: %s\nValue Type: %s",
+ barcode.getDisplayValue(),
+ barcode.getRawValue(),
+ barcode.getFormat(),
+ barcode.getValueType());
+ return getString(R.string.barcode_result, barcodeValue);
+ }
+
+ @SuppressLint("SwitchIntDef")
+ private String getErrorMessage(Exception e) {
+ if (e instanceof MlKitException) {
+ switch (((MlKitException) e).getErrorCode()) {
+ case MlKitException.CODE_SCANNER_CAMERA_PERMISSION_NOT_GRANTED:
+ return getString(R.string.error_camera_permission_not_granted);
+ case MlKitException.CODE_SCANNER_APP_NAME_UNAVAILABLE:
+ return getString(R.string.error_app_name_unavailable);
+ default:
+ return getString(R.string.error_default_message, e);
+ }
+ } else {
+ return e.getMessage();
+ }
+ }
+}
diff --git a/mlkit/codescanner/app/src/main/java/com/google/mlkit/samples/codescanner/kotlin/MainActivity.kt b/mlkit/codescanner/app/src/main/java/com/google/mlkit/samples/codescanner/kotlin/MainActivity.kt
new file mode 100644
index 0000000..580e262
--- /dev/null
+++ b/mlkit/codescanner/app/src/main/java/com/google/mlkit/samples/codescanner/kotlin/MainActivity.kt
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2022 Google LLC. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.mlkit.samples.codescanner.kotlin
+
+import android.os.Bundle
+import androidx.appcompat.app.AppCompatActivity
+import android.view.View
+import android.widget.CheckBox
+import android.widget.TextView
+import com.google.mlkit.common.MlKitException
+import com.google.mlkit.samples.codescanner.R
+import com.google.mlkit.vision.barcode.common.Barcode
+import com.google.mlkit.vision.codescanner.GmsBarcodeScannerOptions
+import com.google.mlkit.vision.codescanner.GmsBarcodeScanning
+import java.util.Locale
+
+/** Demonstrates the code scanner powered by Google Play Services. */
+class MainActivity : AppCompatActivity() {
+
+ private var allowManualInput = false
+ private var barcodeResultView: TextView? = null
+
+ override fun onCreate(savedInstanceState: Bundle?) {
+ super.onCreate(savedInstanceState)
+ setContentView(R.layout.activity_main)
+ barcodeResultView = findViewById(R.id.barcode_result_view)
+ }
+
+ fun onAllowManualInputCheckboxClicked(view: View) {
+ allowManualInput = (view as CheckBox).isChecked
+ }
+
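+  // As in the Java sample, the scanner UI is provided by Google Play services,
+  // so the app never requests camera permission itself.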
+ fun onScanButtonClicked(view: View) {
+ val optionsBuilder = GmsBarcodeScannerOptions.Builder()
+ if (allowManualInput) {
+ optionsBuilder.allowManualInput()
+ }
+ val gmsBarcodeScanner = GmsBarcodeScanning.getClient(this, optionsBuilder.build())
+ gmsBarcodeScanner
+ .startScan()
+ .addOnSuccessListener { barcode: Barcode ->
+ barcodeResultView!!.text = getSuccessfulMessage(barcode)
+ }
+ .addOnFailureListener { e: Exception -> barcodeResultView!!.text = getErrorMessage(e) }
+ .addOnCanceledListener {
+ barcodeResultView!!.text = getString(R.string.error_scanner_cancelled)
+ }
+ }
+
+ override fun onSaveInstanceState(savedInstanceState: Bundle) {
+ savedInstanceState.putBoolean(KEY_ALLOW_MANUAL_INPUT, allowManualInput)
+ super.onSaveInstanceState(savedInstanceState)
+ }
+
+ override fun onRestoreInstanceState(savedInstanceState: Bundle) {
+ super.onRestoreInstanceState(savedInstanceState)
+ allowManualInput = savedInstanceState.getBoolean(KEY_ALLOW_MANUAL_INPUT)
+ }
+
+ private fun getSuccessfulMessage(barcode: Barcode): String {
+ val barcodeValue =
+ String.format(
+ Locale.US,
+ "Display Value: %s\nRaw Value: %s\nFormat: %s\nValue Type: %s",
+ barcode.displayValue,
+ barcode.rawValue,
+ barcode.format,
+ barcode.valueType
+ )
+ return getString(R.string.barcode_result, barcodeValue)
+ }
+
+ private fun getErrorMessage(e: Exception): String? {
+ return if (e is MlKitException) {
+ when (e.errorCode) {
+ MlKitException.CODE_SCANNER_CAMERA_PERMISSION_NOT_GRANTED ->
+ getString(R.string.error_camera_permission_not_granted)
+ MlKitException.CODE_SCANNER_APP_NAME_UNAVAILABLE ->
+ getString(R.string.error_app_name_unavailable)
+ else -> getString(R.string.error_default_message, e)
+ }
+ } else {
+ e.message
+ }
+ }
+
+ companion object {
+ private const val KEY_ALLOW_MANUAL_INPUT = "allow_manual_input"
+ }
+}
diff --git a/mlkit/codescanner/app/src/main/res/drawable/logo_mlkit.xml b/mlkit/codescanner/app/src/main/res/drawable/logo_mlkit.xml
new file mode 100644
index 0000000..770b112
--- /dev/null
+++ b/mlkit/codescanner/app/src/main/res/drawable/logo_mlkit.xml
@@ -0,0 +1,42 @@
+<!-- Vector drawable of the ML Kit logo (path data not preserved). -->
diff --git a/mlkit/codescanner/app/src/main/res/layout/activity_main.xml b/mlkit/codescanner/app/src/main/res/layout/activity_main.xml
new file mode 100644
index 0000000..ed87b79
--- /dev/null
+++ b/mlkit/codescanner/app/src/main/res/layout/activity_main.xml
@@ -0,0 +1,25 @@
+<!-- Main layout: the allow-manual-input CheckBox, the SCAN BARCODE Button, and
+     the barcode_result_view TextView (original markup not preserved). -->
diff --git a/mlkit/codescanner/app/src/main/res/values/colors.xml b/mlkit/codescanner/app/src/main/res/values/colors.xml
new file mode 100644
index 0000000..21218f9
--- /dev/null
+++ b/mlkit/codescanner/app/src/main/res/values/colors.xml
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <color name="colorPrimary">@color/blue</color>
+    <color name="colorPrimaryDark">@color/darkBlue</color>
+    <color name="colorAccent">@color/red</color>
+    <color name="blue">#3681E6</color>
+    <color name="darkBlue">#2F5FC0</color>
+    <color name="red">#D81B60</color>
+</resources>
diff --git a/mlkit/codescanner/app/src/main/res/values/strings.xml b/mlkit/codescanner/app/src/main/res/values/strings.xml
new file mode 100644
index 0000000..078ec42
--- /dev/null
+++ b/mlkit/codescanner/app/src/main/res/values/strings.xml
@@ -0,0 +1,10 @@
+<resources>
+    <string name="app_name">Code Scanner Demo</string>
+    <string name="allow_manual_input">Allow input code manually</string>
+    <string name="scan_barcode">Scan Barcode</string>
+    <string name="barcode_result">Barcode detected.\n%1$s</string>
+    <string name="error_scanner_cancelled">Code scanner is cancelled.</string>
+    <string name="error_camera_permission_not_granted">Camera permission is not granted.</string>
+    <string name="error_app_name_unavailable">App name is not set.</string>
+    <string name="error_default_message">Failed to scan code: %1$s</string>
+</resources>
diff --git a/mlkit/codescanner/app/src/main/res/values/styles.xml b/mlkit/codescanner/app/src/main/res/values/styles.xml
new file mode 100644
index 0000000..2359030
--- /dev/null
+++ b/mlkit/codescanner/app/src/main/res/values/styles.xml
@@ -0,0 +1,10 @@
+<!-- App theme style definitions (original markup not preserved). -->
diff --git a/mlkit/codescanner/build.gradle b/mlkit/codescanner/build.gradle
new file mode 100644
index 0000000..1cf5e56
--- /dev/null
+++ b/mlkit/codescanner/build.gradle
@@ -0,0 +1,20 @@
+buildscript {
+ ext.kotlin_version = '1.4.31'
+
+ repositories {
+ google()
+ jcenter()
+ }
+ dependencies {
+ classpath 'com.android.tools.build:gradle:4.2.0'
+ classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
+ }
+}
+
+allprojects {
+ repositories {
+ mavenLocal()
+ google()
+ jcenter()
+ }
+}
diff --git a/mlkit/codescanner/gradle.properties b/mlkit/codescanner/gradle.properties
new file mode 100644
index 0000000..9592636
--- /dev/null
+++ b/mlkit/codescanner/gradle.properties
@@ -0,0 +1,15 @@
+# Project-wide Gradle settings.
+# IDE (e.g. Android Studio) users:
+# Gradle settings configured through the IDE *will override*
+# any settings specified in this file.
+# For more details on how to configure your build environment visit
+# http://www.gradle.org/docs/current/userguide/build_environment.html
+# Specifies the JVM arguments used for the daemon process.
+# The setting is particularly useful for tweaking memory settings.
+org.gradle.jvmargs=-Xmx1536m
+# When configured, Gradle will run in incubating parallel mode.
+# This option should only be used with decoupled projects. More details, visit
+# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
+# org.gradle.parallel=true
+android.useAndroidX=true
+android.enableJetifier=true
diff --git a/mlkit/codescanner/gradle/wrapper/gradle-wrapper.jar b/mlkit/codescanner/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..e708b1c
Binary files /dev/null and b/mlkit/codescanner/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/mlkit/codescanner/gradle/wrapper/gradle-wrapper.properties b/mlkit/codescanner/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..da9702f
--- /dev/null
+++ b/mlkit/codescanner/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,5 @@
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.8-bin.zip
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/mlkit/codescanner/gradlew b/mlkit/codescanner/gradlew
new file mode 100755
index 0000000..4f906e0
--- /dev/null
+++ b/mlkit/codescanner/gradlew
@@ -0,0 +1,185 @@
+#!/usr/bin/env sh
+
+#
+# Copyright 2015 the original author or authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+##############################################################################
+##
+## Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`"/$link"
+ fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+ echo "$*"
+}
+
+die () {
+ echo
+ echo "$*"
+ echo
+ exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+ CYGWIN* )
+ cygwin=true
+ ;;
+ Darwin* )
+ darwin=true
+ ;;
+ MINGW* )
+ msys=true
+ ;;
+ NONSTOP* )
+ nonstop=true
+ ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD="java"
+ which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+ MAX_FD_LIMIT=`ulimit -H -n`
+ if [ $? -eq 0 ] ; then
+ if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+ MAX_FD="$MAX_FD_LIMIT"
+ fi
+ ulimit -n $MAX_FD
+ if [ $? -ne 0 ] ; then
+ warn "Could not set maximum file descriptor limit: $MAX_FD"
+ fi
+ else
+ warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+ fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+ GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin or MSYS, switch paths to Windows format before running java
+if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
+ APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+ CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+
+ JAVACMD=`cygpath --unix "$JAVACMD"`
+
+ # We build the pattern for arguments to be converted via cygpath
+ ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+ SEP=""
+ for dir in $ROOTDIRSRAW ; do
+ ROOTDIRS="$ROOTDIRS$SEP$dir"
+ SEP="|"
+ done
+ OURCYGPATTERN="(^($ROOTDIRS))"
+ # Add a user-defined pattern to the cygpath arguments
+ if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+ OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+ fi
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ i=0
+ for arg in "$@" ; do
+ CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+ CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
+
+ if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
+ eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+ else
+ eval `echo args$i`="\"$arg\""
+ fi
+ i=`expr $i + 1`
+ done
+ case $i in
+ 0) set -- ;;
+ 1) set -- "$args0" ;;
+ 2) set -- "$args0" "$args1" ;;
+ 3) set -- "$args0" "$args1" "$args2" ;;
+ 4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+ 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+ 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+ 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+ 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+ 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+ esac
+fi
+
+# Escape application args
+save () {
+ for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+ echo " "
+}
+APP_ARGS=`save "$@"`
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+exec "$JAVACMD" "$@"
diff --git a/mlkit/codescanner/gradlew.bat b/mlkit/codescanner/gradlew.bat
new file mode 100644
index 0000000..ac1b06f
--- /dev/null
+++ b/mlkit/codescanner/gradlew.bat
@@ -0,0 +1,89 @@
+@rem
+@rem Copyright 2015 the original author or authors.
+@rem
+@rem Licensed under the Apache License, Version 2.0 (the "License");
+@rem you may not use this file except in compliance with the License.
+@rem You may obtain a copy of the License at
+@rem
+@rem https://www.apache.org/licenses/LICENSE-2.0
+@rem
+@rem Unless required by applicable law or agreed to in writing, software
+@rem distributed under the License is distributed on an "AS IS" BASIS,
+@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+@rem See the License for the specific language governing permissions and
+@rem limitations under the License.
+@rem
+
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Resolve any "." and ".." in APP_HOME to make it shorter.
+for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto execute
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto execute
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/mlkit/codescanner/settings.gradle b/mlkit/codescanner/settings.gradle
new file mode 100644
index 0000000..604bd99
--- /dev/null
+++ b/mlkit/codescanner/settings.gradle
@@ -0,0 +1,4 @@
+rootProject.name = "mlkit_code_scanner_sample"
+include ':app'
+include ':internal:chooserx'
+project(':internal:chooserx').projectDir = file('../internal/chooserx')
\ No newline at end of file
diff --git a/mlkit/digitalink/LICENSE b/mlkit/digitalink/LICENSE
new file mode 100644
index 0000000..973b3b7
--- /dev/null
+++ b/mlkit/digitalink/LICENSE
@@ -0,0 +1,191 @@
+ Copyright 2020 Google LLC
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ https://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/mlkit/digitalink/README.md b/mlkit/digitalink/README.md
new file mode 100644
index 0000000..491f1fd
--- /dev/null
+++ b/mlkit/digitalink/README.md
@@ -0,0 +1,40 @@
+# ML Kit Digital Ink Recognition Quickstart
+
+- [Read more about ML Kit Digital Ink Recognition](https://developers.google.com/ml-kit/vision/digital-ink-recognition/android)
+
+## Introduction
+
+The ML Kit Digital Ink Recognition Quickstart app demonstrates how to use
+ML Kit's Digital Ink Recognition feature to recognize handwritten text and
+drawings in your application.
+
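+Under the hood, the sample's `StrokeManager` drives the standard ML Kit
+recognition flow. A hedged Kotlin sketch (assumes the `en-US` model is already
+downloaded and `ink` is an `Ink` built from the user's strokes):
+
+```kotlin
+import com.google.mlkit.vision.digitalink.DigitalInkRecognition
+import com.google.mlkit.vision.digitalink.DigitalInkRecognitionModel
+import com.google.mlkit.vision.digitalink.DigitalInkRecognitionModelIdentifier
+import com.google.mlkit.vision.digitalink.DigitalInkRecognizerOptions
+
+// Look up a model identifier, build a recognizer, and recognize an Ink.
+val modelId = DigitalInkRecognitionModelIdentifier.fromLanguageTag("en-US")!!
+val model = DigitalInkRecognitionModel.builder(modelId).build()
+val recognizer =
+    DigitalInkRecognition.getClient(DigitalInkRecognizerOptions.builder(model).build())
+recognizer.recognize(ink)
+    .addOnSuccessListener { result -> println(result.candidates.first().text) }
+```
+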
+## Getting Started
+
+* Run the sample code on your Android device or emulator
+* Draw on the screen and see your handwriting recognized and turned into text
+* Try extending the code to add new features and functionality
+
+## Support
+
+* [Documentation](https://developers.google.com/ml-kit/guides)
+* [API Reference](https://developers.google.com/ml-kit/reference)
+* [Stack Overflow](https://stackoverflow.com/questions/tagged/mlkit)
+
+## License
+
+Copyright 2020 Google, Inc.
+
+Licensed to the Apache Software Foundation (ASF) under one or more contributor
+license agreements. See the NOTICE file distributed with this work for
+additional information regarding copyright ownership. The ASF licenses this
+file to you under the Apache License, Version 2.0 (the "License"); you may not
+use this file except in compliance with the License. You may obtain a copy of
+the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations under
+the License.
diff --git a/mlkit/digitalink/app/build.gradle b/mlkit/digitalink/app/build.gradle
new file mode 100644
index 0000000..fc5b1c2
--- /dev/null
+++ b/mlkit/digitalink/app/build.gradle
@@ -0,0 +1,53 @@
+apply plugin: "com.android.application"
+apply plugin: 'kotlin-android'
+apply plugin: 'kotlin-android-extensions'
+
+android {
+ compileSdkVersion 31
+ defaultConfig {
+ applicationId "com.google.mlkit.samples.vision.digitalink"
+ minSdkVersion 19
+ targetSdkVersion 31
+ multiDexEnabled true
+ versionCode 1
+ versionName "1.0"
+ setProperty("archivesBaseName", "vision-digital-ink")
+ testApplicationId "com.google.mlkit.vision.digitalink"
+ testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
+ }
+
+ buildTypes {
+ testBuildType "release"
+ release {
+ minifyEnabled true
+ proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro', 'proguard.cfg'
+ }
+ debug {
+ minifyEnabled false
+ proguardFiles 'proguard-rules.pro'
+ }
+ }
+ compileOptions {
+ sourceCompatibility JavaVersion.VERSION_1_8
+ targetCompatibility JavaVersion.VERSION_1_8
+ }
+}
+
+dependencies {
+ implementation "com.google.auto.value:auto-value-annotations:1.6.5"
+ annotationProcessor "com.google.auto.value:auto-value:1.6.3"
+ implementation 'androidx.core:core-ktx:1.3.1'
+ implementation 'androidx.multidex:multidex:2.0.1'
+ implementation 'com.google.guava:guava:28.0-android'
+ implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
+
+ implementation 'androidx.appcompat:appcompat:1.2.0'
+
+ implementation 'com.google.mlkit:digital-ink-recognition:18.0.0'
+
+ androidTestImplementation 'androidx.test:rules:1.2.0'
+ androidTestImplementation 'androidx.test:runner:1.2.0'
+ androidTestImplementation 'androidx.test.ext:junit:1.1.1'
+ androidTestImplementation 'com.google.truth:truth:1.0.1'
+
+}
diff --git a/mlkit/digitalink/app/proguard-rules.pro b/mlkit/digitalink/app/proguard-rules.pro
new file mode 100644
index 0000000..481bb43
--- /dev/null
+++ b/mlkit/digitalink/app/proguard-rules.pro
@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+# http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+# public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
\ No newline at end of file
diff --git a/mlkit/digitalink/app/src/main/AndroidManifest.xml b/mlkit/digitalink/app/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..f0276df
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/AndroidManifest.xml
@@ -0,0 +1,40 @@
+<!-- Declares DigitalInkMainActivity as the entry activity (original markup
+     not preserved). -->
diff --git a/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/DigitalInkMainActivity.java b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/DigitalInkMainActivity.java
new file mode 100644
index 0000000..6e9c5dd
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/DigitalInkMainActivity.java
@@ -0,0 +1,196 @@
+package com.google.mlkit.samples.vision.digitalink;
+
+import android.os.Bundle;
+import androidx.annotation.Nullable;
+import androidx.appcompat.app.AppCompatActivity;
+import android.util.Log;
+import android.view.View;
+import android.widget.AdapterView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.ArrayAdapter;
+import android.widget.Spinner;
+import androidx.annotation.NonNull;
+import androidx.annotation.VisibleForTesting;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSortedSet;
+import com.google.mlkit.samples.vision.digitalink.StrokeManager.DownloadedModelsChangedListener;
+import com.google.mlkit.vision.digitalink.DigitalInkRecognitionModelIdentifier;
+import java.util.Locale;
+import java.util.Set;
+
+/** Main activity which creates a StrokeManager and connects it to the DrawingView. */
+public class DigitalInkMainActivity extends AppCompatActivity
+ implements DownloadedModelsChangedListener {
+ private static final String TAG = "MLKDI.Activity";
+  private static final ImmutableMap<String, String> NON_TEXT_MODELS =
+ ImmutableMap.of(
+ "zxx-Zsym-x-autodraw",
+ "Autodraw",
+ "zxx-Zsye-x-emoji",
+ "Emoji",
+ "zxx-Zsym-x-shapes",
+ "Shapes");
+ @VisibleForTesting final StrokeManager strokeManager = new StrokeManager();
+  private ArrayAdapter<ModelLanguageContainer> languageAdapter;
+
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.activity_digital_ink_main);
+
+ Spinner languageSpinner = findViewById(R.id.languages_spinner);
+
+ DrawingView drawingView = findViewById(R.id.drawing_view);
+ StatusTextView statusTextView = findViewById(R.id.status_text_view);
+ drawingView.setStrokeManager(strokeManager);
+ statusTextView.setStrokeManager(strokeManager);
+
+ strokeManager.setStatusChangedListener(statusTextView);
+ strokeManager.setContentChangedListener(drawingView);
+ strokeManager.setDownloadedModelsChangedListener(this);
+ strokeManager.setClearCurrentInkAfterRecognition(true);
+ strokeManager.setTriggerRecognitionAfterInput(false);
+
+ languageAdapter = populateLanguageAdapter();
+ languageAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+ languageSpinner.setAdapter(languageAdapter);
+ strokeManager.refreshDownloadedModelsStatus();
+
+ languageSpinner.setOnItemSelectedListener(
+ new OnItemSelectedListener() {
+ @Override
+          public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
+ String languageCode =
+ ((ModelLanguageContainer) parent.getAdapter().getItem(position)).getLanguageTag();
+ if (languageCode == null) {
+ return;
+ }
+ Log.i(TAG, "Selected language: " + languageCode);
+ strokeManager.setActiveModel(languageCode);
+ }
+
+ @Override
+          public void onNothingSelected(AdapterView<?> parent) {
+ Log.i(TAG, "No language selected");
+ }
+ });
+
+ strokeManager.reset();
+ }
+
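+  // Button click handlers; assumed to be wired up via android:onClick in the
+  // activity layout.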
+ public void downloadClick(View v) {
+ strokeManager.download();
+ }
+
+ public void recognizeClick(View v) {
+ strokeManager.recognize();
+ }
+
+ public void clearClick(View v) {
+ strokeManager.reset();
+ DrawingView drawingView = findViewById(R.id.drawing_view);
+ drawingView.clear();
+ }
+
+ public void deleteClick(View v) {
+ strokeManager.deleteActiveModel();
+ }
+
+  private static class ModelLanguageContainer implements Comparable<ModelLanguageContainer> {
+ private final String label;
+ @Nullable private final String languageTag;
+ private boolean downloaded;
+
+ private ModelLanguageContainer(String label, @Nullable String languageTag) {
+ this.label = label;
+ this.languageTag = languageTag;
+ }
+
+ /**
+ * Populates and returns a real model identifier, with label, language tag and downloaded
+ * status.
+ */
+ public static ModelLanguageContainer createModelContainer(String label, String languageTag) {
+ // Offset the actual language labels for better readability
+ return new ModelLanguageContainer(label, languageTag);
+ }
+
+ /** Populates and returns a label only, without a language tag. */
+ public static ModelLanguageContainer createLabelOnly(String label) {
+ return new ModelLanguageContainer(label, null);
+ }
+
+ public String getLanguageTag() {
+ return languageTag;
+ }
+
+ public void setDownloaded(boolean downloaded) {
+ this.downloaded = downloaded;
+ }
+
+ @NonNull
+ @Override
+ public String toString() {
+ if (languageTag == null) {
+ return label;
+ } else if (downloaded) {
+ return " [D] " + label;
+ } else {
+ return " " + label;
+ }
+ }
+
+ @Override
+ public int compareTo(ModelLanguageContainer o) {
+ return label.compareTo(o.label);
+ }
+ }
+
+ @Override
+  public void onDownloadedModelsChanged(Set<String> downloadedLanguageTags) {
+ for (int i = 0; i < languageAdapter.getCount(); i++) {
+ ModelLanguageContainer container = languageAdapter.getItem(i);
+ container.setDownloaded(downloadedLanguageTags.contains(container.languageTag));
+ }
+ languageAdapter.notifyDataSetChanged();
+ }
+
+  private ArrayAdapter<ModelLanguageContainer> populateLanguageAdapter() {
+    ArrayAdapter<ModelLanguageContainer> languageAdapter =
+        new ArrayAdapter<>(this, android.R.layout.simple_spinner_item);
+ languageAdapter.add(ModelLanguageContainer.createLabelOnly("Select language"));
+ languageAdapter.add(ModelLanguageContainer.createLabelOnly("Non-text Models"));
+
+ // Manually add non-text models first
+ for (String languageTag : NON_TEXT_MODELS.keySet()) {
+ languageAdapter.add(
+ ModelLanguageContainer.createModelContainer(
+ NON_TEXT_MODELS.get(languageTag), languageTag));
+ }
+ languageAdapter.add(ModelLanguageContainer.createLabelOnly("Text Models"));
+
+    ImmutableSortedSet.Builder<ModelLanguageContainer> textModels =
+        ImmutableSortedSet.naturalOrder();
+ for (DigitalInkRecognitionModelIdentifier modelIdentifier :
+ DigitalInkRecognitionModelIdentifier.allModelIdentifiers()) {
+ if (NON_TEXT_MODELS.containsKey(modelIdentifier.getLanguageTag())) {
+ continue;
+ }
+
+ StringBuilder label = new StringBuilder();
+ label.append(new Locale(modelIdentifier.getLanguageSubtag()).getDisplayName());
+ if (modelIdentifier.getRegionSubtag() != null) {
+ label.append(" (").append(modelIdentifier.getRegionSubtag()).append(")");
+ }
+
+ if (modelIdentifier.getScriptSubtag() != null) {
+ label.append(", ").append(modelIdentifier.getScriptSubtag()).append(" Script");
+ }
+ textModels.add(
+ ModelLanguageContainer.createModelContainer(
+ label.toString(), modelIdentifier.getLanguageTag()));
+ }
+ languageAdapter.addAll(textModels.build());
+ return languageAdapter;
+ }
+}
diff --git a/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/DrawingView.java b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/DrawingView.java
new file mode 100644
index 0000000..f614532
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/DrawingView.java
@@ -0,0 +1,218 @@
+package com.google.mlkit.samples.vision.digitalink;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Paint;
+import android.graphics.Path;
+import android.graphics.Rect;
+import android.text.TextPaint;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.util.TypedValue;
+import android.view.MotionEvent;
+import android.view.View;
+import com.google.mlkit.samples.vision.digitalink.StrokeManager.ContentChangedListener;
+import com.google.mlkit.vision.digitalink.Ink;
+import java.util.List;
+
+/**
+ * Main view for rendering content.
+ *
+ * <p>The view accepts touch inputs, renders them on screen, and passes the content to the
+ * StrokeManager. The view is also able to draw content from the StrokeManager.
+ */
+public class DrawingView extends View implements ContentChangedListener {
+ private static final String TAG = "MLKD.DrawingView";
+ private static final int STROKE_WIDTH_DP = 3;
+ private static final int MIN_BB_WIDTH = 10;
+ private static final int MIN_BB_HEIGHT = 10;
+ private static final int MAX_BB_WIDTH = 256;
+ private static final int MAX_BB_HEIGHT = 256;
+
+ private final Paint recognizedStrokePaint;
+ private final TextPaint textPaint;
+ private final Paint currentStrokePaint;
+ private final Paint canvasPaint;
+
+ private final Path currentStroke;
+ private Canvas drawCanvas;
+ private Bitmap canvasBitmap;
+ private StrokeManager strokeManager;
+
+ public DrawingView(Context context) {
+ this(context, null);
+ }
+
+ public DrawingView(Context context, AttributeSet attributeSet) {
+ super(context, attributeSet);
+ currentStrokePaint = new Paint();
+ currentStrokePaint.setColor(0xFFFF00FF); // pink.
+ currentStrokePaint.setAntiAlias(true);
+ // Set stroke width based on display density.
+ currentStrokePaint.setStrokeWidth(
+ TypedValue.applyDimension(
+ TypedValue.COMPLEX_UNIT_DIP, STROKE_WIDTH_DP, getResources().getDisplayMetrics()));
+ currentStrokePaint.setStyle(Paint.Style.STROKE);
+ currentStrokePaint.setStrokeJoin(Paint.Join.ROUND);
+ currentStrokePaint.setStrokeCap(Paint.Cap.ROUND);
+
+ recognizedStrokePaint = new Paint(currentStrokePaint);
+ recognizedStrokePaint.setColor(0xFFFFCCFF); // pale pink.
+
+ textPaint = new TextPaint();
+ textPaint.setColor(0xFF33CC33); // green.
+
+ currentStroke = new Path();
+ canvasPaint = new Paint(Paint.DITHER_FLAG);
+ }
+
+ private static Rect computeBoundingBox(Ink ink) {
+ float top = Float.MAX_VALUE;
+ float left = Float.MAX_VALUE;
+ float bottom = Float.MIN_VALUE;
+ float right = Float.MIN_VALUE;
+ for (Ink.Stroke s : ink.getStrokes()) {
+ for (Ink.Point p : s.getPoints()) {
+ top = Math.min(top, p.getY());
+ left = Math.min(left, p.getX());
+ bottom = Math.max(bottom, p.getY());
+ right = Math.max(right, p.getX());
+ }
+ }
+ float centerX = (left + right) / 2;
+ float centerY = (top + bottom) / 2;
+ Rect bb = new Rect((int) left, (int) top, (int) right, (int) bottom);
+ // Enforce a minimum size of the bounding box such that recognitions for small inks are readable
+ bb.union(
+ (int) (centerX - MIN_BB_WIDTH / 2),
+ (int) (centerY - MIN_BB_HEIGHT / 2),
+ (int) (centerX + MIN_BB_WIDTH / 2),
+ (int) (centerY + MIN_BB_HEIGHT / 2));
+ // Enforce a maximum size of the bounding box, to ensure Emoji characters get displayed
+ // correctly
+ if (bb.width() > MAX_BB_WIDTH) {
+ bb.set(bb.centerX() - MAX_BB_WIDTH / 2, bb.top, bb.centerX() + MAX_BB_WIDTH / 2, bb.bottom);
+ }
+ if (bb.height() > MAX_BB_HEIGHT) {
+ bb.set(bb.left, bb.centerY() - MAX_BB_HEIGHT / 2, bb.right, bb.centerY() + MAX_BB_HEIGHT / 2);
+ }
+ return bb;
+ }
+
+ void setStrokeManager(StrokeManager strokeManager) {
+ this.strokeManager = strokeManager;
+ }
+
+ @Override
+ protected void onSizeChanged(int width, int height, int oldWidth, int oldHeight) {
+ Log.i(TAG, "onSizeChanged");
+ canvasBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
+ drawCanvas = new Canvas(canvasBitmap);
+ invalidate();
+ }
+
+ public void redrawContent() {
+ clear();
+ Ink currentInk = strokeManager.getCurrentInk();
+ drawInk(currentInk, currentStrokePaint);
+
+ List<RecognitionTask.RecognizedInk> content = strokeManager.getContent();
+ for (RecognitionTask.RecognizedInk ri : content) {
+ drawInk(ri.ink, recognizedStrokePaint);
+ final Rect bb = computeBoundingBox(ri.ink);
+ drawTextIntoBoundingBox(ri.text, bb, textPaint);
+ }
+ invalidate();
+ }
+
+ private void drawTextIntoBoundingBox(String text, Rect bb, TextPaint textPaint) {
+ final float arbitraryFixedSize = 20.f;
+ // Set an arbitrary text size to learn how high the text will be.
+ textPaint.setTextSize(arbitraryFixedSize);
+ textPaint.setTextScaleX(1.f);
+
+ // Now determine the size of the rendered text with these settings.
+ Rect r = new Rect();
+ textPaint.getTextBounds(text, 0, text.length(), r);
+
+ // Adjust height such that target height is met.
+ float textSize = arbitraryFixedSize * (float) bb.height() / (float) r.height();
+ textPaint.setTextSize(textSize);
+
+ // Redetermine the size of the rendered text with the new settings.
+ textPaint.getTextBounds(text, 0, text.length(), r);
+
+ // Adjust scaleX to squeeze the text.
+ textPaint.setTextScaleX((float) bb.width() / (float) r.width());
+
+ // And finally draw the text.
+ drawCanvas.drawText(text, bb.left, bb.bottom, textPaint);
+ }
+
+ private void drawInk(Ink ink, Paint paint) {
+ for (Ink.Stroke s : ink.getStrokes()) {
+ drawStroke(s, paint);
+ }
+ }
+
+ private void drawStroke(Ink.Stroke s, Paint paint) {
+ Log.i(TAG, "drawstroke");
+ Path path = null;
+ for (Ink.Point p : s.getPoints()) {
+ if (path == null) {
+ path = new Path();
+ path.moveTo(p.getX(), p.getY());
+ } else {
+ path.lineTo(p.getX(), p.getY());
+ }
+ }
+ drawCanvas.drawPath(path, paint);
+ }
+
+ public void clear() {
+ currentStroke.reset();
+ onSizeChanged(
+ canvasBitmap.getWidth(),
+ canvasBitmap.getHeight(),
+ canvasBitmap.getWidth(),
+ canvasBitmap.getHeight());
+ }
+
+ @Override
+ protected void onDraw(Canvas canvas) {
+ canvas.drawBitmap(canvasBitmap, 0, 0, canvasPaint);
+ canvas.drawPath(currentStroke, currentStrokePaint);
+ }
+
+ @Override
+ public boolean onTouchEvent(MotionEvent event) {
+ int action = event.getActionMasked();
+ float x = event.getX();
+ float y = event.getY();
+
+ switch (action) {
+ case MotionEvent.ACTION_DOWN:
+ currentStroke.moveTo(x, y);
+ break;
+ case MotionEvent.ACTION_MOVE:
+ currentStroke.lineTo(x, y);
+ break;
+ case MotionEvent.ACTION_UP:
+ currentStroke.lineTo(x, y);
+ drawCanvas.drawPath(currentStroke, currentStrokePaint);
+ currentStroke.reset();
+ break;
+ default:
+ break;
+ }
+ strokeManager.addNewTouchEvent(event);
+ invalidate();
+ return true;
+ }
+
+ @Override
+ public void onContentChanged() {
+ redrawContent();
+ }
+}
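The two-pass fit in drawTextIntoBoundingBox above can be read as plain arithmetic. A sketch with illustrative measurements (none taken from a real device):

    float probeSize = 20f;                              // arbitrary size used only to measure
    float measuredH = 14f, measuredW = 90f;             // getTextBounds() result at probeSize
    float boxH = 56f, boxW = 120f;                      // ink bounding box dimensions
    float textSize = probeSize * boxH / measuredH;      // pass 1: 80f, matches the box height
    float rescaledW = measuredW * textSize / probeSize; // ~360f, expected width after pass 1
    float textScaleX = boxW / rescaledW;                // pass 2: ~0.33f, squeezes to the box width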
diff --git a/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/ModelManager.java b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/ModelManager.java
new file mode 100644
index 0000000..f204d11
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/ModelManager.java
@@ -0,0 +1,116 @@
+package com.google.mlkit.samples.vision.digitalink;
+
+import android.util.Log;
+import com.google.android.gms.tasks.Task;
+import com.google.android.gms.tasks.Tasks;
+import com.google.mlkit.common.MlKitException;
+import com.google.mlkit.common.model.DownloadConditions;
+import com.google.mlkit.common.model.RemoteModelManager;
+import com.google.mlkit.vision.digitalink.DigitalInkRecognition;
+import com.google.mlkit.vision.digitalink.DigitalInkRecognitionModel;
+import com.google.mlkit.vision.digitalink.DigitalInkRecognitionModelIdentifier;
+import com.google.mlkit.vision.digitalink.DigitalInkRecognizer;
+import com.google.mlkit.vision.digitalink.DigitalInkRecognizerOptions;
+import java.util.HashSet;
+import java.util.Set;
+
+/** Class to manage model downloading, deletion, and selection. */
+public class ModelManager {
+
+ private static final String TAG = "MLKD.ModelManager";
+ private DigitalInkRecognitionModel model;
+ private DigitalInkRecognizer recognizer;
+ final RemoteModelManager remoteModelManager = RemoteModelManager.getInstance();
+
+ public String setModel(String languageTag) {
+ // Clear the old model and recognizer.
+ model = null;
+ if (recognizer != null) {
+ recognizer.close();
+ }
+ recognizer = null;
+
+ // Try to parse the languageTag and get a model from it.
+ DigitalInkRecognitionModelIdentifier modelIdentifier;
+ try {
+ modelIdentifier = DigitalInkRecognitionModelIdentifier.fromLanguageTag(languageTag);
+ } catch (MlKitException e) {
+ Log.e(TAG, "Failed to parse language '" + languageTag + "'");
+ return "";
+ }
+ if (modelIdentifier == null) {
+ return "No model for language: " + languageTag;
+ }
+
+ // Initialize the model and recognizer.
+ model = DigitalInkRecognitionModel.builder(modelIdentifier).build();
+ recognizer =
+ DigitalInkRecognition.getClient(DigitalInkRecognizerOptions.builder(model).build());
+ Log.i(
+ TAG,
+ "Model set for language '"
+ + languageTag
+ + "' ('"
+ + modelIdentifier.getLanguageTag()
+ + "').");
+ return "Model set for language: " + languageTag;
+ }
+
+ public DigitalInkRecognizer getRecognizer() {
+ return recognizer;
+ }
+
+ public Task<Boolean> checkIsModelDownloaded() {
+ return remoteModelManager.isModelDownloaded(model);
+ }
+
+ public Task<String> deleteActiveModel() {
+ if (model == null) {
+ Log.i(TAG, "Model not set");
+ return Tasks.forResult("Model not set");
+ }
+ return checkIsModelDownloaded()
+ .onSuccessTask(
+ result -> {
+ if (!result) {
+ return Tasks.forResult("Model not downloaded yet");
+ }
+ return remoteModelManager
+ .deleteDownloadedModel(model)
+ .onSuccessTask(
+ aVoid -> {
+ Log.i(TAG, "Model successfully deleted");
+ return Tasks.forResult("Model successfully deleted");
+ });
+ })
+ .addOnFailureListener(e -> Log.e(TAG, "Error while deleting model: " + e));
+ }
+
+ public Task<Set<String>> getDownloadedModelLanguages() {
+ return remoteModelManager
+ .getDownloadedModels(DigitalInkRecognitionModel.class)
+ .onSuccessTask(
+ (remoteModels) -> {
+ Set<String> result = new HashSet<>();
+ for (DigitalInkRecognitionModel model : remoteModels) {
+ result.add(model.getModelIdentifier().getLanguageTag());
+ }
+ Log.i(TAG, "Downloaded models for languages:" + result);
+ return Tasks.forResult(result);
+ });
+ }
+
+ public Task<String> download() {
+ if (model == null) {
+ return Tasks.forResult("Model not selected.");
+ }
+ return remoteModelManager
+ .download(model, new DownloadConditions.Builder().build())
+ .onSuccessTask(
+ aVoid -> {
+ Log.i(TAG, "Model download succeeded.");
+ return Tasks.forResult("Downloaded model successfully");
+ })
+ .addOnFailureListener(e -> Log.e(TAG, "Error while downloading the model: " + e));
+ }
+}
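A minimal sketch of how this ModelManager is typically driven (the "en-US" tag and the "Demo" log tag are illustrative; error handling elided):

    ModelManager modelManager = new ModelManager();
    modelManager.setModel("en-US"); // parses the tag and builds the recognizer
    modelManager.download()         // fetches the model if needed
        .addOnSuccessListener(msg -> Log.i("Demo", msg));
    modelManager.checkIsModelDownloaded()
        .addOnSuccessListener(downloaded -> Log.i("Demo", "downloaded=" + downloaded));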
diff --git a/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/RecognitionTask.java b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/RecognitionTask.java
new file mode 100644
index 0000000..faf048d
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/RecognitionTask.java
@@ -0,0 +1,68 @@
+package com.google.mlkit.samples.vision.digitalink;
+
+import androidx.annotation.Nullable;
+import android.util.Log;
+import com.google.android.gms.tasks.Task;
+import com.google.android.gms.tasks.Tasks;
+import com.google.mlkit.vision.digitalink.DigitalInkRecognizer;
+import com.google.mlkit.vision.digitalink.Ink;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/** Task to run asynchronously to obtain recognition results. */
+public class RecognitionTask {
+
+ private static final String TAG = "MLKD.RecognitionTask";
+ private final DigitalInkRecognizer recognizer;
+ private final Ink ink;
+ @Nullable private RecognizedInk currentResult;
+ private final AtomicBoolean cancelled;
+ private final AtomicBoolean done;
+
+ public RecognitionTask(DigitalInkRecognizer recognizer, Ink ink) {
+ this.recognizer = recognizer;
+ this.ink = ink;
+ this.currentResult = null;
+ cancelled = new AtomicBoolean(false);
+ done = new AtomicBoolean(false);
+ }
+
+ public void cancel() {
+ cancelled.set(true);
+ }
+
+ public boolean done() {
+ return done.get();
+ }
+
+ @Nullable
+ public RecognizedInk result() {
+ return this.currentResult;
+ }
+
+ /** Helper class that stores an ink along with the corresponding recognized text. */
+ public static class RecognizedInk {
+ public final Ink ink;
+ public final String text;
+
+ RecognizedInk(Ink ink, String text) {
+ this.ink = ink;
+ this.text = text;
+ }
+ }
+
+ public Task<String> run() {
+ Log.i(TAG, "RecoTask.run");
+ return recognizer
+ .recognize(this.ink)
+ .onSuccessTask(
+ result -> {
+ if (cancelled.get() || result.getCandidates().isEmpty()) {
+ return Tasks.forResult(null);
+ }
+ currentResult = new RecognizedInk(ink, result.getCandidates().get(0).getText());
+ Log.i(TAG, "result: " + currentResult.text);
+ done.set(true);
+ return Tasks.forResult(currentResult.text);
+ });
+ }
+}
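A sketch of driving RecognitionTask directly, assuming a recognizer and an ink were built elsewhere (the "Demo" log tag is illustrative):

    RecognitionTask task = new RecognitionTask(recognizer, ink);
    task.run()
        .addOnSuccessListener(text -> {
          // text is null when the task was cancelled or produced no candidates.
          if (task.done() && task.result() != null) {
            Log.i("Demo", "Recognized: " + task.result().text);
          }
        });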
diff --git a/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/StatusTextView.java b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/StatusTextView.java
new file mode 100644
index 0000000..8e9f5fd
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/StatusTextView.java
@@ -0,0 +1,34 @@
+package com.google.mlkit.samples.vision.digitalink;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.widget.TextView;
+import androidx.annotation.NonNull;
+import com.google.mlkit.samples.vision.digitalink.StrokeManager.StatusChangedListener;
+
+/**
+ * Status bar for the test app.
+ *
+ * <p>It is updated upon status changes announced by the StrokeManager.
+ */
+public class StatusTextView extends TextView implements StatusChangedListener {
+
+ private StrokeManager strokeManager;
+
+ public StatusTextView(@NonNull Context context) {
+ super(context);
+ }
+
+ public StatusTextView(Context context, AttributeSet attributeSet) {
+ super(context, attributeSet);
+ }
+
+ @Override
+ public void onStatusChanged() {
+ this.setText(this.strokeManager.getStatus());
+ }
+
+ void setStrokeManager(StrokeManager strokeManager) {
+ this.strokeManager = strokeManager;
+ }
+}
diff --git a/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/StrokeManager.java b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/StrokeManager.java
new file mode 100644
index 0000000..e51cb9f
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/StrokeManager.java
@@ -0,0 +1,260 @@
+package com.google.mlkit.samples.vision.digitalink;
+
+import android.os.Handler;
+import androidx.annotation.Nullable;
+import android.util.Log;
+import android.view.MotionEvent;
+import androidx.annotation.VisibleForTesting;
+import com.google.android.gms.tasks.Task;
+import com.google.android.gms.tasks.Tasks;
+import com.google.mlkit.vision.digitalink.Ink;
+import com.google.mlkit.vision.digitalink.Ink.Point;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+/** Manages the recognition logic and the content that has been added to the current page. */
+public class StrokeManager {
+
+ /** Interface to register to be notified of changes in the recognized content. */
+ public interface ContentChangedListener {
+
+ /** This method is called when the recognized content changes. */
+ void onContentChanged();
+ }
+
+ /** Interface to register to be notified of changes in the status. */
+ public interface StatusChangedListener {
+
+ /** This method is called when the status changes. */
+ void onStatusChanged();
+ }
+
+ /** Interface to register to be notified of changes in the downloaded model state. */
+ public interface DownloadedModelsChangedListener {
+
+ /** This method is called when the set of downloaded models changes. */
+ void onDownloadedModelsChanged(Set<String> downloadedLanguageTags);
+ }
+
+ @VisibleForTesting static final long CONVERSION_TIMEOUT_MS = 1000;
+ private static final String TAG = "MLKD.StrokeManager";
+ // This is a constant that is used as a message identifier to trigger the timeout.
+ private static final int TIMEOUT_TRIGGER = 1;
+ // For handling recognition and model downloading.
+ private RecognitionTask recognitionTask = null;
+ @VisibleForTesting ModelManager modelManager = new ModelManager();
+ // Managing the recognition queue.
+ private final List<RecognitionTask.RecognizedInk> content = new ArrayList<>();
+ // Managing ink currently drawn.
+ private Ink.Stroke.Builder strokeBuilder = Ink.Stroke.builder();
+ private Ink.Builder inkBuilder = Ink.builder();
+ private boolean stateChangedSinceLastRequest = false;
+ @Nullable private ContentChangedListener contentChangedListener = null;
+ @Nullable private StatusChangedListener statusChangedListener = null;
+ @Nullable private DownloadedModelsChangedListener downloadedModelsChangedListener = null;
+
+ private boolean triggerRecognitionAfterInput = true;
+ private boolean clearCurrentInkAfterRecognition = true;
+ private String status = "";
+
+ public void setTriggerRecognitionAfterInput(boolean shouldTrigger) {
+ triggerRecognitionAfterInput = shouldTrigger;
+ }
+
+ public void setClearCurrentInkAfterRecognition(boolean shouldClear) {
+ clearCurrentInkAfterRecognition = shouldClear;
+ }
+
+ // Handler to handle the UI Timeout.
+ // This handler is only used to trigger the UI timeout. Each time a UI interaction happens,
+ // the timer is reset by clearing the queue on this handler and sending a new delayed message (in
+ // addNewTouchEvent).
+ private final Handler uiHandler =
+ new Handler(
+ msg -> {
+ if (msg.what == TIMEOUT_TRIGGER) {
+ Log.i(TAG, "Handling timeout trigger.");
+ commitResult();
+ return true;
+ }
+ // In current use this branch is never reached, because only TIMEOUT_TRIGGER
+ // messages are sent to this handler, but the callback must return a value on
+ // every path. Returning false indicates that the message wasn't handled.
+ return false;
+ });
+
+ private void setStatus(String newStatus) {
+ status = newStatus;
+ if (statusChangedListener != null) {
+ statusChangedListener.onStatusChanged();
+ }
+ }
+
+ private void commitResult() {
+ if (recognitionTask.done() && recognitionTask.result() != null) {
+ content.add(recognitionTask.result());
+ setStatus("Successful recognition: " + recognitionTask.result().text);
+ if (clearCurrentInkAfterRecognition) {
+ resetCurrentInk();
+ }
+ if (contentChangedListener != null) {
+ contentChangedListener.onContentChanged();
+ }
+ }
+ }
+
+ public void reset() {
+ Log.i(TAG, "reset");
+ resetCurrentInk();
+ content.clear();
+ if (recognitionTask != null && !recognitionTask.done()) {
+ recognitionTask.cancel();
+ }
+ setStatus("");
+ }
+
+ private void resetCurrentInk() {
+ inkBuilder = Ink.builder();
+ strokeBuilder = Ink.Stroke.builder();
+ stateChangedSinceLastRequest = false;
+ }
+
+ public Ink getCurrentInk() {
+ return inkBuilder.build();
+ }
+
+ /**
+ * This method is called when a new touch event happens on the drawing client and notifies the
+ * StrokeManager of new content being added.
+ *
+ * <p>This method takes care of triggering the UI timeout and scheduling recognitions on the
+ * background thread.
+ *
+ * @return whether the touch event was handled.
+ */
+ public boolean addNewTouchEvent(MotionEvent event) {
+ int action = event.getActionMasked();
+ float x = event.getX();
+ float y = event.getY();
+ long t = System.currentTimeMillis();
+
+ // A new event happened -> clear all pending timeout messages.
+ uiHandler.removeMessages(TIMEOUT_TRIGGER);
+
+ switch (action) {
+ case MotionEvent.ACTION_DOWN:
+ case MotionEvent.ACTION_MOVE:
+ strokeBuilder.addPoint(Point.create(x, y, t));
+ break;
+ case MotionEvent.ACTION_UP:
+ strokeBuilder.addPoint(Point.create(x, y, t));
+ inkBuilder.addStroke(strokeBuilder.build());
+ strokeBuilder = Ink.Stroke.builder();
+ stateChangedSinceLastRequest = true;
+ if (triggerRecognitionAfterInput) {
+ recognize();
+ }
+ break;
+ default:
+ // Indicate touch event wasn't handled.
+ return false;
+ }
+
+ return true;
+ }
+
+ // Listeners to update the drawing and status.
+ public void setContentChangedListener(ContentChangedListener contentChangedListener) {
+ this.contentChangedListener = contentChangedListener;
+ }
+
+ public void setStatusChangedListener(StatusChangedListener statusChangedListener) {
+ this.statusChangedListener = statusChangedListener;
+ }
+
+ public void setDownloadedModelsChangedListener(
+ DownloadedModelsChangedListener downloadedModelsChangedListener) {
+ this.downloadedModelsChangedListener = downloadedModelsChangedListener;
+ }
+
+ public List<RecognitionTask.RecognizedInk> getContent() {
+ return content;
+ }
+
+ public String getStatus() {
+ return status;
+ }
+
+ // Model downloading / deleting / setting.
+
+ public void setActiveModel(String languageTag) {
+ setStatus(modelManager.setModel(languageTag));
+ }
+
+ public Task<Void> deleteActiveModel() {
+ return modelManager
+ .deleteActiveModel()
+ .addOnSuccessListener(unused -> refreshDownloadedModelsStatus())
+ .onSuccessTask(
+ status -> {
+ setStatus(status);
+ return Tasks.forResult(null);
+ });
+ }
+
+ public Task<Void> download() {
+ setStatus("Download started.");
+ return modelManager
+ .download()
+ .addOnSuccessListener(unused -> refreshDownloadedModelsStatus())
+ .onSuccessTask(
+ status -> {
+ setStatus(status);
+ return Tasks.forResult(null);
+ });
+ }
+
+ // Recognition-related.
+
+ public Task<String> recognize() {
+ if (!stateChangedSinceLastRequest || inkBuilder.isEmpty()) {
+ setStatus("No recognition, ink unchanged or empty");
+ return Tasks.forResult(null);
+ }
+ if (modelManager.getRecognizer() == null) {
+ setStatus("Recognizer not set");
+ return Tasks.forResult(null);
+ }
+
+ return modelManager
+ .checkIsModelDownloaded()
+ .onSuccessTask(
+ result -> {
+ if (!result) {
+ setStatus("Model not downloaded yet");
+ return Tasks.forResult(null);
+ }
+
+ stateChangedSinceLastRequest = false;
+ recognitionTask =
+ new RecognitionTask(modelManager.getRecognizer(), inkBuilder.build());
+ uiHandler.sendMessageDelayed(
+ uiHandler.obtainMessage(TIMEOUT_TRIGGER), CONVERSION_TIMEOUT_MS);
+ return recognitionTask.run();
+ });
+ }
+
+ public void refreshDownloadedModelsStatus() {
+ modelManager
+ .getDownloadedModelLanguages()
+ .addOnSuccessListener(
+ downloadedLanguageTags -> {
+ if (downloadedModelsChangedListener != null) {
+ downloadedModelsChangedListener.onDownloadedModelsChanged(downloadedLanguageTags);
+ }
+ });
+ }
+}
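Putting the pieces together, a sketch of the intended call flow from a drawing surface (the language tag is illustrative):

    StrokeManager strokeManager = new StrokeManager();
    strokeManager.setActiveModel("en-US"); // must be set before recognize()
    strokeManager.download();
    // The view forwards every MotionEvent:
    //   strokeManager.addNewTouchEvent(event);
    // Each ACTION_UP closes a stroke; recognize() then queues a RecognitionTask
    // and arms the CONVERSION_TIMEOUT_MS timer that commits the result.
    strokeManager.recognize();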
diff --git a/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/DigitalInkMainActivity.kt b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/DigitalInkMainActivity.kt
new file mode 100644
index 0000000..c2cb624
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/DigitalInkMainActivity.kt
@@ -0,0 +1,161 @@
+package com.google.mlkit.samples.vision.digitalink.kotlin
+
+import android.os.Bundle
+import androidx.appcompat.app.AppCompatActivity
+import android.util.Log
+import android.view.View
+import android.widget.AdapterView
+import android.widget.AdapterView.OnItemSelectedListener
+import android.widget.ArrayAdapter
+import android.widget.Spinner
+import androidx.annotation.VisibleForTesting
+import com.google.common.collect.ImmutableMap
+import com.google.common.collect.ImmutableSortedSet
+import com.google.mlkit.samples.vision.digitalink.R
+import com.google.mlkit.vision.digitalink.DigitalInkRecognitionModelIdentifier
+import java.util.Locale
+
+/** Main activity which creates a StrokeManager and connects it to the DrawingView. */
+class DigitalInkMainActivity : AppCompatActivity(), StrokeManager.DownloadedModelsChangedListener {
+ @JvmField @VisibleForTesting val strokeManager = StrokeManager()
+ private lateinit var languageAdapter: ArrayAdapter<ModelLanguageContainer>
+
+ public override fun onCreate(savedInstanceState: Bundle?) {
+ super.onCreate(savedInstanceState)
+ setContentView(R.layout.activity_digital_ink_main_kotlin)
+ val languageSpinner = findViewById<Spinner>(R.id.languages_spinner)
+ val drawingView = findViewById<DrawingView>(R.id.drawing_view)
+ val statusTextView = findViewById<StatusTextView>(R.id.status_text_view)
+ drawingView.setStrokeManager(strokeManager)
+ statusTextView.setStrokeManager(strokeManager)
+ strokeManager.setStatusChangedListener(statusTextView)
+ strokeManager.setContentChangedListener(drawingView)
+ strokeManager.setDownloadedModelsChangedListener(this)
+ strokeManager.setClearCurrentInkAfterRecognition(true)
+ strokeManager.setTriggerRecognitionAfterInput(false)
+ languageAdapter = populateLanguageAdapter()
+ languageAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
+ languageSpinner.adapter = languageAdapter
+ strokeManager.refreshDownloadedModelsStatus()
+
+ languageSpinner.onItemSelectedListener =
+ object : OnItemSelectedListener {
+ override fun onItemSelected(parent: AdapterView<*>, view: View, position: Int, id: Long) {
+ val languageCode =
+ (parent.adapter.getItem(position) as ModelLanguageContainer).languageTag ?: return
+ Log.i(TAG, "Selected language: $languageCode")
+ strokeManager.setActiveModel(languageCode)
+ }
+
+ override fun onNothingSelected(parent: AdapterView<*>?) {
+ Log.i(TAG, "No language selected")
+ }
+ }
+ strokeManager.reset()
+ }
+
+ fun downloadClick(v: View?) {
+ strokeManager.download()
+ }
+
+ fun recognizeClick(v: View?) {
+ strokeManager.recognize()
+ }
+
+ fun clearClick(v: View?) {
+ strokeManager.reset()
+ val drawingView = findViewById<DrawingView>(R.id.drawing_view)
+ drawingView.clear()
+ }
+
+ fun deleteClick(v: View?) {
+ strokeManager.deleteActiveModel()
+ }
+
+ private class ModelLanguageContainer
+ private constructor(private val label: String, val languageTag: String?) :
+ Comparable<ModelLanguageContainer> {
+
+ var downloaded: Boolean = false
+
+ override fun toString(): String {
+ return when {
+ languageTag == null -> label
+ downloaded -> " [D] $label"
+ else -> " $label"
+ }
+ }
+
+ override fun compareTo(other: ModelLanguageContainer): Int {
+ return label.compareTo(other.label)
+ }
+
+ companion object {
+ /** Creates a container for a real model, with display label and language tag. */
+ fun createModelContainer(label: String, languageTag: String?): ModelLanguageContainer {
+ return ModelLanguageContainer(label, languageTag)
+ }
+
+ /** Creates a label-only container, without a language tag. */
+ fun createLabelOnly(label: String): ModelLanguageContainer {
+ return ModelLanguageContainer(label, null)
+ }
+ }
+ }
+
+ private fun populateLanguageAdapter(): ArrayAdapter<ModelLanguageContainer> {
+ val languageAdapter =
+ ArrayAdapter<ModelLanguageContainer>(this, android.R.layout.simple_spinner_item)
+ languageAdapter.add(ModelLanguageContainer.createLabelOnly("Select language"))
+ languageAdapter.add(ModelLanguageContainer.createLabelOnly("Non-text Models"))
+
+ // Manually add non-text models first
+ for (languageTag in NON_TEXT_MODELS.keys) {
+ languageAdapter.add(
+ ModelLanguageContainer.createModelContainer(NON_TEXT_MODELS[languageTag]!!, languageTag)
+ )
+ }
+ languageAdapter.add(ModelLanguageContainer.createLabelOnly("Text Models"))
+ val textModels = ImmutableSortedSet.naturalOrder<ModelLanguageContainer>()
+ for (modelIdentifier in DigitalInkRecognitionModelIdentifier.allModelIdentifiers()) {
+ if (NON_TEXT_MODELS.containsKey(modelIdentifier.languageTag)) {
+ continue
+ }
+ val label = StringBuilder()
+ label.append(Locale(modelIdentifier.languageSubtag).displayName)
+ if (modelIdentifier.regionSubtag != null) {
+ label.append(" (").append(modelIdentifier.regionSubtag).append(")")
+ }
+ if (modelIdentifier.scriptSubtag != null) {
+ label.append(", ").append(modelIdentifier.scriptSubtag).append(" Script")
+ }
+ textModels.add(
+ ModelLanguageContainer.createModelContainer(label.toString(), modelIdentifier.languageTag)
+ )
+ }
+ languageAdapter.addAll(textModels.build())
+ return languageAdapter
+ }
+
+ override fun onDownloadedModelsChanged(downloadedLanguageTags: Set<String>) {
+ for (i in 0 until languageAdapter.count) {
+ val container = languageAdapter.getItem(i)!!
+ container.downloaded = downloadedLanguageTags.contains(container.languageTag)
+ }
+ languageAdapter.notifyDataSetChanged()
+ }
+
+ companion object {
+ private const val TAG = "MLKDI.Activity"
+ private val NON_TEXT_MODELS =
+ ImmutableMap.of(
+ "zxx-Zsym-x-autodraw",
+ "Autodraw",
+ "zxx-Zsye-x-emoji",
+ "Emoji",
+ "zxx-Zsym-x-shapes",
+ "Shapes"
+ )
+ }
+}
diff --git a/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/DrawingView.kt b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/DrawingView.kt
new file mode 100644
index 0000000..a5dd1e4
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/DrawingView.kt
@@ -0,0 +1,210 @@
+package com.google.mlkit.samples.vision.digitalink.kotlin
+
+import android.content.Context
+import android.graphics.Bitmap
+import android.graphics.Canvas
+import android.graphics.Paint
+import android.graphics.Path
+import android.graphics.Rect
+import android.text.TextPaint
+import android.util.AttributeSet
+import android.util.Log
+import android.util.TypedValue
+import android.view.MotionEvent
+import android.view.View
+import com.google.mlkit.samples.vision.digitalink.kotlin.StrokeManager.ContentChangedListener
+import com.google.mlkit.vision.digitalink.Ink
+import com.google.mlkit.vision.digitalink.Ink.Stroke
+
+/**
+ * Main view for rendering content.
+ *
+ * <p>The view accepts touch inputs, renders them on screen, and passes the content to the
+ * StrokeManager. The view is also able to draw content from the StrokeManager.
+ */
+class DrawingView @JvmOverloads constructor(
+ context: Context?,
+ attributeSet: AttributeSet? = null
+) :
+ View(context, attributeSet), ContentChangedListener {
+ private val recognizedStrokePaint: Paint
+ private val textPaint: TextPaint
+ private val currentStrokePaint: Paint
+ private val canvasPaint: Paint
+ private val currentStroke: Path
+ private lateinit var drawCanvas: Canvas
+ private lateinit var canvasBitmap: Bitmap
+ private lateinit var strokeManager: StrokeManager
+ fun setStrokeManager(strokeManager: StrokeManager) {
+ this.strokeManager = strokeManager
+ }
+
+ override fun onSizeChanged(
+ width: Int,
+ height: Int,
+ oldWidth: Int,
+ oldHeight: Int
+ ) {
+ Log.i(TAG, "onSizeChanged")
+ canvasBitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
+ drawCanvas = Canvas(canvasBitmap)
+ invalidate()
+ }
+
+ fun redrawContent() {
+ clear()
+ val currentInk = strokeManager.currentInk
+ drawInk(currentInk, currentStrokePaint)
+ val content = strokeManager.getContent()
+ for (ri in content) {
+ drawInk(ri.ink, recognizedStrokePaint)
+ val bb = computeBoundingBox(ri.ink)
+ drawTextIntoBoundingBox(ri.text ?: "", bb, textPaint)
+ }
+ invalidate()
+ }
+
+ private fun drawTextIntoBoundingBox(text: String, bb: Rect, textPaint: TextPaint) {
+ val arbitraryFixedSize = 20f
+ // Set an arbitrary text size to learn how high the text will be.
+ textPaint.textSize = arbitraryFixedSize
+ textPaint.textScaleX = 1f
+
+ // Now determine the size of the rendered text with these settings.
+ val r = Rect()
+ textPaint.getTextBounds(text, 0, text.length, r)
+
+ // Adjust height such that target height is met.
+ val textSize = arbitraryFixedSize * bb.height().toFloat() / r.height().toFloat()
+ textPaint.textSize = textSize
+
+ // Redetermine the size of the rendered text with the new settings.
+ textPaint.getTextBounds(text, 0, text.length, r)
+
+ // Adjust scaleX to squeeze the text.
+ textPaint.textScaleX = bb.width().toFloat() / r.width().toFloat()
+
+ // And finally draw the text.
+ drawCanvas.drawText(text, bb.left.toFloat(), bb.bottom.toFloat(), textPaint)
+ }
+
+ private fun drawInk(ink: Ink, paint: Paint) {
+ for (s in ink.strokes) {
+ drawStroke(s, paint)
+ }
+ }
+
+ private fun drawStroke(s: Stroke, paint: Paint) {
+ Log.i(TAG, "drawstroke")
+ val path = Path()
+ path.moveTo(s.points[0].x, s.points[0].y)
+ for (p in s.points.drop(1)) {
+ path.lineTo(p.x, p.y)
+ }
+ drawCanvas.drawPath(path, paint)
+ }
+
+ fun clear() {
+ currentStroke.reset()
+ onSizeChanged(
+ canvasBitmap.width,
+ canvasBitmap.height,
+ canvasBitmap.width,
+ canvasBitmap.height
+ )
+ }
+
+ override fun onDraw(canvas: Canvas) {
+ canvas.drawBitmap(canvasBitmap, 0f, 0f, canvasPaint)
+ canvas.drawPath(currentStroke, currentStrokePaint)
+ }
+
+ override fun onTouchEvent(event: MotionEvent): Boolean {
+ val action = event.actionMasked
+ val x = event.x
+ val y = event.y
+ when (action) {
+ MotionEvent.ACTION_DOWN -> currentStroke.moveTo(x, y)
+ MotionEvent.ACTION_MOVE -> currentStroke.lineTo(x, y)
+ MotionEvent.ACTION_UP -> {
+ currentStroke.lineTo(x, y)
+ drawCanvas.drawPath(currentStroke, currentStrokePaint)
+ currentStroke.reset()
+ }
+ else -> {
+ }
+ }
+ strokeManager.addNewTouchEvent(event)
+ invalidate()
+ return true
+ }
+
+ override fun onContentChanged() {
+ redrawContent()
+ }
+
+ companion object {
+ private const val TAG = "MLKD.DrawingView"
+ private const val STROKE_WIDTH_DP = 3
+ private const val MIN_BB_WIDTH = 10
+ private const val MIN_BB_HEIGHT = 10
+ private const val MAX_BB_WIDTH = 256
+ private const val MAX_BB_HEIGHT = 256
+ private fun computeBoundingBox(ink: Ink): Rect {
+ var top = Float.MAX_VALUE
+ var left = Float.MAX_VALUE
+ var bottom = Float.MIN_VALUE
+ var right = Float.MIN_VALUE
+ for (s in ink.strokes) {
+ for (p in s.points) {
+ top = Math.min(top, p.y)
+ left = Math.min(left, p.x)
+ bottom = Math.max(bottom, p.y)
+ right = Math.max(right, p.x)
+ }
+ }
+ val centerX = (left + right) / 2
+ val centerY = (top + bottom) / 2
+ val bb =
+ Rect(left.toInt(), top.toInt(), right.toInt(), bottom.toInt())
+ // Enforce a minimum size of the bounding box such that recognitions for small inks are readable
+ bb.union(
+ (centerX - MIN_BB_WIDTH / 2).toInt(),
+ (centerY - MIN_BB_HEIGHT / 2).toInt(),
+ (centerX + MIN_BB_WIDTH / 2).toInt(),
+ (centerY + MIN_BB_HEIGHT / 2).toInt()
+ )
+ // Enforce a maximum size of the bounding box, to ensure Emoji characters get displayed
+ // correctly
+ if (bb.width() > MAX_BB_WIDTH) {
+ bb.set(bb.centerX() - MAX_BB_WIDTH / 2, bb.top, bb.centerX() + MAX_BB_WIDTH / 2, bb.bottom)
+ }
+ if (bb.height() > MAX_BB_HEIGHT) {
+ bb.set(bb.left, bb.centerY() - MAX_BB_HEIGHT / 2, bb.right, bb.centerY() + MAX_BB_HEIGHT / 2)
+ }
+ return bb
+ }
+ }
+
+ init {
+ currentStrokePaint = Paint()
+ currentStrokePaint.color = -0xff01 // pink.
+ currentStrokePaint.isAntiAlias = true
+ // Set stroke width based on display density.
+ currentStrokePaint.strokeWidth = TypedValue.applyDimension(
+ TypedValue.COMPLEX_UNIT_DIP,
+ STROKE_WIDTH_DP.toFloat(),
+ resources.displayMetrics
+ )
+ currentStrokePaint.style = Paint.Style.STROKE
+ currentStrokePaint.strokeJoin = Paint.Join.ROUND
+ currentStrokePaint.strokeCap = Paint.Cap.ROUND
+ recognizedStrokePaint = Paint(currentStrokePaint)
+ recognizedStrokePaint.color = -0x3301 // pale pink.
+ textPaint = TextPaint()
+ textPaint.color = -0xcc33cd // green.
+ currentStroke = Path()
+ canvasPaint = Paint(Paint.DITHER_FLAG)
+ }
+}
diff --git a/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/ModelManager.kt b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/ModelManager.kt
new file mode 100644
index 0000000..be27f77
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/ModelManager.kt
@@ -0,0 +1,125 @@
+package com.google.mlkit.samples.vision.digitalink.kotlin
+
+import android.util.Log
+import com.google.android.gms.tasks.SuccessContinuation
+import com.google.android.gms.tasks.Task
+import com.google.android.gms.tasks.Tasks
+import com.google.mlkit.common.MlKitException
+import com.google.mlkit.common.model.DownloadConditions
+import com.google.mlkit.common.model.RemoteModelManager
+import com.google.mlkit.vision.digitalink.DigitalInkRecognition
+import com.google.mlkit.vision.digitalink.DigitalInkRecognitionModel
+import com.google.mlkit.vision.digitalink.DigitalInkRecognitionModelIdentifier
+import com.google.mlkit.vision.digitalink.DigitalInkRecognizer
+import com.google.mlkit.vision.digitalink.DigitalInkRecognizerOptions
+import java.util.HashSet
+
+/** Class to manage model downloading, deletion, and selection. */
+class ModelManager {
+ private var model: DigitalInkRecognitionModel? = null
+ var recognizer: DigitalInkRecognizer? = null
+ val remoteModelManager = RemoteModelManager.getInstance()
+ fun setModel(languageTag: String): String {
+ // Clear the old model and recognizer.
+ model = null
+ recognizer?.close()
+ recognizer = null
+
+ // Try to parse the languageTag and get a model from it.
+ val modelIdentifier: DigitalInkRecognitionModelIdentifier?
+ modelIdentifier = try {
+ DigitalInkRecognitionModelIdentifier.fromLanguageTag(languageTag)
+ } catch (e: MlKitException) {
+ Log.e(TAG, "Failed to parse language '$languageTag'")
+ return ""
+ } ?: return "No model for language: $languageTag"
+
+ // Initialize the model and recognizer.
+ model = DigitalInkRecognitionModel.builder(modelIdentifier).build()
+ recognizer = DigitalInkRecognition.getClient(
+ DigitalInkRecognizerOptions.builder(model!!).build()
+ )
+ Log.i(TAG, "Model set for language '$languageTag' ('${modelIdentifier.languageTag}').")
+ return "Model set for language: $languageTag"
+ }
+
+ fun checkIsModelDownloaded(): Task<Boolean> {
+ return remoteModelManager.isModelDownloaded(model!!)
+ }
+
+ fun deleteActiveModel(): Task<String> {
+ if (model == null) {
+ Log.i(TAG, "Model not set")
+ return Tasks.forResult("Model not set")
+ }
+ return checkIsModelDownloaded()
+ .onSuccessTask { result: Boolean? ->
+ if (!result!!) {
+ return@onSuccessTask Tasks.forResult("Model not downloaded yet")
+ }
+ remoteModelManager
+ .deleteDownloadedModel(model!!)
+ .onSuccessTask { _: Void? ->
+ Log.i(TAG, "Model successfully deleted")
+ Tasks.forResult("Model successfully deleted")
+ }
+ }
+ .addOnFailureListener { e: Exception ->
+ Log.e(TAG, "Error while deleting model: $e")
+ }
+ }
+
+ val downloadedModelLanguages: Task<Set<String>>
+ get() = remoteModelManager
+ .getDownloadedModels(DigitalInkRecognitionModel::class.java)
+ .onSuccessTask(
+ SuccessContinuation { remoteModels: Set<DigitalInkRecognitionModel>? ->
+ val result: MutableSet<String> = HashSet()
+ for (model in remoteModels!!) {
+ result.add(model.modelIdentifier.languageTag)
+ }
+ Log.i(TAG, "Downloaded models for languages:$result")
+ Tasks.forResult<Set<String>>(result.toSet())
+ }
+ )
+
+ fun download(): Task<String> {
+ return if (model == null) {
+ Tasks.forResult("Model not selected.")
+ } else remoteModelManager
+ .download(model!!, DownloadConditions.Builder().build())
+ .onSuccessTask { _: Void? ->
+ Log.i(TAG, "Model download succeeded.")
+ Tasks.forResult("Downloaded model successfully")
+ }
+ .addOnFailureListener { e: Exception ->
+ Log.e(TAG, "Error while downloading the model: $e")
+ }
+ }
+
+ companion object {
+ private const val TAG = "MLKD.ModelManager"
+ }
+}
diff --git a/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/RecognitionTask.kt b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/RecognitionTask.kt
new file mode 100644
index 0000000..32f0251
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/RecognitionTask.kt
@@ -0,0 +1,68 @@
+package com.google.mlkit.samples.vision.digitalink.kotlin
+
+import android.util.Log
+import com.google.android.gms.tasks.SuccessContinuation
+import com.google.android.gms.tasks.Task
+import com.google.android.gms.tasks.Tasks
+import com.google.mlkit.vision.digitalink.DigitalInkRecognizer
+import com.google.mlkit.vision.digitalink.Ink
+import com.google.mlkit.vision.digitalink.RecognitionResult
+import java.util.concurrent.atomic.AtomicBoolean
+
+/** Task to run asynchronously to obtain recognition results. */
+class RecognitionTask(private val recognizer: DigitalInkRecognizer?, private val ink: Ink) {
+ private var currentResult: RecognizedInk? = null
+ private val cancelled = AtomicBoolean(false)
+ private val done = AtomicBoolean(false)
+ fun cancel() {
+ cancelled.set(true)
+ }
+
+ fun done(): Boolean {
+ return done.get()
+ }
+
+ fun result(): RecognizedInk? {
+ return currentResult
+ }
+
+ /** Helper class that stores an ink along with the corresponding recognized text. */
+ class RecognizedInk internal constructor(val ink: Ink, val text: String?)
+
+ fun run(): Task<String?> {
+ Log.i(TAG, "RecoTask.run")
+ return recognizer!!
+ .recognize(ink)
+ .onSuccessTask(
+ SuccessContinuation { result: RecognitionResult? ->
+ if (cancelled.get() || result == null || result.candidates.isEmpty()) {
+ return@SuccessContinuation Tasks.forResult<String?>(null)
+ }
+ currentResult = RecognizedInk(ink, result.candidates[0].text)
+ Log.i(TAG, "result: " + currentResult!!.text)
+ done.set(true)
+ return@SuccessContinuation Tasks.forResult<String?>(currentResult!!.text)
+ }
+ )
+ }
+
+ companion object {
+ private const val TAG = "MLKD.RecognitionTask"
+ }
+}
diff --git a/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/StatusTextView.kt b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/StatusTextView.kt
new file mode 100644
index 0000000..b12a859
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/StatusTextView.kt
@@ -0,0 +1,31 @@
+package com.google.mlkit.samples.vision.digitalink.kotlin
+
+import android.content.Context
+import androidx.appcompat.widget.AppCompatTextView
+import android.util.AttributeSet
+import com.google.mlkit.samples.vision.digitalink.kotlin.StrokeManager.StatusChangedListener
+
+/**
+ * Status bar for the test app.
+ *
+ * <p>It is updated upon status changes announced by the StrokeManager.
+ */
+class StatusTextView : AppCompatTextView, StatusChangedListener {
+ private var strokeManager: StrokeManager? = null
+
+ constructor(context: Context) : super(context)
+ constructor(context: Context?, attributeSet: AttributeSet?) : super(context!!, attributeSet)
+
+ override fun onStatusChanged() {
+ this.text = strokeManager!!.status
+ }
+
+ fun setStrokeManager(strokeManager: StrokeManager?) {
+ this.strokeManager = strokeManager
+ }
+}
diff --git a/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/StrokeManager.kt b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/StrokeManager.kt
new file mode 100644
index 0000000..8d765b8
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/java/com/google/mlkit/samples/vision/digitalink/kotlin/StrokeManager.kt
@@ -0,0 +1,257 @@
+package com.google.mlkit.samples.vision.digitalink.kotlin
+
+import android.os.Handler
+import android.os.Message
+import androidx.annotation.VisibleForTesting
+import android.util.Log
+import android.view.MotionEvent
+import com.google.android.gms.tasks.SuccessContinuation
+import com.google.android.gms.tasks.Task
+import com.google.android.gms.tasks.Tasks
+import com.google.mlkit.samples.vision.digitalink.kotlin.RecognitionTask.RecognizedInk
+import com.google.mlkit.vision.digitalink.Ink
+import com.google.mlkit.vision.digitalink.Ink.Stroke
+import java.util.ArrayList
+
+/** Manages the recognition logic and the content that has been added to the current page. */
+class StrokeManager {
+ /** Interface to register to be notified of changes in the recognized content. */
+ interface ContentChangedListener {
+ /** This method is called when the recognized content changes. */
+ fun onContentChanged()
+ }
+
+ /** Interface to register to be notified of changes in the status. */
+ interface StatusChangedListener {
+ /** This method is called when the status changes. */
+ fun onStatusChanged()
+ }
+
+ /** Interface to register to be notified of changes in the downloaded model state. */
+ interface DownloadedModelsChangedListener {
+ /** This method is called when the set of downloaded models changes. */
+ fun onDownloadedModelsChanged(downloadedLanguageTags: Set<String>)
+ }
+
+ // For handling recognition and model downloading.
+ private var recognitionTask: RecognitionTask? = null
+
+ @JvmField
+ @VisibleForTesting
+ var modelManager =
+ ModelManager()
+
+ // Managing the recognition queue.
+ private val content: MutableList<RecognizedInk> = ArrayList()
+
+ // Managing ink currently drawn.
+ private var strokeBuilder = Stroke.builder()
+ private var inkBuilder = Ink.builder()
+ private var stateChangedSinceLastRequest = false
+ private var contentChangedListener: ContentChangedListener? = null
+ private var statusChangedListener: StatusChangedListener? = null
+ private var downloadedModelsChangedListener: DownloadedModelsChangedListener? = null
+ private var triggerRecognitionAfterInput = true
+ private var clearCurrentInkAfterRecognition = true
+ var status: String? = ""
+ private set(newStatus) {
+ field = newStatus
+ statusChangedListener?.onStatusChanged()
+ }
+
+ fun setTriggerRecognitionAfterInput(shouldTrigger: Boolean) {
+ triggerRecognitionAfterInput = shouldTrigger
+ }
+
+ fun setClearCurrentInkAfterRecognition(shouldClear: Boolean) {
+ clearCurrentInkAfterRecognition = shouldClear
+ }
+
+ // Handler to handle the UI Timeout.
+ // This handler is only used to trigger the UI timeout. Each time a UI interaction happens,
+ // the timer is reset by clearing the queue on this handler and sending a new delayed message (in
+ // addNewTouchEvent).
+ private val uiHandler = Handler(
+ Handler.Callback { msg: Message ->
+ if (msg.what == TIMEOUT_TRIGGER) {
+ Log.i(TAG, "Handling timeout trigger.")
+ commitResult()
+ return@Callback true
+ }
+ false
+ }
+ )
+
+ private fun commitResult() {
+ recognitionTask!!.result()?.let {
+ content.add(it)
+ status = "Successful recognition: " + it.text
+ if (clearCurrentInkAfterRecognition) {
+ resetCurrentInk()
+ }
+
+ contentChangedListener?.onContentChanged()
+ }
+ }
+
+ fun reset() {
+ Log.i(TAG, "reset")
+ resetCurrentInk()
+ content.clear()
+ recognitionTask?.cancel()
+ status = ""
+ }
+
+ private fun resetCurrentInk() {
+ inkBuilder = Ink.builder()
+ strokeBuilder = Stroke.builder()
+ stateChangedSinceLastRequest = false
+ }
+
+ val currentInk: Ink
+ get() = inkBuilder.build()
+
+ /**
+ * This method is called when a new touch event happens on the drawing client and notifies the
+ * StrokeManager of new content being added.
+ *
+ * <p>This method takes care of triggering the UI timeout and scheduling recognitions on the
+ * background thread.
+ *
+ * @return whether the touch event was handled.
+ */
+ fun addNewTouchEvent(event: MotionEvent): Boolean {
+ val action = event.actionMasked
+ val x = event.x
+ val y = event.y
+ val t = System.currentTimeMillis()
+
+ // A new event happened -> clear all pending timeout messages.
+ uiHandler.removeMessages(TIMEOUT_TRIGGER)
+ when (action) {
+ MotionEvent.ACTION_DOWN, MotionEvent.ACTION_MOVE ->
+ strokeBuilder.addPoint(Ink.Point.create(x, y, t))
+ MotionEvent.ACTION_UP -> {
+ strokeBuilder.addPoint(Ink.Point.create(x, y, t))
+ inkBuilder.addStroke(strokeBuilder.build())
+ strokeBuilder = Stroke.builder()
+ stateChangedSinceLastRequest = true
+ if (triggerRecognitionAfterInput) {
+ recognize()
+ }
+ }
+ else -> // Indicate touch event wasn't handled.
+ return false
+ }
+ return true
+ }
+
+ // Listeners to update the drawing and status.
+ fun setContentChangedListener(contentChangedListener: ContentChangedListener?) {
+ this.contentChangedListener = contentChangedListener
+ }
+
+ fun setStatusChangedListener(statusChangedListener: StatusChangedListener?) {
+ this.statusChangedListener = statusChangedListener
+ }
+
+ fun setDownloadedModelsChangedListener(
+ downloadedModelsChangedListener: DownloadedModelsChangedListener?
+ ) {
+ this.downloadedModelsChangedListener = downloadedModelsChangedListener
+ }
+
+ fun getContent(): List<RecognizedInk> {
+ return content
+ }
+
+ // Model downloading / deleting / setting.
+ fun setActiveModel(languageTag: String) {
+ status = modelManager.setModel(languageTag)
+ }
+
+ fun deleteActiveModel(): Task<Void?> {
+ return modelManager
+ .deleteActiveModel()
+ .addOnSuccessListener { refreshDownloadedModelsStatus() }
+ .onSuccessTask(
+ SuccessContinuation { status: String? ->
+ this.status = status
+ return@SuccessContinuation Tasks.forResult(null)
+ }
+ )
+ }
+
+ fun download(): Task<Void?> {
+ status = "Download started."
+ return modelManager
+ .download()
+ .addOnSuccessListener { refreshDownloadedModelsStatus() }
+ .onSuccessTask(
+ SuccessContinuation { status: String? ->
+ this.status = status
+ return@SuccessContinuation Tasks.forResult(null)
+ }
+ )
+ }
+
+ // Recognition-related.
+ fun recognize(): Task<String?> {
+ if (!stateChangedSinceLastRequest || inkBuilder.isEmpty) {
+ status = "No recognition, ink unchanged or empty"
+ return Tasks.forResult(null)
+ }
+ if (modelManager.recognizer == null) {
+ status = "Recognizer not set"
+ return Tasks.forResult(null)
+ }
+ return modelManager
+ .checkIsModelDownloaded()
+ .onSuccessTask { result: Boolean? ->
+ if (!result!!) {
+ status = "Model not downloaded yet"
+ return@onSuccessTask Tasks.forResult<String?>(null)
+ }
+ stateChangedSinceLastRequest = false
+ recognitionTask = RecognitionTask(modelManager.recognizer, inkBuilder.build())
+ uiHandler.sendMessageDelayed(
+ uiHandler.obtainMessage(TIMEOUT_TRIGGER),
+ CONVERSION_TIMEOUT_MS
+ )
+ recognitionTask!!.run()
+ }
+ }
+
+ fun refreshDownloadedModelsStatus() {
+ modelManager
+ .downloadedModelLanguages
+ .addOnSuccessListener { downloadedLanguageTags: Set<String> ->
+ downloadedModelsChangedListener?.onDownloadedModelsChanged(downloadedLanguageTags)
+ }
+ }
+
+ companion object {
+ @JvmField
+ @VisibleForTesting
+ val CONVERSION_TIMEOUT_MS: Long = 1000
+ private const val TAG = "MLKD.StrokeManager"
+
+ // This is a constant that is used as a message identifier to trigger the timeout.
+ private const val TIMEOUT_TRIGGER = 1
+ }
+}
diff --git a/mlkit/digitalink/app/src/main/res/layout/activity_digital_ink_main.xml b/mlkit/digitalink/app/src/main/res/layout/activity_digital_ink_main.xml
new file mode 100644
index 0000000..3415da5
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/res/layout/activity_digital_ink_main.xml
@@ -0,0 +1,75 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/mlkit/digitalink/app/src/main/res/layout/activity_digital_ink_main_kotlin.xml b/mlkit/digitalink/app/src/main/res/layout/activity_digital_ink_main_kotlin.xml
new file mode 100644
index 0000000..bf6f580
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/res/layout/activity_digital_ink_main_kotlin.xml
@@ -0,0 +1,75 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/mlkit/digitalink/app/src/main/res/mipmap-hdpi/ic_launcher.png b/mlkit/digitalink/app/src/main/res/mipmap-hdpi/ic_launcher.png
new file mode 100644
index 0000000..a571e60
Binary files /dev/null and b/mlkit/digitalink/app/src/main/res/mipmap-hdpi/ic_launcher.png differ
diff --git a/mlkit/digitalink/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/mlkit/digitalink/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
new file mode 100644
index 0000000..61da551
Binary files /dev/null and b/mlkit/digitalink/app/src/main/res/mipmap-hdpi/ic_launcher_round.png differ
diff --git a/mlkit/digitalink/app/src/main/res/mipmap-mdpi/ic_launcher.png b/mlkit/digitalink/app/src/main/res/mipmap-mdpi/ic_launcher.png
new file mode 100644
index 0000000..c41dd28
Binary files /dev/null and b/mlkit/digitalink/app/src/main/res/mipmap-mdpi/ic_launcher.png differ
diff --git a/mlkit/digitalink/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/mlkit/digitalink/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
new file mode 100644
index 0000000..db5080a
Binary files /dev/null and b/mlkit/digitalink/app/src/main/res/mipmap-mdpi/ic_launcher_round.png differ
diff --git a/mlkit/digitalink/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/mlkit/digitalink/app/src/main/res/mipmap-xhdpi/ic_launcher.png
new file mode 100644
index 0000000..6dba46d
Binary files /dev/null and b/mlkit/digitalink/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ
diff --git a/mlkit/digitalink/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/mlkit/digitalink/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
new file mode 100644
index 0000000..da31a87
Binary files /dev/null and b/mlkit/digitalink/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png differ
diff --git a/mlkit/digitalink/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/mlkit/digitalink/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
new file mode 100644
index 0000000..15ac681
Binary files /dev/null and b/mlkit/digitalink/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ
diff --git a/mlkit/digitalink/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/mlkit/digitalink/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
new file mode 100644
index 0000000..b216f2d
Binary files /dev/null and b/mlkit/digitalink/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png differ
diff --git a/mlkit/digitalink/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/mlkit/digitalink/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
new file mode 100644
index 0000000..f25a419
Binary files /dev/null and b/mlkit/digitalink/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ
diff --git a/mlkit/digitalink/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/mlkit/digitalink/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
new file mode 100644
index 0000000..e96783c
Binary files /dev/null and b/mlkit/digitalink/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png differ
diff --git a/mlkit/digitalink/app/src/main/res/values/colors.xml b/mlkit/digitalink/app/src/main/res/values/colors.xml
new file mode 100644
index 0000000..4faecfa
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/res/values/colors.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <color name="colorPrimary">#6200EE</color>
+ <color name="colorPrimaryDark">#3700B3</color>
+ <color name="colorAccent">#03DAC5</color>
+</resources>
\ No newline at end of file
diff --git a/mlkit/digitalink/app/src/main/res/values/strings.xml b/mlkit/digitalink/app/src/main/res/values/strings.xml
new file mode 100644
index 0000000..77c3e74
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/res/values/strings.xml
@@ -0,0 +1,5 @@
+
+ ML Kit Digital Ink Recognition Demo
+ ML Kit Digital Ink Recognition Demo
+ Clear
+
diff --git a/mlkit/digitalink/app/src/main/res/values/styles.xml b/mlkit/digitalink/app/src/main/res/values/styles.xml
new file mode 100644
index 0000000..fac9291
--- /dev/null
+++ b/mlkit/digitalink/app/src/main/res/values/styles.xml
@@ -0,0 +1,10 @@
+
+
+
+
+
\ No newline at end of file
diff --git a/mlkit/digitalink/build.gradle b/mlkit/digitalink/build.gradle
new file mode 100644
index 0000000..cecae92
--- /dev/null
+++ b/mlkit/digitalink/build.gradle
@@ -0,0 +1,27 @@
+// Top-level build file where you can add configuration options common to all sub-projects/modules.
+buildscript {
+ ext.kotlin_version = "1.5.20"
+ repositories {
+ google()
+ jcenter()
+ }
+ dependencies {
+ classpath "com.android.tools.build:gradle:4.2.0"
+ classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
+
+ // NOTE: Do not place your application dependencies here; they belong
+ // in the individual module build.gradle files
+ }
+}
+
+allprojects {
+ repositories {
+ mavenLocal()
+ google()
+ jcenter()
+ }
+}
+
+task clean(type: Delete) {
+ delete rootProject.buildDir
+}
\ No newline at end of file
diff --git a/mlkit/digitalink/gradle.properties b/mlkit/digitalink/gradle.properties
new file mode 100644
index 0000000..c62d03d
--- /dev/null
+++ b/mlkit/digitalink/gradle.properties
@@ -0,0 +1,21 @@
+# Project-wide Gradle settings.
+
+# IDE (e.g. Android Studio) users:
+# Gradle settings configured through the IDE *will override*
+# any settings specified in this file.
+
+# For more details on how to configure your build environment visit
+# http://www.gradle.org/docs/current/userguide/build_environment.html
+
+# Specifies the JVM arguments used for the daemon process.
+# The setting is particularly useful for tweaking memory settings.
+# Default value: -Xmx10248m -XX:MaxPermSize=256m
+org.gradle.jvmargs=-Xmx2048m -XX:MaxPermSize=512m -XX:+HeapDumpOnOutOfMemoryError -Dfile.encoding=UTF-8
+
+# When configured, Gradle will run in incubating parallel mode.
+# This option should only be used with decoupled projects. More details, visit
+# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
+# org.gradle.parallel=true
+
+android.useAndroidX=true
+android.enableJetifier=true
diff --git a/mlkit/digitalink/gradle/wrapper/gradle-wrapper.jar b/mlkit/digitalink/gradle/wrapper/gradle-wrapper.jar
new file mode 100644
index 0000000..f6b961f
Binary files /dev/null and b/mlkit/digitalink/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/mlkit/digitalink/gradle/wrapper/gradle-wrapper.properties b/mlkit/digitalink/gradle/wrapper/gradle-wrapper.properties
new file mode 100644
index 0000000..3fa0d2c
--- /dev/null
+++ b/mlkit/digitalink/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Mon Jan 24 22:29:24 UTC 2022
+distributionBase=GRADLE_USER_HOME
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.7.1-bin.zip
+distributionPath=wrapper/dists
+zipStorePath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
diff --git a/mlkit/digitalink/gradlew b/mlkit/digitalink/gradlew
new file mode 100755
index 0000000..cccdd3d
--- /dev/null
+++ b/mlkit/digitalink/gradlew
@@ -0,0 +1,172 @@
+#!/usr/bin/env sh
+
+##############################################################################
+##
+## Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`"/$link"
+ fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+ echo "$*"
+}
+
+die () {
+ echo
+ echo "$*"
+ echo
+ exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+ CYGWIN* )
+ cygwin=true
+ ;;
+ Darwin* )
+ darwin=true
+ ;;
+ MINGW* )
+ msys=true
+ ;;
+ NONSTOP* )
+ nonstop=true
+ ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD="java"
+ which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+ MAX_FD_LIMIT=`ulimit -H -n`
+ if [ $? -eq 0 ] ; then
+ if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+ MAX_FD="$MAX_FD_LIMIT"
+ fi
+ ulimit -n $MAX_FD
+ if [ $? -ne 0 ] ; then
+ warn "Could not set maximum file descriptor limit: $MAX_FD"
+ fi
+ else
+ warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+ fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+ GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+ APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+ CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+ JAVACMD=`cygpath --unix "$JAVACMD"`
+
+ # We build the pattern for arguments to be converted via cygpath
+ ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+ SEP=""
+ for dir in $ROOTDIRSRAW ; do
+ ROOTDIRS="$ROOTDIRS$SEP$dir"
+ SEP="|"
+ done
+ OURCYGPATTERN="(^($ROOTDIRS))"
+ # Add a user-defined pattern to the cygpath arguments
+ if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+ OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+ fi
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ i=0
+ for arg in "$@" ; do
+ CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+ CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
+
+ if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
+ eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+ else
+ eval `echo args$i`="\"$arg\""
+ fi
+ i=$((i+1))
+ done
+ case $i in
+ (0) set -- ;;
+ (1) set -- "$args0" ;;
+ (2) set -- "$args0" "$args1" ;;
+ (3) set -- "$args0" "$args1" "$args2" ;;
+ (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+ (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+ (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+ (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+ (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+ (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+ esac
+fi
+
+# Escape application args
+save () {
+ for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+ echo " "
+}
+APP_ARGS=$(save "$@")
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
+ cd "$(dirname "$0")"
+fi
+
+exec "$JAVACMD" "$@"
diff --git a/mlkit/digitalink/gradlew.bat b/mlkit/digitalink/gradlew.bat
new file mode 100644
index 0000000..e95643d
--- /dev/null
+++ b/mlkit/digitalink/gradlew.bat
@@ -0,0 +1,84 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windows variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/mlkit/digitalink/settings.gradle b/mlkit/digitalink/settings.gradle
new file mode 100644
index 0000000..a497315
--- /dev/null
+++ b/mlkit/digitalink/settings.gradle
@@ -0,0 +1,2 @@
+include ':app'
+rootProject.name = "ML Kit Digital Ink Recognition Demo"
\ No newline at end of file
diff --git a/mlkit/entityextraction/README.md b/mlkit/entityextraction/README.md
new file mode 100644
index 0000000..7b35315
--- /dev/null
+++ b/mlkit/entityextraction/README.md
@@ -0,0 +1,81 @@
+# ML Kit Entity Extraction Quickstart
+
+* [Read more about ML Kit Entity Extraction API](https://developers.google.com/ml-kit/language/entity-extraction)
+
+## Introduction
+
+The ML Kit Entity Extraction Android Quickstart app demonstrates how to use
+the ML Kit Entity Extraction feature to recognize structured data in text.
+
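+As orientation, here is a minimal Kotlin sketch of the call flow this
+quickstart wraps in its UI: create an extractor for a language, make sure its
+model is on-device, then annotate a string. The function name, sample
+sentence, and log tags below are illustrative, not taken from the app code.
+
+```kotlin
+import android.util.Log
+import com.google.mlkit.nl.entityextraction.EntityExtraction
+import com.google.mlkit.nl.entityextraction.EntityExtractionParams
+import com.google.mlkit.nl.entityextraction.EntityExtractorOptions
+
+fun extractEntities() {
+    // Extractor for English; each language uses its own model.
+    val extractor = EntityExtraction.getClient(
+        EntityExtractorOptions.Builder(EntityExtractorOptions.ENGLISH).build()
+    )
+
+    // The language model must be on-device before annotate() can run.
+    extractor.downloadModelIfNeeded()
+        .addOnSuccessListener {
+            val params = EntityExtractionParams
+                .Builder("Meet me at 1600 Amphitheatre Parkway tomorrow at 6pm")
+                .build()
+            extractor.annotate(params)
+                .addOnSuccessListener { annotations ->
+                    // An annotation covers a span of the input and may carry
+                    // several entities (address, date-time, and so on).
+                    for (annotation in annotations) {
+                        for (entity in annotation.entities) {
+                            Log.d("EntityExtraction", "${entity.type}: ${annotation.annotatedText}")
+                        }
+                    }
+                }
+                .addOnFailureListener { e -> Log.e("EntityExtraction", "Annotation failed", e) }
+        }
+        .addOnFailureListener { e -> Log.e("EntityExtraction", "Model download failed", e) }
+}
+```
+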
+## Getting Started
+
+* Run the sample code on your Android device or emulator
+* Type messages to see which entities are extracted
+* Try extending the code to add new features and functionality
+
+## How to use the app
+
+- Enter text in the input box and hit "Trigger Entity Extraction".
+- If any entities are found in the text, they are shown at the bottom of
+the screen.
+- Click the download arrow in the top right corner to ensure the correct
+language model has been downloaded; a sketch of equivalent model-management
+calls follows this list.
+
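+If you would rather check or trigger the model download programmatically, ML
+Kit exposes a model manager for this. A minimal sketch, assuming the English
+model and a Wi-Fi-only download policy (both are illustrative choices, not
+necessarily what the app itself does):
+
+```kotlin
+import com.google.mlkit.common.model.DownloadConditions
+import com.google.mlkit.common.model.RemoteModelManager
+import com.google.mlkit.nl.entityextraction.EntityExtractionRemoteModel
+import com.google.mlkit.nl.entityextraction.EntityExtractorOptions
+
+fun ensureEnglishModel() {
+    val manager = RemoteModelManager.getInstance()
+    val model = EntityExtractionRemoteModel
+        .Builder(EntityExtractorOptions.ENGLISH)
+        .build()
+
+    manager.isModelDownloaded(model).addOnSuccessListener { downloaded ->
+        if (!downloaded) {
+            // Restrict the download to Wi-Fi; drop the condition to allow mobile data.
+            val conditions = DownloadConditions.Builder().requireWifi().build()
+            manager.download(model, conditions)
+        }
+    }
+}
+```
+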
+### Examples
+
+<table>
+  <tr>
+    <th>Input text</th>
+    <th>Detected entities</th>
+  </tr>
+  <tr>
+    <td>
+      Meet me at 1600 Amphitheatre Parkway, Mountain View, CA, 94043.
+      Let’s organize a meeting to discuss.