diff --git a/android/.gitignore b/android/.gitignore
new file mode 100755
index 0000000000000000000000000000000000000000..4e94553f8871994ad627657153b927b9f0b4aec1
--- /dev/null
+++ b/android/.gitignore
@@ -0,0 +1,13 @@
+*.iml
+.gradle
+/local.properties
+/.idea/libraries
+/.idea/modules.xml
+/.idea/workspace.xml
+.DS_Store
+/build
+/captures
+.externalNativeBuild
+
+/.gradle/
+/.idea/
diff --git a/android/app/.gitignore b/android/app/.gitignore
new file mode 100755
index 0000000000000000000000000000000000000000..4ccdcdfc6cc358cc4cec545af945fe5c6f69cfd0
--- /dev/null
+++ b/android/app/.gitignore
@@ -0,0 +1,2 @@
+/build
+/build/
\ No newline at end of file
diff --git a/android/app/build.gradle b/android/app/build.gradle
new file mode 100755
index 0000000000000000000000000000000000000000..f86c2d3f312417d551c8fb5eb4f8dafe1ecde669
--- /dev/null
+++ b/android/app/build.gradle
@@ -0,0 +1,59 @@
+apply plugin: 'com.android.application'
+apply plugin: 'de.undercouch.download'
+
+android {
+ compileSdkVersion 28
+ buildToolsVersion '28.0.3'
+ defaultConfig {
+ applicationId "org.tensorflow.lite.examples.detection"
+ minSdkVersion 21
+ targetSdkVersion 28
+ versionCode 1
+ versionName "1.0"
+
+// ndk {
+// abiFilters 'armeabi-v7a', 'arm64-v8a'
+// }
+ }
+ buildTypes {
+ release {
+ minifyEnabled false
+ proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+ }
+ }
+ aaptOptions {
+ noCompress "tflite"
+ }
+ compileOptions {
+ sourceCompatibility = '1.8'
+ targetCompatibility = '1.8'
+ }
+ lintOptions {
+ abortOnError false
+ }
+}
+
+// import DownloadModels task
+project.ext.ASSET_DIR = projectDir.toString() + '/src/main/assets'
+project.ext.TMP_DIR = project.buildDir.toString() + '/downloads'
+
+// Download the default models; if you wish to use your own models,
+// place them in the "assets" directory and comment out the line below.
+apply from: 'download_model.gradle'
+
+dependencies {
+ implementation fileTree(dir: 'libs', include: ['*.jar', '*.aar'])
+ implementation 'androidx.appcompat:appcompat:1.1.0'
+ implementation 'androidx.coordinatorlayout:coordinatorlayout:1.1.0'
+ implementation 'com.google.android.material:material:1.1.0'
+// implementation 'org.tensorflow:tensorflow-lite:0.0.0-nightly'
+// implementation 'org.tensorflow:tensorflow-lite-gpu:0.0.0-nightly'
+ implementation 'org.tensorflow:tensorflow-lite:2.2.0'
+ implementation 'org.tensorflow:tensorflow-lite-gpu:2.2.0'
+// implementation 'org.tensorflow:tensorflow-lite:0.0.0-gpu-experimental'
+ implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
+ implementation 'com.google.code.gson:gson:2.8.6'
+ androidTestImplementation 'androidx.test.ext:junit:1.1.1'
+ androidTestImplementation 'com.android.support.test:rules:1.0.2'
+ androidTestImplementation 'com.google.truth:truth:1.0.1'
+}
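
The `noCompress "tflite"` entry in aaptOptions above is load-bearing: the interpreter memory-maps the model straight out of the APK, and `AssetManager.openFd()` throws on compressed assets. A minimal sketch of the usual loader, following the helper shape used throughout the TFLite examples (the method name here is illustrative):

    import android.content.res.AssetFileDescriptor;
    import android.content.res.AssetManager;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.nio.MappedByteBuffer;
    import java.nio.channels.FileChannel;

    /** Memory-maps a .tflite asset; requires the asset to be stored uncompressed. */
    static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename)
        throws IOException {
      AssetFileDescriptor fd = assets.openFd(modelFilename);
      FileInputStream inputStream = new FileInputStream(fd.getFileDescriptor());
      FileChannel fileChannel = inputStream.getChannel();
      // Map only the asset's byte range inside the APK file.
      return fileChannel.map(
          FileChannel.MapMode.READ_ONLY, fd.getStartOffset(), fd.getDeclaredLength());
    }
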
diff --git a/android/app/download_model.gradle b/android/app/download_model.gradle
new file mode 100755
index 0000000000000000000000000000000000000000..08b86fdc3c6c498299cdf9ac42df213494a3cc13
--- /dev/null
+++ b/android/app/download_model.gradle
@@ -0,0 +1,26 @@
+
+task downloadZipFile(type: Download) {
+ src 'https://storage.googleapis.com/download.tensorflow.org/models/tflite/coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.zip'
+ dest new File(buildDir, 'zips/')
+ overwrite false
+}
+
+
+task downloadAndUnzipFile(dependsOn: downloadZipFile, type: Copy) {
+ from zipTree(downloadZipFile.dest)
+ into project.ext.ASSET_DIR
+}
+
+
+task extractModels(type: Copy) {
+ dependsOn downloadAndUnzipFile
+}
+
+tasks.whenTaskAdded { task ->
+ if (task.name == 'assembleDebug') {
+ task.dependsOn 'extractModels'
+ }
+ if (task.name == 'assembleRelease') {
+ task.dependsOn 'extractModels'
+ }
+}
\ No newline at end of file
diff --git a/android/app/proguard-rules.pro b/android/app/proguard-rules.pro
new file mode 100755
index 0000000000000000000000000000000000000000..f1b424510da51fd82143bc74a0a801ae5a1e2fcd
--- /dev/null
+++ b/android/app/proguard-rules.pro
@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+# http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+# public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
diff --git a/android/app/src/androidTest/assets/table.jpg b/android/app/src/androidTest/assets/table.jpg
new file mode 100755
index 0000000000000000000000000000000000000000..82213777e29f6f039c62904976b4ca71434fccfe
Binary files /dev/null and b/android/app/src/androidTest/assets/table.jpg differ
diff --git a/android/app/src/androidTest/assets/table_results.txt b/android/app/src/androidTest/assets/table_results.txt
new file mode 100755
index 0000000000000000000000000000000000000000..11709e22f07b9bbb5e7c3f95ce65b5512e1f76ac
--- /dev/null
+++ b/android/app/src/androidTest/assets/table_results.txt
@@ -0,0 +1,4 @@
+dining_table 27.492085 97.94615 623.1435 444.8627 0.48828125
+knife 342.53433 243.71082 583.89185 416.34595 0.4765625
+cup 68.025925 197.5857 202.02031 374.2206 0.4375
+book 185.43098 139.64153 244.51149 203.37737 0.3125
diff --git a/android/app/src/androidTest/java/AndroidManifest.xml b/android/app/src/androidTest/java/AndroidManifest.xml
new file mode 100755
index 0000000000000000000000000000000000000000..484ab4769b2277bf39531f2ed930e780990af030
--- /dev/null
+++ b/android/app/src/androidTest/java/AndroidManifest.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.tensorflow.lite.examples.detection">
+ <application />
+</manifest>
\ No newline at end of file
diff --git a/android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/DetectorTest.java b/android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/DetectorTest.java
new file mode 100755
index 0000000000000000000000000000000000000000..3fc33490846baeed8dfbd7d48a5f9bfd13ba22f9
--- /dev/null
+++ b/android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/DetectorTest.java
@@ -0,0 +1,170 @@
+/*
+ * Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.tensorflow.lite.examples.detection;
+
+import static com.google.common.truth.Truth.assertThat;
+import static java.lang.Math.abs;
+import static java.lang.Math.max;
+import static java.lang.Math.min;
+
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.Bitmap.Config;
+import android.graphics.BitmapFactory;
+import android.graphics.Canvas;
+import android.graphics.Matrix;
+import android.graphics.RectF;
+import android.util.Size;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.platform.app.InstrumentationRegistry;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Scanner;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.tensorflow.lite.examples.detection.env.ImageUtils;
+import org.tensorflow.lite.examples.detection.tflite.Classifier;
+import org.tensorflow.lite.examples.detection.tflite.Classifier.Recognition;
+import org.tensorflow.lite.examples.detection.tflite.TFLiteObjectDetectionAPIModel;
+
+/** Golden test for Object Detection Reference app. */
+@RunWith(AndroidJUnit4.class)
+public class DetectorTest {
+
+ private static final int MODEL_INPUT_SIZE = 300;
+ private static final boolean IS_MODEL_QUANTIZED = true;
+ private static final String MODEL_FILE = "detect.tflite";
+ private static final String LABELS_FILE = "file:///android_asset/labelmap.txt";
+ private static final Size IMAGE_SIZE = new Size(640, 480);
+
+ private Classifier detector;
+ private Bitmap croppedBitmap;
+ private Matrix frameToCropTransform;
+ private Matrix cropToFrameTransform;
+
+ @Before
+ public void setUp() throws IOException {
+ AssetManager assetManager =
+ InstrumentationRegistry.getInstrumentation().getContext().getAssets();
+ detector =
+ TFLiteObjectDetectionAPIModel.create(
+ assetManager,
+ MODEL_FILE,
+ LABELS_FILE,
+ MODEL_INPUT_SIZE,
+ IS_MODEL_QUANTIZED);
+ int cropSize = MODEL_INPUT_SIZE;
+ int previewWidth = IMAGE_SIZE.getWidth();
+ int previewHeight = IMAGE_SIZE.getHeight();
+ int sensorOrientation = 0;
+ croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
+
+ frameToCropTransform =
+ ImageUtils.getTransformationMatrix(
+ previewWidth, previewHeight,
+ cropSize, cropSize,
+ sensorOrientation, false);
+ cropToFrameTransform = new Matrix();
+ frameToCropTransform.invert(cropToFrameTransform);
+ }
+
+ @Test
+ public void detectionResultsShouldNotChange() throws Exception {
+ Canvas canvas = new Canvas(croppedBitmap);
+ canvas.drawBitmap(loadImage("table.jpg"), frameToCropTransform, null);
+ final List<Recognition> results = detector.recognizeImage(croppedBitmap);
+ final List<Recognition> expected = loadRecognitions("table_results.txt");
+
+ for (Recognition target : expected) {
+ // Find a matching result in results
+ boolean matched = false;
+ for (Recognition item : results) {
+ RectF bbox = new RectF();
+ cropToFrameTransform.mapRect(bbox, item.getLocation());
+ if (item.getTitle().equals(target.getTitle())
+ && matchBoundingBoxes(bbox, target.getLocation())
+ && matchConfidence(item.getConfidence(), target.getConfidence())) {
+ matched = true;
+ break;
+ }
+ }
+ assertThat(matched).isTrue();
+ }
+ }
+
+ // Confidence tolerance: absolute 1%
+ private static boolean matchConfidence(float a, float b) {
+ return abs(a - b) < 0.01;
+ }
+
+ // Bounding box tolerance: overlapped area > 95% of each box's own area
+ private static boolean matchBoundingBoxes(RectF a, RectF b) {
+ float areaA = a.width() * a.height();
+ float areaB = b.width() * b.height();
+ RectF overlapped =
+ new RectF(
+ max(a.left, b.left), max(a.top, b.top), min(a.right, b.right), min(a.bottom, b.bottom));
+ // Disjoint boxes yield negative extents; bail out rather than let two
+ // negative values multiply into a spuriously positive overlap area.
+ if (overlapped.width() <= 0 || overlapped.height() <= 0) {
+ return false;
+ }
+ float overlappedArea = overlapped.width() * overlapped.height();
+ return overlappedArea > 0.95 * areaA && overlappedArea > 0.95 * areaB;
+ }
+
+ private static Bitmap loadImage(String fileName) throws Exception {
+ AssetManager assetManager =
+ InstrumentationRegistry.getInstrumentation().getContext().getAssets();
+ InputStream inputStream = assetManager.open(fileName);
+ return BitmapFactory.decodeStream(inputStream);
+ }
+
+ // Each line of the result file has the format:
+ // category bbox.left bbox.top bbox.right bbox.bottom confidence
+ // ...
+ // Example:
+ // Apple 25 30 75 80 0.99
+ // Banana 25 90 75 200 0.98
+ // ...
+ private static List<Recognition> loadRecognitions(String fileName) throws Exception {
+ AssetManager assetManager =
+ InstrumentationRegistry.getInstrumentation().getContext().getAssets();
+ InputStream inputStream = assetManager.open(fileName);
+ Scanner scanner = new Scanner(inputStream);
+ List<Recognition> result = new ArrayList<>();
+ while (scanner.hasNext()) {
+ String category = scanner.next();
+ category = category.replace('_', ' ');
+ if (!scanner.hasNextFloat()) {
+ break;
+ }
+ float left = scanner.nextFloat();
+ float top = scanner.nextFloat();
+ float right = scanner.nextFloat();
+ float bottom = scanner.nextFloat();
+ RectF boundingBox = new RectF(left, top, right, bottom);
+ float confidence = scanner.nextFloat();
+ Recognition recognition = new Recognition(null, category, confidence, boundingBox);
+ result.add(recognition);
+ }
+ return result;
+ }
+}
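
To make the tolerances above concrete: a detection matches a golden entry only if its confidence agrees within an absolute 0.01 and the intersection of the two boxes covers more than 95% of each rectangle. A self-contained check in plain Java, using the dining_table golden box from table_results.txt against an invented detection:

    public class ToleranceExample {
      public static void main(String[] args) {
        // dining_table golden box: left, top, right, bottom
        float[] golden = {27.49f, 97.95f, 623.14f, 444.86f};
        float[] detected = {30f, 100f, 620f, 442f}; // hypothetical, slightly shifted
        float w = Math.min(golden[2], detected[2]) - Math.max(golden[0], detected[0]);
        float h = Math.min(golden[3], detected[3]) - Math.max(golden[1], detected[1]);
        float overlap = w * h;                                                   // ~201,780
        float areaG = (golden[2] - golden[0]) * (golden[3] - golden[1]);         // ~206,637
        float areaD = (detected[2] - detected[0]) * (detected[3] - detected[1]); // ~201,780
        // Intersection covers ~97.6% of the golden box and 100% of the detection.
        System.out.println(overlap > 0.95f * areaG && overlap > 0.95f * areaD);  // true
      }
    }
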
diff --git a/android/app/src/main/AndroidManifest.xml b/android/app/src/main/AndroidManifest.xml
new file mode 100755
index 0000000000000000000000000000000000000000..397ac8ce465e6cd3cfdaf3c20a705b7fb30de301
--- /dev/null
+++ b/android/app/src/main/AndroidManifest.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="org.tensorflow.lite.examples.detection">
+
+ <uses-permission android:name="android.permission.CAMERA" />
+
+ <uses-feature android:name="android.hardware.camera" />
+ <uses-feature android:name="android.hardware.camera.autofocus" />
+
+ <application
+ android:allowBackup="true"
+ android:icon="@mipmap/ic_launcher"
+ android:label="@string/tfe_od_app_name"
+ android:roundIcon="@mipmap/ic_launcher_round"
+ android:supportsRtl="true"
+ android:theme="@style/AppTheme.ObjectDetection">
+
+ <activity
+ android:name=".DetectorActivity"
+ android:label="@string/tfe_od_app_name"
+ android:screenOrientation="portrait">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.LAUNCHER" />
+ </intent-filter>
+ </activity>
+ </application>
+
+</manifest>
diff --git a/android/app/src/main/assets/coco.txt b/android/app/src/main/assets/coco.txt
new file mode 100755
index 0000000000000000000000000000000000000000..ec82f0ffde8100863d293f3d45555cc58dbfab7c
--- /dev/null
+++ b/android/app/src/main/assets/coco.txt
@@ -0,0 +1,80 @@
+person
+bicycle
+car
+motorbike
+aeroplane
+bus
+train
+truck
+boat
+traffic light
+fire hydrant
+stop sign
+parking meter
+bench
+bird
+cat
+dog
+horse
+sheep
+cow
+elephant
+bear
+zebra
+giraffe
+backpack
+umbrella
+handbag
+tie
+suitcase
+frisbee
+skis
+snowboard
+sports ball
+kite
+baseball bat
+baseball glove
+skateboard
+surfboard
+tennis racket
+bottle
+wine glass
+cup
+fork
+knife
+spoon
+bowl
+banana
+apple
+sandwich
+orange
+broccoli
+carrot
+hot dog
+pizza
+donut
+cake
+chair
+sofa
+potted plant
+bed
+dining table
+toilet
+tvmonitor
+laptop
+mouse
+remote
+keyboard
+cell phone
+microwave
+oven
+toaster
+sink
+refrigerator
+book
+clock
+vase
+scissors
+teddy bear
+hair drier
+toothbrush
diff --git a/android/app/src/main/assets/kite.jpg b/android/app/src/main/assets/kite.jpg
new file mode 100755
index 0000000000000000000000000000000000000000..9eb325ac5fc375cb2513380087dd713be9be19d8
Binary files /dev/null and b/android/app/src/main/assets/kite.jpg differ
diff --git a/android/app/src/main/assets/labelmap.txt b/android/app/src/main/assets/labelmap.txt
new file mode 100755
index 0000000000000000000000000000000000000000..5a70ff82aa7b0fa7315ca591820e4cf7d2f5ad18
--- /dev/null
+++ b/android/app/src/main/assets/labelmap.txt
@@ -0,0 +1,91 @@
+???
+person
+bicycle
+car
+motorcycle
+airplane
+bus
+train
+truck
+boat
+traffic light
+fire hydrant
+???
+stop sign
+parking meter
+bench
+bird
+cat
+dog
+horse
+sheep
+cow
+elephant
+bear
+zebra
+giraffe
+???
+backpack
+umbrella
+???
+???
+handbag
+tie
+suitcase
+frisbee
+skis
+snowboard
+sports ball
+kite
+baseball bat
+baseball glove
+skateboard
+surfboard
+tennis racket
+bottle
+???
+wine glass
+cup
+fork
+knife
+spoon
+bowl
+banana
+apple
+sandwich
+orange
+broccoli
+carrot
+hot dog
+pizza
+donut
+cake
+chair
+couch
+potted plant
+bed
+???
+dining table
+???
+???
+toilet
+???
+tv
+laptop
+mouse
+remote
+keyboard
+cell phone
+microwave
+oven
+toaster
+sink
+refrigerator
+???
+book
+clock
+vase
+scissors
+teddy bear
+hair drier
+toothbrush
diff --git a/android/app/src/main/assets/yolov4-416-fp32.tflite b/android/app/src/main/assets/yolov4-416-fp32.tflite
new file mode 100755
index 0000000000000000000000000000000000000000..22ca076c8ca77f3ce9c0e4ebe760296c7d1456e7
--- /dev/null
+++ b/android/app/src/main/assets/yolov4-416-fp32.tflite
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7160a2f3e58629a15506a6c77685fb5583cddf186dac3015be7998975d662465
+size 24279948
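
The model is committed as a Git LFS pointer, so a plain clone checks out this three-line stub rather than the 24 MB flatbuffer. Run `git lfs pull` (with git-lfs installed) before building; otherwise the interpreter will fail to parse the asset at startup.
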
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraActivity.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraActivity.java
new file mode 100755
index 0000000000000000000000000000000000000000..b1cedd08c1f607341ba6899123bc077a01883398
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraActivity.java
@@ -0,0 +1,550 @@
+/*
+ * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.tensorflow.lite.examples.detection;
+
+import android.Manifest;
+import android.app.Fragment;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.hardware.Camera;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.media.Image;
+import android.media.Image.Plane;
+import android.media.ImageReader;
+import android.media.ImageReader.OnImageAvailableListener;
+import android.os.Build;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Trace;
+import androidx.annotation.NonNull;
+import androidx.appcompat.app.AppCompatActivity;
+import androidx.appcompat.widget.SwitchCompat;
+import androidx.appcompat.widget.Toolbar;
+import android.util.Size;
+import android.view.Surface;
+import android.view.View;
+import android.view.ViewTreeObserver;
+import android.view.WindowManager;
+import android.widget.CompoundButton;
+import android.widget.ImageView;
+import android.widget.LinearLayout;
+import android.widget.TextView;
+import android.widget.Toast;
+import com.google.android.material.bottomsheet.BottomSheetBehavior;
+import java.nio.ByteBuffer;
+import org.tensorflow.lite.examples.detection.env.ImageUtils;
+import org.tensorflow.lite.examples.detection.env.Logger;
+
+public abstract class CameraActivity extends AppCompatActivity
+ implements OnImageAvailableListener,
+ Camera.PreviewCallback,
+ CompoundButton.OnCheckedChangeListener,
+ View.OnClickListener {
+ private static final Logger LOGGER = new Logger();
+
+ private static final int PERMISSIONS_REQUEST = 1;
+
+ private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;
+ protected int previewWidth = 0;
+ protected int previewHeight = 0;
+ private boolean debug = false;
+ private Handler handler;
+ private HandlerThread handlerThread;
+ private boolean useCamera2API;
+ private boolean isProcessingFrame = false;
+ private byte[][] yuvBytes = new byte[3][];
+ private int[] rgbBytes = null;
+ private int yRowStride;
+ private Runnable postInferenceCallback;
+ private Runnable imageConverter;
+
+ private LinearLayout bottomSheetLayout;
+ private LinearLayout gestureLayout;
+ private BottomSheetBehavior<LinearLayout> sheetBehavior;
+
+ protected TextView frameValueTextView, cropValueTextView, inferenceTimeTextView;
+ protected ImageView bottomSheetArrowImageView;
+ private ImageView plusImageView, minusImageView;
+ private SwitchCompat apiSwitchCompat;
+ private TextView threadsTextView;
+
+ @Override
+ protected void onCreate(final Bundle savedInstanceState) {
+ LOGGER.d("onCreate " + this);
+ super.onCreate(null);
+ getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+
+ setContentView(R.layout.tfe_od_activity_camera);
+ Toolbar toolbar = findViewById(R.id.toolbar);
+ setSupportActionBar(toolbar);
+ getSupportActionBar().setDisplayShowTitleEnabled(false);
+
+ if (hasPermission()) {
+ setFragment();
+ } else {
+ requestPermission();
+ }
+
+ threadsTextView = findViewById(R.id.threads);
+ plusImageView = findViewById(R.id.plus);
+ minusImageView = findViewById(R.id.minus);
+ apiSwitchCompat = findViewById(R.id.api_info_switch);
+ bottomSheetLayout = findViewById(R.id.bottom_sheet_layout);
+ gestureLayout = findViewById(R.id.gesture_layout);
+ sheetBehavior = BottomSheetBehavior.from(bottomSheetLayout);
+ bottomSheetArrowImageView = findViewById(R.id.bottom_sheet_arrow);
+
+ ViewTreeObserver vto = gestureLayout.getViewTreeObserver();
+ vto.addOnGlobalLayoutListener(
+ new ViewTreeObserver.OnGlobalLayoutListener() {
+ @Override
+ public void onGlobalLayout() {
+ if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
+ gestureLayout.getViewTreeObserver().removeGlobalOnLayoutListener(this);
+ } else {
+ gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this);
+ }
+ // int width = bottomSheetLayout.getMeasuredWidth();
+ int height = gestureLayout.getMeasuredHeight();
+
+ sheetBehavior.setPeekHeight(height);
+ }
+ });
+ sheetBehavior.setHideable(false);
+
+ sheetBehavior.setBottomSheetCallback(
+ new BottomSheetBehavior.BottomSheetCallback() {
+ @Override
+ public void onStateChanged(@NonNull View bottomSheet, int newState) {
+ switch (newState) {
+ case BottomSheetBehavior.STATE_HIDDEN:
+ break;
+ case BottomSheetBehavior.STATE_EXPANDED:
+ {
+ bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_down);
+ }
+ break;
+ case BottomSheetBehavior.STATE_COLLAPSED:
+ {
+ bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
+ }
+ break;
+ case BottomSheetBehavior.STATE_DRAGGING:
+ break;
+ case BottomSheetBehavior.STATE_SETTLING:
+ bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
+ break;
+ }
+ }
+
+ @Override
+ public void onSlide(@NonNull View bottomSheet, float slideOffset) {}
+ });
+
+ frameValueTextView = findViewById(R.id.frame_info);
+ cropValueTextView = findViewById(R.id.crop_info);
+ inferenceTimeTextView = findViewById(R.id.inference_info);
+
+ apiSwitchCompat.setOnCheckedChangeListener(this);
+
+ plusImageView.setOnClickListener(this);
+ minusImageView.setOnClickListener(this);
+ }
+
+ protected int[] getRgbBytes() {
+ imageConverter.run();
+ return rgbBytes;
+ }
+
+ protected int getLuminanceStride() {
+ return yRowStride;
+ }
+
+ protected byte[] getLuminance() {
+ return yuvBytes[0];
+ }
+
+ /** Callback for android.hardware.Camera API */
+ @Override
+ public void onPreviewFrame(final byte[] bytes, final Camera camera) {
+ if (isProcessingFrame) {
+ LOGGER.w("Dropping frame!");
+ return;
+ }
+
+ try {
+ // Initialize the storage bitmaps once when the resolution is known.
+ if (rgbBytes == null) {
+ Camera.Size previewSize = camera.getParameters().getPreviewSize();
+ previewHeight = previewSize.height;
+ previewWidth = previewSize.width;
+ rgbBytes = new int[previewWidth * previewHeight];
+ onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90);
+ }
+ } catch (final Exception e) {
+ LOGGER.e(e, "Exception!");
+ return;
+ }
+
+ isProcessingFrame = true;
+ yuvBytes[0] = bytes;
+ yRowStride = previewWidth;
+
+ imageConverter =
+ new Runnable() {
+ @Override
+ public void run() {
+ ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
+ }
+ };
+
+ postInferenceCallback =
+ new Runnable() {
+ @Override
+ public void run() {
+ camera.addCallbackBuffer(bytes);
+ isProcessingFrame = false;
+ }
+ };
+ processImage();
+ }
+
+ /** Callback for Camera2 API */
+ @Override
+ public void onImageAvailable(final ImageReader reader) {
+ // We need to wait until we have a preview size from onPreviewSizeChosen.
+ if (previewWidth == 0 || previewHeight == 0) {
+ return;
+ }
+ if (rgbBytes == null) {
+ rgbBytes = new int[previewWidth * previewHeight];
+ }
+ try {
+ final Image image = reader.acquireLatestImage();
+
+ if (image == null) {
+ return;
+ }
+
+ if (isProcessingFrame) {
+ image.close();
+ return;
+ }
+ isProcessingFrame = true;
+ Trace.beginSection("imageAvailable");
+ final Plane[] planes = image.getPlanes();
+ fillBytes(planes, yuvBytes);
+ yRowStride = planes[0].getRowStride();
+ final int uvRowStride = planes[1].getRowStride();
+ final int uvPixelStride = planes[1].getPixelStride();
+
+ imageConverter =
+ new Runnable() {
+ @Override
+ public void run() {
+ ImageUtils.convertYUV420ToARGB8888(
+ yuvBytes[0],
+ yuvBytes[1],
+ yuvBytes[2],
+ previewWidth,
+ previewHeight,
+ yRowStride,
+ uvRowStride,
+ uvPixelStride,
+ rgbBytes);
+ }
+ };
+
+ postInferenceCallback =
+ new Runnable() {
+ @Override
+ public void run() {
+ image.close();
+ isProcessingFrame = false;
+ }
+ };
+
+ processImage();
+ } catch (final Exception e) {
+ LOGGER.e(e, "Exception!");
+ Trace.endSection();
+ return;
+ }
+ Trace.endSection();
+ }
+
+ @Override
+ public synchronized void onStart() {
+ LOGGER.d("onStart " + this);
+ super.onStart();
+ }
+
+ @Override
+ public synchronized void onResume() {
+ LOGGER.d("onResume " + this);
+ super.onResume();
+
+ handlerThread = new HandlerThread("inference");
+ handlerThread.start();
+ handler = new Handler(handlerThread.getLooper());
+ }
+
+ @Override
+ public synchronized void onPause() {
+ LOGGER.d("onPause " + this);
+
+ handlerThread.quitSafely();
+ try {
+ handlerThread.join();
+ handlerThread = null;
+ handler = null;
+ } catch (final InterruptedException e) {
+ LOGGER.e(e, "Exception!");
+ }
+
+ super.onPause();
+ }
+
+ @Override
+ public synchronized void onStop() {
+ LOGGER.d("onStop " + this);
+ super.onStop();
+ }
+
+ @Override
+ public synchronized void onDestroy() {
+ LOGGER.d("onDestroy " + this);
+ super.onDestroy();
+ }
+
+ protected synchronized void runInBackground(final Runnable r) {
+ if (handler != null) {
+ handler.post(r);
+ }
+ }
+
+ @Override
+ public void onRequestPermissionsResult(
+ final int requestCode, final String[] permissions, final int[] grantResults) {
+ super.onRequestPermissionsResult(requestCode, permissions, grantResults);
+ if (requestCode == PERMISSIONS_REQUEST) {
+ if (allPermissionsGranted(grantResults)) {
+ setFragment();
+ } else {
+ requestPermission();
+ }
+ }
+ }
+
+ private static boolean allPermissionsGranted(final int[] grantResults) {
+ for (int result : grantResults) {
+ if (result != PackageManager.PERMISSION_GRANTED) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ private boolean hasPermission() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED;
+ } else {
+ return true;
+ }
+ }
+
+ private void requestPermission() {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) {
+ Toast.makeText(
+ CameraActivity.this,
+ "Camera permission is required for this demo",
+ Toast.LENGTH_LONG)
+ .show();
+ }
+ requestPermissions(new String[] {PERMISSION_CAMERA}, PERMISSIONS_REQUEST);
+ }
+ }
+
+ // Returns true if the device supports the required hardware level, or better.
+ private boolean isHardwareLevelSupported(
+ CameraCharacteristics characteristics, int requiredLevel) {
+ int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+ if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+ return requiredLevel == deviceLevel;
+ }
+ // deviceLevel is not LEGACY, can use numerical sort
+ return requiredLevel <= deviceLevel;
+ }
+
+ private String chooseCamera() {
+ final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
+ try {
+ for (final String cameraId : manager.getCameraIdList()) {
+ final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
+
+ // We don't use a front facing camera in this sample.
+ final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
+ if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
+ continue;
+ }
+
+ final StreamConfigurationMap map =
+ characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+ if (map == null) {
+ continue;
+ }
+
+ // Fallback to camera1 API for internal cameras that don't have full support.
+ // This should help with legacy situations where using the camera2 API causes
+ // distorted or otherwise broken previews.
+ useCamera2API =
+ (facing == CameraCharacteristics.LENS_FACING_EXTERNAL)
+ || isHardwareLevelSupported(
+ characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
+ LOGGER.i("Camera API lv2?: %s", useCamera2API);
+ return cameraId;
+ }
+ } catch (CameraAccessException e) {
+ LOGGER.e(e, "Not allowed to access camera");
+ }
+
+ return null;
+ }
+
+ protected void setFragment() {
+ String cameraId = chooseCamera();
+
+ Fragment fragment;
+ if (useCamera2API) {
+ CameraConnectionFragment camera2Fragment =
+ CameraConnectionFragment.newInstance(
+ new CameraConnectionFragment.ConnectionCallback() {
+ @Override
+ public void onPreviewSizeChosen(final Size size, final int rotation) {
+ previewHeight = size.getHeight();
+ previewWidth = size.getWidth();
+ CameraActivity.this.onPreviewSizeChosen(size, rotation);
+ }
+ },
+ this,
+ getLayoutId(),
+ getDesiredPreviewFrameSize());
+
+ camera2Fragment.setCamera(cameraId);
+ fragment = camera2Fragment;
+ } else {
+ fragment =
+ new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
+ }
+
+ getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit();
+ }
+
+ protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
+ // Because of the variable row stride it's not possible to know in
+ // advance the actual necessary dimensions of the yuv planes.
+ for (int i = 0; i < planes.length; ++i) {
+ final ByteBuffer buffer = planes[i].getBuffer();
+ if (yuvBytes[i] == null) {
+ LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
+ yuvBytes[i] = new byte[buffer.capacity()];
+ }
+ buffer.get(yuvBytes[i]);
+ }
+ }
+
+ public boolean isDebug() {
+ return debug;
+ }
+
+ protected void readyForNextImage() {
+ if (postInferenceCallback != null) {
+ postInferenceCallback.run();
+ }
+ }
+
+ protected int getScreenOrientation() {
+ switch (getWindowManager().getDefaultDisplay().getRotation()) {
+ case Surface.ROTATION_270:
+ return 270;
+ case Surface.ROTATION_180:
+ return 180;
+ case Surface.ROTATION_90:
+ return 90;
+ default:
+ return 0;
+ }
+ }
+
+ @Override
+ public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
+ setUseNNAPI(isChecked);
+ if (isChecked) apiSwitchCompat.setText("NNAPI");
+ else apiSwitchCompat.setText("TFLITE");
+ }
+
+ @Override
+ public void onClick(View v) {
+ if (v.getId() == R.id.plus) {
+ String threads = threadsTextView.getText().toString().trim();
+ int numThreads = Integer.parseInt(threads);
+ if (numThreads >= 9) return;
+ numThreads++;
+ threadsTextView.setText(String.valueOf(numThreads));
+ setNumThreads(numThreads);
+ } else if (v.getId() == R.id.minus) {
+ String threads = threadsTextView.getText().toString().trim();
+ int numThreads = Integer.parseInt(threads);
+ if (numThreads == 1) {
+ return;
+ }
+ numThreads--;
+ threadsTextView.setText(String.valueOf(numThreads));
+ setNumThreads(numThreads);
+ }
+ }
+
+ protected void showFrameInfo(String frameInfo) {
+ frameValueTextView.setText(frameInfo);
+ }
+
+ protected void showCropInfo(String cropInfo) {
+ cropValueTextView.setText(cropInfo);
+ }
+
+ protected void showInference(String inferenceTime) {
+ inferenceTimeTextView.setText(inferenceTime);
+ }
+
+ protected abstract void processImage();
+
+ protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
+
+ protected abstract int getLayoutId();
+
+ protected abstract Size getDesiredPreviewFrameSize();
+
+ protected abstract void setNumThreads(int numThreads);
+
+ protected abstract void setUseNNAPI(boolean isChecked);
+}
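
CameraActivity owns the single-frame-in-flight handshake: isProcessingFrame drops frames that arrive mid-inference, and postInferenceCallback (invoked through readyForNextImage()) returns the buffer or Image to the camera. A subclass's processImage() is expected to follow the pattern below; this sketch is illustrative, and DetectorActivity later in the diff is the real implementation:

    @Override
    protected void processImage() {
      // Copy the converted ARGB pixels out while the camera buffer is still valid.
      rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
      // Hand the buffer/Image back so the next frame can be delivered.
      readyForNextImage();
      runInBackground(() -> {
        // Run inference on rgbFrameBitmap; post UI updates with runOnUiThread().
      });
    }
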
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraConnectionFragment.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraConnectionFragment.java
new file mode 100755
index 0000000000000000000000000000000000000000..641f3084bdaaa1331624bebccb440410b1296301
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraConnectionFragment.java
@@ -0,0 +1,569 @@
+/*
+ * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.tensorflow.lite.examples.detection;
+
+import android.annotation.SuppressLint;
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.app.Dialog;
+import android.app.DialogFragment;
+import android.app.Fragment;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.content.res.Configuration;
+import android.graphics.ImageFormat;
+import android.graphics.Matrix;
+import android.graphics.RectF;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.media.ImageReader;
+import android.media.ImageReader.OnImageAvailableListener;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.text.TextUtils;
+import android.util.Size;
+import android.util.SparseIntArray;
+import android.view.LayoutInflater;
+import android.view.Surface;
+import android.view.TextureView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.Toast;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+import org.tensorflow.lite.examples.detection.customview.AutoFitTextureView;
+import org.tensorflow.lite.examples.detection.env.Logger;
+
+@SuppressLint("ValidFragment")
+public class CameraConnectionFragment extends Fragment {
+ private static final Logger LOGGER = new Logger();
+
+ /**
+ * The camera preview size will be chosen to be the smallest frame by pixel size capable of
+ * containing a DESIRED_SIZE x DESIRED_SIZE square.
+ */
+ private static final int MINIMUM_PREVIEW_SIZE = 320;
+
+ /** Conversion from screen rotation to JPEG orientation. */
+ private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
+
+ private static final String FRAGMENT_DIALOG = "dialog";
+
+ static {
+ ORIENTATIONS.append(Surface.ROTATION_0, 90);
+ ORIENTATIONS.append(Surface.ROTATION_90, 0);
+ ORIENTATIONS.append(Surface.ROTATION_180, 270);
+ ORIENTATIONS.append(Surface.ROTATION_270, 180);
+ }
+
+ /** A {@link Semaphore} to prevent the app from exiting before closing the camera. */
+ private final Semaphore cameraOpenCloseLock = new Semaphore(1);
+ /** A {@link OnImageAvailableListener} to receive frames as they are available. */
+ private final OnImageAvailableListener imageListener;
+ /** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */
+ private final Size inputSize;
+ /** The layout identifier to inflate for this Fragment. */
+ private final int layout;
+
+ private final ConnectionCallback cameraConnectionCallback;
+ private final CameraCaptureSession.CaptureCallback captureCallback =
+ new CameraCaptureSession.CaptureCallback() {
+ @Override
+ public void onCaptureProgressed(
+ final CameraCaptureSession session,
+ final CaptureRequest request,
+ final CaptureResult partialResult) {}
+
+ @Override
+ public void onCaptureCompleted(
+ final CameraCaptureSession session,
+ final CaptureRequest request,
+ final TotalCaptureResult result) {}
+ };
+ /** ID of the current {@link CameraDevice}. */
+ private String cameraId;
+ /** An {@link AutoFitTextureView} for camera preview. */
+ private AutoFitTextureView textureView;
+ /** A {@link CameraCaptureSession } for camera preview. */
+ private CameraCaptureSession captureSession;
+ /** A reference to the opened {@link CameraDevice}. */
+ private CameraDevice cameraDevice;
+ /** The rotation in degrees of the camera sensor from the display. */
+ private Integer sensorOrientation;
+ /** The {@link Size} of camera preview. */
+ private Size previewSize;
+ /** An additional thread for running tasks that shouldn't block the UI. */
+ private HandlerThread backgroundThread;
+ /** A {@link Handler} for running tasks in the background. */
+ private Handler backgroundHandler;
+ /** An {@link ImageReader} that handles preview frame capture. */
+ private ImageReader previewReader;
+ /** {@link CaptureRequest.Builder} for the camera preview */
+ private CaptureRequest.Builder previewRequestBuilder;
+ /** {@link CaptureRequest} generated by {@link #previewRequestBuilder} */
+ private CaptureRequest previewRequest;
+ /** {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state. */
+ private final CameraDevice.StateCallback stateCallback =
+ new CameraDevice.StateCallback() {
+ @Override
+ public void onOpened(final CameraDevice cd) {
+ // This method is called when the camera is opened. We start camera preview here.
+ cameraOpenCloseLock.release();
+ cameraDevice = cd;
+ createCameraPreviewSession();
+ }
+
+ @Override
+ public void onDisconnected(final CameraDevice cd) {
+ cameraOpenCloseLock.release();
+ cd.close();
+ cameraDevice = null;
+ }
+
+ @Override
+ public void onError(final CameraDevice cd, final int error) {
+ cameraOpenCloseLock.release();
+ cd.close();
+ cameraDevice = null;
+ final Activity activity = getActivity();
+ if (null != activity) {
+ activity.finish();
+ }
+ }
+ };
+ /**
+ * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
+ * TextureView}.
+ */
+ private final TextureView.SurfaceTextureListener surfaceTextureListener =
+ new TextureView.SurfaceTextureListener() {
+ @Override
+ public void onSurfaceTextureAvailable(
+ final SurfaceTexture texture, final int width, final int height) {
+ openCamera(width, height);
+ }
+
+ @Override
+ public void onSurfaceTextureSizeChanged(
+ final SurfaceTexture texture, final int width, final int height) {
+ configureTransform(width, height);
+ }
+
+ @Override
+ public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
+ return true;
+ }
+
+ @Override
+ public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
+ };
+
+ private CameraConnectionFragment(
+ final ConnectionCallback connectionCallback,
+ final OnImageAvailableListener imageListener,
+ final int layout,
+ final Size inputSize) {
+ this.cameraConnectionCallback = connectionCallback;
+ this.imageListener = imageListener;
+ this.layout = layout;
+ this.inputSize = inputSize;
+ }
+
+ /**
+ * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
+ * width and height are at least as large as the minimum of both, or an exact match if possible.
+ *
+ * @param choices The list of sizes that the camera supports for the intended output class
+ * @param width The minimum desired width
+ * @param height The minimum desired height
+ * @return The optimal {@code Size}, or an arbitrary one if none were big enough
+ */
+ protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
+ final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
+ final Size desiredSize = new Size(width, height);
+
+ // Collect the supported resolutions that are at least as big as the preview Surface
+ boolean exactSizeFound = false;
+ final List<Size> bigEnough = new ArrayList<Size>();
+ final List<Size> tooSmall = new ArrayList<Size>();
+ for (final Size option : choices) {
+ if (option.equals(desiredSize)) {
+ // Set the size but don't return yet so that remaining sizes will still be logged.
+ exactSizeFound = true;
+ }
+
+ if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
+ bigEnough.add(option);
+ } else {
+ tooSmall.add(option);
+ }
+ }
+
+ LOGGER.i("Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
+ LOGGER.i("Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
+ LOGGER.i("Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");
+
+ if (exactSizeFound) {
+ LOGGER.i("Exact size match found.");
+ return desiredSize;
+ }
+
+ // Pick the smallest of those, assuming we found any
+ if (bigEnough.size() > 0) {
+ final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
+ LOGGER.i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
+ return chosenSize;
+ } else {
+ LOGGER.e("Couldn't find any suitable preview size");
+ return choices[0];
+ }
+ }
+
+ public static CameraConnectionFragment newInstance(
+ final ConnectionCallback callback,
+ final OnImageAvailableListener imageListener,
+ final int layout,
+ final Size inputSize) {
+ return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
+ }
+
+ /**
+ * Shows a {@link Toast} on the UI thread.
+ *
+ * @param text The message to show
+ */
+ private void showToast(final String text) {
+ final Activity activity = getActivity();
+ if (activity != null) {
+ activity.runOnUiThread(
+ new Runnable() {
+ @Override
+ public void run() {
+ Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
+ }
+ });
+ }
+ }
+
+ @Override
+ public View onCreateView(
+ final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
+ return inflater.inflate(layout, container, false);
+ }
+
+ @Override
+ public void onViewCreated(final View view, final Bundle savedInstanceState) {
+ textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
+ }
+
+ @Override
+ public void onActivityCreated(final Bundle savedInstanceState) {
+ super.onActivityCreated(savedInstanceState);
+ }
+
+ @Override
+ public void onResume() {
+ super.onResume();
+ startBackgroundThread();
+
+ // When the screen is turned off and turned back on, the SurfaceTexture is already
+ // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
+ // a camera and start preview from here (otherwise, we wait until the surface is ready in
+ // the SurfaceTextureListener).
+ if (textureView.isAvailable()) {
+ openCamera(textureView.getWidth(), textureView.getHeight());
+ } else {
+ textureView.setSurfaceTextureListener(surfaceTextureListener);
+ }
+ }
+
+ @Override
+ public void onPause() {
+ closeCamera();
+ stopBackgroundThread();
+ super.onPause();
+ }
+
+ public void setCamera(String cameraId) {
+ this.cameraId = cameraId;
+ }
+
+ /** Sets up member variables related to camera. */
+ private void setUpCameraOutputs() {
+ final Activity activity = getActivity();
+ final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
+ try {
+ final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
+
+ final StreamConfigurationMap map =
+ characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+ sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+
+ // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
+ // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
+ // garbage capture data.
+ previewSize =
+ chooseOptimalSize(
+ map.getOutputSizes(SurfaceTexture.class),
+ inputSize.getWidth(),
+ inputSize.getHeight());
+
+ // We fit the aspect ratio of TextureView to the size of preview we picked.
+ final int orientation = getResources().getConfiguration().orientation;
+ if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
+ textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
+ } else {
+ textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
+ }
+ } catch (final CameraAccessException e) {
+ LOGGER.e(e, "Exception!");
+ } catch (final NullPointerException e) {
+ // Currently an NPE is thrown when the Camera2API is used but not supported on the
+ // device this code runs.
+ ErrorDialog.newInstance(getString(R.string.tfe_od_camera_error))
+ .show(getChildFragmentManager(), FRAGMENT_DIALOG);
+ throw new IllegalStateException(getString(R.string.tfe_od_camera_error));
+ }
+
+ cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation);
+ }
+
+ /** Opens the camera specified by {@link CameraConnectionFragment#cameraId}. */
+ private void openCamera(final int width, final int height) {
+ setUpCameraOutputs();
+ configureTransform(width, height);
+ final Activity activity = getActivity();
+ final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
+ try {
+ if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
+ throw new RuntimeException("Time out waiting to lock camera opening.");
+ }
+ manager.openCamera(cameraId, stateCallback, backgroundHandler);
+ } catch (final CameraAccessException e) {
+ LOGGER.e(e, "Exception!");
+ } catch (final InterruptedException e) {
+ throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
+ }
+ }
+
+ /** Closes the current {@link CameraDevice}. */
+ private void closeCamera() {
+ try {
+ cameraOpenCloseLock.acquire();
+ if (null != captureSession) {
+ captureSession.close();
+ captureSession = null;
+ }
+ if (null != cameraDevice) {
+ cameraDevice.close();
+ cameraDevice = null;
+ }
+ if (null != previewReader) {
+ previewReader.close();
+ previewReader = null;
+ }
+ } catch (final InterruptedException e) {
+ throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
+ } finally {
+ cameraOpenCloseLock.release();
+ }
+ }
+
+ /** Starts a background thread and its {@link Handler}. */
+ private void startBackgroundThread() {
+ backgroundThread = new HandlerThread("ImageListener");
+ backgroundThread.start();
+ backgroundHandler = new Handler(backgroundThread.getLooper());
+ }
+
+ /** Stops the background thread and its {@link Handler}. */
+ private void stopBackgroundThread() {
+ backgroundThread.quitSafely();
+ try {
+ backgroundThread.join();
+ backgroundThread = null;
+ backgroundHandler = null;
+ } catch (final InterruptedException e) {
+ LOGGER.e(e, "Exception!");
+ }
+ }
+
+ /** Creates a new {@link CameraCaptureSession} for camera preview. */
+ private void createCameraPreviewSession() {
+ try {
+ final SurfaceTexture texture = textureView.getSurfaceTexture();
+ assert texture != null;
+
+ // We configure the size of default buffer to be the size of camera preview we want.
+ texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
+
+ // This is the output Surface we need to start preview.
+ final Surface surface = new Surface(texture);
+
+ // We set up a CaptureRequest.Builder with the output Surface.
+ previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ previewRequestBuilder.addTarget(surface);
+
+ LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());
+
+ // Create the reader for the preview frames.
+ previewReader =
+ ImageReader.newInstance(
+ previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
+
+ previewReader.setOnImageAvailableListener(imageListener, backgroundHandler);
+ previewRequestBuilder.addTarget(previewReader.getSurface());
+
+ // Here, we create a CameraCaptureSession for camera preview.
+ cameraDevice.createCaptureSession(
+ Arrays.asList(surface, previewReader.getSurface()),
+ new CameraCaptureSession.StateCallback() {
+
+ @Override
+ public void onConfigured(final CameraCaptureSession cameraCaptureSession) {
+ // The camera is already closed
+ if (null == cameraDevice) {
+ return;
+ }
+
+ // When the session is ready, we start displaying the preview.
+ captureSession = cameraCaptureSession;
+ try {
+ // Auto focus should be continuous for camera preview.
+ previewRequestBuilder.set(
+ CaptureRequest.CONTROL_AF_MODE,
+ CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+ // Flash is automatically enabled when necessary.
+ previewRequestBuilder.set(
+ CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
+
+ // Finally, we start displaying the camera preview.
+ previewRequest = previewRequestBuilder.build();
+ captureSession.setRepeatingRequest(
+ previewRequest, captureCallback, backgroundHandler);
+ } catch (final CameraAccessException e) {
+ LOGGER.e(e, "Exception!");
+ }
+ }
+
+ @Override
+ public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
+ showToast("Failed");
+ }
+ },
+ null);
+ } catch (final CameraAccessException e) {
+ LOGGER.e(e, "Exception!");
+ }
+ }
+
+ /**
+ * Configures the necessary {@link Matrix} transformation to `mTextureView`. This method should be
+ * called after the camera preview size is determined in setUpCameraOutputs and also the size of
+ * `mTextureView` is fixed.
+ *
+ * @param viewWidth The width of `mTextureView`
+ * @param viewHeight The height of `mTextureView`
+ */
+ private void configureTransform(final int viewWidth, final int viewHeight) {
+ final Activity activity = getActivity();
+ if (null == textureView || null == previewSize || null == activity) {
+ return;
+ }
+ final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
+ final Matrix matrix = new Matrix();
+ final RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
+ final RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
+ final float centerX = viewRect.centerX();
+ final float centerY = viewRect.centerY();
+ if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
+ bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
+ matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
+ final float scale =
+ Math.max(
+ (float) viewHeight / previewSize.getHeight(),
+ (float) viewWidth / previewSize.getWidth());
+ matrix.postScale(scale, scale, centerX, centerY);
+ matrix.postRotate(90 * (rotation - 2), centerX, centerY);
+ } else if (Surface.ROTATION_180 == rotation) {
+ matrix.postRotate(180, centerX, centerY);
+ }
+ textureView.setTransform(matrix);
+ }
+
+ /**
+ * Callback for Activities to use to initialize their data once the selected preview size is
+ * known.
+ */
+ public interface ConnectionCallback {
+ void onPreviewSizeChosen(Size size, int cameraRotation);
+ }
+
+ /** Compares two {@code Size}s based on their areas. */
+ static class CompareSizesByArea implements Comparator<Size> {
+ @Override
+ public int compare(final Size lhs, final Size rhs) {
+ // We cast here to ensure the multiplications won't overflow
+ return Long.signum(
+ (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
+ }
+ }
+
+ /** Shows an error message dialog. */
+ public static class ErrorDialog extends DialogFragment {
+ private static final String ARG_MESSAGE = "message";
+
+ public static ErrorDialog newInstance(final String message) {
+ final ErrorDialog dialog = new ErrorDialog();
+ final Bundle args = new Bundle();
+ args.putString(ARG_MESSAGE, message);
+ dialog.setArguments(args);
+ return dialog;
+ }
+
+ @Override
+ public Dialog onCreateDialog(final Bundle savedInstanceState) {
+ final Activity activity = getActivity();
+ return new AlertDialog.Builder(activity)
+ .setMessage(getArguments().getString(ARG_MESSAGE))
+ .setPositiveButton(
+ android.R.string.ok,
+ new DialogInterface.OnClickListener() {
+ @Override
+ public void onClick(final DialogInterface dialogInterface, final int i) {
+ activity.finish();
+ }
+ })
+ .create();
+ }
+ }
+}
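
chooseOptimalSize picks the smallest stream size that can contain the desired preview, never accepting a dimension below MINIMUM_PREVIEW_SIZE, and short-circuits on an exact match. A worked example with hypothetical supported sizes:

    // minSize = max(min(640, 480), 320) = 480, so both dimensions must reach 480.
    Size[] choices = {
      new Size(320, 240),  // too small: height 240 < 480
      new Size(640, 480),  // exact match with the desired size
      new Size(1280, 720), // big enough, but larger by area
    };
    Size chosen = CameraConnectionFragment.chooseOptimalSize(choices, 640, 480);
    // chosen == 640x480. Without the exact match, CompareSizesByArea would pick
    // the smallest remaining candidate whose width and height both reach 480.
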
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/DetectorActivity.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/DetectorActivity.java
new file mode 100755
index 0000000000000000000000000000000000000000..cbedf8431cfd503fe486fc4d5d54aa5672cba23a
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/DetectorActivity.java
@@ -0,0 +1,266 @@
+/*
+ * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.tensorflow.lite.examples.detection;
+
+import android.graphics.Bitmap;
+import android.graphics.Bitmap.Config;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Matrix;
+import android.graphics.Paint;
+import android.graphics.Paint.Style;
+import android.graphics.RectF;
+import android.graphics.Typeface;
+import android.media.ImageReader.OnImageAvailableListener;
+import android.os.SystemClock;
+import android.util.Log;
+import android.util.Size;
+import android.util.TypedValue;
+import android.widget.Toast;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.tensorflow.lite.examples.detection.customview.OverlayView;
+import org.tensorflow.lite.examples.detection.customview.OverlayView.DrawCallback;
+import org.tensorflow.lite.examples.detection.env.BorderedText;
+import org.tensorflow.lite.examples.detection.env.ImageUtils;
+import org.tensorflow.lite.examples.detection.env.Logger;
+import org.tensorflow.lite.examples.detection.tflite.Classifier;
+import org.tensorflow.lite.examples.detection.tflite.YoloV4Classifier;
+import org.tensorflow.lite.examples.detection.tracking.MultiBoxTracker;
+
+/**
+ * An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to detect and then track
+ * objects.
+ */
+public class DetectorActivity extends CameraActivity implements OnImageAvailableListener {
+ private static final Logger LOGGER = new Logger();
+
+ private static final int TF_OD_API_INPUT_SIZE = 416;
+ private static final boolean TF_OD_API_IS_QUANTIZED = false;
+ private static final String TF_OD_API_MODEL_FILE = "yolov4-416-fp32.tflite";
+
+ private static final String TF_OD_API_LABELS_FILE = "file:///android_asset/coco.txt";
+
+ private static final DetectorMode MODE = DetectorMode.TF_OD_API;
+ private static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.5f;
+ private static final boolean MAINTAIN_ASPECT = false;
+ private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480);
+ private static final boolean SAVE_PREVIEW_BITMAP = false;
+ private static final float TEXT_SIZE_DIP = 10;
+ OverlayView trackingOverlay;
+ private Integer sensorOrientation;
+
+ private Classifier detector;
+
+ private long lastProcessingTimeMs;
+ private Bitmap rgbFrameBitmap = null;
+ private Bitmap croppedBitmap = null;
+ private Bitmap cropCopyBitmap = null;
+
+ private boolean computingDetection = false;
+
+ private long timestamp = 0;
+
+ private Matrix frameToCropTransform;
+ private Matrix cropToFrameTransform;
+
+ private MultiBoxTracker tracker;
+
+ private BorderedText borderedText;
+
+ @Override
+ public void onPreviewSizeChosen(final Size size, final int rotation) {
+ final float textSizePx =
+ TypedValue.applyDimension(
+ TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
+ borderedText = new BorderedText(textSizePx);
+ borderedText.setTypeface(Typeface.MONOSPACE);
+
+ tracker = new MultiBoxTracker(this);
+
+ int cropSize = TF_OD_API_INPUT_SIZE;
+
+ try {
+ detector =
+ YoloV4Classifier.create(
+ getAssets(),
+ TF_OD_API_MODEL_FILE,
+ TF_OD_API_LABELS_FILE,
+ TF_OD_API_IS_QUANTIZED);
+// detector = TFLiteObjectDetectionAPIModel.create(
+// getAssets(),
+// TF_OD_API_MODEL_FILE,
+// TF_OD_API_LABELS_FILE,
+// TF_OD_API_INPUT_SIZE,
+// TF_OD_API_IS_QUANTIZED);
+ cropSize = TF_OD_API_INPUT_SIZE;
+ } catch (final IOException e) {
+ e.printStackTrace();
+ LOGGER.e(e, "Exception initializing classifier!");
+ Toast toast =
+ Toast.makeText(
+ getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
+ toast.show();
+ finish();
+ }
+
+ previewWidth = size.getWidth();
+ previewHeight = size.getHeight();
+
+ sensorOrientation = rotation - getScreenOrientation();
+ LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation);
+
+ LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
+ rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
+ croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
+
+ frameToCropTransform =
+ ImageUtils.getTransformationMatrix(
+ previewWidth, previewHeight,
+ cropSize, cropSize,
+ sensorOrientation, MAINTAIN_ASPECT);
+
+ cropToFrameTransform = new Matrix();
+ frameToCropTransform.invert(cropToFrameTransform);
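+ // The inverted matrix maps boxes from the detector's square crop back into
+ // preview-frame coordinates; it is applied to each detection before tracking.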
+
+ trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay);
+ trackingOverlay.addCallback(
+ new DrawCallback() {
+ @Override
+ public void drawCallback(final Canvas canvas) {
+ tracker.draw(canvas);
+ if (isDebug()) {
+ tracker.drawDebug(canvas);
+ }
+ }
+ });
+
+ tracker.setFrameConfiguration(previewWidth, previewHeight, sensorOrientation);
+ }
+
+ @Override
+ protected void processImage() {
+ ++timestamp;
+ final long currTimestamp = timestamp;
+ trackingOverlay.postInvalidate();
+
+ // No mutex needed as this method is not reentrant.
+ if (computingDetection) {
+ readyForNextImage();
+ return;
+ }
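+ // Single-flight guard: frames arriving while a detection is in progress are
+ // dropped rather than queued, which keeps latency bounded on slow devices.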
+ computingDetection = true;
+ LOGGER.i("Preparing image " + currTimestamp + " for detection in bg thread.");
+
+ rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
+
+ readyForNextImage();
+
+ final Canvas canvas = new Canvas(croppedBitmap);
+ canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
+ // For examining the actual TF input.
+ if (SAVE_PREVIEW_BITMAP) {
+ ImageUtils.saveBitmap(croppedBitmap);
+ }
+
+ runInBackground(
+ new Runnable() {
+ @Override
+ public void run() {
+ LOGGER.i("Running detection on image " + currTimestamp);
+ final long startTime = SystemClock.uptimeMillis();
+ final List<Classifier.Recognition> results = detector.recognizeImage(croppedBitmap);
+ lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
+
+ Log.e("CHECK", "run: " + results.size());
+
+ cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
+ final Canvas canvas = new Canvas(cropCopyBitmap);
+ final Paint paint = new Paint();
+ paint.setColor(Color.RED);
+ paint.setStyle(Style.STROKE);
+ paint.setStrokeWidth(2.0f);
+
+ float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
+ switch (MODE) {
+ case TF_OD_API:
+ minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
+ break;
+ }
+
+ final List<Classifier.Recognition> mappedRecognitions =
+ new LinkedList<Classifier.Recognition>();
+
+ for (final Classifier.Recognition result : results) {
+ final RectF location = result.getLocation();
+ if (location != null && result.getConfidence() >= minimumConfidence) {
+ canvas.drawRect(location, paint);
+
+ cropToFrameTransform.mapRect(location);
+
+ result.setLocation(location);
+ mappedRecognitions.add(result);
+ }
+ }
+
+ tracker.trackResults(mappedRecognitions, currTimestamp);
+ trackingOverlay.postInvalidate();
+
+ computingDetection = false;
+
+ runOnUiThread(
+ new Runnable() {
+ @Override
+ public void run() {
+ showFrameInfo(previewWidth + "x" + previewHeight);
+ showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight());
+ showInference(lastProcessingTimeMs + "ms");
+ }
+ });
+ }
+ });
+ }
+
+ @Override
+ protected int getLayoutId() {
+ return R.layout.tfe_od_camera_connection_fragment_tracking;
+ }
+
+ @Override
+ protected Size getDesiredPreviewFrameSize() {
+ return DESIRED_PREVIEW_SIZE;
+ }
+
+ // Which detection model to use: by default uses TensorFlow Object Detection API frozen
+ // checkpoints.
+ private enum DetectorMode {
+ TF_OD_API;
+ }
+
+ @Override
+ protected void setUseNNAPI(final boolean isChecked) {
+ runInBackground(() -> detector.setUseNNAPI(isChecked));
+ }
+
+ @Override
+ protected void setNumThreads(final int numThreads) {
+ runInBackground(() -> detector.setNumThreads(numThreads));
+ }
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/LegacyCameraConnectionFragment.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/LegacyCameraConnectionFragment.java
new file mode 100755
index 0000000000000000000000000000000000000000..afb9d75999503cd714a12d149578286dbf066906
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/LegacyCameraConnectionFragment.java
@@ -0,0 +1,199 @@
+package org.tensorflow.lite.examples.detection;
+
+/*
+ * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import android.app.Fragment;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Size;
+import android.util.SparseIntArray;
+import android.view.LayoutInflater;
+import android.view.Surface;
+import android.view.TextureView;
+import android.view.View;
+import android.view.ViewGroup;
+import java.io.IOException;
+import java.util.List;
+import org.tensorflow.lite.examples.detection.customview.AutoFitTextureView;
+import org.tensorflow.lite.examples.detection.env.ImageUtils;
+import org.tensorflow.lite.examples.detection.env.Logger;
+
+public class LegacyCameraConnectionFragment extends Fragment {
+ private static final Logger LOGGER = new Logger();
+ /** Conversion from screen rotation to JPEG orientation. */
+ private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
+
+ static {
+ ORIENTATIONS.append(Surface.ROTATION_0, 90);
+ ORIENTATIONS.append(Surface.ROTATION_90, 0);
+ ORIENTATIONS.append(Surface.ROTATION_180, 270);
+ ORIENTATIONS.append(Surface.ROTATION_270, 180);
+ }
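+ // Example: a device held in its natural portrait orientation reports
+ // Surface.ROTATION_0, while the back camera sensor is typically mounted
+ // landscape, hence the 90-degree correction in the first entry.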
+
+ private Camera camera;
+ private Camera.PreviewCallback imageListener;
+ private Size desiredSize;
+ /** The layout identifier to inflate for this Fragment. */
+ private int layout;
+ /** An {@link AutoFitTextureView} for camera preview. */
+ private AutoFitTextureView textureView;
+ /**
+ * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
+ * TextureView}.
+ */
+ private final TextureView.SurfaceTextureListener surfaceTextureListener =
+ new TextureView.SurfaceTextureListener() {
+ @Override
+ public void onSurfaceTextureAvailable(
+ final SurfaceTexture texture, final int width, final int height) {
+
+ int index = getCameraId();
+ camera = Camera.open(index);
+
+ try {
+ Camera.Parameters parameters = camera.getParameters();
+ List<String> focusModes = parameters.getSupportedFocusModes();
+ if (focusModes != null
+ && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
+ parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
+ }
+ List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
+ Size[] sizes = new Size[cameraSizes.size()];
+ int i = 0;
+ for (Camera.Size size : cameraSizes) {
+ sizes[i++] = new Size(size.width, size.height);
+ }
+ Size previewSize =
+ CameraConnectionFragment.chooseOptimalSize(
+ sizes, desiredSize.getWidth(), desiredSize.getHeight());
+ parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
+ camera.setDisplayOrientation(90);
+ camera.setParameters(parameters);
+ camera.setPreviewTexture(texture);
+ } catch (IOException exception) {
+ camera.release();
+ }
+
+ camera.setPreviewCallbackWithBuffer(imageListener);
+ Camera.Size s = camera.getParameters().getPreviewSize();
+ camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
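+ // The callback buffer must hold exactly one NV21 preview frame;
+ // getYUVByteSize accounts for the full-resolution luma plane plus the
+ // 2x2-subsampled, interleaved chroma plane.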
+
+ textureView.setAspectRatio(s.height, s.width);
+
+ camera.startPreview();
+ }
+
+ @Override
+ public void onSurfaceTextureSizeChanged(
+ final SurfaceTexture texture, final int width, final int height) {}
+
+ @Override
+ public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
+ return true;
+ }
+
+ @Override
+ public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
+ };
+ /** An additional thread for running tasks that shouldn't block the UI. */
+ private HandlerThread backgroundThread;
+
+ public LegacyCameraConnectionFragment(
+ final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
+ this.imageListener = imageListener;
+ this.layout = layout;
+ this.desiredSize = desiredSize;
+ }
+
+ @Override
+ public View onCreateView(
+ final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
+ return inflater.inflate(layout, container, false);
+ }
+
+ @Override
+ public void onViewCreated(final View view, final Bundle savedInstanceState) {
+ textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
+ }
+
+ @Override
+ public void onActivityCreated(final Bundle savedInstanceState) {
+ super.onActivityCreated(savedInstanceState);
+ }
+
+ @Override
+ public void onResume() {
+ super.onResume();
+ startBackgroundThread();
+ // When the screen is turned off and turned back on, the SurfaceTexture is already
+ // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
+ // a camera and start preview from here (otherwise, we wait until the surface is ready in
+ // the SurfaceTextureListener).
+
+ if (textureView.isAvailable()) {
+ camera.startPreview();
+ } else {
+ textureView.setSurfaceTextureListener(surfaceTextureListener);
+ }
+ }
+
+ @Override
+ public void onPause() {
+ stopCamera();
+ stopBackgroundThread();
+ super.onPause();
+ }
+
+ /** Starts a background thread and its {@link Handler}. */
+ private void startBackgroundThread() {
+ backgroundThread = new HandlerThread("CameraBackground");
+ backgroundThread.start();
+ }
+
+ /** Stops the background thread and its {@link Handler}. */
+ private void stopBackgroundThread() {
+ backgroundThread.quitSafely();
+ try {
+ backgroundThread.join();
+ backgroundThread = null;
+ } catch (final InterruptedException e) {
+ LOGGER.e(e, "Exception!");
+ }
+ }
+
+ protected void stopCamera() {
+ if (camera != null) {
+ camera.stopPreview();
+ camera.setPreviewCallback(null);
+ camera.release();
+ camera = null;
+ }
+ }
+
+ private int getCameraId() {
+ CameraInfo ci = new CameraInfo();
+ for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
+ Camera.getCameraInfo(i, ci);
+ if (ci.facing == CameraInfo.CAMERA_FACING_BACK) return i;
+ }
+ return -1; // No camera found
+ }
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/MainActivity.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/MainActivity.java
new file mode 100755
index 0000000000000000000000000000000000000000..81412bda22c46e322d7bad8d83fef5c4a6ceb180
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/MainActivity.java
@@ -0,0 +1,162 @@
+package org.tensorflow.lite.examples.detection;
+
+import androidx.appcompat.app.AppCompatActivity;
+
+import android.content.Context;
+import android.content.Intent;
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Matrix;
+import android.graphics.Paint;
+import android.graphics.RectF;
+import android.os.Bundle;
+import android.os.Handler;
+import android.util.Log;
+import android.view.View;
+import android.widget.Button;
+import android.widget.ImageView;
+import android.widget.Toast;
+
+import org.tensorflow.lite.examples.detection.customview.OverlayView;
+import org.tensorflow.lite.examples.detection.env.ImageUtils;
+import org.tensorflow.lite.examples.detection.env.Logger;
+import org.tensorflow.lite.examples.detection.env.Utils;
+import org.tensorflow.lite.examples.detection.tflite.Classifier;
+import org.tensorflow.lite.examples.detection.tflite.YoloV4Classifier;
+import org.tensorflow.lite.examples.detection.tracking.MultiBoxTracker;
+
+import java.io.IOException;
+import java.util.LinkedList;
+import java.util.List;
+
+public class MainActivity extends AppCompatActivity {
+
+ public static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.5f;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.activity_main);
+
+ cameraButton = findViewById(R.id.cameraButton);
+ detectButton = findViewById(R.id.detectButton);
+ imageView = findViewById(R.id.imageView);
+
+ cameraButton.setOnClickListener(v -> startActivity(new Intent(MainActivity.this, DetectorActivity.class)));
+
+ detectButton.setOnClickListener(v -> {
+ Handler handler = new Handler();
+
+ new Thread(() -> {
+ final List<Classifier.Recognition> results = detector.recognizeImage(cropBitmap);
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ handleResult(cropBitmap, results);
+ }
+ });
+ }).start();
+
+ });
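+ // Inference above runs on a plain worker thread; the Handler is created on
+ // the UI thread inside onClick, so posting to it lets handleResult update
+ // the ImageView safely.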
+ this.sourceBitmap = Utils.getBitmapFromAsset(MainActivity.this, "kite.jpg");
+
+ this.cropBitmap = Utils.processBitmap(sourceBitmap, TF_OD_API_INPUT_SIZE);
+
+ this.imageView.setImageBitmap(cropBitmap);
+
+ initBox();
+ }
+
+ private static final Logger LOGGER = new Logger();
+
+ public static final int TF_OD_API_INPUT_SIZE = 416;
+
+ private static final boolean TF_OD_API_IS_QUANTIZED = false;
+
+ private static final String TF_OD_API_MODEL_FILE = "yolov4-416-fp32.tflite";
+
+ private static final String TF_OD_API_LABELS_FILE = "file:///android_asset/coco.txt";
+
+ // Minimum detection confidence to track a detection.
+ private static final boolean MAINTAIN_ASPECT = false;
+ private Integer sensorOrientation = 90;
+
+ private Classifier detector;
+
+ private Matrix frameToCropTransform;
+ private Matrix cropToFrameTransform;
+ private MultiBoxTracker tracker;
+ private OverlayView trackingOverlay;
+
+ protected int previewWidth = 0;
+ protected int previewHeight = 0;
+
+ private Bitmap sourceBitmap;
+ private Bitmap cropBitmap;
+
+ private Button cameraButton, detectButton;
+ private ImageView imageView;
+
+ private void initBox() {
+ previewHeight = TF_OD_API_INPUT_SIZE;
+ previewWidth = TF_OD_API_INPUT_SIZE;
+ frameToCropTransform =
+ ImageUtils.getTransformationMatrix(
+ previewWidth, previewHeight,
+ TF_OD_API_INPUT_SIZE, TF_OD_API_INPUT_SIZE,
+ sensorOrientation, MAINTAIN_ASPECT);
+
+ cropToFrameTransform = new Matrix();
+ frameToCropTransform.invert(cropToFrameTransform);
+
+ tracker = new MultiBoxTracker(this);
+ trackingOverlay = findViewById(R.id.tracking_overlay);
+ trackingOverlay.addCallback(
+ canvas -> tracker.draw(canvas));
+
+ tracker.setFrameConfiguration(TF_OD_API_INPUT_SIZE, TF_OD_API_INPUT_SIZE, sensorOrientation);
+
+ try {
+ detector =
+ YoloV4Classifier.create(
+ getAssets(),
+ TF_OD_API_MODEL_FILE,
+ TF_OD_API_LABELS_FILE,
+ TF_OD_API_IS_QUANTIZED);
+ } catch (final IOException e) {
+ e.printStackTrace();
+ LOGGER.e(e, "Exception initializing classifier!");
+ Toast toast =
+ Toast.makeText(
+ getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
+ toast.show();
+ finish();
+ }
+ }
+
+ private void handleResult(Bitmap bitmap, List<Classifier.Recognition> results) {
+ final Canvas canvas = new Canvas(bitmap);
+ final Paint paint = new Paint();
+ paint.setColor(Color.RED);
+ paint.setStyle(Paint.Style.STROKE);
+ paint.setStrokeWidth(2.0f);
+
+ final List<Classifier.Recognition> mappedRecognitions =
+ new LinkedList<Classifier.Recognition>();
+
+ for (final Classifier.Recognition result : results) {
+ final RectF location = result.getLocation();
+ if (location != null && result.getConfidence() >= MINIMUM_CONFIDENCE_TF_OD_API) {
+ canvas.drawRect(location, paint);
+// cropToFrameTransform.mapRect(location);
+//
+// result.setLocation(location);
+// mappedRecognitions.add(result);
+ }
+ }
+// tracker.trackResults(mappedRecognitions, new Random().nextInt());
+// trackingOverlay.postInvalidate();
+ imageView.setImageBitmap(bitmap);
+ }
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/AutoFitTextureView.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/AutoFitTextureView.java
new file mode 100755
index 0000000000000000000000000000000000000000..8f41eb71336b82fbb43dc8c6d9e88a11d13d1d81
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/AutoFitTextureView.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.tensorflow.lite.examples.detection.customview;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.view.TextureView;
+
+/** A {@link TextureView} that can be adjusted to a specified aspect ratio. */
+public class AutoFitTextureView extends TextureView {
+ private int ratioWidth = 0;
+ private int ratioHeight = 0;
+
+ public AutoFitTextureView(final Context context) {
+ this(context, null);
+ }
+
+ public AutoFitTextureView(final Context context, final AttributeSet attrs) {
+ this(context, attrs, 0);
+ }
+
+ public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
+ super(context, attrs, defStyle);
+ }
+
+ /**
+ * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
+ * calculated from the parameters. Note that the actual values of the parameters don't matter,
+ * that is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) produce the same result.
+ *
+ * @param width Relative horizontal size
+ * @param height Relative vertical size
+ */
+ public void setAspectRatio(final int width, final int height) {
+ if (width < 0 || height < 0) {
+ throw new IllegalArgumentException("Size cannot be negative.");
+ }
+ ratioWidth = width;
+ ratioHeight = height;
+ requestLayout();
+ }
+
+ @Override
+ protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
+ super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+ final int width = MeasureSpec.getSize(widthMeasureSpec);
+ final int height = MeasureSpec.getSize(heightMeasureSpec);
+ if (0 == ratioWidth || 0 == ratioHeight) {
+ setMeasuredDimension(width, height);
+ } else {
+ if (width < height * ratioWidth / ratioHeight) {
+ setMeasuredDimension(width, width * ratioHeight / ratioWidth);
+ } else {
+ setMeasuredDimension(height * ratioWidth / ratioHeight, height);
+ }
+ }
+ }
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/OverlayView.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/OverlayView.java
new file mode 100755
index 0000000000000000000000000000000000000000..8f7e66102a7d56976a81aee3351e315b4257343f
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/OverlayView.java
@@ -0,0 +1,48 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+package org.tensorflow.lite.examples.detection.customview;
+
+import android.content.Context;
+import android.graphics.Canvas;
+import android.util.AttributeSet;
+import android.view.View;
+import java.util.LinkedList;
+import java.util.List;
+
+/** A simple View providing a render callback to other classes. */
+public class OverlayView extends View {
+ private final List<DrawCallback> callbacks = new LinkedList<DrawCallback>();
+
+ public OverlayView(final Context context, final AttributeSet attrs) {
+ super(context, attrs);
+ }
+
+ public void addCallback(final DrawCallback callback) {
+ callbacks.add(callback);
+ }
+
+ @Override
+ public synchronized void draw(final Canvas canvas) {
+ for (final DrawCallback callback : callbacks) {
+ callback.drawCallback(canvas);
+ }
+ }
+
+ /** Interface defining the callback for client classes. */
+ public interface DrawCallback {
+ public void drawCallback(final Canvas canvas);
+ }
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/RecognitionScoreView.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/RecognitionScoreView.java
new file mode 100755
index 0000000000000000000000000000000000000000..daf862d30e5c1fb3bc04beb1a4ff5537f84a7b96
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/RecognitionScoreView.java
@@ -0,0 +1,67 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+package org.tensorflow.lite.examples.detection.customview;
+
+import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.Paint;
+import android.util.AttributeSet;
+import android.util.TypedValue;
+import android.view.View;
+import java.util.List;
+import org.tensorflow.lite.examples.detection.tflite.Classifier.Recognition;
+
+public class RecognitionScoreView extends View implements ResultsView {
+ private static final float TEXT_SIZE_DIP = 14;
+ private final float textSizePx;
+ private final Paint fgPaint;
+ private final Paint bgPaint;
+ private List<Recognition> results;
+
+ public RecognitionScoreView(final Context context, final AttributeSet set) {
+ super(context, set);
+
+ textSizePx =
+ TypedValue.applyDimension(
+ TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
+ fgPaint = new Paint();
+ fgPaint.setTextSize(textSizePx);
+
+ bgPaint = new Paint();
+ bgPaint.setColor(0xcc4285f4);
+ }
+
+ @Override
+ public void setResults(final List<Recognition> results) {
+ this.results = results;
+ postInvalidate();
+ }
+
+ @Override
+ public void onDraw(final Canvas canvas) {
+ final int x = 10;
+ int y = (int) (fgPaint.getTextSize() * 1.5f);
+
+ canvas.drawPaint(bgPaint);
+
+ if (results != null) {
+ for (final Recognition recog : results) {
+ canvas.drawText(recog.getTitle() + ": " + recog.getConfidence(), x, y, fgPaint);
+ y += (int) (fgPaint.getTextSize() * 1.5f);
+ }
+ }
+ }
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/ResultsView.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/ResultsView.java
new file mode 100755
index 0000000000000000000000000000000000000000..6d54020d7bc047604b46829a74b104c1a5e5ba55
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/ResultsView.java
@@ -0,0 +1,23 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+package org.tensorflow.lite.examples.detection.customview;
+
+import java.util.List;
+import org.tensorflow.lite.examples.detection.tflite.Classifier.Recognition;
+
+public interface ResultsView {
+ public void setResults(final List<Recognition> results);
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/BorderedText.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/BorderedText.java
new file mode 100755
index 0000000000000000000000000000000000000000..006c4c7847eec8e42aa642e6d805148a6f47d8cf
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/BorderedText.java
@@ -0,0 +1,128 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+package org.tensorflow.lite.examples.detection.env;
+
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.Paint.Align;
+import android.graphics.Paint.Style;
+import android.graphics.Rect;
+import android.graphics.Typeface;
+import java.util.Vector;
+
+/** A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas. */
+public class BorderedText {
+ private final Paint interiorPaint;
+ private final Paint exteriorPaint;
+
+ private final float textSize;
+
+ /**
+ * Creates a left-aligned bordered text object with a white interior, and a black exterior with
+ * the specified text size.
+ *
+ * @param textSize text size in pixels
+ */
+ public BorderedText(final float textSize) {
+ this(Color.WHITE, Color.BLACK, textSize);
+ }
+
+ /**
+ * Create a bordered text object with the specified interior and exterior colors, text size and
+ * alignment.
+ *
+ * @param interiorColor the interior text color
+ * @param exteriorColor the exterior text color
+ * @param textSize text size in pixels
+ */
+ public BorderedText(final int interiorColor, final int exteriorColor, final float textSize) {
+ interiorPaint = new Paint();
+ interiorPaint.setTextSize(textSize);
+ interiorPaint.setColor(interiorColor);
+ interiorPaint.setStyle(Style.FILL);
+ interiorPaint.setAntiAlias(false);
+ interiorPaint.setAlpha(255);
+
+ exteriorPaint = new Paint();
+ exteriorPaint.setTextSize(textSize);
+ exteriorPaint.setColor(exteriorColor);
+ exteriorPaint.setStyle(Style.FILL_AND_STROKE);
+ exteriorPaint.setStrokeWidth(textSize / 8);
+ exteriorPaint.setAntiAlias(false);
+ exteriorPaint.setAlpha(255);
+
+ this.textSize = textSize;
+ }
+
+ public void setTypeface(Typeface typeface) {
+ interiorPaint.setTypeface(typeface);
+ exteriorPaint.setTypeface(typeface);
+ }
+
+ public void drawText(final Canvas canvas, final float posX, final float posY, final String text) {
+ canvas.drawText(text, posX, posY, exteriorPaint);
+ canvas.drawText(text, posX, posY, interiorPaint);
+ }
+
+ public void drawText(
+ final Canvas canvas, final float posX, final float posY, final String text, Paint bgPaint) {
+
+ float width = exteriorPaint.measureText(text);
+ float textSize = exteriorPaint.getTextSize();
+ Paint paint = new Paint(bgPaint);
+ paint.setStyle(Paint.Style.FILL);
+ paint.setAlpha(160);
+ canvas.drawRect(posX, (posY + (int) (textSize)), (posX + (int) (width)), posY, paint);
+
+ canvas.drawText(text, posX, (posY + textSize), interiorPaint);
+ }
+
+ public void drawLines(Canvas canvas, final float posX, final float posY, Vector<String> lines) {
+ int lineNum = 0;
+ for (final String line : lines) {
+ drawText(canvas, posX, posY - getTextSize() * (lines.size() - lineNum - 1), line);
+ ++lineNum;
+ }
+ }
+
+ public void setInteriorColor(final int color) {
+ interiorPaint.setColor(color);
+ }
+
+ public void setExteriorColor(final int color) {
+ exteriorPaint.setColor(color);
+ }
+
+ public float getTextSize() {
+ return textSize;
+ }
+
+ public void setAlpha(final int alpha) {
+ interiorPaint.setAlpha(alpha);
+ exteriorPaint.setAlpha(alpha);
+ }
+
+ public void getTextBounds(
+ final String line, final int index, final int count, final Rect lineBounds) {
+ interiorPaint.getTextBounds(line, index, count, lineBounds);
+ }
+
+ public void setTextAlign(final Align align) {
+ interiorPaint.setTextAlign(align);
+ exteriorPaint.setTextAlign(align);
+ }
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/ImageUtils.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/ImageUtils.java
new file mode 100755
index 0000000000000000000000000000000000000000..df7b0999a80b74da0f2289eecfff0be8b4c7f37b
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/ImageUtils.java
@@ -0,0 +1,219 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+package org.tensorflow.lite.examples.detection.env;
+
+import android.graphics.Bitmap;
+import android.graphics.Matrix;
+import android.os.Environment;
+import java.io.File;
+import java.io.FileOutputStream;
+
+/** Utility class for manipulating images. */
+public class ImageUtils {
+ // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
+ // are normalized to eight bits.
+ static final int kMaxChannelValue = 262143;
+
+ @SuppressWarnings("unused")
+ private static final Logger LOGGER = new Logger();
+
+ /**
+ * Utility method to compute the allocated size in bytes of a YUV420SP image of the given
+ * dimensions.
+ */
+ public static int getYUVByteSize(final int width, final int height) {
+ // The luminance plane requires 1 byte per pixel.
+ final int ySize = width * height;
+
+ // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up.
+ // Each 2x2 block takes 2 bytes to encode, one each for U and V.
+ final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;
+
+ return ySize + uvSize;
+ }
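+ // Worked example: a 640x480 frame needs 640*480 = 307,200 Y bytes plus
+ // 320*240*2 = 153,600 interleaved UV bytes, i.e. 460,800 bytes in total.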
+
+ /**
+ * Saves a Bitmap object to disk for analysis.
+ *
+ * @param bitmap The bitmap to save.
+ */
+ public static void saveBitmap(final Bitmap bitmap) {
+ saveBitmap(bitmap, "preview.png");
+ }
+
+ /**
+ * Saves a Bitmap object to disk for analysis.
+ *
+ * @param bitmap The bitmap to save.
+ * @param filename The location to save the bitmap to.
+ */
+ public static void saveBitmap(final Bitmap bitmap, final String filename) {
+ final String root =
+ Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
+ LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root);
+ final File myDir = new File(root);
+
+ if (!myDir.mkdirs()) {
+ LOGGER.i("Make dir failed");
+ }
+
+ final String fname = filename;
+ final File file = new File(myDir, fname);
+ if (file.exists()) {
+ file.delete();
+ }
+ try {
+ final FileOutputStream out = new FileOutputStream(file);
+ bitmap.compress(Bitmap.CompressFormat.PNG, 99, out);
+ out.flush();
+ out.close();
+ } catch (final Exception e) {
+ LOGGER.e(e, "Exception!");
+ }
+ }
+
+ public static void convertYUV420SPToARGB8888(byte[] input, int width, int height, int[] output) {
+ final int frameSize = width * height;
+ for (int j = 0, yp = 0; j < height; j++) {
+ int uvp = frameSize + (j >> 1) * width;
+ int u = 0;
+ int v = 0;
+
+ for (int i = 0; i < width; i++, yp++) {
+ int y = 0xff & input[yp];
+ if ((i & 1) == 0) {
+ v = 0xff & input[uvp++];
+ u = 0xff & input[uvp++];
+ }
+
+ output[yp] = YUV2RGB(y, u, v);
+ }
+ }
+ }
+
+ private static int YUV2RGB(int y, int u, int v) {
+ // Adjust and check YUV values
+ y = (y - 16) < 0 ? 0 : (y - 16);
+ u -= 128;
+ v -= 128;
+
+ // This is the floating point equivalent. We do the conversion in integer
+ // because some Android devices do not have floating point in hardware.
+ // nR = (int)(1.164 * nY + 2.018 * nU);
+ // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
+ // nB = (int)(1.164 * nY + 1.596 * nV);
+ int y1192 = 1192 * y;
+ int r = (y1192 + 1634 * v);
+ int g = (y1192 - 833 * v - 400 * u);
+ int b = (y1192 + 2066 * u);
+
+ // Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ]
+ r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r);
+ g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g);
+ b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 0 : b);
+
+ return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
+ }
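+ // The constants above are the BT.601 coefficients scaled by 1024
+ // (e.g. 1.164 * 1024 ~= 1192), so r, g and b land in an 18-bit range; the
+ // shifts by 6, 2 and 10 select the top eight bits of each channel while
+ // packing the final ARGB int.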
+
+ public static void convertYUV420ToARGB8888(
+ byte[] yData,
+ byte[] uData,
+ byte[] vData,
+ int width,
+ int height,
+ int yRowStride,
+ int uvRowStride,
+ int uvPixelStride,
+ int[] out) {
+ int yp = 0;
+ for (int j = 0; j < height; j++) {
+ int pY = yRowStride * j;
+ int pUV = uvRowStride * (j >> 1);
+
+ for (int i = 0; i < width; i++) {
+ int uv_offset = pUV + (i >> 1) * uvPixelStride;
+
+ out[yp++] = YUV2RGB(0xff & yData[pY + i], 0xff & uData[uv_offset], 0xff & vData[uv_offset]);
+ }
+ }
+ }
+
+ /**
+ * Returns a transformation matrix from one reference frame into another. Handles cropping (if
+ * maintaining aspect ratio is desired) and rotation.
+ *
+ * @param srcWidth Width of source frame.
+ * @param srcHeight Height of source frame.
+ * @param dstWidth Width of destination frame.
+ * @param dstHeight Height of destination frame.
+ * @param applyRotation Amount of rotation to apply from one frame to another. Must be a multiple
+ * of 90.
+ * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
+ * cropping the image if necessary.
+ * @return The transformation fulfilling the desired requirements.
+ */
+ public static Matrix getTransformationMatrix(
+ final int srcWidth,
+ final int srcHeight,
+ final int dstWidth,
+ final int dstHeight,
+ final int applyRotation,
+ final boolean maintainAspectRatio) {
+ final Matrix matrix = new Matrix();
+
+ if (applyRotation != 0) {
+ if (applyRotation % 90 != 0) {
+ LOGGER.w("Rotation of %d % 90 != 0", applyRotation);
+ }
+
+ // Translate so center of image is at origin.
+ matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
+
+ // Rotate around origin.
+ matrix.postRotate(applyRotation);
+ }
+
+ // Account for the already applied rotation, if any, and then determine how
+ // much scaling is needed for each axis.
+ final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
+
+ final int inWidth = transpose ? srcHeight : srcWidth;
+ final int inHeight = transpose ? srcWidth : srcHeight;
+
+ // Apply scaling if necessary.
+ if (inWidth != dstWidth || inHeight != dstHeight) {
+ final float scaleFactorX = dstWidth / (float) inWidth;
+ final float scaleFactorY = dstHeight / (float) inHeight;
+
+ if (maintainAspectRatio) {
+ // Scale by the larger factor so that dst is filled completely while
+ // maintaining the aspect ratio. Parts of the image may fall off the edge.
+ final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
+ matrix.postScale(scaleFactor, scaleFactor);
+ } else {
+ // Scale exactly to fill dst from src.
+ matrix.postScale(scaleFactorX, scaleFactorY);
+ }
+ }
+
+ if (applyRotation != 0) {
+ // Translate back from origin centered reference to destination frame.
+ matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
+ }
+
+ return matrix;
+ }
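+ // Minimal usage sketch (values are illustrative): mapping a 640x480 preview,
+ // rotated 90 degrees, onto a 416x416 model input without preserving aspect:
+ // Matrix frameToCrop = ImageUtils.getTransformationMatrix(640, 480, 416, 416, 90, false);
+ // Matrix cropToFrame = new Matrix();
+ // frameToCrop.invert(cropToFrame); // maps detections back to preview space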
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Logger.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Logger.java
new file mode 100755
index 0000000000000000000000000000000000000000..9dc05f4d1bd958192ad9bcf8a46f452736c1315a
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Logger.java
@@ -0,0 +1,186 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+package org.tensorflow.lite.examples.detection.env;
+
+import android.util.Log;
+import java.util.HashSet;
+import java.util.Set;
+
+/** Wrapper for the platform log function, allows convenient message prefixing and log disabling. */
+public final class Logger {
+ private static final String DEFAULT_TAG = "tensorflow";
+ private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG;
+
+ // Classes to be ignored when examining the stack trace
+ private static final Set<String> IGNORED_CLASS_NAMES;
+
+ static {
+ IGNORED_CLASS_NAMES = new HashSet<String>(3);
+ IGNORED_CLASS_NAMES.add("dalvik.system.VMStack");
+ IGNORED_CLASS_NAMES.add("java.lang.Thread");
+ IGNORED_CLASS_NAMES.add(Logger.class.getCanonicalName());
+ }
+
+ private final String tag;
+ private final String messagePrefix;
+ private int minLogLevel = DEFAULT_MIN_LOG_LEVEL;
+
+ /**
+ * Creates a Logger using the class name as the message prefix.
+ *
+ * @param clazz the simple name of this class is used as the message prefix.
+ */
+ public Logger(final Class<?> clazz) {
+ this(clazz.getSimpleName());
+ }
+
+ /**
+ * Creates a Logger using the specified message prefix.
+ *
+ * @param messagePrefix is prepended to the text of every message.
+ */
+ public Logger(final String messagePrefix) {
+ this(DEFAULT_TAG, messagePrefix);
+ }
+
+ /**
+ * Creates a Logger with a custom tag and a custom message prefix. If the message prefix is set
+ * to <pre>null</pre>, the caller's class name is used as the prefix.
+ *
+ * @param tag identifies the source of a log message.
+ * @param messagePrefix prepended to every message if non-null. If null, the name of the caller is
+ * being used
+ */
+ public Logger(final String tag, final String messagePrefix) {
+ this.tag = tag;
+ final String prefix = messagePrefix == null ? getCallerSimpleName() : messagePrefix;
+ this.messagePrefix = (prefix.length() > 0) ? prefix + ": " : prefix;
+ }
+
+ /** Creates a Logger using the caller's class name as the message prefix. */
+ public Logger() {
+ this(DEFAULT_TAG, null);
+ }
+
+ /** Creates a Logger using the caller's class name as the message prefix. */
+ public Logger(final int minLogLevel) {
+ this(DEFAULT_TAG, null);
+ this.minLogLevel = minLogLevel;
+ }
+
+ /**
+ * Return caller's simple name.
+ *
+ *
+ * <p>Android getStackTrace() returns an array that looks like this: stackTrace[0]:
+ * dalvik.system.VMStack stackTrace[1]: java.lang.Thread stackTrace[2]:
+ * com.google.android.apps.unveil.env.UnveilLogger stackTrace[3]:
+ * com.google.android.apps.unveil.BaseApplication
+ *
+ * <p>This function returns the simple version of the first non-filtered name.
+ *
+ * @return caller's simple name
+ */
+ private static String getCallerSimpleName() {
+ // Get the current callstack so we can pull the class of the caller off of it.
+ final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
+
+ for (final StackTraceElement elem : stackTrace) {
+ final String className = elem.getClassName();
+ if (!IGNORED_CLASS_NAMES.contains(className)) {
+ // We're only interested in the simple name of the class, not the complete package.
+ final String[] classParts = className.split("\\.");
+ return classParts[classParts.length - 1];
+ }
+ }
+
+ return Logger.class.getSimpleName();
+ }
+
+ public void setMinLogLevel(final int minLogLevel) {
+ this.minLogLevel = minLogLevel;
+ }
+
+ public boolean isLoggable(final int logLevel) {
+ return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel);
+ }
+
+ private String toMessage(final String format, final Object... args) {
+ return messagePrefix + (args.length > 0 ? String.format(format, args) : format);
+ }
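+ // When no varargs are supplied the format string is passed through untouched,
+ // so plain messages may contain literal '%'; formatted calls must escape it
+ // as "%%".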
+
+ public void v(final String format, final Object... args) {
+ if (isLoggable(Log.VERBOSE)) {
+ Log.v(tag, toMessage(format, args));
+ }
+ }
+
+ public void v(final Throwable t, final String format, final Object... args) {
+ if (isLoggable(Log.VERBOSE)) {
+ Log.v(tag, toMessage(format, args), t);
+ }
+ }
+
+ public void d(final String format, final Object... args) {
+ if (isLoggable(Log.DEBUG)) {
+ Log.d(tag, toMessage(format, args));
+ }
+ }
+
+ public void d(final Throwable t, final String format, final Object... args) {
+ if (isLoggable(Log.DEBUG)) {
+ Log.d(tag, toMessage(format, args), t);
+ }
+ }
+
+ public void i(final String format, final Object... args) {
+ if (isLoggable(Log.INFO)) {
+ Log.i(tag, toMessage(format, args));
+ }
+ }
+
+ public void i(final Throwable t, final String format, final Object... args) {
+ if (isLoggable(Log.INFO)) {
+ Log.i(tag, toMessage(format, args), t);
+ }
+ }
+
+ public void w(final String format, final Object... args) {
+ if (isLoggable(Log.WARN)) {
+ Log.w(tag, toMessage(format, args));
+ }
+ }
+
+ public void w(final Throwable t, final String format, final Object... args) {
+ if (isLoggable(Log.WARN)) {
+ Log.w(tag, toMessage(format, args), t);
+ }
+ }
+
+ public void e(final String format, final Object... args) {
+ if (isLoggable(Log.ERROR)) {
+ Log.e(tag, toMessage(format, args));
+ }
+ }
+
+ public void e(final Throwable t, final String format, final Object... args) {
+ if (isLoggable(Log.ERROR)) {
+ Log.e(tag, toMessage(format, args), t);
+ }
+ }
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Size.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Size.java
new file mode 100755
index 0000000000000000000000000000000000000000..e3f71e0e03ce1ef974358ebe39e6896521c02083
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Size.java
@@ -0,0 +1,142 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+package org.tensorflow.lite.examples.detection.env;
+
+import android.graphics.Bitmap;
+import android.text.TextUtils;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+/** Size class independent of a Camera object. */
+public class Size implements Comparable<Size>, Serializable {
+
+ // 1.4 went out with this UID so we'll need to maintain it to preserve pending queries when
+ // upgrading.
+ public static final long serialVersionUID = 7689808733290872361L;
+
+ public final int width;
+ public final int height;
+
+ public Size(final int width, final int height) {
+ this.width = width;
+ this.height = height;
+ }
+
+ public Size(final Bitmap bmp) {
+ this.width = bmp.getWidth();
+ this.height = bmp.getHeight();
+ }
+
+ /**
+ * Rotate a size by the given number of degrees.
+ *
+ * @param size Size to rotate.
+ * @param rotation Degrees {0, 90, 180, 270} to rotate the size.
+ * @return Rotated size.
+ */
+ public static Size getRotatedSize(final Size size, final int rotation) {
+ if (rotation % 180 != 0) {
+ // The phone is portrait, therefore the camera is sideways and frame should be rotated.
+ return new Size(size.height, size.width);
+ }
+ return size;
+ }
+
+ public static Size parseFromString(String sizeString) {
+ if (TextUtils.isEmpty(sizeString)) {
+ return null;
+ }
+
+ sizeString = sizeString.trim();
+
+ // The expected format is "<width>x<height>".
+ final String[] components = sizeString.split("x");
+ if (components.length == 2) {
+ try {
+ final int width = Integer.parseInt(components[0]);
+ final int height = Integer.parseInt(components[1]);
+ return new Size(width, height);
+ } catch (final NumberFormatException e) {
+ return null;
+ }
+ } else {
+ return null;
+ }
+ }
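+ // e.g. parseFromString("640x480") yields a 640x480 Size, while empty or
+ // malformed input such as "640x" or "640by480" returns null.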
+
+ public static List<Size> sizeStringToList(final String sizes) {
+ final List<Size> sizeList = new ArrayList<Size>();
+ if (sizes != null) {
+ final String[] pairs = sizes.split(",");
+ for (final String pair : pairs) {
+ final Size size = Size.parseFromString(pair);
+ if (size != null) {
+ sizeList.add(size);
+ }
+ }
+ }
+ return sizeList;
+ }
+
+ public static String sizeListToString(final List<Size> sizes) {
+ String sizesString = "";
+ if (sizes != null && sizes.size() > 0) {
+ sizesString = sizes.get(0).toString();
+ for (int i = 1; i < sizes.size(); i++) {
+ sizesString += "," + sizes.get(i).toString();
+ }
+ }
+ return sizesString;
+ }
+
+ public static final String dimensionsAsString(final int width, final int height) {
+ return width + "x" + height;
+ }
+
+ public final float aspectRatio() {
+ return (float) width / (float) height;
+ }
+
+ @Override
+ public int compareTo(final Size other) {
+ return width * height - other.width * other.height;
+ }
+
+ @Override
+ public boolean equals(final Object other) {
+ if (other == null) {
+ return false;
+ }
+
+ if (!(other instanceof Size)) {
+ return false;
+ }
+
+ final Size otherSize = (Size) other;
+ return (width == otherSize.width && height == otherSize.height);
+ }
+
+ @Override
+ public int hashCode() {
+ return width * 32713 + height;
+ }
+
+ @Override
+ public String toString() {
+ return dimensionsAsString(width, height);
+ }
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Utils.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Utils.java
new file mode 100755
index 0000000000000000000000000000000000000000..47fd3e50dfc9b73c73db8b72d751ae18e334c531
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Utils.java
@@ -0,0 +1,188 @@
+package org.tensorflow.lite.examples.detection.env;
+
+import android.content.Context;
+import android.content.res.AssetFileDescriptor;
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Canvas;
+import android.graphics.Matrix;
+import android.os.Environment;
+import android.util.Log;
+
+import org.tensorflow.lite.examples.detection.MainActivity;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStreamWriter;
+import java.nio.MappedByteBuffer;
+import java.nio.channels.FileChannel;
+
+public class Utils {
+
+ /**
+ * Memory-map the model file in Assets.
+ */
+ public static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename)
+ throws IOException {
+ AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename);
+ FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
+ FileChannel fileChannel = inputStream.getChannel();
+ long startOffset = fileDescriptor.getStartOffset();
+ long declaredLength = fileDescriptor.getDeclaredLength();
+ return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
+ }
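+ // Memory-mapping avoids copying the model weights onto the Java heap. Note
+ // that AssetManager.openFd only works for assets stored uncompressed, so
+ // .tflite files are typically excluded from APK compression.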
+
+ public static void softmax(final float[] vals) {
+ float max = Float.NEGATIVE_INFINITY;
+ for (final float val : vals) {
+ max = Math.max(max, val);
+ }
+ float sum = 0.0f;
+ for (int i = 0; i < vals.length; ++i) {
+ vals[i] = (float) Math.exp(vals[i] - max);
+ sum += vals[i];
+ }
+ for (int i = 0; i < vals.length; ++i) {
+ vals[i] = vals[i] / sum;
+ }
+ }
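+ // Subtracting the max before exponentiating is the standard overflow guard;
+ // e.g. softmax({1, 2, 3}) ~= {0.090, 0.245, 0.665}, and the outputs sum to 1.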
+
+ public static float expit(final float x) {
+ return (float) (1. / (1. + Math.exp(-x)));
+ }
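+ // expit is the logistic sigmoid (expit(0) = 0.5); it is used when decoding
+ // raw network outputs into probabilities in (0, 1).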
+
+// public static Bitmap scale(Context context, String filePath) {
+// AssetManager assetManager = context.getAssets();
+//
+// InputStream istr;
+// Bitmap bitmap = null;
+// try {
+// istr = assetManager.open(filePath);
+// bitmap = BitmapFactory.decodeStream(istr);
+// bitmap = Bitmap.createScaledBitmap(bitmap, MainActivity.TF_OD_API_INPUT_SIZE, MainActivity.TF_OD_API_INPUT_SIZE, false);
+// } catch (IOException e) {
+// // handle exception
+// Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
+// }
+//
+// return bitmap;
+// }
+
+ public static Bitmap getBitmapFromAsset(Context context, String filePath) {
+ AssetManager assetManager = context.getAssets();
+
+ InputStream istr;
+ Bitmap bitmap = null;
+ try {
+ istr = assetManager.open(filePath);
+ bitmap = BitmapFactory.decodeStream(istr);
+// return bitmap.copy(Bitmap.Config.ARGB_8888,true);
+ } catch (IOException e) {
+ // handle exception
+ Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
+ }
+
+ return bitmap;
+ }
+
+ /**
+ * Returns a transformation matrix from one reference frame into another.
+ * Handles cropping (if maintaining aspect ratio is desired) and rotation.
+ *
+ * @param srcWidth Width of source frame.
+ * @param srcHeight Height of source frame.
+ * @param dstWidth Width of destination frame.
+ * @param dstHeight Height of destination frame.
+ * @param applyRotation Amount of rotation to apply from one frame to another.
+ * Must be a multiple of 90.
+ * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
+ * cropping the image if necessary.
+ * @return The transformation fulfilling the desired requirements.
+ */
+ public static Matrix getTransformationMatrix(
+ final int srcWidth,
+ final int srcHeight,
+ final int dstWidth,
+ final int dstHeight,
+ final int applyRotation,
+ final boolean maintainAspectRatio) {
+ final Matrix matrix = new Matrix();
+
+ if (applyRotation != 0) {
+ // Translate so center of image is at origin.
+ matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
+
+ // Rotate around origin.
+ matrix.postRotate(applyRotation);
+ }
+
+ // Account for the already applied rotation, if any, and then determine how
+ // much scaling is needed for each axis.
+ final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
+
+ final int inWidth = transpose ? srcHeight : srcWidth;
+ final int inHeight = transpose ? srcWidth : srcHeight;
+
+ // Apply scaling if necessary.
+ if (inWidth != dstWidth || inHeight != dstHeight) {
+ final float scaleFactorX = dstWidth / (float) inWidth;
+ final float scaleFactorY = dstHeight / (float) inHeight;
+
+ if (maintainAspectRatio) {
+ // Scale by the larger factor so that dst is filled completely while
+ // maintaining the aspect ratio. Parts of the image may fall off the edge.
+ final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
+ matrix.postScale(scaleFactor, scaleFactor);
+ } else {
+ // Scale exactly to fill dst from src.
+ matrix.postScale(scaleFactorX, scaleFactorY);
+ }
+ }
+
+ if (applyRotation != 0) {
+ // Translate back from origin centered reference to destination frame.
+ matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
+ }
+
+ return matrix;
+ }
+
+ public static Bitmap processBitmap(Bitmap source, int size){
+
+ int image_height = source.getHeight();
+ int image_width = source.getWidth();
+
+ Bitmap croppedBitmap = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888);
+
+ Matrix frameToCropTransformations = getTransformationMatrix(image_width,image_height,size,size,0,false);
+ Matrix cropToFrameTransformations = new Matrix();
+ frameToCropTransformations.invert(cropToFrameTransformations);
+
+ final Canvas canvas = new Canvas(croppedBitmap);
+ canvas.drawBitmap(source, frameToCropTransformations, null);
+
+ return croppedBitmap;
+ }
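+ // Example: processBitmap(sourceBitmap, 416) returns a 416x416 ARGB_8888
+ // bitmap, stretched (aspect ratio is not maintained) to the detector input.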
+
+ public static void writeToFile(String data, Context context) {
+ try {
+ String baseDir = Environment.getExternalStorageDirectory().getAbsolutePath();
+ String fileName = "myFile.txt";
+
+ File file = new File(baseDir + File.separator + fileName);
+
+ FileOutputStream stream = new FileOutputStream(file);
+ try {
+ stream.write(data.getBytes());
+ } finally {
+ stream.close();
+ }
+ } catch (IOException e) {
+ Log.e("Exception", "File write failed: " + e.toString());
+ }
+ }
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/tflite/Classifier.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/tflite/Classifier.java
new file mode 100755
index 0000000000000000000000000000000000000000..cffb0afaf64bbc68a022ae7ce4ce4e1d2a034f1f
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/tflite/Classifier.java
@@ -0,0 +1,134 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+package org.tensorflow.lite.examples.detection.tflite;
+
+import android.graphics.Bitmap;
+import android.graphics.RectF;
+
+import java.util.List;
+
+/**
+ * Generic interface for interacting with different recognition engines.
+ */
+public interface Classifier {
+ List<Recognition> recognizeImage(Bitmap bitmap);
+
+ void enableStatLogging(final boolean debug);
+
+ String getStatString();
+
+ void close();
+
+ void setNumThreads(int num_threads);
+
+ void setUseNNAPI(boolean isChecked);
+
+ abstract float getObjThresh();
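+ // Implementations supply a model-specific objectness threshold via
+ // getObjThresh(); callers may filter the returned recognitions against it or
+ // against their own minimum-confidence cutoff.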
+
+ /**
+ * An immutable result returned by a Classifier describing what was recognized.
+ */
+ public class Recognition {
+ /**
+ * A unique identifier for what has been recognized. Specific to the class, not the instance of
+ * the object.
+ */
+ private final String id;
+
+ /**
+ * Display name for the recognition.
+ */
+ private final String title;
+
+ /**
+ * A sortable score for how good the recognition is relative to others. Higher should be better.
+ */
+ private final Float confidence;
+
+ /**
+ * Optional location within the source image for the location of the recognized object.
+ */
+ private RectF location;
+
+ private int detectedClass;
+
+ public Recognition(
+ final String id, final String title, final Float confidence, final RectF location) {
+ this.id = id;
+ this.title = title;
+ this.confidence = confidence;
+ this.location = location;
+ }
+
+ public Recognition(final String id, final String title, final Float confidence, final RectF location, int detectedClass) {
+ this.id = id;
+ this.title = title;
+ this.confidence = confidence;
+ this.location = location;
+ this.detectedClass = detectedClass;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ public String getTitle() {
+ return title;
+ }
+
+ public Float getConfidence() {
+ return confidence;
+ }
+
+ public RectF getLocation() {
+ return new RectF(location);
+ }
+
+ public void setLocation(RectF location) {
+ this.location = location;
+ }
+
+ public int getDetectedClass() {
+ return detectedClass;
+ }
+
+ public void setDetectedClass(int detectedClass) {
+ this.detectedClass = detectedClass;
+ }
+
+ @Override
+ public String toString() {
+ String resultString = "";
+ if (id != null) {
+ resultString += "[" + id + "] ";
+ }
+
+ if (title != null) {
+ resultString += title + " ";
+ }
+
+ if (confidence != null) {
+ resultString += String.format("(%.1f%%) ", confidence * 100.0f);
+ }
+
+ if (location != null) {
+ resultString += location + " ";
+ }
+
+ return resultString.trim();
+ }
+ }
+}
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/tflite/YoloV4Classifier.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/tflite/YoloV4Classifier.java
new file mode 100755
index 0000000000000000000000000000000000000000..ce3488fdd1cda97a11c95d835c1aaeb553b6f7fb
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/tflite/YoloV4Classifier.java
@@ -0,0 +1,599 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+package org.tensorflow.lite.examples.detection.tflite;
+
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.RectF;
+import android.os.Build;
+import android.os.Trace;
+import android.util.Log;
+
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.PriorityQueue;
+import java.util.Vector;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.tensorflow.lite.Interpreter;
+import org.tensorflow.lite.examples.detection.MainActivity;
+import org.tensorflow.lite.examples.detection.env.Logger;
+import org.tensorflow.lite.examples.detection.env.Utils;
+
+import static org.tensorflow.lite.examples.detection.env.Utils.expit;
+import static org.tensorflow.lite.examples.detection.env.Utils.softmax;
+
+import org.tensorflow.lite.gpu.GpuDelegate;
+import org.tensorflow.lite.nnapi.NnApiDelegate;
+
+/**
+ * Wrapper for YOLOv4 object detection models converted to TensorFlow Lite.
+ *
+ * Based on the Android example for the TensorFlow Object Detection API:
+ * - https://github.com/tensorflow/models/tree/master/research/object_detection
+ *
+ * To use pretrained models or convert them to TF Lite, see the docs for details:
+ * - https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md
+ * - https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/running_on_mobile_tensorflowlite.md#running-our-model-on-android
+ */
+public class YoloV4Classifier implements Classifier {
+
+ /**
+ * Initializes a native TensorFlow session for classifying images.
+ *
+ * @param assetManager The asset manager to be used to load assets.
+ * @param modelFilename The file path of the TFLite model in the assets directory.
+ * @param labelFilename The file path of the label file, prefixed with "file:///android_asset/".
+ * @param isQuantized Boolean indicating whether the model is quantized.
+ */
+ public static Classifier create(
+ final AssetManager assetManager,
+ final String modelFilename,
+ final String labelFilename,
+ final boolean isQuantized)
+ throws IOException {
+ final YoloV4Classifier d = new YoloV4Classifier();
+
+ String actualFilename = labelFilename.split("file:///android_asset/")[1];
+ InputStream labelsInput = assetManager.open(actualFilename);
+ BufferedReader br = new BufferedReader(new InputStreamReader(labelsInput));
+ String line;
+ while ((line = br.readLine()) != null) {
+ LOGGER.w(line);
+ d.labels.add(line);
+ }
+ br.close();
+
+ try {
+ Interpreter.Options options = (new Interpreter.Options());
+ options.setNumThreads(NUM_THREADS);
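+ // Optionally attach a hardware delegate; isNNAPI and isGPU are static flags defined below.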
+ if (isNNAPI) {
+ NnApiDelegate nnApiDelegate = null;
+ // Initialize interpreter with NNAPI delegate for Android Pie or above
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
+ nnApiDelegate = new NnApiDelegate();
+ options.addDelegate(nnApiDelegate);
+ options.setAllowFp16PrecisionForFp32(true);
+ options.setAllowBufferHandleOutput(true);
+ options.setUseNNAPI(true);
+ }
+ }
+ if (isGPU) {
+ GpuDelegate gpuDelegate = new GpuDelegate();
+ options.addDelegate(gpuDelegate);
+ }
+ d.tfLite = new Interpreter(Utils.loadModelFile(assetManager, modelFilename), options);
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+
+ d.isModelQuantized = isQuantized;
+ // Pre-allocate buffers.
+ int numBytesPerChannel;
+ if (isQuantized) {
+ numBytesPerChannel = 1; // Quantized
+ } else {
+ numBytesPerChannel = 4; // Floating point
+ }
+ d.imgData = ByteBuffer.allocateDirect(1 * INPUT_SIZE * INPUT_SIZE * 3 * numBytesPerChannel);
+ d.imgData.order(ByteOrder.nativeOrder());
+ d.intValues = new int[INPUT_SIZE * INPUT_SIZE];
+
+ return d;
+ }
+
+ @Override
+ public void enableStatLogging(final boolean logStats) {
+ }
+
+ @Override
+ public String getStatString() {
+ return "";
+ }
+
+ @Override
+ public void close() {
+ }
+
+ @Override
+ public void setNumThreads(int num_threads) {
+ if (tfLite != null) tfLite.setNumThreads(num_threads);
+ }
+
+ @Override
+ public void setUseNNAPI(boolean isChecked) {
+ if (tfLite != null) tfLite.setUseNNAPI(isChecked);
+ }
+
+ @Override
+ public float getObjThresh() {
+ return MainActivity.MINIMUM_CONFIDENCE_TF_OD_API;
+ }
+
+ private static final Logger LOGGER = new Logger();
+
+ // Float model
+ private static final float IMAGE_MEAN = 0;
+
+ private static final float IMAGE_STD = 255.0f;
+
+ //config yolov4
+ private static final int INPUT_SIZE = 416;
+ private static final int[] OUTPUT_WIDTH = new int[]{52, 26, 13};
+
+ private static final int[][] MASKS = new int[][]{{0, 1, 2}, {3, 4, 5}, {6, 7, 8}};
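+ // ANCHORS holds nine (width, height) pairs in input-image pixels; MASKS selects
+ // three of them per output scale.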
+ private static final int[] ANCHORS = new int[]{
+ 12, 16, 19, 36, 40, 28, 36, 75, 76, 55, 72, 146, 142, 110, 192, 243, 459, 401
+ };
+ private static final float[] XYSCALE = new float[]{1.2f, 1.1f, 1.05f};
+
+ private static final int NUM_BOXES_PER_BLOCK = 3;
+
+ // Number of threads in the java app
+ private static final int NUM_THREADS = 4;
+ private static boolean isNNAPI = false;
+ private static boolean isGPU = true;
+
+ // tiny or not
+ private static boolean isTiny = false;
+
+ // config yolov4 tiny
+ private static final int[] OUTPUT_WIDTH_TINY = new int[]{2535, 2535};
+ private static final int[] OUTPUT_WIDTH_FULL = new int[]{10647, 10647};
+ private static final int[][] MASKS_TINY = new int[][]{{3, 4, 5}, {1, 2, 3}};
+ private static final int[] ANCHORS_TINY = new int[]{
+ 23, 27, 37, 58, 81, 82, 81, 82, 135, 169, 344, 319};
+ private static final float[] XYSCALE_TINY = new float[]{1.05f, 1.05f};
+
+ private boolean isModelQuantized;
+
+ // Pre-allocated buffers.
+ private Vector<String> labels = new Vector<String>();
+ private int[] intValues;
+
+ private ByteBuffer imgData;
+
+ private Interpreter tfLite;
+
+ private YoloV4Classifier() {
+ }
+
+ // Non-maximum suppression: for each class, repeatedly keep the highest-confidence
+ // box and drop the remaining boxes whose IoU with it is >= mNmsThresh.
+ protected ArrayList<Recognition> nms(ArrayList<Recognition> list) {
+ ArrayList<Recognition> nmsList = new ArrayList<Recognition>();
+
+ for (int k = 0; k < labels.size(); k++) {
+ //1.find max confidence per class
+ PriorityQueue<Recognition> pq =
+ new PriorityQueue<Recognition>(
+ 50,
+ new Comparator<Recognition>() {
+ @Override
+ public int compare(final Recognition lhs, final Recognition rhs) {
+ // Intentionally reversed to put high confidence at the head of the queue.
+ return Float.compare(rhs.getConfidence(), lhs.getConfidence());
+ }
+ });
+
+ for (int i = 0; i < list.size(); ++i) {
+ if (list.get(i).getDetectedClass() == k) {
+ pq.add(list.get(i));
+ }
+ }
+
+ //2.do non maximum suppression
+ while (pq.size() > 0) {
+ //insert detection with max confidence
+ Recognition[] a = new Recognition[pq.size()];
+ Recognition[] detections = pq.toArray(a);
+ Recognition max = detections[0];
+ nmsList.add(max);
+ pq.clear();
+
+ for (int j = 1; j < detections.length; j++) {
+ Recognition detection = detections[j];
+ RectF b = detection.getLocation();
+ if (box_iou(max.getLocation(), b) < mNmsThresh) {
+ pq.add(detection);
+ }
+ }
+ }
+ }
+ return nmsList;
+ }
+
+ protected float mNmsThresh = 0.6f;
+
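+ // IoU helpers: boxes are RectF values in (left, top, right, bottom) form.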
+ protected float box_iou(RectF a, RectF b) {
+ return box_intersection(a, b) / box_union(a, b);
+ }
+
+ protected float box_intersection(RectF a, RectF b) {
+ float w = overlap((a.left + a.right) / 2, a.right - a.left,
+ (b.left + b.right) / 2, b.right - b.left);
+ float h = overlap((a.top + a.bottom) / 2, a.bottom - a.top,
+ (b.top + b.bottom) / 2, b.bottom - b.top);
+ if (w < 0 || h < 0) return 0;
+ float area = w * h;
+ return area;
+ }
+
+ protected float box_union(RectF a, RectF b) {
+ float i = box_intersection(a, b);
+ float u = (a.right - a.left) * (a.bottom - a.top) + (b.right - b.left) * (b.bottom - b.top) - i;
+ return u;
+ }
+
+ protected float overlap(float x1, float w1, float x2, float w2) {
+ float l1 = x1 - w1 / 2;
+ float l2 = x2 - w2 / 2;
+ float left = l1 > l2 ? l1 : l2;
+ float r1 = x1 + w1 / 2;
+ float r2 = x2 + w2 / 2;
+ float right = r1 < r2 ? r1 : r2;
+ return right - left;
+ }
+
+ protected static final int BATCH_SIZE = 1;
+ protected static final int PIXEL_SIZE = 3;
+
+ /**
+ * Writes Image data into a {@code ByteBuffer}.
+ */
+ protected ByteBuffer convertBitmapToByteBuffer(Bitmap bitmap) {
+ ByteBuffer byteBuffer = ByteBuffer.allocateDirect(4 * BATCH_SIZE * INPUT_SIZE * INPUT_SIZE * PIXEL_SIZE);
+ byteBuffer.order(ByteOrder.nativeOrder());
+ int[] intValues = new int[INPUT_SIZE * INPUT_SIZE];
+ bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
+ int pixel = 0;
+ for (int i = 0; i < INPUT_SIZE; ++i) {
+ for (int j = 0; j < INPUT_SIZE; ++j) {
+ final int val = intValues[pixel++];
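+ // val is a packed ARGB int; extract R, G, B and normalize to [0, 1]
+ // (consistent with IMAGE_MEAN = 0 and IMAGE_STD = 255 for this float model).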
+ byteBuffer.putFloat(((val >> 16) & 0xFF) / 255.0f);
+ byteBuffer.putFloat(((val >> 8) & 0xFF) / 255.0f);
+ byteBuffer.putFloat((val & 0xFF) / 255.0f);
+ }
+ }
+ return byteBuffer;
+ }
+
+// private ArrayList getDetections(ByteBuffer byteBuffer, Bitmap bitmap) {
+// ArrayList detections = new ArrayList();
+// Map outputMap = new HashMap<>();
+// for (int i = 0; i < OUTPUT_WIDTH.length; i++) {
+// float[][][][][] out = new float[1][OUTPUT_WIDTH[i]][OUTPUT_WIDTH[i]][3][5 + labels.size()];
+// outputMap.put(i, out);
+// }
+//
+// Log.d("YoloV4Classifier", "mObjThresh: " + getObjThresh());
+//
+// Object[] inputArray = {byteBuffer};
+// tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
+//
+// for (int i = 0; i < OUTPUT_WIDTH.length; i++) {
+// int gridWidth = OUTPUT_WIDTH[i];
+// float[][][][][] out = (float[][][][][]) outputMap.get(i);
+//
+// Log.d("YoloV4Classifier", "out[" + i + "] detect start");
+// for (int y = 0; y < gridWidth; ++y) {
+// for (int x = 0; x < gridWidth; ++x) {
+// for (int b = 0; b < NUM_BOXES_PER_BLOCK; ++b) {
+// final int offset =
+// (gridWidth * (NUM_BOXES_PER_BLOCK * (labels.size() + 5))) * y
+// + (NUM_BOXES_PER_BLOCK * (labels.size() + 5)) * x
+// + (labels.size() + 5) * b;
+//
+// final float confidence = expit(out[0][y][x][b][4]);
+// int detectedClass = -1;
+// float maxClass = 0;
+//
+// final float[] classes = new float[labels.size()];
+// for (int c = 0; c < labels.size(); ++c) {
+// classes[c] = out[0][y][x][b][5 + c];
+// }
+//
+// for (int c = 0; c < labels.size(); ++c) {
+// if (classes[c] > maxClass) {
+// detectedClass = c;
+// maxClass = classes[c];
+// }
+// }
+//
+// final float confidenceInClass = maxClass * confidence;
+// if (confidenceInClass > getObjThresh()) {
+//// final float xPos = (x + (expit(out[0][y][x][b][0]) * XYSCALE[i]) - (0.5f * (XYSCALE[i] - 1))) * (INPUT_SIZE / gridWidth);
+//// final float yPos = (y + (expit(out[0][y][x][b][1]) * XYSCALE[i]) - (0.5f * (XYSCALE[i] - 1))) * (INPUT_SIZE / gridWidth);
+//
+// final float xPos = (x + expit(out[0][y][x][b][0])) * (1.0f * INPUT_SIZE / gridWidth);
+// final float yPos = (y + expit(out[0][y][x][b][1])) * (1.0f * INPUT_SIZE / gridWidth);
+//
+// final float w = (float) (Math.exp(out[0][y][x][b][2]) * ANCHORS[2 * MASKS[i][b]]);
+// final float h = (float) (Math.exp(out[0][y][x][b][3]) * ANCHORS[2 * MASKS[i][b] + 1]);
+//
+// final RectF rect =
+// new RectF(
+// Math.max(0, xPos - w / 2),
+// Math.max(0, yPos - h / 2),
+// Math.min(bitmap.getWidth() - 1, xPos + w / 2),
+// Math.min(bitmap.getHeight() - 1, yPos + h / 2));
+// detections.add(new Recognition("" + offset, labels.get(detectedClass),
+// confidenceInClass, rect, detectedClass));
+// }
+// }
+// }
+// }
+// Log.d("YoloV4Classifier", "out[" + i + "] detect end");
+// }
+// return detections;
+// }
+
+ /**
+ * For yolov4-tiny the situation is a little different from full yolov4: it only has two
+ * outputs, each with three dimensions. The first is a tensor of shape [1, 2535, 4],
+ * containing all the bounding boxes; the second is a tensor of shape [1, 2535, class_num],
+ * containing all the class scores.
+ *
+ * @param byteBuffer input ByteBuffer containing the image data
+ * @param bitmap source bitmap whose dimensions are used to clip the output boxes
+ * @return an ArrayList containing the recognitions
+ */
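+ // The full-model export used below is assumed to expose the same two-tensor layout:
+ //   output 0: float[1][10647][4]             box (cx, cy, w, h) in input-image pixels
+ //   output 1: float[1][10647][labels.size()] per-class scores
+ // where 10647 = (52*52 + 26*26 + 13*13) * 3 anchors at a 416x416 input.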
+ private ArrayList<Recognition> getDetectionsForFull(ByteBuffer byteBuffer, Bitmap bitmap) {
+ ArrayList<Recognition> detections = new ArrayList<Recognition>();
+ Map<Integer, Object> outputMap = new HashMap<>();
+ outputMap.put(0, new float[1][OUTPUT_WIDTH_FULL[0]][4]);
+ outputMap.put(1, new float[1][OUTPUT_WIDTH_FULL[1]][labels.size()]);
+ Object[] inputArray = {byteBuffer};
+ tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
+
+ int gridWidth = OUTPUT_WIDTH_FULL[0];
+ float[][][] bboxes = (float[][][]) outputMap.get(0);
+ float[][][] out_score = (float[][][]) outputMap.get(1);
+
+ for (int i = 0; i < gridWidth; i++) {
+ float maxClass = 0;
+ int detectedClass = -1;
+ final float[] classes = new float[labels.size()];
+ for (int c = 0; c < labels.size(); c++) {
+ classes[c] = out_score[0][i][c];
+ }
+ for (int c = 0; c < labels.size(); c++) {
+ if (classes[c] > maxClass) {
+ detectedClass = c;
+ maxClass = classes[c];
+ }
+ }
+ final float score = maxClass;
+ if (score > getObjThresh()) {
+ final float xPos = bboxes[0][i][0];
+ final float yPos = bboxes[0][i][1];
+ final float w = bboxes[0][i][2];
+ final float h = bboxes[0][i][3];
+ final RectF rectF = new RectF(
+ Math.max(0, xPos - w / 2),
+ Math.max(0, yPos - h / 2),
+ Math.min(bitmap.getWidth() - 1, xPos + w / 2),
+ Math.min(bitmap.getHeight() - 1, yPos + h / 2));
+ detections.add(new Recognition("" + i, labels.get(detectedClass), score, rectF, detectedClass));
+ }
+ }
+ return detections;
+ }
+
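+ // Tiny variant: same two-tensor layout, with 2535 = (26*26 + 13*13) * 3 candidate boxes.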
+ private ArrayList<Recognition> getDetectionsForTiny(ByteBuffer byteBuffer, Bitmap bitmap) {
+ ArrayList<Recognition> detections = new ArrayList<Recognition>();
+ Map<Integer, Object> outputMap = new HashMap<>();
+ outputMap.put(0, new float[1][OUTPUT_WIDTH_TINY[0]][4]);
+ outputMap.put(1, new float[1][OUTPUT_WIDTH_TINY[1]][labels.size()]);
+ Object[] inputArray = {byteBuffer};
+ tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
+
+ int gridWidth = OUTPUT_WIDTH_TINY[0];
+ float[][][] bboxes = (float[][][]) outputMap.get(0);
+ float[][][] out_score = (float[][][]) outputMap.get(1);
+
+ for (int i = 0; i < gridWidth; i++) {
+ float maxClass = 0;
+ int detectedClass = -1;
+ final float[] classes = new float[labels.size()];
+ for (int c = 0; c < labels.size(); c++) {
+ classes[c] = out_score[0][i][c];
+ }
+ for (int c = 0; c < labels.size(); c++) {
+ if (classes[c] > maxClass) {
+ detectedClass = c;
+ maxClass = classes[c];
+ }
+ }
+ final float score = maxClass;
+ if (score > getObjThresh()) {
+ final float xPos = bboxes[0][i][0];
+ final float yPos = bboxes[0][i][1];
+ final float w = bboxes[0][i][2];
+ final float h = bboxes[0][i][3];
+ final RectF rectF = new RectF(
+ Math.max(0, xPos - w / 2),
+ Math.max(0, yPos - h / 2),
+ Math.min(bitmap.getWidth() - 1, xPos + w / 2),
+ Math.min(bitmap.getHeight() - 1, yPos + h / 2));
+ detections.add(new Recognition("" + i, labels.get(detectedClass), score, rectF, detectedClass));
+ }
+ }
+ return detections;
+ }
+
+ @Override
+ public ArrayList<Recognition> recognizeImage(Bitmap bitmap) {
+ ByteBuffer byteBuffer = convertBitmapToByteBuffer(bitmap);
+
+ ArrayList<Recognition> detections;
+ if (isTiny) {
+ detections = getDetectionsForTiny(byteBuffer, bitmap);
+ } else {
+ detections = getDetectionsForFull(byteBuffer, bitmap);
+ }
+ final ArrayList<Recognition> recognitions = nms(detections);
+ return recognitions;
+ }
+
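+ // Maps a predicted center/size box from the letterboxed network input back onto the
+ // original image, clips it, and rejects boxes that end up degenerate.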
+ public boolean checkInvalidateBox(float x, float y, float width, float height, float oriW, float oriH, int inputSize) {
+ // (1) (x, y, w, h) --> (xmin, ymin, xmax, ymax)
+ float halfHeight = height / 2.0f;
+ float halfWidth = width / 2.0f;
+
+ float[] pred_coor = new float[]{x - halfWidth, y - halfHeight, x + halfWidth, y + halfHeight};
+
+ // (2) (xmin, ymin, xmax, ymax) -> (xmin_org, ymin_org, xmax_org, ymax_org)
+ float resize_ratioW = 1.0f * inputSize / oriW;
+ float resize_ratioH = 1.0f * inputSize / oriH;
+
+ float resize_ratio = resize_ratioW > resize_ratioH ? resize_ratioH : resize_ratioW; //min
+
+ float dw = (inputSize - resize_ratio * oriW) / 2;
+ float dh = (inputSize - resize_ratio * oriH) / 2;
+
+ pred_coor[0] = 1.0f * (pred_coor[0] - dw) / resize_ratio;
+ pred_coor[2] = 1.0f * (pred_coor[2] - dw) / resize_ratio;
+
+ pred_coor[1] = 1.0f * (pred_coor[1] - dh) / resize_ratio;
+ pred_coor[3] = 1.0f * (pred_coor[3] - dh) / resize_ratio;
+
+ // (3) clip boxes that are out of range
+ pred_coor[0] = pred_coor[0] > 0 ? pred_coor[0] : 0;
+ pred_coor[1] = pred_coor[1] > 0 ? pred_coor[1] : 0;
+
+ pred_coor[2] = pred_coor[2] < (oriW - 1) ? pred_coor[2] : (oriW - 1);
+ pred_coor[3] = pred_coor[3] < (oriH - 1) ? pred_coor[3] : (oriH - 1);
+
+ if ((pred_coor[0] > pred_coor[2]) || (pred_coor[1] > pred_coor[3])) {
+ pred_coor[0] = 0;
+ pred_coor[1] = 0;
+ pred_coor[2] = 0;
+ pred_coor[3] = 0;
+ }
+
+ // (4) discard some invalid boxes
+ float temp1 = pred_coor[2] - pred_coor[0];
+ float temp2 = pred_coor[3] - pred_coor[1];
+ float temp = temp1 * temp2;
+ if (temp < 0) {
+ Log.e("checkInvalidateBox", "temp < 0");
+ return false;
+ }
+ if (Math.sqrt(temp) > Float.MAX_VALUE) {
+ Log.e("checkInvalidateBox", "temp max");
+ return false;
+ }
+
+ return true;
+ }
+}
\ No newline at end of file
diff --git a/android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/MultiBoxTracker.java b/android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/MultiBoxTracker.java
new file mode 100755
index 0000000000000000000000000000000000000000..cd8a8b2113e2537684a5895433c8b2c1595045d1
--- /dev/null
+++ b/android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/MultiBoxTracker.java
@@ -0,0 +1,211 @@
+/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+==============================================================================*/
+
+package org.tensorflow.lite.examples.detection.tracking;
+
+import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Matrix;
+import android.graphics.Paint;
+import android.graphics.Paint.Cap;
+import android.graphics.Paint.Join;
+import android.graphics.Paint.Style;
+import android.graphics.RectF;
+import android.text.TextUtils;
+import android.util.Pair;
+import android.util.TypedValue;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Queue;
+import org.tensorflow.lite.examples.detection.env.BorderedText;
+import org.tensorflow.lite.examples.detection.env.ImageUtils;
+import org.tensorflow.lite.examples.detection.env.Logger;
+import org.tensorflow.lite.examples.detection.tflite.Classifier.Recognition;
+
+/** A tracker that handles non-max suppression and matches existing objects to new detections. */
+public class MultiBoxTracker {
+ private static final float TEXT_SIZE_DIP = 18;
+ private static final float MIN_SIZE = 16.0f;
+ private static final int[] COLORS = {
+ Color.BLUE,
+ Color.RED,
+ Color.GREEN,
+ Color.YELLOW,
+ Color.CYAN,
+ Color.MAGENTA,
+ Color.WHITE,
+ Color.parseColor("#55FF55"),
+ Color.parseColor("#FFA500"),
+ Color.parseColor("#FF8888"),
+ Color.parseColor("#AAAAFF"),
+ Color.parseColor("#FFFFAA"),
+ Color.parseColor("#55AAAA"),
+ Color.parseColor("#AA33AA"),
+ Color.parseColor("#0D0068")
+ };
+ final List<Pair<Float, RectF>> screenRects = new LinkedList<Pair<Float, RectF>>();
+ private final Logger logger = new Logger();
+ private final Queue<Integer> availableColors = new LinkedList<Integer>();
+ private final List<TrackedRecognition> trackedObjects = new LinkedList<TrackedRecognition>();
+ private final Paint boxPaint = new Paint();
+ private final float textSizePx;
+ private final BorderedText borderedText;
+ private Matrix frameToCanvasMatrix;
+ private int frameWidth;
+ private int frameHeight;
+ private int sensorOrientation;
+
+ public MultiBoxTracker(final Context context) {
+ for (final int color : COLORS) {
+ availableColors.add(color);
+ }
+
+ boxPaint.setColor(Color.RED);
+ boxPaint.setStyle(Style.STROKE);
+ boxPaint.setStrokeWidth(10.0f);
+ boxPaint.setStrokeCap(Cap.ROUND);
+ boxPaint.setStrokeJoin(Join.ROUND);
+ boxPaint.setStrokeMiter(100);
+
+ textSizePx =
+ TypedValue.applyDimension(
+ TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, context.getResources().getDisplayMetrics());
+ borderedText = new BorderedText(textSizePx);
+ }
+
+ public synchronized void setFrameConfiguration(
+ final int width, final int height, final int sensorOrientation) {
+ frameWidth = width;
+ frameHeight = height;
+ this.sensorOrientation = sensorOrientation;
+ }
+
+ public synchronized void drawDebug(final Canvas canvas) {
+ final Paint textPaint = new Paint();
+ textPaint.setColor(Color.WHITE);
+ textPaint.setTextSize(60.0f);
+
+ final Paint boxPaint = new Paint();
+ boxPaint.setColor(Color.RED);
+ boxPaint.setAlpha(200);
+ boxPaint.setStyle(Style.STROKE);
+
+ for (final Pair<Float, RectF> detection : screenRects) {
+ final RectF rect = detection.second;
+ canvas.drawRect(rect, boxPaint);
+ canvas.drawText("" + detection.first, rect.left, rect.top, textPaint);
+ borderedText.drawText(canvas, rect.centerX(), rect.centerY(), "" + detection.first);
+ }
+ }
+
+ public synchronized void trackResults(final List<Recognition> results, final long timestamp) {
+ logger.i("Processing %d results from %d", results.size(), timestamp);
+ processResults(results);
+ }
+
+ private Matrix getFrameToCanvasMatrix() {
+ return frameToCanvasMatrix;
+ }
+
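+ // Projects tracked boxes from frame coordinates onto the canvas, honoring sensor
+ // rotation, then draws each box as a round-rect with its label and confidence.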
+ public synchronized void draw(final Canvas canvas) {
+ final boolean rotated = sensorOrientation % 180 == 90;
+ final float multiplier =
+ Math.min(
+ canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
+ canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth));
+ frameToCanvasMatrix =
+ ImageUtils.getTransformationMatrix(
+ frameWidth,
+ frameHeight,
+ (int) (multiplier * (rotated ? frameHeight : frameWidth)),
+ (int) (multiplier * (rotated ? frameWidth : frameHeight)),
+ sensorOrientation,
+ false);
+ for (final TrackedRecognition recognition : trackedObjects) {
+ final RectF trackedPos = new RectF(recognition.location);
+
+ getFrameToCanvasMatrix().mapRect(trackedPos);
+ boxPaint.setColor(recognition.color);
+
+ float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f;
+ canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint);
+
+ final String labelString =
+ !TextUtils.isEmpty(recognition.title)
+ ? String.format("%s %.2f", recognition.title, (100 * recognition.detectionConfidence))
+ : String.format("%.2f", (100 * recognition.detectionConfidence));
+ // borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top,
+ // labelString);
+ borderedText.drawText(
+ canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint);
+ }
+ }
+
+ private void processResults(final List<Recognition> results) {
+ final List<Pair<Float, Recognition>> rectsToTrack = new LinkedList<Pair<Float, Recognition>>();
+
+ screenRects.clear();
+ final Matrix rgbFrameToScreen = new Matrix(getFrameToCanvasMatrix());
+
+ for (final Recognition result : results) {
+ if (result.getLocation() == null) {
+ continue;
+ }
+ final RectF detectionFrameRect = new RectF(result.getLocation());
+
+ final RectF detectionScreenRect = new RectF();
+ rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect);
+
+ logger.v(
+ "Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);
+
+ screenRects.add(new Pair<Float, RectF>(result.getConfidence(), detectionScreenRect));
+
+ if (detectionFrameRect.width() < MIN_SIZE || detectionFrameRect.height() < MIN_SIZE) {
+ logger.w("Degenerate rectangle! " + detectionFrameRect);
+ continue;
+ }
+
+ rectsToTrack.add(new Pair<Float, Recognition>(result.getConfidence(), result));
+ }
+
+ trackedObjects.clear();
+ if (rectsToTrack.isEmpty()) {
+ logger.v("Nothing to track, aborting.");
+ return;
+ }
+
+ for (final Pair<Float, Recognition> potential : rectsToTrack) {
+ final TrackedRecognition trackedRecognition = new TrackedRecognition();
+ trackedRecognition.detectionConfidence = potential.first;
+ trackedRecognition.location = new RectF(potential.second.getLocation());
+ trackedRecognition.title = potential.second.getTitle();
+ trackedRecognition.color = COLORS[trackedObjects.size()];
+ trackedObjects.add(trackedRecognition);
+
+ if (trackedObjects.size() >= COLORS.length) {
+ break;
+ }
+ }
+ }
+
+ private static class TrackedRecognition {
+ RectF location;
+ float detectionConfidence;
+ int color;
+ String title;
+ }
+}
diff --git a/android/app/src/main/res/drawable-hdpi/ic_launcher.png b/android/app/src/main/res/drawable-hdpi/ic_launcher.png
new file mode 100755
index 0000000000000000000000000000000000000000..de511b0c4372645defcbc715c416a9c31bcf0828
Binary files /dev/null and b/android/app/src/main/res/drawable-hdpi/ic_launcher.png differ
diff --git a/android/app/src/main/res/drawable-mdpi/ic_launcher.png b/android/app/src/main/res/drawable-mdpi/ic_launcher.png
new file mode 100755
index 0000000000000000000000000000000000000000..5d502241dd757ef702f8f6f3d9988c48831633da
Binary files /dev/null and b/android/app/src/main/res/drawable-mdpi/ic_launcher.png differ
diff --git a/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml b/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
new file mode 100755
index 0000000000000000000000000000000000000000..b1517edf496ef5800b97d046b92012a9f94a34d0
--- /dev/null
+++ b/android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml
@@ -0,0 +1,34 @@
+
+
+
+
+
+
+
+
+
+
+
diff --git a/android/app/src/main/res/drawable-v24/kite.jpg b/android/app/src/main/res/drawable-v24/kite.jpg
new file mode 100755
index 0000000000000000000000000000000000000000..9eb325ac5fc375cb2513380087dd713be9be19d8
Binary files /dev/null and b/android/app/src/main/res/drawable-v24/kite.jpg differ
diff --git a/android/app/src/main/res/drawable-xxhdpi/ic_launcher.png b/android/app/src/main/res/drawable-xxhdpi/ic_launcher.png
new file mode 100755
index 0000000000000000000000000000000000000000..aa1311bc49af57654b2445ab2d428f0191897f77
Binary files /dev/null and b/android/app/src/main/res/drawable-xxhdpi/ic_launcher.png differ
diff --git a/android/app/src/main/res/drawable-xxhdpi/icn_chevron_down.png b/android/app/src/main/res/drawable-xxhdpi/icn_chevron_down.png
new file mode 100755
index 0000000000000000000000000000000000000000..2392eb0ce20f8a2a4e2a51942c10469e30ccf624
Binary files /dev/null and b/android/app/src/main/res/drawable-xxhdpi/icn_chevron_down.png differ
diff --git a/android/app/src/main/res/drawable-xxhdpi/icn_chevron_up.png b/android/app/src/main/res/drawable-xxhdpi/icn_chevron_up.png
new file mode 100755
index 0000000000000000000000000000000000000000..1ec6a07e919c5409d6e94b2f16c968ca75427cab
Binary files /dev/null and b/android/app/src/main/res/drawable-xxhdpi/icn_chevron_up.png differ
diff --git a/android/app/src/main/res/drawable-xxhdpi/tfl2_logo.png b/android/app/src/main/res/drawable-xxhdpi/tfl2_logo.png
new file mode 100755
index 0000000000000000000000000000000000000000..48c5f33fc5710fdf0692e049139fe163a2c3888a
Binary files /dev/null and b/android/app/src/main/res/drawable-xxhdpi/tfl2_logo.png differ
diff --git a/android/app/src/main/res/drawable-xxhdpi/tfl2_logo_dark.png b/android/app/src/main/res/drawable-xxhdpi/tfl2_logo_dark.png
new file mode 100755
index 0000000000000000000000000000000000000000..23ff809b5f85c5cea23b53a95b1cdef13d9e8281
Binary files /dev/null and b/android/app/src/main/res/drawable-xxhdpi/tfl2_logo_dark.png differ
diff --git a/android/app/src/main/res/drawable-xxxhdpi/caret.jpg b/android/app/src/main/res/drawable-xxxhdpi/caret.jpg
new file mode 100755
index 0000000000000000000000000000000000000000..4229e340be3b1554d526c5ed2eee935241491812
Binary files /dev/null and b/android/app/src/main/res/drawable-xxxhdpi/caret.jpg differ
diff --git a/android/app/src/main/res/drawable-xxxhdpi/chair.jpg b/android/app/src/main/res/drawable-xxxhdpi/chair.jpg
new file mode 100755
index 0000000000000000000000000000000000000000..90efe35a14b1afd31716808c28563ed40815f56b
Binary files /dev/null and b/android/app/src/main/res/drawable-xxxhdpi/chair.jpg differ
diff --git a/android/app/src/main/res/drawable-xxxhdpi/sample_image.jpg b/android/app/src/main/res/drawable-xxxhdpi/sample_image.jpg
new file mode 100755
index 0000000000000000000000000000000000000000..785603e37b7c48f6d5b0185791666c256275c070
Binary files /dev/null and b/android/app/src/main/res/drawable-xxxhdpi/sample_image.jpg differ
diff --git a/android/app/src/main/res/drawable/bottom_sheet_bg.xml b/android/app/src/main/res/drawable/bottom_sheet_bg.xml
new file mode 100755
index 0000000000000000000000000000000000000000..70f4b24e35039e6bfc35989bcbe570a4bdc2ae07
--- /dev/null
+++ b/android/app/src/main/res/drawable/bottom_sheet_bg.xml
@@ -0,0 +1,9 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/app/src/main/res/drawable/ic_baseline_add.xml b/android/app/src/main/res/drawable/ic_baseline_add.xml
new file mode 100755
index 0000000000000000000000000000000000000000..757f4503314fb9e5837f68ac515f4487d9b5fc2c
--- /dev/null
+++ b/android/app/src/main/res/drawable/ic_baseline_add.xml
@@ -0,0 +1,9 @@
+
+
+
diff --git a/android/app/src/main/res/drawable/ic_baseline_remove.xml b/android/app/src/main/res/drawable/ic_baseline_remove.xml
new file mode 100755
index 0000000000000000000000000000000000000000..a64b853e79137f0fd95f9d5fa6e0552cc255c7ae
--- /dev/null
+++ b/android/app/src/main/res/drawable/ic_baseline_remove.xml
@@ -0,0 +1,9 @@
+
+
+
diff --git a/android/app/src/main/res/drawable/ic_launcher_background.xml b/android/app/src/main/res/drawable/ic_launcher_background.xml
new file mode 100755
index 0000000000000000000000000000000000000000..d5fccc538c179838bfdce779c26eebb4fa0b5ce9
--- /dev/null
+++ b/android/app/src/main/res/drawable/ic_launcher_background.xml
@@ -0,0 +1,170 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/android/app/src/main/res/drawable/rectangle.xml b/android/app/src/main/res/drawable/rectangle.xml
new file mode 100755
index 0000000000000000000000000000000000000000..b8f5d3559c4e83072d5d73a3241d240aa68daccf
--- /dev/null
+++ b/android/app/src/main/res/drawable/rectangle.xml
@@ -0,0 +1,13 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/app/src/main/res/layout/activity_main.xml b/android/app/src/main/res/layout/activity_main.xml
new file mode 100755
index 0000000000000000000000000000000000000000..3b4d2c175d5258122aed458920456a3d4a999828
--- /dev/null
+++ b/android/app/src/main/res/layout/activity_main.xml
@@ -0,0 +1,52 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/android/app/src/main/res/layout/tfe_od_activity_camera.xml b/android/app/src/main/res/layout/tfe_od_activity_camera.xml
new file mode 100755
index 0000000000000000000000000000000000000000..6d7fcc8c62275f17ab4f46903215d03f9cf594ae
--- /dev/null
+++ b/android/app/src/main/res/layout/tfe_od_activity_camera.xml
@@ -0,0 +1,56 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/android/app/src/main/res/layout/tfe_od_camera_connection_fragment_tracking.xml b/android/app/src/main/res/layout/tfe_od_camera_connection_fragment_tracking.xml
new file mode 100755
index 0000000000000000000000000000000000000000..754f29512b7f54b4cfb2f8200400eb101cec98cd
--- /dev/null
+++ b/android/app/src/main/res/layout/tfe_od_camera_connection_fragment_tracking.xml
@@ -0,0 +1,30 @@
+
+
+
+
+
+
+
+
diff --git a/android/app/src/main/res/layout/tfe_od_layout_bottom_sheet.xml b/android/app/src/main/res/layout/tfe_od_layout_bottom_sheet.xml
new file mode 100755
index 0000000000000000000000000000000000000000..0598043d427f48959474e123ef1493e98bd541af
--- /dev/null
+++ b/android/app/src/main/res/layout/tfe_od_layout_bottom_sheet.xml
@@ -0,0 +1,187 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml b/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
new file mode 100755
index 0000000000000000000000000000000000000000..0c2a915e91af65a077d2e01db4ca21acd42906f3
--- /dev/null
+++ b/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
@@ -0,0 +1,5 @@
+
+
+
+
+
diff --git a/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml b/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
new file mode 100755
index 0000000000000000000000000000000000000000..0c2a915e91af65a077d2e01db4ca21acd42906f3
--- /dev/null
+++ b/android/app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
@@ -0,0 +1,5 @@
+
+
+
+
+
diff --git a/android/app/src/main/res/mipmap-hdpi/ic_launcher.png b/android/app/src/main/res/mipmap-hdpi/ic_launcher.png
new file mode 100755
index 0000000000000000000000000000000000000000..ef568ef1bde999ca477eea0fd842280551843950
Binary files /dev/null and b/android/app/src/main/res/mipmap-hdpi/ic_launcher.png differ
diff --git a/android/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png b/android/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png
new file mode 100755
index 0000000000000000000000000000000000000000..4eaccdd665e445c1eb7d26cca8a54afbc37ada1c
Binary files /dev/null and b/android/app/src/main/res/mipmap-hdpi/ic_launcher_foreground.png differ
diff --git a/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png b/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png
new file mode 100755
index 0000000000000000000000000000000000000000..316ec9bd0fc95261ed9cbea01aadcbd7314a747b
Binary files /dev/null and b/android/app/src/main/res/mipmap-hdpi/ic_launcher_round.png differ
diff --git a/android/app/src/main/res/mipmap-mdpi/ic_launcher.png b/android/app/src/main/res/mipmap-mdpi/ic_launcher.png
new file mode 100755
index 0000000000000000000000000000000000000000..f9a7c5b12fb924ea9fe2f8a8bba729a63de7a837
Binary files /dev/null and b/android/app/src/main/res/mipmap-mdpi/ic_launcher.png differ
diff --git a/android/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png b/android/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png
new file mode 100755
index 0000000000000000000000000000000000000000..7baaea015b4e7043a04500cb15a913fe53d6aaaf
Binary files /dev/null and b/android/app/src/main/res/mipmap-mdpi/ic_launcher_foreground.png differ
diff --git a/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png b/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png
new file mode 100755
index 0000000000000000000000000000000000000000..31251d8f4d8ea362f7ec44b92a95cc47a54ff0b0
Binary files /dev/null and b/android/app/src/main/res/mipmap-mdpi/ic_launcher_round.png differ
diff --git a/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png b/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png
new file mode 100755
index 0000000000000000000000000000000000000000..07dbc7ffaf7834b42076cd4eca8dbb06f947ef60
Binary files /dev/null and b/android/app/src/main/res/mipmap-xhdpi/ic_launcher.png differ
diff --git a/android/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png b/android/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png
new file mode 100755
index 0000000000000000000000000000000000000000..9b01b6d4d9bdaeac15e6a5657264204d7a258c6a
Binary files /dev/null and b/android/app/src/main/res/mipmap-xhdpi/ic_launcher_foreground.png differ
diff --git a/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png b/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png
new file mode 100755
index 0000000000000000000000000000000000000000..af08f94ab87ba1e10b4fde6cf7b7c49efb688cc3
Binary files /dev/null and b/android/app/src/main/res/mipmap-xhdpi/ic_launcher_round.png differ
diff --git a/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png b/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png
new file mode 100755
index 0000000000000000000000000000000000000000..92b0946720efc8a170ac3b658959615600b0075b
Binary files /dev/null and b/android/app/src/main/res/mipmap-xxhdpi/ic_launcher.png differ
diff --git a/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png b/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png
new file mode 100755
index 0000000000000000000000000000000000000000..bfc2f073c6a7cd8d0356a402b3e3bf6e13ff399c
Binary files /dev/null and b/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_foreground.png differ
diff --git a/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png b/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png
new file mode 100755
index 0000000000000000000000000000000000000000..e7f67b6b4615f0bbaae0506b3aea2d17b7b335cc
Binary files /dev/null and b/android/app/src/main/res/mipmap-xxhdpi/ic_launcher_round.png differ
diff --git a/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png b/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png
new file mode 100755
index 0000000000000000000000000000000000000000..5258ade38bc28d46c0904ec203bb32a969650ae3
Binary files /dev/null and b/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher.png differ
diff --git a/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png b/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png
new file mode 100755
index 0000000000000000000000000000000000000000..0ee89d8055ea137ec4193753ee989f105988c72e
Binary files /dev/null and b/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_foreground.png differ
diff --git a/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png b/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png
new file mode 100755
index 0000000000000000000000000000000000000000..f27f4cdd5cd360b1d77de600253bed4d8105d192
Binary files /dev/null and b/android/app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.png differ
diff --git a/android/app/src/main/res/values/colors.xml b/android/app/src/main/res/values/colors.xml
new file mode 100755
index 0000000000000000000000000000000000000000..ed82bafb536474c6a88c996b439a2781f31f3d3e
--- /dev/null
+++ b/android/app/src/main/res/values/colors.xml
@@ -0,0 +1,8 @@
+
+
+ #ffa800
+ #ff6f00
+ #425066
+
+ #66000000
+
diff --git a/android/app/src/main/res/values/dimens.xml b/android/app/src/main/res/values/dimens.xml
new file mode 100755
index 0000000000000000000000000000000000000000..5d3609029ca66b612c88b4f395e4e2e3cfc1f0e6
--- /dev/null
+++ b/android/app/src/main/res/values/dimens.xml
@@ -0,0 +1,5 @@
+
+
+ 15dp
+ 8dp
+
\ No newline at end of file
diff --git a/android/app/src/main/res/values/strings.xml b/android/app/src/main/res/values/strings.xml
new file mode 100755
index 0000000000000000000000000000000000000000..90842d0716978958329d04ade5ef6c13f77496c3
--- /dev/null
+++ b/android/app/src/main/res/values/strings.xml
@@ -0,0 +1,4 @@
+
+ TFL Detect
+ This device doesn\'t support Camera2 API.
+
diff --git a/android/app/src/main/res/values/styles.xml b/android/app/src/main/res/values/styles.xml
new file mode 100755
index 0000000000000000000000000000000000000000..3df01bea5ceab044ac610ffeb1bff03a31a0f150
--- /dev/null
+++ b/android/app/src/main/res/values/styles.xml
@@ -0,0 +1,11 @@
+
+
+
+
+
+
diff --git a/android/build.gradle b/android/build.gradle
new file mode 100755
index 0000000000000000000000000000000000000000..d9c884cdb5309accdb759abe0b0f1d6aa501db0b
--- /dev/null
+++ b/android/build.gradle
@@ -0,0 +1,29 @@
+// Top-level build file where you can add configuration options common to all sub-projects/modules.
+
+buildscript {
+
+ repositories {
+ google()
+ jcenter()
+ mavenLocal()
+ }
+ dependencies {
+ classpath 'com.android.tools.build:gradle:3.5.0'
+ classpath 'de.undercouch:gradle-download-task:3.4.3'
+
+ // NOTE: Do not place your application dependencies here; they belong
+ // in the individual module build.gradle files
+ }
+}
+
+allprojects {
+ repositories {
+ google()
+ jcenter()
+ mavenLocal()
+ }
+}
+
+task clean(type: Delete) {
+ delete rootProject.buildDir
+}
diff --git a/android/gradle.properties b/android/gradle.properties
new file mode 100755
index 0000000000000000000000000000000000000000..9592636c07d9d5e6f61c0cfce1311d3e1ffcf34d
--- /dev/null
+++ b/android/gradle.properties
@@ -0,0 +1,15 @@
+# Project-wide Gradle settings.
+# IDE (e.g. Android Studio) users:
+# Gradle settings configured through the IDE *will override*
+# any settings specified in this file.
+# For more details on how to configure your build environment visit
+# http://www.gradle.org/docs/current/userguide/build_environment.html
+# Specifies the JVM arguments used for the daemon process.
+# The setting is particularly useful for tweaking memory settings.
+org.gradle.jvmargs=-Xmx1536m
+# When configured, Gradle will run in incubating parallel mode.
+# This option should only be used with decoupled projects. More details, visit
+# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
+# org.gradle.parallel=true
+android.useAndroidX=true
+android.enableJetifier=true
diff --git a/android/gradle/wrapper/gradle-wrapper.jar b/android/gradle/wrapper/gradle-wrapper.jar
new file mode 100755
index 0000000000000000000000000000000000000000..7a3265ee94c0ab25cf079ac8ccdf87f41d455d42
Binary files /dev/null and b/android/gradle/wrapper/gradle-wrapper.jar differ
diff --git a/android/gradle/wrapper/gradle-wrapper.properties b/android/gradle/wrapper/gradle-wrapper.properties
new file mode 100755
index 0000000000000000000000000000000000000000..0cd8404bd376f01aab03c8d97b1d42504d1f20b0
--- /dev/null
+++ b/android/gradle/wrapper/gradle-wrapper.properties
@@ -0,0 +1,6 @@
+#Wed Apr 29 08:34:23 ICT 2020
+distributionBase=GRADLE_USER_HOME
+distributionPath=wrapper/dists
+zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-5.4.1-all.zip
diff --git a/android/gradlew b/android/gradlew
new file mode 100755
index 0000000000000000000000000000000000000000..cccdd3d517fc5249beaefa600691cf150f2fa3e6
--- /dev/null
+++ b/android/gradlew
@@ -0,0 +1,172 @@
+#!/usr/bin/env sh
+
+##############################################################################
+##
+## Gradle start up script for UN*X
+##
+##############################################################################
+
+# Attempt to set APP_HOME
+# Resolve links: $0 may be a link
+PRG="$0"
+# Need this for relative symlinks.
+while [ -h "$PRG" ] ; do
+ ls=`ls -ld "$PRG"`
+ link=`expr "$ls" : '.*-> \(.*\)$'`
+ if expr "$link" : '/.*' > /dev/null; then
+ PRG="$link"
+ else
+ PRG=`dirname "$PRG"`"/$link"
+ fi
+done
+SAVED="`pwd`"
+cd "`dirname \"$PRG\"`/" >/dev/null
+APP_HOME="`pwd -P`"
+cd "$SAVED" >/dev/null
+
+APP_NAME="Gradle"
+APP_BASE_NAME=`basename "$0"`
+
+# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+DEFAULT_JVM_OPTS=""
+
+# Use the maximum available, or set MAX_FD != -1 to use that value.
+MAX_FD="maximum"
+
+warn () {
+ echo "$*"
+}
+
+die () {
+ echo
+ echo "$*"
+ echo
+ exit 1
+}
+
+# OS specific support (must be 'true' or 'false').
+cygwin=false
+msys=false
+darwin=false
+nonstop=false
+case "`uname`" in
+ CYGWIN* )
+ cygwin=true
+ ;;
+ Darwin* )
+ darwin=true
+ ;;
+ MINGW* )
+ msys=true
+ ;;
+ NONSTOP* )
+ nonstop=true
+ ;;
+esac
+
+CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
+
+# Determine the Java command to use to start the JVM.
+if [ -n "$JAVA_HOME" ] ; then
+ if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
+ # IBM's JDK on AIX uses strange locations for the executables
+ JAVACMD="$JAVA_HOME/jre/sh/java"
+ else
+ JAVACMD="$JAVA_HOME/bin/java"
+ fi
+ if [ ! -x "$JAVACMD" ] ; then
+ die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+ fi
+else
+ JAVACMD="java"
+ which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+
+Please set the JAVA_HOME variable in your environment to match the
+location of your Java installation."
+fi
+
+# Increase the maximum file descriptors if we can.
+if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
+ MAX_FD_LIMIT=`ulimit -H -n`
+ if [ $? -eq 0 ] ; then
+ if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
+ MAX_FD="$MAX_FD_LIMIT"
+ fi
+ ulimit -n $MAX_FD
+ if [ $? -ne 0 ] ; then
+ warn "Could not set maximum file descriptor limit: $MAX_FD"
+ fi
+ else
+ warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
+ fi
+fi
+
+# For Darwin, add options to specify how the application appears in the dock
+if $darwin; then
+ GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
+fi
+
+# For Cygwin, switch paths to Windows format before running java
+if $cygwin ; then
+ APP_HOME=`cygpath --path --mixed "$APP_HOME"`
+ CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
+ JAVACMD=`cygpath --unix "$JAVACMD"`
+
+ # We build the pattern for arguments to be converted via cygpath
+ ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
+ SEP=""
+ for dir in $ROOTDIRSRAW ; do
+ ROOTDIRS="$ROOTDIRS$SEP$dir"
+ SEP="|"
+ done
+ OURCYGPATTERN="(^($ROOTDIRS))"
+ # Add a user-defined pattern to the cygpath arguments
+ if [ "$GRADLE_CYGPATTERN" != "" ] ; then
+ OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
+ fi
+ # Now convert the arguments - kludge to limit ourselves to /bin/sh
+ i=0
+ for arg in "$@" ; do
+ CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
+ CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
+
+ if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
+ eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
+ else
+ eval `echo args$i`="\"$arg\""
+ fi
+ i=$((i+1))
+ done
+ case $i in
+ (0) set -- ;;
+ (1) set -- "$args0" ;;
+ (2) set -- "$args0" "$args1" ;;
+ (3) set -- "$args0" "$args1" "$args2" ;;
+ (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
+ (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
+ (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
+ (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
+ (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
+ (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
+ esac
+fi
+
+# Escape application args
+save () {
+ for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
+ echo " "
+}
+APP_ARGS=$(save "$@")
+
+# Collect all arguments for the java command, following the shell quoting and substitution rules
+eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
+
+# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
+if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
+ cd "$(dirname "$0")"
+fi
+
+exec "$JAVACMD" "$@"
diff --git a/android/gradlew.bat b/android/gradlew.bat
new file mode 100755
index 0000000000000000000000000000000000000000..f9553162f122c71b34635112e717c3e733b5b212
--- /dev/null
+++ b/android/gradlew.bat
@@ -0,0 +1,84 @@
+@if "%DEBUG%" == "" @echo off
+@rem ##########################################################################
+@rem
+@rem Gradle startup script for Windows
+@rem
+@rem ##########################################################################
+
+@rem Set local scope for the variables with windows NT shell
+if "%OS%"=="Windows_NT" setlocal
+
+set DIRNAME=%~dp0
+if "%DIRNAME%" == "" set DIRNAME=.
+set APP_BASE_NAME=%~n0
+set APP_HOME=%DIRNAME%
+
+@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
+set DEFAULT_JVM_OPTS=
+
+@rem Find java.exe
+if defined JAVA_HOME goto findJavaFromJavaHome
+
+set JAVA_EXE=java.exe
+%JAVA_EXE% -version >NUL 2>&1
+if "%ERRORLEVEL%" == "0" goto init
+
+echo.
+echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:findJavaFromJavaHome
+set JAVA_HOME=%JAVA_HOME:"=%
+set JAVA_EXE=%JAVA_HOME%/bin/java.exe
+
+if exist "%JAVA_EXE%" goto init
+
+echo.
+echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
+echo.
+echo Please set the JAVA_HOME variable in your environment to match the
+echo location of your Java installation.
+
+goto fail
+
+:init
+@rem Get command-line arguments, handling Windows variants
+
+if not "%OS%" == "Windows_NT" goto win9xME_args
+
+:win9xME_args
+@rem Slurp the command line arguments.
+set CMD_LINE_ARGS=
+set _SKIP=2
+
+:win9xME_args_slurp
+if "x%~1" == "x" goto execute
+
+set CMD_LINE_ARGS=%*
+
+:execute
+@rem Setup the command line
+
+set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
+
+@rem Execute Gradle
+"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
+
+:end
+@rem End local scope for the variables with windows NT shell
+if "%ERRORLEVEL%"=="0" goto mainEnd
+
+:fail
+rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
+rem the _cmd.exe /c_ return code!
+if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
+exit /b 1
+
+:mainEnd
+if "%OS%"=="Windows_NT" endlocal
+
+:omega
diff --git a/android/settings.gradle b/android/settings.gradle
new file mode 100755
index 0000000000000000000000000000000000000000..f2cd4a43a4a058002ffcc1541c2155578c9c63dd
--- /dev/null
+++ b/android/settings.gradle
@@ -0,0 +1 @@
+include ':app', ':tensorflow-lite'