This repository was archived by the owner on Feb 22, 2023. It is now read-only.

WIP: Live MLKit vision detection using the camera plugin #688

Closed
Changes from all commits (38 commits)
67fa8fd
Add a placeholder for the live preview screen.
dustin-graham Jul 7, 2018
c864974
WIP. basic barcode detection impl.
dustin-graham Jul 7, 2018
73f4fe2
WIP.
dustin-graham Jul 7, 2018
04395d8
rotate image on Android prior to creating the FirebaseVisionImage
dustin-graham Jul 9, 2018
cb953e7
Add a basic camera preview implementation
dustin-graham Jul 9, 2018
973b32c
resume camera preview
dustin-graham Jul 9, 2018
8689993
strip out picture taking and video recording from Android camera impl
dustin-graham Jul 9, 2018
beaaa5f
android: insert per-frame handling of preview images
dustin-graham Jul 9, 2018
1ad5ae7
android: pipe image frames to MLKit Barcode detector
dustin-graham Jul 9, 2018
76930fc
android: send count of recognized barcodes
dustin-graham Jul 10, 2018
a0a1b62
android: get barcode bounding boxes displaying in Flutter
dustin-graham Jul 10, 2018
56272e4
ios: Add basic barcode scanning.
dustin-graham Jul 10, 2018
4e10f44
ios: Add live view barcode scanning
dustin-graham Jul 11, 2018
1313564
WIP: live text detection
dustin-graham Jul 13, 2018
99196c3
WIP: android live text detection.
dustin-graham Jul 14, 2018
dacaccf
WIP: Android legacy camera detector impl. Min SDK on this lib is 16.
dustin-graham Jul 19, 2018
6054f37
Android: allow detector and resolution to be set
dustin-graham Jul 19, 2018
de93bc7
Working live detection implementation for Android and iOS
dustin-graham Jul 20, 2018
7c8cca4
Merge remote-tracking branch 'upstream/master' into live_preview
dustin-graham Jul 20, 2018
4c5bf21
update Android with latest from upstream.
dustin-graham Jul 20, 2018
2219e8e
update both Android and iOS live view to work with new detectors.
dustin-graham Jul 20, 2018
8477cf9
Merge remote-tracking branch 'upstream/master' into live_preview
dustin-graham Jul 20, 2018
d1d24e7
remove unused ExifInterface dependency
dustin-graham Jul 21, 2018
b85bdb8
resolve dart analysis warnings.
dustin-graham Jul 21, 2018
a86fe62
reformat code.
dustin-graham Jul 21, 2018
e688aef
fix barcode test
dustin-graham Jul 21, 2018
762b04b
revert accidental camera plugin changes
dustin-graham Jul 21, 2018
8190bff
clang-format iOS files
dustin-graham Jul 21, 2018
f98ce94
Clean up LiveView implementation and fix formatting issues.
dustin-graham Jul 26, 2018
9fdee7c
Merge remote-tracking branch 'upstream/master' into live_preview
dustin-graham Jul 26, 2018
d7e6b32
update to support new LabelDetector.
dustin-graham Jul 26, 2018
776294d
undo inadvertent formatting changes outside firebase_ml_vision.
dustin-graham Jul 26, 2018
ef72e0a
add camera plugin as a dependency to firebase_ml_vision
dustin-graham Jul 27, 2018
543b6d7
WIP: send detected data back to Flutter from live feed.
dustin-graham Jul 27, 2018
22e10c2
fix formatting issues.
dustin-graham Jul 27, 2018
858b2aa
restore normal camera functionality.
dustin-graham Jul 27, 2018
5bbbcbc
Merge remote-tracking branch 'upstream/master' into live_preview_came…
dustin-graham Aug 9, 2018
e91181f
present detection boundaries in Flutter from camera plugin integration
dustin-graham Aug 9, 2018
@@ -21,19 +21,16 @@
import android.media.MediaRecorder;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import io.flutter.plugin.common.EventChannel;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
import io.flutter.plugin.common.MethodChannel.Result;
import io.flutter.plugin.common.PluginRegistry;
import io.flutter.plugin.common.PluginRegistry.Registrar;
import io.flutter.view.FlutterView;
import android.view.WindowManager;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
@@ -46,6 +43,15 @@
import java.util.List;
import java.util.Map;

import io.flutter.plugin.common.EventChannel;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
import io.flutter.plugin.common.MethodChannel.Result;
import io.flutter.plugin.common.PluginRegistry;
import io.flutter.plugin.common.PluginRegistry.Registrar;
import io.flutter.view.FlutterView;

public class CameraPlugin implements MethodCallHandler {

private static final int CAMERA_REQUEST_ID = 513469796;
@@ -68,11 +74,16 @@ public class CameraPlugin implements MethodCallHandler {
// The code to run after requesting camera permissions.
private Runnable cameraPermissionContinuation;
private boolean requestingPermission;
@Nullable private PreviewImageDelegate previewImageDelegate;
private WindowManager windowManager;

private CameraPlugin(Registrar registrar, FlutterView view, Activity activity) {
this.registrar = registrar;
this.view = view;
this.activity = activity;
if (activity instanceof PreviewImageDelegate) {
this.previewImageDelegate = (PreviewImageDelegate) activity;
}

registrar.addRequestPermissionsResultListener(new CameraRequestPermissionsListener());

@@ -239,7 +250,8 @@ private class Camera {
private CameraDevice cameraDevice;
private CameraCaptureSession cameraCaptureSession;
private EventChannel.EventSink eventSink;
private ImageReader imageReader;
private ImageReader previewImageReader;
private ImageReader captureImageReader;
private int sensorOrientation;
private boolean isFrontFacing;
private String cameraName;
@@ -376,7 +388,8 @@ private void computeBestPreviewAndRecordingSize(
} else {
previewSize = goodEnough.get(0);

// Video capture size should not be greater than 1080 because MediaRecorder cannot handle higher resolutions.
// Video capture size should not be greater than 1080 because MediaRecorder cannot handle
// higher resolutions.
videoSize = goodEnough.get(0);
for (int i = goodEnough.size() - 1; i >= 0; i--) {
if (goodEnough.get(i).getHeight() <= 1080) {
@@ -419,14 +432,57 @@ private void prepareMediaRecorder(String outputFilePath) throws IOException {
mediaRecorder.prepare();
}

private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
private final ImageReader.OnImageAvailableListener imageAvailable =
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image != null) {
if (previewImageDelegate != null) {
previewImageDelegate.onImageAvailable(image, getRotation());
}
image.close();
}
}
};

/** Starts a background thread and its {@link Handler}. */
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}

/** Stops the background thread and its {@link Handler}. */
private void stopBackgroundThread() {
if (mBackgroundThread != null) {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}

private void open(@Nullable final Result result) {
if (!hasCameraPermission()) {
if (result != null) result.error("cameraPermission", "Camera permission not granted", null);
} else {
try {
imageReader =
startBackgroundThread();
// This image reader sends preview frame data to other packages that need it, such as firebase_ml_vision.
previewImageReader =
ImageReader.newInstance(
previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 4);
captureImageReader =
ImageReader.newInstance(
captureSize.getWidth(), captureSize.getHeight(), ImageFormat.JPEG, 2);
previewImageReader.setOnImageAvailableListener(imageAvailable, mBackgroundHandler);
cameraManager.openCamera(
cameraName,
new CameraDevice.StateCallback() {
@@ -519,7 +575,7 @@ private void takePicture(String filePath, @NonNull final Result result) {
return;
}

imageReader.setOnImageAvailableListener(
captureImageReader.setOnImageAvailableListener(
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
@@ -537,7 +593,7 @@ public void onImageAvailable(ImageReader reader) {
try {
final CaptureRequest.Builder captureBuilder =
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(imageReader.getSurface());
captureBuilder.addTarget(captureImageReader.getSurface());
int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
int displayOrientation = ORIENTATIONS.get(displayRotation);
if (isFrontFacing) displayOrientation = -displayOrientation;
@@ -667,7 +723,12 @@ private void startPreview() throws CameraAccessException {
surfaces.add(previewSurface);
captureRequestBuilder.addTarget(previewSurface);

surfaces.add(imageReader.getSurface());
surfaces.add(captureImageReader.getSurface());

// This is so we can send preview frames out to other plugins that need a live feed of frames.
Surface previewImageReaderSurface = previewImageReader.getSurface();
surfaces.add(previewImageReaderSurface);
captureRequestBuilder.addTarget(previewImageReaderSurface);

cameraDevice.createCaptureSession(
surfaces,
@@ -720,20 +781,60 @@ private void close() {
cameraDevice.close();
cameraDevice = null;
}
if (imageReader != null) {
imageReader.close();
imageReader = null;
if (previewImageReader != null) {
previewImageReader.close();
previewImageReader = null;
}
if (mediaRecorder != null) {
mediaRecorder.reset();
mediaRecorder.release();
mediaRecorder = null;
}
stopBackgroundThread();
}

private void dispose() {
close();
textureEntry.release();
}
}

private int getRotation() {
if (windowManager == null) {
windowManager = (WindowManager) activity.getSystemService(Context.WINDOW_SERVICE);
}
int degrees = 0;
int rotation = windowManager.getDefaultDisplay().getRotation();
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
default:
Log.e("ML", "Bad rotation value: $rotation");
}

try {
int angle;
int displayAngle; // TODO? setDisplayOrientation?
CameraCharacteristics cameraCharacteristics =
cameraManager.getCameraCharacteristics(camera.cameraName);
Integer orientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// back-facing
angle = (orientation - degrees + 360) % 360;
displayAngle = angle;
int translatedAngle = angle / 90;
return translatedAngle; // this corresponds to the rotation constants
} catch (CameraAccessException e) {
return 0;
}
}
}
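For reference, getRotation() returns angle / 90, i.e. a value in the range 0-3. The closing comment ("this corresponds to the rotation constants") presumably refers to ML Kit's FirebaseVisionImageMetadata rotation constants, which use the same 0-3 encoding. A minimal sketch of that assumed mapping (not part of this PR):

// Sketch only, not part of this PR: spells out the assumed correspondence
// between getRotation()'s 0-3 return value and ML Kit's rotation constants.
// Assumes firebase-ml-vision is on the classpath.
import com.google.firebase.ml.vision.common.FirebaseVisionImageMetadata;

final class RotationConstants {
  private RotationConstants() {}

  static int toFirebaseVisionRotation(int translatedAngle) {
    switch (translatedAngle) {
      case 0:
        return FirebaseVisionImageMetadata.ROTATION_0; // 0
      case 1:
        return FirebaseVisionImageMetadata.ROTATION_90; // 1
      case 2:
        return FirebaseVisionImageMetadata.ROTATION_180; // 2
      case 3:
        return FirebaseVisionImageMetadata.ROTATION_270; // 3
      default:
        throw new IllegalArgumentException("Unexpected rotation: " + translatedAngle);
    }
  }
}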
@@ -0,0 +1,7 @@
package io.flutter.plugins.camera;

import android.media.Image;

public interface PreviewImageDelegate {
void onImageAvailable(Image image, int rotation);
}
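The delegate is discovered in CameraPlugin's constructor via activity instanceof PreviewImageDelegate, so the natural implementer is the host Activity. Below is a minimal consumer sketch, not part of this PR: MainActivity and its package are hypothetical, and it assumes FirebaseVisionImage.fromMediaImage copies what it needs before the camera plugin closes the Image after the callback returns.

package io.flutter.plugins.cameraexample; // hypothetical example-app package

import android.media.Image;
import com.google.firebase.ml.vision.common.FirebaseVisionImage;
import io.flutter.app.FlutterActivity;
import io.flutter.plugins.camera.PreviewImageDelegate;

// Sketch of a host Activity receiving preview frames from the camera plugin.
// CameraPlugin checks `activity instanceof PreviewImageDelegate`, so
// implementing the interface here is enough to start receiving callbacks.
public class MainActivity extends FlutterActivity implements PreviewImageDelegate {
  @Override
  public void onImageAvailable(Image image, int rotation) {
    // The camera plugin closes the Image as soon as this callback returns, so
    // convert it synchronously. `rotation` is already in the 0-3 range used by
    // ML Kit's FirebaseVisionImageMetadata rotation constants.
    FirebaseVisionImage visionImage = FirebaseVisionImage.fromMediaImage(image, rotation);
    // Hand visionImage to a Detector (see the Detector changes below).
  }
}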
@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>IDEDidComputeMac32BitWarning</key>
<true/>
</dict>
</plist>
3 changes: 2 additions & 1 deletion packages/firebase_ml_vision/android/build.gradle
@@ -25,7 +25,7 @@ android {
compileSdkVersion 27

defaultConfig {
minSdkVersion 16
minSdkVersion 21
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
lintOptions {
@@ -34,5 +34,6 @@ android {
dependencies {
api 'com.google.firebase:firebase-ml-vision:16.0.0'
api 'com.google.firebase:firebase-ml-vision-image-label-model:15.0.0'
implementation project(':camera')
}
}
@@ -1,3 +1,5 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="io.flutter.plugins.firebasemlvision">

<uses-permission android:name="android.permission.CAMERA" />
</manifest>
@@ -3,27 +3,32 @@
import android.graphics.Point;
import android.graphics.Rect;
import android.support.annotation.NonNull;
import android.util.Size;

import com.google.android.gms.tasks.OnFailureListener;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.ml.vision.FirebaseVision;
import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcode;
import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetector;
import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetectorOptions;
import com.google.firebase.ml.vision.common.FirebaseVisionImage;
import io.flutter.plugin.common.MethodChannel;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class BarcodeDetector implements Detector {
public class BarcodeDetector extends Detector {
public static final BarcodeDetector instance = new BarcodeDetector();

private BarcodeDetector() {}

@Override
public void handleDetection(
FirebaseVisionImage image, Map<String, Object> options, final MethodChannel.Result result) {
void processImage(
final FirebaseVisionImage image,
final Size imageSize,
Map<String, Object> options,
final OperationFinishedCallback finishedCallback) {

FirebaseVisionBarcodeDetector detector =
FirebaseVision.getInstance().getVisionBarcodeDetector(parseOptions(options));
@@ -32,6 +37,7 @@ public void handleDetection(
.detectInImage(image)
.addOnSuccessListener(
new OnSuccessListener<List<FirebaseVisionBarcode>>() {
@SuppressWarnings("ConstantConditions")
@Override
public void onSuccess(List<FirebaseVisionBarcode> firebaseVisionBarcodes) {
List<Map<String, Object>> barcodes = new ArrayList<>();
@@ -210,14 +216,16 @@ public void onSuccess(List<FirebaseVisionBarcode> firebaseVisionBarcodes) {

barcodes.add(barcodeMap);
}
result.success(barcodes);
finishedCallback.success(BarcodeDetector.this, barcodes, imageSize);
}
})
.addOnFailureListener(
new OnFailureListener() {
@Override
public void onFailure(@NonNull Exception exception) {
result.error("barcodeDetectorError", exception.getLocalizedMessage(), null);
finishedCallback.error(
new DetectorException(
"barcodeDetectorError", exception.getLocalizedMessage(), null));
}
});
}
@@ -1,10 +1,55 @@
package io.flutter.plugins.firebasemlvision;

import android.util.Size;

import com.google.firebase.ml.vision.common.FirebaseVisionImage;
import io.flutter.plugin.common.MethodChannel;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

public abstract class Detector {

public interface OperationFinishedCallback {
void success(Detector detector, Object data, Size size);

void error(DetectorException e);
}

private final AtomicBoolean shouldThrottle = new AtomicBoolean(false);

public void handleDetection(
final FirebaseVisionImage image,
final Size imageSize,
Map<String, Object> options,
final OperationFinishedCallback finishedCallback) {
if (shouldThrottle.get()) {
return;
}
processImage(
image,
imageSize,
options,
new OperationFinishedCallback() {
@Override
public void success(Detector detector, Object data, Size size) {
shouldThrottle.set(false);
finishedCallback.success(detector, data, size);
}

@Override
public void error(DetectorException e) {
shouldThrottle.set(false);
finishedCallback.error(e);
}
});

// Begin throttling until this frame of input has been processed, either in onSuccess or
// onFailure.
shouldThrottle.set(true);
}

interface Detector {
void handleDetection(
FirebaseVisionImage image, Map<String, Object> options, final MethodChannel.Result result);
abstract void processImage(
FirebaseVisionImage image,
Size imageSize,
Map<String, Object> options,
OperationFinishedCallback finishedCallback);
}
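To illustrate the new contract, here is a sketch of a hypothetical no-op subclass, not part of this PR. The point it shows: processImage reports exactly once through OperationFinishedCallback, and because handleDetection raises the throttle only after processImage returns, the callback is expected to arrive asynchronously (as ML Kit tasks do); any frame arriving in the meantime is silently dropped.

package io.flutter.plugins.firebasemlvision;

import android.os.Handler;
import android.os.Looper;
import android.util.Size;
import com.google.firebase.ml.vision.common.FirebaseVisionImage;
import java.util.Collections;
import java.util.Map;

// Hypothetical subclass, sketch only. Real detectors (BarcodeDetector,
// TextDetector, ...) hand the image to ML Kit, which reports back
// asynchronously; the contract that matters is that success() or error() is
// called exactly once so the base class can clear the throttle and accept the
// next preview frame.
public class NoOpDetector extends Detector {
  @Override
  void processImage(
      final FirebaseVisionImage image,
      final Size imageSize,
      Map<String, Object> options,
      final OperationFinishedCallback finishedCallback) {
    // Pretend detection finished with an empty result list, reporting on the
    // main thread to mimic ML Kit's asynchronous completion.
    new Handler(Looper.getMainLooper())
        .post(
            new Runnable() {
              @Override
              public void run() {
                finishedCallback.success(NoOpDetector.this, Collections.emptyList(), imageSize);
              }
            });
  }
}

Dropping in-flight frames this way keeps detection from backing up behind the camera's preview frame rate on slower devices.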