diff --git a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java index 1d4bedefbbf4..17d80727a45d 100644 --- a/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java +++ b/packages/camera/android/src/main/java/io/flutter/plugins/camera/CameraPlugin.java @@ -21,19 +21,16 @@ import android.media.MediaRecorder; import android.os.Build; import android.os.Bundle; +import android.os.Handler; +import android.os.HandlerThread; import android.support.annotation.NonNull; import android.support.annotation.Nullable; +import android.util.Log; import android.util.Size; import android.util.SparseIntArray; import android.view.Surface; -import io.flutter.plugin.common.EventChannel; -import io.flutter.plugin.common.MethodCall; -import io.flutter.plugin.common.MethodChannel; -import io.flutter.plugin.common.MethodChannel.MethodCallHandler; -import io.flutter.plugin.common.MethodChannel.Result; -import io.flutter.plugin.common.PluginRegistry; -import io.flutter.plugin.common.PluginRegistry.Registrar; -import io.flutter.view.FlutterView; +import android.view.WindowManager; + import java.io.File; import java.io.FileOutputStream; import java.io.IOException; @@ -46,6 +43,15 @@ import java.util.List; import java.util.Map; +import io.flutter.plugin.common.EventChannel; +import io.flutter.plugin.common.MethodCall; +import io.flutter.plugin.common.MethodChannel; +import io.flutter.plugin.common.MethodChannel.MethodCallHandler; +import io.flutter.plugin.common.MethodChannel.Result; +import io.flutter.plugin.common.PluginRegistry; +import io.flutter.plugin.common.PluginRegistry.Registrar; +import io.flutter.view.FlutterView; + public class CameraPlugin implements MethodCallHandler { private static final int CAMERA_REQUEST_ID = 513469796; @@ -68,11 +74,16 @@ public class CameraPlugin implements MethodCallHandler { // The code to run after requesting camera permissions. private Runnable cameraPermissionContinuation; private boolean requestingPermission; + @Nullable private PreviewImageDelegate previewImageDelegate; + private WindowManager windowManager; private CameraPlugin(Registrar registrar, FlutterView view, Activity activity) { this.registrar = registrar; this.view = view; this.activity = activity; + if (activity instanceof PreviewImageDelegate) { + this.previewImageDelegate = (PreviewImageDelegate) activity; + } registrar.addRequestPermissionsResultListener(new CameraRequestPermissionsListener()); @@ -239,7 +250,8 @@ private class Camera { private CameraDevice cameraDevice; private CameraCaptureSession cameraCaptureSession; private EventChannel.EventSink eventSink; - private ImageReader imageReader; + private ImageReader previewImageReader; + private ImageReader captureImageReader; private int sensorOrientation; private boolean isFrontFacing; private String cameraName; @@ -376,7 +388,8 @@ private void computeBestPreviewAndRecordingSize( } else { previewSize = goodEnough.get(0); - // Video capture size should not be greater than 1080 because MediaRecorder cannot handle higher resolutions. + // Video capture size should not be greater than 1080 because MediaRecorder cannot handle + // higher resolutions. 
videoSize = goodEnough.get(0);
         for (int i = goodEnough.size() - 1; i >= 0; i--) {
           if (goodEnough.get(i).getHeight() <= 1080) {
@@ -419,14 +432,57 @@ private void prepareMediaRecorder(String outputFilePath) throws IOException {
       mediaRecorder.prepare();
     }

+    private Handler mBackgroundHandler;
+    private HandlerThread mBackgroundThread;
+    private final ImageReader.OnImageAvailableListener imageAvailable =
+        new ImageReader.OnImageAvailableListener() {
+          @Override
+          public void onImageAvailable(ImageReader reader) {
+            Image image = reader.acquireLatestImage();
+            if (image != null) {
+              if (previewImageDelegate != null) {
+                previewImageDelegate.onImageAvailable(image, getRotation());
+              }
+              image.close();
+            }
+          }
+        };
+
+    /** Starts a background thread and its {@link Handler}. */
+    private void startBackgroundThread() {
+      mBackgroundThread = new HandlerThread("CameraBackground");
+      mBackgroundThread.start();
+      mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
+    }
+
+    /** Stops the background thread and its {@link Handler}. */
+    private void stopBackgroundThread() {
+      if (mBackgroundThread != null) {
+        mBackgroundThread.quitSafely();
+        try {
+          mBackgroundThread.join();
+          mBackgroundThread = null;
+          mBackgroundHandler = null;
+        } catch (InterruptedException e) {
+          e.printStackTrace();
+        }
+      }
+    }
+
     private void open(@Nullable final Result result) {
       if (!hasCameraPermission()) {
         if (result != null) result.error("cameraPermission", "Camera permission not granted", null);
       } else {
         try {
-          imageReader =
+          startBackgroundThread();
+          // This ImageReader supplies preview frames to other plugins that need a live feed,
+          // such as firebase_ml_vision.
+          previewImageReader =
+              ImageReader.newInstance(
+                  previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 4);
+          captureImageReader =
               ImageReader.newInstance(
                   captureSize.getWidth(), captureSize.getHeight(), ImageFormat.JPEG, 2);
+          previewImageReader.setOnImageAvailableListener(imageAvailable, mBackgroundHandler);
           cameraManager.openCamera(
               cameraName,
               new CameraDevice.StateCallback() {
@@ -519,7 +575,7 @@ private void takePicture(String filePath, @NonNull final Result result) {
         return;
       }

-      imageReader.setOnImageAvailableListener(
+      captureImageReader.setOnImageAvailableListener(
           new ImageReader.OnImageAvailableListener() {
             @Override
             public void onImageAvailable(ImageReader reader) {
@@ -537,7 +593,7 @@ public void onImageAvailable(ImageReader reader) {
       try {
         final CaptureRequest.Builder captureBuilder =
             cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
-        captureBuilder.addTarget(imageReader.getSurface());
+        captureBuilder.addTarget(captureImageReader.getSurface());
         int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
         int displayOrientation = ORIENTATIONS.get(displayRotation);
         if (isFrontFacing) displayOrientation = -displayOrientation;
@@ -667,7 +723,12 @@ private void startPreview() throws CameraAccessException {
       surfaces.add(previewSurface);
       captureRequestBuilder.addTarget(previewSurface);

-      surfaces.add(imageReader.getSurface());
+      surfaces.add(captureImageReader.getSurface());
+
+      // Also target the preview ImageReader so other plugins can receive a live feed of frames.
+      Surface previewImageReaderSurface = previewImageReader.getSurface();
+      surfaces.add(previewImageReaderSurface);
+      captureRequestBuilder.addTarget(previewImageReaderSurface);

       cameraDevice.createCaptureSession(
           surfaces,
@@ -720,15 +781,16 @@ private void close() {
         cameraDevice.close();
         cameraDevice = null;
       }
-      if (imageReader != null) {
-        imageReader.close();
-        imageReader = null;
+      if (previewImageReader != null) {
+        previewImageReader.close();
+        previewImageReader = null;
       }
+      if (captureImageReader != null) {
+        captureImageReader.close();
+        captureImageReader = null;
+      }
       if (mediaRecorder != null) {
         mediaRecorder.reset();
         mediaRecorder.release();
         mediaRecorder = null;
       }
+      stopBackgroundThread();
     }

     private void dispose() {
@@ -736,4 +798,43 @@ private void dispose() {
       textureEntry.release();
     }
   }
+
+  private int getRotation() {
+    if (windowManager == null) {
+      windowManager = (WindowManager) activity.getSystemService(Context.WINDOW_SERVICE);
+    }
+    int degrees = 0;
+    int rotation = windowManager.getDefaultDisplay().getRotation();
+    switch (rotation) {
+      case Surface.ROTATION_0:
+        degrees = 0;
+        break;
+      case Surface.ROTATION_90:
+        degrees = 90;
+        break;
+      case Surface.ROTATION_180:
+        degrees = 180;
+        break;
+      case Surface.ROTATION_270:
+        degrees = 270;
+        break;
+      default:
+        Log.e("ML", "Bad rotation value: " + rotation);
+    }
+
+    try {
+      CameraCharacteristics cameraCharacteristics =
+          cameraManager.getCameraCharacteristics(camera.cameraName);
+      Integer orientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+      // back-facing
+      int angle = (orientation - degrees + 360) % 360;
+      // Dividing by 90 maps the angle onto the FirebaseVisionImageMetadata rotation constants.
+      return angle / 90;
+    } catch (CameraAccessException e) {
+      return 0;
+    }
+  }
 }
diff --git a/packages/camera/android/src/main/java/io/flutter/plugins/camera/PreviewImageDelegate.java b/packages/camera/android/src/main/java/io/flutter/plugins/camera/PreviewImageDelegate.java
new file mode 100644
index 000000000000..ba53aa9fc21b
--- /dev/null
+++ b/packages/camera/android/src/main/java/io/flutter/plugins/camera/PreviewImageDelegate.java
@@ -0,0 +1,7 @@
+package io.flutter.plugins.camera;
+
+import android.media.Image;
+
+public interface PreviewImageDelegate {
+  void onImageAvailable(Image image, int rotation);
+}
diff --git a/packages/camera/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/packages/camera/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
new file mode 100644
index 000000000000..18d981003d68
--- /dev/null
+++ b/packages/camera/example/ios/Runner.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist
@@ -0,0 +1,8 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>IDEDidComputeMac32BitWarning</key>
+	<true/>
+</dict>
+</plist>
diff --git a/packages/firebase_ml_vision/android/build.gradle b/packages/firebase_ml_vision/android/build.gradle
index e24abb469cec..e17215ec4ad3 100644
--- a/packages/firebase_ml_vision/android/build.gradle
+++ b/packages/firebase_ml_vision/android/build.gradle
@@ -25,7 +25,7 @@ android {
     compileSdkVersion 27

     defaultConfig {
-        minSdkVersion 16
+        minSdkVersion 21
         testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
     }
     lintOptions {
@@ -34,5 +34,6 @@ android {
     dependencies {
         api 'com.google.firebase:firebase-ml-vision:16.0.0'
         api 'com.google.firebase:firebase-ml-vision-image-label-model:15.0.0'
+        implementation project(':camera')
     }
 }
diff --git a/packages/firebase_ml_vision/android/src/main/AndroidManifest.xml b/packages/firebase_ml_vision/android/src/main/AndroidManifest.xml
index d43e3337d812..f5c4e1cb9f35 100644
--- a/packages/firebase_ml_vision/android/src/main/AndroidManifest.xml
+++ b/packages/firebase_ml_vision/android/src/main/AndroidManifest.xml
@@ -1,3 +1,5 @@
+
+
diff --git a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/BarcodeDetector.java
b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/BarcodeDetector.java
index ebf6753e28e3..060c96b09810 100644
--- a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/BarcodeDetector.java
+++ b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/BarcodeDetector.java
@@ -3,6 +3,8 @@
 import android.graphics.Point;
 import android.graphics.Rect;
 import android.support.annotation.NonNull;
+import android.util.Size;
+
 import com.google.android.gms.tasks.OnFailureListener;
 import com.google.android.gms.tasks.OnSuccessListener;
 import com.google.firebase.ml.vision.FirebaseVision;
@@ -10,20 +12,23 @@
 import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetector;
 import com.google.firebase.ml.vision.barcode.FirebaseVisionBarcodeDetectorOptions;
 import com.google.firebase.ml.vision.common.FirebaseVisionImage;
-import io.flutter.plugin.common.MethodChannel;
+
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

-class BarcodeDetector implements Detector {
+public class BarcodeDetector extends Detector {
   public static final BarcodeDetector instance = new BarcodeDetector();

   private BarcodeDetector() {}

   @Override
-  public void handleDetection(
-      FirebaseVisionImage image, Map<String, Object> options, final MethodChannel.Result result) {
+  void processImage(
+      final FirebaseVisionImage image,
+      final Size imageSize,
+      Map<String, Object> options,
+      final OperationFinishedCallback finishedCallback) {
     FirebaseVisionBarcodeDetector detector =
         FirebaseVision.getInstance().getVisionBarcodeDetector(parseOptions(options));

     detector
         .detectInImage(image)
         .addOnSuccessListener(
             new OnSuccessListener<List<FirebaseVisionBarcode>>() {
+              @SuppressWarnings("ConstantConditions")
               @Override
               public void onSuccess(List<FirebaseVisionBarcode> firebaseVisionBarcodes) {
                 List<Map<String, Object>> barcodes = new ArrayList<>();
@@ -210,14 +216,16 @@ public void onSuccess(List<FirebaseVisionBarcode> firebaseVisionBarcodes) {
                   barcodes.add(barcodeMap);
                 }

-                result.success(barcodes);
+                finishedCallback.success(BarcodeDetector.this, barcodes, imageSize);
               }
             })
         .addOnFailureListener(
             new OnFailureListener() {
               @Override
               public void onFailure(@NonNull Exception exception) {
-                result.error("barcodeDetectorError", exception.getLocalizedMessage(), null);
+                finishedCallback.error(
+                    new DetectorException(
+                        "barcodeDetectorError", exception.getLocalizedMessage(), null));
               }
             });
   }
diff --git a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/Detector.java b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/Detector.java
index 06311c1fdb4a..a450dcac0e02 100644
--- a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/Detector.java
+++ b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/Detector.java
@@ -1,10 +1,55 @@
 package io.flutter.plugins.firebasemlvision;

+import android.util.Size;
+
 import com.google.firebase.ml.vision.common.FirebaseVisionImage;
-import io.flutter.plugin.common.MethodChannel;

 import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+public abstract class Detector {
+
+  public interface OperationFinishedCallback {
+    void success(Detector detector, Object data, Size size);
+
+    void error(DetectorException e);
+  }
+
+  private final AtomicBoolean shouldThrottle = new AtomicBoolean(false);
+
+  public void handleDetection(
+      final FirebaseVisionImage image,
+      final Size imageSize,
+      Map<String, Object> options,
+      final OperationFinishedCallback finishedCallback) {
+    if (shouldThrottle.get()) {
+      return;
+    }
+    // Throttle until this frame of input has been processed, in either success or error. The
+    // flag is set before the frame is handed off so that a detector which finishes quickly
+    // cannot clear it before it is set.
+    shouldThrottle.set(true);
+    processImage(
+        image,
+        imageSize,
+        options,
+        new OperationFinishedCallback() {
+          @Override
+          public void success(Detector detector, Object data, Size size) {
+            shouldThrottle.set(false);
+            finishedCallback.success(detector, data, size);
+          }
+
+          @Override
+          public void error(DetectorException e) {
+            shouldThrottle.set(false);
+            finishedCallback.error(e);
+          }
+        });
+  }

-interface Detector {
-  void handleDetection(
-      FirebaseVisionImage image, Map<String, Object> options, final MethodChannel.Result result);
+  abstract void processImage(
+      FirebaseVisionImage image,
+      Size imageSize,
+      Map<String, Object> options,
+      OperationFinishedCallback finishedCallback);
 }
diff --git a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/DetectorException.java b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/DetectorException.java
new file mode 100644
index 000000000000..9b2b3f47c71c
--- /dev/null
+++ b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/DetectorException.java
@@ -0,0 +1,29 @@
+package io.flutter.plugins.firebasemlvision;
+
+import android.support.annotation.NonNull;
+
+import io.flutter.plugin.common.EventChannel;
+import io.flutter.plugin.common.MethodChannel;
+
+public class DetectorException extends Exception {
+  private final String detectorExceptionType;
+  private final String detectorExceptionDescription;
+  private final Object exceptionData;
+
+  public DetectorException(
+      String detectorExceptionType, String detectorExceptionDescription, Object exceptionData) {
+    super(detectorExceptionType + ": " + detectorExceptionDescription);
+    this.detectorExceptionType = detectorExceptionType;
+    this.detectorExceptionDescription = detectorExceptionDescription;
+    this.exceptionData = exceptionData;
+  }
+
+  public void sendError(@NonNull EventChannel.EventSink eventSink) {
+    eventSink.error(detectorExceptionType, detectorExceptionDescription, exceptionData);
+  }
+
+  public void sendError(MethodChannel.Result result) {
+    result.error(detectorExceptionType, detectorExceptionDescription, exceptionData);
+  }
+}
diff --git a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/FaceDetector.java b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/FaceDetector.java
index 4f416f2f0272..8ae9bb69528a 100644
--- a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/FaceDetector.java
+++ b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/FaceDetector.java
@@ -1,6 +1,8 @@
 package io.flutter.plugins.firebasemlvision;

 import android.support.annotation.NonNull;
+import android.util.Size;
+
 import com.google.android.gms.tasks.OnFailureListener;
 import com.google.android.gms.tasks.OnSuccessListener;
 import com.google.firebase.ml.vision.FirebaseVision;
@@ -9,21 +11,22 @@
 import com.google.firebase.ml.vision.face.FirebaseVisionFaceDetector;
 import com.google.firebase.ml.vision.face.FirebaseVisionFaceDetectorOptions;
 import com.google.firebase.ml.vision.face.FirebaseVisionFaceLandmark;
-import io.flutter.plugin.common.MethodChannel;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

-class FaceDetector implements Detector {
+public class FaceDetector extends Detector {
   public static final FaceDetector instance = new FaceDetector();

   private FaceDetector() {}

   @Override
-  public void handleDetection(
-      FirebaseVisionImage image, Map<String, Object> options, final MethodChannel.Result result) {
-
+  void processImage(
+      FirebaseVisionImage image,
+      final Size imageSize,
+      Map<String, Object> options,
+      final OperationFinishedCallback finishedCallback) {
     FirebaseVisionFaceDetector detector;
     if (options == null) {
       detector = FirebaseVision.getInstance().getVisionFaceDetector();
     } else {
@@ -72,14 +75,16 @@ public void onSuccess(List<FirebaseVisionFace> firebaseVisionFaces) {
                   faces.add(faceData);
                 }

-                result.success(faces);
+                finishedCallback.success(FaceDetector.this, faces, imageSize);
               }
             })
         .addOnFailureListener(
             new OnFailureListener() {
               @Override
               public void onFailure(@NonNull Exception exception) {
-                result.error("faceDetectorError", exception.getLocalizedMessage(), null);
+                finishedCallback.error(
+                    new DetectorException(
+                        "faceDetectorError", exception.getLocalizedMessage(), null));
               }
             });
   }
diff --git a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/FirebaseMlVisionPlugin.java b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/FirebaseMlVisionPlugin.java
index b88306e81524..29d438662da2 100644
--- a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/FirebaseMlVisionPlugin.java
+++ b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/FirebaseMlVisionPlugin.java
@@ -1,22 +1,91 @@
 package io.flutter.plugins.firebasemlvision;

+import android.app.Activity;
+import android.media.Image;
 import android.net.Uri;
+import android.support.annotation.Nullable;
+import android.util.Log;
+import android.util.Size;
+
 import com.google.firebase.ml.vision.common.FirebaseVisionImage;
+import com.google.firebase.ml.vision.common.FirebaseVisionImageMetadata;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import io.flutter.plugin.common.EventChannel;
 import io.flutter.plugin.common.MethodCall;
 import io.flutter.plugin.common.MethodChannel;
 import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
 import io.flutter.plugin.common.MethodChannel.Result;
 import io.flutter.plugin.common.PluginRegistry.Registrar;
-import java.io.File;
-import java.io.IOException;
-import java.util.Map;
+import io.flutter.plugins.camera.PreviewImageDelegate;
+import io.flutter.plugins.firebasemlvision.live.CameraPreviewImageProvider;

 /** FirebaseMlVisionPlugin */
-public class FirebaseMlVisionPlugin implements MethodCallHandler {
-  private Registrar registrar;
+public class FirebaseMlVisionPlugin implements MethodCallHandler, PreviewImageDelegate {
+  public static final int CAMERA_REQUEST_ID = 928291720;
+  private final Registrar registrar;
+  private final Activity activity;
+  @Nullable private EventChannel.EventSink eventSink;
+  @Nullable private Detector liveViewDetector;
+  @Nullable private Map<String, Object> liveViewOptions;
+
+  private final Detector.OperationFinishedCallback liveDetectorFinishedCallback =
+      new Detector.OperationFinishedCallback() {
+        @Override
+        public void success(Detector detector, Object data, Size imageSize) {
+          if (eventSink == null) return;
+          Log.d("ML", "detector finished");
+          shouldThrottle.set(false);
+          Map<String, Object> event = new HashMap<>();
event.put("eventType", "detection"); + String dataType; + String dataLabel; + if (detector instanceof BarcodeDetector) { + dataType = "barcode"; + } else if (detector instanceof TextDetector) { + dataType = "text"; + } else if (detector instanceof FaceDetector) { + dataType = "face"; + } else if (detector instanceof LabelDetector) { + dataType = "label"; + } else { + // unsupported detector + return; + } + event.put("detectionType", dataType); + event.put("data", data); + Map sizeMap = new HashMap<>(); + sizeMap.put("width", imageSize.getWidth()); + sizeMap.put("height", imageSize.getHeight()); + event.put("imageSize", sizeMap); + eventSink.success(event); + } + + @Override + public void error(DetectorException e) { + Log.d("ML", "detector error"); + shouldThrottle.set(false); + if (eventSink != null) { + e.sendError(eventSink); + } + } + }; + + // @Nullable private LegacyCamera camera; private FirebaseMlVisionPlugin(Registrar registrar) { this.registrar = registrar; + this.activity = registrar.activity(); + registerEventChannel(); + if (activity instanceof CameraPreviewImageProvider) { + ((CameraPreviewImageProvider) activity).setImageDelegate(this); + } } /** Plugin registration. */ @@ -26,15 +95,52 @@ public static void registerWith(Registrar registrar) { channel.setMethodCallHandler(new FirebaseMlVisionPlugin(registrar)); } + private void registerEventChannel() { + new EventChannel(registrar.messenger(), "plugins.flutter.io/firebase_ml_vision/liveViewEvents") + .setStreamHandler( + new EventChannel.StreamHandler() { + @Override + public void onListen(Object arguments, EventChannel.EventSink eventSink) { + FirebaseMlVisionPlugin.this.eventSink = eventSink; + } + + @Override + public void onCancel(Object arguments) { + FirebaseMlVisionPlugin.this.eventSink = null; + } + }); + } + @Override - public void onMethodCall(MethodCall call, Result result) { + public void onMethodCall(MethodCall call, final Result result) { Map options = call.argument("options"); FirebaseVisionImage image; switch (call.method) { + case "LiveView#setDetector": + liveViewOptions = options; + String detectorType = call.argument("detectorType"); + switch (detectorType) { + case "text": + liveViewDetector = TextDetector.instance; + break; + case "barcode": + liveViewDetector = BarcodeDetector.instance; + break; + case "face": + liveViewDetector = FaceDetector.instance; + break; + case "label": + liveViewDetector = LabelDetector.instance; + default: + liveViewDetector = TextDetector.instance; + } + result.success(null); + break; case "BarcodeDetector#detectInImage": try { image = filePathToVisionImage((String) call.argument("path")); - BarcodeDetector.instance.handleDetection(image, options, result); + BarcodeDetector.instance.handleDetection( + image, new Size(0, 0), options, handleDetection(result)); } catch (IOException e) { result.error("barcodeDetectorIOError", e.getLocalizedMessage(), null); } catch (Exception e) { @@ -44,7 +150,8 @@ public void onMethodCall(MethodCall call, Result result) { case "FaceDetector#detectInImage": try { image = filePathToVisionImage((String) call.argument("path")); - FaceDetector.instance.handleDetection(image, options, result); + FaceDetector.instance.handleDetection( + image, new Size(0, 0), options, handleDetection(result)); } catch (IOException e) { result.error("faceDetectorIOError", e.getLocalizedMessage(), null); } catch (Exception e) { @@ -54,7 +161,8 @@ public void onMethodCall(MethodCall call, Result result) { case "LabelDetector#detectInImage": try { image = 
              filePathToVisionImage((String) call.argument("path"));
-          LabelDetector.instance.handleDetection(image, options, result);
+          LabelDetector.instance.handleDetection(
+              image, new Size(0, 0), options, handleDetection(result));
         } catch (IOException e) {
           result.error("labelDetectorIOError", e.getLocalizedMessage(), null);
         } catch (Exception e) {
@@ -64,7 +172,8 @@ public void onMethodCall(MethodCall call, Result result) {
       case "TextDetector#detectInImage":
         try {
           image = filePathToVisionImage((String) call.argument("path"));
-          TextDetector.instance.handleDetection(image, options, result);
+          TextDetector.instance.handleDetection(
+              image, new Size(0, 0), options, handleDetection(result));
         } catch (IOException e) {
           result.error("textDetectorIOError", e.getLocalizedMessage(), null);
         } catch (Exception e) {
@@ -76,6 +185,60 @@ public void onMethodCall(MethodCall call, Result result) {
     }
   }

+  private final AtomicBoolean shouldThrottle = new AtomicBoolean(false);
+
+  @Override
+  public void onImageAvailable(Image image, int rotation) {
+    if (eventSink == null) return;
+    if (liveViewDetector == null) return;
+    if (shouldThrottle.get()) return;
+    shouldThrottle.set(true);
+    ByteBuffer imageBuffer = YUV_420_888toNV21(image);
+    FirebaseVisionImageMetadata metadata =
+        new FirebaseVisionImageMetadata.Builder()
+            .setFormat(FirebaseVisionImageMetadata.IMAGE_FORMAT_NV21)
+            .setWidth(image.getWidth())
+            .setHeight(image.getHeight())
+            .setRotation(rotation)
+            .build();
+    FirebaseVisionImage firebaseVisionImage =
+        FirebaseVisionImage.fromByteBuffer(imageBuffer, metadata);
+
+    liveViewDetector.handleDetection(
+        firebaseVisionImage,
+        new Size(image.getWidth(), image.getHeight()),
+        liveViewOptions,
+        liveDetectorFinishedCallback);
+  }
+
+  // Concatenates the Y, V and U planes into an NV21-ordered buffer. This simple concatenation
+  // is only correct when the chroma planes are interleaved (pixel stride 2) and tightly packed;
+  // fully planar images would need a stride-aware copy.
+  private static ByteBuffer YUV_420_888toNV21(Image image) {
+    ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
+    ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
+    ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
+
+    int ySize = yBuffer.remaining();
+    int uSize = uBuffer.remaining();
+    int vSize = vBuffer.remaining();
+
+    return ByteBuffer.allocate(ySize + uSize + vSize).put(yBuffer).put(vBuffer).put(uBuffer);
+  }
+
+  private Detector.OperationFinishedCallback handleDetection(final Result result) {
+    return new Detector.OperationFinishedCallback() {
+      @Override
+      public void success(
+          Detector detector, Object data, Size imageSize /* ignored for file image detection */) {
+        result.success(data);
+      }
+
+      @Override
+      public void error(DetectorException e) {
+        e.sendError(result);
+      }
+    };
+  }
+
   private FirebaseVisionImage filePathToVisionImage(String path) throws IOException {
     File file = new File(path);
     return FirebaseVisionImage.fromFilePath(registrar.context(), Uri.fromFile(file));
diff --git a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/LabelDetector.java b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/LabelDetector.java
index 99a6860377f9..59c7da84636a 100644
--- a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/LabelDetector.java
+++ b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/LabelDetector.java
@@ -1,6 +1,8 @@
 package io.flutter.plugins.firebasemlvision;

 import android.support.annotation.NonNull;
+import android.util.Size;
+
 import com.google.android.gms.tasks.OnFailureListener;
 import com.google.android.gms.tasks.OnSuccessListener;
 import
com.google.firebase.ml.vision.FirebaseVision;
@@ -8,20 +10,27 @@
 import com.google.firebase.ml.vision.label.FirebaseVisionLabel;
 import com.google.firebase.ml.vision.label.FirebaseVisionLabelDetector;
 import com.google.firebase.ml.vision.label.FirebaseVisionLabelDetectorOptions;
-import io.flutter.plugin.common.MethodChannel;

 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

-class LabelDetector implements Detector {
+public class LabelDetector extends Detector {
   public static final LabelDetector instance = new LabelDetector();

   private LabelDetector() {}

+  private FirebaseVisionLabelDetectorOptions parseOptions(Map<String, Object> optionsData) {
+    float conf = (float) (double) optionsData.get("confidenceThreshold");
+    return new FirebaseVisionLabelDetectorOptions.Builder().setConfidenceThreshold(conf).build();
+  }
+
   @Override
-  public void handleDetection(
-      FirebaseVisionImage image, Map<String, Object> options, final MethodChannel.Result result) {
+  void processImage(
+      FirebaseVisionImage image,
+      final Size imageSize,
+      Map<String, Object> options,
+      final OperationFinishedCallback finishedCallback) {
     FirebaseVisionLabelDetector detector =
         FirebaseVision.getInstance().getVisionLabelDetector(parseOptions(options));
     detector
@@ -40,20 +49,16 @@ public void onSuccess(List<FirebaseVisionLabel> firebaseVisionLabels) {
                   labels.add(labelData);
                 }

-                result.success(labels);
+                finishedCallback.success(LabelDetector.this, labels, imageSize);
               }
             })
         .addOnFailureListener(
             new OnFailureListener() {
               @Override
               public void onFailure(@NonNull Exception e) {
-                result.error("labelDetectorError", e.getLocalizedMessage(), null);
+                finishedCallback.error(
+                    new DetectorException("labelDetectorError", e.getLocalizedMessage(), null));
               }
             });
   }
-
-  private FirebaseVisionLabelDetectorOptions parseOptions(Map<String, Object> optionsData) {
-    float conf = (float) (double) optionsData.get("confidenceThreshold");
-    return new FirebaseVisionLabelDetectorOptions.Builder().setConfidenceThreshold(conf).build();
-  }
 }
diff --git a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/TextDetector.java b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/TextDetector.java
index 86ca386bea2d..0a5b9733504a 100644
--- a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/TextDetector.java
+++ b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/TextDetector.java
@@ -3,27 +3,31 @@
 import android.graphics.Point;
 import android.graphics.Rect;
 import android.support.annotation.NonNull;
+import android.util.Size;
+
 import com.google.android.gms.tasks.OnFailureListener;
 import com.google.android.gms.tasks.OnSuccessListener;
 import com.google.firebase.ml.vision.FirebaseVision;
 import com.google.firebase.ml.vision.common.FirebaseVisionImage;
 import com.google.firebase.ml.vision.text.FirebaseVisionText;
 import com.google.firebase.ml.vision.text.FirebaseVisionTextDetector;
-import io.flutter.plugin.common.MethodChannel;

 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;

-public class TextDetector implements Detector {
+public class TextDetector extends Detector {
   public static final TextDetector instance = new TextDetector();
   private static FirebaseVisionTextDetector textDetector;

   private TextDetector() {}

   @Override
-  public void handleDetection(
-      FirebaseVisionImage image, Map<String, Object> options, final MethodChannel.Result result) {
+  void processImage(
+      FirebaseVisionImage image,
+      final Size
imageSize, + Map options, + final OperationFinishedCallback finishedCallback) { if (textDetector == null) textDetector = FirebaseVision.getInstance().getVisionTextDetector(); textDetector .detectInImage(image) @@ -59,14 +63,16 @@ public void onSuccess(FirebaseVisionText firebaseVisionText) { blockData.put("lines", lines); blocks.add(blockData); } - result.success(blocks); + finishedCallback.success(TextDetector.this, blocks, imageSize); } }) .addOnFailureListener( new OnFailureListener() { @Override public void onFailure(@NonNull Exception exception) { - result.error("textDetectorError", exception.getLocalizedMessage(), null); + finishedCallback.error( + new DetectorException( + "textDetectorError", exception.getLocalizedMessage(), null)); } }); } diff --git a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/Camera.java b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/Camera.java new file mode 100644 index 000000000000..6984f2f972f1 --- /dev/null +++ b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/Camera.java @@ -0,0 +1,576 @@ +package io.flutter.plugins.firebasemlvision.live; + +import android.Manifest; +import android.annotation.TargetApi; +import android.app.Activity; +import android.content.Context; +import android.content.pm.PackageManager; +import android.graphics.ImageFormat; +import android.graphics.SurfaceTexture; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.CameraMetadata; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.params.StreamConfigurationMap; +import android.media.Image; +import android.media.ImageReader; +import android.media.MediaRecorder; +import android.os.Build; +import android.os.Handler; +import android.os.HandlerThread; +import android.support.annotation.NonNull; +import android.support.annotation.Nullable; +import android.support.annotation.RequiresApi; +import android.util.Log; +import android.util.Size; +import android.util.SparseIntArray; +import android.view.Surface; +import android.view.WindowManager; + +import com.google.firebase.ml.vision.common.FirebaseVisionImage; +import com.google.firebase.ml.vision.common.FirebaseVisionImageMetadata; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicBoolean; + +import io.flutter.plugin.common.EventChannel; +import io.flutter.plugin.common.MethodChannel; +import io.flutter.plugin.common.PluginRegistry; +import io.flutter.plugins.firebasemlvision.BarcodeDetector; +import io.flutter.plugins.firebasemlvision.Detector; +import io.flutter.plugins.firebasemlvision.DetectorException; +import io.flutter.plugins.firebasemlvision.TextDetector; +import io.flutter.view.FlutterView; + +import static io.flutter.plugins.firebasemlvision.FirebaseMlVisionPlugin.CAMERA_REQUEST_ID; + +@SuppressWarnings("WeakerAccess") +@TargetApi(Build.VERSION_CODES.LOLLIPOP) +class Camera { + private static final SparseIntArray ORIENTATIONS = new SparseIntArray(4); + + static { + ORIENTATIONS.append(Surface.ROTATION_0, 90); + 
ORIENTATIONS.append(Surface.ROTATION_90, 0);
+    ORIENTATIONS.append(Surface.ROTATION_180, 270);
+    ORIENTATIONS.append(Surface.ROTATION_270, 180);
+  }
+
+  private final FlutterView.SurfaceTextureEntry textureEntry;
+  private CameraDevice cameraDevice;
+  private CameraCaptureSession cameraCaptureSession;
+  private EventChannel.EventSink eventSink;
+  private ImageReader imageReader;
+  private String cameraName;
+  private Size captureSize;
+  private Size previewSize;
+  private CaptureRequest.Builder captureRequestBuilder;
+  private MediaRecorder mediaRecorder;
+  private Runnable cameraPermissionContinuation;
+  private boolean requestingPermission;
+  private PluginRegistry.Registrar registrar;
+  private Activity activity;
+  private CameraManager cameraManager;
+  private HandlerThread mBackgroundThread;
+  private Handler mBackgroundHandler;
+  private Surface imageReaderSurface;
+  private WindowManager windowManager;
+  private Detector currentDetector = TextDetector.instance;
+
+  private final Detector.OperationFinishedCallback liveDetectorFinishedCallback =
+      new Detector.OperationFinishedCallback() {
+        @Override
+        public void success(Detector detector, Object data, Size imageSize) {
+          shouldThrottle.set(false);
+          Map<String, Object> event = new HashMap<>();
+          event.put("eventType", "recognized");
+          String dataType;
+          String dataLabel;
+          if (detector instanceof BarcodeDetector) {
+            dataType = "barcode";
+            dataLabel = "barcodeData";
+          } else if (detector instanceof TextDetector) {
+            dataType = "text";
+            dataLabel = "textData";
+          } else {
+            // unsupported live detector
+            return;
+          }
+          event.put("recognitionType", dataType);
+          Map<String, Object> sizeMap = new HashMap<>();
+          sizeMap.put("width", imageSize.getWidth());
+          sizeMap.put("height", imageSize.getHeight());
+          event.put("imageSize", sizeMap);
+          event.put(dataLabel, data);
+          eventSink.success(event);
+        }
+
+        @Override
+        public void error(DetectorException e) {
+          shouldThrottle.set(false);
+          e.sendError(eventSink);
+        }
+      };
+
+  public Camera(
+      PluginRegistry.Registrar registrar,
+      final String cameraName,
+      @NonNull final String resolutionPreset,
+      @NonNull final MethodChannel.Result result) {
+
+    this.activity = registrar.activity();
+    this.cameraManager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
+    this.registrar = registrar;
+    this.cameraName = cameraName;
+    textureEntry = registrar.view().createSurfaceTexture();
+
+    registerEventChannel();
+
+    try {
+      Size minPreviewSize;
+      switch (resolutionPreset) {
+        case "high":
+          minPreviewSize = new Size(1024, 768);
+          break;
+        case "medium":
+          minPreviewSize = new Size(640, 480);
+          break;
+        case "low":
+          minPreviewSize = new Size(320, 240);
+          break;
+        default:
+          throw new IllegalArgumentException("Unknown preset: " + resolutionPreset);
+      }
+
+      CameraCharacteristics cameraCharacteristics =
+          cameraManager.getCameraCharacteristics(cameraName);
+      StreamConfigurationMap streamConfigurationMap =
+          cameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+      computeBestCaptureSize(streamConfigurationMap);
+      computeBestPreviewSize(streamConfigurationMap, minPreviewSize, captureSize);
+
+      if (cameraPermissionContinuation != null) {
+        result.error("cameraPermission", "Camera permission request ongoing", null);
+      }
+      cameraPermissionContinuation =
+          new Runnable() {
+            @Override
+            public void run() {
+              cameraPermissionContinuation = null;
+              if (!hasCameraPermission()) {
+                result.error(
+                    "cameraPermission", "Camera permission not granted", null);
+                return;
+              }
+              open(result);
+            }
+          };
+      requestingPermission = false;
+      if (hasCameraPermission() /* && hasAudioPermission()*/) {
+        cameraPermissionContinuation.run();
+      } else {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+          requestingPermission = true;
+          registrar
+              .activity()
+              .requestPermissions(new String[] {Manifest.permission.CAMERA}, CAMERA_REQUEST_ID);
+        }
+      }
+    } catch (CameraAccessException e) {
+      result.error("CameraAccess", e.getMessage(), null);
+    } catch (IllegalArgumentException e) {
+      result.error("IllegalArgumentException", e.getMessage(), null);
+    }
+  }
+
+  public void continueRequestingPermissions() {
+    cameraPermissionContinuation.run();
+  }
+
+  public boolean getRequestingPermission() {
+    return requestingPermission;
+  }
+
+  public void setRequestingPermission(boolean isRequesting) {
+    requestingPermission = isRequesting;
+  }
+
+  private void registerEventChannel() {
+    new EventChannel(
+            registrar.messenger(),
+            "plugins.flutter.io/firebase_ml_vision/liveViewEvents" + textureEntry.id())
+        .setStreamHandler(
+            new EventChannel.StreamHandler() {
+              @Override
+              public void onListen(Object arguments, EventChannel.EventSink eventSink) {
+                Camera.this.eventSink = eventSink;
+              }
+
+              @Override
+              public void onCancel(Object arguments) {
+                Camera.this.eventSink = null;
+              }
+            });
+  }
+
+  private boolean hasCameraPermission() {
+    return Build.VERSION.SDK_INT < Build.VERSION_CODES.M
+        || activity.checkSelfPermission(Manifest.permission.CAMERA)
+            == PackageManager.PERMISSION_GRANTED;
+  }
+
+  @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+  private void computeBestPreviewSize(
+      StreamConfigurationMap streamConfigurationMap, Size minPreviewSize, Size captureSize) {
+    Size[] sizes = streamConfigurationMap.getOutputSizes(SurfaceTexture.class);
+    float captureSizeRatio = (float) captureSize.getWidth() / captureSize.getHeight();
+    List<Size> goodEnough = new ArrayList<>();
+    for (Size s : sizes) {
+      if ((float) s.getWidth() / s.getHeight() == captureSizeRatio
+          && minPreviewSize.getWidth() < s.getWidth()
+          && minPreviewSize.getHeight() < s.getHeight()) {
+        goodEnough.add(s);
+      }
+    }
+
+    Collections.sort(goodEnough, new CompareSizesByArea());
+
+    if (goodEnough.isEmpty()) {
+      previewSize = sizes[0];
+    } else {
+      previewSize = goodEnough.get(0);
+    }
+  }
+
+  @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
+  private void computeBestCaptureSize(StreamConfigurationMap streamConfigurationMap) {
+    // For still image captures, we use the largest available size.
+    captureSize =
+        Collections.max(
+            Arrays.asList(streamConfigurationMap.getOutputSizes(ImageFormat.JPEG)),
+            new CompareSizesByArea());
+  }
+
+  /** Starts a background thread and its {@link Handler}. */
+  private void startBackgroundThread() {
+    mBackgroundThread = new HandlerThread("CameraBackground");
+    mBackgroundThread.start();
+    mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
+  }
+
+  /** Stops the background thread and its {@link Handler}.
*/
+  private void stopBackgroundThread() {
+    if (mBackgroundThread != null) {
+      mBackgroundThread.quitSafely();
+      try {
+        mBackgroundThread.join();
+        mBackgroundThread = null;
+        mBackgroundHandler = null;
+      } catch (InterruptedException e) {
+        e.printStackTrace();
+      }
+    }
+  }
+
+  // Concatenates the Y, V and U planes into an NV21-ordered buffer. This simple concatenation
+  // is only correct when the chroma planes are interleaved (pixel stride 2) and tightly packed;
+  // fully planar images would need a stride-aware copy.
+  private static ByteBuffer YUV_420_888toNV21(Image image) {
+    ByteBuffer yBuffer = image.getPlanes()[0].getBuffer();
+    ByteBuffer uBuffer = image.getPlanes()[1].getBuffer();
+    ByteBuffer vBuffer = image.getPlanes()[2].getBuffer();
+
+    int ySize = yBuffer.remaining();
+    int uSize = uBuffer.remaining();
+    int vSize = vBuffer.remaining();
+
+    return ByteBuffer.allocate(ySize + uSize + vSize).put(yBuffer).put(vBuffer).put(uBuffer);
+  }
+
+  private int getRotation() {
+    if (windowManager == null) {
+      windowManager = (WindowManager) activity.getSystemService(Context.WINDOW_SERVICE);
+    }
+    int degrees = 0;
+    int rotation = windowManager.getDefaultDisplay().getRotation();
+    switch (rotation) {
+      case Surface.ROTATION_0:
+        degrees = 0;
+        break;
+      case Surface.ROTATION_90:
+        degrees = 90;
+        break;
+      case Surface.ROTATION_180:
+        degrees = 180;
+        break;
+      case Surface.ROTATION_270:
+        degrees = 270;
+        break;
+      default:
+        Log.e("ML", "Bad rotation value: " + rotation);
+    }
+
+    try {
+      CameraCharacteristics cameraCharacteristics =
+          cameraManager.getCameraCharacteristics(cameraName);
+      Integer orientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+      // back-facing
+      int angle = (orientation - degrees + 360) % 360;
+      int translatedAngle = angle / 90;
+      Log.d("ML", "Translated angle: " + translatedAngle);
+      // This corresponds to the FirebaseVisionImageMetadata rotation constants.
+      return translatedAngle;
+    } catch (CameraAccessException e) {
+      return 0;
+    }
+  }
+
+  private final AtomicBoolean shouldThrottle = new AtomicBoolean(false);
+
+  private void processImage(Image image) {
+    if (eventSink == null) return;
+    if (shouldThrottle.get()) {
+      return;
+    }
+    shouldThrottle.set(true);
+    ByteBuffer imageBuffer = YUV_420_888toNV21(image);
+    FirebaseVisionImageMetadata metadata =
+        new FirebaseVisionImageMetadata.Builder()
+            .setFormat(FirebaseVisionImageMetadata.IMAGE_FORMAT_NV21)
+            .setWidth(image.getWidth())
+            .setHeight(image.getHeight())
+            .setRotation(getRotation())
+            .build();
+    FirebaseVisionImage firebaseVisionImage =
+        FirebaseVisionImage.fromByteBuffer(imageBuffer, metadata);
+
+    currentDetector.handleDetection(
+        firebaseVisionImage,
+        new Size(image.getWidth(), image.getHeight()),
+        new HashMap<String, Object>(),
+        liveDetectorFinishedCallback);
+  }
+
+  private final ImageReader.OnImageAvailableListener imageAvailable =
+      new ImageReader.OnImageAvailableListener() {
+        @Override
+        public void onImageAvailable(ImageReader reader) {
+          Image image = reader.acquireLatestImage();
+          if (image != null) {
+            processImage(image);
+            image.close();
+          }
+        }
+      };
+
+  public void open(@Nullable final MethodChannel.Result result) {
+    if (!hasCameraPermission()) {
+      if (result != null) result.error("cameraPermission", "Camera permission not granted", null);
+    } else {
+      try {
+        startBackgroundThread();
+        imageReader =
+            ImageReader.newInstance(
+                previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 4);
+        imageReaderSurface = imageReader.getSurface();
+        imageReader.setOnImageAvailableListener(imageAvailable, mBackgroundHandler);
+        cameraManager.openCamera(
+            cameraName,
+            new CameraDevice.StateCallback() {
+              @Override
+              public void
onOpened(@NonNull CameraDevice cameraDevice) { + Camera.this.cameraDevice = cameraDevice; + try { + startPreview(); + } catch (CameraAccessException e) { + if (result != null) result.error("CameraAccess", e.getMessage(), null); + } + + if (result != null) { + Map reply = new HashMap<>(); + reply.put("textureId", textureEntry.id()); + reply.put("previewWidth", previewSize.getWidth()); + reply.put("previewHeight", previewSize.getHeight()); + result.success(reply); + } + } + + @Override + public void onClosed(@NonNull CameraDevice camera) { + if (eventSink != null) { + Map event = new HashMap<>(); + event.put("eventType", "cameraClosing"); + eventSink.success(event); + } + super.onClosed(camera); + } + + @Override + public void onDisconnected(@NonNull CameraDevice cameraDevice) { + cameraDevice.close(); + Camera.this.cameraDevice = null; + sendErrorEvent("The camera was disconnected."); + } + + @Override + public void onError(@NonNull CameraDevice cameraDevice, int errorCode) { + cameraDevice.close(); + Camera.this.cameraDevice = null; + String errorDescription; + switch (errorCode) { + case ERROR_CAMERA_IN_USE: + errorDescription = "The camera device is in use already."; + break; + case ERROR_MAX_CAMERAS_IN_USE: + errorDescription = "Max cameras in use"; + break; + case ERROR_CAMERA_DISABLED: + errorDescription = + "The camera device could not be opened due to a device policy."; + break; + case ERROR_CAMERA_DEVICE: + errorDescription = "The camera device has encountered a fatal error"; + break; + case ERROR_CAMERA_SERVICE: + errorDescription = "The camera service has encountered a fatal error."; + break; + default: + errorDescription = "Unknown camera error"; + } + sendErrorEvent(errorDescription); + } + }, + null); + } catch (CameraAccessException e) { + if (result != null) result.error("cameraAccess", e.getMessage(), null); + } + } + } + + private void startPreview() throws CameraAccessException { + + SurfaceTexture surfaceTexture = textureEntry.surfaceTexture(); + surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); + captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); + + List surfaces = new ArrayList<>(); + + Surface previewSurface = new Surface(surfaceTexture); + surfaces.add(previewSurface); + captureRequestBuilder.addTarget(previewSurface); + + surfaces.add(imageReaderSurface); + captureRequestBuilder.addTarget(imageReaderSurface); + + cameraDevice.createCaptureSession( + surfaces, + new CameraCaptureSession.StateCallback() { + + @Override + public void onConfigured(@NonNull CameraCaptureSession session) { + if (cameraDevice == null) { + sendErrorEvent("The camera was closed during configuration."); + return; + } + try { + cameraCaptureSession = session; + captureRequestBuilder.set( + CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO); + cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, null); + } catch (CameraAccessException e) { + sendErrorEvent(e.getMessage()); + } + } + + @Override + public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) { + sendErrorEvent("Failed to configure the camera for preview."); + } + }, + null); + } + + // private void sendRecognizedBarcodes(List barcodes) { + // if (eventSink != null) { + // List> outputMap = new ArrayList<>(); + // for (FirebaseVisionBarcode barcode : barcodes) { + // Map barcodeData = new HashMap<>(); + // Rect boundingBox = barcode.getBoundingBox(); + // if (boundingBox != null) { + // 
barcodeData.putAll(DetectedItemUtils.rectToFlutterMap(boundingBox)); + // } + // barcodeData.put(BARCODE_VALUE_TYPE, barcode.getValueType()); + // barcodeData.put(BARCODE_DISPLAY_VALUE, barcode.getDisplayValue()); + // barcodeData.put(BARCODE_RAW_VALUE, barcode.getRawValue()); + // outputMap.add(barcodeData); + // } + // Map event = new HashMap<>(); + // event.put("eventType", "recognized"); + // event.put("recognitionType", "barcode"); + // event.put("barcodeData", outputMap); + // eventSink.success(event); + // } + // } + + private void sendErrorEvent(String errorDescription) { + if (eventSink != null) { + Map event = new HashMap<>(); + event.put("eventType", "error"); + event.put("errorDescription", errorDescription); + eventSink.success(event); + } + } + + public void close() { + if (cameraCaptureSession != null) { + cameraCaptureSession.close(); + } + if (cameraDevice != null) { + cameraDevice.close(); + cameraDevice = null; + } + if (imageReader != null) { + imageReader.close(); + imageReader = null; + } + if (mediaRecorder != null) { + mediaRecorder.reset(); + mediaRecorder.release(); + mediaRecorder = null; + } + stopBackgroundThread(); + } + + public void dispose() { + close(); + textureEntry.release(); + } + + private static class CompareSizesByArea implements Comparator { + @Override + public int compare(Size lhs, Size rhs) { + // We cast here to ensure the multiplications won't overflow. + return Long.signum( + (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight()); + } + } +} diff --git a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/CameraInfo.java b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/CameraInfo.java new file mode 100644 index 000000000000..cd823f56b7f5 --- /dev/null +++ b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/CameraInfo.java @@ -0,0 +1,49 @@ +package io.flutter.plugins.firebasemlvision.live; + +import android.annotation.TargetApi; +import android.content.Context; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.CameraMetadata; +import android.os.Build; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +@TargetApi(Build.VERSION_CODES.LOLLIPOP) +class CameraInfo { + public static List> getAvailableCameras(Context context) + throws CameraInfoException { + try { + CameraManager cameraManager = + (CameraManager) context.getSystemService(Context.CAMERA_SERVICE); + assert cameraManager != null; + String[] cameraNames = cameraManager.getCameraIdList(); + List> cameras = new ArrayList<>(); + for (String cameraName : cameraNames) { + HashMap details = new HashMap<>(); + CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraName); + details.put("name", cameraName); + @SuppressWarnings("ConstantConditions") + int lensFacing = characteristics.get(CameraCharacteristics.LENS_FACING); + switch (lensFacing) { + case CameraMetadata.LENS_FACING_FRONT: + details.put("lensFacing", "front"); + break; + case CameraMetadata.LENS_FACING_BACK: + details.put("lensFacing", "back"); + break; + case CameraMetadata.LENS_FACING_EXTERNAL: + details.put("lensFacing", "external"); + break; + } + cameras.add(details); + } + return cameras; + } catch (CameraAccessException e) { + throw new 
CameraInfoException(e.getMessage()); + } + } +} diff --git a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/CameraInfoException.java b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/CameraInfoException.java new file mode 100644 index 000000000000..19492972cd19 --- /dev/null +++ b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/CameraInfoException.java @@ -0,0 +1,7 @@ +package io.flutter.plugins.firebasemlvision.live; + +class CameraInfoException extends Exception { + CameraInfoException(String message) { + super(message); + } +} diff --git a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/CameraPreviewImageProvider.java b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/CameraPreviewImageProvider.java new file mode 100644 index 000000000000..5424c0d99a0b --- /dev/null +++ b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/CameraPreviewImageProvider.java @@ -0,0 +1,7 @@ +package io.flutter.plugins.firebasemlvision.live; + +import io.flutter.plugins.camera.PreviewImageDelegate; + +public interface CameraPreviewImageProvider { + void setImageDelegate(PreviewImageDelegate delegate); +} diff --git a/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/LegacyCamera.java b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/LegacyCamera.java new file mode 100644 index 000000000000..23577492c585 --- /dev/null +++ b/packages/firebase_ml_vision/android/src/main/java/io/flutter/plugins/firebasemlvision/live/LegacyCamera.java @@ -0,0 +1,804 @@ +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package io.flutter.plugins.firebasemlvision.live; + +import static android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK; +import static android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT; + +import android.Manifest; +import android.annotation.SuppressLint; +import android.app.Activity; +import android.content.Context; +import android.graphics.ImageFormat; +import android.graphics.SurfaceTexture; +import android.hardware.Camera; +import android.hardware.Camera.CameraInfo; +import android.support.annotation.Nullable; +import android.support.annotation.RequiresPermission; +import android.util.Log; +import android.view.Surface; +import android.view.WindowManager; +import com.google.android.gms.common.images.Size; +import com.google.firebase.ml.vision.common.FirebaseVisionImage; +import com.google.firebase.ml.vision.common.FirebaseVisionImageMetadata; +import io.flutter.plugin.common.EventChannel; +import io.flutter.plugin.common.PluginRegistry; +import io.flutter.plugins.firebasemlvision.BarcodeDetector; +import io.flutter.plugins.firebasemlvision.Detector; +import io.flutter.plugins.firebasemlvision.DetectorException; +import io.flutter.plugins.firebasemlvision.FaceDetector; +import io.flutter.plugins.firebasemlvision.LabelDetector; +import io.flutter.plugins.firebasemlvision.TextDetector; +import io.flutter.view.FlutterView; +import java.io.IOException; +import java.lang.Thread.State; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.IdentityHashMap; +import java.util.List; +import java.util.Map; + +/** + * Manages the camera and allows UI updates on top of it (e.g. overlaying extra Graphics or + * displaying extra information). This receives preview frames from the camera at a specified rate, + * sending those frames to child classes' detectors / classifiers as fast as it is able to process. + */ +@SuppressLint("MissingPermission") +public class LegacyCamera { + + private static final String TAG = "MIDemoApp:CameraSource"; + + public interface OnCameraOpenedCallback { + void onOpened(long textureId, int width, int height); + } + + /** + * If the absolute difference between a preview size aspect ratio and a picture size aspect ratio + * is less than this tolerance, they are considered to be the same aspect ratio. + */ + private static final float ASPECT_RATIO_TOLERANCE = 0.01f; + + private final Activity activity; + + private final PluginRegistry.Registrar registrar; + + private final FlutterView.SurfaceTextureEntry textureEntry; + + private EventChannel.EventSink eventSink; + + private Camera camera; + + private int facing = CAMERA_FACING_BACK; + + /** + * Rotation of the device, and thus the associated preview images captured from the device. See + * Frame.Metadata#getRotation(). + */ + private int rotation; + + private Size previewSize; + + // These values may be requested by the caller. Due to hardware limitations, we may need to + // select close, but not exactly the same values for these. + @SuppressWarnings("FieldCanBeLocal") + private final float requestedFps = 20.0f; + + private int requestedPreviewWidth = 1280; + private int requestedPreviewHeight = 960; + + // True if a SurfaceTexture is being used for the preview, false if a SurfaceHolder is being + // used for the preview. We want to be compatible back to Gingerbread, but SurfaceTexture + // wasn't introduced until Honeycomb. Since the interface cannot use a SurfaceTexture, if the + // developer wants to display a preview we must use a SurfaceHolder. 
If the developer doesn't + // want to display a preview we use a SurfaceTexture if we are running at least Honeycomb. + private boolean usingSurfaceTexture; + + /** + * Dedicated thread and associated runnable for calling into the detector with frames, as the + * frames become available from the camera. + */ + private Thread processingThread; + + private final FrameProcessingRunnable processingRunnable; + + private final Object processorLock = new Object(); + private Detector detector; + private Map detectorOptions; + + /** + * Map to convert between a byte array, received from the camera, and its associated byte buffer. + * We use byte buffers internally because this is a more efficient way to call into native code + * later (avoids a potential copy). + * + *
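+ * <p>A minimal sketch of the association this map maintains, using names from this
+ * class:
+ * <pre>{@code
+ * byte[] raw = createPreviewBuffer(previewSize);   // handed to the camera as a buffer
+ * ByteBuffer wrapped = bytesToByteBuffer.get(raw); // identity lookup, no copy
+ * }</pre>
+ * <p>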
Note: uses IdentityHashMap here instead of HashMap because the behavior of an array's + * equals, hashCode and toString methods is both useless and unexpected. IdentityHashMap enforces + * identity ('==') check on the keys. + */ + private final Map bytesToByteBuffer = new IdentityHashMap<>(); + + private final Detector.OperationFinishedCallback liveDetectorFinishedCallback = + new Detector.OperationFinishedCallback() { + @Override + public void success(Detector detector, Object data, android.util.Size size) { + Map event = new HashMap<>(); + event.put("eventType", "detection"); + String dataType; + if (detector instanceof BarcodeDetector) { + dataType = "barcode"; + } else if (detector instanceof TextDetector) { + dataType = "text"; + } else if (detector instanceof LabelDetector) { + dataType = "label"; + } else if (detector instanceof FaceDetector) { + dataType = "face"; + } else { + // unsupported live detector + return; + } + event.put("detectionType", dataType); + event.put("data", data); + eventSink.success(event); + } + + @Override + public void error(DetectorException e) { + e.sendError(eventSink); + } + }; + + public LegacyCamera( + PluginRegistry.Registrar registrar, String resolutionPreset, int cameraFacing) { + this.registrar = registrar; + this.activity = registrar.activity(); + this.textureEntry = registrar.view().createSurfaceTexture(); + processingRunnable = new FrameProcessingRunnable(); + + registerEventChannel(); + + switch (resolutionPreset) { + case "high": + requestedPreviewWidth = 1024; + requestedPreviewHeight = 768; + break; + case "medium": + requestedPreviewWidth = 640; + requestedPreviewHeight = 480; + break; + case "low": + requestedPreviewWidth = 320; + requestedPreviewHeight = 240; + break; + } + + setFacing(cameraFacing); + } + + private void registerEventChannel() { + new EventChannel( + registrar.messenger(), + "plugins.flutter.io/firebase_ml_vision/liveViewEvents" + textureEntry.id()) + .setStreamHandler( + new EventChannel.StreamHandler() { + @Override + public void onListen(Object arguments, EventChannel.EventSink eventSink) { + LegacyCamera.this.eventSink = eventSink; + } + + @Override + public void onCancel(Object arguments) { + LegacyCamera.this.eventSink = null; + } + }); + } + + // ============================================================================================== + // Public + // ============================================================================================== + + /** Stops the camera and releases the resources of the camera and underlying detector. */ + public void release() { + synchronized (processorLock) { + stop(); + processingRunnable.release(); + } + } + + /** + * Opens the camera and starts sending preview frames to the underlying detector. The supplied + * surface holder is used for the preview so frames can be displayed to the user. 
+ * + * @throws IOException if the supplied surface holder could not be used as the preview display + */ + @RequiresPermission(Manifest.permission.CAMERA) + public synchronized LegacyCamera start(OnCameraOpenedCallback callback) throws IOException { + if (camera != null) { + return this; + } + + camera = createCamera(callback); + + SurfaceTexture surfaceTexture = textureEntry.surfaceTexture(); + surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); + + camera.setPreviewTexture(surfaceTexture); + usingSurfaceTexture = true; + camera.startPreview(); + + processingThread = new Thread(processingRunnable); + processingRunnable.setActive(true); + processingThread.start(); + + return this; + } + + /** + * Closes the camera and stops sending frames to the underlying frame detector. + * + *
This camera source may be restarted again by calling {@link #start(OnCameraOpenedCallback)}. + * + *
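+ * <p>For example, assuming an already-started {@code LegacyCamera camera}:
+ * <pre>{@code
+ * camera.stop();          // halts preview and frame processing; buffers are cleared
+ * camera.start(callback); // resumes; the previously set detector is reused
+ * }</pre>
+ * <p>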
Call {@link #release()} instead to completely shut down this camera source and release the + * resources of the underlying detector. + */ + public synchronized void stop() { + processingRunnable.setActive(false); + if (processingThread != null) { + try { + // Wait for the thread to complete to ensure that we can't have multiple threads + // executing at the same time (i.e., which would happen if we called start too + // quickly after stop). + processingThread.join(); + } catch (InterruptedException e) { + Log.d(TAG, "Frame processing thread interrupted on release."); + } + processingThread = null; + } + + if (camera != null) { + camera.stopPreview(); + camera.setPreviewCallbackWithBuffer(null); + try { + if (usingSurfaceTexture) { + camera.setPreviewTexture(null); + } else { + camera.setPreviewDisplay(null); + } + } catch (Exception e) { + Log.e(TAG, "Failed to clear camera preview: " + e); + } + camera.release(); + camera = null; + } + + // Release the reference to any image buffers, since these will no longer be in use. + bytesToByteBuffer.clear(); + } + + /** Changes the facing of the camera. */ + public synchronized void setFacing(int facing) { + if ((facing != CAMERA_FACING_BACK) && (facing != CAMERA_FACING_FRONT)) { + throw new IllegalArgumentException("Invalid camera: " + facing); + } + this.facing = facing; + } + + /** Returns the preview size that is currently in use by the underlying camera. */ + public Size getPreviewSize() { + return previewSize; + } + + /** + * Returns the selected camera; one of {@link CameraInfo#CAMERA_FACING_BACK} or {@link + * CameraInfo#CAMERA_FACING_FRONT}. + */ + public int getCameraFacing() { + return facing; + } + + /** + * Opens the camera and applies the user settings. + * + * @throws IOException if camera cannot be found or preview cannot be processed + */ + @SuppressLint("InlinedApi") + private Camera createCamera(@Nullable OnCameraOpenedCallback callback) throws IOException { + int requestedCameraId = getIdForRequestedCamera(facing); + if (requestedCameraId == -1) { + throw new IOException("Could not find requested camera."); + } + Camera camera = Camera.open(requestedCameraId); + + SizePair sizePair = selectSizePair(camera, requestedPreviewWidth, requestedPreviewHeight); + if (sizePair == null) { + throw new IOException("Could not find suitable preview size."); + } + Size pictureSize = sizePair.pictureSize(); + previewSize = sizePair.previewSize(); + + int[] previewFpsRange = selectPreviewFpsRange(camera, requestedFps); + if (previewFpsRange == null) { + throw new IOException("Could not find suitable preview frames per second range."); + } + + Camera.Parameters parameters = camera.getParameters(); + + if (pictureSize != null) { + parameters.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight()); + } + parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight()); + parameters.setPreviewFpsRange( + previewFpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX], + previewFpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]); + parameters.setPreviewFormat(ImageFormat.NV21); + + setRotation(camera, parameters, requestedCameraId); + + if (parameters + .getSupportedFocusModes() + .contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) { + parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO); + } else { + Log.i(TAG, "Camera auto focus is not supported on this device."); + } + + camera.setParameters(parameters); + + if (callback != null) { + callback.onOpened(textureEntry.id(), previewSize.getWidth(), 
previewSize.getHeight()); + } + + // Four frame buffers are needed for working with the camera: + // + // one for the frame that is currently being executed upon in doing detection + // one for the next pending frame to process immediately upon completing detection + // two for the frames that the camera uses to populate future preview images + // + // Through trial and error it appears that two free buffers, in addition to the two buffers + // used in this code, are needed for the camera to work properly. Perhaps the camera has + // one thread for acquiring images, and another thread for calling into user code. If only + // three buffers are used, then the camera will spew thousands of warning messages when + // detection takes a non-trivial amount of time. + camera.setPreviewCallbackWithBuffer(new CameraPreviewCallback()); + camera.addCallbackBuffer(createPreviewBuffer(previewSize)); + camera.addCallbackBuffer(createPreviewBuffer(previewSize)); + camera.addCallbackBuffer(createPreviewBuffer(previewSize)); + camera.addCallbackBuffer(createPreviewBuffer(previewSize)); + + return camera; + } + + /** + * Gets the id for the camera specified by the direction it is facing. Returns -1 if no such + * camera was found. + * + * @param facing the desired camera (front-facing or rear-facing) + */ + private static int getIdForRequestedCamera(int facing) { + CameraInfo cameraInfo = new CameraInfo(); + for (int i = 0; i < Camera.getNumberOfCameras(); ++i) { + Camera.getCameraInfo(i, cameraInfo); + if (cameraInfo.facing == facing) { + return i; + } + } + return -1; + } + + public static List> listAvailableCameraDetails() { + List> availableCameraDetails = new ArrayList<>(); + int cameraCount = Camera.getNumberOfCameras(); + for (int i = 0; i < cameraCount; ++i) { + Map detailsMap = new HashMap<>(); + CameraInfo info = new CameraInfo(); + Camera.getCameraInfo(i, info); + detailsMap.put("name", String.valueOf(i)); + Log.d("ML", "camera Name: " + i); + if (info.facing == CAMERA_FACING_BACK) { + detailsMap.put("lensFacing", "back"); + } else { + detailsMap.put("lensFacing", "front"); + } + availableCameraDetails.add(detailsMap); + } + return availableCameraDetails; + } + + /** + * Selects the most suitable preview and picture size, given the desired width and height. + * + *
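+ * <p>For example, using the selection rule in the implementation below (minimize the
+ * sum of the absolute width and height differences): for a desired size of 1280x960,
+ * a supported 1280x720 preview (difference 240) is preferred over 1024x768
+ * (difference 448), assuming both have same-aspect-ratio picture sizes available.
+ * <p>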
Even though we only need to find the preview size, it's necessary to find both the preview + * size and the picture size of the camera together, because these need to have the same aspect + * ratio. On some hardware, if you would only set the preview size, you will get a distorted + * image. + * + * @param camera the camera to select a preview size from + * @param desiredWidth the desired width of the camera preview frames + * @param desiredHeight the desired height of the camera preview frames + * @return the selected preview and picture size pair + */ + private static SizePair selectSizePair(Camera camera, int desiredWidth, int desiredHeight) { + List validPreviewSizes = generateValidPreviewSizeList(camera); + + // The method for selecting the best size is to minimize the sum of the differences between + // the desired values and the actual values for width and height. This is certainly not the + // only way to select the best size, but it provides a decent tradeoff between using the + // closest aspect ratio vs. using the closest pixel area. + SizePair selectedPair = null; + int minDiff = Integer.MAX_VALUE; + for (SizePair sizePair : validPreviewSizes) { + Size size = sizePair.previewSize(); + int diff = + Math.abs(size.getWidth() - desiredWidth) + Math.abs(size.getHeight() - desiredHeight); + if (diff < minDiff) { + selectedPair = sizePair; + minDiff = diff; + } + } + + return selectedPair; + } + + /** + * Stores a preview size and a corresponding same-aspect-ratio picture size. To avoid distorted + * preview images on some devices, the picture size must be set to a size that is the same aspect + * ratio as the preview size or the preview may end up being distorted. If the picture size is + * null, then there is no picture size with the same aspect ratio as the preview size. + */ + private static class SizePair { + private final Size preview; + private Size picture; + + SizePair( + android.hardware.Camera.Size previewSize, + @Nullable android.hardware.Camera.Size pictureSize) { + preview = new Size(previewSize.width, previewSize.height); + if (pictureSize != null) { + picture = new Size(pictureSize.width, pictureSize.height); + } + } + + Size previewSize() { + return preview; + } + + @Nullable + Size pictureSize() { + return picture; + } + } + + /** + * Generates a list of acceptable preview sizes. Preview sizes are not acceptable if there is not + * a corresponding picture size of the same aspect ratio. If there is a corresponding picture size + * of the same aspect ratio, the picture size is paired up with the preview size. + * + *
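+ * <p>For example, with {@code ASPECT_RATIO_TOLERANCE} at 0.01: a 640x480 preview
+ * (ratio ~1.333) can pair with a 1600x1200 picture size (also ~1.333), but not with
+ * a 1920x1080 picture size (~1.778).
+ * <p>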
This is necessary because even if we don't use still pictures, the still picture size must + * be set to a size that is the same aspect ratio as the preview size we choose. Otherwise, the + * preview images may be distorted on some devices. + */ + private static List generateValidPreviewSizeList(Camera camera) { + Camera.Parameters parameters = camera.getParameters(); + List supportedPreviewSizes = parameters.getSupportedPreviewSizes(); + List supportedPictureSizes = parameters.getSupportedPictureSizes(); + List validPreviewSizes = new ArrayList<>(); + for (android.hardware.Camera.Size previewSize : supportedPreviewSizes) { + float previewAspectRatio = (float) previewSize.width / (float) previewSize.height; + + // By looping through the picture sizes in order, we favor the higher resolutions. + // We choose the highest resolution in order to support taking the full resolution + // picture later. + for (android.hardware.Camera.Size pictureSize : supportedPictureSizes) { + float pictureAspectRatio = (float) pictureSize.width / (float) pictureSize.height; + if (Math.abs(previewAspectRatio - pictureAspectRatio) < ASPECT_RATIO_TOLERANCE) { + validPreviewSizes.add(new SizePair(previewSize, pictureSize)); + break; + } + } + } + + // If there are no picture sizes with the same aspect ratio as any preview sizes, allow all + // of the preview sizes and hope that the camera can handle it. Probably unlikely, but we + // still account for it. + if (validPreviewSizes.size() == 0) { + Log.w(TAG, "No preview sizes have a corresponding same-aspect-ratio picture size"); + for (android.hardware.Camera.Size previewSize : supportedPreviewSizes) { + // The null picture size will let us know that we shouldn't set a picture size. + validPreviewSizes.add(new SizePair(previewSize, null)); + } + } + + return validPreviewSizes; + } + + /** + * Selects the most suitable preview frames per second range, given the desired frames per second. + * + * @param camera the camera to select a frames per second range from + * @param desiredPreviewFps the desired frames per second for the camera preview frames + * @return the selected preview frames per second range + */ + @SuppressLint("InlinedApi") + private static int[] selectPreviewFpsRange(Camera camera, float desiredPreviewFps) { + // The camera API uses integers scaled by a factor of 1000 instead of floating-point frame + // rates. + int desiredPreviewFpsScaled = (int) (desiredPreviewFps * 1000.0f); + + // The method for selecting the best range is to minimize the sum of the differences between + // the desired value and the upper and lower bounds of the range. This may select a range + // that the desired value is outside of, but this is often preferred. For example, if the + // desired frame rate is 29.97, the range (30, 30) is probably more desirable than the + // range (15, 30). + int[] selectedFpsRange = null; + int minDiff = Integer.MAX_VALUE; + List previewFpsRangeList = camera.getParameters().getSupportedPreviewFpsRange(); + for (int[] range : previewFpsRangeList) { + int deltaMin = desiredPreviewFpsScaled - range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX]; + int deltaMax = desiredPreviewFpsScaled - range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]; + int diff = Math.abs(deltaMin) + Math.abs(deltaMax); + if (diff < minDiff) { + selectedFpsRange = range; + minDiff = diff; + } + } + return selectedFpsRange; + } + + /** + * Calculates the correct rotation for the given camera id and sets the rotation in the + * parameters. 
It also sets the camera's display orientation and rotation. + * + * @param parameters the camera parameters for which to set the rotation + * @param cameraId the camera id to set rotation based on + */ + private void setRotation(Camera camera, Camera.Parameters parameters, int cameraId) { + WindowManager windowManager = (WindowManager) activity.getSystemService(Context.WINDOW_SERVICE); + int degrees = 0; + assert windowManager != null; + int rotation = windowManager.getDefaultDisplay().getRotation(); + switch (rotation) { + case Surface.ROTATION_0: + degrees = 0; + break; + case Surface.ROTATION_90: + degrees = 90; + break; + case Surface.ROTATION_180: + degrees = 180; + break; + case Surface.ROTATION_270: + degrees = 270; + break; + default: + Log.e(TAG, "Bad rotation value: " + rotation); + } + + CameraInfo cameraInfo = new CameraInfo(); + Camera.getCameraInfo(cameraId, cameraInfo); + + int angle; + int displayAngle; + if (cameraInfo.facing == CAMERA_FACING_FRONT) { + angle = (cameraInfo.orientation + degrees) % 360; + displayAngle = (360 - angle) % 360; // compensate for it being mirrored + } else { // back-facing + angle = (cameraInfo.orientation - degrees + 360) % 360; + displayAngle = angle; + } + + // This corresponds to the rotation constants. + this.rotation = angle / 90; + + camera.setDisplayOrientation(displayAngle); + parameters.setRotation(angle); + } + + /** + * Creates one buffer for the camera preview callback. The size of the buffer is based off of the + * camera preview size and the format of the camera image. + * + * @return a new preview buffer of the appropriate size for the current camera settings + */ + @SuppressLint("InlinedApi") + private byte[] createPreviewBuffer(Size previewSize) { + int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21); + long sizeInBits = (long) previewSize.getHeight() * previewSize.getWidth() * bitsPerPixel; + int bufferSize = (int) Math.ceil(sizeInBits / 8.0d) + 1; + + // Creating the byte array this way and wrapping it, as opposed to using .allocate(), + // should guarantee that there will be an array to work with. + byte[] byteArray = new byte[bufferSize]; + ByteBuffer buffer = ByteBuffer.wrap(byteArray); + if (!buffer.hasArray() || (buffer.array() != byteArray)) { + // I don't think that this will ever happen. But if it does, then we wouldn't be + // passing the preview content to the underlying detector later. + throw new IllegalStateException("Failed to create valid buffer for camera source."); + } + + bytesToByteBuffer.put(byteArray, buffer); + return byteArray; + } + + // ============================================================================================== + // Frame processing + // ============================================================================================== + + /** Called when the camera has a new preview frame. */ + private class CameraPreviewCallback implements Camera.PreviewCallback { + @Override + public void onPreviewFrame(byte[] data, Camera camera) { + processingRunnable.setNextFrame(data, camera); + } + } + + public void setMachineLearningFrameProcessor( + Detector processor, @Nullable Map options) { + synchronized (processorLock) { + detector = processor; + detectorOptions = options; + } + } + + /** + * This runnable controls access to the underlying receiver, calling it to process frames when + * available from the camera. This is designed to run detection on frames as fast as possible + * (i.e., without unnecessary context switching or waiting on the next frame). + * + *
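+ * <p>The producer/consumer handoff, in outline (names from this class):
+ * <pre>{@code
+ * // camera thread:             // processing thread, inside run():
+ * setNextFrame(data, camera);  //   waits on lock until pendingFrameData != null,
+ *                              //   runs detection, then re-adds the buffer
+ * }</pre>
+ * <p>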
While detection is running on a frame, new frames may be received from the camera. As these + * frames come in, the most recent frame is held onto as pending. As soon as detection and its + * associated processing is done for the previous frame, detection on the most recently received + * frame will immediately start on the same thread. + */ + private class FrameProcessingRunnable implements Runnable { + + // This lock guards all of the member variables below. + private final Object lock = new Object(); + private boolean active = true; + + // These pending variables hold the state associated with the new frame awaiting processing. + private ByteBuffer pendingFrameData; + + FrameProcessingRunnable() {} + + /** + * Releases the underlying receiver. This is only safe to do after the associated thread has + * completed, which is managed in camera source's release method above. + */ + @SuppressLint("Assert") + void release() { + assert (processingThread.getState() == State.TERMINATED); + } + + /** Marks the runnable as active/not active. Signals any blocked threads to continue. */ + void setActive(boolean active) { + synchronized (lock) { + this.active = active; + lock.notifyAll(); + } + } + + /** + * Sets the frame data received from the camera. This adds the previous unused frame buffer (if + * present) back to the camera, and keeps a pending reference to the frame data for future use. + */ + void setNextFrame(byte[] data, Camera camera) { + synchronized (lock) { + if (pendingFrameData != null) { + camera.addCallbackBuffer(pendingFrameData.array()); + pendingFrameData = null; + } + + if (!bytesToByteBuffer.containsKey(data)) { + Log.d( + TAG, + "Skipping frame. Could not find ByteBuffer associated with the image " + + "data from the camera."); + return; + } + + pendingFrameData = bytesToByteBuffer.get(data); + + // Notify the processor thread if it is waiting on the next frame (see below). + lock.notifyAll(); + } + } + + /** + * As long as the processing thread is active, this executes detection on frames continuously. + * The next pending frame is either immediately available or hasn't been received yet. Once it + * is available, we transfer the frame info to local variables and run detection on that frame. + * It immediately loops back for the next frame without pausing. + * + *
If detection takes longer than the time in between new frames from the camera, this will + * mean that this loop will run without ever waiting on a frame, avoiding any context switching + * or frame acquisition time latency. + * + *
If you find that this is using more CPU than you'd like, you should probably decrease the + * FPS setting above to allow for some idle time in between frames. + */ + @SuppressLint("InlinedApi") + @SuppressWarnings("GuardedBy") + @Override + public void run() { + ByteBuffer data; + + while (true) { + synchronized (lock) { + while (active && (pendingFrameData == null)) { + try { + // Wait for the next frame to be received from the camera, since we + // don't have it yet. + lock.wait(); + } catch (InterruptedException e) { + Log.d(TAG, "Frame processing loop terminated.", e); + return; + } + } + + if (!active) { + // Exit the loop once this camera source is stopped or released. We check + // this here, immediately after the wait() above, to handle the case where + // setActive(false) had been called, triggering the termination of this + // loop. + return; + } + + // Hold onto the frame data locally, so that we can use this for detection + // below. We need to clear pendingFrameData to ensure that this buffer isn't + // recycled back to the camera before we are done using that data. + data = pendingFrameData; + pendingFrameData = null; + } + + // The code below needs to run outside of synchronization, because this will allow + // the camera to add pending frame(s) while we are running detection on the current + // frame. + + try { + synchronized (processorLock) { + FirebaseVisionImageMetadata metadata = + new FirebaseVisionImageMetadata.Builder() + .setFormat(FirebaseVisionImageMetadata.IMAGE_FORMAT_NV21) + .setWidth(previewSize.getWidth()) + .setHeight(previewSize.getHeight()) + .setRotation(rotation) + .build(); + FirebaseVisionImage image = FirebaseVisionImage.fromByteBuffer(data, metadata); + detector.handleDetection(image, new android.util.Size(previewSize.getWidth(), previewSize.getHeight()), detectorOptions, liveDetectorFinishedCallback); + } + } catch (Throwable t) { + Log.e(TAG, "Exception thrown from receiver.", t); + } finally { + camera.addCallbackBuffer(data.array()); + } + } + } + } +} diff --git a/packages/firebase_ml_vision/example/android/app/build.gradle b/packages/firebase_ml_vision/example/android/app/build.gradle index 9383e31cc780..2c8a7e9fcb16 100644 --- a/packages/firebase_ml_vision/example/android/app/build.gradle +++ b/packages/firebase_ml_vision/example/android/app/build.gradle @@ -23,7 +23,7 @@ android { defaultConfig { applicationId "io.flutter.plugins.firebasemlvisionexample" - minSdkVersion 16 + minSdkVersion 21 } buildTypes { diff --git a/packages/firebase_ml_vision/example/android/app/src/main/java/io/flutter/plugins/firebasemlvisionexample/MainActivity.java b/packages/firebase_ml_vision/example/android/app/src/main/java/io/flutter/plugins/firebasemlvisionexample/MainActivity.java index f5bfd378a945..86577c43582f 100644 --- a/packages/firebase_ml_vision/example/android/app/src/main/java/io/flutter/plugins/firebasemlvisionexample/MainActivity.java +++ b/packages/firebase_ml_vision/example/android/app/src/main/java/io/flutter/plugins/firebasemlvisionexample/MainActivity.java @@ -1,13 +1,32 @@ package io.flutter.plugins.firebasemlvisionexample; +import android.media.Image; import android.os.Bundle; +import android.support.annotation.Nullable; import io.flutter.app.FlutterActivity; import io.flutter.plugins.GeneratedPluginRegistrant; +import io.flutter.plugins.camera.PreviewImageDelegate; +import io.flutter.plugins.firebasemlvision.live.CameraPreviewImageProvider; + +public class MainActivity extends FlutterActivity + implements PreviewImageDelegate, 
CameraPreviewImageProvider { + @Nullable private PreviewImageDelegate previewImageDelegate; -public class MainActivity extends FlutterActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); GeneratedPluginRegistrant.registerWith(this); } + + @Override + public void onImageAvailable(Image image, int rotation) { + if (previewImageDelegate != null) { + previewImageDelegate.onImageAvailable(image, rotation); + } + } + + @Override + public void setImageDelegate(PreviewImageDelegate delegate) { + previewImageDelegate = delegate; + } } diff --git a/packages/firebase_ml_vision/example/ios/Runner.xcodeproj/project.pbxproj b/packages/firebase_ml_vision/example/ios/Runner.xcodeproj/project.pbxproj index 5ed511a52444..0ae5c14c9cd2 100644 --- a/packages/firebase_ml_vision/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/firebase_ml_vision/example/ios/Runner.xcodeproj/project.pbxproj @@ -186,6 +186,7 @@ TargetAttributes = { 97C146ED1CF9000F007C117D = { CreatedOnToolsVersion = 7.3.1; + DevelopmentTeam = XPX9B2R9P6; }; }; }; @@ -453,6 +454,7 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = XPX9B2R9P6; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -476,6 +478,7 @@ buildSettings = { ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CURRENT_PROJECT_VERSION = "$(FLUTTER_BUILD_NUMBER)"; + DEVELOPMENT_TEAM = XPX9B2R9P6; ENABLE_BITCODE = NO; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", diff --git a/packages/firebase_ml_vision/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme b/packages/firebase_ml_vision/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme index 1263ac84b105..851d32ee7b53 100644 --- a/packages/firebase_ml_vision/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme +++ b/packages/firebase_ml_vision/example/ios/Runner.xcodeproj/xcshareddata/xcschemes/Runner.xcscheme @@ -26,7 +26,6 @@ buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" - language = "" shouldUseLaunchSchemeArgsEnv = "YES"> @@ -46,7 +45,6 @@ buildConfiguration = "Debug" selectedDebuggerIdentifier = "Xcode.DebuggerFoundation.Debugger.LLDB" selectedLauncherIdentifier = "Xcode.DebuggerFoundation.Launcher.LLDB" - language = "" launchStyle = "0" useCustomWorkingDirectory = "NO" ignoresPersistentStateOnLaunch = "NO" @@ -63,6 +61,12 @@ ReferencedContainer = "container:Runner.xcodeproj"> + + + + diff --git a/packages/firebase_ml_vision/example/ios/Runner/AppDelegate.m b/packages/firebase_ml_vision/example/ios/Runner/AppDelegate.m index 59a72e90be12..87b0634370a7 100644 --- a/packages/firebase_ml_vision/example/ios/Runner/AppDelegate.m +++ b/packages/firebase_ml_vision/example/ios/Runner/AppDelegate.m @@ -1,10 +1,12 @@ #include "AppDelegate.h" #include "GeneratedPluginRegistrant.h" +@import Firebase; @implementation AppDelegate - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { + [FIRApp configure]; [GeneratedPluginRegistrant registerWithRegistry:self]; // Override point for customization after application launch. 
return [super application:application didFinishLaunchingWithOptions:launchOptions]; diff --git a/packages/firebase_ml_vision/example/ios/Runner/Base.lproj/Main.storyboard b/packages/firebase_ml_vision/example/ios/Runner/Base.lproj/Main.storyboard index f3c28516fb38..5e371aca7cc9 100644 --- a/packages/firebase_ml_vision/example/ios/Runner/Base.lproj/Main.storyboard +++ b/packages/firebase_ml_vision/example/ios/Runner/Base.lproj/Main.storyboard @@ -1,8 +1,12 @@ - - + + + + + - + + @@ -14,9 +18,9 @@ - + - + diff --git a/packages/firebase_ml_vision/example/lib/detector_painters.dart b/packages/firebase_ml_vision/example/lib/detector_painters.dart index c2a8b1315979..b8a89d8214eb 100644 --- a/packages/firebase_ml_vision/example/lib/detector_painters.dart +++ b/packages/firebase_ml_vision/example/lib/detector_painters.dart @@ -7,7 +7,26 @@ import 'dart:ui' as ui; import 'package:firebase_ml_vision/firebase_ml_vision.dart'; import 'package:flutter/material.dart'; -enum Detector { barcode, face, label, text } +CustomPaint customPaintForResults(LiveViewDetectionResult result) { + CustomPainter painter; + if (result is LiveViewBarcodeDetectionResult) { + painter = new BarcodeDetectorPainter(result.size, result.data); + } else if (result is LiveViewTextDetectionResult) { + print("painting text"); + painter = new TextDetectorPainter(result.size, result.data); + } else if (result is LiveViewFaceDetectionResult) { + painter = new FaceDetectorPainter(result.size, result.data); + } else if (result is LiveViewLabelDetectionResult) { + painter = new LabelDetectorPainter(result.size, result.data); + } else { + print("unknown painter"); + painter = null; + } + + return new CustomPaint( + painter: painter, + ); +} class BarcodeDetectorPainter extends CustomPainter { BarcodeDetectorPainter(this.absoluteImageSize, this.barcodeLocations); diff --git a/packages/firebase_ml_vision/example/lib/live_preview.dart b/packages/firebase_ml_vision/example/lib/live_preview.dart new file mode 100644 index 000000000000..427bd08a02f8 --- /dev/null +++ b/packages/firebase_ml_vision/example/lib/live_preview.dart @@ -0,0 +1,157 @@ +import 'dart:async'; + +import 'package:camera/camera.dart' as camera; +import 'package:firebase_ml_vision/firebase_ml_vision.dart'; +import 'package:firebase_ml_vision_example/detector_painters.dart'; +import 'package:flutter/material.dart'; + +class LivePreview extends StatefulWidget { + final FirebaseVisionDetectorType detector; + + const LivePreview( + this.detector, { + Key key, + }) : super(key: key); + + @override + LivePreviewState createState() { + return new LivePreviewState(); + } +} + +class LivePreviewState extends State { + LiveViewCameraLoadStateReady _readyLoadState; + + Stream _prepareCameraPreview() async* { + if (_readyLoadState != null) { + await setLiveViewDetector(); + yield _readyLoadState; + } else { + yield new LiveViewCameraLoadStateLoading(); + final List cameras = + await camera.availableCameras(); + final camera.CameraDescription backCamera = cameras.firstWhere( + (camera.CameraDescription cameraDescription) => + cameraDescription.lensDirection == + camera.CameraLensDirection.back); + if (backCamera != null) { + yield new LiveViewCameraLoadStateLoaded(backCamera); + try { + final camera.CameraController controller = + new camera.CameraController( + backCamera, camera.ResolutionPreset.high); + await controller.initialize(); + await setLiveViewDetector(); + yield new LiveViewCameraLoadStateReady(controller); + } on LiveViewCameraException catch (e) { + yield new 
LiveViewCameraLoadStateFailed( + "error initializing camera controller: ${e.toString()}"); + } + } else { + yield new LiveViewCameraLoadStateFailed("Could not find device camera"); + } + } + } + + @override + void initState() { + super.initState(); + setLiveViewDetector(); + } + + Future setLiveViewDetector() async { + VisionOptions options; + if (widget.detector == FirebaseVisionDetectorType.barcode) { + options = const BarcodeDetectorOptions(); + } else if (widget.detector == FirebaseVisionDetectorType.label) { + options = const LabelDetectorOptions(); + } else if (widget.detector == FirebaseVisionDetectorType.face) { + options = const FaceDetectorOptions(); + } + FirebaseVision.instance.setLiveViewDetector(widget.detector, options); + } + + @override + void dispose() { + super.dispose(); + _readyLoadState?.controller?.dispose(); + } + + @override + Widget build(BuildContext context) { + return new StreamBuilder( + stream: _prepareCameraPreview(), + initialData: new LiveViewCameraLoadStateLoading(), + builder: (BuildContext context, + AsyncSnapshot snapshot) { + final LiveViewCameraLoadState loadState = snapshot.data; + if (loadState != null) { + if (loadState is LiveViewCameraLoadStateLoading || + loadState is LiveViewCameraLoadStateLoaded) { + return const Text("loading camera preview…"); + } + if (loadState is LiveViewCameraLoadStateReady) { + if (_readyLoadState != loadState) { + _readyLoadState?.dispose(); + _readyLoadState = loadState; + } + return new AspectRatio( + aspectRatio: _readyLoadState.controller.value.aspectRatio, + child: Stack(children: [ + new camera.CameraPreview(loadState.controller), + Container( + constraints: const BoxConstraints.expand(), + child: StreamBuilder( + builder: (BuildContext context, + AsyncSnapshot snapshot) { + print("update: ${snapshot}"); + if (snapshot == null || snapshot.data == null) { + return Text("No DATA!!!"); + } + print("size: ${snapshot.data.size}"); + print("data: ${snapshot.data.data}"); + return customPaintForResults(snapshot.data); + }, + stream: FirebaseVision.instance.liveViewStream, + ), + ) + ]), + ); + } else if (loadState is LiveViewCameraLoadStateFailed) { + return new Text("error loading camera ${loadState.errorMessage}"); + } else { + return const Text("Unknown Camera error"); + } + } else { + return new Text("Camera error: ${snapshot.error.toString()}"); + } + }, + ); + } +} + +abstract class LiveViewCameraLoadState {} + +class LiveViewCameraLoadStateLoading extends LiveViewCameraLoadState {} + +class LiveViewCameraLoadStateLoaded extends LiveViewCameraLoadState { + final camera.CameraDescription cameraDescription; + + LiveViewCameraLoadStateLoaded(this.cameraDescription); +} + +class LiveViewCameraLoadStateReady extends LiveViewCameraLoadState { + final camera.CameraController controller; + + LiveViewCameraLoadStateReady(this.controller); + + void dispose() { + controller.dispose(); + } +} + +class LiveViewCameraLoadStateFailed extends LiveViewCameraLoadState { + final String errorMessage; + + LiveViewCameraLoadStateFailed(this.errorMessage); +} diff --git a/packages/firebase_ml_vision/example/lib/main.dart b/packages/firebase_ml_vision/example/lib/main.dart index d375dbd38d0c..79be44cf41a8 100644 --- a/packages/firebase_ml_vision/example/lib/main.dart +++ b/packages/firebase_ml_vision/example/lib/main.dart @@ -7,6 +7,7 @@ import 'dart:io'; import 'package:firebase_ml_vision/firebase_ml_vision.dart'; import 'package:firebase_ml_vision_example/detector_painters.dart'; +import 
'package:firebase_ml_vision_example/live_preview.dart'; import 'package:flutter/material.dart'; import 'package:image_picker/image_picker.dart'; @@ -17,11 +18,28 @@ class _MyHomePage extends StatefulWidget { _MyHomePageState createState() => new _MyHomePageState(); } -class _MyHomePageState extends State<_MyHomePage> { +class _MyHomePageState extends State<_MyHomePage> + with SingleTickerProviderStateMixin { File _imageFile; Size _imageSize; List _scanResults; - Detector _currentDetector = Detector.text; + FirebaseVisionDetectorType _currentDetector = FirebaseVisionDetectorType.text; + TabController _tabController; + int _selectedPageIndex = 0; + + @override + void initState() { + super.initState(); + _tabController = new TabController(vsync: this, length: 2); + _tabController.addListener(_handleTabSelection); + _selectedPageIndex = 0; + } + + void _handleTabSelection() { + setState(() { + _selectedPageIndex = _tabController.index; + }); + } Future _getAndScanImage() async { setState(() { @@ -71,16 +89,16 @@ class _MyHomePageState extends State<_MyHomePage> { FirebaseVisionDetector detector; switch (_currentDetector) { - case Detector.barcode: + case FirebaseVisionDetectorType.barcode: detector = FirebaseVision.instance.barcodeDetector(); break; - case Detector.face: + case FirebaseVisionDetectorType.face: detector = FirebaseVision.instance.faceDetector(); break; - case Detector.label: + case FirebaseVisionDetectorType.label: detector = FirebaseVision.instance.labelDetector(); break; - case Detector.text: + case FirebaseVisionDetectorType.text: detector = FirebaseVision.instance.textDetector(); break; default: @@ -99,16 +117,16 @@ class _MyHomePageState extends State<_MyHomePage> { CustomPainter painter; switch (_currentDetector) { - case Detector.barcode: + case FirebaseVisionDetectorType.barcode: painter = new BarcodeDetectorPainter(_imageSize, results); break; - case Detector.face: + case FirebaseVisionDetectorType.face: painter = new FaceDetectorPainter(_imageSize, results); break; - case Detector.label: + case FirebaseVisionDetectorType.label: painter = new LabelDetectorPainter(_imageSize, results); break; - case Detector.text: + case FirebaseVisionDetectorType.text: painter = new TextDetectorPainter(_imageSize, results); break; default: @@ -149,40 +167,62 @@ class _MyHomePageState extends State<_MyHomePage> { appBar: new AppBar( title: const Text('ML Vision Example'), actions: [ - new PopupMenuButton( - onSelected: (Detector result) { - _currentDetector = result; - if (_imageFile != null) _scanImage(_imageFile); + new PopupMenuButton( + onSelected: (FirebaseVisionDetectorType result) { + setState(() { + _currentDetector = result; + if (_imageFile != null) _scanImage(_imageFile); + }); }, - itemBuilder: (BuildContext context) => >[ - const PopupMenuItem( + itemBuilder: (BuildContext context) => + >[ + const PopupMenuItem( child: Text('Detect Barcode'), - value: Detector.barcode, + value: FirebaseVisionDetectorType.barcode, ), - const PopupMenuItem( + const PopupMenuItem( child: Text('Detect Face'), - value: Detector.face, + value: FirebaseVisionDetectorType.face, ), - const PopupMenuItem( + const PopupMenuItem( child: Text('Detect Label'), - value: Detector.label, + value: FirebaseVisionDetectorType.label, ), - const PopupMenuItem( + const PopupMenuItem( child: Text('Detect Text'), - value: Detector.text, + value: FirebaseVisionDetectorType.text, ), ], ), ], + bottom: TabBar( + controller: _tabController, + tabs: [ + const Tab( + icon: const Icon(Icons.photo), + ), + const Tab( + 
icon: const Icon(Icons.camera), + ) + ], + ), ), - body: _imageFile == null - ? const Center(child: Text('No image selected.')) - : _buildImage(), - floatingActionButton: new FloatingActionButton( - onPressed: _getAndScanImage, - tooltip: 'Pick Image', - child: const Icon(Icons.add_a_photo), + body: TabBarView( + controller: _tabController, + children: [ + _imageFile == null + ? const Center(child: Text('No image selected.')) + : _buildImage(), + LivePreview(_currentDetector), + ], ), + floatingActionButton: _selectedPageIndex == 0 + ? new FloatingActionButton( + onPressed: _getAndScanImage, + tooltip: 'Pick Image', + child: const Icon(Icons.add_a_photo), + ) + : null, ); } } diff --git a/packages/firebase_ml_vision/example/pubspec.yaml b/packages/firebase_ml_vision/example/pubspec.yaml index 5bf287f73e79..51a3c58a641b 100644 --- a/packages/firebase_ml_vision/example/pubspec.yaml +++ b/packages/firebase_ml_vision/example/pubspec.yaml @@ -13,6 +13,8 @@ dependencies: dev_dependencies: firebase_ml_vision: path: ../ + camera: + path: ../../camera flutter: uses-material-design: true diff --git a/packages/firebase_ml_vision/ios/Classes/BarcodeDetector.m b/packages/firebase_ml_vision/ios/Classes/BarcodeDetector.m index 6399ee8de9b3..09c5abcd27a4 100644 --- a/packages/firebase_ml_vision/ios/Classes/BarcodeDetector.m +++ b/packages/firebase_ml_vision/ios/Classes/BarcodeDetector.m @@ -3,30 +3,41 @@ @implementation BarcodeDetector static FIRVisionBarcodeDetector *barcodeDetector; -+ (void)handleDetection:(FIRVisionImage *)image ++ (id)sharedInstance { + static BarcodeDetector *sharedInstance = nil; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + sharedInstance = [[self alloc] init]; + }); + return sharedInstance; +} + +- (void)handleDetection:(FIRVisionImage *)image options:(NSDictionary *)options - result:(FlutterResult)result { + finishedCallback:(OperationFinishedCallback)callback + errorCallback:(OperationErrorCallback)errorCallback { if (barcodeDetector == nil) { FIRVision *vision = [FIRVision vision]; barcodeDetector = [vision barcodeDetectorWithOptions:[BarcodeDetector parseOptions:options]]; } NSMutableArray *ret = [NSMutableArray array]; - [barcodeDetector detectInImage:image - completion:^(NSArray *barcodes, NSError *error) { - if (error) { - [FLTFirebaseMlVisionPlugin handleError:error result:result]; - return; - } else if (!barcodes) { - result(@[]); - return; - } - - // Scanned barcode - for (FIRVisionBarcode *barcode in barcodes) { - [ret addObject:visionBarcodeToDictionary(barcode)]; - } - result(ret); - }]; + [barcodeDetector + detectInImage:image + completion:^(NSArray *barcodes, NSError *error) { + if (error) { + [FLTFirebaseMlVisionPlugin handleError:error finishedCallback:errorCallback]; + return; + } else if (!barcodes) { + callback(@[], @"barcode"); + return; + } + + // Scanned barcode + for (FIRVisionBarcode *barcode in barcodes) { + [ret addObject:visionBarcodeToDictionary(barcode)]; + } + callback(ret, @"barcode"); + }]; } NSDictionary *visionBarcodeToDictionary(FIRVisionBarcode *barcode) { diff --git a/packages/firebase_ml_vision/ios/Classes/FaceDetector.m b/packages/firebase_ml_vision/ios/Classes/FaceDetector.m index c1aa9e556964..5f9c25e2e532 100644 --- a/packages/firebase_ml_vision/ios/Classes/FaceDetector.m +++ b/packages/firebase_ml_vision/ios/Classes/FaceDetector.m @@ -3,9 +3,19 @@ @implementation FaceDetector static FIRVisionFaceDetector *faceDetector; -+ (void)handleDetection:(FIRVisionImage *)image ++ (id)sharedInstance { + static FaceDetector 
*sharedInstance = nil; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + sharedInstance = [[self alloc] init]; + }); + return sharedInstance; +} + +- (void)handleDetection:(FIRVisionImage *)image options:(NSDictionary *)options - result:(FlutterResult)result { + finishedCallback:(OperationFinishedCallback)callback + errorCallback:(OperationErrorCallback)errorCallback { FIRVision *vision = [FIRVision vision]; faceDetector = [vision faceDetectorWithOptions:[FaceDetector parseOptions:options]]; @@ -13,10 +23,10 @@ + (void)handleDetection:(FIRVisionImage *)image detectInImage:image completion:^(NSArray *_Nullable faces, NSError *_Nullable error) { if (error) { - [FLTFirebaseMlVisionPlugin handleError:error result:result]; + [FLTFirebaseMlVisionPlugin handleError:error finishedCallback:errorCallback]; return; } else if (!faces) { - result(@[]); + callback(@[], @"face"); return; } @@ -68,7 +78,7 @@ + (void)handleDetection:(FIRVisionImage *)image [faceData addObject:data]; } - result(faceData); + callback(faceData, @"face"); }]; } diff --git a/packages/firebase_ml_vision/ios/Classes/FirebaseMlVisionPlugin.h b/packages/firebase_ml_vision/ios/Classes/FirebaseMlVisionPlugin.h index 247e44b3aaea..59d67339755f 100644 --- a/packages/firebase_ml_vision/ios/Classes/FirebaseMlVisionPlugin.h +++ b/packages/firebase_ml_vision/ios/Classes/FirebaseMlVisionPlugin.h @@ -2,15 +2,24 @@ #import "Firebase/Firebase.h" +/* + A callback type to allow the caller to format the detected response + before it is sent back to Flutter + */ +typedef void (^OperationFinishedCallback)(id _Nullable result, NSString *detectorType); +typedef void (^OperationErrorCallback)(FlutterError *error); + @interface FLTFirebaseMlVisionPlugin : NSObject -+ (void)handleError:(NSError *)error result:(FlutterResult)result; ++ (void)handleError:(NSError *)error finishedCallback:(OperationErrorCallback)callback; @end @protocol Detector @required -+ (void)handleDetection:(FIRVisionImage *)image ++ (id)sharedInstance; +- (void)handleDetection:(FIRVisionImage *)image options:(NSDictionary *)options - result:(FlutterResult)result; + finishedCallback:(OperationFinishedCallback)callback + errorCallback:(OperationErrorCallback)error; @optional @end diff --git a/packages/firebase_ml_vision/ios/Classes/FirebaseMlVisionPlugin.m b/packages/firebase_ml_vision/ios/Classes/FirebaseMlVisionPlugin.m index 9561890ae503..548434c6bba0 100644 --- a/packages/firebase_ml_vision/ios/Classes/FirebaseMlVisionPlugin.m +++ b/packages/firebase_ml_vision/ios/Classes/FirebaseMlVisionPlugin.m @@ -1,53 +1,185 @@ #import "FirebaseMlVisionPlugin.h" +#import "LiveView.h" +#import "NSError+FlutterError.h" -@interface NSError (FlutterError) -@property(readonly, nonatomic) FlutterError *flutterError; -@end - -@implementation NSError (FlutterError) -- (FlutterError *)flutterError { - return [FlutterError errorWithCode:[NSString stringWithFormat:@"Error %d", (int)self.code] - message:self.domain - details:self.localizedDescription]; -} +@interface FLTFirebaseMlVisionPlugin () +@property(readonly, nonatomic) NSObject *registry; +@property(readonly, nonatomic) NSObject *messenger; +@property(readonly, nonatomic) LiveView *camera; @end @implementation FLTFirebaseMlVisionPlugin -+ (void)handleError:(NSError *)error result:(FlutterResult)result { - result([error flutterError]); ++ (void)handleError:(NSError *)error finishedCallback:(OperationErrorCallback)callback { + callback([error flutterError]); } + (void)registerWithRegistrar:(NSObject *)registrar { 
FlutterMethodChannel *channel = [FlutterMethodChannel methodChannelWithName:@"plugins.flutter.io/firebase_ml_vision" binaryMessenger:[registrar messenger]]; - FLTFirebaseMlVisionPlugin *instance = [[FLTFirebaseMlVisionPlugin alloc] init]; + FLTFirebaseMlVisionPlugin *instance = + [[FLTFirebaseMlVisionPlugin alloc] initWithRegistry:[registrar textures] + messenger:[registrar messenger]]; [registrar addMethodCallDelegate:instance channel:channel]; } -- (instancetype)init { +- (instancetype)initWithRegistry:(NSObject *)registry + messenger:(NSObject *)messenger { self = [super init]; if (self) { if (![FIRApp defaultApp]) { [FIRApp configure]; } } + _registry = registry; + _messenger = messenger; return self; } - (void)handleMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result { - FIRVisionImage *image = [self filePathToVisionImage:call.arguments[@"path"]]; - NSDictionary *options = call.arguments[@"options"]; - if ([@"BarcodeDetector#detectInImage" isEqualToString:call.method]) { - [BarcodeDetector handleDetection:image options:options result:result]; - } else if ([@"FaceDetector#detectInImage" isEqualToString:call.method]) { - [FaceDetector handleDetection:image options:options result:result]; - } else if ([@"LabelDetector#detectInImage" isEqualToString:call.method]) { - [LabelDetector handleDetection:image options:options result:result]; - } else if ([@"TextDetector#detectInImage" isEqualToString:call.method]) { - [TextDetector handleDetection:image options:options result:result]; + if ([@"init" isEqualToString:call.method]) { + if (_camera) { + [_camera close]; + } + result(nil); + } else if ([@"availableCameras" isEqualToString:call.method]) { + NSArray *devices; + if (@available(iOS 10.0, *)) { + AVCaptureDeviceDiscoverySession *discoverySession = [AVCaptureDeviceDiscoverySession + discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ] + mediaType:AVMediaTypeVideo + position:AVCaptureDevicePositionUnspecified]; + devices = discoverySession.devices; + } else { + // Fallback on earlier versions + devices = AVCaptureDevice.devices; + } + NSMutableArray *> *reply = + [[NSMutableArray alloc] initWithCapacity:devices.count]; + for (AVCaptureDevice *device in devices) { + NSString *lensFacing; + switch ([device position]) { + case AVCaptureDevicePositionBack: + lensFacing = @"back"; + break; + case AVCaptureDevicePositionFront: + lensFacing = @"front"; + break; + case AVCaptureDevicePositionUnspecified: + lensFacing = @"external"; + break; + } + [reply addObject:@{ + @"name" : [device uniqueID], + @"lensFacing" : lensFacing, + }]; + } + result(reply); + } else if ([@"initialize" isEqualToString:call.method]) { + NSString *cameraName = call.arguments[@"cameraName"]; + NSString *resolutionPreset = call.arguments[@"resolutionPreset"]; + NSError *error; + LiveView *cam = [[LiveView alloc] initWithCameraName:cameraName + resolutionPreset:resolutionPreset + error:&error]; + if (error) { + result([error flutterError]); + } else { + if (_camera) { + [_camera close]; + } + int64_t textureId = [_registry registerTexture:cam]; + _camera = cam; + cam.onFrameAvailable = ^{ + [self->_registry textureFrameAvailable:textureId]; + }; + FlutterEventChannel *eventChannel = [FlutterEventChannel + eventChannelWithName: + [NSString + stringWithFormat:@"plugins.flutter.io/firebase_ml_vision/liveViewEvents%lld", + textureId] + binaryMessenger:_messenger]; + [eventChannel setStreamHandler:cam]; + cam.eventChannel = eventChannel; + cam.onSizeAvailable = ^(CGSize previewSize, CGSize 
captureSize) { + result(@{ + @"textureId" : @(textureId), + @"previewWidth" : @(previewSize.width), + @"previewHeight" : @(previewSize.height), + @"captureWidth" : @(captureSize.width), + @"captureHeight" : @(captureSize.height), + }); + }; + [cam start]; + } + } else if ([@"dispose" isEqualToString:call.method]) { + NSDictionary *argsMap = call.arguments; + NSUInteger textureId = ((NSNumber *)argsMap[@"textureId"]).unsignedIntegerValue; + [_registry unregisterTexture:textureId]; + [_camera close]; + result(nil); + } else if ([@"LiveView#setDetector" isEqualToString:call.method]) { + NSDictionary *argsMap = call.arguments; + NSString *detectorType = ((NSString *)argsMap[@"detectorType"]); + NSDictionary *options = call.arguments[@"options"]; + id detector = [FLTFirebaseMlVisionPlugin detectorForDetectorTypeString:detectorType]; + if (_camera) { + [_camera setDetector:detector withOptions:options]; + } + result(nil); + } else { + // image file detection + FIRVisionImage *image = [self filePathToVisionImage:call.arguments[@"path"]]; + NSDictionary *options = call.arguments[@"options"]; + if ([@"BarcodeDetector#detectInImage" isEqualToString:call.method]) { + [[BarcodeDetector sharedInstance] handleDetection:image + options:options + finishedCallback:[self handleSuccess:result] + errorCallback:[self handleError:result]]; + } else if ([@"FaceDetector#detectInImage" isEqualToString:call.method]) { + [[FaceDetector sharedInstance] handleDetection:image + options:options + finishedCallback:[self handleSuccess:result] + errorCallback:[self handleError:result]]; + } else if ([@"LabelDetector#detectInImage" isEqualToString:call.method]) { + [[LabelDetector sharedInstance] handleDetection:image + options:options + finishedCallback:[self handleSuccess:result] + errorCallback:[self handleError:result]]; + } else if ([@"TextDetector#detectInImage" isEqualToString:call.method]) { + [[TextDetector sharedInstance] handleDetection:image + options:options + finishedCallback:[self handleSuccess:result] + errorCallback:[self handleError:result]]; + } else { + result(FlutterMethodNotImplemented); + } + } +} + +- (OperationFinishedCallback)handleSuccess:(FlutterResult)result { + return ^(id _Nullable r, NSString *detectorType) { + result(r); + }; +} + +- (OperationErrorCallback)handleError:(FlutterResult)result { + return ^(FlutterError *error) { + result(error); + }; +} + ++ (NSObject *)detectorForDetectorTypeString:(NSString *)detectorType { + if ([detectorType isEqualToString:@"text"]) { + return [TextDetector sharedInstance]; + } else if ([detectorType isEqualToString:@"barcode"]) { + return [BarcodeDetector sharedInstance]; + } else if ([detectorType isEqualToString:@"label"]) { + return [LabelDetector sharedInstance]; + } else if ([detectorType isEqualToString:@"face"]) { + return [FaceDetector sharedInstance]; } else { - result(FlutterMethodNotImplemented); + return [TextDetector sharedInstance]; } } diff --git a/packages/firebase_ml_vision/ios/Classes/LabelDetector.m b/packages/firebase_ml_vision/ios/Classes/LabelDetector.m index 7c381779d843..c89416aedfe2 100644 --- a/packages/firebase_ml_vision/ios/Classes/LabelDetector.m +++ b/packages/firebase_ml_vision/ios/Classes/LabelDetector.m @@ -3,19 +3,29 @@ @implementation LabelDetector static FIRVisionLabelDetector *detector; -+ (void)handleDetection:(FIRVisionImage *)image ++ (id)sharedInstance { + static LabelDetector *sharedInstance = nil; + static dispatch_once_t onceToken; + dispatch_once(&onceToken, ^{ + sharedInstance = [[self alloc] init]; + }); + 
return sharedInstance; +} + +- (void)handleDetection:(FIRVisionImage *)image options:(NSDictionary *)options - result:(FlutterResult)result { + finishedCallback:(OperationFinishedCallback)callback + errorCallback:(OperationErrorCallback)errorCallback { FIRVision *vision = [FIRVision vision]; detector = [vision labelDetectorWithOptions:[LabelDetector parseOptions:options]]; [detector detectInImage:image completion:^(NSArray *_Nullable labels, NSError *_Nullable error) { if (error) { - [FLTFirebaseMlVisionPlugin handleError:error result:result]; + [FLTFirebaseMlVisionPlugin handleError:error finishedCallback:errorCallback]; return; } else if (!labels) { - result(@[]); + callback(@[], @"label"); } NSMutableArray *labelData = [NSMutableArray array]; @@ -28,7 +38,7 @@ + (void)handleDetection:(FIRVisionImage *)image [labelData addObject:data]; } - result(labelData); + callback(labelData, @"label"); }]; } diff --git a/packages/firebase_ml_vision/ios/Classes/LiveView.h b/packages/firebase_ml_vision/ios/Classes/LiveView.h new file mode 100644 index 000000000000..be5791e1ee1d --- /dev/null +++ b/packages/firebase_ml_vision/ios/Classes/LiveView.h @@ -0,0 +1,30 @@ +#import +#import +#import +#import "FirebaseMlVisionPlugin.h" +@import FirebaseMLVision; + +@interface LiveView : NSObject +@property(readonly, nonatomic) int64_t textureId; +@property(nonatomic) bool isUsingFrontCamera; +@property(nonatomic, copy) void (^onFrameAvailable)(void); +@property(nonatomic, copy) void (^onSizeAvailable)(CGSize previewSize, CGSize captureSize); +@property(nonatomic) FlutterEventChannel *eventChannel; +@property(nonatomic) FlutterEventSink eventSink; +@property(readonly, nonatomic) AVCaptureSession *captureSession; +@property(readonly, nonatomic) AVCaptureDevice *captureDevice; +@property(readonly, nonatomic) AVCaptureVideoDataOutput *captureVideoOutput; +@property(readonly, nonatomic) AVCaptureInput *captureVideoInput; +@property(readonly) CVPixelBufferRef volatile latestPixelBuffer; +@property(readonly, nonatomic) CGSize previewSize; +@property(readonly, nonatomic) CGSize captureSize; +@property(strong, nonatomic) AVCaptureVideoDataOutput *videoOutput; +- (instancetype)initWithCameraName:(NSString *)cameraName + resolutionPreset:(NSString *)resolutionPreset + error:(NSError **)error; +- (void)start; +- (void)stop; +- (void)close; +- (void)setDetector:(NSObject *)detector withOptions:(NSDictionary *)detectorOptions; +@end diff --git a/packages/firebase_ml_vision/ios/Classes/LiveView.m b/packages/firebase_ml_vision/ios/Classes/LiveView.m new file mode 100644 index 000000000000..9e99bd776937 --- /dev/null +++ b/packages/firebase_ml_vision/ios/Classes/LiveView.m @@ -0,0 +1,219 @@ +#import "LiveView.h" +#import +#import +#import "FirebaseMlVisionPlugin.h" +#import "NSError+FlutterError.h" +#import "UIUtilities.h" + +static NSString *const sessionQueueLabel = + @"io.flutter.plugins.firebaseml.visiondetector.SessionQueue"; +static NSString *const videoDataOutputQueueLabel = + @"io.flutter.plugins.firebaseml.visiondetector.VideoDataOutputQueue"; + +@interface LiveView () +@property(assign, atomic) BOOL isRecognizing; +@property(nonatomic) dispatch_queue_t sessionQueue; +@property(strong, nonatomic) NSObject *currentDetector; +@property(strong, nonatomic) NSDictionary *currentDetectorOptions; +@end + +@implementation LiveView +- (instancetype)initWithCameraName:(NSString *)cameraName + resolutionPreset:(NSString *)resolutionPreset + error:(NSError **)error { + self = [super init]; + NSAssert(self, @"super init cannot be 
nil"); + + // Configure Captgure Session + + _isUsingFrontCamera = NO; + _captureSession = [[AVCaptureSession alloc] init]; + _sessionQueue = dispatch_queue_create(sessionQueueLabel.UTF8String, nil); + + // base example uses AVCaptureVideoPreviewLayer here and the layer is added to a view, Flutter + // Texture works differently here + [self setUpCaptureSessionOutputWithResolutionPreset:resolutionPreset]; + [self setUpCaptureSessionInputWithCameraName:cameraName]; + + return self; +} + +- (void)setDetector:(NSObject *)detector withOptions:(NSDictionary *)detectorOptions { + _currentDetector = detector; + _currentDetectorOptions = detectorOptions; +} + +- (AVCaptureSessionPreset)resolutionPresetForPreference:(NSString *)preference { + AVCaptureSessionPreset preset; + if ([preference isEqualToString:@"high"]) { + preset = AVCaptureSessionPresetHigh; + } else if ([preference isEqualToString:@"medium"]) { + preset = AVCaptureSessionPresetMedium; + } else { + NSAssert([preference isEqualToString:@"low"], @"Unknown resolution preset %@", preference); + preset = AVCaptureSessionPresetLow; + } + return preset; +} + +- (void)setUpCaptureSessionOutputWithResolutionPreset:(NSString *)resolutionPreset { + dispatch_async(_sessionQueue, ^{ + [self->_captureSession beginConfiguration]; + self->_captureSession.sessionPreset = [self resolutionPresetForPreference:resolutionPreset]; + + self->_captureVideoOutput = [[AVCaptureVideoDataOutput alloc] init]; + self->_captureVideoOutput.videoSettings = @{ + (id) + kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] + }; + dispatch_queue_t outputQueue = dispatch_queue_create(videoDataOutputQueueLabel.UTF8String, nil); + [self->_captureVideoOutput setSampleBufferDelegate:self queue:outputQueue]; + if ([self.captureSession canAddOutput:self->_captureVideoOutput]) { + [self.captureSession addOutputWithNoConnections:self->_captureVideoOutput]; + [self.captureSession commitConfiguration]; + } else { + NSLog(@"%@", @"Failed to add capture session output."); + } + }); +} + +- (void)setUpCaptureSessionInputWithCameraName:(NSString *)cameraName { + dispatch_async(_sessionQueue, ^{ + AVCaptureDevice *device = [AVCaptureDevice deviceWithUniqueID:cameraName]; + CMVideoDimensions dimensions = + CMVideoFormatDescriptionGetDimensions([[device activeFormat] formatDescription]); + self->_previewSize = CGSizeMake(dimensions.width, dimensions.height); + if (self->_onSizeAvailable) { + self->_onSizeAvailable(self->_previewSize, self->_captureSize); + } + if (device) { + NSArray *currentInputs = self.captureSession.inputs; + for (AVCaptureInput *input in currentInputs) { + [self.captureSession removeInput:input]; + } + NSError *error; + self->_captureVideoInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error]; + + if (error) { + NSLog(@"Failed to create capture device input: %@", error.localizedDescription); + return; + } else { + AVCaptureConnection *connection = + [AVCaptureConnection connectionWithInputPorts:self->_captureVideoInput.ports + output:self->_captureVideoOutput]; + // connection.videoOrientation = AVCaptureVideoOrientationPortrait; + if ([self->_captureDevice position] == AVCaptureDevicePositionFront) { + connection.videoMirrored = YES; + } + [self->_captureSession addInputWithNoConnections:self->_captureVideoInput]; + [self->_captureSession addConnection:connection]; + } + } + }); +} + +- (void)start { + dispatch_async(_sessionQueue, ^{ + [self->_captureSession startRunning]; + }); +} + +- (void)stop { + 
+- (void)stop {
+  dispatch_async(_sessionQueue, ^{
+    [self->_captureSession stopRunning];
+  });
+}
+
+- (void)captureOutput:(AVCaptureOutput *)output
+    didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+           fromConnection:(AVCaptureConnection *)connection {
+  // Check readiness before touching the buffer, and skip the sample if it isn't.
+  if (!CMSampleBufferDataIsReady(sampleBuffer)) {
+    if (_eventSink) {
+      _eventSink(@{
+        @"event" : @"error",
+        @"errorDescription" : @"sample buffer is not ready. Skipping sample"
+      });
+    }
+    return;
+  }
+  CVImageBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+  if (newBuffer) {
+    if (!_isRecognizing) {
+      // Throttle: run at most one detection at a time and drop frames meanwhile.
+      _isRecognizing = YES;
+      FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithBuffer:sampleBuffer];
+      FIRVisionImageMetadata *metadata = [[FIRVisionImageMetadata alloc] init];
+      UIImageOrientation orientation = [UIUtilities
+          imageOrientationFromDevicePosition:_isUsingFrontCamera ? AVCaptureDevicePositionFront
+                                                                 : AVCaptureDevicePositionBack];
+      FIRVisionDetectorImageOrientation visionOrientation =
+          [UIUtilities visionImageOrientationFromImageOrientation:orientation];
+
+      metadata.orientation = visionOrientation;
+      visionImage.metadata = metadata;
+      [_currentDetector handleDetection:visionImage
+          options:_currentDetectorOptions
+          finishedCallback:^(id _Nullable result, NSString *detectorType) {
+            self->_isRecognizing = NO;
+            if (self->_eventSink != nil) {
+              self->_eventSink(@{
+                @"eventType" : @"detection",
+                @"detectionType" : detectorType,
+                @"data" : result
+              });
+            }
+          }
+          errorCallback:^(FlutterError *error) {
+            self->_isRecognizing = NO;
+            if (self->_eventSink != nil) {
+              self->_eventSink(error);
+            }
+          }];
+    }
+    // Publish the frame for copyPixelBuffer; release whichever buffer it displaces.
+    CFRetain(newBuffer);
+    CVPixelBufferRef old = _latestPixelBuffer;
+    while (!OSAtomicCompareAndSwapPtrBarrier(old, newBuffer, (void **)&_latestPixelBuffer)) {
+      old = _latestPixelBuffer;
+    }
+    if (old != nil) {
+      CFRelease(old);
+    }
+    if (_onFrameAvailable) {
+      _onFrameAvailable();
+    }
+  }
+}
+
+- (void)close {
+  [_captureSession stopRunning];
+  for (AVCaptureInput *input in [_captureSession inputs]) {
+    [_captureSession removeInput:input];
+  }
+  for (AVCaptureOutput *output in [_captureSession outputs]) {
+    [_captureSession removeOutput:output];
+  }
+}
+
+- (void)dealloc {
+  if (_latestPixelBuffer) {
+    CFRelease(_latestPixelBuffer);
+  }
+}
+
+- (CVPixelBufferRef)copyPixelBuffer {
+  // Hand the latest frame to Flutter; the engine releases it after upload.
+  CVPixelBufferRef pixelBuffer = _latestPixelBuffer;
+  while (!OSAtomicCompareAndSwapPtrBarrier(pixelBuffer, nil, (void **)&_latestPixelBuffer)) {
+    pixelBuffer = _latestPixelBuffer;
+  }
+  return pixelBuffer;
+}
+
+- (FlutterError *_Nullable)onCancelWithArguments:(id _Nullable)arguments {
+  _eventSink = nil;
+  return nil;
+}
+
+- (FlutterError *_Nullable)onListenWithArguments:(id _Nullable)arguments
+                                        eventSink:(nonnull FlutterEventSink)events {
+  _eventSink = events;
+  return nil;
+}
+
+@end
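The two OSAtomicCompareAndSwapPtrBarrier loops above implement a lock-free, single-slot frame handoff: the capture thread retains a buffer before publishing it into `_latestPixelBuffer`, and whichever side swaps a buffer *out* of the slot becomes responsible for releasing it. OSAtomic* has been deprecated since iOS 10; a sketch of the same handoff with C11 atomics, assuming a deployment target where <stdatomic.h> is available (this is not what the PR ships):

    #import <CoreVideo/CoreVideo.h>
    #import <stdatomic.h>

    static _Atomic(CVPixelBufferRef) latestPixelBuffer;

    // Producer (capture callback): retain, publish, release whatever was displaced.
    static void publishFrame(CVPixelBufferRef newBuffer) {
      CFRetain(newBuffer);
      CVPixelBufferRef displaced = atomic_exchange(&latestPixelBuffer, newBuffer);
      if (displaced) CFRelease(displaced);
    }

    // Consumer (copyPixelBuffer): take ownership of the slot's contents, if any.
    static CVPixelBufferRef takeFrame(void) {
      // The caller releases the returned buffer, matching FlutterTexture's
      // copyPixelBuffer contract of returning a +1 reference.
      return atomic_exchange(&latestPixelBuffer, NULL);
    }

Either formulation guarantees that exactly one owner releases each buffer, even when the capture thread and the raster thread touch the slot concurrently.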
diff --git a/packages/firebase_ml_vision/ios/Classes/NSError+FlutterError.h b/packages/firebase_ml_vision/ios/Classes/NSError+FlutterError.h
new file mode 100644
index 000000000000..1db09ddc1223
--- /dev/null
+++ b/packages/firebase_ml_vision/ios/Classes/NSError+FlutterError.h
@@ -0,0 +1,12 @@
+//
+// NSError+FlutterError.h
+// firebase_ml_vision
+//
+// Created by Dustin Graham on 7/19/18.
+//
+#import <Flutter/Flutter.h>
+#import <Foundation/Foundation.h>
+
+@interface NSError (FlutterError)
+@property(readonly, nonatomic) FlutterError *flutterError;
+@end
diff --git a/packages/firebase_ml_vision/ios/Classes/NSError+FlutterError.m b/packages/firebase_ml_vision/ios/Classes/NSError+FlutterError.m
new file mode 100644
index 000000000000..92d6a2bb57aa
--- /dev/null
+++ b/packages/firebase_ml_vision/ios/Classes/NSError+FlutterError.m
@@ -0,0 +1,16 @@
+//
+// NSError+FlutterError.m
+// firebase_ml_vision
+//
+// Created by Dustin Graham on 7/19/18.
+//
+
+#import "NSError+FlutterError.h"
+
+@implementation NSError (FlutterError)
+- (FlutterError *)flutterError {
+  return [FlutterError errorWithCode:[NSString stringWithFormat:@"Error %d", (int)self.code]
+                             message:self.domain
+                             details:self.localizedDescription];
+}
+@end
diff --git a/packages/firebase_ml_vision/ios/Classes/TextDetector.m b/packages/firebase_ml_vision/ios/Classes/TextDetector.m
index 83d2cdfb10ec..99a7e9003a23 100644
--- a/packages/firebase_ml_vision/ios/Classes/TextDetector.m
+++ b/packages/firebase_ml_vision/ios/Classes/TextDetector.m
@@ -3,9 +3,19 @@
 @implementation TextDetector
 static FIRVisionTextDetector *textDetector;
 
-+ (void)handleDetection:(FIRVisionImage *)image
++ (id)sharedInstance {
+  static TextDetector *sharedInstance = nil;
+  static dispatch_once_t onceToken;
+  dispatch_once(&onceToken, ^{
+    sharedInstance = [[self alloc] init];
+  });
+  return sharedInstance;
+}
+
+- (void)handleDetection:(FIRVisionImage *)image
                 options:(NSDictionary *)options
-                 result:(FlutterResult)result {
+       finishedCallback:(OperationFinishedCallback)callback
+          errorCallback:(OperationErrorCallback)errorCallback {
   if (textDetector == nil) {
     FIRVision *vision = [FIRVision vision];
     textDetector = [vision textDetector];
@@ -15,10 +25,10 @@
       detectInImage:image
          completion:^(NSArray<id<FIRVisionText>> *_Nullable features, NSError *_Nullable error) {
            if (error) {
-             [FLTFirebaseMlVisionPlugin handleError:error result:result];
+             [FLTFirebaseMlVisionPlugin handleError:error finishedCallback:errorCallback];
              return;
            } else if (!features) {
-             result(@[]);
+             callback(@[], @"text");
              return;
            }
 
@@ -64,11 +74,11 @@
             [blocks addObject:blockData];
           }
 
-          result(blocks);
+          callback(blocks, @"text");
         }];
 }
 
-+ (NSDictionary *)getTextData:(CGRect)frame
+- (NSDictionary *)getTextData:(CGRect)frame
                  cornerPoints:(NSArray *)cornerPoints
                          text:(NSString *)text {
   __block NSMutableArray *points = [NSMutableArray array];
@@ -87,7 +97,7 @@
   };
 }
 
-+ (NSMutableArray *)getLineData:(NSArray *)lines {
+- (NSMutableArray *)getLineData:(NSArray *)lines {
   NSMutableArray *lineDataArray = [NSMutableArray array];
 
   for (FIRVisionTextLine *line in lines) {
@@ -102,7 +112,7 @@
   return lineDataArray;
 }
 
-+ (NSMutableArray *)getElementData:(NSArray *)elements {
+- (NSMutableArray *)getElementData:(NSArray *)elements {
   NSMutableArray *elementDataArray = [NSMutableArray array];
 
   for (FIRVisionTextElement *element in elements) {
@@ -113,4 +123,5 @@
   return elementDataArray;
 }
+
 @end
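The NSError+FlutterError category above gives every NSError a ready-made FlutterError, keeping the code/message/details mapping in one place. A hypothetical call site, shown only to illustrate the intended use (the detectors in this diff still route errors through FLTFirebaseMlVisionPlugin's handleError instead):

    // Inside any detector completion handler:
    if (error) {
      errorCallback(error.flutterError);  // NSError -> FlutterError via the category
      return;
    }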
diff --git a/packages/firebase_ml_vision/ios/Classes/UIUtilities.h b/packages/firebase_ml_vision/ios/Classes/UIUtilities.h
new file mode 100644
index 000000000000..94e32d2ca4ff
--- /dev/null
+++ b/packages/firebase_ml_vision/ios/Classes/UIUtilities.h
@@ -0,0 +1,29 @@
+//
+// Copyright (c) 2018 Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+#import <Foundation/Foundation.h>
+@import AVFoundation;
+@import UIKit;
+@import FirebaseMLVision;
+
+@interface UIUtilities : NSObject
++ (void)imageOrientation;
++ (UIImageOrientation)imageOrientationFromDevicePosition:(AVCaptureDevicePosition)devicePosition;
++ (FIRVisionDetectorImageOrientation)visionImageOrientationFromImageOrientation:
+    (UIImageOrientation)imageOrientation;
++ (UIDeviceOrientation)currentUIOrientation;
+
+@end
diff --git a/packages/firebase_ml_vision/ios/Classes/UIUtilities.m b/packages/firebase_ml_vision/ios/Classes/UIUtilities.m
new file mode 100644
index 000000000000..191da95542e7
--- /dev/null
+++ b/packages/firebase_ml_vision/ios/Classes/UIUtilities.m
@@ -0,0 +1,99 @@
+//
+// Copyright (c) 2018 Google Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+#import "UIUtilities.h"
+
+@implementation UIUtilities
+
++ (void)imageOrientation {
+  [self imageOrientationFromDevicePosition:AVCaptureDevicePositionBack];
+}
+
++ (UIImageOrientation)imageOrientationFromDevicePosition:(AVCaptureDevicePosition)devicePosition {
+  UIDeviceOrientation deviceOrientation = UIDevice.currentDevice.orientation;
+  if (deviceOrientation == UIDeviceOrientationFaceDown ||
+      deviceOrientation == UIDeviceOrientationFaceUp ||
+      deviceOrientation == UIDeviceOrientationUnknown) {
+    deviceOrientation = [self currentUIOrientation];
+  }
+  switch (deviceOrientation) {
+    case UIDeviceOrientationPortrait:
+      return devicePosition == AVCaptureDevicePositionFront ? UIImageOrientationLeftMirrored
+                                                            : UIImageOrientationRight;
+    case UIDeviceOrientationLandscapeLeft:
+      return devicePosition == AVCaptureDevicePositionFront ? UIImageOrientationDownMirrored
+                                                            : UIImageOrientationUp;
+    case UIDeviceOrientationPortraitUpsideDown:
+      return devicePosition == AVCaptureDevicePositionFront ? UIImageOrientationRightMirrored
+                                                            : UIImageOrientationLeft;
+    case UIDeviceOrientationLandscapeRight:
+      return devicePosition == AVCaptureDevicePositionFront ? UIImageOrientationUpMirrored
+                                                            : UIImageOrientationDown;
+    case UIDeviceOrientationFaceDown:
+    case UIDeviceOrientationFaceUp:
+    case UIDeviceOrientationUnknown:
+      return UIImageOrientationUp;
+  }
+}
+
++ (FIRVisionDetectorImageOrientation)visionImageOrientationFromImageOrientation:
+    (UIImageOrientation)imageOrientation {
+  switch (imageOrientation) {
+    case UIImageOrientationUp:
+      return FIRVisionDetectorImageOrientationTopLeft;
+    case UIImageOrientationDown:
+      return FIRVisionDetectorImageOrientationBottomRight;
+    case UIImageOrientationLeft:
+      return FIRVisionDetectorImageOrientationLeftBottom;
+    case UIImageOrientationRight:
+      return FIRVisionDetectorImageOrientationRightTop;
+    case UIImageOrientationUpMirrored:
+      return FIRVisionDetectorImageOrientationTopRight;
+    case UIImageOrientationDownMirrored:
+      return FIRVisionDetectorImageOrientationBottomLeft;
+    case UIImageOrientationLeftMirrored:
+      return FIRVisionDetectorImageOrientationLeftTop;
+    case UIImageOrientationRightMirrored:
+      return FIRVisionDetectorImageOrientationRightBottom;
+  }
+}
+
++ (UIDeviceOrientation)currentUIOrientation {
+  UIDeviceOrientation (^deviceOrientation)(void) = ^UIDeviceOrientation(void) {
+    switch (UIApplication.sharedApplication.statusBarOrientation) {
+      case UIInterfaceOrientationLandscapeLeft:
+        return UIDeviceOrientationLandscapeRight;
+      case UIInterfaceOrientationLandscapeRight:
+        return UIDeviceOrientationLandscapeLeft;
+      case UIInterfaceOrientationPortraitUpsideDown:
+        return UIDeviceOrientationPortraitUpsideDown;
+      case UIInterfaceOrientationPortrait:
+      case UIInterfaceOrientationUnknown:
+        return UIDeviceOrientationPortrait;
+    }
+  };
+
+  if (NSThread.isMainThread) {
+    return deviceOrientation();
+  } else {
+    __block UIDeviceOrientation currentOrientation = UIDeviceOrientationPortrait;
+    dispatch_sync(dispatch_get_main_queue(), ^{
+      currentOrientation = deviceOrientation();
+    });
+    return currentOrientation;
+  }
+}
+@end
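ML Kit interprets a pixel buffer relative to its top-left corner, so the detectors must be told how the sensor data is rotated with respect to the upright UI. These helpers chain device orientation to UIImageOrientation to FIRVisionDetectorImageOrientation; condensed from the captureOutput: path in LiveView.m above (sampleBuffer is the delegate callback's argument):

    FIRVisionImage *visionImage = [[FIRVisionImage alloc] initWithBuffer:sampleBuffer];
    UIImageOrientation orientation =
        [UIUtilities imageOrientationFromDevicePosition:AVCaptureDevicePositionBack];
    FIRVisionImageMetadata *metadata = [[FIRVisionImageMetadata alloc] init];
    metadata.orientation = [UIUtilities visionImageOrientationFromImageOrientation:orientation];
    visionImage.metadata = metadata;  // detectors can now map buffer rows to upright coordinates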
diff --git a/packages/firebase_ml_vision/lib/firebase_ml_vision.dart b/packages/firebase_ml_vision/lib/firebase_ml_vision.dart
index 664f6eb18f22..a9852bfeb7f0 100644
--- a/packages/firebase_ml_vision/lib/firebase_ml_vision.dart
+++ b/packages/firebase_ml_vision/lib/firebase_ml_vision.dart
@@ -7,12 +7,17 @@ library firebase_ml_vision;
 import 'dart:async';
 import 'dart:io';
 import 'dart:math';
+import 'dart:ui';
 
 import 'package:flutter/foundation.dart';
 import 'package:flutter/services.dart';
+import 'package:flutter/material.dart';
 
 part 'src/barcode_detector.dart';
 part 'src/face_detector.dart';
 part 'src/firebase_vision.dart';
 part 'src/label_detector.dart';
 part 'src/text_detector.dart';
+part 'src/live_view.dart';
+part 'src/live_view_detection_result.dart';
+part 'src/vision_options.dart';
diff --git a/packages/firebase_ml_vision/lib/src/barcode_detector.dart b/packages/firebase_ml_vision/lib/src/barcode_detector.dart
index 9f532b940630..c739be72cc7d 100644
--- a/packages/firebase_ml_vision/lib/src/barcode_detector.dart
+++ b/packages/firebase_ml_vision/lib/src/barcode_detector.dart
@@ -190,15 +190,13 @@
       'BarcodeDetector#detectInImage',
       {
         'path': visionImage.imageFile.path,
-        'options': {
-          'barcodeFormats': options.barcodeFormats.value,
-        },
+        'options': options.toMap(),
       },
     );
 
     final List<Barcode> barcodes = <Barcode>[];
     reply.forEach((dynamic barcode) {
-      barcodes.add(new Barcode._(barcode));
+      barcodes.add(new Barcode(barcode));
     });
 
     return barcodes;
@@ -215,15 +213,22 @@
 /// final BarcodeDetectorOptions options =
 ///     BarcodeDetectorOptions(barcodeFormats: BarcodeFormat.aztec | BarcodeFormat.ean8);
 /// ```
-class BarcodeDetectorOptions {
+class BarcodeDetectorOptions implements VisionOptions {
   const BarcodeDetectorOptions({this.barcodeFormats = BarcodeFormat.all});
 
   final BarcodeFormat barcodeFormats;
+
+  @override
+  Map<String, dynamic> toMap() {
+    return <String, dynamic>{
+      'barcodeFormats': barcodeFormats.value,
+    };
+  }
 }
 
 /// Represents a single recognized barcode and its value.
 class Barcode {
-  Barcode._(Map<dynamic, dynamic> _data)
+  Barcode(Map<dynamic, dynamic> _data)
       : boundingBox = _data['left'] != null
             ? Rectangle(
                 _data['left'],
diff --git a/packages/firebase_ml_vision/lib/src/face_detector.dart b/packages/firebase_ml_vision/lib/src/face_detector.dart
index 560f2a7448cf..db626190ad06 100644
--- a/packages/firebase_ml_vision/lib/src/face_detector.dart
+++ b/packages/firebase_ml_vision/lib/src/face_detector.dart
@@ -45,19 +45,13 @@
       'FaceDetector#detectInImage',
       {
         'path': visionImage.imageFile.path,
-        'options': {
-          'enableClassification': options.enableClassification,
-          'enableLandmarks': options.enableLandmarks,
-          'enableTracking': options.enableTracking,
-          'minFaceSize': options.minFaceSize,
-          'mode': _enumToString(options.mode),
-        },
+        'options': options.toMap(),
       },
     );
 
     final List<Face> faces = <Face>[];
     for (dynamic data in reply) {
-      faces.add(Face._(data));
+      faces.add(Face(data));
     }
 
     return faces;
@@ -68,7 +62,7 @@
 ///
 /// Used to configure features such as classification, face tracking, speed,
 /// etc.
-class FaceDetectorOptions {
+class FaceDetectorOptions implements VisionOptions {
   /// Constructor for [FaceDetectorOptions].
   ///
   /// The parameter minFaceValue must be between 0.0 and 1.0, inclusive.
@@ -104,11 +98,22 @@
   /// Option for controlling additional accuracy / speed trade-offs.
   final FaceDetectorMode mode;
+
+  @override
+  Map<String, dynamic> toMap() {
+    return <String, dynamic>{
+      'enableClassification': enableClassification,
+      'enableLandmarks': enableLandmarks,
+      'enableTracking': enableTracking,
+      'minFaceSize': minFaceSize,
+      'mode': _enumToString(mode),
+    };
+  }
 }
 
 /// Represents a face detected by [FaceDetector].
 class Face {
-  Face._(dynamic data)
+  Face(dynamic data)
       : boundingBox = Rectangle(
           data['left'],
           data['top'],
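Each options class now serializes itself with toMap(), so both the detectInImage calls and LiveView#setDetector hand the platform side one plain dictionary instead of hand-built maps at every call site. A sketch of how the iOS side might rebuild native options from that map; FaceDetector's actual parsing is not part of this diff, and the FIRVisionFaceDetectorOptions property names below are assumed from the ML Kit SDK of this era:

    // Hypothetical counterpart to FaceDetectorOptions.toMap() on the iOS side.
    - (FIRVisionFaceDetectorOptions *)parseOptions:(NSDictionary *)optionsData {
      FIRVisionFaceDetectorOptions *options = [[FIRVisionFaceDetectorOptions alloc] init];
      options.minFaceSize = [optionsData[@"minFaceSize"] floatValue];
      options.isTrackingEnabled = [optionsData[@"enableTracking"] boolValue];
      options.classificationType = [optionsData[@"enableClassification"] boolValue]
                                       ? FIRVisionFaceDetectorClassificationAll
                                       : FIRVisionFaceDetectorClassificationNone;
      return options;
    }

Centralizing the serialization in VisionOptions also means the live view and the still-image path cannot drift apart in how they encode detector settings.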
diff --git a/packages/firebase_ml_vision/lib/src/firebase_vision.dart b/packages/firebase_ml_vision/lib/src/firebase_vision.dart
index 66b41221ec1e..3fd085ef1bae 100644
--- a/packages/firebase_ml_vision/lib/src/firebase_vision.dart
+++ b/packages/firebase_ml_vision/lib/src/firebase_vision.dart
@@ -13,10 +13,57 @@
 /// TextDetector textDetector = FirebaseVision.instance.getTextDetector();
 /// ```
 class FirebaseVision {
-  FirebaseVision._();
+  Stream<LiveViewDetectionResult> liveViewStream;
+
+  FirebaseVision._() {
+    liveViewStream = const EventChannel(
+            'plugins.flutter.io/firebase_ml_vision/liveViewEvents')
+        .receiveBroadcastStream()
+        .where((dynamic event) => event['eventType'] == 'detection')
+        .map((dynamic event) {
+      // Decode the image size so callers can scale detection coordinates.
+      final Map sizeMap = event['imageSize'];
+      int width = sizeMap['width'];
+      int height = sizeMap['height'];
+      final imageSize = Size(width.toDouble(), height.toDouble());
+
+      // Decode the payload according to the detector type that produced it.
+      final List reply = event['data'];
+      final String detectionType = event['detectionType'];
+      if (detectionType == "barcode") {
+        final List<Barcode> barcodes = <Barcode>[];
+        reply.forEach((dynamic barcodeMap) {
+          barcodes.add(new Barcode(barcodeMap));
+        });
+        return new LiveViewBarcodeDetectionResult(barcodes, imageSize);
+      } else if (detectionType == "text") {
+        final List<TextBlock> texts = <TextBlock>[];
+        reply.forEach((dynamic block) {
+          texts.add(TextBlock.fromBlockData(block));
+        });
+        return new LiveViewTextDetectionResult(texts, imageSize);
+      } else if (detectionType == "face") {
+        final List<Face> faces = <Face>[];
+        reply.forEach((dynamic f) {
+          faces.add(new Face(f));
+        });
+        return new LiveViewFaceDetectionResult(faces, imageSize);
+      } else if (detectionType == "label") {
+        final List