detectorOptions;
+
+ /**
+ * Map to convert between a byte array, received from the camera, and its associated byte buffer.
+ * We use byte buffers internally because this is a more efficient way to call into native code
+ * later (avoids a potential copy).
+ *
+ * <p>Note: uses IdentityHashMap here instead of HashMap because the behavior of an array's
+ * equals, hashCode and toString methods is both useless and unexpected. IdentityHashMap enforces
+ * identity ('==') check on the keys.
+ */
+ private final Map<byte[], ByteBuffer> bytesToByteBuffer = new IdentityHashMap<>();
+
+ private final Detector.OperationFinishedCallback liveDetectorFinishedCallback =
+ new Detector.OperationFinishedCallback() {
+ @Override
+ public void success(Detector detector, Object data, android.util.Size size) {
+ // The Flutter side may cancel the event stream (eventSink is nulled in onCancel)
+ // while a detection is still in flight; drop the result instead of NPE-ing.
+ if (eventSink == null) {
+ return;
+ }
+ Map<String, Object> event = new HashMap<>();
+ event.put("eventType", "detection");
+ String dataType;
+ if (detector instanceof BarcodeDetector) {
+ dataType = "barcode";
+ } else if (detector instanceof TextDetector) {
+ dataType = "text";
+ } else if (detector instanceof LabelDetector) {
+ dataType = "label";
+ } else if (detector instanceof FaceDetector) {
+ dataType = "face";
+ } else {
+ // Unsupported live detector type: emit nothing rather than a malformed event.
+ return;
+ }
+ event.put("detectionType", dataType);
+ event.put("data", data);
+ eventSink.success(event);
+ }
+
+ @Override
+ public void error(DetectorException e) {
+ // Forward the detector failure to the Flutter side on the same event channel.
+ e.sendError(eventSink);
+ }
+ };
+
+ /**
+ * Creates a camera source bound to a new Flutter surface texture.
+ *
+ * @param registrar plugin registrar used for the activity, texture registry and messenger
+ * @param resolutionPreset one of "high", "medium" or "low"
+ * @param cameraFacing CameraInfo.CAMERA_FACING_BACK or CAMERA_FACING_FRONT
+ * @throws IllegalArgumentException if the preset or facing value is not recognized
+ */
+ public LegacyCamera(
+ PluginRegistry.Registrar registrar, String resolutionPreset, int cameraFacing) {
+ this.registrar = registrar;
+ this.activity = registrar.activity();
+ this.textureEntry = registrar.view().createSurfaceTexture();
+ processingRunnable = new FrameProcessingRunnable();
+
+ registerEventChannel();
+
+ switch (resolutionPreset) {
+ case "high":
+ requestedPreviewWidth = 1024;
+ requestedPreviewHeight = 768;
+ break;
+ case "medium":
+ requestedPreviewWidth = 640;
+ requestedPreviewHeight = 480;
+ break;
+ case "low":
+ requestedPreviewWidth = 320;
+ requestedPreviewHeight = 240;
+ break;
+ default:
+ // Previously an unknown preset silently fell through to the field defaults;
+ // fail fast instead so a typo on the Dart side is caught immediately.
+ throw new IllegalArgumentException("Unknown resolution preset: " + resolutionPreset);
+ }
+
+ setFacing(cameraFacing);
+ }
+
+ /**
+ * Wires up the per-texture event channel that delivers detection results to Flutter. The
+ * channel name is suffixed with the texture id so each simultaneous preview gets its own
+ * stream; listen/cancel simply install or clear the event sink.
+ */
+ private void registerEventChannel() {
+ String channelName =
+ "plugins.flutter.io/firebase_ml_vision/liveViewEvents" + textureEntry.id();
+ EventChannel channel = new EventChannel(registrar.messenger(), channelName);
+ channel.setStreamHandler(
+ new EventChannel.StreamHandler() {
+ @Override
+ public void onListen(Object arguments, EventChannel.EventSink sink) {
+ LegacyCamera.this.eventSink = sink;
+ }
+
+ @Override
+ public void onCancel(Object arguments) {
+ LegacyCamera.this.eventSink = null;
+ }
+ });
+ }
+
+ // ==============================================================================================
+ // Public
+ // ==============================================================================================
+
+ /** Stops the camera and releases the resources of the camera and underlying detector. */
+ public void release() {
+ synchronized (processorLock) {
+ stop();
+ // Tear down the frame-processing runnable only after stop() has joined the worker
+ // thread; processorLock serializes this with any concurrent detector access.
+ processingRunnable.release();
+ }
+ }
+
+ /**
+ * Opens the camera and starts sending preview frames to the underlying detector. Frames are
+ * rendered into the Flutter surface texture created for this camera, not a surface holder.
+ *
+ * <p>Idempotent: if the camera is already open this returns immediately.
+ *
+ * @param callback forwarded to createCamera; invoked with the texture id and chosen preview
+ *     size once the camera is configured (may be null)
+ * @return this camera source, for call chaining
+ * @throws IOException if the camera cannot be opened or no suitable preview size/fps is found
+ */
+ @RequiresPermission(Manifest.permission.CAMERA)
+ public synchronized LegacyCamera start(OnCameraOpenedCallback callback) throws IOException {
+ if (camera != null) {
+ return this;
+ }
+
+ camera = createCamera(callback);
+
+ // Size the Flutter texture to the preview size the camera actually selected.
+ SurfaceTexture surfaceTexture = textureEntry.surfaceTexture();
+ surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
+
+ camera.setPreviewTexture(surfaceTexture);
+ usingSurfaceTexture = true;
+ camera.startPreview();
+
+ // Pull frames off the camera on a dedicated worker thread; setActive(true) before
+ // start() so the runnable's loop does not exit immediately.
+ processingThread = new Thread(processingRunnable);
+ processingRunnable.setActive(true);
+ processingThread.start();
+
+ return this;
+ }
+
+ /**
+ * Closes the camera and stops sending frames to the underlying frame detector.
+ *
+ * <p>This camera source may be restarted again by calling {@link #start(OnCameraOpenedCallback)}.
+ *
+ * <p>Call {@link #release()} instead to completely shut down this camera source and release the
+ * resources of the underlying detector.
+ */
+ public synchronized void stop() {
+ processingRunnable.setActive(false);
+ if (processingThread != null) {
+ try {
+ // Wait for the thread to complete to ensure that we can't have multiple threads
+ // executing at the same time (i.e., which would happen if we called start too
+ // quickly after stop).
+ processingThread.join();
+ } catch (InterruptedException e) {
+ // Restore the interrupt status so callers further up the stack can observe it.
+ Thread.currentThread().interrupt();
+ Log.d(TAG, "Frame processing thread interrupted on release.");
+ }
+ processingThread = null;
+ }
+
+ if (camera != null) {
+ camera.stopPreview();
+ camera.setPreviewCallbackWithBuffer(null);
+ try {
+ // Detach whichever preview target was in use; both setters accept null to clear.
+ if (usingSurfaceTexture) {
+ camera.setPreviewTexture(null);
+ } else {
+ camera.setPreviewDisplay(null);
+ }
+ } catch (Exception e) {
+ Log.e(TAG, "Failed to clear camera preview: " + e);
+ }
+ camera.release();
+ camera = null;
+ }
+
+ // Release the reference to any image buffers, since these will no longer be in use.
+ bytesToByteBuffer.clear();
+ }
+
+ /** Changes the facing of the camera. */
+ public synchronized void setFacing(int facing) {
+ if ((facing != CAMERA_FACING_BACK) && (facing != CAMERA_FACING_FRONT)) {
+ throw new IllegalArgumentException("Invalid camera: " + facing);
+ }
+ this.facing = facing;
+ }
+
+ /**
+ * Returns the preview size that is currently in use by the underlying camera.
+ *
+ * <p>NOTE(review): previewSize is only assigned inside createCamera, so this returns null
+ * until {@link #start(OnCameraOpenedCallback)} has run — callers should be prepared for that.
+ */
+ public Size getPreviewSize() {
+ return previewSize;
+ }
+
+ /**
+ * Returns the selected camera; one of {@link CameraInfo#CAMERA_FACING_BACK} or {@link
+ * CameraInfo#CAMERA_FACING_FRONT}.
+ *
+ * <p>This is the value most recently validated and stored by {@link #setFacing(int)}.
+ */
+ public int getCameraFacing() {
+ return facing;
+ }
+
+ /**
+ * Opens the camera and applies the user settings.
+ *
+ * @param callback if non-null, invoked with the texture id and the chosen preview size once
+ *     the camera parameters have been applied
+ * @throws IOException if camera cannot be found or preview cannot be processed
+ */
+ @SuppressLint("InlinedApi")
+ private Camera createCamera(@Nullable OnCameraOpenedCallback callback) throws IOException {
+ int requestedCameraId = getIdForRequestedCamera(facing);
+ if (requestedCameraId == -1) {
+ throw new IOException("Could not find requested camera.");
+ }
+ Camera camera = Camera.open(requestedCameraId);
+
+ // Pick a preview/picture size pair close to the requested dimensions.
+ SizePair sizePair = selectSizePair(camera, requestedPreviewWidth, requestedPreviewHeight);
+ if (sizePair == null) {
+ throw new IOException("Could not find suitable preview size.");
+ }
+ Size pictureSize = sizePair.pictureSize();
+ previewSize = sizePair.previewSize();
+
+ int[] previewFpsRange = selectPreviewFpsRange(camera, requestedFps);
+ if (previewFpsRange == null) {
+ throw new IOException("Could not find suitable preview frames per second range.");
+ }
+
+ Camera.Parameters parameters = camera.getParameters();
+
+ // A picture size may be absent from the selected pair; only set it when available.
+ if (pictureSize != null) {
+ parameters.setPictureSize(pictureSize.getWidth(), pictureSize.getHeight());
+ }
+ parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
+ parameters.setPreviewFpsRange(
+ previewFpsRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+ previewFpsRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+ // NV21 is the format the preview callback / frame processing pipeline consumes.
+ parameters.setPreviewFormat(ImageFormat.NV21);
+
+ setRotation(camera, parameters, requestedCameraId);
+
+ // Prefer continuous video autofocus when the hardware supports it; otherwise fall back
+ // to the camera's default focus behavior and just log.
+ if (parameters
+ .getSupportedFocusModes()
+ .contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
+ parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
+ } else {
+ Log.i(TAG, "Camera auto focus is not supported on this device.");
+ }
+
+ camera.setParameters(parameters);
+
+ // Let the caller know the texture id and the final preview dimensions.
+ if (callback != null) {
+ callback.onOpened(textureEntry.id(), previewSize.getWidth(), previewSize.getHeight());
+ }
+
+ // Four frame buffers are needed for working with the camera:
+ //
+ // one for the frame that is currently being executed upon in doing detection
+ // one for the next pending frame to process immediately upon completing detection
+ // two for the frames that the camera uses to populate future preview images
+ //
+ // Through trial and error it appears that two free buffers, in addition to the two buffers
+ // used in this code, are needed for the camera to work properly. Perhaps the camera has
+ // one thread for acquiring images, and another thread for calling into user code. If only
+ // three buffers are used, then the camera will spew thousands of warning messages when
+ // detection takes a non-trivial amount of time.
+ camera.setPreviewCallbackWithBuffer(new CameraPreviewCallback());
+ camera.addCallbackBuffer(createPreviewBuffer(previewSize));
+ camera.addCallbackBuffer(createPreviewBuffer(previewSize));
+ camera.addCallbackBuffer(createPreviewBuffer(previewSize));
+ camera.addCallbackBuffer(createPreviewBuffer(previewSize));
+
+ return camera;
+ }
+
+ /**
+ * Gets the id for the camera specified by the direction it is facing. Returns -1 if no such
+ * camera was found.
+ *
+ * @param facing the desired camera (front-facing or rear-facing)
+ */
+ private static int getIdForRequestedCamera(int facing) {
+ CameraInfo cameraInfo = new CameraInfo();
+ int cameraCount = Camera.getNumberOfCameras();
+ for (int id = 0; id < cameraCount; id++) {
+ Camera.getCameraInfo(id, cameraInfo);
+ if (cameraInfo.facing == facing) {
+ return id;
+ }
+ }
+ return -1;
+ }
+
+ public static List