diff --git a/packages/camera/camera_android/CHANGELOG.md b/packages/camera/camera_android/CHANGELOG.md
index 6b3a94ecb43..c4c772bfd60 100644
--- a/packages/camera/camera_android/CHANGELOG.md
+++ b/packages/camera/camera_android/CHANGELOG.md
@@ -1,3 +1,6 @@
+## 0.10.5
+
+* Allows the camera to be switched while recording video.
 ## 0.10.4+3
 
 * Clarifies explanation of endorsement in README.
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
index 264ab72f524..500407009c5 100644
--- a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/Camera.java
@@ -96,13 +96,28 @@ class Camera
    * Holds all of the camera features/settings and will be used to update the request builder when
    * one changes.
    */
-  private final CameraFeatures cameraFeatures;
+  private CameraFeatures cameraFeatures;
+
+  private String imageFormatGroup;
+
+  /**
+   * Takes an input/output surface and orients the recording correctly. This is needed because
+   * switching cameras while recording causes the wrong orientation.
+   */
+  private VideoRenderer videoRenderer;
+
+  /**
+   * The lens direction (front or back) the camera was facing when recording started. Used to
+   * detect whether the camera has been flipped to the opposite direction mid-recording.
+   */
+  private int initialCameraFacing;
 
   private final SurfaceTextureEntry flutterTexture;
+  private final ResolutionPreset resolutionPreset;
   private final boolean enableAudio;
   private final Context applicationContext;
   private final DartMessenger dartMessenger;
-  private final CameraProperties cameraProperties;
+  private CameraProperties cameraProperties;
   private final CameraFeatureFactory cameraFeatureFactory;
   private final Activity activity;
 
   /**
    * A {@link CameraCaptureSession.CaptureCallback} that handles events related to JPEG capture.
    */
@@ -192,6 +207,7 @@ public Camera(
     this.applicationContext = activity.getApplicationContext();
     this.cameraProperties = cameraProperties;
     this.cameraFeatureFactory = cameraFeatureFactory;
+    this.resolutionPreset = resolutionPreset;
     this.cameraFeatures =
         CameraFeatures.init(
             cameraFeatureFactory, cameraProperties, activity, dartMessenger, resolutionPreset);
@@ -232,6 +248,7 @@ private void prepareMediaRecorder(String outputFilePath) throws IOException {
     if (mediaRecorder != null) {
       mediaRecorder.release();
     }
+    closeRenderer();
 
     final PlatformChannel.DeviceOrientation lockedOrientation =
         cameraFeatures.getSensorOrientation().getLockedCaptureOrientation();
@@ -259,6 +276,7 @@ private void prepareMediaRecorder(String outputFilePath) throws IOException {
 
   @SuppressLint("MissingPermission")
   public void open(String imageFormatGroup) throws CameraAccessException {
+    this.imageFormatGroup = imageFormatGroup;
    final ResolutionFeature resolutionFeature = cameraFeatures.getResolution();
 
     if (!resolutionFeature.checkIsSupported()) {
@@ -303,14 +321,17 @@ public void onOpened(@NonNull CameraDevice device) {
           cameraDevice = new DefaultCameraDeviceWrapper(device);
           try {
             startPreview();
+            if (!recordingVideo) // Only send initialization if we weren't already recording (i.e. not switching cameras mid-recording).
             dartMessenger.sendCameraInitializedEvent(
-                resolutionFeature.getPreviewSize().getWidth(),
-                resolutionFeature.getPreviewSize().getHeight(),
-                cameraFeatures.getExposureLock().getValue(),
-                cameraFeatures.getAutoFocus().getValue(),
-                cameraFeatures.getExposurePoint().checkIsSupported(),
-                cameraFeatures.getFocusPoint().checkIsSupported());
-          } catch (CameraAccessException e) {
+                  resolutionFeature.getPreviewSize().getWidth(),
+                  resolutionFeature.getPreviewSize().getHeight(),
+                  cameraFeatures.getExposureLock().getValue(),
+                  cameraFeatures.getAutoFocus().getValue(),
+                  cameraFeatures.getExposurePoint().checkIsSupported(),
+                  cameraFeatures.getFocusPoint().checkIsSupported());
+
+          } catch (Exception e) {
+            Log.i(TAG, "open | onOpened error: " + e.getMessage());
             dartMessenger.sendCameraErrorEvent(e.getMessage());
             close();
           }
@@ -320,7 +341,8 @@ public void onOpened(@NonNull CameraDevice device) {
         public void onClosed(@NonNull CameraDevice camera) {
           Log.i(TAG, "open | onClosed");
 
-          // Prevents calls to methods that would otherwise result in IllegalStateException exceptions.
+          // Prevents calls to methods that would otherwise result in IllegalStateException
+          // exceptions.
          cameraDevice = null;
           closeCaptureSession();
           dartMessenger.sendCameraClosingEvent();
@@ -735,7 +757,7 @@ public void startVideoRecording(
     if (imageStreamChannel != null) {
       setStreamHandler(imageStreamChannel);
     }
-
+    initialCameraFacing = cameraProperties.getLensFacing();
     recordingVideo = true;
     try {
       startCapture(true, imageStreamChannel != null);
@@ -747,6 +769,13 @@ public void startVideoRecording(
     }
   }
 
+  private void closeRenderer() {
+    if (videoRenderer != null) {
+      videoRenderer.close();
+      videoRenderer = null;
+    }
+  }
+
   public void stopVideoRecording(@NonNull final Result result) {
     if (!recordingVideo) {
       result.success(null);
@@ -757,6 +786,7 @@ public void stopVideoRecording(@NonNull final Result result) {
         cameraFeatureFactory.createAutoFocusFeature(cameraProperties, false));
     recordingVideo = false;
     try {
+      closeRenderer();
       captureSession.abortCaptures();
       mediaRecorder.stop();
     } catch (CameraAccessException | IllegalStateException e) {
@@ -765,7 +795,7 @@ public void stopVideoRecording(@NonNull final Result result) {
     mediaRecorder.reset();
     try {
       startPreview();
-    } catch (CameraAccessException | IllegalStateException e) {
+    } catch (CameraAccessException | IllegalStateException | InterruptedException e) {
       result.error("videoRecordingFailed", e.getMessage(), null);
       return;
     }
@@ -1049,13 +1079,50 @@ public void resumePreview() {
         null, (code, message) -> dartMessenger.sendCameraErrorEvent(message));
   }
 
-  public void startPreview() throws CameraAccessException {
+  public void startPreview() throws CameraAccessException, InterruptedException {
+    // If recording is already in progress, the camera is being flipped, so route the preview
+    // through the VideoRenderer to keep the recording's orientation correct.
+    if (recordingVideo) {
+      startPreviewWithVideoRendererStream();
+    } else {
+      startRegularPreview();
+    }
+  }
+
+  private void startRegularPreview() throws CameraAccessException {
     if (pictureImageReader == null || pictureImageReader.getSurface() == null) return;
     Log.i(TAG, "startPreview");
-
     createCaptureSession(CameraDevice.TEMPLATE_PREVIEW, pictureImageReader.getSurface());
   }
 
+  private void startPreviewWithVideoRendererStream()
+      throws CameraAccessException, InterruptedException {
+    if (videoRenderer == null) return;
+
+    // Get the rotation to apply to the rendered video.
+    final PlatformChannel.DeviceOrientation lockedOrientation =
+        cameraFeatures.getSensorOrientation().getLockedCaptureOrientation();
+    DeviceOrientationManager orientationManager =
+        cameraFeatures.getSensorOrientation().getDeviceOrientationManager();
+
+    int rotation = 0;
+    if (orientationManager != null) {
+      rotation =
+          lockedOrientation == null
+              ? orientationManager.getVideoOrientation()
+              : orientationManager.getVideoOrientation(lockedOrientation);
+    }
+
+    if (cameraProperties.getLensFacing() != initialCameraFacing) {
+
+      // If the new camera faces the opposite way from the camera that started the recording,
+      // the rotation should be flipped 180 degrees.
+      rotation = (rotation + 180) % 360;
+    }
+    videoRenderer.setRotation(rotation);
+
+    createCaptureSession(CameraDevice.TEMPLATE_RECORD, videoRenderer.getInputSurface());
+  }
+
   public void startPreviewWithImageStream(EventChannel imageStreamChannel)
       throws CameraAccessException {
     setStreamHandler(imageStreamChannel);
@@ -1179,17 +1246,7 @@ private void closeCaptureSession() {
   public void close() {
     Log.i(TAG, "close");
 
-    if (cameraDevice != null) {
-      cameraDevice.close();
-      cameraDevice = null;
-
-      // Closing the CameraDevice without closing the CameraCaptureSession is recommended
-      // for quickly closing the camera:
-      // https://developer.android.com/reference/android/hardware/camera2/CameraCaptureSession#close()
-      captureSession = null;
-    } else {
-      closeCaptureSession();
-    }
+    stopAndReleaseCamera();
 
     if (pictureImageReader != null) {
       pictureImageReader.close();
@@ -1208,6 +1265,76 @@ public void close() {
     stopBackgroundThread();
   }
 
+  private void stopAndReleaseCamera() {
+    if (cameraDevice != null) {
+      cameraDevice.close();
+      cameraDevice = null;
+
+      // Closing the CameraDevice without closing the CameraCaptureSession is recommended
+      // for quickly closing the camera:
+      // https://developer.android.com/reference/android/hardware/camera2/CameraCaptureSession#close()
+      captureSession = null;
+    } else {
+      closeCaptureSession();
+    }
+  }
+
+  private void prepareVideoRenderer() {
+    if (videoRenderer != null) return;
+    final ResolutionFeature resolutionFeature = cameraFeatures.getResolution();
+
+    // Handle errors raised on the video renderer's render thread.
+    Thread.UncaughtExceptionHandler videoRendererUncaughtExceptionHandler =
+        new Thread.UncaughtExceptionHandler() {
+          @Override
+          public void uncaughtException(Thread thread, Throwable ex) {
+            dartMessenger.sendCameraErrorEvent(
+                "Failed to process frames after camera was flipped.");
+          }
+        };
+
+    videoRenderer =
+        new VideoRenderer(
+            mediaRecorder.getSurface(),
+            resolutionFeature.getCaptureSize().getWidth(),
+            resolutionFeature.getCaptureSize().getHeight(),
+            videoRendererUncaughtExceptionHandler);
+  }
+
+  public void setDescriptionWhileRecording(
+      @NonNull final Result result, CameraProperties properties) {
+
+    if (!recordingVideo) {
+      result.error("setDescriptionWhileRecordingFailed", "Device was not recording", null);
+      return;
+    }
+
+    // Switching the camera while recording requires API 26; see VideoRenderer.java.
+    if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.O) {
+      result.error(
+          "setDescriptionWhileRecordingFailed",
+          "Device does not support switching the camera while recording",
+          null);
+      return;
+    }
+
+    stopAndReleaseCamera();
+    prepareVideoRenderer();
+    cameraProperties = properties;
+    cameraFeatures =
+        CameraFeatures.init(
+            cameraFeatureFactory, cameraProperties, activity, dartMessenger, resolutionPreset);
+    cameraFeatures.setAutoFocus(
+        cameraFeatureFactory.createAutoFocusFeature(cameraProperties, true));
+    try {
+      open(imageFormatGroup);
+    } catch (CameraAccessException e) {
+      result.error("setDescriptionWhileRecordingFailed", e.getMessage(), null);
+      return;
+    }
+    result.success(null);
+  }
+
   public void dispose() {
     Log.i(TAG, "dispose");
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
index 432344ade8c..aad62bbaba8 100644
--- a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/MethodCallHandlerImpl.java
@@ -354,6 +354,18 @@ public void onMethodCall(@NonNull MethodCall call, @NonNull final Result result)
           result.success(null);
           break;
         }
+      case "setDescriptionWhileRecording":
+        {
+          try {
+            String cameraName = call.argument("cameraName");
+            CameraProperties cameraProperties =
+                new CameraPropertiesImpl(cameraName, CameraUtils.getCameraManager(activity));
+            camera.setDescriptionWhileRecording(result, cameraProperties);
+          } catch (Exception e) {
+            handleException(e, result);
+          }
+          break;
+        }
       case "dispose":
         {
           if (camera != null) {
diff --git a/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java
new file mode 100644
index 00000000000..62a70640961
--- /dev/null
+++ b/packages/camera/camera_android/android/src/main/java/io/flutter/plugins/camera/VideoRenderer.java
@@ -0,0 +1,382 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package io.flutter.plugins.camera;
+
+import static android.os.SystemClock.uptimeMillis;
+
+import android.graphics.SurfaceTexture;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.GLUtils;
+import android.opengl.Matrix;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.util.Log;
+import android.view.Surface;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Renders video onto a texture after performing a matrix rotation on each frame.
+ *
+ * <p>VideoRenderer is needed because when switching between cameras mid-recording, the
+ * orientation of the recording from the new camera usually comes out flipped. MediaRecorder has
+ * setOrientationHint, but it cannot be called mid-recording and therefore isn't useful. Android
+ * Camera2 has no setDisplayOrientation on the camera itself, as it is supposed to 'just work' (see
+ * https://stackoverflow.com/questions/33479004/what-is-the-camera2-api-equivalent-of-setdisplayorientation).
+ * Therefore it cannot be used to set the camera's orientation either.
+ *
+ * <p>This leaves the solution of routing the recording through a SurfaceTexture and manually
+ * applying a matrix transformation to each frame to get the correct orientation. This only
+ * happens when setDescription is called mid video recording.
+ */
+public class VideoRenderer {
+
+  private static final String TAG = "VideoRenderer";
+
+ private static final String vertexShaderCode =
+ " precision highp float;\n"
+ + " attribute vec3 vertexPosition;\n"
+ + " attribute vec2 uvs;\n"
+ + " varying vec2 varUvs;\n"
+ + " uniform mat4 texMatrix;\n"
+ + " uniform mat4 mvp;\n"
+ + "\n"
+ + " void main()\n"
+ + " {\n"
+ + " varUvs = (texMatrix * vec4(uvs.x, uvs.y, 0, 1.0)).xy;\n"
+ + " gl_Position = mvp * vec4(vertexPosition, 1.0);\n"
+ + " }";
+
+ private static final String fragmentShaderCode =
+ " #extension GL_OES_EGL_image_external : require\n"
+ + " precision mediump float;\n"
+ + "\n"
+ + " varying vec2 varUvs;\n"
+ + " uniform samplerExternalOES texSampler;\n"
+ + "\n"
+ + " void main()\n"
+ + " {\n"
+ + " vec4 c = texture2D(texSampler, varUvs);\n"
+ + " gl_FragColor = vec4(c.r, c.g, c.b, c.a);\n"
+ + " }";
+
+ private final int[] textureHandles = new int[1];
+
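+  // Full-screen quad: four vertices, each packed as x, y, z position followed by u, v texture
+  // coordinates (stride of five floats).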
+ private final float[] vertices =
+ new float[] {
+ -1.0f, -1.0f, 0.0f, 0f, 0f, -1.0f, 1.0f, 0.0f, 0f, 1f, 1.0f, 1.0f, 0.0f, 1f, 1f, 1.0f,
+ -1.0f, 0.0f, 1f, 0f
+ };
+
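+  // Two triangles covering the quad, indexing into the vertex array above.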
+ private final int[] indices = new int[] {2, 1, 0, 0, 3, 2};
+
+ private int program;
+ private int vertexHandle = 0;
+ private final int[] bufferHandles = new int[2];
+ private int uvsHandle = 0;
+ private int texMatrixHandle = 0;
+ private int mvpHandle = 0;
+
+ EGLDisplay display;
+ EGLContext context;
+ EGLSurface surface;
+ private Thread thread;
+ private final Surface outputSurface;
+ private SurfaceTexture inputSurfaceTexture;
+ private Surface inputSurface;
+
+ private HandlerThread surfaceTextureFrameAvailableHandler;
+ private final Object surfaceTextureAvailableFrameLock = new Object();
+  private boolean surfaceTextureFrameAvailable = false;
+
+ private final int recordingWidth;
+ private final int recordingHeight;
+ private int rotation = 0;
+
+ private final Object lock = new Object();
+
+ private final Thread.UncaughtExceptionHandler uncaughtExceptionHandler;
+
+ /** Gets surface for input. Blocks until surface is ready. */
+ public Surface getInputSurface() throws InterruptedException {
+ synchronized (lock) {
+ while (inputSurface == null) {
+ lock.wait();
+ }
+ }
+ return inputSurface;
+ }
+
+ public VideoRenderer(
+ Surface outputSurface,
+ int recordingWidth,
+ int recordingHeight,
+ Thread.UncaughtExceptionHandler uncaughtExceptionHandler) {
+ this.outputSurface = outputSurface;
+ this.recordingHeight = recordingHeight;
+ this.recordingWidth = recordingWidth;
+ this.uncaughtExceptionHandler = uncaughtExceptionHandler;
+ startOpenGL();
+ Log.d(TAG, "VideoRenderer setup complete");
+ }
+
+  /** Stops rendering and cleans up resources. */
+ public void close() {
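+    // Signal the render loop to exit before releasing the EGL and surface resources below.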
+ thread.interrupt();
+ surfaceTextureFrameAvailableHandler.quitSafely();
+ cleanupOpenGL();
+ inputSurfaceTexture.release();
+ }
+
+ private void cleanupOpenGL() {
+ GLES20.glDeleteBuffers(2, bufferHandles, 0);
+ GLES20.glDeleteTextures(1, textureHandles, 0);
+ EGL14.eglDestroyContext(display, context);
+ EGL14.eglDestroySurface(display, surface);
+ GLES20.glDeleteProgram(program);
+ }
+
+  /** Configures OpenGL. Must be called on the same thread that calls draw. */
+ private void configureOpenGL() {
+ synchronized (lock) {
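+      // Runs on the render thread. Holding the lock lets getInputSurface() block until
+      // inputSurface exists; notifyAll() at the end of this method wakes any waiters.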
+ display = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (display == EGL14.EGL_NO_DISPLAY)
+ throw new RuntimeException(
+ "eglDisplay == EGL14.EGL_NO_DISPLAY: "
+ + GLUtils.getEGLErrorString(EGL14.eglGetError()));
+
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(display, version, 0, version, 1))
+ throw new RuntimeException(
+ "eglInitialize(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()));
+
+ String eglExtensions = EGL14.eglQueryString(display, EGL14.EGL_EXTENSIONS);
+ if (!eglExtensions.contains("EGL_ANDROID_presentation_time"))
+ throw new RuntimeException(
+ "cannot configure OpenGL. missing EGL_ANDROID_presentation_time");
+
+ int[] attribList =
+ new int[] {
+ EGL14.EGL_RED_SIZE, 8,
+ EGL14.EGL_GREEN_SIZE, 8,
+ EGL14.EGL_BLUE_SIZE, 8,
+ EGL14.EGL_ALPHA_SIZE, 8,
+ EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+ EGLExt.EGL_RECORDABLE_ANDROID, 1,
+ EGL14.EGL_NONE
+ };
+
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ if (!EGL14.eglChooseConfig(display, attribList, 0, configs, 0, configs.length, numConfigs, 0))
+ throw new RuntimeException(GLUtils.getEGLErrorString(EGL14.eglGetError()));
+
+ int err = EGL14.eglGetError();
+ if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err));
+
+ int[] ctxAttribs = new int[] {EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, EGL14.EGL_NONE};
+ context = EGL14.eglCreateContext(display, configs[0], EGL14.EGL_NO_CONTEXT, ctxAttribs, 0);
+
+ err = EGL14.eglGetError();
+ if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err));
+
+ int[] surfaceAttribs = new int[] {EGL14.EGL_NONE};
+
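+      // Wrap the MediaRecorder's input surface in an EGL window surface so every frame drawn
+      // here is delivered to the encoder.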
+ surface = EGL14.eglCreateWindowSurface(display, configs[0], outputSurface, surfaceAttribs, 0);
+
+ err = EGL14.eglGetError();
+ if (err != EGL14.EGL_SUCCESS) throw new RuntimeException(GLUtils.getEGLErrorString(err));
+
+ if (!EGL14.eglMakeCurrent(display, surface, surface, context))
+ throw new RuntimeException(
+ "eglMakeCurrent(): " + GLUtils.getEGLErrorString(EGL14.eglGetError()));
+
+ ByteBuffer vertexBuffer = ByteBuffer.allocateDirect(vertices.length * 4);
+ vertexBuffer.order(ByteOrder.nativeOrder());
+ vertexBuffer.asFloatBuffer().put(vertices);
+ vertexBuffer.asFloatBuffer().position(0);
+
+ ByteBuffer indexBuffer = ByteBuffer.allocateDirect(indices.length * 4);
+ indexBuffer.order(ByteOrder.nativeOrder());
+ indexBuffer.asIntBuffer().put(indices);
+ indexBuffer.position(0);
+
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexShaderCode);
+ int fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentShaderCode);
+
+ program = GLES20.glCreateProgram();
+
+ GLES20.glAttachShader(program, vertexShader);
+ GLES20.glAttachShader(program, fragmentShader);
+ GLES20.glLinkProgram(program);
+
+ deleteShader(vertexShader);
+ deleteShader(fragmentShader);
+
+ vertexHandle = GLES20.glGetAttribLocation(program, "vertexPosition");
+ uvsHandle = GLES20.glGetAttribLocation(program, "uvs");
+ texMatrixHandle = GLES20.glGetUniformLocation(program, "texMatrix");
+ mvpHandle = GLES20.glGetUniformLocation(program, "mvp");
+
+ // Initialize buffers
+ GLES20.glGenBuffers(2, bufferHandles, 0);
+
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0]);
+ GLES20.glBufferData(
+ GLES20.GL_ARRAY_BUFFER, vertices.length * 4, vertexBuffer, GLES20.GL_DYNAMIC_DRAW);
+
+ GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1]);
+ GLES20.glBufferData(
+ GLES20.GL_ELEMENT_ARRAY_BUFFER, indices.length * 4, indexBuffer, GLES20.GL_DYNAMIC_DRAW);
+
+      // Initialize the texture that will receive the camera frames.
+ GLES20.glGenTextures(1, textureHandles, 0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureHandles[0]);
+
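+      // The SurfaceTexture backed by this external OES texture receives the camera frames;
+      // its Surface is what the capture session renders into.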
+ inputSurfaceTexture = new SurfaceTexture(getTexId());
+ inputSurfaceTexture.setDefaultBufferSize(recordingWidth, recordingHeight);
+ surfaceTextureFrameAvailableHandler = new HandlerThread("FrameHandlerThread");
+ surfaceTextureFrameAvailableHandler.start();
+ inputSurface = new Surface(inputSurfaceTexture);
+
+ inputSurfaceTexture.setOnFrameAvailableListener(
+ new SurfaceTexture.OnFrameAvailableListener() {
+ @Override
+ public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+ synchronized (surfaceTextureAvailableFrameLock) {
+ if (surfaceTextureFrameAvailable)
+ Log.w(TAG, "Frame available before processing other frames. dropping frames");
+ surfaceTextureFrameAvailable = true;
+ surfaceTextureAvailableFrameLock.notifyAll();
+ }
+ }
+ },
+ new Handler(surfaceTextureFrameAvailableHandler.getLooper()));
+ lock.notifyAll();
+ }
+ }
+
+  /** Starts the render thread and configures OpenGL on it. */
+ private void startOpenGL() {
+ Log.d(TAG, "Starting OpenGL Thread");
+ thread =
+ new Thread() {
+ @Override
+ public void run() {
+
+ configureOpenGL();
+
+ try {
+              // Continuously pull frames from the input surface texture and redraw them with
+              // the corrected rotation.
+ while (!Thread.interrupted()) {
+
+ synchronized (surfaceTextureAvailableFrameLock) {
+ while (!surfaceTextureFrameAvailable) {
+ surfaceTextureAvailableFrameLock.wait(500);
+ }
+ surfaceTextureFrameAvailable = false;
+ }
+
+ inputSurfaceTexture.updateTexImage();
+
+ float[] surfaceTextureMatrix = new float[16];
+ inputSurfaceTexture.getTransformMatrix(surfaceTextureMatrix);
+
+ draw(recordingWidth, recordingHeight, surfaceTextureMatrix);
+ }
+ } catch (InterruptedException e) {
+ Log.d(TAG, "thread interrupted while waiting for frames");
+ }
+ }
+ };
+ thread.setUncaughtExceptionHandler(uncaughtExceptionHandler);
+ thread.start();
+ }
+
+ public int getTexId() {
+ return textureHandles[0];
+ }
+
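+  /**
+   * Returns the model matrix for the current frame: the identity rotated by the requested angle
+   * around the z-axis, so the recording keeps its original orientation after a camera switch.
+   */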
+ public float[] moveMatrix() {
+ float[] m = new float[16];
+ Matrix.setIdentityM(m, 0);
+ Matrix.rotateM(m, 0, rotation, 0, 0, 1);
+ return m;
+ }
+
+ public void setRotation(int rotation) {
+ this.rotation = rotation;
+ }
+
+ private int loadShader(int type, String code) {
+
+ int shader = GLES20.glCreateShader(type);
+
+ GLES20.glShaderSource(shader, code);
+ GLES20.glCompileShader(shader);
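+    // Note: compile status is not checked here; a failed compile surfaces as GL errors at
+    // link/draw time rather than as an exception.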
+ return shader;
+ }
+
+ private void deleteShader(int shader) {
+ GLES20.glDeleteShader(shader);
+ }
+
+ public void draw(int viewportWidth, int viewportHeight, float[] texMatrix) {
+
+ GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
+ GLES20.glClearColor(0f, 0f, 0f, 0f);
+
+ GLES20.glViewport(0, 0, viewportWidth, viewportHeight);
+
+ GLES20.glUseProgram(program);
+
+ // Pass transformations to shader
+ GLES20.glUniformMatrix4fv(texMatrixHandle, 1, false, texMatrix, 0);
+ GLES20.glUniformMatrix4fv(mvpHandle, 1, false, moveMatrix(), 0);
+
+ // Prepare buffers with vertices and indices & draw
+ GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, bufferHandles[0]);
+ GLES20.glBindBuffer(GLES20.GL_ELEMENT_ARRAY_BUFFER, bufferHandles[1]);
+
+ GLES20.glEnableVertexAttribArray(vertexHandle);
+ GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 4 * 5, 0);
+
+ GLES20.glEnableVertexAttribArray(uvsHandle);
+ GLES20.glVertexAttribPointer(uvsHandle, 2, GLES20.GL_FLOAT, false, 4 * 5, 3 * 4);
+
+ GLES20.glDrawElements(GLES20.GL_TRIANGLES, 6, GLES20.GL_UNSIGNED_INT, 0);
+
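+    // Stamp the frame with a presentation timestamp (nanoseconds) so the encoder keeps the
+    // recording's timeline consistent.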
+ EGLExt.eglPresentationTimeANDROID(display, surface, uptimeMillis() * 1000000);
+ if (!EGL14.eglSwapBuffers(display, surface)) {
+ Log.w(TAG, "eglSwapBuffers() " + GLUtils.getEGLErrorString(EGL14.eglGetError()));
+ }
+ }
+}
diff --git a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
index 9a679017ded..9de33e3dc7a 100644
--- a/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
+++ b/packages/camera/camera_android/android/src/test/java/io/flutter/plugins/camera/CameraTest.java
@@ -602,6 +602,123 @@ public void resumeVideoRecording_shouldCallPauseWhenRecordingAndOnAPIN() {
verify(mockResult, never()).error(any(), any(), any());
}
+ @Test
+ public void setDescriptionWhileRecording() {
+ MethodChannel.Result mockResult = mock(MethodChannel.Result.class);
+ MediaRecorder mockMediaRecorder = mock(MediaRecorder.class);
+ VideoRenderer mockVideoRenderer = mock(VideoRenderer.class);
+ TestUtils.setPrivateField(camera, "mediaRecorder", mockMediaRecorder);
+ TestUtils.setPrivateField(camera, "recordingVideo", true);
+ TestUtils.setPrivateField(camera, "videoRenderer", mockVideoRenderer);
+
+ final CameraProperties newCameraProperties = mock(CameraProperties.class);
+ camera.setDescriptionWhileRecording(mockResult, newCameraProperties);
+
+ if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.O) {
+ verify(mockResult, times(1))
+ .error(
+ eq("setDescriptionWhileRecordingFailed"),
+ eq("Device does not support switching the camera while recording"),
+ eq(null));
+ } else {
+ verify(mockResult, times(1)).success(null);
+ verify(mockResult, never()).error(any(), any(), any());
+ }
+ }
+
+ @Test
+ public void startPreview_shouldPullStreamFromVideoRenderer()
+ throws InterruptedException, CameraAccessException {
+ VideoRenderer mockVideoRenderer = mock(VideoRenderer.class);
+ ArrayList
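For context, the orientation fix at the heart of this patch reduces to one rule, applied in Camera.startPreviewWithVideoRendererStream(): keep the computed video rotation unless the lens direction has changed since recording started, in which case add 180 degrees. Below is a minimal, self-contained sketch of that rule; the RotationRule class and its integer facing values are illustrative stand-ins (not part of the patch) for CameraProperties.getLensFacing() constants.

```java
// Illustrative sketch only; not part of the patch.
public class RotationRule {
  /** Mirrors the flip logic in Camera.startPreviewWithVideoRendererStream(). */
  static int recordingRotation(int baseRotation, int initialFacing, int currentFacing) {
    // A camera facing the opposite way from the one that started the recording produces
    // frames rotated 180 degrees, so compensate before handing the value to VideoRenderer.
    return currentFacing == initialFacing ? baseRotation : (baseRotation + 180) % 360;
  }

  public static void main(String[] args) {
    // Recording started on facing=1 at 90 degrees; after switching to facing=0 the
    // renderer must draw at 270 degrees.
    System.out.println(recordingRotation(90, 1, 0)); // prints 270
  }
}
```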