Skip to content
This repository was archived by the owner on Feb 22, 2023. It is now read-only.

Add byte streaming capability for the camera #900

Closed
wants to merge 7 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,26 @@ public void onMethodCall(MethodCall call, final Result result) {
camera.stopVideoRecording(result);
break;
}
case "startByteStream":
{
try {
camera.startPreviewWithByteStream();
result.success(null);
} catch (CameraAccessException e) {
result.error("CameraAccess", e.getMessage(), null);
}
break;
}
case "stopByteStream":
{
try {
camera.startPreview();
result.success(null);
} catch (CameraAccessException e) {
result.error("CameraAccess", e.getMessage(), null);
}
break;
}
case "dispose":
{
if (camera != null) {
Expand Down Expand Up @@ -248,7 +268,8 @@ private class Camera {
private CameraDevice cameraDevice;
private CameraCaptureSession cameraCaptureSession;
private EventChannel.EventSink eventSink;
private ImageReader imageReader;
private ImageReader pictureImageReader;
private ImageReader byteImageReader; // Used to pass bytes to dart side.
private int sensorOrientation;
private boolean isFrontFacing;
private String cameraName;
Expand Down Expand Up @@ -453,9 +474,13 @@ private void open(@Nullable final Result result) {
if (result != null) result.error("cameraPermission", "Camera permission not granted", null);
} else {
try {
imageReader =
pictureImageReader =
ImageReader.newInstance(
captureSize.getWidth(), captureSize.getHeight(), ImageFormat.JPEG, 2);
byteImageReader =
ImageReader.newInstance(
previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We use ImageFormat.YUV_420_888 because it is commonly supported on Android devices.


cameraManager.openCamera(
cameraName,
new CameraDevice.StateCallback() {
Expand Down Expand Up @@ -548,7 +573,7 @@ private void takePicture(String filePath, @NonNull final Result result) {
return;
}

imageReader.setOnImageAvailableListener(
pictureImageReader.setOnImageAvailableListener(
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Expand All @@ -566,7 +591,7 @@ public void onImageAvailable(ImageReader reader) {
try {
final CaptureRequest.Builder captureBuilder =
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(imageReader.getSurface());
captureBuilder.addTarget(pictureImageReader.getSurface());
int displayRotation = activity.getWindowManager().getDefaultDisplay().getRotation();
int displayOrientation = ORIENTATIONS.get(displayRotation);
if (isFrontFacing) displayOrientation = -displayOrientation;
Expand Down Expand Up @@ -696,7 +721,7 @@ private void startPreview() throws CameraAccessException {
surfaces.add(previewSurface);
captureRequestBuilder.addTarget(previewSurface);

surfaces.add(imageReader.getSurface());
surfaces.add(pictureImageReader.getSurface());

cameraDevice.createCaptureSession(
surfaces,
Expand Down Expand Up @@ -726,6 +751,107 @@ public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession
null);
}

private void startPreviewWithByteStream() throws CameraAccessException {
  // Replaces the current session with one that renders the preview texture AND
  // delivers every frame to byteImageReader for streaming to the Dart side.
  closeCaptureSession();

  SurfaceTexture surfaceTexture = textureEntry.surfaceTexture();
  surfaceTexture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());

  // TEMPLATE_RECORD is tuned for a sustained, steady frame stream. The previous
  // TEMPLATE_STILL_CAPTURE is meant for one-shot captures and throttles the
  // repeating-request frame rate.
  captureRequestBuilder =
      cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);

  List<Surface> surfaces = new ArrayList<>();

  // Target 1: the Flutter preview texture, so the on-screen preview keeps running.
  Surface previewSurface = new Surface(surfaceTexture);
  surfaces.add(previewSurface);
  captureRequestBuilder.addTarget(previewSurface);

  // Target 2: the YUV ImageReader that feeds frame bytes to Dart.
  surfaces.add(byteImageReader.getSurface());
  captureRequestBuilder.addTarget(byteImageReader.getSurface());

  cameraDevice.createCaptureSession(
      surfaces,
      new CameraCaptureSession.StateCallback() {
        @Override
        public void onConfigured(@NonNull CameraCaptureSession session) {
          // The camera may have been disposed while the session was being built.
          if (cameraDevice == null) {
            sendErrorEvent("The camera was closed during configuration.");
            return;
          }
          try {
            cameraCaptureSession = session;
            captureRequestBuilder.set(
                CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
            cameraCaptureSession.setRepeatingRequest(captureRequestBuilder.build(), null, null);
          } catch (CameraAccessException e) {
            sendErrorEvent(e.getMessage());
          }
        }

        @Override
        public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
          sendErrorEvent("Failed to configure the camera for streaming bytes.");
        }
      },
      null);

  // Expose the byte stream to Dart; frames start flowing once a listener attaches.
  registerByteStreamEventChannel();
}

private void registerByteStreamEventChannel() {
  // Event channel over which raw preview frames are pushed to the Dart side.
  final EventChannel cameraChannel =
      new EventChannel(registrar.messenger(), "plugins.flutter.io/camera/bytes");

  cameraChannel.setStreamHandler(
      new EventChannel.StreamHandler() {
        @Override
        public void onListen(Object arguments, EventChannel.EventSink sink) {
          // Begin forwarding frames as soon as a Dart listener subscribes.
          setByteStreamImageAvailableListener(sink);
        }

        @Override
        public void onCancel(Object arguments) {
          // Detach the frame callback when the Dart side unsubscribes.
          byteImageReader.setOnImageAvailableListener(null, null);
        }
      });
}

private void setByteStreamImageAvailableListener(final EventChannel.EventSink eventSink) {
byteImageReader.setOnImageAvailableListener(
new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(final ImageReader reader) {
Image img = reader.acquireLatestImage();
if (img == null) return;

eventSink.success(YUV_420_888toNV21(img));
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Conversion to NV21 to turn planar image bytebuffer to a single dimension image.

img.close();
}
},
null);
}

private byte[] YUV_420_888toNV21(Image image) {
  // Converts a planar YUV_420_888 Image into a single NV21 byte array
  // (full-resolution Y plane followed by interleaved V/U at half resolution).
  //
  // The previous implementation concatenated the three plane buffers directly,
  // which is only valid when every plane has pixelStride == 1 and
  // rowStride == width. Many devices report chroma pixelStride == 2 (already
  // semi-planar) or pad rows, so strides must be honored explicitly.
  int width = image.getWidth();
  int height = image.getHeight();
  int ySize = width * height;
  byte[] nv21 = new byte[ySize + ySize / 2];

  // --- Y plane ---
  Image.Plane yPlane = image.getPlanes()[0];
  ByteBuffer yBuffer = yPlane.getBuffer();
  int yRowStride = yPlane.getRowStride();
  if (yRowStride == width) {
    // Tightly packed: one bulk copy.
    yBuffer.get(nv21, 0, ySize);
  } else {
    // Rows are padded: copy row by row, skipping the padding.
    for (int row = 0; row < height; row++) {
      yBuffer.position(row * yRowStride);
      yBuffer.get(nv21, row * width, width);
    }
  }

  // --- Chroma planes: NV21 requires V first, then U, interleaved ---
  Image.Plane uPlane = image.getPlanes()[1];
  Image.Plane vPlane = image.getPlanes()[2];
  ByteBuffer uBuffer = uPlane.getBuffer();
  ByteBuffer vBuffer = vPlane.getBuffer();
  int chromaRowStride = uPlane.getRowStride();
  int chromaPixelStride = uPlane.getPixelStride();

  int outIndex = ySize;
  for (int row = 0; row < height / 2; row++) {
    for (int col = 0; col < width / 2; col++) {
      int srcIndex = row * chromaRowStride + col * chromaPixelStride;
      nv21[outIndex++] = vBuffer.get(srcIndex);
      nv21[outIndex++] = uBuffer.get(srcIndex);
    }
  }

  return nv21;
}

private void sendErrorEvent(String errorDescription) {
if (eventSink != null) {
Map<String, String> event = new HashMap<>();
Expand All @@ -749,9 +875,13 @@ private void close() {
cameraDevice.close();
cameraDevice = null;
}
if (imageReader != null) {
imageReader.close();
imageReader = null;
if (pictureImageReader != null) {
pictureImageReader.close();
pictureImageReader = null;
}
if (byteImageReader != null) {
byteImageReader.close();
byteImageReader = null;
}
if (mediaRecorder != null) {
mediaRecorder.reset();
Expand Down
24 changes: 4 additions & 20 deletions packages/camera/example/ios/Runner.xcodeproj/project.pbxproj
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,6 @@
9705A1C41CF9048500538489 /* Embed Frameworks */,
3B06AD1E1E4923F5004D2608 /* Thin Binary */,
FE224661708E6DA2A0F8B952 /* [CP] Embed Pods Frameworks */,
EACF0929FF12B6CC70C2D6BE /* [CP] Copy Pods Resources */,
);
buildRules = (
);
Expand All @@ -183,7 +182,7 @@
TargetAttributes = {
97C146ED1CF9000F007C117D = {
CreatedOnToolsVersion = 7.3.1;
DevelopmentTeam = EQHXZ8M8AV;
DevelopmentTeam = S8QB4VV633;
};
};
};
Expand Down Expand Up @@ -269,29 +268,14 @@
shellPath = /bin/sh;
shellScript = "/bin/sh \"$FLUTTER_ROOT/packages/flutter_tools/bin/xcode_backend.sh\" build";
};
EACF0929FF12B6CC70C2D6BE /* [CP] Copy Pods Resources */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
);
name = "[CP] Copy Pods Resources";
outputPaths = (
);
runOnlyForDeploymentPostprocessing = 0;
shellPath = /bin/sh;
shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-Runner/Pods-Runner-resources.sh\"\n";
showEnvVarsInLog = 0;
};
FE224661708E6DA2A0F8B952 /* [CP] Embed Pods Frameworks */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
files = (
);
inputPaths = (
"${SRCROOT}/Pods/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh",
"${PODS_ROOT}/../../../../../../flutter/bin/cache/artifacts/engine/ios-release/Flutter.framework",
"${PODS_ROOT}/../.symlinks/flutter/ios/Flutter.framework",
);
name = "[CP] Embed Pods Frameworks";
outputPaths = (
Expand Down Expand Up @@ -433,7 +417,7 @@
buildSettings = {
ARCHS = arm64;
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = EQHXZ8M8AV;
DEVELOPMENT_TEAM = S8QB4VV633;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
Expand All @@ -456,7 +440,7 @@
buildSettings = {
ARCHS = arm64;
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
DEVELOPMENT_TEAM = EQHXZ8M8AV;
DEVELOPMENT_TEAM = S8QB4VV633;
ENABLE_BITCODE = NO;
FRAMEWORK_SEARCH_PATHS = (
"$(inherited)",
Expand Down
Loading