This repository was archived by the owner on Feb 22, 2023. It is now read-only.
Merged
3 changes: 2 additions & 1 deletion packages/camera/camera_platform_interface/CHANGELOG.md
@@ -1,5 +1,6 @@
## NEXT
## 2.2.0

* Adds image streaming to the platform interface.
* Removes unnecessary imports.

## 2.1.6
@@ -12,6 +12,8 @@ import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
import 'package:stream_transform/stream_transform.dart';

import 'type_conversion.dart';

const MethodChannel _channel = MethodChannel('plugins.flutter.io/camera');

/// An implementation of [CameraPlatform] that uses method channels.
@@ -48,6 +50,12 @@ class MethodChannelCamera extends CameraPlatform {
final StreamController<DeviceEvent> deviceEventStreamController =
StreamController<DeviceEvent>.broadcast();

// The subscription to the stream of frames coming from the native code.
StreamSubscription<dynamic>? _platformImageStreamSubscription;

// The controller for the stream that vends frames to platform interface clients.
StreamController<CameraImageData>? _frameStreamController;

Stream<CameraEvent> _cameraEvents(int cameraId) =>
cameraEventStreamController.stream
.where((CameraEvent event) => event.cameraId == cameraId);
@@ -267,6 +275,52 @@ class MethodChannelCamera extends CameraPlatform {
<String, dynamic>{'cameraId': cameraId},
);

@override
Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
{CameraImageStreamOptions? options}) {
_frameStreamController = StreamController<CameraImageData>(
onListen: _onFrameStreamListen,
onPause: _onFrameStreamPauseResume,
onResume: _onFrameStreamPauseResume,
onCancel: _onFrameStreamCancel,
);
return _frameStreamController!.stream;
}

void _onFrameStreamListen() {
_startPlatformStream();
}

Future<void> _startPlatformStream() async {
await _channel.invokeMethod<void>('startImageStream');
const EventChannel cameraEventChannel =
EventChannel('plugins.flutter.io/camera/imageStream');
_platformImageStreamSubscription =
cameraEventChannel.receiveBroadcastStream().listen((dynamic imageData) {
if (defaultTargetPlatform == TargetPlatform.iOS) {
try {
_channel.invokeMethod<void>('receivedImageStreamData');
} on PlatformException catch (e) {
throw CameraException(e.code, e.message);
}
}
_frameStreamController!
.add(cameraImageFromPlatformData(imageData as Map<dynamic, dynamic>));
});
}

FutureOr<void> _onFrameStreamCancel() async {
await _channel.invokeMethod<void>('stopImageStream');
await _platformImageStreamSubscription?.cancel();
_platformImageStreamSubscription = null;
_frameStreamController = null;
}

void _onFrameStreamPauseResume() {
throw CameraException('InvalidCall',
'Pause and resume are not supported for onStreamedFrameAvailable');
}

@override
Future<void> setFlashMode(int cameraId, FlashMode mode) =>
_channel.invokeMethod<void>(
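For orientation, here is a minimal, hypothetical usage sketch of the path implemented above: listening to the returned stream triggers `startImageStream` on the method channel, frames then arrive over the `plugins.flutter.io/camera/imageStream` event channel, and canceling the subscription triggers `stopImageStream`. The camera ID handling and frame processing below are placeholders, not part of this PR.

```dart
import 'dart:async';

import 'package:camera_platform_interface/camera_platform_interface.dart';

/// Hypothetical consumer of the new platform-interface stream.
Future<void> streamSomeFrames(int cameraId) async {
  final CameraPlatform camera = CameraPlatform.instance;

  // Listening starts the platform image stream ('startImageStream').
  final StreamSubscription<CameraImageData> subscription = camera
      .onStreamedFrameAvailable(cameraId)
      .listen((CameraImageData image) {
    print('Frame: ${image.width}x${image.height}, '
        'format group: ${image.format.group}');
  });

  // Process frames for a while...
  await Future<void>.delayed(const Duration(seconds: 1));

  // Canceling stops the platform image stream ('stopImageStream').
  await subscription.cancel();
}
```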
type_conversion.dart (new file)
@@ -0,0 +1,61 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:typed_data';

import 'package:flutter/foundation.dart';

import '../types/types.dart';

/// Converts frame [data] received from the platform camera image stream to a
/// [CameraImageData].
CameraImageData cameraImageFromPlatformData(Map<dynamic, dynamic> data) {
return CameraImageData(
format: _cameraImageFormatFromPlatformData(data['format']),
height: data['height'] as int,
width: data['width'] as int,
lensAperture: data['lensAperture'] as double?,
sensorExposureTime: data['sensorExposureTime'] as int?,
sensorSensitivity: data['sensorSensitivity'] as double?,
planes: List<CameraImagePlane>.unmodifiable(
(data['planes'] as List<dynamic>).map<CameraImagePlane>(
(dynamic planeData) => _cameraImagePlaneFromPlatformData(
planeData as Map<dynamic, dynamic>))));
}

CameraImageFormat _cameraImageFormatFromPlatformData(dynamic data) {
return CameraImageFormat(_imageFormatGroupFromPlatformData(data), raw: data);
}

ImageFormatGroup _imageFormatGroupFromPlatformData(dynamic data) {
if (defaultTargetPlatform == TargetPlatform.android) {
switch (data) {
case 35: // android.graphics.ImageFormat.YUV_420_888
return ImageFormatGroup.yuv420;
case 256: // android.graphics.ImageFormat.JPEG
return ImageFormatGroup.jpeg;
}
}

if (defaultTargetPlatform == TargetPlatform.iOS) {
switch (data) {
case 875704438: // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
return ImageFormatGroup.yuv420;

case 1111970369: // kCVPixelFormatType_32BGRA
return ImageFormatGroup.bgra8888;
}
}

return ImageFormatGroup.unknown;
}

CameraImagePlane _cameraImagePlaneFromPlatformData(Map<dynamic, dynamic> data) {
return CameraImagePlane(
bytes: data['bytes'] as Uint8List,
bytesPerPixel: data['bytesPerPixel'] as int?,
bytesPerRow: data['bytesPerRow'] as int,
height: data['height'] as int?,
width: data['width'] as int?);
}
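To make the mapping above concrete, here is a hedged sketch of converting a hypothetical Android YUV_420_888 payload; the literal values are invented, but the keys match those read by `cameraImageFromPlatformData`. Note that this helper lives in the package's method-channel implementation and is not part of the public API, so the call is shown for illustration only.

```dart
import 'dart:typed_data';

import 'package:camera_platform_interface/camera_platform_interface.dart';

void main() {
  // Hypothetical payload shaped like the map sent for a 1280x720
  // YUV_420_888 frame (only the Y plane is shown for brevity).
  final Map<dynamic, dynamic> platformData = <dynamic, dynamic>{
    'format': 35, // android.graphics.ImageFormat.YUV_420_888
    'width': 1280,
    'height': 720,
    'lensAperture': 1.8,
    'sensorExposureTime': 16666666,
    'sensorSensitivity': 100.0,
    'planes': <dynamic>[
      <dynamic, dynamic>{
        'bytes': Uint8List(1280 * 720),
        'bytesPerRow': 1280,
        'bytesPerPixel': 1,
        'width': 1280,
        'height': 720,
      },
    ],
  };

  // cameraImageFromPlatformData is library-private; illustration only.
  final CameraImageData image = cameraImageFromPlatformData(platformData);
  print(image.format.group); // ImageFormatGroup.yuv420 when run on Android.
  print(image.planes.first.bytesPerRow); // 1280
}
```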
@@ -149,6 +149,21 @@ abstract class CameraPlatform extends PlatformInterface {
throw UnimplementedError('resumeVideoRecording() is not implemented.');
}

/// A new streamed frame is available.
///
/// Listening to this stream will start streaming, and canceling will stop.
/// Pausing will throw a [CameraException], as pausing the stream would cause
/// very high memory usage; to temporarily stop receiving frames, cancel, then
/// listen again later.
///
// TODO(bmparr): Add options to control streaming settings (e.g.,
// resolution and FPS).
Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
{CameraImageStreamOptions? options}) {
throw UnimplementedError('onStreamedFrameAvailable() is not implemented.');
}

Review comment (Contributor): I'm confused by this name. Does the app-facing package notify the platform implementation package that a frame is available? Based on the MethodChannel implementation, it looks like this method creates the stream that the app-facing package listens to. Would a better name be something like createCameraImageDataStream?

Reply (Contributor, author): I don't like this name either; I used it because it's the pattern used by all of the existing streams in the interface. Do you want to pick a new naming scheme for platform-interface streams in this plugin as part of this PR? If so, I could add alternately named versions of all of the existing ones, with passthroughs to the old names (soft-deprecated with a comment), so that we'd get consistency.

Reply (Contributor): Ah, I see. I think I was confused by the call to startImageStream. I understand now that the app-facing package is adding a listener to the stream created by the platform implementation, so the current name is appropriate for how it is being used. Streams and callbacks in Flutter tend to use the on<event-name> pattern, like the one here.

/// Sets the flash mode for the selected camera.
/// On Web [FlashMode.auto] corresponds to [FlashMode.always].
Future<void> setFlashMode(int cameraId, FlashMode mode) {
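For platform-implementation authors, a hypothetical override of the new method might look like the sketch below; the class name and stream wiring are invented, and `MethodChannelCamera` in this PR shows the real method-channel version.

```dart
import 'dart:async';

import 'package:camera_platform_interface/camera_platform_interface.dart';

/// Hypothetical platform implementation, for illustration only.
class ExampleCamera extends CameraPlatform {
  final StreamController<CameraImageData> _frames =
      StreamController<CameraImageData>.broadcast();

  @override
  Stream<CameraImageData> onStreamedFrameAvailable(int cameraId,
      {CameraImageStreamOptions? options}) {
    // A real implementation would start and stop native capture in the
    // controller's onListen/onCancel callbacks, as MethodChannelCamera does.
    return _frames.stream;
  }
}
```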
camera_image_data.dart (new file)
@@ -0,0 +1,126 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:typed_data';

import 'package:flutter/foundation.dart';

import '../../camera_platform_interface.dart';

/// Options for configuring camera streaming.
///
/// Currently unused; this exists for future-proofing of the platform interface
/// API.
@immutable
class CameraImageStreamOptions {}

/// A single color plane of image data.
///
/// The number and meaning of the planes in an image are determined by its
/// format.
@immutable
class CameraImagePlane {
/// Creates a new instance with the given bytes and optional metadata.
const CameraImagePlane({
required this.bytes,
required this.bytesPerRow,
this.bytesPerPixel,
this.height,
this.width,
});

/// Bytes representing this plane.
final Uint8List bytes;

/// The row stride for this color plane, in bytes.
final int bytesPerRow;

/// The distance between adjacent pixel samples in bytes, when available.
final int? bytesPerPixel;

/// Height of the pixel buffer, when available.
final int? height;

/// Width of the pixel buffer, when available.
final int? width;
}

/// Describes how pixels are represented in an image.
@immutable
class CameraImageFormat {
/// Creates a new format with the given cross-platform group and raw underlying
/// platform identifier.
const CameraImageFormat(this.group, {required this.raw});

/// Describes the format group the raw image format falls into.
final ImageFormatGroup group;

/// Raw version of the format from the underlying platform.
///
/// On Android, this should be an `int` from class
/// `android.graphics.ImageFormat`. See
/// https://developer.android.com/reference/android/graphics/ImageFormat
///
/// On iOS, this should be a `FourCharCode` constant from Pixel Format
/// Identifiers. See
/// https://developer.apple.com/documentation/corevideo/1563591-pixel_format_identifiers
final dynamic raw;
}

/// A single complete image buffer from the platform camera.
///
/// This class allows for direct application access to the pixel data of an
/// image through one or more [Uint8List]s. Each buffer is encapsulated in a
/// [CameraImagePlane] that describes the layout of the pixel data in that
/// plane. [CameraImageData] is not directly usable as a UI resource.
///
/// Although not all image formats are planar on all platforms, this class
/// treats 1-dimensional images as single planar images.
@immutable
class CameraImageData {
/// Creates a new instance with the given format, planes, and metadata.
const CameraImageData({
required this.format,
required this.planes,
required this.height,
required this.width,
this.lensAperture,
this.sensorExposureTime,
this.sensorSensitivity,
});

/// Format of the image provided.
///
/// Determines the number of planes needed to represent the image, and
/// the general layout of the pixel data in each [Uint8List].
final CameraImageFormat format;

/// Height of the image in pixels.
///
/// For formats where some color channels are subsampled, this is the height
/// of the largest-resolution plane.
final int height;

/// Width of the image in pixels.
///
/// For formats where some color channels are subsampled, this is the width
/// of the largest-resolution plane.
final int width;

/// The pixel planes for this image.
///
/// The number of planes is determined by the format of the image.
final List<CameraImagePlane> planes;

/// The aperture settings for this image.
///
/// Represented as an f-stop value.
final double? lensAperture;

/// The sensor exposure time for this image in nanoseconds.
final int? sensorExposureTime;

/// The sensor sensitivity in standard ISO arithmetic units.
final double? sensorSensitivity;
}
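As a rough illustration of how these types fit together, here is a short sketch of a consumer inspecting a frame. The assumption that plane 0 of a YUV420 image is the luminance plane is not stated in this PR, but it holds for the Android and iOS formats mapped above.

```dart
import 'package:camera_platform_interface/camera_platform_interface.dart';

/// Returns the luminance of the top-left pixel of a YUV420 frame, or null for
/// other formats. Illustration only; assumes plane 0 is the Y (luma) plane.
int? topLeftLuma(CameraImageData image) {
  if (image.format.group != ImageFormatGroup.yuv420) {
    return null;
  }
  final CameraImagePlane yPlane = image.planes.first;
  // bytesPerRow may exceed width due to padding; index 0 is always the
  // first sample of the first row.
  return yPlane.bytes[0];
}
```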
@@ -4,6 +4,7 @@

export 'camera_description.dart';
export 'camera_exception.dart';
export 'camera_image_data.dart';
export 'exposure_mode.dart';
export 'flash_mode.dart';
export 'focus_mode.dart';
2 changes: 1 addition & 1 deletion packages/camera/camera_platform_interface/pubspec.yaml
@@ -4,7 +4,7 @@ repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera_
issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
# NOTE: We strongly prefer non-breaking changes, even at the expense of a
# less-clean API. See https://flutter.dev/go/platform-interface-breaking-changes
version: 2.1.6
version: 2.2.0

environment:
sdk: '>=2.12.0 <3.0.0'
@@ -1038,6 +1038,52 @@ void main() {
arguments: <String, Object?>{'cameraId': cameraId}),
]);
});

test('Should start streaming', () async {
// Arrange
Review comment (Contributor): I see that this [the Arrange/Act/Assert comments] is done throughout the entire file. I don't have any comments on it; it just seems unnecessary to do for each test.

Reply (Contributor, author): I'm not a big fan of the commenting myself, although I can see the value in reminding people to always think this way about test structure (since it is a good structure, and I've seen plenty of tests that turn into soup when people don't follow it). I was just going for file-level consistency.

final MethodChannelMock channel = MethodChannelMock(
channelName: 'plugins.flutter.io/camera',
methods: <String, dynamic>{
'startImageStream': null,
'stopImageStream': null,
},
);

// Act
final StreamSubscription<CameraImageData> subscription = camera
.onStreamedFrameAvailable(cameraId)
.listen((CameraImageData imageData) {});

// Assert
expect(channel.log, <Matcher>[
isMethodCall('startImageStream', arguments: null),
]);

subscription.cancel();
});

test('Should stop streaming', () async {
// Arrange
final MethodChannelMock channel = MethodChannelMock(
channelName: 'plugins.flutter.io/camera',
methods: <String, dynamic>{
'startImageStream': null,
'stopImageStream': null,
},
);

// Act
final StreamSubscription<CameraImageData> subscription = camera
.onStreamedFrameAvailable(cameraId)
.listen((CameraImageData imageData) {});
subscription.cancel();

// Assert
expect(channel.log, <Matcher>[
isMethodCall('startImageStream', arguments: null),
isMethodCall('stopImageStream', arguments: null),
]);
});
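
// NOTE: A hypothetical follow-up test sketch, not part of this PR. It reuses
// the MethodChannelMock pattern from the tests above to check that streaming
// can be restarted after a cancel.
test('Should restart streaming after cancel', () async {
// Arrange
final MethodChannelMock channel = MethodChannelMock(
channelName: 'plugins.flutter.io/camera',
methods: <String, dynamic>{
'startImageStream': null,
'stopImageStream': null,
},
);

// Act
final StreamSubscription<CameraImageData> first = camera
.onStreamedFrameAvailable(cameraId)
.listen((CameraImageData imageData) {});
await first.cancel();
final StreamSubscription<CameraImageData> second = camera
.onStreamedFrameAvailable(cameraId)
.listen((CameraImageData imageData) {});

// Assert
expect(channel.log, <Matcher>[
isMethodCall('startImageStream', arguments: null),
isMethodCall('stopImageStream', arguments: null),
isMethodCall('startImageStream', arguments: null),
]);

await second.cancel();
});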
});
});
}