diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md
index d19b500af2d7..03b9293b6893 100644
--- a/packages/camera/camera/CHANGELOG.md
+++ b/packages/camera/camera/CHANGELOG.md
@@ -1,3 +1,7 @@
+## 0.9.4+18
+
+* Fixes a crash on iOS when streaming on low-performance devices.
+
 ## 0.9.4+17
 
 * Removes obsolete information from README, and adds OS support table.
diff --git a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj
index 6c171505a8ca..37f56d0ed52e 100644
--- a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj
+++ b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj
@@ -15,6 +15,7 @@
 		25C3919135C3D981E6F800D0 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */; };
 		334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */; };
 		3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; };
+		788A065A27B0E02900533D74 /* StreamingTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 788A065927B0E02900533D74 /* StreamingTest.m */; };
 		978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; };
 		97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; };
 		97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; };
@@ -70,6 +71,7 @@
 		1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; };
 		3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = "<group>"; };
 		59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = "<group>"; };
+		788A065927B0E02900533D74 /* StreamingTest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = StreamingTest.m; sourceTree = "<group>"; };
 		7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = "<group>"; };
 		7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = "<group>"; };
 		7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = "<group>"; };
@@ -142,6 +144,7 @@
 				F63F9EED27143B19002479BF /* MockFLTThreadSafeFlutterResult.h */,
 				E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */,
 				E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */,
+				788A065927B0E02900533D74 /* StreamingTest.m */,
 			);
 			path = RunnerTests;
 			sourceTree = "<group>";
@@ -416,6 +419,7 @@
 				E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */,
 				334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */,
 				E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */,
+				788A065A27B0E02900533D74 /* StreamingTest.m in Sources */,
 				E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */,
 				E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */,
 				E0C6E2002770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m in Sources */,
diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m
new file mode 100644
index 000000000000..1843cce12152
--- /dev/null
+++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m
@@ -0,0 +1,85 @@
+// Copyright 2013 The Flutter Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+@import camera;
+@import camera.Test;
+@import XCTest;
+@import AVFoundation;
+#import <OCMock/OCMock.h>
+#import "CameraTestUtils.h"
+
+@interface StreamingTests : XCTestCase
+@property(readonly, nonatomic) FLTCam *camera;
+@property(readonly, nonatomic) CMSampleBufferRef sampleBuffer;
+@end
+
+@implementation StreamingTests
+
+- (void)setUp {
+  dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL);
+  _camera = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
+  _sampleBuffer = FLTCreateTestSampleBuffer();
+}
+
+- (void)tearDown {
+  CFRelease(_sampleBuffer);
+}
+
+- (void)testExceedMaxStreamingPendingFramesCount {
+  XCTestExpectation *streamingExpectation = [self
+      expectationWithDescription:@"Must not call handler over maxStreamingPendingFramesCount"];
+
+  id handlerMock = OCMClassMock([FLTImageStreamHandler class]);
+  OCMStub([handlerMock eventSink]).andReturn(^(id event) {
+    [streamingExpectation fulfill];
+  });
+
+  id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
+  [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock];
+
+  XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages"
+                                                                       object:_camera
+                                                                expectedValue:@YES];
+  XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1];
+  XCTAssertEqual(result, XCTWaiterResultCompleted);
+
+  streamingExpectation.expectedFulfillmentCount = 4;
+  for (int i = 0; i < 10; i++) {
+    [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
+  }
+
+  [self waitForExpectationsWithTimeout:1.0 handler:nil];
+}
+
+- (void)testReceivedImageStreamData {
+  XCTestExpectation *streamingExpectation =
+      [self expectationWithDescription:
+                @"Must be able to call the handler again when receivedImageStreamData is called"];
+
+  id handlerMock = OCMClassMock([FLTImageStreamHandler class]);
+  OCMStub([handlerMock eventSink]).andReturn(^(id event) {
+    [streamingExpectation fulfill];
+  });
+
+  id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger));
+  [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock];
+
+  XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages"
+                                                                       object:_camera
+                                                                expectedValue:@YES];
+  XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1];
+  XCTAssertEqual(result, XCTWaiterResultCompleted);
+
+  streamingExpectation.expectedFulfillmentCount = 5;
+  for (int i = 0; i < 10; i++) {
+    [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
+  }
+
+  [_camera receivedImageStreamData];
+  [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil];
+
+  [self waitForExpectationsWithTimeout:1.0 handler:nil];
+}
+
+@end
diff --git a/packages/camera/camera/ios/Classes/CameraPlugin.m b/packages/camera/camera/ios/Classes/CameraPlugin.m
index 634aa699a01a..c0a3833dcd64 100644
--- a/packages/camera/camera/ios/Classes/CameraPlugin.m
+++ b/packages/camera/camera/ios/Classes/CameraPlugin.m
@@ -162,6 +162,9 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call
   } else if ([@"stopImageStream" isEqualToString:call.method]) {
     [_camera stopImageStream];
     [result sendSuccess];
+  } else if ([@"receivedImageStreamData" isEqualToString:call.method]) {
+    [_camera receivedImageStreamData];
+    [result sendSuccess];
   } else {
     NSDictionary *argsMap = call.arguments;
     NSUInteger cameraId = ((NSNumber *)argsMap[@"cameraId"]).unsignedIntegerValue;
diff --git a/packages/camera/camera/ios/Classes/FLTCam.h b/packages/camera/camera/ios/Classes/FLTCam.h
index 0cd135e0e41f..8a5dafaf8354 100644
--- a/packages/camera/camera/ios/Classes/FLTCam.h
+++ b/packages/camera/camera/ios/Classes/FLTCam.h
@@ -61,6 +61,14 @@
 - (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr;
 - (void)applyFocusMode;
 
+/**
+ * Acknowledges the receipt of one image stream frame.
+ *
+ * This should be called each time a frame is received. Failing to call it may
+ * cause later frames to be dropped instead of streamed.
+ */
+- (void)receivedImageStreamData;
+
 /**
  * Applies FocusMode on the AVCaptureDevice.
 *
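The `receivedImageStreamData` comment above describes one half of a frame-accounting handshake; the FLTCam.m hunks that enforce the limit on the capture side (the default of 4 pending frames that the tests rely on, and the guard inside `captureOutput:didOutputSampleBuffer:fromConnection:`) are not reproduced in this excerpt. The following is a minimal sketch of that capture-side guard, not a hunk from the change itself; `maxStreamingPendingFramesCount` and the `imageStreamHandler` property access are assumptions here, while `isStreamingImages`, `streamingPendingFramesCount`, and `receivedImageStreamData` appear verbatim elsewhere in this diff.

// Sketch of the capture-side guard, not a literal hunk from this change.
// Runs on the capture session queue for every frame delivered by AVFoundation.
if (self.isStreamingImages) {
  FlutterEventSink eventSink = self.imageStreamHandler.eventSink;  // assumed property access
  if (eventSink &&
      self.streamingPendingFramesCount < self.maxStreamingPendingFramesCount) {
    // One more frame is in flight until Dart acknowledges it through the
    // "receivedImageStreamData" method channel, which decrements the counter.
    self.streamingPendingFramesCount++;
    NSDictionary *imageData = @{ /* serialized sample buffer planes */ };
    // The sink block itself must be invoked on the main queue.
    dispatch_async(dispatch_get_main_queue(), ^{
      eventSink(imageData);
    });
  }
  // When the counter has reached the limit, the frame is simply dropped, which
  // bounds the backlog that previously crashed low-performance devices.
}

Under such a guard, the ten synthetic frames in `testExceedMaxStreamingPendingFramesCount` can only produce four `eventSink` calls, and each `receivedImageStreamData` call frees one slot, which is why `testReceivedImageStreamData` expects a fifth delivery.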
diff --git a/packages/camera/camera/ios/Classes/FLTCam.m b/packages/camera/camera/ios/Classes/FLTCam.m
index 30c177bd4b2a..7af505b249cb 100644
--- a/packages/camera/camera/ios/Classes/FLTCam.m
+++ b/packages/camera/camera/ios/Classes/FLTCam.m
@@ -10,14 +10,6 @@
 @import CoreMotion;
 #import <libkern/OSAtomic.h>
 
-@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
-// The queue on which `eventSink` property should be accessed
-@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
-// `eventSink` property should be accessed on `captureSessionQueue`.
-// The block itself should be invoked on the main queue.
-@property FlutterEventSink eventSink;
-@end
-
 @implementation FLTImageStreamHandler
 
 - (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue {
@@ -68,7 +60,13 @@ @interface FLTCam ()
 - (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger {
+  [self startImageStreamWithMessenger:messenger
+                   imageStreamHandler:[[FLTImageStreamHandler alloc]
+                                          initWithCaptureSessionQueue:_captureSessionQueue]];
+}
+
+- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
+                   imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler {
   if (!_isStreamingImages) {
     FlutterEventChannel *eventChannel =
         [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream"
@@ -905,12 +916,12 @@ - (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
     FLTThreadSafeEventChannel *threadSafeEventChannel =
         [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel];
 
-    _imageStreamHandler =
-        [[FLTImageStreamHandler alloc] initWithCaptureSessionQueue:_captureSessionQueue];
+    _imageStreamHandler = imageStreamHandler;
     [threadSafeEventChannel setStreamHandler:_imageStreamHandler
                                   completion:^{
                                     dispatch_async(self->_captureSessionQueue, ^{
                                       self.isStreamingImages = YES;
+                                      self.streamingPendingFramesCount = 0;
                                     });
                                   }];
   } else {
@@ -928,6 +939,10 @@ - (void)stopImageStream {
   }
 }
 
+- (void)receivedImageStreamData {
+  self.streamingPendingFramesCount--;
+}
+
 - (void)getMaxZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result {
   CGFloat maxZoomFactor = [self getMaxAvailableZoomFactor];
 
diff --git a/packages/camera/camera/ios/Classes/FLTCam_Test.h b/packages/camera/camera/ios/Classes/FLTCam_Test.h
index a1f9f2b65981..19e284227f4f 100644
--- a/packages/camera/camera/ios/Classes/FLTCam_Test.h
+++ b/packages/camera/camera/ios/Classes/FLTCam_Test.h
@@ -5,6 +5,19 @@
 #import "FLTCam.h"
 #import "FLTSavePhotoDelegate.h"
 
+@interface FLTImageStreamHandler : NSObject <FlutterStreamHandler>
+
+/// The queue on which the `eventSink` property should be accessed.
+@property(nonatomic, strong) dispatch_queue_t captureSessionQueue;
+
+/// The event sink to stream camera events to Dart.
+///
+/// The property should only be accessed on `captureSessionQueue`.
+/// The block itself should be invoked on the main queue.
+@property FlutterEventSink eventSink;
+
+@end
+
 // APIs exposed for unit testing.
 @interface FLTCam ()
@@ -14,6 +27,9 @@
 /// The output for photo capturing. Exposed setter for unit tests.
 @property(strong, nonatomic) AVCapturePhotoOutput *capturePhotoOutput API_AVAILABLE(ios(10));
 
+/// True when images from the camera are being streamed.
+@property(assign, nonatomic) BOOL isStreamingImages;
+
 /// A dictionary to retain all in-progress FLTSavePhotoDelegates. The key of the dictionary is the
 /// AVCapturePhotoSettings's uniqueID for each photo capture operation, and the value is the
 /// FLTSavePhotoDelegate that handles the result of each photo capture operation. Note that photo
@@ -38,4 +54,8 @@ captureSessionQueue:(dispatch_queue_t)captureSessionQueue
                         error:(NSError **)error;
 
+/// Starts streaming images with an injectable `imageStreamHandler`, exposed for unit tests.
+- (void)startImageStreamWithMessenger:(NSObject<FlutterBinaryMessenger> *)messenger
+                   imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler;
+
 @end
diff --git a/packages/camera/camera/lib/src/camera_controller.dart b/packages/camera/camera/lib/src/camera_controller.dart
index 30e6221697c9..1492ca193761 100644
--- a/packages/camera/camera/lib/src/camera_controller.dart
+++ b/packages/camera/camera/lib/src/camera_controller.dart
@@ -448,6 +448,13 @@ class CameraController extends ValueNotifier<CameraValue> {
     _imageStreamSubscription =
         cameraEventChannel.receiveBroadcastStream().listen(
       (dynamic imageData) {
+        if (defaultTargetPlatform == TargetPlatform.iOS) {
+          try {
+            _channel.invokeMethod('receivedImageStreamData');
+          } on PlatformException catch (e) {
+            throw CameraException(e.code, e.message);
+          }
+        }
         onAvailable(
             CameraImage.fromPlatformData(imageData as Map<dynamic, dynamic>));
       },
diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml
index 0e684a743ba3..064eb919c96a 100644
--- a/packages/camera/camera/pubspec.yaml
+++ b/packages/camera/camera/pubspec.yaml
@@ -4,7 +4,7 @@ description: A Flutter plugin for controlling the camera. Supports previewing
   Dart.
 repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera
 issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22
-version: 0.9.4+17
+version: 0.9.4+18
 
 environment:
   sdk: ">=2.14.0 <3.0.0"
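The comments added to FLTCam_Test.h document a two-queue contract for `FLTImageStreamHandler`: the `eventSink` property is only touched on `captureSessionQueue`, while the block it stores must run on the main queue. The handler's `FlutterStreamHandler` methods are not part of this excerpt; the sketch below shows handler code that satisfies that documented contract and is an illustration, not the verbatim implementation.

// Sketch consistent with the threading contract documented in FLTCam_Test.h;
// not a hunk from this change.
- (FlutterError *)onListenWithArguments:(id)arguments eventSink:(FlutterEventSink)events {
  // Assign the sink on the capture session queue so the capture callback never
  // races with Dart subscribing to or cancelling the stream.
  dispatch_async(self.captureSessionQueue, ^{
    self.eventSink = events;
  });
  return nil;
}

- (FlutterError *)onCancelWithArguments:(id)arguments {
  dispatch_async(self.captureSessionQueue, ^{
    self.eventSink = nil;
  });
  return nil;
}

Reads follow the same rule: the capture callback, which already runs on `captureSessionQueue`, copies `eventSink` into a local variable and dispatches the actual invocation to the main queue, as in the sketch after the FLTCam.h section above.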