From 29beaa7ac044cb7088ee603a18653fbf0e4cf8a5 Mon Sep 17 00:00:00 2001 From: zuvola Date: Thu, 18 Nov 2021 14:10:20 +0900 Subject: [PATCH 01/28] Fixed crash when streaming in iOS --- packages/camera/camera/ios/Classes/CameraPlugin.m | 6 +++++- packages/camera/camera/lib/src/camera_controller.dart | 7 +++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/packages/camera/camera/ios/Classes/CameraPlugin.m b/packages/camera/camera/ios/Classes/CameraPlugin.m index 2c12081da807..edf8d6c3ec75 100644 --- a/packages/camera/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/camera/ios/Classes/CameraPlugin.m @@ -327,6 +327,7 @@ @interface FLTCam : NSObject { _imageStreamSubscription = cameraEventChannel.receiveBroadcastStream().listen( (dynamic imageData) { + if (defaultTargetPlatform == TargetPlatform.iOS) { + try { + _channel.invokeMethod('receivedImageStreamData'); + } on PlatformException catch (e) { + throw CameraException(e.code, e.message); + } + } onAvailable(CameraImage.fromPlatformData(imageData)); }, ); From aec0f93e27f15bfcc03b77576770b22547b91839 Mon Sep 17 00:00:00 2001 From: zuvola Date: Thu, 18 Nov 2021 14:23:10 +0900 Subject: [PATCH 02/28] Fixed skip judgment --- packages/camera/camera/ios/Classes/CameraPlugin.m | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/camera/camera/ios/Classes/CameraPlugin.m b/packages/camera/camera/ios/Classes/CameraPlugin.m index edf8d6c3ec75..acbbc8ccd2fd 100644 --- a/packages/camera/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/camera/ios/Classes/CameraPlugin.m @@ -605,9 +605,9 @@ - (void)captureOutput:(AVCaptureOutput *)output arguments:@"sample buffer is not ready. Skipping sample"]; return; } - if (_isStreamingImages && !_skipStreamingImages) { - _skipStreamingImages = YES; - if (_imageStreamHandler.eventSink) { + if (_isStreamingImages) { + if (_imageStreamHandler.eventSink && !_skipStreamingImages) { + _skipStreamingImages = YES; CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); @@ -1122,6 +1122,7 @@ - (void)startImageStreamWithMessenger:(NSObject *)messen [eventChannel setStreamHandler:_imageStreamHandler]; _isStreamingImages = YES; + _skipStreamingImages = NO; } else { [_methodChannel invokeMethod:errorMethod arguments:@"Images from camera are already streaming!"]; From 7cff7754465d2b760b766351a31a79e0611f2a3c Mon Sep 17 00:00:00 2001 From: zuvola Date: Mon, 22 Nov 2021 11:39:24 +0900 Subject: [PATCH 03/28] Add test --- .../example/integration_test/camera_test.dart | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/packages/camera/camera/example/integration_test/camera_test.dart b/packages/camera/camera/example/integration_test/camera_test.dart index d09300a3f906..8bc4b8311cad 100644 --- a/packages/camera/camera/example/integration_test/camera_test.dart +++ b/packages/camera/camera/example/integration_test/camera_test.dart @@ -240,4 +240,34 @@ void main() { }, skip: !Platform.isAndroid, ); + + testWidgets( + 'Image streaming persistence test on iOS', + (WidgetTester tester) async { + final List cameras = await availableCameras(); + if (cameras.isEmpty) { + return; + } + + final CameraController controller = CameraController( + cameras[0], + ResolutionPreset.max, + enableAudio: false, + ); + + await controller.initialize(); + int _frame = 0; + + await controller.startImageStream((CameraImage image) { + _frame++; + }); + + await Future.delayed(Duration(seconds: 5)); + 
expect(_frame > 30, true); + + await controller.stopImageStream(); + await controller.dispose(); + }, + skip: Platform.isAndroid, + ); } From 5d1fb5634737b7429c80ac4f3d1caca0340b7ad0 Mon Sep 17 00:00:00 2001 From: zuvola Date: Mon, 22 Nov 2021 11:39:56 +0900 Subject: [PATCH 04/28] Update CHANGELOG.md --- packages/camera/camera/CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md index 1a6eceb957b1..f40d79bd5092 100644 --- a/packages/camera/camera/CHANGELOG.md +++ b/packages/camera/camera/CHANGELOG.md @@ -2,6 +2,7 @@ * Fixes bug where calling a method after the camera was closed resulted in a Java `IllegalStateException` exception. * Fixes integration tests. +* Fixes crash when streaming in iOS. ## 0.9.4+4 From 7c0fa9eb1dccfe7fd2a2ab2107c95cad51d63f41 Mon Sep 17 00:00:00 2001 From: zuvola Date: Mon, 22 Nov 2021 13:58:54 +0900 Subject: [PATCH 05/28] Fix test --- .../camera/camera/example/integration_test/camera_test.dart | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/camera/camera/example/integration_test/camera_test.dart b/packages/camera/camera/example/integration_test/camera_test.dart index 8bc4b8311cad..23a77c00bdf4 100644 --- a/packages/camera/camera/example/integration_test/camera_test.dart +++ b/packages/camera/camera/example/integration_test/camera_test.dart @@ -268,6 +268,6 @@ void main() { await controller.stopImageStream(); await controller.dispose(); }, - skip: Platform.isAndroid, + skip: !Platform.isIOS, ); } From 3056e1778cf82be5d7a6fa5606df751a285c6732 Mon Sep 17 00:00:00 2001 From: zuvola Date: Fri, 10 Dec 2021 13:18:46 +0900 Subject: [PATCH 06/28] Add frameStack property to startImageStream --- .../example/integration_test/camera_test.dart | 6 +----- .../camera/camera/ios/Classes/CameraPlugin.m | 17 ++++++++++------- .../camera/lib/src/camera_controller.dart | 13 ++++++++++--- .../camera/camera/test/camera_preview_test.dart | 2 +- 4 files changed, 22 insertions(+), 16 deletions(-) diff --git a/packages/camera/camera/example/integration_test/camera_test.dart b/packages/camera/camera/example/integration_test/camera_test.dart index 23a77c00bdf4..74520e89bd6f 100644 --- a/packages/camera/camera/example/integration_test/camera_test.dart +++ b/packages/camera/camera/example/integration_test/camera_test.dart @@ -256,14 +256,10 @@ void main() { ); await controller.initialize(); - int _frame = 0; - await controller.startImageStream((CameraImage image) { - _frame++; - }); + await controller.startImageStream((CameraImage image) {}, frameStack: 3); await Future.delayed(Duration(seconds: 5)); - expect(_frame > 30, true); await controller.stopImageStream(); await controller.dispose(); diff --git a/packages/camera/camera/ios/Classes/CameraPlugin.m b/packages/camera/camera/ios/Classes/CameraPlugin.m index acbbc8ccd2fd..b88695c3f890 100644 --- a/packages/camera/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/camera/ios/Classes/CameraPlugin.m @@ -327,7 +327,8 @@ @interface FLTCam : NSObject *)messenger { +- (void)startImageStreamWithMessenger:(NSObject *)messenger frameStack:(int)frameStack { if (!_isStreamingImages) { FlutterEventChannel *eventChannel = [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream" @@ -1122,7 +1123,8 @@ - (void)startImageStreamWithMessenger:(NSObject *)messen [eventChannel setStreamHandler:_imageStreamHandler]; _isStreamingImages = YES; - _skipStreamingImages = NO; + _streamingFrameStack = 0; + _maxStreamingFrameStack = 
frameStack; } else { [_methodChannel invokeMethod:errorMethod arguments:@"Images from camera are already streaming!"]; @@ -1427,13 +1429,14 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call }]; } } else if ([@"startImageStream" isEqualToString:call.method]) { - [_camera startImageStreamWithMessenger:_messenger]; + NSNumber *frameStack = call.arguments[@"frameStack"]; + [_camera startImageStreamWithMessenger:_messenger frameStack:[frameStack intValue]]; [result sendSuccess]; } else if ([@"stopImageStream" isEqualToString:call.method]) { [_camera stopImageStream]; [result sendSuccess]; } else if ([@"receivedImageStreamData" isEqualToString:call.method]) { - _camera.skipStreamingImages = NO; + _camera.streamingFrameStack--; } else { NSDictionary *argsMap = call.arguments; NSUInteger cameraId = ((NSNumber *)argsMap[@"cameraId"]).unsignedIntegerValue; diff --git a/packages/camera/camera/lib/src/camera_controller.dart b/packages/camera/camera/lib/src/camera_controller.dart index 62c789df0e69..f17e12b69e2d 100644 --- a/packages/camera/camera/lib/src/camera_controller.dart +++ b/packages/camera/camera/lib/src/camera_controller.dart @@ -403,6 +403,11 @@ class CameraController extends ValueNotifier { /// have significant frame rate drops for [CameraPreview] on lower end /// devices. /// + /// On older iOS devices, frames pending processing can exhaust the memory + /// and cause crashes. In this case, you can use [frameStack] to limit + /// the maximum number of frames pending processing. The default value is 0, + /// which means there is no limit. + /// /// Throws a [CameraException] if image streaming or video recording has /// already started. /// @@ -410,7 +415,8 @@ class CameraController extends ValueNotifier { /// platforms won't be supported in current setup). /// // TODO(bmparr): Add settings for resolution and fps. 
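// NOTE (editorial aside, not part of this patch): a minimal, hedged sketch of how
// an app could use the `frameStack` limit introduced in this commit. The parameter
// only exists in this intermediate state of the series; PATCH 10/28 below removes
// it again in favour of a fixed internal limit of four pending frames.
import 'package:camera/camera.dart';

Future<void> streamWithPendingFrameLimit(CameraController controller) async {
  await controller.initialize();
  // Allow at most three frames to be in flight between iOS and Dart; further
  // frames are dropped on the native side instead of accumulating in memory.
  await controller.startImageStream(
    (CameraImage image) {
      // Keep per-frame work short; slow handlers are what let pending frames
      // pile up and crash older iOS devices.
    },
    frameStack: 3,
  );
}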
- Future startImageStream(onLatestImageAvailable onAvailable) async { + Future startImageStream(onLatestImageAvailable onAvailable, + {int frameStack = 0}) async { assert(defaultTargetPlatform == TargetPlatform.android || defaultTargetPlatform == TargetPlatform.iOS); _throwIfNotInitialized("startImageStream"); @@ -428,7 +434,8 @@ class CameraController extends ValueNotifier { } try { - await _channel.invokeMethod('startImageStream'); + await _channel + .invokeMethod('startImageStream', {'frameStack': frameStack}); value = value.copyWith(isStreamingImages: true); } on PlatformException catch (e) { throw CameraException(e.code, e.message); @@ -438,7 +445,7 @@ class CameraController extends ValueNotifier { _imageStreamSubscription = cameraEventChannel.receiveBroadcastStream().listen( (dynamic imageData) { - if (defaultTargetPlatform == TargetPlatform.iOS) { + if (defaultTargetPlatform == TargetPlatform.iOS && frameStack > 0) { try { _channel.invokeMethod('receivedImageStreamData'); } on PlatformException catch (e) { diff --git a/packages/camera/camera/test/camera_preview_test.dart b/packages/camera/camera/test/camera_preview_test.dart index 32718f4d5169..a5a18aa5935e 100644 --- a/packages/camera/camera/test/camera_preview_test.dart +++ b/packages/camera/camera/test/camera_preview_test.dart @@ -97,7 +97,7 @@ class FakeController extends ValueNotifier Future setZoomLevel(double zoom) async {} @override - Future startImageStream(onAvailable) async {} + Future startImageStream(onAvailable, {int frameStack = 0}) async {} @override Future startVideoRecording() async {} From 27957a190432b8e8f289db884dece9b717e26c97 Mon Sep 17 00:00:00 2001 From: zuvola Date: Fri, 10 Dec 2021 13:24:28 +0900 Subject: [PATCH 07/28] Bump version to 0.9.4+6 --- packages/camera/camera/CHANGELOG.md | 5 ++++- packages/camera/camera/pubspec.yaml | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md index f40d79bd5092..f82fdf2f58f9 100644 --- a/packages/camera/camera/CHANGELOG.md +++ b/packages/camera/camera/CHANGELOG.md @@ -1,8 +1,11 @@ +## 0.9.4+6 + +* Added `frameStack` property to `startImageStream`. + ## 0.9.4+5 * Fixes bug where calling a method after the camera was closed resulted in a Java `IllegalStateException` exception. * Fixes integration tests. -* Fixes crash when streaming in iOS. ## 0.9.4+4 diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml index 58e1ca3ca98c..4c6c7e7e52d1 100644 --- a/packages/camera/camera/pubspec.yaml +++ b/packages/camera/camera/pubspec.yaml @@ -4,7 +4,7 @@ description: A Flutter plugin for controlling the camera. Supports previewing Dart. 
repository: https://github.com/flutter/plugins/tree/master/packages/camera/camera issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.4+5 +version: 0.9.4+6 environment: sdk: ">=2.14.0 <3.0.0" From 97d16c7d7a98fd95ba22fc9c9e89fce7f1e15418 Mon Sep 17 00:00:00 2001 From: zuvola Date: Fri, 10 Dec 2021 13:54:07 +0900 Subject: [PATCH 08/28] format --- packages/camera/camera/ios/Classes/CameraPlugin.m | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/camera/camera/ios/Classes/CameraPlugin.m b/packages/camera/camera/ios/Classes/CameraPlugin.m index b88695c3f890..f9dbad683723 100644 --- a/packages/camera/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/camera/ios/Classes/CameraPlugin.m @@ -607,7 +607,8 @@ - (void)captureOutput:(AVCaptureOutput *)output return; } if (_isStreamingImages) { - if (_imageStreamHandler.eventSink && (_maxStreamingFrameStack < 1 || _streamingFrameStack < _maxStreamingFrameStack)) { + if (_imageStreamHandler.eventSink && + (_maxStreamingFrameStack < 1 || _streamingFrameStack < _maxStreamingFrameStack)) { _streamingFrameStack++; CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); @@ -1113,7 +1114,8 @@ - (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset: [result sendSuccessWithData:@(offset)]; } -- (void)startImageStreamWithMessenger:(NSObject *)messenger frameStack:(int)frameStack { +- (void)startImageStreamWithMessenger:(NSObject *)messenger + frameStack:(int)frameStack { if (!_isStreamingImages) { FlutterEventChannel *eventChannel = [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream" From 1382d62b986896e0f223564c1be5a5d0b1bf0f87 Mon Sep 17 00:00:00 2001 From: zuvola Date: Tue, 14 Dec 2021 10:34:57 +0900 Subject: [PATCH 09/28] Fixed tests --- packages/camera/camera/test/camera_image_stream_test.dart | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/camera/camera/test/camera_image_stream_test.dart b/packages/camera/camera/test/camera_image_stream_test.dart index 840770d1eed7..a688f7168a10 100644 --- a/packages/camera/camera/test/camera_image_stream_test.dart +++ b/packages/camera/camera/test/camera_image_stream_test.dart @@ -104,8 +104,9 @@ void main() { await cameraController.startImageStream((image) => null); - expect(cameraChannelMock.log, - [isMethodCall('startImageStream', arguments: null)]); + expect(cameraChannelMock.log, [ + isMethodCall('startImageStream', arguments: {'frameStack': 0}) + ]); expect(streamChannelMock.log, [isMethodCall('listen', arguments: null)]); }); @@ -196,7 +197,7 @@ void main() { await cameraController.stopImageStream(); expect(cameraChannelMock.log, [ - isMethodCall('startImageStream', arguments: null), + isMethodCall('startImageStream', arguments: {'frameStack': 0}), isMethodCall('stopImageStream', arguments: null) ]); From 84dc53d254b20c9eb9674fa7104183c60712da80 Mon Sep 17 00:00:00 2001 From: zuvola Date: Fri, 28 Jan 2022 14:20:10 +0900 Subject: [PATCH 10/28] Restored the API and set the number of pending frames to 4 --- .../example/integration_test/camera_test.dart | 2 +- .../camera/camera/ios/Classes/CameraPlugin.m | 21 ++++++++++++------- .../camera/lib/src/camera_controller.dart | 13 +++--------- .../camera/test/camera_preview_test.dart | 2 +- 4 files changed, 18 insertions(+), 20 deletions(-) diff --git 
a/packages/camera/camera/example/integration_test/camera_test.dart b/packages/camera/camera/example/integration_test/camera_test.dart index 50acec93aada..cc4f7b45ad9c 100644 --- a/packages/camera/camera/example/integration_test/camera_test.dart +++ b/packages/camera/camera/example/integration_test/camera_test.dart @@ -309,7 +309,7 @@ void main() { await controller.initialize(); - await controller.startImageStream((CameraImage image) {}, frameStack: 3); + await controller.startImageStream((CameraImage image) {}); await Future.delayed(Duration(seconds: 5)); diff --git a/packages/camera/camera/ios/Classes/CameraPlugin.m b/packages/camera/camera/ios/Classes/CameraPlugin.m index 9b68f21bce57..e73141cb75e1 100644 --- a/packages/camera/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/camera/ios/Classes/CameraPlugin.m @@ -304,8 +304,8 @@ @interface FLTCam : NSObject *)messenger - frameStack:(int)frameStack { +- (void)startImageStreamWithMessenger:(NSObject *)messenger { if (!_isStreamingImages) { FlutterEventChannel *eventChannel = [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream" @@ -1118,6 +1123,7 @@ - (void)startImageStreamWithMessenger:(NSObject *)messen completion:^{ dispatch_async(self->_captureSessionQueue, ^{ self.isStreamingImages = YES; + self.streamingPendingFrames = 0; }); }]; } else { @@ -1423,14 +1429,13 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call }]; } } else if ([@"startImageStream" isEqualToString:call.method]) { - NSNumber *frameStack = call.arguments[@"frameStack"]; - [_camera startImageStreamWithMessenger:_messenger frameStack:[frameStack intValue]]; + [_camera startImageStreamWithMessenger:_messenger]; [result sendSuccess]; } else if ([@"stopImageStream" isEqualToString:call.method]) { [_camera stopImageStream]; [result sendSuccess]; } else if ([@"receivedImageStreamData" isEqualToString:call.method]) { - _camera.streamingFrameStack--; + _camera.streamingPendingFrames--; } else { NSDictionary *argsMap = call.arguments; NSUInteger cameraId = ((NSNumber *)argsMap[@"cameraId"]).unsignedIntegerValue; diff --git a/packages/camera/camera/lib/src/camera_controller.dart b/packages/camera/camera/lib/src/camera_controller.dart index f17e12b69e2d..62c789df0e69 100644 --- a/packages/camera/camera/lib/src/camera_controller.dart +++ b/packages/camera/camera/lib/src/camera_controller.dart @@ -403,11 +403,6 @@ class CameraController extends ValueNotifier { /// have significant frame rate drops for [CameraPreview] on lower end /// devices. /// - /// On older iOS devices, frames pending processing can exhaust the memory - /// and cause crashes. In this case, you can use [frameStack] to limit - /// the maximum number of frames pending processing. The default value is 0, - /// which means there is no limit. - /// /// Throws a [CameraException] if image streaming or video recording has /// already started. /// @@ -415,8 +410,7 @@ class CameraController extends ValueNotifier { /// platforms won't be supported in current setup). /// // TODO(bmparr): Add settings for resolution and fps. 
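// NOTE (editorial aside, not part of this patch): with the public API restored in
// this commit, callers use startImageStream exactly as before; the pending-frame
// limit (four frames, acknowledged internally via 'receivedImageStreamData') is
// transparent to them. A minimal consumer sketch, under that assumption:
import 'package:camera/camera.dart';
import 'package:flutter/foundation.dart';

Future<void> consumeFrames(CameraController controller) async {
  await controller.initialize();
  await controller.startImageStream((CameraImage image) {
    // Do only cheap work synchronously; heavy processing in this callback is
    // exactly what lets pending frames accumulate on slower iOS devices.
    debugPrint('frame: ${image.width}x${image.height}, '
        '${image.planes.length} plane(s)');
  });
}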
- Future startImageStream(onLatestImageAvailable onAvailable, - {int frameStack = 0}) async { + Future startImageStream(onLatestImageAvailable onAvailable) async { assert(defaultTargetPlatform == TargetPlatform.android || defaultTargetPlatform == TargetPlatform.iOS); _throwIfNotInitialized("startImageStream"); @@ -434,8 +428,7 @@ class CameraController extends ValueNotifier { } try { - await _channel - .invokeMethod('startImageStream', {'frameStack': frameStack}); + await _channel.invokeMethod('startImageStream'); value = value.copyWith(isStreamingImages: true); } on PlatformException catch (e) { throw CameraException(e.code, e.message); @@ -445,7 +438,7 @@ class CameraController extends ValueNotifier { _imageStreamSubscription = cameraEventChannel.receiveBroadcastStream().listen( (dynamic imageData) { - if (defaultTargetPlatform == TargetPlatform.iOS && frameStack > 0) { + if (defaultTargetPlatform == TargetPlatform.iOS) { try { _channel.invokeMethod('receivedImageStreamData'); } on PlatformException catch (e) { diff --git a/packages/camera/camera/test/camera_preview_test.dart b/packages/camera/camera/test/camera_preview_test.dart index a5a18aa5935e..32718f4d5169 100644 --- a/packages/camera/camera/test/camera_preview_test.dart +++ b/packages/camera/camera/test/camera_preview_test.dart @@ -97,7 +97,7 @@ class FakeController extends ValueNotifier Future setZoomLevel(double zoom) async {} @override - Future startImageStream(onAvailable, {int frameStack = 0}) async {} + Future startImageStream(onAvailable) async {} @override Future startVideoRecording() async {} From 3ed1e94d0dde99a8a96f9a0df284e15bbc9c4a16 Mon Sep 17 00:00:00 2001 From: zuvola Date: Mon, 7 Feb 2022 13:54:12 +0900 Subject: [PATCH 11/28] Fixed tests --- .../example/integration_test/camera_test.dart | 26 ----- .../ios/Runner.xcodeproj/project.pbxproj | 4 + .../example/ios/RunnerTests/StreamingTest.m | 97 +++++++++++++++++++ 3 files changed, 101 insertions(+), 26 deletions(-) create mode 100644 packages/camera/camera/example/ios/RunnerTests/StreamingTest.m diff --git a/packages/camera/camera/example/integration_test/camera_test.dart b/packages/camera/camera/example/integration_test/camera_test.dart index cc4f7b45ad9c..3af291afe63b 100644 --- a/packages/camera/camera/example/integration_test/camera_test.dart +++ b/packages/camera/camera/example/integration_test/camera_test.dart @@ -292,30 +292,4 @@ void main() { }, skip: !Platform.isIOS, ); - - testWidgets( - 'Image streaming persistence test on iOS', - (WidgetTester tester) async { - final List cameras = await availableCameras(); - if (cameras.isEmpty) { - return; - } - - final CameraController controller = CameraController( - cameras[0], - ResolutionPreset.max, - enableAudio: false, - ); - - await controller.initialize(); - - await controller.startImageStream((CameraImage image) {}); - - await Future.delayed(Duration(seconds: 5)); - - await controller.stopImageStream(); - await controller.dispose(); - }, - skip: !Platform.isIOS, - ); } diff --git a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj index 87bcf2e666cd..465b1068ef38 100644 --- a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj @@ -15,6 +15,7 @@ 25C3919135C3D981E6F800D0 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */; }; 334733EA2668111C00DCC49E /* 
CameraOrientationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */; }; 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; + 7888073027A77C12000EA0DC /* StreamingTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 7888072F27A77C12000EA0DC /* StreamingTest.m */; }; 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; }; 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; @@ -66,6 +67,7 @@ 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; }; 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; 59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; + 7888072F27A77C12000EA0DC /* StreamingTest.m */ = {isa = PBXFileReference; indentWidth = 2; lastKnownFileType = sourcecode.c.objc; path = StreamingTest.m; sourceTree = ""; tabWidth = 2; }; 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; @@ -128,6 +130,7 @@ F63F9EED27143B19002479BF /* MockFLTThreadSafeFlutterResult.h */, E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */, E032F2A627A0BF2D009E9028 /* FLTFlashModeTests.m */, + 7888072F27A77C12000EA0DC /* StreamingTest.m */, ); path = RunnerTests; sourceTree = ""; @@ -395,6 +398,7 @@ 033B94BE269C40A200B4DF97 /* CameraMethodChannelTests.m in Sources */, 03BB766B2665316900CE5A93 /* CameraFocusTests.m in Sources */, E487C86026D686A10034AC92 /* CameraPreviewPauseTests.m in Sources */, + 7888073027A77C12000EA0DC /* StreamingTest.m in Sources */, F6EE622F2710A6FC00905E4A /* MockFLTThreadSafeFlutterResult.m in Sources */, 334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */, E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */, diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m new file mode 100644 index 000000000000..176635d51b07 --- /dev/null +++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m @@ -0,0 +1,97 @@ +// Copyright 2013 The Flutter Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+ +@import camera; +@import camera.Test; +@import XCTest; +@import AVFoundation; +#import +#import "MockFLTThreadSafeFlutterResult.h" + +@interface FLTImageStreamHandler : NSObject +@property FlutterEventSink eventSink; +@end + +@interface FLTCam : NSObject +@property(assign, nonatomic) int streamingPendingFrames; +@property(assign, nonatomic) int maxStreamingPendingFrames; +@property(assign, nonatomic) BOOL isStreamingImages; +@property(nonatomic) FLTImageStreamHandler *imageStreamHandler; +- (void)captureOutput:(AVCaptureOutput *)output + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + fromConnection:(AVCaptureConnection *)connection; +@end + +@interface CameraPlugin (Private) +@property(retain, nonatomic) FLTCam *camera; +@end + +@interface StreamingTests : XCTestCase +@end + +@implementation StreamingTests + +- (void)testStreamingPendingFrames { + CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; + + // Set up mocks for initWithCameraName method + id avCaptureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); + OCMStub([avCaptureDeviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg anyObjectRef]]) + .andReturn([AVCaptureInput alloc]); + id avCaptureSessionMock = OCMClassMock([AVCaptureSession class]); + OCMStub([avCaptureSessionMock alloc]).andReturn(avCaptureSessionMock); + OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); + + // Set up method calls + FlutterMethodCall *createCall = [FlutterMethodCall + methodCallWithMethodName:@"create" + arguments:@{@"resolutionPreset" : @"medium", @"enableAudio" : @(1)}]; + FlutterMethodCall *startCall = [FlutterMethodCall methodCallWithMethodName:@"startImageStream" + arguments:nil]; + FlutterMethodCall *receivedCall = + [FlutterMethodCall methodCallWithMethodName:@"receivedImageStreamData" arguments:nil]; + + // Set up sampleBuffer + CVPixelBufferRef pixelBuffer; + CVPixelBufferCreate(kCFAllocatorDefault, 100, 100, kCVPixelFormatType_32BGRA, nil, &pixelBuffer); + CMVideoFormatDescriptionRef formatDescription; + CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, + &formatDescription); + CMSampleBufferRef sampleBuffer; + CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, formatDescription, + &kCMTimingInfoInvalid, &sampleBuffer); + + // Start streaming + [camera handleMethodCallAsync:createCall result:nil]; + [camera handleMethodCallAsync:startCall result:nil]; + camera.camera.imageStreamHandler.eventSink = ^(id _Nullable event) { + }; + + // Waiting for streaming to start + FLTCam *cam = [camera camera]; + while (!cam.isStreamingImages) { + [NSThread sleepForTimeInterval:0.001]; + } + + // Initial value + XCTAssertEqual(cam.streamingPendingFrames, 0); + + // Emulate receiving a video frame + [camera.camera captureOutput:nil didOutputSampleBuffer:sampleBuffer fromConnection:nil]; + XCTAssertEqual(cam.streamingPendingFrames, 1); + + // ReceivedCall reduces streamingPendingFrames + [camera handleMethodCallAsync:receivedCall result:nil]; + XCTAssertEqual(cam.streamingPendingFrames, 0); + + // Don't exceed maxStreamingPendingFrames + for (int i = 0; i < cam.maxStreamingPendingFrames + 2; i++) { + [camera.camera captureOutput:nil didOutputSampleBuffer:sampleBuffer fromConnection:nil]; + } + XCTAssertEqual(cam.streamingPendingFrames, cam.maxStreamingPendingFrames); +} + +@end From 6191e3a555bee724c5c79ce689720531501cbd07 Mon Sep 17 00:00:00 2001 From: zuvola Date: Mon, 7 Feb 2022 14:24:26 +0900 Subject: [PATCH 
12/28] Merge --- .../example/ios/Runner.xcodeproj/project.pbxproj | 4 ++++ .../camera/example/ios/RunnerTests/StreamingTest.m | 6 ++---- packages/camera/camera/ios/Classes/FLTCam.h | 1 + packages/camera/camera/ios/Classes/FLTCam.m | 10 +++++++++- 4 files changed, 16 insertions(+), 5 deletions(-) diff --git a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj index ac39de2e3f84..dfdbb52a8f02 100644 --- a/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/camera/camera/example/ios/Runner.xcodeproj/project.pbxproj @@ -15,6 +15,7 @@ 25C3919135C3D981E6F800D0 /* libPods-RunnerTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */; }; 334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 03BB767226653ABE00CE5A93 /* CameraOrientationTests.m */; }; 3B3967161E833CAA004F5970 /* AppFrameworkInfo.plist in Resources */ = {isa = PBXBuildFile; fileRef = 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */; }; + 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */ = {isa = PBXBuildFile; fileRef = 788A065927B0E02900533D74 /* StreamingTest.m */; }; 978B8F6F1D3862AE00F588F7 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */; }; 97C146F31CF9000F007C117D /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 97C146F21CF9000F007C117D /* main.m */; }; 97C146FC1CF9000F007C117D /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 97C146FA1CF9000F007C117D /* Main.storyboard */; }; @@ -68,6 +69,7 @@ 1944D8072499F3B5E7653D44 /* libPods-RunnerTests.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-RunnerTests.a"; sourceTree = BUILT_PRODUCTS_DIR; }; 3B3967151E833CAA004F5970 /* AppFrameworkInfo.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; name = AppFrameworkInfo.plist; path = Flutter/AppFrameworkInfo.plist; sourceTree = ""; }; 59848A7CA98C1FADF8840207 /* Pods-Runner.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-Runner.debug.xcconfig"; path = "Target Support Files/Pods-Runner/Pods-Runner.debug.xcconfig"; sourceTree = ""; }; + 788A065927B0E02900533D74 /* StreamingTest.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = StreamingTest.m; sourceTree = ""; }; 7AFA3C8E1D35360C0083082E /* Release.xcconfig */ = {isa = PBXFileReference; lastKnownFileType = text.xcconfig; name = Release.xcconfig; path = Flutter/Release.xcconfig; sourceTree = ""; }; 7AFFD8ED1D35381100E5BB4D /* AppDelegate.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 7AFFD8EE1D35381100E5BB4D /* AppDelegate.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; @@ -134,6 +136,7 @@ F63F9EED27143B19002479BF /* MockFLTThreadSafeFlutterResult.h */, E032F24F279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m */, E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */, + 788A065927B0E02900533D74 /* StreamingTest.m */, ); path = RunnerTests; sourceTree = ""; @@ -406,6 +409,7 @@ F6EE622F2710A6FC00905E4A /* MockFLTThreadSafeFlutterResult.m in Sources */, 334733EA2668111C00DCC49E /* CameraOrientationTests.m in 
Sources */, E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */, + 788A065A27B0E02900533D74 /* StreamingTest.m in Sources */, E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */, E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */, E0C6E2002770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m in Sources */, diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m index 176635d51b07..acba2ec6e85b 100644 --- a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m @@ -13,13 +13,11 @@ @interface FLTImageStreamHandler : NSObject @property FlutterEventSink eventSink; @end -@interface FLTCam : NSObject -@property(assign, nonatomic) int streamingPendingFrames; +@interface FLTCam (Private) @property(assign, nonatomic) int maxStreamingPendingFrames; @property(assign, nonatomic) BOOL isStreamingImages; @property(nonatomic) FLTImageStreamHandler *imageStreamHandler; +@property(retain, nonatomic) FLTCam *camera; - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection; diff --git a/packages/camera/camera/ios/Classes/FLTCam.h b/packages/camera/camera/ios/Classes/FLTCam.h index 417a1d74db21..3ecd619e4769 100644 --- a/packages/camera/camera/ios/Classes/FLTCam.h +++ b/packages/camera/camera/ios/Classes/FLTCam.h @@ -30,6 +30,7 @@ NS_ASSUME_NONNULL_BEGIN @property(assign, nonatomic) FLTFlashMode flashMode; // Format used for video and image streaming. @property(assign, nonatomic) FourCharCode videoFormat; +@property(assign, nonatomic) int streamingPendingFrames; - (instancetype)initWithCameraName:(NSString *)cameraName resolutionPreset:(NSString *)resolutionPreset diff --git a/packages/camera/camera/ios/Classes/FLTCam.m b/packages/camera/camera/ios/Classes/FLTCam.m index 94f985066675..9e20f1f9f95e 100644 --- a/packages/camera/camera/ios/Classes/FLTCam.m +++ b/packages/camera/camera/ios/Classes/FLTCam.m @@ -67,6 +67,7 @@ @interface FLTCam () *)messen completion:^{ dispatch_async(self->_captureSessionQueue, ^{ self.isStreamingImages = YES; + self.streamingPendingFrames = 0; }); }]; } else { From e732d7bbd83c01c7282c8b5ee81503701b85814e Mon Sep 17 00:00:00 2001 From: zuvola Date: Mon, 7 Feb 2022 15:23:41 +0900 Subject: [PATCH 13/28] Fixed CHANGELOG --- packages/camera/camera/CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md index f846774ffe0a..c7e2d65c4c89 100644 --- a/packages/camera/camera/CHANGELOG.md +++ b/packages/camera/camera/CHANGELOG.md @@ -1,3 +1,7 @@ +## NEXT + +* Fixes a crash in iOS when streaming on low-performance devices. + ## 0.9.4+10 * iOS performance improvement by moving file writing from the main queue to a background IO queue. 
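Editorial note for readers following the series: the commits above amount to a small back-pressure scheme on the native side. Each frame forwarded to Dart bumps a pending counter, the Dart controller acknowledges it by invoking `receivedImageStreamData`, and the capture output stops forwarding frames once four are pending. Below is a simplified, self-contained Dart model of that scheme; it is illustrative only, the real logic is the Objective-C code in FLTCam.m.

/// Simplified model of the pending-frame limit added in this series.
class PendingFrameGate {
  PendingFrameGate({this.maxPending = 4});

  final int maxPending;
  int _pending = 0;

  /// Called for every captured frame; returns true if the frame may be
  /// forwarded to Dart, false if it must be dropped because too many frames
  /// are still awaiting acknowledgement.
  bool tryEmit() {
    if (_pending >= maxPending) {
      return false; // drop the frame instead of queueing it
    }
    _pending++;
    return true;
  }

  /// Called when the Dart side reports 'receivedImageStreamData'.
  void acknowledge() {
    if (_pending > 0) {
      _pending--;
    }
  }
}

void main() {
  final PendingFrameGate gate = PendingFrameGate();
  // Ten frames arrive before any acknowledgement: only the first four pass.
  final int forwarded =
      List<bool>.generate(10, (_) => gate.tryEmit()).where((bool ok) => ok).length;
  print('forwarded $forwarded of 10 unacknowledged frames'); // forwarded 4 of 10
  // One acknowledgement frees one slot again.
  gate.acknowledge();
  print(gate.tryEmit()); // true
}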
From b69ea0f430758fffe2ada15b163338870d34c3d0 Mon Sep 17 00:00:00 2001 From: zuvola Date: Mon, 7 Feb 2022 15:23:51 +0900 Subject: [PATCH 14/28] Fixed tests --- packages/camera/camera/test/camera_image_stream_test.dart | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/camera/camera/test/camera_image_stream_test.dart b/packages/camera/camera/test/camera_image_stream_test.dart index a688f7168a10..840770d1eed7 100644 --- a/packages/camera/camera/test/camera_image_stream_test.dart +++ b/packages/camera/camera/test/camera_image_stream_test.dart @@ -104,9 +104,8 @@ void main() { await cameraController.startImageStream((image) => null); - expect(cameraChannelMock.log, [ - isMethodCall('startImageStream', arguments: {'frameStack': 0}) - ]); + expect(cameraChannelMock.log, + [isMethodCall('startImageStream', arguments: null)]); expect(streamChannelMock.log, [isMethodCall('listen', arguments: null)]); }); @@ -197,7 +196,7 @@ void main() { await cameraController.stopImageStream(); expect(cameraChannelMock.log, [ - isMethodCall('startImageStream', arguments: {'frameStack': 0}), + isMethodCall('startImageStream', arguments: null), isMethodCall('stopImageStream', arguments: null) ]); From 74d9d8598b35d0e67dad3f9ab45bf9148fed726d Mon Sep 17 00:00:00 2001 From: zuvola Date: Thu, 10 Mar 2022 16:09:17 +0900 Subject: [PATCH 15/28] Apply PR feedback --- packages/camera/camera/CHANGELOG.md | 4 + .../example/ios/RunnerTests/StreamingTest.m | 125 ++++++++++-------- .../camera/camera/ios/Classes/CameraPlugin.m | 3 +- packages/camera/camera/ios/Classes/FLTCam.h | 2 +- packages/camera/camera/ios/Classes/FLTCam.m | 15 ++- .../camera/camera/ios/Classes/FLTCam_Test.h | 4 + 6 files changed, 90 insertions(+), 63 deletions(-) diff --git a/packages/camera/camera/CHANGELOG.md b/packages/camera/camera/CHANGELOG.md index c088bd5b5373..c6329907b4a0 100644 --- a/packages/camera/camera/CHANGELOG.md +++ b/packages/camera/camera/CHANGELOG.md @@ -1,3 +1,7 @@ +## 0.9.4+17 + +* Fixes a crash in iOS when streaming on low-performance devices. + ## NEXT * Minor iOS internal code cleanup related to queue helper functions. 
diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m index acba2ec6e85b..62e54e1420be 100644 --- a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m @@ -10,49 +10,41 @@ #import "MockFLTThreadSafeFlutterResult.h" @interface FLTImageStreamHandler : NSObject +- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue; @property FlutterEventSink eventSink; @end -@interface FLTCam (Private) -@property(assign, nonatomic) int maxStreamingPendingFrames; -@property(assign, nonatomic) BOOL isStreamingImages; -@property(nonatomic) FLTImageStreamHandler *imageStreamHandler; -@property(retain, nonatomic) FLTCam *camera; -- (void)captureOutput:(AVCaptureOutput *)output - didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer - fromConnection:(AVCaptureConnection *)connection; -@end - -@interface CameraPlugin (Private) -@property(retain, nonatomic) FLTCam *camera; -@end - @interface StreamingTests : XCTestCase +@property(readonly, nonatomic) FLTCam *camera; @end @implementation StreamingTests -- (void)testStreamingPendingFrames { - CameraPlugin *camera = [[CameraPlugin alloc] initWithRegistry:nil messenger:nil]; - - // Set up mocks for initWithCameraName method - id avCaptureDeviceInputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([avCaptureDeviceInputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg anyObjectRef]]) - .andReturn([AVCaptureInput alloc]); - id avCaptureSessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([avCaptureSessionMock alloc]).andReturn(avCaptureSessionMock); - OCMStub([avCaptureSessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - - // Set up method calls - FlutterMethodCall *createCall = [FlutterMethodCall - methodCallWithMethodName:@"create" - arguments:@{@"resolutionPreset" : @"medium", @"enableAudio" : @(1)}]; - FlutterMethodCall *startCall = [FlutterMethodCall methodCallWithMethodName:@"startImageStream" - arguments:nil]; - FlutterMethodCall *receivedCall = - [FlutterMethodCall methodCallWithMethodName:@"receivedImageStreamData" arguments:nil]; - - // Set up sampleBuffer +- (void)setUp { + // set up mocks + id inputMock = OCMClassMock([AVCaptureDeviceInput class]); + OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) + .andReturn(inputMock); + + id sessionMock = OCMClassMock([AVCaptureSession class]); + OCMStub([sessionMock alloc]).andReturn(sessionMock); + OCMStub([sessionMock addInputWithNoConnections:[OCMArg any]]); + OCMStub([sessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); + + // create a camera + dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); + dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, + (void *)FLTCaptureSessionQueueSpecific, NULL); + _camera = [[FLTCam alloc] initWithCameraName:@"camera" + resolutionPreset:@"medium" + enableAudio:true + orientation:UIDeviceOrientationPortrait + captureSessionQueue:captureSessionQueue + error:nil]; +} + +// Set up a sampleBuffer +- (CMSampleBufferRef)sampleBuffer { CVPixelBufferRef pixelBuffer; CVPixelBufferCreate(kCFAllocatorDefault, 100, 100, kCVPixelFormatType_32BGRA, nil, &pixelBuffer); CMVideoFormatDescriptionRef formatDescription; @@ -62,34 +54,55 @@ - (void)testStreamingPendingFrames { CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, 
formatDescription, &kCMTimingInfoInvalid, &sampleBuffer); - // Start streaming - [camera handleMethodCallAsync:createCall result:nil]; - [camera handleMethodCallAsync:startCall result:nil]; - camera.camera.imageStreamHandler.eventSink = ^(id _Nullable event) { - }; + return sampleBuffer; +} + +- (void)testExceedMaxStreamingPendingFramesCount { + XCTestExpectation *streamingExpectation = [self + expectationWithDescription:@"Must not receive more than MaxStreamingPendingFramesCount"]; + + id handlerMock = OCMClassMock([FLTImageStreamHandler class]); + OCMStub([handlerMock alloc]).andReturn(handlerMock); + OCMStub([handlerMock initWithCaptureSessionQueue:[OCMArg any]]).andReturn(handlerMock); + OCMStub([handlerMock eventSink]).andReturn(^(id event) { + [streamingExpectation fulfill]; + }); - // Waiting for streaming to start - FLTCam *cam = [camera camera]; - while (!cam.isStreamingImages) { - [NSThread sleepForTimeInterval:0.001]; + id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); + [_camera startImageStreamWithMessenger:messenger]; + + streamingExpectation.expectedFulfillmentCount = 4; + for (int i = 0; i < 10; i++) { + [_camera captureOutput:nil didOutputSampleBuffer:[self sampleBuffer] fromConnection:nil]; } - // Initial value - XCTAssertEqual(cam.streamingPendingFrames, 0); + [self waitForExpectationsWithTimeout:3.0 handler:nil]; +} - // Emulate receiving a video frame - [camera.camera captureOutput:nil didOutputSampleBuffer:sampleBuffer fromConnection:nil]; - XCTAssertEqual(cam.streamingPendingFrames, 1); +- (void)testReceivedImageStreamData { + XCTestExpectation *streamingExpectation = + [self expectationWithDescription: + @"Must be able to receive again when receivedImageStreamData is called"]; - // ReceivedCall reduces streamingPendingFrames - [camera handleMethodCallAsync:receivedCall result:nil]; - XCTAssertEqual(cam.streamingPendingFrames, 0); + id handlerMock = OCMClassMock([FLTImageStreamHandler class]); + OCMStub([handlerMock alloc]).andReturn(handlerMock); + OCMStub([handlerMock initWithCaptureSessionQueue:[OCMArg any]]).andReturn(handlerMock); + OCMStub([handlerMock eventSink]).andReturn(^(id event) { + [streamingExpectation fulfill]; + }); - // Don't exceed maxStreamingPendingFrames - for (int i = 0; i < cam.maxStreamingPendingFrames + 2; i++) { - [camera.camera captureOutput:nil didOutputSampleBuffer:sampleBuffer fromConnection:nil]; + id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); + [_camera startImageStreamWithMessenger:messenger]; + + streamingExpectation.expectedFulfillmentCount = 5; + for (int i = 0; i < 10; i++) { + [_camera captureOutput:nil didOutputSampleBuffer:[self sampleBuffer] fromConnection:nil]; } - XCTAssertEqual(cam.streamingPendingFrames, cam.maxStreamingPendingFrames); + + [_camera receivedImageStreamData]; + [_camera captureOutput:nil didOutputSampleBuffer:[self sampleBuffer] fromConnection:nil]; + + [self waitForExpectationsWithTimeout:3.0 handler:nil]; } @end diff --git a/packages/camera/camera/ios/Classes/CameraPlugin.m b/packages/camera/camera/ios/Classes/CameraPlugin.m index e37b0052e5a1..c0a3833dcd64 100644 --- a/packages/camera/camera/ios/Classes/CameraPlugin.m +++ b/packages/camera/camera/ios/Classes/CameraPlugin.m @@ -163,7 +163,8 @@ - (void)handleMethodCallAsync:(FlutterMethodCall *)call [_camera stopImageStream]; [result sendSuccess]; } else if ([@"receivedImageStreamData" isEqualToString:call.method]) { - _camera.streamingPendingFrames--; + [_camera receivedImageStreamData]; + [result sendSuccess]; } else 
{ NSDictionary *argsMap = call.arguments; NSUInteger cameraId = ((NSNumber *)argsMap[@"cameraId"]).unsignedIntegerValue; diff --git a/packages/camera/camera/ios/Classes/FLTCam.h b/packages/camera/camera/ios/Classes/FLTCam.h index 3ecd619e4769..1242bd57e033 100644 --- a/packages/camera/camera/ios/Classes/FLTCam.h +++ b/packages/camera/camera/ios/Classes/FLTCam.h @@ -30,7 +30,6 @@ NS_ASSUME_NONNULL_BEGIN @property(assign, nonatomic) FLTFlashMode flashMode; // Format used for video and image streaming. @property(assign, nonatomic) FourCharCode videoFormat; -@property(assign, nonatomic) int streamingPendingFrames; - (instancetype)initWithCameraName:(NSString *)cameraName resolutionPreset:(NSString *)resolutionPreset @@ -54,6 +53,7 @@ NS_ASSUME_NONNULL_BEGIN - (void)setExposureModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr; - (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr; - (void)applyFocusMode; +- (void)receivedImageStreamData; /** * Applies FocusMode on the AVCaptureDevice. diff --git a/packages/camera/camera/ios/Classes/FLTCam.m b/packages/camera/camera/ios/Classes/FLTCam.m index 7cf4af4d6206..7e9164b2c8da 100644 --- a/packages/camera/camera/ios/Classes/FLTCam.m +++ b/packages/camera/camera/ios/Classes/FLTCam.m @@ -67,7 +67,8 @@ @interface FLTCam () *)messen completion:^{ dispatch_async(self->_captureSessionQueue, ^{ self.isStreamingImages = YES; - self.streamingPendingFrames = 0; + self.streamingPendingFramesCount = 0; }); }]; } else { @@ -907,6 +908,10 @@ - (void)stopImageStream { } } +- (void)receivedImageStreamData { + self.streamingPendingFramesCount--; +} + - (void)getMaxZoomLevelWithResult:(FLTThreadSafeFlutterResult *)result { CGFloat maxZoomFactor = [self getMaxAvailableZoomFactor]; diff --git a/packages/camera/camera/ios/Classes/FLTCam_Test.h b/packages/camera/camera/ios/Classes/FLTCam_Test.h index db885d0d0773..ed54a4ac8d49 100644 --- a/packages/camera/camera/ios/Classes/FLTCam_Test.h +++ b/packages/camera/camera/ios/Classes/FLTCam_Test.h @@ -22,4 +22,8 @@ @property(readonly, nonatomic) NSMutableDictionary *inProgressSavePhotoDelegates; +/// Called whenever an AVCaptureVideoDataOutput instance outputs a new video frame. 
+- (void)captureOutput:(AVCaptureOutput *)output + didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer + fromConnection:(AVCaptureConnection *)connection; @end From 6b81e1a3a2aad8797774cf71c97d40072f4ef312 Mon Sep 17 00:00:00 2001 From: zuvola Date: Fri, 11 Mar 2022 09:35:40 +0900 Subject: [PATCH 16/28] Update messages --- .../camera/camera/example/ios/RunnerTests/StreamingTest.m | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m index 62e54e1420be..a71f781f2a4a 100644 --- a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m @@ -59,7 +59,7 @@ - (CMSampleBufferRef)sampleBuffer { - (void)testExceedMaxStreamingPendingFramesCount { XCTestExpectation *streamingExpectation = [self - expectationWithDescription:@"Must not receive more than MaxStreamingPendingFramesCount"]; + expectationWithDescription:@"Must not call handler over maxStreamingPendingFramesCount"]; id handlerMock = OCMClassMock([FLTImageStreamHandler class]); OCMStub([handlerMock alloc]).andReturn(handlerMock); @@ -82,7 +82,7 @@ - (void)testExceedMaxStreamingPendingFramesCount { - (void)testReceivedImageStreamData { XCTestExpectation *streamingExpectation = [self expectationWithDescription: - @"Must be able to receive again when receivedImageStreamData is called"]; + @"Must be able to call the handler again when receivedImageStreamData is called"]; id handlerMock = OCMClassMock([FLTImageStreamHandler class]); OCMStub([handlerMock alloc]).andReturn(handlerMock); From 1167112b8322a11478f022a386214327d64cbc4d Mon Sep 17 00:00:00 2001 From: zuvola Date: Fri, 11 Mar 2022 10:41:35 +0900 Subject: [PATCH 17/28] Update to use CameraTestUtils --- .../example/ios/RunnerTests/StreamingTest.m | 46 ++++--------------- 1 file changed, 10 insertions(+), 36 deletions(-) diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m index a71f781f2a4a..45a48e91044e 100644 --- a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m @@ -7,7 +7,7 @@ @import XCTest; @import AVFoundation; #import -#import "MockFLTThreadSafeFlutterResult.h" +#import "CameraTestUtils.h" @interface FLTImageStreamHandler : NSObject - (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue; @@ -16,45 +16,19 @@ - (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueu @interface StreamingTests : XCTestCase @property(readonly, nonatomic) FLTCam *camera; +@property(readonly, nonatomic) CMSampleBufferRef sampleBuffer; @end @implementation StreamingTests - (void)setUp { - // set up mocks - id inputMock = OCMClassMock([AVCaptureDeviceInput class]); - OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]]) - .andReturn(inputMock); - - id sessionMock = OCMClassMock([AVCaptureSession class]); - OCMStub([sessionMock alloc]).andReturn(sessionMock); - OCMStub([sessionMock addInputWithNoConnections:[OCMArg any]]); - OCMStub([sessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES); - - // create a camera - dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL); - dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific, - (void *)FLTCaptureSessionQueueSpecific, NULL); - 
_camera = [[FLTCam alloc] initWithCameraName:@"camera" - resolutionPreset:@"medium" - enableAudio:true - orientation:UIDeviceOrientationPortrait - captureSessionQueue:captureSessionQueue - error:nil]; + dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL); + _camera = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue); + _sampleBuffer = FLTCreateTestSampleBuffer(); } -// Set up a sampleBuffer -- (CMSampleBufferRef)sampleBuffer { - CVPixelBufferRef pixelBuffer; - CVPixelBufferCreate(kCFAllocatorDefault, 100, 100, kCVPixelFormatType_32BGRA, nil, &pixelBuffer); - CMVideoFormatDescriptionRef formatDescription; - CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, - &formatDescription); - CMSampleBufferRef sampleBuffer; - CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, formatDescription, - &kCMTimingInfoInvalid, &sampleBuffer); - - return sampleBuffer; +-(void)tearDown { + CFRelease(_sampleBuffer); } - (void)testExceedMaxStreamingPendingFramesCount { @@ -73,7 +47,7 @@ - (void)testExceedMaxStreamingPendingFramesCount { streamingExpectation.expectedFulfillmentCount = 4; for (int i = 0; i < 10; i++) { - [_camera captureOutput:nil didOutputSampleBuffer:[self sampleBuffer] fromConnection:nil]; + [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil]; } [self waitForExpectationsWithTimeout:3.0 handler:nil]; @@ -96,11 +70,11 @@ - (void)testReceivedImageStreamData { streamingExpectation.expectedFulfillmentCount = 5; for (int i = 0; i < 10; i++) { - [_camera captureOutput:nil didOutputSampleBuffer:[self sampleBuffer] fromConnection:nil]; + [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil]; } [_camera receivedImageStreamData]; - [_camera captureOutput:nil didOutputSampleBuffer:[self sampleBuffer] fromConnection:nil]; + [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil]; [self waitForExpectationsWithTimeout:3.0 handler:nil]; } From 24871c6dda2e7986509f5bd2dc89f2d78ebe26b2 Mon Sep 17 00:00:00 2001 From: zuvola Date: Fri, 11 Mar 2022 10:50:05 +0900 Subject: [PATCH 18/28] Update _Test header --- .../example/ios/RunnerTests/StreamingTest.m | 15 +++++++++------ packages/camera/camera/ios/Classes/FLTCam.m | 9 --------- packages/camera/camera/ios/Classes/FLTCam_Test.h | 15 +++++++++++++++ 3 files changed, 24 insertions(+), 15 deletions(-) diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m index 45a48e91044e..49d8bcada992 100644 --- a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m @@ -9,11 +9,6 @@ #import #import "CameraTestUtils.h" -@interface FLTImageStreamHandler : NSObject -- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue; -@property FlutterEventSink eventSink; -@end - @interface StreamingTests : XCTestCase @property(readonly, nonatomic) FLTCam *camera; @property(readonly, nonatomic) CMSampleBufferRef sampleBuffer; @@ -27,7 +22,7 @@ - (void)setUp { _sampleBuffer = FLTCreateTestSampleBuffer(); } --(void)tearDown { +- (void)tearDown { CFRelease(_sampleBuffer); } @@ -45,6 +40,10 @@ - (void)testExceedMaxStreamingPendingFramesCount { id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); [_camera startImageStreamWithMessenger:messenger]; + while (!_camera.isStreamingImages) { + [NSThread 
sleepForTimeInterval:0.001]; + } + streamingExpectation.expectedFulfillmentCount = 4; for (int i = 0; i < 10; i++) { [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil]; @@ -68,6 +67,10 @@ - (void)testReceivedImageStreamData { id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); [_camera startImageStreamWithMessenger:messenger]; + while (!_camera.isStreamingImages) { + [NSThread sleepForTimeInterval:0.001]; + } + streamingExpectation.expectedFulfillmentCount = 5; for (int i = 0; i < 10; i++) { [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil]; diff --git a/packages/camera/camera/ios/Classes/FLTCam.m b/packages/camera/camera/ios/Classes/FLTCam.m index 456ac956fbea..1eef3190e12d 100644 --- a/packages/camera/camera/ios/Classes/FLTCam.m +++ b/packages/camera/camera/ios/Classes/FLTCam.m @@ -10,14 +10,6 @@ @import CoreMotion; #import -@interface FLTImageStreamHandler : NSObject -// The queue on which `eventSink` property should be accessed -@property(nonatomic, strong) dispatch_queue_t captureSessionQueue; -// `eventSink` property should be accessed on `captureSessionQueue`. -// The block itself should be invoked on the main queue. -@property FlutterEventSink eventSink; -@end - @implementation FLTImageStreamHandler - (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue { @@ -68,7 +60,6 @@ @interface FLTCam () +// The queue on which `eventSink` property should be accessed +@property(nonatomic, strong) dispatch_queue_t captureSessionQueue; +// `eventSink` property should be accessed on `captureSessionQueue`. +// The block itself should be invoked on the main queue. +@property FlutterEventSink eventSink; +@end + +@interface FLTImageStreamHandler () +- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue; +@end From 7635469def87bf0555dd345886e00574831e0b07 Mon Sep 17 00:00:00 2001 From: zuvola Date: Fri, 11 Mar 2022 10:57:12 +0900 Subject: [PATCH 19/28] Bump version to 0.9.4+17 --- packages/camera/camera/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml index 2baab09c5dcb..0e684a743ba3 100644 --- a/packages/camera/camera/pubspec.yaml +++ b/packages/camera/camera/pubspec.yaml @@ -4,7 +4,7 @@ description: A Flutter plugin for controlling the camera. Supports previewing Dart. 
repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.4+16 +version: 0.9.4+17 environment: sdk: ">=2.14.0 <3.0.0" From 376bbfac1a1461ff9da7d9a60d611e8684d534aa Mon Sep 17 00:00:00 2001 From: zuvola Date: Wed, 16 Mar 2022 13:16:38 +0900 Subject: [PATCH 20/28] Inject FLTImageStreamHandler object --- .../example/ios/RunnerTests/StreamingTest.m | 8 ++----- packages/camera/camera/ios/Classes/FLTCam.m | 7 ++++-- .../camera/camera/ios/Classes/FLTCam_Test.h | 22 +++++++++---------- 3 files changed, 18 insertions(+), 19 deletions(-) diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m index 49d8bcada992..8d9f85c7d1b8 100644 --- a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m @@ -31,14 +31,12 @@ - (void)testExceedMaxStreamingPendingFramesCount { expectationWithDescription:@"Must not call handler over maxStreamingPendingFramesCount"]; id handlerMock = OCMClassMock([FLTImageStreamHandler class]); - OCMStub([handlerMock alloc]).andReturn(handlerMock); - OCMStub([handlerMock initWithCaptureSessionQueue:[OCMArg any]]).andReturn(handlerMock); OCMStub([handlerMock eventSink]).andReturn(^(id event) { [streamingExpectation fulfill]; }); id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); - [_camera startImageStreamWithMessenger:messenger]; + [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock]; while (!_camera.isStreamingImages) { [NSThread sleepForTimeInterval:0.001]; @@ -58,14 +56,12 @@ - (void)testReceivedImageStreamData { @"Must be able to call the handler again when receivedImageStreamData is called"]; id handlerMock = OCMClassMock([FLTImageStreamHandler class]); - OCMStub([handlerMock alloc]).andReturn(handlerMock); - OCMStub([handlerMock initWithCaptureSessionQueue:[OCMArg any]]).andReturn(handlerMock); OCMStub([handlerMock eventSink]).andReturn(^(id event) { [streamingExpectation fulfill]; }); id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); - [_camera startImageStreamWithMessenger:messenger]; + [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock]; while (!_camera.isStreamingImages) { [NSThread sleepForTimeInterval:0.001]; diff --git a/packages/camera/camera/ios/Classes/FLTCam.m b/packages/camera/camera/ios/Classes/FLTCam.m index 1eef3190e12d..d72906dd79b4 100644 --- a/packages/camera/camera/ios/Classes/FLTCam.m +++ b/packages/camera/camera/ios/Classes/FLTCam.m @@ -897,6 +897,10 @@ - (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset: } - (void)startImageStreamWithMessenger:(NSObject *)messenger { + [self startImageStreamWithMessenger:messenger imageStreamHandler:[[FLTImageStreamHandler alloc] initWithCaptureSessionQueue:_captureSessionQueue]]; +} + +- (void)startImageStreamWithMessenger:(NSObject *)messenger imageStreamHandler:(FLTImageStreamHandler*)imageStreamHandler { if (!_isStreamingImages) { FlutterEventChannel *eventChannel = [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream" @@ -904,8 +908,7 @@ - (void)startImageStreamWithMessenger:(NSObject *)messen FLTThreadSafeEventChannel *threadSafeEventChannel = [[FLTThreadSafeEventChannel alloc] initWithEventChannel:eventChannel]; - _imageStreamHandler = - [[FLTImageStreamHandler alloc] 
initWithCaptureSessionQueue:_captureSessionQueue]; + _imageStreamHandler = imageStreamHandler; [threadSafeEventChannel setStreamHandler:_imageStreamHandler completion:^{ dispatch_async(self->_captureSessionQueue, ^{ diff --git a/packages/camera/camera/ios/Classes/FLTCam_Test.h b/packages/camera/camera/ios/Classes/FLTCam_Test.h index d66c5553caf1..23710a0ecbeb 100644 --- a/packages/camera/camera/ios/Classes/FLTCam_Test.h +++ b/packages/camera/camera/ios/Classes/FLTCam_Test.h @@ -5,6 +5,15 @@ #import "FLTCam.h" #import "FLTSavePhotoDelegate.h" +@interface FLTImageStreamHandler : NSObject +// The queue on which `eventSink` property should be accessed +@property(nonatomic, strong) dispatch_queue_t captureSessionQueue; +// `eventSink` property should be accessed on `captureSessionQueue`. +// The block itself should be invoked on the main queue. +@property FlutterEventSink eventSink; +@end + + // APIs exposed for unit testing. @interface FLTCam () @@ -41,16 +50,7 @@ captureSessionQueue:(dispatch_queue_t)captureSessionQueue error:(NSError **)error; -@end - -@interface FLTImageStreamHandler : NSObject -// The queue on which `eventSink` property should be accessed -@property(nonatomic, strong) dispatch_queue_t captureSessionQueue; -// `eventSink` property should be accessed on `captureSessionQueue`. -// The block itself should be invoked on the main queue. -@property FlutterEventSink eventSink; -@end +/// Start streaming images. +- (void)startImageStreamWithMessenger:(NSObject *)messenger imageStreamHandler:(FLTImageStreamHandler*)imageStreamHandler; -@interface FLTImageStreamHandler () -- (instancetype)initWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue; @end From f276bd02872aae0851b7b61ae64d0c0c595ccc26 Mon Sep 17 00:00:00 2001 From: zuvola Date: Wed, 16 Mar 2022 14:04:21 +0900 Subject: [PATCH 21/28] Capital S --- packages/camera/camera/ios/Classes/FLTCam.m | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/camera/camera/ios/Classes/FLTCam.m b/packages/camera/camera/ios/Classes/FLTCam.m index d72906dd79b4..58bdaf0938d0 100644 --- a/packages/camera/camera/ios/Classes/FLTCam.m +++ b/packages/camera/camera/ios/Classes/FLTCam.m @@ -61,7 +61,7 @@ @interface FLTCam () Date: Wed, 16 Mar 2022 14:05:12 +0900 Subject: [PATCH 22/28] Use XCTNSPredicateExpectation --- .../example/ios/RunnerTests/StreamingTest.m | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m index 8d9f85c7d1b8..ec4d3b42dc8c 100644 --- a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m @@ -26,6 +26,13 @@ - (void)tearDown { CFRelease(_sampleBuffer); } +- (void)waitForStart { + NSPredicate *predicate = [NSPredicate predicateWithFormat:@"isStreamingImages == YES"]; + XCTNSPredicateExpectation *expectation = [[XCTNSPredicateExpectation alloc] initWithPredicate:predicate object:_camera]; + XCTWaiterResult result = [XCTWaiter waitForExpectations:@[expectation] timeout:3]; + XCTAssertEqual(result, XCTWaiterResultCompleted); +} + - (void)testExceedMaxStreamingPendingFramesCount { XCTestExpectation *streamingExpectation = [self expectationWithDescription:@"Must not call handler over maxStreamingPendingFramesCount"]; @@ -38,9 +45,7 @@ - (void)testExceedMaxStreamingPendingFramesCount { id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); [_camera 
startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock]; - while (!_camera.isStreamingImages) { - [NSThread sleepForTimeInterval:0.001]; - } + [self waitForStart]; streamingExpectation.expectedFulfillmentCount = 4; for (int i = 0; i < 10; i++) { @@ -63,9 +68,7 @@ - (void)testReceivedImageStreamData { id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock]; - while (!_camera.isStreamingImages) { - [NSThread sleepForTimeInterval:0.001]; - } + [self waitForStart]; streamingExpectation.expectedFulfillmentCount = 5; for (int i = 0; i < 10; i++) { From 9653345848deed0865ca93cdf49530e2d88028c9 Mon Sep 17 00:00:00 2001 From: zuvola Date: Wed, 16 Mar 2022 14:05:38 +0900 Subject: [PATCH 23/28] Wait 1 second --- .../camera/camera/example/ios/RunnerTests/StreamingTest.m | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m index ec4d3b42dc8c..7a31bd17866f 100644 --- a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m @@ -52,7 +52,7 @@ - (void)testExceedMaxStreamingPendingFramesCount { [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil]; } - [self waitForExpectationsWithTimeout:3.0 handler:nil]; + [self waitForExpectationsWithTimeout:1.0 handler:nil]; } - (void)testReceivedImageStreamData { @@ -78,7 +78,7 @@ - (void)testReceivedImageStreamData { [_camera receivedImageStreamData]; [_camera captureOutput:nil didOutputSampleBuffer:self.sampleBuffer fromConnection:nil]; - [self waitForExpectationsWithTimeout:3.0 handler:nil]; + [self waitForExpectationsWithTimeout:1.0 handler:nil]; } @end From 8e334bfc05d7f7fa8cab278ad742d1f8a81e7b89 Mon Sep 17 00:00:00 2001 From: zuvola Date: Wed, 16 Mar 2022 14:15:13 +0900 Subject: [PATCH 24/28] Format --- .../camera/camera/example/ios/RunnerTests/StreamingTest.m | 5 +++-- packages/camera/camera/ios/Classes/FLTCam.m | 7 +++++-- packages/camera/camera/ios/Classes/FLTCam_Test.h | 4 ++-- 3 files changed, 10 insertions(+), 6 deletions(-) diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m index 7a31bd17866f..b65290e55db2 100644 --- a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m @@ -28,8 +28,9 @@ - (void)tearDown { - (void)waitForStart { NSPredicate *predicate = [NSPredicate predicateWithFormat:@"isStreamingImages == YES"]; - XCTNSPredicateExpectation *expectation = [[XCTNSPredicateExpectation alloc] initWithPredicate:predicate object:_camera]; - XCTWaiterResult result = [XCTWaiter waitForExpectations:@[expectation] timeout:3]; + XCTNSPredicateExpectation *expectation = + [[XCTNSPredicateExpectation alloc] initWithPredicate:predicate object:_camera]; + XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:3]; XCTAssertEqual(result, XCTWaiterResultCompleted); } diff --git a/packages/camera/camera/ios/Classes/FLTCam.m b/packages/camera/camera/ios/Classes/FLTCam.m index 58bdaf0938d0..e36785fd6941 100644 --- a/packages/camera/camera/ios/Classes/FLTCam.m +++ b/packages/camera/camera/ios/Classes/FLTCam.m @@ -897,10 +897,13 @@ - (void)setExposureOffsetWithResult:(FLTThreadSafeFlutterResult *)result offset: } - 
(void)startImageStreamWithMessenger:(NSObject *)messenger { - [self startImageStreamWithMessenger:messenger imageStreamHandler:[[FLTImageStreamHandler alloc] initWithCaptureSessionQueue:_captureSessionQueue]]; + [self startImageStreamWithMessenger:messenger + imageStreamHandler:[[FLTImageStreamHandler alloc] + initWithCaptureSessionQueue:_captureSessionQueue]]; } -- (void)startImageStreamWithMessenger:(NSObject *)messenger imageStreamHandler:(FLTImageStreamHandler*)imageStreamHandler { +- (void)startImageStreamWithMessenger:(NSObject *)messenger + imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler { if (!_isStreamingImages) { FlutterEventChannel *eventChannel = [FlutterEventChannel eventChannelWithName:@"plugins.flutter.io/camera/imageStream" diff --git a/packages/camera/camera/ios/Classes/FLTCam_Test.h b/packages/camera/camera/ios/Classes/FLTCam_Test.h index 23710a0ecbeb..c28cdcbdea02 100644 --- a/packages/camera/camera/ios/Classes/FLTCam_Test.h +++ b/packages/camera/camera/ios/Classes/FLTCam_Test.h @@ -13,7 +13,6 @@ @property FlutterEventSink eventSink; @end - // APIs exposed for unit testing. @interface FLTCam () @@ -51,6 +50,7 @@ error:(NSError **)error; /// Start streaming images. -- (void)startImageStreamWithMessenger:(NSObject *)messenger imageStreamHandler:(FLTImageStreamHandler*)imageStreamHandler; +- (void)startImageStreamWithMessenger:(NSObject *)messenger + imageStreamHandler:(FLTImageStreamHandler *)imageStreamHandler; @end From 7b8d74fe5d0046f6a6a2be360c172f10d6f8a98c Mon Sep 17 00:00:00 2001 From: zuvola Date: Wed, 16 Mar 2022 15:05:27 +0900 Subject: [PATCH 25/28] Using KVO to wait --- .../example/ios/RunnerTests/StreamingTest.m | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m index b65290e55db2..1843cce12152 100644 --- a/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m +++ b/packages/camera/camera/example/ios/RunnerTests/StreamingTest.m @@ -26,14 +26,6 @@ - (void)tearDown { CFRelease(_sampleBuffer); } -- (void)waitForStart { - NSPredicate *predicate = [NSPredicate predicateWithFormat:@"isStreamingImages == YES"]; - XCTNSPredicateExpectation *expectation = - [[XCTNSPredicateExpectation alloc] initWithPredicate:predicate object:_camera]; - XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:3]; - XCTAssertEqual(result, XCTWaiterResultCompleted); -} - - (void)testExceedMaxStreamingPendingFramesCount { XCTestExpectation *streamingExpectation = [self expectationWithDescription:@"Must not call handler over maxStreamingPendingFramesCount"]; @@ -46,7 +38,11 @@ - (void)testExceedMaxStreamingPendingFramesCount { id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock]; - [self waitForStart]; + XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages" + object:_camera + expectedValue:@YES]; + XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1]; + XCTAssertEqual(result, XCTWaiterResultCompleted); streamingExpectation.expectedFulfillmentCount = 4; for (int i = 0; i < 10; i++) { @@ -69,7 +65,11 @@ - (void)testReceivedImageStreamData { id messenger = OCMProtocolMock(@protocol(FlutterBinaryMessenger)); [_camera startImageStreamWithMessenger:messenger imageStreamHandler:handlerMock]; - [self waitForStart]; + 
XCTKVOExpectation *expectation = [[XCTKVOExpectation alloc] initWithKeyPath:@"isStreamingImages" + object:_camera + expectedValue:@YES]; + XCTWaiterResult result = [XCTWaiter waitForExpectations:@[ expectation ] timeout:1]; + XCTAssertEqual(result, XCTWaiterResultCompleted); streamingExpectation.expectedFulfillmentCount = 5; for (int i = 0; i < 10; i++) { From ab233fa5fe6de779d80168a68b589c98ced4a822 Mon Sep 17 00:00:00 2001 From: zuvola Date: Wed, 23 Mar 2022 14:01:57 +0900 Subject: [PATCH 26/28] Add comments --- packages/camera/camera/ios/Classes/FLTCam.h | 4 ++++ packages/camera/camera/ios/Classes/FLTCam.m | 5 +++++ packages/camera/camera/ios/Classes/FLTCam_Test.h | 9 ++++++--- 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/packages/camera/camera/ios/Classes/FLTCam.h b/packages/camera/camera/ios/Classes/FLTCam.h index 8c2af1f55d41..0a09159e0e55 100644 --- a/packages/camera/camera/ios/Classes/FLTCam.h +++ b/packages/camera/camera/ios/Classes/FLTCam.h @@ -60,6 +60,10 @@ NS_ASSUME_NONNULL_BEGIN - (void)setExposureModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr; - (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr; - (void)applyFocusMode; + +/// Sets that a streaming image has been received. +/// Call it every time a streaming image is received. +/// To prevent memory consumption, the number of frames pending processing is limited. - (void)receivedImageStreamData; /** diff --git a/packages/camera/camera/ios/Classes/FLTCam.m b/packages/camera/camera/ios/Classes/FLTCam.m index e36785fd6941..459cd6e17a54 100644 --- a/packages/camera/camera/ios/Classes/FLTCam.m +++ b/packages/camera/camera/ios/Classes/FLTCam.m @@ -60,8 +60,13 @@ @interface FLTCam () -// The queue on which `eventSink` property should be accessed + +/// The queue on which `eventSink` property should be accessed @property(nonatomic, strong) dispatch_queue_t captureSessionQueue; -// `eventSink` property should be accessed on `captureSessionQueue`. -// The block itself should be invoked on the main queue. + +/// `eventSink` property should be accessed on `captureSessionQueue`. +/// The block itself should be invoked on the main queue. @property FlutterEventSink eventSink; + @end // APIs exposed for unit testing. From 830d5adf6eed3c40f17618040f72fb95182264b6 Mon Sep 17 00:00:00 2001 From: zuvola Date: Thu, 24 Mar 2022 09:57:44 +0900 Subject: [PATCH 27/28] Update comments --- packages/camera/camera/ios/Classes/FLTCam.h | 9 ++++++--- packages/camera/camera/ios/Classes/FLTCam.m | 2 +- packages/camera/camera/ios/Classes/FLTCam_Test.h | 6 ++++-- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/packages/camera/camera/ios/Classes/FLTCam.h b/packages/camera/camera/ios/Classes/FLTCam.h index 0a09159e0e55..8a5dafaf8354 100644 --- a/packages/camera/camera/ios/Classes/FLTCam.h +++ b/packages/camera/camera/ios/Classes/FLTCam.h @@ -61,9 +61,12 @@ NS_ASSUME_NONNULL_BEGIN - (void)setFocusModeWithResult:(FLTThreadSafeFlutterResult *)result mode:(NSString *)modeStr; - (void)applyFocusMode; -/// Sets that a streaming image has been received. -/// Call it every time a streaming image is received. -/// To prevent memory consumption, the number of frames pending processing is limited. +/** + * Acknowledges the receipt of one image stream frame. + * + * This should be called each time a frame is received. Failing to call it may + * cause later frames to be dropped instead of streamed. 
+ */ - (void)receivedImageStreamData; /** diff --git a/packages/camera/camera/ios/Classes/FLTCam.m b/packages/camera/camera/ios/Classes/FLTCam.m index 459cd6e17a54..7af505b249cb 100644 --- a/packages/camera/camera/ios/Classes/FLTCam.m +++ b/packages/camera/camera/ios/Classes/FLTCam.m @@ -133,7 +133,7 @@ - (instancetype)initWithCameraName:(NSString *)cameraName _videoFormat = kCVPixelFormatType_32BGRA; _inProgressSavePhotoDelegates = [NSMutableDictionary dictionary]; - // To prevent memory consumption, limit the number of frames pending processing. + // To limit memory consumption, limit the number of frames pending processing. // After some testing, 4 was determined to be the best maximum value. // https://github.com/flutter/plugins/pull/4520#discussion_r766335637 _maxStreamingPendingFramesCount = 4; diff --git a/packages/camera/camera/ios/Classes/FLTCam_Test.h b/packages/camera/camera/ios/Classes/FLTCam_Test.h index 7dd3a1915d75..19e284227f4f 100644 --- a/packages/camera/camera/ios/Classes/FLTCam_Test.h +++ b/packages/camera/camera/ios/Classes/FLTCam_Test.h @@ -7,10 +7,12 @@ @interface FLTImageStreamHandler : NSObject -/// The queue on which `eventSink` property should be accessed +/// The queue on which `eventSink` property should be accessed. @property(nonatomic, strong) dispatch_queue_t captureSessionQueue; -/// `eventSink` property should be accessed on `captureSessionQueue`. +/// The event sink to stream camera events to Dart. +/// +/// The property should only be accessed on `captureSessionQueue`. /// The block itself should be invoked on the main queue. @property FlutterEventSink eventSink; From e185988f54303d6c7d20a3cd59a1b39352c815e3 Mon Sep 17 00:00:00 2001 From: stuartmorgan Date: Fri, 25 Mar 2022 13:06:26 -0700 Subject: [PATCH 28/28] Version bump --- packages/camera/camera/pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/camera/camera/pubspec.yaml b/packages/camera/camera/pubspec.yaml index 0e684a743ba3..064eb919c96a 100644 --- a/packages/camera/camera/pubspec.yaml +++ b/packages/camera/camera/pubspec.yaml @@ -4,7 +4,7 @@ description: A Flutter plugin for controlling the camera. Supports previewing Dart. repository: https://github.com/flutter/plugins/tree/main/packages/camera/camera issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+camera%22 -version: 0.9.4+17 +version: 0.9.4+18 environment: sdk: ">=2.14.0 <3.0.0"
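
For illustration, the frame throttling these patches add to FLTCam can be modelled in isolation: the native side counts frames that have been delivered to Dart but not yet acknowledged through receivedImageStreamData, and drops new frames once maxStreamingPendingFramesCount (4 in the patches) is reached. The standalone Objective-C sketch below shows only that bookkeeping; the FrameThrottle class and the shouldStreamFrame method are hypothetical names used for this example and do not exist in the plugin, while the limit of 4 and the acknowledgement call mirror the patches above.

#import <Foundation/Foundation.h>

@interface FrameThrottle : NSObject
// Number of frames handed to Dart that have not been acknowledged yet.
@property(nonatomic, assign) NSInteger streamingPendingFramesCount;
// Upper bound on pending frames before new frames are dropped.
@property(nonatomic, assign) NSInteger maxStreamingPendingFramesCount;
- (BOOL)shouldStreamFrame;
- (void)receivedImageStreamData;
@end

@implementation FrameThrottle
- (instancetype)init {
  self = [super init];
  if (self) {
    // Mirrors the limit chosen in the patches: at most 4 frames pending.
    _maxStreamingPendingFramesCount = 4;
  }
  return self;
}

// Called once per captured frame; returns NO when the frame should be dropped.
- (BOOL)shouldStreamFrame {
  if (self.streamingPendingFramesCount >= self.maxStreamingPendingFramesCount) {
    return NO;
  }
  self.streamingPendingFramesCount++;
  return YES;
}

// Called when the Dart side acknowledges a frame.
- (void)receivedImageStreamData {
  if (self.streamingPendingFramesCount > 0) {
    self.streamingPendingFramesCount--;
  }
}
@end

int main(void) {
  @autoreleasepool {
    FrameThrottle *throttle = [[FrameThrottle alloc] init];
    // Without acknowledgements, only the first 4 frames are streamed.
    for (int i = 0; i < 10; i++) {
      NSLog(@"frame %d streamed: %@", i,
            [throttle shouldStreamFrame] ? @"YES" : @"NO");
    }
    // One acknowledgement frees one slot, so the next frame streams again.
    [throttle receivedImageStreamData];
    NSLog(@"after one acknowledgement, next frame streamed: %@",
          [throttle shouldStreamFrame] ? @"YES" : @"NO");
  }
  return 0;
}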