This repository was archived by the owner on Feb 22, 2023. It is now read-only.

[camera]remove CAS operation and use dispatch queue instead #4973

Merged: 4 commits merged on Mar 8, 2022
3 changes: 2 additions & 1 deletion packages/camera/camera/CHANGELOG.md
@@ -1,5 +1,6 @@
## NEXT
## 0.9.4+15

* Uses dispatch queue for pixel buffer synchronization on iOS.
* Minor iOS internal code cleanup related to queue helper functions.

## 0.9.4+14
@@ -28,6 +28,7 @@
E0C6E2002770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FD2770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m */; };
E0C6E2012770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */; };
E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */; };
E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */ = {isa = PBXBuildFile; fileRef = E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */; };
E0F95E3D27A32AB900699390 /* CameraPropertiesTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */; };
E487C86026D686A10034AC92 /* CameraPreviewPauseTests.m in Sources */ = {isa = PBXBuildFile; fileRef = E487C85F26D686A10034AC92 /* CameraPreviewPauseTests.m */; };
F6EE622F2710A6FC00905E4A /* MockFLTThreadSafeFlutterResult.m in Sources */ = {isa = PBXBuildFile; fileRef = F6EE622E2710A6FC00905E4A /* MockFLTThreadSafeFlutterResult.m */; };
@@ -91,6 +92,8 @@
E0C6E1FD2770F01A00EA6AA3 /* ThreadSafeMethodChannelTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeMethodChannelTests.m; sourceTree = "<group>"; };
E0C6E1FE2770F01A00EA6AA3 /* ThreadSafeTextureRegistryTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeTextureRegistryTests.m; sourceTree = "<group>"; };
E0C6E1FF2770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = ThreadSafeEventChannelTests.m; sourceTree = "<group>"; };
E0CDBAC027CD9729002561D9 /* CameraTestUtils.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = CameraTestUtils.h; sourceTree = "<group>"; };
E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraTestUtils.m; sourceTree = "<group>"; };
E0F95E3C27A32AB900699390 /* CameraPropertiesTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPropertiesTests.m; sourceTree = "<group>"; };
E487C85F26D686A10034AC92 /* CameraPreviewPauseTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = CameraPreviewPauseTests.m; sourceTree = "<group>"; };
F63F9EED27143B19002479BF /* MockFLTThreadSafeFlutterResult.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = MockFLTThreadSafeFlutterResult.h; sourceTree = "<group>"; };
@@ -132,6 +135,8 @@
E071CF7127B3061B006EF3BA /* FLTCamPhotoCaptureTests.m */,
E071CF7327B31DE4006EF3BA /* FLTCamSampleBufferTests.m */,
E01EE4A72799F3A5008C1950 /* QueueUtilsTests.m */,
E0CDBAC027CD9729002561D9 /* CameraTestUtils.h */,
E0CDBAC127CD9729002561D9 /* CameraTestUtils.m */,
E487C85F26D686A10034AC92 /* CameraPreviewPauseTests.m */,
F6EE622E2710A6FC00905E4A /* MockFLTThreadSafeFlutterResult.m */,
F63F9EED27143B19002479BF /* MockFLTThreadSafeFlutterResult.h */,
@@ -408,6 +413,7 @@
E071CF7427B31DE4006EF3BA /* FLTCamSampleBufferTests.m in Sources */,
E04F108627A87CA600573D0C /* FLTSavePhotoDelegateTests.m in Sources */,
F6EE622F2710A6FC00905E4A /* MockFLTThreadSafeFlutterResult.m in Sources */,
E0CDBAC227CD9729002561D9 /* CameraTestUtils.m in Sources */,
334733EA2668111C00DCC49E /* CameraOrientationTests.m in Sources */,
E032F250279F5E94009E9028 /* CameraCaptureSessionQueueRaceConditionTests.m in Sources */,
E0C6E2022770F01A00EA6AA3 /* ThreadSafeEventChannelTests.m in Sources */,
2 changes: 1 addition & 1 deletion packages/camera/camera/example/ios/Runner/main.m
@@ -11,7 +11,7 @@ int main(int argc, char *argv[]) {
// The setup logic in `AppDelegate::didFinishLaunchingWithOptions:` eventually sends camera
// operations on the background queue, which would run concurrently with the test cases during
// unit tests, making the debugging process confusing. This setup is actually not necessary for
// the unit tests, so here we want to skip the AppDelegate when running unit tests.
// the unit tests, so it is better to skip the AppDelegate when running unit tests.
BOOL isTesting = NSClassFromString(@"XCTestCase") != nil;
return UIApplicationMain(argc, argv, nil,
isTesting ? nil : NSStringFromClass([AppDelegate class]));
@@ -0,0 +1,18 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

@import camera;

NS_ASSUME_NONNULL_BEGIN

/// Creates an `FLTCam` that runs its capture session operations on a given queue.
/// @param captureSessionQueue the capture session queue
/// @return an FLTCam object.
extern FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue);

/// Creates a test sample buffer.
/// @return a test sample buffer.
extern CMSampleBufferRef FLTCreateTestSampleBuffer(void);

NS_ASSUME_NONNULL_END
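
For orientation, here is a minimal usage sketch of these new helpers (not part of the PR; it assumes the RunnerTests target links the camera module and OCMock, and the test class name and queue label are hypothetical):

// Minimal usage sketch, not part of the PR. Assumes the test target links the camera module
// and OCMock (used internally by FLTCreateCamWithCaptureSessionQueue's mocks).
@import XCTest;
@import AVFoundation;
#import "CameraTestUtils.h"

@interface CameraTestUtilsUsageExample : XCTestCase
@end

@implementation CameraTestUtilsUsageExample

- (void)testCreateCamAndSampleBuffer {
  // Create an FLTCam whose capture session work runs on a dedicated serial queue.
  dispatch_queue_t captureSessionQueue = dispatch_queue_create("example_capture_session_queue", NULL);
  FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
  XCTAssertNotNil(cam);

  // Create a small in-memory video sample buffer that can be fed to the camera's
  // AVCaptureVideoDataOutputSampleBufferDelegate callback.
  CMSampleBufferRef sampleBuffer = FLTCreateTestSampleBuffer();
  XCTAssertTrue(CMSampleBufferIsValid(sampleBuffer));
  CFRelease(sampleBuffer);  // The helper returns a +1 retained buffer; the caller releases it.
}

@end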
@@ -0,0 +1,44 @@
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "CameraTestUtils.h"
#import <OCMock/OCMock.h>
@import AVFoundation;

FLTCam *FLTCreateCamWithCaptureSessionQueue(dispatch_queue_t captureSessionQueue) {
id inputMock = OCMClassMock([AVCaptureDeviceInput class]);
OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]])
.andReturn(inputMock);

id sessionMock = OCMClassMock([AVCaptureSession class]);
OCMStub([sessionMock addInputWithNoConnections:[OCMArg any]]); // no-op
OCMStub([sessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);

return [[FLTCam alloc] initWithCameraName:@"camera"
resolutionPreset:@"medium"
enableAudio:true
orientation:UIDeviceOrientationPortrait
captureSession:sessionMock
captureSessionQueue:captureSessionQueue
error:nil];
}

CMSampleBufferRef FLTCreateTestSampleBuffer(void) {
CVPixelBufferRef pixelBuffer;
CVPixelBufferCreate(kCFAllocatorDefault, 100, 100, kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);

CMFormatDescriptionRef formatDescription;
CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer,
&formatDescription);

CMSampleTimingInfo timingInfo = {CMTimeMake(1, 44100), kCMTimeZero, kCMTimeInvalid};

CMSampleBufferRef sampleBuffer;
CMSampleBufferCreateReadyWithImageBuffer(kCFAllocatorDefault, pixelBuffer, formatDescription,
&timingInfo, &sampleBuffer);

CFRelease(pixelBuffer);
CFRelease(formatDescription);
return sampleBuffer;
}
@@ -7,7 +7,9 @@
@import AVFoundation;
@import XCTest;
#import <OCMock/OCMock.h>
#import "CameraTestUtils.h"

/// Includes test cases related to photo capture operations for FLTCam class.
@interface FLTCamPhotoCaptureTests : XCTestCase

@end
@@ -22,7 +24,7 @@ - (void)testCaptureToFile_mustReportErrorToResultIfSavePhotoDelegateCompletionsW
dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL);
dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific,
(void *)FLTCaptureSessionQueueSpecific, NULL);
FLTCam *cam = [self createFLTCamWithCaptureSessionQueue:captureSessionQueue];
FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
id mockSettings = OCMClassMock([AVCapturePhotoSettings class]);
OCMStub([mockSettings photoSettings]).andReturn(settings);
@@ -61,7 +63,7 @@ - (void)testCaptureToFile_mustReportPathToResultIfSavePhotoDelegateCompletionsWi
dispatch_queue_t captureSessionQueue = dispatch_queue_create("capture_session_queue", NULL);
dispatch_queue_set_specific(captureSessionQueue, FLTCaptureSessionQueueSpecific,
(void *)FLTCaptureSessionQueueSpecific, NULL);
FLTCam *cam = [self createFLTCamWithCaptureSessionQueue:captureSessionQueue];
FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);

AVCapturePhotoSettings *settings = [AVCapturePhotoSettings photoSettings];
id mockSettings = OCMClassMock([AVCapturePhotoSettings class]);
@@ -92,23 +94,4 @@ - (void)testCaptureToFile_mustReportPathToResultIfSavePhotoDelegateCompletionsWi
[self waitForExpectationsWithTimeout:1 handler:nil];
}

/// Creates an `FLTCam` that runs its operations on a given capture session queue.
- (FLTCam *)createFLTCamWithCaptureSessionQueue:(dispatch_queue_t)captureSessionQueue {
id inputMock = OCMClassMock([AVCaptureDeviceInput class]);
OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]])
.andReturn(inputMock);

id sessionMock = OCMClassMock([AVCaptureSession class]);
OCMStub([sessionMock alloc]).andReturn(sessionMock);
OCMStub([sessionMock addInputWithNoConnections:[OCMArg any]]); // no-op
OCMStub([sessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);

return [[FLTCam alloc] initWithCameraName:@"camera"
resolutionPreset:@"medium"
enableAudio:true
orientation:UIDeviceOrientationPortrait
captureSessionQueue:captureSessionQueue
error:nil];
}

@end
@@ -7,31 +7,35 @@
@import AVFoundation;
@import XCTest;
#import <OCMock/OCMock.h>
#import "CameraTestUtils.h"

/// Includes test cases related to sample buffer handling for FLTCam class.
@interface FLTCamSampleBufferTests : XCTestCase

@end

@implementation FLTCamSampleBufferTests

- (void)testSampleBufferCallbackQueueMustBeCaptureSessionQueue {
id inputMock = OCMClassMock([AVCaptureDeviceInput class]);
OCMStub([inputMock deviceInputWithDevice:[OCMArg any] error:[OCMArg setTo:nil]])
.andReturn(inputMock);

id sessionMock = OCMClassMock([AVCaptureSession class]);
OCMStub([sessionMock alloc]).andReturn(sessionMock);
OCMStub([sessionMock addInputWithNoConnections:[OCMArg any]]); // no-op
OCMStub([sessionMock canSetSessionPreset:[OCMArg any]]).andReturn(YES);

dispatch_queue_t captureSessionQueue = dispatch_queue_create("testing", NULL);
FLTCam *cam = [[FLTCam alloc] initWithCameraName:@"camera"
resolutionPreset:@"medium"
enableAudio:true
orientation:UIDeviceOrientationPortrait
captureSessionQueue:captureSessionQueue
error:nil];
XCTAssertEqual(captureSessionQueue, cam.captureVideoOutput.sampleBufferCallbackQueue);
FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(captureSessionQueue);
XCTAssertEqual(captureSessionQueue, cam.captureVideoOutput.sampleBufferCallbackQueue,
@"Sample buffer callback queue must be the capture session queue.");
}

- (void)testCopyPixelBuffer {
FLTCam *cam = FLTCreateCamWithCaptureSessionQueue(dispatch_queue_create("test", NULL));
CMSampleBufferRef capturedSampleBuffer = FLTCreateTestSampleBuffer();
CVPixelBufferRef capturedPixelBuffer = CMSampleBufferGetImageBuffer(capturedSampleBuffer);
// Mimic sample buffer callback when captured a new video sample
[cam captureOutput:cam.captureVideoOutput
didOutputSampleBuffer:capturedSampleBuffer
fromConnection:OCMClassMock([AVCaptureConnection class])];
CVPixelBufferRef deliveriedPixelBuffer = [cam copyPixelBuffer];
XCTAssertEqual(deliveriedPixelBuffer, capturedPixelBuffer,
@"FLTCam must deliver the latest captured pixel buffer to copyPixelBuffer API.");
CFRelease(capturedSampleBuffer);
CFRelease(deliveriedPixelBuffer);
}

@end
@@ -53,7 +53,7 @@ - (void)testHandlePhotoCaptureResult_mustCompleteWithErrorIfFailedToWrite {
[completionExpectation fulfill];
}];

// We can't use OCMClassMock for NSData because some XCTest APIs uses NSData (e.g.
// Do not use OCMClassMock for NSData because some XCTest APIs uses NSData (e.g.
// `XCTRunnerIDESession::logDebugMessage:`) on a private queue.
id mockData = OCMPartialMock([NSData data]);
OCMStub([mockData writeToFile:OCMOCK_ANY
@@ -82,7 +82,7 @@ - (void)testHandlePhotoCaptureResult_mustCompleteWithFilePathIfSuccessToWrite {
[completionExpectation fulfill];
}];

// We can't use OCMClassMock for NSData because some XCTest APIs uses NSData (e.g.
// Do not use OCMClassMock for NSData because some XCTest APIs uses NSData (e.g.
// `XCTRunnerIDESession::logDebugMessage:`) on a private queue.
id mockData = OCMPartialMock([NSData data]);
OCMStub([mockData writeToFile:filePath options:NSDataWritingAtomic error:[OCMArg setTo:nil]])
@@ -107,7 +107,7 @@ - (void)testHandlePhotoCaptureResult_bothProvideDataAndSaveFileMustRunOnIOQueue
const char *ioQueueSpecific = "io_queue_specific";
dispatch_queue_set_specific(ioQueue, ioQueueSpecific, (void *)ioQueueSpecific, NULL);

// We can't use OCMClassMock for NSData because some XCTest APIs uses NSData (e.g.
// Do not use OCMClassMock for NSData because some XCTest APIs uses NSData (e.g.
// `XCTRunnerIDESession::logDebugMessage:`) on a private queue.
id mockData = OCMPartialMock([NSData data]);
OCMStub([mockData writeToFile:OCMOCK_ANY options:NSDataWritingAtomic error:[OCMArg setTo:nil]])
7 changes: 7 additions & 0 deletions packages/camera/camera/ios/Classes/FLTCam.h
@@ -31,6 +31,13 @@ NS_ASSUME_NONNULL_BEGIN
// Format used for video and image streaming.
@property(assign, nonatomic) FourCharCode videoFormat;

/// Initializes an `FLTCam` instance.
/// @param cameraName a name used to uniquely identify the camera.
/// @param resolutionPreset the resolution preset
/// @param enableAudio YES if audio should be enabled for video capturing; NO otherwise.
/// @param orientation the orientation of camera
/// @param captureSessionQueue the queue on which camera's capture session operations happen.
/// @param error report to the caller if any error happened creating the camera.
- (instancetype)initWithCameraName:(NSString *)cameraName
resolutionPreset:(NSString *)resolutionPreset
enableAudio:(BOOL)enableAudio
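
To illustrate the documented initializer, a hedged sketch of constructing an FLTCam on a dedicated capture session queue (the camera name, preset, and queue label below are illustrative assumptions, and error handling is abbreviated):

// Illustrative sketch only; parameter values and the queue label are assumptions, not plugin code.
@import camera;
@import UIKit;

static FLTCam *CreateExampleCam(void) {
  // In the plugin, the camera name is an AVCaptureDevice unique ID; @"camera" is a placeholder.
  dispatch_queue_t captureSessionQueue =
      dispatch_queue_create("example.captureSessionQueue", NULL);
  NSError *error = nil;
  FLTCam *cam = [[FLTCam alloc] initWithCameraName:@"camera"
                                  resolutionPreset:@"medium"
                                       enableAudio:YES
                                       orientation:UIDeviceOrientationPortrait
                               captureSessionQueue:captureSessionQueue
                                             error:&error];
  if (!cam) {
    NSLog(@"Failed to create FLTCam: %@", error);
  }
  return cam;
}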
57 changes: 43 additions & 14 deletions packages/camera/camera/ios/Classes/FLTCam.m
@@ -52,7 +52,9 @@ @interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
@property(readonly, nonatomic) AVCaptureSession *captureSession;

@property(readonly, nonatomic) AVCaptureInput *captureVideoInput;
@property(readonly) CVPixelBufferRef volatile latestPixelBuffer;
/// Tracks the latest pixel buffer sent from AVFoundation's sample buffer delegate callback.
/// Used to deliver the latest pixel buffer to the flutter engine via the `copyPixelBuffer` API.
@property(readwrite, nonatomic) CVPixelBufferRef latestPixelBuffer;
@property(readonly, nonatomic) CGSize captureSize;
@property(strong, nonatomic) AVAssetWriter *videoWriter;
@property(strong, nonatomic) AVAssetWriterInput *videoWriterInput;
@@ -76,6 +78,9 @@ @interface FLTCam () <AVCaptureVideoDataOutputSampleBufferDelegate,
@property AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
/// All FLTCam's state access and capture session related operations should be on run on this queue.
@property(strong, nonatomic) dispatch_queue_t captureSessionQueue;
/// The queue on which `latestPixelBuffer` property is accessed.
/// To avoid unnecessary contention, do not access `latestPixelBuffer` on the `captureSessionQueue`.
@property(strong, nonatomic) dispatch_queue_t pixelBufferSynchronizationQueue;
/// The queue on which captured photos (not videos) are written to disk.
/// Videos are written to disk by `videoAdaptor` on an internal queue managed by AVFoundation.
@property(strong, nonatomic) dispatch_queue_t photoIOQueue;
@@ -92,6 +97,22 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
orientation:(UIDeviceOrientation)orientation
captureSessionQueue:(dispatch_queue_t)captureSessionQueue
error:(NSError **)error {
return [self initWithCameraName:cameraName
resolutionPreset:resolutionPreset
enableAudio:enableAudio
orientation:orientation
captureSession:[[AVCaptureSession alloc] init]
captureSessionQueue:captureSessionQueue
error:error];
}

- (instancetype)initWithCameraName:(NSString *)cameraName
resolutionPreset:(NSString *)resolutionPreset
enableAudio:(BOOL)enableAudio
orientation:(UIDeviceOrientation)orientation
captureSession:(AVCaptureSession *)captureSession
captureSessionQueue:(dispatch_queue_t)captureSessionQueue
error:(NSError **)error {
self = [super init];
NSAssert(self, @"super init cannot be nil");
@try {
@@ -101,8 +122,10 @@ - (instancetype)initWithCameraName:(NSString *)cameraName
}
_enableAudio = enableAudio;
_captureSessionQueue = captureSessionQueue;
_pixelBufferSynchronizationQueue =
dispatch_queue_create("io.flutter.camera.pixelBufferSynchronizationQueue", NULL);
_photoIOQueue = dispatch_queue_create("io.flutter.camera.photoIOQueue", NULL);
_captureSession = [[AVCaptureSession alloc] init];
_captureSession = captureSession;
_captureDevice = [AVCaptureDevice deviceWithUniqueID:cameraName];
_flashMode = _captureDevice.hasFlash ? FLTFlashModeAuto : FLTFlashModeOff;
_exposureMode = FLTExposureModeAuto;
@@ -355,12 +378,17 @@ - (void)captureOutput:(AVCaptureOutput *)output
if (output == _captureVideoOutput) {
CVPixelBufferRef newBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CFRetain(newBuffer);
CVPixelBufferRef old = _latestPixelBuffer;
while (!OSAtomicCompareAndSwapPtrBarrier(old, newBuffer, (void **)&_latestPixelBuffer)) {
old = _latestPixelBuffer;
}
if (old != nil) {
CFRelease(old);

__block CVPixelBufferRef previousPixelBuffer = nil;
// Use `dispatch_sync` to avoid unnecessary context switch under common non-contest scenarios;
// Under rare contest scenarios, it will not block for too long since the critical section is
// quite lightweight.
dispatch_sync(self.pixelBufferSynchronizationQueue, ^{
previousPixelBuffer = self.latestPixelBuffer;
self.latestPixelBuffer = newBuffer;
});
if (previousPixelBuffer) {
CFRelease(previousPixelBuffer);
}
if (_onFrameAvailable) {
_onFrameAvailable();
@@ -420,7 +448,7 @@ - (void)captureOutput:(AVCaptureOutput *)output

[planes addObject:planeBuffer];
}
// Before accessing pixel data, we should lock the base address, and unlock it afterwards.
// Lock the base address before accessing pixel data, and unlock it afterwards.
// Done accessing the `pixelBuffer` at this point.
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

@@ -575,11 +603,12 @@ - (void)dealloc {
}

- (CVPixelBufferRef)copyPixelBuffer {
CVPixelBufferRef pixelBuffer = _latestPixelBuffer;
while (!OSAtomicCompareAndSwapPtrBarrier(pixelBuffer, nil, (void **)&_latestPixelBuffer)) {
pixelBuffer = _latestPixelBuffer;
}

__block CVPixelBufferRef pixelBuffer = nil;
// Use `dispatch_sync` because `copyPixelBuffer` API requires synchronous return.
dispatch_sync(self.pixelBufferSynchronizationQueue, ^{
pixelBuffer = self.latestPixelBuffer;
self.latestPixelBuffer = nil;
});
return pixelBuffer;
}

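
Stepping back from the diff, the pattern adopted above can be summarized in a small standalone sketch (class, method, and queue names are illustrative, not taken from the plugin): a private serial queue guards the latest-buffer pointer, replacing the OSAtomicCompareAndSwapPtrBarrier loops that previously did the handoff.

// A minimal standalone sketch of the producer/consumer handoff pattern used above.
// Class, method, and queue names are illustrative; this is not code from the plugin.
@import Foundation;
@import CoreVideo;

@interface LatestBufferBox : NSObject
@property(strong, nonatomic) dispatch_queue_t syncQueue;
@property(assign, nonatomic) CVPixelBufferRef latestPixelBuffer;
@end

@implementation LatestBufferBox

- (instancetype)init {
  self = [super init];
  if (self) {
    _syncQueue = dispatch_queue_create("example.pixelBufferSynchronizationQueue", NULL);
  }
  return self;
}

// Producer side: called for every new frame. Retains the new buffer, swaps it in on the
// serial queue, and releases whatever it replaced outside the critical section.
- (void)storeLatestPixelBuffer:(CVPixelBufferRef)newBuffer {
  CFRetain(newBuffer);
  __block CVPixelBufferRef previousBuffer = NULL;
  dispatch_sync(self.syncQueue, ^{
    previousBuffer = self.latestPixelBuffer;
    self.latestPixelBuffer = newBuffer;
  });
  if (previousBuffer) {
    CFRelease(previousBuffer);
  }
}

// Consumer side: synchronously takes ownership of the latest buffer (or NULL if none),
// mirroring the copyPixelBuffer contract of returning a retained buffer to the caller.
- (CVPixelBufferRef)takeLatestPixelBuffer {
  __block CVPixelBufferRef buffer = NULL;
  dispatch_sync(self.syncQueue, ^{
    buffer = self.latestPixelBuffer;
    self.latestPixelBuffer = NULL;
  });
  return buffer;
}

@end

Because the queue is serial and each critical section only swaps a pointer, dispatch_sync stays cheap on both the producer (delegate callback) and consumer (copyPixelBuffer) paths while still guaranteeing that exactly one side releases each replaced buffer.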