diff --git a/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews.mm b/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews.mm index 40c7ab8e12edc..9208bbf0f89e1 100644 --- a/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews.mm +++ b/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews.mm @@ -32,6 +32,22 @@ - (BOOL)flt_hasFirstResponderInViewHierarchySubtree { } @end +// Determines if the final `clip_bounds` from a clipRect/clipRRect/clipPath mutator contains the +// `platformview_boundingrect`. +// +// `clip_bounds` is the bounding rect of the rect/rrect/path in the clipRect/clipRRect/clipPath +// mutator. This rect is in its own coordinate space. The rect needs to be transformed by +// `transform_matrix` to be in the coordinate space where the PlatformView is displayed. +// +// `platformview_boundingrect` is the final bounding rect of the PlatformView in the coordinate +// space where the PlatformView is displayed. +static bool ClipBoundsContainsPlatformViewBoundingRect(const SkRect& clip_bounds, + const SkRect& platformview_boundingrect, + const SkMatrix& transform_matrix) { + SkRect transformed_clip_bounds = transform_matrix.mapRect(clip_bounds); + return transformed_clip_bounds.contains(platformview_boundingrect); +} + namespace flutter { // Becomes NO if Apple's API changes and blurred backdrop filters cannot be applied. BOOL canApplyBlurBackdrop = YES; @@ -404,7 +420,8 @@ - (BOOL)flt_hasFirstResponderInViewHierarchySubtree { } void FlutterPlatformViewsController::ApplyMutators(const MutatorsStack& mutators_stack, - UIView* embedded_view) { + UIView* embedded_view, + const SkRect& bounding_rect) { if (flutter_view_ == nullptr) { return; } @@ -412,39 +429,53 @@ - (BOOL)flt_hasFirstResponderInViewHierarchySubtree { ResetAnchor(embedded_view.layer); ChildClippingView* clipView = (ChildClippingView*)embedded_view.superview; - // The UIKit frame is set based on the logical resolution instead of physical. - // (https://developer.apple.com/library/archive/documentation/DeviceInformation/Reference/iOSDeviceCompatibility/Displays/Displays.html). - // However, flow is based on the physical resolution. For example, 1000 pixels in flow equals - // 500 points in UIKit. And until this point, we did all the calculation based on the flow - // resolution. So we need to scale down to match UIKit's logical resolution.
CGFloat screenScale = [UIScreen mainScreen].scale; - CATransform3D finalTransform = CATransform3DMakeScale(1 / screenScale, 1 / screenScale, 1); UIView* flutter_view = flutter_view_.get(); FlutterClippingMaskView* maskView = [[[FlutterClippingMaskView alloc] initWithFrame:CGRectMake(-clipView.frame.origin.x, -clipView.frame.origin.y, CGRectGetWidth(flutter_view.bounds), - CGRectGetHeight(flutter_view.bounds))] autorelease]; + CGRectGetHeight(flutter_view.bounds)) + screenScale:screenScale] autorelease]; + SkMatrix transformMatrix; NSMutableArray* blurFilters = [[[NSMutableArray alloc] init] autorelease]; + clipView.maskView = nil; auto iter = mutators_stack.Begin(); while (iter != mutators_stack.End()) { switch ((*iter)->GetType()) { case kTransform: { - CATransform3D transform = GetCATransform3DFromSkMatrix((*iter)->GetMatrix()); - finalTransform = CATransform3DConcat(transform, finalTransform); + transformMatrix.preConcat((*iter)->GetMatrix()); break; } - case kClipRect: - [maskView clipRect:(*iter)->GetRect() matrix:finalTransform]; + case kClipRect: { + if (ClipBoundsContainsPlatformViewBoundingRect((*iter)->GetRect(), bounding_rect, + transformMatrix)) { + break; + } + [maskView clipRect:(*iter)->GetRect() matrix:transformMatrix]; + clipView.maskView = maskView; break; - case kClipRRect: - [maskView clipRRect:(*iter)->GetRRect() matrix:finalTransform]; + } + case kClipRRect: { + if (ClipBoundsContainsPlatformViewBoundingRect((*iter)->GetRRect().getBounds(), + bounding_rect, transformMatrix)) { + break; + } + [maskView clipRRect:(*iter)->GetRRect() matrix:transformMatrix]; + clipView.maskView = maskView; break; - case kClipPath: - [maskView clipPath:(*iter)->GetPath() matrix:finalTransform]; + } + case kClipPath: { + if (ClipBoundsContainsPlatformViewBoundingRect((*iter)->GetPath().getBounds(), + bounding_rect, transformMatrix)) { + break; + } + [maskView clipPath:(*iter)->GetPath() matrix:transformMatrix]; + clipView.maskView = maskView; break; + } case kOpacity: embedded_view.alpha = (*iter)->GetAlphaFloat() * embedded_view.alpha; break; @@ -489,6 +520,13 @@ - (BOOL)flt_hasFirstResponderInViewHierarchySubtree { [clipView applyBlurBackdropFilters:blurFilters]; } + // The UIKit frame is set based on the logical resolution (points) instead of physical. + // (https://developer.apple.com/library/archive/documentation/DeviceInformation/Reference/iOSDeviceCompatibility/Displays/Displays.html). + // However, flow is based on the physical resolution. For example, 1000 pixels in flow equals + // 500 points in UIKit for devices that has screenScale of 2. We need to scale the transformMatrix + // down to the logical resoltion before applying it to the layer of PlatformView. + transformMatrix.postScale(1 / screenScale, 1 / screenScale); + // Reverse the offset of the clipView. // The clipView's frame includes the final translate of the final transform matrix. // Thus, this translate needs to be reversed so the platform view can layout at the correct @@ -496,10 +534,9 @@ - (BOOL)flt_hasFirstResponderInViewHierarchySubtree { // // Note that the transforms are not applied to the clipping paths because clipping paths happen on // the mask view, whose origin is always (0,0) to the flutter_view. 
- CATransform3D reverseTranslate = - CATransform3DMakeTranslation(-clipView.frame.origin.x, -clipView.frame.origin.y, 0); - embedded_view.layer.transform = CATransform3DConcat(finalTransform, reverseTranslate); - clipView.maskView = maskView; + transformMatrix.postTranslate(-clipView.frame.origin.x, -clipView.frame.origin.y); + + embedded_view.layer.transform = flutter::GetCATransform3DFromSkMatrix(transformMatrix); } void FlutterPlatformViewsController::CompositeWithParams(int view_id, @@ -538,7 +575,7 @@ - (BOOL)flt_hasFirstResponderInViewHierarchySubtree { CGFloat screenScale = [UIScreen mainScreen].scale; clippingView.frame = CGRectMake(rect.x() / screenScale, rect.y() / screenScale, rect.width() / screenScale, rect.height() / screenScale); - ApplyMutators(mutatorStack, touchInterceptor); + ApplyMutators(mutatorStack, touchInterceptor, rect); } EmbedderPaintContext FlutterPlatformViewsController::CompositeEmbeddedView(int view_id) { diff --git a/shell/platform/darwin/ios/framework/Source/FlutterPlatformViewsTest.mm b/shell/platform/darwin/ios/framework/Source/FlutterPlatformViewsTest.mm index 56fbc541d3253..9391a4b23b1da 100644 --- a/shell/platform/darwin/ios/framework/Source/FlutterPlatformViewsTest.mm +++ b/shell/platform/darwin/ios/framework/Source/FlutterPlatformViewsTest.mm @@ -1478,6 +1478,74 @@ - (void)testChildClippingViewShouldBeTheBoundingRectOfPlatformView { kFloatCompareEpsilon); } +- (void)testClipsDoNotInterceptWithPlatformViewShouldNotAddMaskView { + flutter::FlutterPlatformViewsTestMockPlatformViewDelegate mock_delegate; + auto thread_task_runner = CreateNewThread("FlutterPlatformViewsTest"); + flutter::TaskRunners runners(/*label=*/self.name.UTF8String, + /*platform=*/thread_task_runner, + /*raster=*/thread_task_runner, + /*ui=*/thread_task_runner, + /*io=*/thread_task_runner); + auto flutterPlatformViewsController = std::make_shared<flutter::FlutterPlatformViewsController>(); + auto platform_view = std::make_unique<flutter::PlatformViewIOS>( + /*delegate=*/mock_delegate, + /*rendering_api=*/flutter::IOSRenderingAPI::kSoftware, + /*platform_views_controller=*/flutterPlatformViewsController, + /*task_runners=*/runners); + + FlutterPlatformViewsTestMockFlutterPlatformFactory* factory = + [[FlutterPlatformViewsTestMockFlutterPlatformFactory new] autorelease]; + flutterPlatformViewsController->RegisterViewFactory( + factory, @"MockFlutterPlatformView", + FlutterPlatformViewGestureRecognizersBlockingPolicyEager); + FlutterResult result = ^(id result) { + }; + flutterPlatformViewsController->OnMethodCall( + [FlutterMethodCall + methodCallWithMethodName:@"create" + arguments:@{@"id" : @2, @"viewType" : @"MockFlutterPlatformView"}], + result); + + XCTAssertNotNil(gMockPlatformView); + + UIView* mockFlutterView = [[[UIView alloc] initWithFrame:CGRectMake(0, 0, 30, 30)] autorelease]; + flutterPlatformViewsController->SetFlutterView(mockFlutterView); + // Create embedded view params + flutter::MutatorsStack stack; + // Layer tree always pushes a screen scale factor to the stack + SkMatrix screenScaleMatrix = + SkMatrix::Scale([UIScreen mainScreen].scale, [UIScreen mainScreen].scale); + stack.PushTransform(screenScaleMatrix); + SkMatrix translateMatrix = SkMatrix::Translate(5, 5); + // The platform view's rect for this test will be (5, 5, 10, 10) + stack.PushTransform(translateMatrix); + // Push a clip rect, big enough to contain the entire platform view bounds + SkRect rect = SkRect::MakeXYWH(0, 0, 25, 25); + stack.PushClipRect(rect); + // Push a clip rrect, big enough to contain the entire platform view bounds + SkRect rect_for_rrect =
SkRect::MakeXYWH(0, 0, 24, 24); + SkRRect rrect = SkRRect::MakeRectXY(rect_for_rrect, 1, 1); + stack.PushClipRRect(rrect); + // Push a clip path, big enough to contain the entire platform view bounds + SkPath path = SkPath::RRect(SkRect::MakeXYWH(0, 0, 23, 23), 1, 1); + stack.PushClipPath(path); + + auto embeddedViewParams = std::make_unique<flutter::EmbeddedViewParams>( + SkMatrix::Concat(screenScaleMatrix, translateMatrix), SkSize::Make(5, 5), stack); + + flutterPlatformViewsController->PrerollCompositeEmbeddedView(2, std::move(embeddedViewParams)); + flutterPlatformViewsController->CompositeEmbeddedView(2); + gMockPlatformView.backgroundColor = UIColor.redColor; + XCTAssertTrue([gMockPlatformView.superview.superview isKindOfClass:ChildClippingView.class]); + ChildClippingView* childClippingView = (ChildClippingView*)gMockPlatformView.superview.superview; + [mockFlutterView addSubview:childClippingView]; + + [mockFlutterView setNeedsLayout]; + [mockFlutterView layoutIfNeeded]; + + XCTAssertNil(childClippingView.maskView); +} + - (void)testClipRect { flutter::FlutterPlatformViewsTestMockPlatformViewDelegate mock_delegate; auto thread_task_runner = CreateNewThread("FlutterPlatformViewsTest"); diff --git a/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews_Internal.h b/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews_Internal.h index 9e8009e6fe077..7ec51e3b8ffcf 100644 --- a/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews_Internal.h +++ b/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews_Internal.h @@ -28,20 +28,22 @@ // is replaced with the alpha channel of the |FlutterClippingMaskView|. @interface FlutterClippingMaskView : UIView +- (instancetype)initWithFrame:(CGRect)frame screenScale:(CGFloat)screenScale; + // Adds a clip rect operation to the queue. // // The `clipSkRect` is transformed with the `matrix` before adding to the queue. -- (void)clipRect:(const SkRect&)clipSkRect matrix:(const CATransform3D&)matrix; +- (void)clipRect:(const SkRect&)clipSkRect matrix:(const SkMatrix&)matrix; // Adds a clip rrect operation to the queue. // // The `clipSkRRect` is transformed with the `matrix` before adding to the queue. -- (void)clipRRect:(const SkRRect&)clipSkRRect matrix:(const CATransform3D&)matrix; +- (void)clipRRect:(const SkRRect&)clipSkRRect matrix:(const SkMatrix&)matrix; // Adds a clip path operation to the queue. // // The `path` is transformed with the `matrix` before adding to the queue. -- (void)clipPath:(const SkPath&)path matrix:(const CATransform3D&)matrix; +- (void)clipPath:(const SkPath&)path matrix:(const SkMatrix&)matrix; @end @@ -280,7 +282,13 @@ class FlutterPlatformViewsController { // T_1 is applied to C_2, T_3 and T_4 are applied to C_5, and T_6 is applied to PLATFORM_VIEW. // // After each clip operation, we update the head to the super view of the current head. - void ApplyMutators(const MutatorsStack& mutators_stack, UIView* embedded_view); + // + // The `bounding_rect` is the final bounding rect of the PlatformView + // (EmbeddedViewParams::finalBoundingRect). If a clip mutator's rect contains the final bounding + // rect of the PlatformView, the clip mutator is not applied for performance optimization.
+ void ApplyMutators(const MutatorsStack& mutators_stack, + UIView* embedded_view, + const SkRect& bounding_rect); void CompositeWithParams(int view_id, const EmbeddedViewParams& params); // Allocates a new FlutterPlatformViewLayer if needed, draws the pixels within the rect from diff --git a/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews_Internal.mm b/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews_Internal.mm index ff70a4b4d2b27..da7f156bafe95 100644 --- a/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews_Internal.mm +++ b/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews_Internal.mm @@ -241,6 +241,16 @@ - (NSMutableArray*)backdropFilterSubviews { @interface FlutterClippingMaskView () +// A `CATransform3D` matrix representing a scale transform that reverses UIScreen.scale. +// +// The transform matrix passed to the clipRect/clipRRect/clipPath methods is in the device +// coordinate space. The transform matrix concats `reverseScreenScale` to create a transform matrix in the iOS +// logical coordinates (points). +// +// See https://developer.apple.com/documentation/uikit/uiscreen/1617836-scale?language=objc for +// information about screen scale. +@property(nonatomic) CATransform3D reverseScreenScale; + - (fml::CFRef)getTransformedPath:(CGPathRef)path matrix:(CATransform3D)matrix; @end @@ -250,8 +260,13 @@ @implementation FlutterClippingMaskView { } - (instancetype)initWithFrame:(CGRect)frame { + return [self initWithFrame:frame screenScale:[UIScreen mainScreen].scale]; +} + +- (instancetype)initWithFrame:(CGRect)frame screenScale:(CGFloat)screenScale { if (self = [super initWithFrame:frame]) { self.backgroundColor = UIColor.clearColor; + _reverseScreenScale = CATransform3DMakeScale(1 / screenScale, 1 / screenScale, 1); } return self; } @@ -280,13 +295,16 @@ - (void)drawRect:(CGRect)rect { CGContextRestoreGState(context); } -- (void)clipRect:(const SkRect&)clipSkRect matrix:(const CATransform3D&)matrix { +- (void)clipRect:(const SkRect&)clipSkRect matrix:(const SkMatrix&)matrix { CGRect clipRect = flutter::GetCGRectFromSkRect(clipSkRect); CGPathRef path = CGPathCreateWithRect(clipRect, nil); - paths_.push_back([self getTransformedPath:path matrix:matrix]); + // The `matrix` is based on the physical pixels, convert it to UIKit points. + CATransform3D matrixInPoints = + CATransform3DConcat(flutter::GetCATransform3DFromSkMatrix(matrix), _reverseScreenScale); + paths_.push_back([self getTransformedPath:path matrix:matrixInPoints]); } -- (void)clipRRect:(const SkRRect&)clipSkRRect matrix:(const CATransform3D&)matrix { +- (void)clipRRect:(const SkRRect&)clipSkRRect matrix:(const SkMatrix&)matrix { CGPathRef pathRef = nullptr; switch (clipSkRRect.getType()) { case SkRRect::kEmpty_Type: { @@ -346,13 +364,16 @@ - (void)clipRRect:(const SkRRect&)clipSkRRect matrix:(const CATransform3D&)matri break; } } + // The `matrix` is based on the physical pixels, convert it to UIKit points. + CATransform3D matrixInPoints = + CATransform3DConcat(flutter::GetCATransform3DFromSkMatrix(matrix), _reverseScreenScale); // TODO(cyanglaz): iOS does not seem to support hard edge on CAShapeLayer. It clearly stated that // the CAShaperLayer will be drawn antialiased. Need to figure out a way to do the hard edge // clipping on iOS.
- paths_.push_back([self getTransformedPath:pathRef matrix:matrix]); + paths_.push_back([self getTransformedPath:pathRef matrix:matrixInPoints]); } -- (void)clipPath:(const SkPath&)path matrix:(const CATransform3D&)matrix { +- (void)clipPath:(const SkPath&)path matrix:(const SkMatrix&)matrix { if (!path.isValid()) { return; } @@ -411,7 +432,10 @@ - (void)clipPath:(const SkPath&)path matrix:(const CATransform3D&)matrix { } verb = iter.next(pts); } - paths_.push_back([self getTransformedPath:pathRef matrix:matrix]); + // The `matrix` is based on the physical pixels, convert it to UIKit points. + CATransform3D matrixInPoints = + CATransform3DConcat(flutter::GetCATransform3DFromSkMatrix(matrix), _reverseScreenScale); + paths_.push_back([self getTransformedPath:pathRef matrix:matrixInPoints]); } - (fml::CFRef)getTransformedPath:(CGPathRef)path matrix:(CATransform3D)matrix { diff --git a/testing/scenario_app/ios/Scenarios/Scenarios.xcodeproj/project.pbxproj b/testing/scenario_app/ios/Scenarios/Scenarios.xcodeproj/project.pbxproj index feecb3b772466..5ef94a102a778 100644 --- a/testing/scenario_app/ios/Scenarios/Scenarios.xcodeproj/project.pbxproj +++ b/testing/scenario_app/ios/Scenarios/Scenarios.xcodeproj/project.pbxproj @@ -37,8 +37,11 @@ 6816DB9E231750ED00A51400 /* GoldenPlatformViewTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 6816DB9D231750ED00A51400 /* GoldenPlatformViewTests.m */; }; 6816DBA12317573300A51400 /* GoldenImage.m in Sources */ = {isa = PBXBuildFile; fileRef = 6816DBA02317573300A51400 /* GoldenImage.m */; }; 6816DBA42318358200A51400 /* GoldenTestManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 6816DBA32318358200A51400 /* GoldenTestManager.m */; }; + 687AF8E9291EBDE0003912C7 /* golden_platform_view_clippath_with_transform_iPhone 8_13.0_simulator.png in Resources */ = {isa = PBXBuildFile; fileRef = 687AF8E8291EBDE0003912C7 /* golden_platform_view_clippath_with_transform_iPhone 8_13.0_simulator.png */; }; 68A5B63423EB71D300BDBCDB /* PlatformViewGestureRecognizerTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 68A5B63323EB71D300BDBCDB /* PlatformViewGestureRecognizerTests.m */; }; 68D4017D2564859300ECD91A /* ContinuousTexture.m in Sources */ = {isa = PBXBuildFile; fileRef = 68D4017C2564859300ECD91A /* ContinuousTexture.m */; }; + 68D5003F291ED645001ACFE1 /* golden_platform_view_cliprrect_with_transform_iPhone 8_13.0_simulator.png in Resources */ = {isa = PBXBuildFile; fileRef = 68D5003D291ED645001ACFE1 /* golden_platform_view_cliprrect_with_transform_iPhone 8_13.0_simulator.png */; }; + 68D50042291ED8CD001ACFE1 /* golden_platform_view_cliprect_with_transform_iPhone 8_13.0_simulator.png in Resources */ = {isa = PBXBuildFile; fileRef = 68D50041291ED8CD001ACFE1 /* golden_platform_view_cliprect_with_transform_iPhone 8_13.0_simulator.png */; }; F26F15B8268B6B5600EC54D3 /* iPadGestureTests.m in Sources */ = {isa = PBXBuildFile; fileRef = F26F15B7268B6B5500EC54D3 /* iPadGestureTests.m */; }; F769EB53276312BB007AC10F /* golden_platform_view_cliprect_iPhone 8_13.0_simulator.png in Resources */ = {isa = PBXBuildFile; fileRef = F769EB52276312BB007AC10F /* golden_platform_view_cliprect_iPhone 8_13.0_simulator.png */; }; F7B464EB2759D0A900079189 /* golden_two_platform_views_with_other_backdrop_filter_iPhone 8_13.0_simulator.png in Resources */ = {isa = PBXBuildFile; fileRef = F7B464DE2759D0A900079189 /* golden_two_platform_views_with_other_backdrop_filter_iPhone 8_13.0_simulator.png */; }; @@ -148,9 +151,12 @@ 6816DBA02317573300A51400 /* GoldenImage.m */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GoldenImage.m; sourceTree = ""; }; 6816DBA22318358200A51400 /* GoldenTestManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = GoldenTestManager.h; sourceTree = ""; }; 6816DBA32318358200A51400 /* GoldenTestManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = GoldenTestManager.m; sourceTree = ""; }; + 687AF8E8291EBDE0003912C7 /* golden_platform_view_clippath_with_transform_iPhone 8_13.0_simulator.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "golden_platform_view_clippath_with_transform_iPhone 8_13.0_simulator.png"; sourceTree = ""; }; 68A5B63323EB71D300BDBCDB /* PlatformViewGestureRecognizerTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = PlatformViewGestureRecognizerTests.m; sourceTree = ""; }; 68D4017B2564859300ECD91A /* ContinuousTexture.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ContinuousTexture.h; sourceTree = ""; }; 68D4017C2564859300ECD91A /* ContinuousTexture.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ContinuousTexture.m; sourceTree = ""; }; + 68D5003D291ED645001ACFE1 /* golden_platform_view_cliprrect_with_transform_iPhone 8_13.0_simulator.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "golden_platform_view_cliprrect_with_transform_iPhone 8_13.0_simulator.png"; sourceTree = ""; }; + 68D50041291ED8CD001ACFE1 /* golden_platform_view_cliprect_with_transform_iPhone 8_13.0_simulator.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "golden_platform_view_cliprect_with_transform_iPhone 8_13.0_simulator.png"; sourceTree = ""; }; F26F15B7268B6B5500EC54D3 /* iPadGestureTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = iPadGestureTests.m; sourceTree = ""; }; F72114B628EF99F500184A2D /* Info_Impeller.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Info_Impeller.plist; sourceTree = ""; }; F769EB52276312BB007AC10F /* golden_platform_view_cliprect_iPhone 8_13.0_simulator.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "golden_platform_view_cliprect_iPhone 8_13.0_simulator.png"; sourceTree = ""; }; @@ -290,6 +296,9 @@ F7B464DC2759D02B00079189 /* Goldens */ = { isa = PBXGroup; children = ( + 68D50041291ED8CD001ACFE1 /* golden_platform_view_cliprect_with_transform_iPhone 8_13.0_simulator.png */, + 68D5003D291ED645001ACFE1 /* golden_platform_view_cliprrect_with_transform_iPhone 8_13.0_simulator.png */, + 687AF8E8291EBDE0003912C7 /* golden_platform_view_clippath_with_transform_iPhone 8_13.0_simulator.png */, F7B464E32759D0A900079189 /* golden_bogus_font_text_iPhone 8_13.0_simulator.png */, F7B464E92759D0A900079189 /* golden_non_full_screen_flutter_view_platform_view_iPhone 8_13.0_simulator.png */, F7B464E52759D0A900079189 /* golden_platform_view_clippath_iPhone 8_13.0_simulator.png */, @@ -432,6 +441,7 @@ isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; files = ( + 68D50042291ED8CD001ACFE1 /* golden_platform_view_cliprect_with_transform_iPhone 8_13.0_simulator.png in Resources */, F7B464F62759D0A900079189 /* golden_non_full_screen_flutter_view_platform_view_iPhone 8_13.0_simulator.png in Resources */, F7B464F02759D0A900079189 /* golden_bogus_font_text_iPhone 8_13.0_simulator.png in Resources */, F7B464F22759D0A900079189 /* golden_platform_view_clippath_iPhone 8_13.0_simulator.png in 
Resources */, @@ -442,8 +452,10 @@ F7B464F32759D0A900079189 /* golden_platform_view_multiple_background_foreground_iPhone 8_13.0_simulator.png in Resources */, F7B464F72759D0A900079189 /* golden_platform_view_rotate_iPhone 8_13.0_simulator.png in Resources */, F7B464ED2759D0A900079189 /* golden_platform_view_cliprrect_iPhone 8_13.0_simulator.png in Resources */, + 68D5003F291ED645001ACFE1 /* golden_platform_view_cliprrect_with_transform_iPhone 8_13.0_simulator.png in Resources */, F7B464EB2759D0A900079189 /* golden_two_platform_views_with_other_backdrop_filter_iPhone 8_13.0_simulator.png in Resources */, F7B464F42759D0A900079189 /* golden_platform_view_with_other_backdrop_filter_iPhone 8_13.0_simulator.png in Resources */, + 687AF8E9291EBDE0003912C7 /* golden_platform_view_clippath_with_transform_iPhone 8_13.0_simulator.png in Resources */, F769EB53276312BB007AC10F /* golden_platform_view_cliprect_iPhone 8_13.0_simulator.png in Resources */, F7B464EF2759D0A900079189 /* golden_platform_view_multiple_iPhone 8_13.0_simulator.png in Resources */, ); diff --git a/testing/scenario_app/ios/Scenarios/Scenarios/AppDelegate.m b/testing/scenario_app/ios/Scenarios/Scenarios/AppDelegate.m index f69b402fa7131..4674126c7b47e 100644 --- a/testing/scenario_app/ios/Scenarios/Scenarios/AppDelegate.m +++ b/testing/scenario_app/ios/Scenarios/Scenarios/AppDelegate.m @@ -44,6 +44,9 @@ - (BOOL)application:(UIApplication*)application @"--platform-view-cliprect" : @"platform_view_cliprect", @"--platform-view-cliprrect" : @"platform_view_cliprrect", @"--platform-view-clippath" : @"platform_view_clippath", + @"--platform-view-cliprrect-with-transform" : @"platform_view_cliprrect_with_transform", + @"--platform-view-cliprect-with-transform" : @"platform_view_cliprect_with_transform", + @"--platform-view-clippath-with-transform" : @"platform_view_clippath_with_transform", @"--platform-view-transform" : @"platform_view_transform", @"--platform-view-opacity" : @"platform_view_opacity", @"--platform-view-with-other-backdrop-filter" : @"platform_view_with_other_backdrop_filter", diff --git a/testing/scenario_app/ios/Scenarios/ScenariosUITests/GoldenTestManager.m b/testing/scenario_app/ios/Scenarios/ScenariosUITests/GoldenTestManager.m index 4b6f15e5e85c0..ad1a400bb7d9a 100644 --- a/testing/scenario_app/ios/Scenarios/ScenariosUITests/GoldenTestManager.m +++ b/testing/scenario_app/ios/Scenarios/ScenariosUITests/GoldenTestManager.m @@ -30,6 +30,9 @@ - (instancetype)initWithLaunchArg:(NSString*)launchArg { @"--platform-view-cliprect" : @"platform_view_cliprect", @"--platform-view-cliprrect" : @"platform_view_cliprrect", @"--platform-view-clippath" : @"platform_view_clippath", + @"--platform-view-cliprrect-with-transform" : @"platform_view_cliprrect_with_transform", + @"--platform-view-cliprect-with-transform" : @"platform_view_cliprect_with_transform", + @"--platform-view-clippath-with-transform" : @"platform_view_clippath_with_transform", @"--platform-view-transform" : @"platform_view_transform", @"--platform-view-opacity" : @"platform_view_opacity", @"--platform-view-with-other-backdrop-filter" : @"platform_view_with_other_backdrop_filter", diff --git a/testing/scenario_app/ios/Scenarios/ScenariosUITests/PlatformViewUITests.m b/testing/scenario_app/ios/Scenarios/ScenariosUITests/PlatformViewUITests.m index 4da67c0ab1a72..cfec0aa1ca84b 100644 --- a/testing/scenario_app/ios/Scenarios/ScenariosUITests/PlatformViewUITests.m +++ b/testing/scenario_app/ios/Scenarios/ScenariosUITests/PlatformViewUITests.m @@ -134,6 +134,60 @@ - 
(void)testPlatformView { @end +@interface PlatformViewMutationClipRectWithTransformTests : GoldenPlatformViewTests + +@end + +@implementation PlatformViewMutationClipRectWithTransformTests + +- (instancetype)initWithInvocation:(NSInvocation*)invocation { + GoldenTestManager* manager = + [[GoldenTestManager alloc] initWithLaunchArg:@"--platform-view-cliprect-with-transform"]; + return [super initWithManager:manager invocation:invocation]; +} + +- (void)testPlatformView { + [self checkPlatformViewGolden]; +} + +@end + +@interface PlatformViewMutationClipRRectWithTransformTests : GoldenPlatformViewTests + +@end + +@implementation PlatformViewMutationClipRRectWithTransformTests + +- (instancetype)initWithInvocation:(NSInvocation*)invocation { + GoldenTestManager* manager = + [[GoldenTestManager alloc] initWithLaunchArg:@"--platform-view-cliprrect-with-transform"]; + return [super initWithManager:manager invocation:invocation]; +} + +- (void)testPlatformView { + [self checkPlatformViewGolden]; +} + +@end + +@interface PlatformViewMutationClipPathWithTransformTests : GoldenPlatformViewTests + +@end + +@implementation PlatformViewMutationClipPathWithTransformTests + +- (instancetype)initWithInvocation:(NSInvocation*)invocation { + GoldenTestManager* manager = + [[GoldenTestManager alloc] initWithLaunchArg:@"--platform-view-clippath-with-transform"]; + return [super initWithManager:manager invocation:invocation]; +} + +- (void)testPlatformView { + [self checkPlatformViewGolden]; +} + +@end + @interface PlatformViewMutationTransformTests : GoldenPlatformViewTests @end diff --git a/testing/scenario_app/ios/Scenarios/ScenariosUITests/golden_platform_view_clippath_with_transform_iPhone 8_13.0_simulator.png b/testing/scenario_app/ios/Scenarios/ScenariosUITests/golden_platform_view_clippath_with_transform_iPhone 8_13.0_simulator.png new file mode 100644 index 0000000000000..b494f84298ea5 Binary files /dev/null and b/testing/scenario_app/ios/Scenarios/ScenariosUITests/golden_platform_view_clippath_with_transform_iPhone 8_13.0_simulator.png differ diff --git a/testing/scenario_app/ios/Scenarios/ScenariosUITests/golden_platform_view_cliprect_with_transform_iPhone 8_13.0_simulator.png b/testing/scenario_app/ios/Scenarios/ScenariosUITests/golden_platform_view_cliprect_with_transform_iPhone 8_13.0_simulator.png new file mode 100644 index 0000000000000..883528a188218 Binary files /dev/null and b/testing/scenario_app/ios/Scenarios/ScenariosUITests/golden_platform_view_cliprect_with_transform_iPhone 8_13.0_simulator.png differ diff --git a/testing/scenario_app/ios/Scenarios/ScenariosUITests/golden_platform_view_cliprrect_with_transform_iPhone 8_13.0_simulator.png b/testing/scenario_app/ios/Scenarios/ScenariosUITests/golden_platform_view_cliprrect_with_transform_iPhone 8_13.0_simulator.png new file mode 100644 index 0000000000000..b3adaa22e1b3c Binary files /dev/null and b/testing/scenario_app/ios/Scenarios/ScenariosUITests/golden_platform_view_cliprrect_with_transform_iPhone 8_13.0_simulator.png differ diff --git a/testing/scenario_app/lib/src/platform_view.dart b/testing/scenario_app/lib/src/platform_view.dart index 11d2267942779..7e989b6998ed8 100644 --- a/testing/scenario_app/lib/src/platform_view.dart +++ b/testing/scenario_app/lib/src/platform_view.dart @@ -620,7 +620,7 @@ class PlatformViewClipRRectScenario extends PlatformViewScenario { /// Platform view with clip path. class PlatformViewClipPathScenario extends PlatformViewScenario { - /// Constructs a platform view with clip rrect scenario. 
+ /// Constructs a platform view with clip path scenario. PlatformViewClipPathScenario( PlatformDispatcher dispatcher, { int id = 0, }) : super(dispatcher, id: id); @@ -645,6 +645,135 @@ class PlatformViewClipPathScenario extends PlatformViewScenario { } } +/// Platform view with a clip rect applied after a transform. +class PlatformViewClipRectWithTransformScenario extends PlatformViewScenario { + /// Constructs a platform view with clip rect with transform scenario. + PlatformViewClipRectWithTransformScenario( + PlatformDispatcher dispatcher, { + int id = 0, + }) : super(dispatcher, id: id); + + @override + void onBeginFrame(Duration duration) { + final Matrix4 matrix4 = Matrix4.identity() + ..rotateZ(1) + ..scale(0.5, 0.5, 1.0) + ..translate(1000.0, 100.0); + + final SceneBuilder builder = SceneBuilder()..pushTransform(matrix4.storage); + builder.pushClipRect(const Rect.fromLTRB(100, 100, 400, 400)); + + addPlatformView( + id, + dispatcher: dispatcher, + sceneBuilder: builder, + ); + + // Add a translucent rect that has the same size as the PlatformView. + final PictureRecorder recorder = PictureRecorder(); + final Canvas canvas = Canvas(recorder); + canvas.drawRect( + const Rect.fromLTWH(0, 0, 500, 500), + Paint()..color = const Color(0x22FF0000), + ); + final Picture picture = recorder.endRecording(); + builder.addPicture(Offset.zero, picture); + + finishBuilder(builder); + } +} + +/// Platform view with a clip rrect applied after a transform. +class PlatformViewClipRRectWithTransformScenario extends PlatformViewScenario { + /// Constructs a platform view with clip rrect with transform scenario. + PlatformViewClipRRectWithTransformScenario( + PlatformDispatcher dispatcher, { + int id = 0, + }) : super(dispatcher, id: id); + + @override + void onBeginFrame(Duration duration) { + final Matrix4 matrix4 = Matrix4.identity() + ..rotateZ(1) + ..scale(0.5, 0.5, 1.0) + ..translate(1000.0, 100.0); + + final SceneBuilder builder = SceneBuilder()..pushTransform(matrix4.storage); + builder.pushClipRRect( + RRect.fromLTRBAndCorners( + 100, + 100, + 400, + 400, + topLeft: const Radius.circular(15), + topRight: const Radius.circular(50), + bottomLeft: const Radius.circular(50), + ), + ); + addPlatformView( + id, + dispatcher: dispatcher, + sceneBuilder: builder, + ); + + // Add a translucent rect that has the same size as the PlatformView. + final PictureRecorder recorder = PictureRecorder(); + final Canvas canvas = Canvas(recorder); + canvas.drawRect( + const Rect.fromLTWH(0, 0, 500, 500), + Paint()..color = const Color(0x22FF0000), + ); + final Picture picture = recorder.endRecording(); + builder.addPicture(Offset.zero, picture); + + finishBuilder(builder); + } +} + +/// Platform view with a clip path applied after a transform. +class PlatformViewClipPathWithTransformScenario extends PlatformViewScenario { + /// Constructs a platform view with clip path with transform scenario.
+ PlatformViewClipPathWithTransformScenario( + PlatformDispatcher dispatcher, { + int id = 0, + }) : super(dispatcher, id: id); + + @override + void onBeginFrame(Duration duration) { + final Matrix4 matrix4 = Matrix4.identity() + ..rotateZ(1) + ..scale(0.5, 0.5, 1.0) + ..translate(1000.0, 100.0); + + final SceneBuilder builder = SceneBuilder()..pushTransform(matrix4.storage); + final Path path = Path() + ..moveTo(100, 100) + ..quadraticBezierTo(50, 250, 100, 400) + ..lineTo(350, 400) + ..cubicTo(400, 300, 300, 200, 350, 100) + ..close(); + + builder.pushClipPath(path); + addPlatformView( + id, + dispatcher: dispatcher, + sceneBuilder: builder, + ); + + // Add a translucent rect that has the same size as the PlatformView. + final PictureRecorder recorder = PictureRecorder(); + final Canvas canvas = Canvas(recorder); + canvas.drawRect( + const Rect.fromLTWH(0, 0, 500, 500), + Paint()..color = const Color(0x22FF0000), + ); + final Picture picture = recorder.endRecording(); + builder.addPicture(Offset.zero, picture); + + finishBuilder(builder); + } +} + /// Platform view with transform. class PlatformViewTransformScenario extends PlatformViewScenario { /// Constructs a platform view with transform scenario. diff --git a/testing/scenario_app/lib/src/scenarios.dart b/testing/scenario_app/lib/src/scenarios.dart index cbf4e6f528658..c992eea814fcd 100644 --- a/testing/scenario_app/lib/src/scenarios.dart +++ b/testing/scenario_app/lib/src/scenarios.dart @@ -31,8 +31,11 @@ Map _scenarios = { 'platform_view_multiple_without_overlays': () => MultiPlatformViewWithoutOverlaysScenario(PlatformDispatcher.instance, firstId: _viewId++, secondId: _viewId++), 'platform_view_max_overlays': () => PlatformViewMaxOverlaysScenario(PlatformDispatcher.instance, id: _viewId++), 'platform_view_cliprect': () => PlatformViewClipRectScenario(PlatformDispatcher.instance, id: _viewId++), + 'platform_view_cliprect_with_transform': () => PlatformViewClipRectWithTransformScenario(PlatformDispatcher.instance, id: _viewId++), 'platform_view_cliprrect': () => PlatformViewClipRRectScenario(PlatformDispatcher.instance, id: _viewId++), + 'platform_view_cliprrect_with_transform': () => PlatformViewClipRRectWithTransformScenario(PlatformDispatcher.instance, id: _viewId++), 'platform_view_clippath': () => PlatformViewClipPathScenario(PlatformDispatcher.instance, id: _viewId++), + 'platform_view_clippath_with_transform': () => PlatformViewClipPathWithTransformScenario(PlatformDispatcher.instance, id: _viewId++), 'platform_view_transform': () => PlatformViewTransformScenario(PlatformDispatcher.instance, id: _viewId++), 'platform_view_opacity': () => PlatformViewOpacityScenario(PlatformDispatcher.instance, id: _viewId++), 'platform_view_with_other_backdrop_filter': () => PlatformViewWithOtherBackDropFilter(PlatformDispatcher.instance, id: _viewId++),
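Note (not part of the patch): the core of this change is the containment check that lets ApplyMutators skip building a FlutterClippingMaskView when a clip, once mapped by the accumulated transform, fully covers the PlatformView's final bounding rect. Below is a minimal standalone C++ sketch of that check, assuming Skia's SkRect/SkMatrix headers are available (include paths depend on how Skia is vendored); ClipContainsPlatformView and the sample values are illustrative only and mirror the patch's ClipBoundsContainsPlatformViewBoundingRect.

```cpp
// Sketch only: same idea as ClipBoundsContainsPlatformViewBoundingRect in
// FlutterPlatformViews.mm, with hypothetical names and sample values.
#include <cstdio>

#include "include/core/SkMatrix.h"
#include "include/core/SkRect.h"

// Maps the clip's bounds into the space where the PlatformView is displayed,
// then asks whether the mapped clip fully contains the view's bounding rect.
// If it does, the clip has no visible effect and the mask view can be skipped.
static bool ClipContainsPlatformView(const SkRect& clip_bounds,
                                     const SkRect& platformview_boundingrect,
                                     const SkMatrix& transform_matrix) {
  SkRect transformed_clip_bounds = transform_matrix.mapRect(clip_bounds);
  return transformed_clip_bounds.contains(platformview_boundingrect);
}

int main() {
  // Mirrors the new unit test: a screen scale of 2 and a (5, 5) translation.
  SkMatrix transform = SkMatrix::Scale(2, 2);
  transform.preConcat(SkMatrix::Translate(5, 5));

  // Final bounding rect of a 5x5 PlatformView in the display (physical) space.
  const SkRect platform_view_rect = transform.mapRect(SkRect::MakeXYWH(0, 0, 5, 5));

  // A clip that comfortably covers the view: the mask view can be skipped.
  const SkRect covering_clip = SkRect::MakeXYWH(0, 0, 25, 25);
  // A clip that cuts into the view: the mask view is still needed.
  const SkRect cutting_clip = SkRect::MakeXYWH(0, 0, 4, 4);

  std::printf("skip mask for covering clip: %d\n",
              ClipContainsPlatformView(covering_clip, platform_view_rect, transform));
  std::printf("skip mask for cutting clip: %d\n",
              ClipContainsPlatformView(cutting_clip, platform_view_rect, transform));
  return 0;
}
```

With a screen scale of 2, the view's rect maps to (10, 10)-(20, 20); the 25x25 clip maps to (10, 10)-(60, 60) and contains it, while the 4x4 clip maps to (10, 10)-(18, 18) and does not. The covering case corresponds to the new unit test (no maskView is installed), while the new *_with_transform golden scenarios exercise clips that do cut into the transformed view and therefore still require the mask.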