Flutter iOS Embedder
FlutterView.mm
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#import "flutter/shell/platform/darwin/ios/framework/Source/FlutterView.h"

#include "flutter/fml/platform/darwin/cf_utils.h"
#import "flutter/shell/platform/darwin/ios/framework/Source/SemanticsObject.h"

FLUTTER_ASSERT_ARC

@interface FlutterView ()
@property(nonatomic, weak) id<FlutterViewEngineDelegate> delegate;
@end

@implementation FlutterView {
  BOOL _isWideGamutEnabled;
}

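// The plain UIView initializers are intentionally unavailable: a FlutterView is
// only valid when created with a FlutterViewEngineDelegate through
// -initWithDelegate:opaque:enableWideGamut:.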
- (instancetype)init {
  NSAssert(NO, @"FlutterView must initWithDelegate");
  return nil;
}

- (instancetype)initWithFrame:(CGRect)frame {
  NSAssert(NO, @"FlutterView must initWithDelegate");
  return nil;
}

- (instancetype)initWithCoder:(NSCoder*)aDecoder {
  NSAssert(NO, @"FlutterView must initWithDelegate");
  return nil;
}

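// Returns the UIScreen that currently hosts this view. On iOS 13 and later the
// screen is resolved through the view's window scene; otherwise the main screen
// is used.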
- (UIScreen*)screen {
  if (@available(iOS 13.0, *)) {
    return self.window.windowScene.screen;
  }
  return UIScreen.mainScreen;
}

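// The pixel format of the backing CAMetalLayer when the view is Metal-backed;
// falls back to MTLPixelFormatBGRA8Unorm otherwise.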
- (MTLPixelFormat)pixelFormat {
  if ([self.layer isKindOfClass:[CAMetalLayer class]]) {
// It is a known Apple bug that CAMetalLayer incorrectly reports its supported
// SDKs. It is, in fact, available since iOS 8.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
    CAMetalLayer* layer = (CAMetalLayer*)self.layer;
#pragma clang diagnostic pop
    return layer.pixelFormat;
  }
  return MTLPixelFormatBGRA8Unorm;
}

- (BOOL)isWideGamutSupported {
  FML_DCHECK(self.screen);

  // This predicates the decision on the capabilities of the iOS device's
  // display. This means external displays will not support wide gamut if the
  // device's display doesn't support it. In practice that should never happen.
  return self.screen.traitCollection.displayGamut != UIDisplayGamutSRGB;
}

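// The designated initializer. The delegate is required; `opaque` controls the
// layer's opacity and `isWideGamutEnabled` requests an extended-range surface.
// A minimal usage sketch (illustrative only, assuming `engine` conforms to
// FlutterViewEngineDelegate):
//   [[FlutterView alloc] initWithDelegate:engine opaque:YES enableWideGamut:NO];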
- (instancetype)initWithDelegate:(id<FlutterViewEngineDelegate>)delegate
                          opaque:(BOOL)opaque
                 enableWideGamut:(BOOL)isWideGamutEnabled {
  if (delegate == nil) {
    NSLog(@"FlutterView delegate was nil.");
    return nil;
  }

  self = [super initWithFrame:CGRectNull];

  if (self) {
    _delegate = delegate;
    _isWideGamutEnabled = isWideGamutEnabled;
    self.layer.opaque = opaque;
  }

  return self;
}

static void PrintWideGamutWarningOnce() {
  static BOOL did_print = NO;
  if (did_print) {
    return;
  }
  FML_DLOG(WARNING) << "Rendering wide gamut colors is turned on but isn't "
                       "supported, downgrading the color gamut to sRGB.";
  did_print = YES;
}

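// Configures the backing CAMetalLayer on layout: the layer's contents and
// rasterization scales are matched to the screen scale, and, when wide gamut is
// both requested and supported, the layer is switched to an extended sRGB color
// space with an extended-range pixel format.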
- (void)layoutSubviews {
  if ([self.layer isKindOfClass:[CAMetalLayer class]]) {
// It is a known Apple bug that CAMetalLayer incorrectly reports its supported
// SDKs. It is, in fact, available since iOS 8.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunguarded-availability-new"
    CAMetalLayer* layer = (CAMetalLayer*)self.layer;
#pragma clang diagnostic pop
    CGFloat screenScale = self.screen.scale;
    layer.allowsGroupOpacity = YES;
    layer.contentsScale = screenScale;
    layer.rasterizationScale = screenScale;
    layer.framebufferOnly = flutter::Settings::kSurfaceDataAccessible ? NO : YES;
    if (_isWideGamutEnabled && self.isWideGamutSupported) {
      fml::CFRef<CGColorSpaceRef> srgb(CGColorSpaceCreateWithName(kCGColorSpaceExtendedSRGB));
      layer.colorspace = srgb;
      layer.pixelFormat = MTLPixelFormatBGRA10_XR;
    } else if (_isWideGamutEnabled && !self.isWideGamutSupported) {
      PrintWideGamutWarningOnce();
    }
  }

  [super layoutSubviews];
}

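// The class of the view's backing CALayer is chosen from the rendering API
// selected for this process (for example, a Metal-backed layer when Metal
// rendering is in use).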
+ (Class)layerClass {
  return flutter::GetCoreAnimationLayerClassForRenderingAPI(
      flutter::GetRenderingAPIForProcess(/*force_software=*/false));
}

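// Draws the current Flutter frame into a Core Graphics context: asks the
// delegate for an uncompressed screenshot of the last rasterized frame, wraps
// it in a CGImage with the matching pixel layout, and draws it flipped to
// account for the inverted CoreGraphics coordinate system. This is used, for
// example, when the view hierarchy is rendered into a context for snapshotting.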
- (void)drawLayer:(CALayer*)layer inContext:(CGContextRef)context {
  TRACE_EVENT0("flutter", "SnapshotFlutterView");

  if (layer != self.layer || context == nullptr) {
    return;
  }

  auto screenshot = [_delegate takeScreenshot:flutter::Rasterizer::ScreenshotType::UncompressedImage
                              asBase64Encoded:NO];

  if (!screenshot.data || screenshot.data->isEmpty() || screenshot.frame_size.isEmpty()) {
    return;
  }

  NSData* data = [NSData dataWithBytes:const_cast<void*>(screenshot.data->data())
                                length:screenshot.data->size()];

  fml::CFRef<CGDataProviderRef> image_data_provider(
      CGDataProviderCreateWithCFData(reinterpret_cast<CFDataRef>(data)));

  fml::CFRef<CGColorSpaceRef> colorspace(CGColorSpaceCreateDeviceRGB());

  // Defaults for RGBA8888.
  size_t bits_per_component = 8u;
  size_t bits_per_pixel = 32u;
  size_t bytes_per_row_multiplier = 4u;
  CGBitmapInfo bitmap_info =
      static_cast<CGBitmapInfo>(static_cast<uint32_t>(kCGImageAlphaPremultipliedLast) |
                                static_cast<uint32_t>(kCGBitmapByteOrder32Big));

  switch (screenshot.pixel_format) {
    case flutter::Rasterizer::ScreenshotFormat::kUnknown:
    case flutter::Rasterizer::ScreenshotFormat::kR8G8B8A8UNormInt:
      // Assume unknown is Skia and is RGBA8888. Keep defaults.
      break;
    case flutter::Rasterizer::ScreenshotFormat::kB8G8R8A8UNormInt:
      // Treat this as little endian with the alpha first so that it's read backwards.
      bitmap_info =
          static_cast<CGBitmapInfo>(static_cast<uint32_t>(kCGImageAlphaPremultipliedFirst) |
                                    static_cast<uint32_t>(kCGBitmapByteOrder32Little));
      break;
    case flutter::Rasterizer::ScreenshotFormat::kR16G16B16A16Float:
      bits_per_component = 16u;
      bits_per_pixel = 64u;
      bytes_per_row_multiplier = 8u;
      bitmap_info =
          static_cast<CGBitmapInfo>(static_cast<uint32_t>(kCGImageAlphaPremultipliedLast) |
                                    static_cast<uint32_t>(kCGBitmapFloatComponents) |
                                    static_cast<uint32_t>(kCGBitmapByteOrder16Little));
      break;
  }

  fml::CFRef<CGImageRef> image(CGImageCreate(
      screenshot.frame_size.width(),                             // size_t width
      screenshot.frame_size.height(),                            // size_t height
      bits_per_component,                                        // size_t bitsPerComponent
      bits_per_pixel,                                            // size_t bitsPerPixel,
      bytes_per_row_multiplier * screenshot.frame_size.width(),  // size_t bytesPerRow
      colorspace,                                                // CGColorSpaceRef space
      bitmap_info,                                               // CGBitmapInfo bitmapInfo
      image_data_provider,                                       // CGDataProviderRef provider
      nullptr,                                                   // const CGFloat* decode
      false,                                                     // bool shouldInterpolate
      kCGRenderingIntentDefault                                  // CGColorRenderingIntent intent
      ));

  const CGRect frame_rect =
      CGRectMake(0.0, 0.0, screenshot.frame_size.width(), screenshot.frame_size.height());
  CGContextSaveGState(context);
  // If the CGContext is not a bitmap based context, this returns zero.
  CGFloat height = CGBitmapContextGetHeight(context);
  if (height == 0) {
    height = CGFloat(screenshot.frame_size.height());
  }
  CGContextTranslateCTM(context, 0.0, height);
  CGContextScaleCTM(context, 1.0, -1.0);
  CGContextDrawImage(context, frame_rect, image);
  CGContextRestoreGState(context);
}

- (BOOL)isAccessibilityElement {
  // iOS does not provide an API to query whether voice control
  // is turned on or off. It is likely that at least one of the assistive
  // technologies is turned on if this method is called. If we do not
  // catch it in the notification center, we will catch it here.
  //
  // TODO(chunhtai): Remove this workaround once iOS provides an
  // API to query whether voice control is enabled.
  // https://github.com/flutter/flutter/issues/76808.
  [self.delegate flutterViewAccessibilityDidCall];
  return NO;
}

// Enables keyboard-based navigation when the user turns on
// full keyboard access (FKA), using existing accessibility information.
//
// iOS does not provide any API for monitoring or querying whether FKA is on,
// but it does call isAccessibilityElement if FKA is on,
// so the isAccessibilityElement implementation above will be called
// when the view appears and the accessibility information will most likely
// be available by the time the user starts to interact with the app using FKA.
//
// See SemanticsObject+UIFocusSystem.mm for more details.
- (NSArray<id<UIFocusItem>>*)focusItemsInRect:(CGRect)rect {
  NSObject* rootAccessibilityElement =
      [self.accessibilityElements count] > 0 ? self.accessibilityElements[0] : nil;
  return [rootAccessibilityElement isKindOfClass:[SemanticsObjectContainer class]]
             ? @[ [rootAccessibilityElement accessibilityElementAtIndex:0] ]
             : nil;
}

- (NSArray<id<UIFocusEnvironment>>*)preferredFocusEnvironments {
  // Occasionally we add subviews to FlutterView (text fields, for example).
  // These views shouldn't be directly visible to the iOS focus engine; instead,
  // the focus engine should only interact with the designated focus items
  // (SemanticsObjects).
  return nil;
}

@end