Dataset columns: text (string, lengths 6 to 13.6M), id (string, lengths 13 to 176), metadata (dict), __index_level_0__ (int64, 0 to 1.69k).
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // ignore_for_file: public_member_api_docs part of flutter_gpu; /// A handle to a graphics context. Used to create and manage GPU resources. /// /// To obtain the default graphics context, use [getContext]. base class GpuContext extends NativeFieldWrapperClass1 { /// Creates a new graphics context that corresponds to the default Impeller /// context. GpuContext._createDefault() { final String? error = _initializeDefault(); if (error != null) { throw Exception(error); } } /// A supported [PixelFormat] for textures that store 4-channel colors /// (red/green/blue/alpha). PixelFormat get defaultColorFormat { return PixelFormat.values[_getDefaultColorFormat()]; } /// A supported [PixelFormat] for textures that store stencil information. /// May include a depth channel if a stencil-only format is not available. PixelFormat get defaultStencilFormat { return PixelFormat.values[_getDefaultStencilFormat()]; } /// A supported `PixelFormat` for textures that store both a stencil and depth /// component. This will never return a depth-only or stencil-only texture. /// /// May be [PixelFormat.unknown] if no suitable depth+stencil format was /// found. PixelFormat get defaultDepthStencilFormat { return PixelFormat.values[_getDefaultDepthStencilFormat()]; } /// Allocates a new region of GPU-resident memory. /// /// The [storageMode] must be either [StorageMode.hostVisible] or /// [StorageMode.devicePrivate], otherwise an exception will be thrown. /// /// Returns [null] if the [DeviceBuffer] creation failed. DeviceBuffer? createDeviceBuffer(StorageMode storageMode, int sizeInBytes) { if (storageMode == StorageMode.deviceTransient) { throw Exception( 'DeviceBuffers cannot be set to StorageMode.deviceTransient'); } DeviceBuffer result = DeviceBuffer._initialize(this, storageMode, sizeInBytes); return result.isValid ? result : null; } /// Allocates a new region of host-visible GPU-resident memory, initialized /// with the given [data]. /// /// Given that the buffer will be immediately populated with [data] uploaded /// from the host, the [StorageMode] of the new [DeviceBuffer] is /// automatically set to [StorageMode.hostVisible]. /// /// Returns [null] if the [DeviceBuffer] creation failed. DeviceBuffer? createDeviceBufferWithCopy(ByteData data) { DeviceBuffer result = DeviceBuffer._initializeWithHostData(this, data); return result.isValid ? result : null; } HostBuffer createHostBuffer() { return HostBuffer._initialize(this); } /// Allocates a new texture in GPU-resident memory. /// /// Returns [null] if the [Texture] creation failed. Texture? createTexture(StorageMode storageMode, int width, int height, {PixelFormat format = PixelFormat.r8g8b8a8UNormInt, sampleCount = 1, TextureCoordinateSystem coordinateSystem = TextureCoordinateSystem.renderToTexture, bool enableRenderTargetUsage = true, bool enableShaderReadUsage = true, bool enableShaderWriteUsage = false}) { Texture result = Texture._initialize( this, storageMode, format, width, height, sampleCount, coordinateSystem, enableRenderTargetUsage, enableShaderReadUsage, enableShaderWriteUsage); return result.isValid ? result : null; } /// Create a new command buffer that can be used to submit GPU commands. 
CommandBuffer createCommandBuffer() { return CommandBuffer._(this); } RenderPipeline createRenderPipeline( Shader vertexShader, Shader fragmentShader) { return RenderPipeline._(this, vertexShader, fragmentShader); } /// Associates the default Impeller context with this Context. @Native<Handle Function(Handle)>( symbol: 'InternalFlutterGpu_Context_InitializeDefault') external String? _initializeDefault(); @Native<Int Function(Pointer<Void>)>( symbol: 'InternalFlutterGpu_Context_GetDefaultColorFormat') external int _getDefaultColorFormat(); @Native<Int Function(Pointer<Void>)>( symbol: 'InternalFlutterGpu_Context_GetDefaultStencilFormat') external int _getDefaultStencilFormat(); @Native<Int Function(Pointer<Void>)>( symbol: 'InternalFlutterGpu_Context_GetDefaultDepthStencilFormat') external int _getDefaultDepthStencilFormat(); } /// The default graphics context. final GpuContext gpuContext = GpuContext._createDefault();
engine/lib/gpu/lib/src/context.dart/0
{ "file_path": "engine/lib/gpu/lib/src/context.dart", "repo_id": "engine", "token_count": 1429 }
250
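A minimal sketch of driving the default `GpuContext` from the file above: allocate a host-visible buffer with data, a render-target texture, and a command buffer. The method names and signatures come from the `context.dart` source in this row; the `package:flutter_gpu/gpu.dart` import path is an assumption.

```dart
import 'dart:typed_data';

import 'package:flutter_gpu/gpu.dart' as gpu; // assumed package URI

void allocateResources() {
  // Upload four bytes into a host-visible DeviceBuffer.
  final ByteData data = ByteData(4)..setUint32(0, 0xDEADBEEF);
  final gpu.DeviceBuffer? buffer =
      gpu.gpuContext.createDeviceBufferWithCopy(data);
  if (buffer == null) {
    throw Exception('DeviceBuffer allocation failed');
  }

  // Allocate a device-private texture in the context's preferred color format.
  final gpu.Texture? texture = gpu.gpuContext.createTexture(
    gpu.StorageMode.devicePrivate,
    256,
    256,
    format: gpu.gpuContext.defaultColorFormat,
  );
  if (texture == null) {
    throw Exception('Texture allocation failed');
  }

  // Commands targeting `texture` would then be recorded here and submitted.
  final gpu.CommandBuffer commandBuffer = gpu.gpuContext.createCommandBuffer();
}
```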
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_GPU_SHADER_LIBRARY_H_ #define FLUTTER_LIB_GPU_SHADER_LIBRARY_H_ #include <memory> #include <string> #include <unordered_map> #include "flutter/lib/gpu/export.h" #include "flutter/lib/gpu/shader.h" #include "flutter/lib/ui/dart_wrapper.h" #include "fml/memory/ref_ptr.h" namespace flutter { namespace gpu { /// An immutable collection of shaders loaded from a shader bundle asset. class ShaderLibrary : public RefCountedDartWrappable<ShaderLibrary> { DEFINE_WRAPPERTYPEINFO(); FML_FRIEND_MAKE_REF_COUNTED(ShaderLibrary); public: using ShaderMap = std::unordered_map<std::string, fml::RefPtr<Shader>>; static fml::RefPtr<ShaderLibrary> MakeFromAsset( impeller::Context::BackendType backend_type, const std::string& name, std::string& out_error); static fml::RefPtr<ShaderLibrary> MakeFromShaders(ShaderMap shaders); static fml::RefPtr<ShaderLibrary> MakeFromFlatbuffer( impeller::Context::BackendType backend_type, std::shared_ptr<fml::Mapping> payload); /// Sets a return override for `MakeFromAsset` for testing purposes. static void SetOverride(fml::RefPtr<ShaderLibrary> override_shader_library); fml::RefPtr<Shader> GetShader(const std::string& shader_name, Dart_Handle shader_wrapper) const; ~ShaderLibrary() override; private: /// A global override used to inject a ShaderLibrary when running with the /// Impeller playground. When set, `MakeFromAsset` will always just return /// this library. static fml::RefPtr<ShaderLibrary> override_shader_library_; std::shared_ptr<fml::Mapping> payload_; ShaderMap shaders_; explicit ShaderLibrary(std::shared_ptr<fml::Mapping> payload, ShaderMap shaders); FML_DISALLOW_COPY_AND_ASSIGN(ShaderLibrary); }; } // namespace gpu } // namespace flutter //---------------------------------------------------------------------------- /// Exports /// extern "C" { FLUTTER_GPU_EXPORT extern Dart_Handle InternalFlutterGpu_ShaderLibrary_InitializeWithAsset( Dart_Handle wrapper, Dart_Handle asset_name); FLUTTER_GPU_EXPORT extern Dart_Handle InternalFlutterGpu_ShaderLibrary_GetShader( flutter::gpu::ShaderLibrary* wrapper, Dart_Handle shader_name, Dart_Handle shader_wrapper); } // extern "C" #endif // FLUTTER_LIB_GPU_SHADER_LIBRARY_H_
engine/lib/gpu/shader_library.h/0
{ "file_path": "engine/lib/gpu/shader_library.h", "repo_id": "engine", "token_count": 893 }
251
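A hypothetical Dart-side use of the native `ShaderLibrary` above. The `fromAsset` constructor and the name-indexed lookup are assumptions about the Dart wrapper that binds to the exported symbols in this header (`InternalFlutterGpu_ShaderLibrary_InitializeWithAsset`, `..._GetShader`); the asset and shader names are placeholders.

```dart
import 'package:flutter_gpu/gpu.dart' as gpu; // assumed package URI

gpu.Shader loadUnlitVertexShader() {
  // Assumed wrapper over InternalFlutterGpu_ShaderLibrary_InitializeWithAsset.
  final gpu.ShaderLibrary? library =
      gpu.ShaderLibrary.fromAsset('my_renderer.shaderbundle'); // placeholder asset
  if (library == null) {
    throw Exception('Could not load shader bundle asset.');
  }
  // Assumed name-based lookup over InternalFlutterGpu_ShaderLibrary_GetShader.
  final gpu.Shader? shader = library['UnlitVertex']; // placeholder shader name
  if (shader == null) {
    throw Exception('Shader not found in bundle.');
  }
  return shader;
}
```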
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. part of dart.ui; /// Annotation to keep [Object.toString] overrides as-is instead of removing /// them for size optimization purposes. /// /// For certain uris (currently `dart:ui` and `package:flutter`) the Dart /// compiler will remove [Object.toString] overrides from classes in /// profile/release mode to reduce code size. /// /// Individual classes can opt out of this behavior via the following /// annotations: /// /// * `@pragma('flutter:keep-to-string')` /// * `@pragma('flutter:keep-to-string-in-subtypes')` /// /// See https://github.com/dart-lang/sdk/blob/main/runtime/docs/pragmas.md /// /// For example, in the following class the `toString` method will remain as /// `return _buffer.toString();`, even if the `--delete-tostring-package-uri` /// option would otherwise apply and replace it with `return super.toString()`. /// (By convention, `dart:ui` is usually imported `as ui`, hence the prefix.) /// /// ```dart /// class MyStringBuffer { /// final StringBuffer _buffer = StringBuffer(); /// /// // ... /// /// @ui.keepToString /// @override /// String toString() { /// return _buffer.toString(); /// } /// } /// ``` const pragma keepToString = pragma('flutter:keep-to-string');
engine/lib/ui/annotations.dart/0
{ "file_path": "engine/lib/ui/annotations.dart", "repo_id": "engine", "token_count": 432 }
252
#version 320 es // Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. precision highp float; layout(location = 0) out vec4 fragColor; layout(location = 0) uniform float a; void main() { fragColor = vec4( /* cross product of parallel vectors is a zero vector */ cross(vec3(a, 2.0, 3.0), vec3(2.0, 4.0, 6.0))[0], 1.0, // cross product of parallel vectors is a zero vector cross(vec3(a, 2.0, 3.0), vec3(2.0, 4.0, 6.0))[2], 1.0); }
engine/lib/ui/fixtures/shaders/supported_glsl_op_shaders/68_cross.frag/0
{ "file_path": "engine/lib/ui/fixtures/shaders/supported_glsl_op_shaders/68_cross.frag", "repo_id": "engine", "token_count": 216 }
253
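A worked form of the identity the fixture relies on, assuming the test harness binds the uniform `a` to 1.0 so the two vectors are parallel:

\[
(1,2,3)\times(2,4,6)
  = \bigl(2\cdot 6 - 3\cdot 4,\; 3\cdot 2 - 1\cdot 6,\; 1\cdot 4 - 2\cdot 2\bigr)
  = (0,0,0).
\]

With the x and z components of the cross product both zero, `fragColor` evaluates to \((0, 1, 0, 1)\), i.e. pure green.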
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_UI_ISOLATE_NAME_SERVER_ISOLATE_NAME_SERVER_H_ #define FLUTTER_LIB_UI_ISOLATE_NAME_SERVER_ISOLATE_NAME_SERVER_H_ #include <map> #include <mutex> #include <string> #include "flutter/fml/macros.h" #include "third_party/dart/runtime/include/dart_api.h" namespace flutter { class IsolateNameServer { public: IsolateNameServer(); ~IsolateNameServer(); // Looks up the Dart_Port associated with a given name. Returns ILLEGAL_PORT // if the name does not exist. Dart_Port LookupIsolatePortByName(const std::string& name); // Registers a Dart_Port with a given name. Returns true if registration is // successful, false if the name entry already exists. bool RegisterIsolatePortWithName(Dart_Port port, const std::string& name); // Removes a name to Dart_Port mapping given a name. Returns true if the // mapping was successfully removed, false if the mapping does not exist. bool RemoveIsolateNameMapping(const std::string& name); private: Dart_Port LookupIsolatePortByNameUnprotected(const std::string& name); mutable std::mutex mutex_; std::map<std::string, Dart_Port> port_mapping_; FML_DISALLOW_COPY_AND_ASSIGN(IsolateNameServer); }; } // namespace flutter #endif // FLUTTER_LIB_UI_ISOLATE_NAME_SERVER_ISOLATE_NAME_SERVER_H_
engine/lib/ui/isolate_name_server/isolate_name_server.h/0
{ "file_path": "engine/lib/ui/isolate_name_server/isolate_name_server.h", "repo_id": "engine", "token_count": 479 }
254
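A sketch of the Dart-facing counterpart of the C++ `IsolateNameServer` above: the static `IsolateNameServer` methods in `dart:ui` register, look up, and remove name-to-port mappings across isolates. The port name is an arbitrary example value.

```dart
import 'dart:isolate';
import 'dart:ui' as ui;

const String kPortName = 'com.example.downloader'; // example name

void registerPort(ReceivePort receivePort) {
  // Maps the name to this isolate's SendPort; returns false if the name is taken.
  final bool registered = ui.IsolateNameServer.registerPortWithName(
      receivePort.sendPort, kPortName);
  if (!registered) {
    // Replace a stale mapping left behind by a previous run.
    ui.IsolateNameServer.removePortNameMapping(kPortName);
    ui.IsolateNameServer.registerPortWithName(receivePort.sendPort, kPortName);
  }
}

void sendFromAnotherIsolate(Object message) {
  // Returns null if no port has been registered under this name.
  final SendPort? port = ui.IsolateNameServer.lookupPortByName(kPortName);
  port?.send(message);
}
```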
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/lib/ui/painting/display_list_deferred_image_gpu_skia.h" #include "flutter/fml/make_copyable.h" #include "third_party/skia/include/core/SkColorSpace.h" #include "third_party/skia/include/core/SkImage.h" #include "third_party/skia/include/gpu/ganesh/SkImageGanesh.h" namespace flutter { sk_sp<DlDeferredImageGPUSkia> DlDeferredImageGPUSkia::Make( const SkImageInfo& image_info, sk_sp<DisplayList> display_list, fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate, const fml::RefPtr<fml::TaskRunner>& raster_task_runner, fml::RefPtr<SkiaUnrefQueue> unref_queue) { return sk_sp<DlDeferredImageGPUSkia>(new DlDeferredImageGPUSkia( ImageWrapper::Make(image_info, std::move(display_list), std::move(snapshot_delegate), raster_task_runner, std::move(unref_queue)), raster_task_runner)); } sk_sp<DlDeferredImageGPUSkia> DlDeferredImageGPUSkia::MakeFromLayerTree( const SkImageInfo& image_info, std::unique_ptr<LayerTree> layer_tree, fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate, const fml::RefPtr<fml::TaskRunner>& raster_task_runner, fml::RefPtr<SkiaUnrefQueue> unref_queue) { return sk_sp<DlDeferredImageGPUSkia>(new DlDeferredImageGPUSkia( ImageWrapper::MakeFromLayerTree( image_info, std::move(layer_tree), std::move(snapshot_delegate), raster_task_runner, std::move(unref_queue)), raster_task_runner)); } DlDeferredImageGPUSkia::DlDeferredImageGPUSkia( std::shared_ptr<ImageWrapper> image_wrapper, fml::RefPtr<fml::TaskRunner> raster_task_runner) : image_wrapper_(std::move(image_wrapper)), raster_task_runner_(std::move(raster_task_runner)) {} // |DlImage| DlDeferredImageGPUSkia::~DlDeferredImageGPUSkia() { fml::TaskRunner::RunNowOrPostTask(raster_task_runner_, [image_wrapper = image_wrapper_]() { if (!image_wrapper) { return; } image_wrapper->Unregister(); image_wrapper->DeleteTexture(); }); } // |DlImage| sk_sp<SkImage> DlDeferredImageGPUSkia::skia_image() const { return image_wrapper_ ? image_wrapper_->CreateSkiaImage() : nullptr; }; // |DlImage| std::shared_ptr<impeller::Texture> DlDeferredImageGPUSkia::impeller_texture() const { return nullptr; } // |DlImage| bool DlDeferredImageGPUSkia::isOpaque() const { return image_wrapper_ ? image_wrapper_->image_info().isOpaque() : false; } // |DlImage| bool DlDeferredImageGPUSkia::isTextureBacked() const { return image_wrapper_ ? image_wrapper_->isTextureBacked() : false; } // |DlImage| bool DlDeferredImageGPUSkia::isUIThreadSafe() const { return true; } // |DlImage| SkISize DlDeferredImageGPUSkia::dimensions() const { return image_wrapper_ ? image_wrapper_->image_info().dimensions() : SkISize::MakeEmpty(); } // |DlImage| size_t DlDeferredImageGPUSkia::GetApproximateByteSize() const { return sizeof(*this) + (image_wrapper_ ? image_wrapper_->image_info().computeMinByteSize() : 0); } std::optional<std::string> DlDeferredImageGPUSkia::get_error() const { return image_wrapper_ ? 
image_wrapper_->get_error() : std::nullopt; } std::shared_ptr<DlDeferredImageGPUSkia::ImageWrapper> DlDeferredImageGPUSkia::ImageWrapper::Make( const SkImageInfo& image_info, sk_sp<DisplayList> display_list, fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate, fml::RefPtr<fml::TaskRunner> raster_task_runner, fml::RefPtr<SkiaUnrefQueue> unref_queue) { auto wrapper = std::shared_ptr<ImageWrapper>(new ImageWrapper( image_info, std::move(display_list), std::move(snapshot_delegate), std::move(raster_task_runner), std::move(unref_queue))); wrapper->SnapshotDisplayList(); return wrapper; } std::shared_ptr<DlDeferredImageGPUSkia::ImageWrapper> DlDeferredImageGPUSkia::ImageWrapper::MakeFromLayerTree( const SkImageInfo& image_info, std::unique_ptr<LayerTree> layer_tree, fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate, fml::RefPtr<fml::TaskRunner> raster_task_runner, fml::RefPtr<SkiaUnrefQueue> unref_queue) { auto wrapper = std::shared_ptr<ImageWrapper>( new ImageWrapper(image_info, nullptr, std::move(snapshot_delegate), std::move(raster_task_runner), std::move(unref_queue))); wrapper->SnapshotDisplayList(std::move(layer_tree)); return wrapper; } DlDeferredImageGPUSkia::ImageWrapper::ImageWrapper( const SkImageInfo& image_info, sk_sp<DisplayList> display_list, fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate, fml::RefPtr<fml::TaskRunner> raster_task_runner, fml::RefPtr<SkiaUnrefQueue> unref_queue) : image_info_(image_info), display_list_(std::move(display_list)), snapshot_delegate_(std::move(snapshot_delegate)), raster_task_runner_(std::move(raster_task_runner)), unref_queue_(std::move(unref_queue)) {} void DlDeferredImageGPUSkia::ImageWrapper::OnGrContextCreated() { FML_DCHECK(raster_task_runner_->RunsTasksOnCurrentThread()); SnapshotDisplayList(); } void DlDeferredImageGPUSkia::ImageWrapper::OnGrContextDestroyed() { FML_DCHECK(raster_task_runner_->RunsTasksOnCurrentThread()); DeleteTexture(); } sk_sp<SkImage> DlDeferredImageGPUSkia::ImageWrapper::CreateSkiaImage() const { FML_DCHECK(raster_task_runner_->RunsTasksOnCurrentThread()); if (texture_.isValid() && context_) { return SkImages::BorrowTextureFrom( context_.get(), texture_, kTopLeft_GrSurfaceOrigin, image_info_.colorType(), image_info_.alphaType(), image_info_.refColorSpace()); } return image_; } bool DlDeferredImageGPUSkia::ImageWrapper::isTextureBacked() const { return texture_.isValid(); } void DlDeferredImageGPUSkia::ImageWrapper::SnapshotDisplayList( std::unique_ptr<LayerTree> layer_tree) { fml::TaskRunner::RunNowOrPostTask( raster_task_runner_, fml::MakeCopyable([weak_this = weak_from_this(), layer_tree = std::move(layer_tree)]() mutable { auto wrapper = weak_this.lock(); if (!wrapper) { return; } auto snapshot_delegate = wrapper->snapshot_delegate_; if (!snapshot_delegate) { return; } if (layer_tree) { auto display_list = layer_tree->Flatten(SkRect::MakeWH(wrapper->image_info_.width(), wrapper->image_info_.height()), snapshot_delegate->GetTextureRegistry(), snapshot_delegate->GetGrContext()); wrapper->display_list_ = std::move(display_list); } auto result = snapshot_delegate->MakeSkiaGpuImage( wrapper->display_list_, wrapper->image_info_); if (result->texture.isValid()) { wrapper->texture_ = result->texture; wrapper->context_ = std::move(result->context); wrapper->texture_registry_ = wrapper->snapshot_delegate_->GetTextureRegistry(); wrapper->texture_registry_->RegisterContextListener( reinterpret_cast<uintptr_t>(wrapper.get()), weak_this); } else if (result->image) { wrapper->image_ = 
std::move(result->image); } else { std::scoped_lock lock(wrapper->error_mutex_); wrapper->error_ = result->error; } })); } std::optional<std::string> DlDeferredImageGPUSkia::ImageWrapper::get_error() { std::scoped_lock lock(error_mutex_); return error_; } void DlDeferredImageGPUSkia::ImageWrapper::Unregister() { if (texture_registry_) { texture_registry_->UnregisterContextListener( reinterpret_cast<uintptr_t>(this)); } } void DlDeferredImageGPUSkia::ImageWrapper::DeleteTexture() { if (texture_.isValid()) { unref_queue_->DeleteTexture(texture_); texture_ = GrBackendTexture(); } image_.reset(); context_.reset(); } } // namespace flutter
engine/lib/ui/painting/display_list_deferred_image_gpu_skia.cc/0
{ "file_path": "engine/lib/ui/painting/display_list_deferred_image_gpu_skia.cc", "repo_id": "engine", "token_count": 3623 }
255
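A sketch of the Dart API that produces deferred, texture-backed images of the kind implemented above: `Picture.toImageSync` returns a `ui.Image` immediately while rasterization happens later on the raster thread. The mapping to `DlDeferredImageGPUSkia` specifically is an assumption based on this file's role; the `dart:ui` calls themselves are standard.

```dart
import 'dart:ui' as ui;

ui.Image drawCircleImage(int width, int height) {
  final ui.PictureRecorder recorder = ui.PictureRecorder();
  final ui.Canvas canvas = ui.Canvas(recorder);
  canvas.drawCircle(
    ui.Offset(width / 2, height / 2),
    width / 4,
    ui.Paint()..color = const ui.Color(0xFF2196F3),
  );
  final ui.Picture picture = recorder.endRecording();

  // Returns immediately; the GPU-resident texture is produced asynchronously.
  final ui.Image image = picture.toImageSync(width, height);
  picture.dispose();
  return image;
}
```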
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/lib/ui/painting/image_decoder_impeller.h" #include <memory> #include "flutter/fml/closure.h" #include "flutter/fml/make_copyable.h" #include "flutter/fml/trace_event.h" #include "flutter/impeller/core/allocator.h" #include "flutter/impeller/core/texture.h" #include "flutter/impeller/display_list/dl_image_impeller.h" #include "flutter/impeller/renderer/command_buffer.h" #include "flutter/impeller/renderer/context.h" #include "flutter/lib/ui/painting/image_decoder_skia.h" #include "impeller/base/strings.h" #include "impeller/display_list/skia_conversions.h" #include "impeller/geometry/size.h" #include "third_party/skia/include/core/SkAlphaType.h" #include "third_party/skia/include/core/SkBitmap.h" #include "third_party/skia/include/core/SkColorSpace.h" #include "third_party/skia/include/core/SkColorType.h" #include "third_party/skia/include/core/SkImageInfo.h" #include "third_party/skia/include/core/SkMallocPixelRef.h" #include "third_party/skia/include/core/SkPixelRef.h" #include "third_party/skia/include/core/SkPixmap.h" #include "third_party/skia/include/core/SkPoint.h" #include "third_party/skia/include/core/SkSamplingOptions.h" #include "third_party/skia/include/core/SkSize.h" namespace flutter { class MallocDeviceBuffer : public impeller::DeviceBuffer { public: explicit MallocDeviceBuffer(impeller::DeviceBufferDescriptor desc) : impeller::DeviceBuffer(desc) { data_ = static_cast<uint8_t*>(malloc(desc.size)); } ~MallocDeviceBuffer() override { free(data_); } bool SetLabel(const std::string& label) override { return true; } bool SetLabel(const std::string& label, impeller::Range range) override { return true; } uint8_t* OnGetContents() const override { return data_; } bool OnCopyHostBuffer(const uint8_t* source, impeller::Range source_range, size_t offset) override { memcpy(data_ + offset, source + source_range.offset, source_range.length); return true; } private: uint8_t* data_; FML_DISALLOW_COPY_AND_ASSIGN(MallocDeviceBuffer); }; #ifdef FML_OS_ANDROID static constexpr bool kShouldUseMallocDeviceBuffer = true; #else static constexpr bool kShouldUseMallocDeviceBuffer = false; #endif // FML_OS_ANDROID namespace { /** * Loads the gamut as a set of three points (triangle). */ void LoadGamut(SkPoint abc[3], const skcms_Matrix3x3& xyz) { // rx = rX / (rX + rY + rZ) // ry = rY / (rX + rY + rZ) // gx, gy, bx, and gy are calculated similarly. for (int index = 0; index < 3; index++) { float sum = xyz.vals[index][0] + xyz.vals[index][1] + xyz.vals[index][2]; abc[index].fX = xyz.vals[index][0] / sum; abc[index].fY = xyz.vals[index][1] / sum; } } /** * Calculates the area of the triangular gamut. */ float CalculateArea(SkPoint abc[3]) { const SkPoint& a = abc[0]; const SkPoint& b = abc[1]; const SkPoint& c = abc[2]; return 0.5f * fabsf(a.fX * b.fY + b.fX * c.fY - a.fX * c.fY - c.fX * b.fY - b.fX * a.fY); } // Note: This was calculated from SkColorSpace::MakeSRGB(). 
static constexpr float kSrgbGamutArea = 0.0982f; // Source: // https://source.chromium.org/chromium/_/skia/skia.git/+/393fb1ec80f41d8ad7d104921b6920e69749fda1:src/codec/SkAndroidCodec.cpp;l=67;drc=46572b4d445f41943059d0e377afc6d6748cd5ca;bpv=1;bpt=0 bool IsWideGamut(const SkColorSpace* color_space) { if (!color_space) { return false; } skcms_Matrix3x3 xyzd50; color_space->toXYZD50(&xyzd50); SkPoint rgb[3]; LoadGamut(rgb, xyzd50); float area = CalculateArea(rgb); return area > kSrgbGamutArea; } } // namespace ImageDecoderImpeller::ImageDecoderImpeller( const TaskRunners& runners, std::shared_ptr<fml::ConcurrentTaskRunner> concurrent_task_runner, const fml::WeakPtr<IOManager>& io_manager, bool supports_wide_gamut, const std::shared_ptr<fml::SyncSwitch>& gpu_disabled_switch) : ImageDecoder(runners, std::move(concurrent_task_runner), io_manager), supports_wide_gamut_(supports_wide_gamut), gpu_disabled_switch_(gpu_disabled_switch) { std::promise<std::shared_ptr<impeller::Context>> context_promise; context_ = context_promise.get_future(); runners_.GetIOTaskRunner()->PostTask(fml::MakeCopyable( [promise = std::move(context_promise), io_manager]() mutable { promise.set_value(io_manager ? io_manager->GetImpellerContext() : nullptr); })); } ImageDecoderImpeller::~ImageDecoderImpeller() = default; static SkColorType ChooseCompatibleColorType(SkColorType type) { switch (type) { case kRGBA_F32_SkColorType: return kRGBA_F16_SkColorType; default: return kRGBA_8888_SkColorType; } } static SkAlphaType ChooseCompatibleAlphaType(SkAlphaType type) { return type; } DecompressResult ImageDecoderImpeller::DecompressTexture( ImageDescriptor* descriptor, SkISize target_size, impeller::ISize max_texture_size, bool supports_wide_gamut, const std::shared_ptr<impeller::Allocator>& allocator) { TRACE_EVENT0("impeller", __FUNCTION__); if (!descriptor) { std::string decode_error("Invalid descriptor (should never happen)"); FML_DLOG(ERROR) << decode_error; return DecompressResult{.decode_error = decode_error}; } target_size.set(std::min(static_cast<int32_t>(max_texture_size.width), target_size.width()), std::min(static_cast<int32_t>(max_texture_size.height), target_size.height())); const SkISize source_size = descriptor->image_info().dimensions(); auto decode_size = source_size; if (descriptor->is_compressed()) { decode_size = descriptor->get_scaled_dimensions(std::max( static_cast<float>(target_size.width()) / source_size.width(), static_cast<float>(target_size.height()) / source_size.height())); } //---------------------------------------------------------------------------- /// 1. Decode the image. /// const auto base_image_info = descriptor->image_info(); const bool is_wide_gamut = supports_wide_gamut ? IsWideGamut(base_image_info.colorSpace()) : false; SkAlphaType alpha_type = ChooseCompatibleAlphaType(base_image_info.alphaType()); SkImageInfo image_info; if (is_wide_gamut) { SkColorType color_type = alpha_type == SkAlphaType::kOpaque_SkAlphaType ? 
kBGR_101010x_XR_SkColorType : kRGBA_F16_SkColorType; image_info = base_image_info.makeWH(decode_size.width(), decode_size.height()) .makeColorType(color_type) .makeAlphaType(alpha_type) .makeColorSpace(SkColorSpace::MakeSRGB()); } else { image_info = base_image_info.makeWH(decode_size.width(), decode_size.height()) .makeColorType( ChooseCompatibleColorType(base_image_info.colorType())) .makeAlphaType(alpha_type); } const auto pixel_format = impeller::skia_conversions::ToPixelFormat(image_info.colorType()); if (!pixel_format.has_value()) { std::string decode_error(impeller::SPrintF( "Codec pixel format is not supported (SkColorType=%d)", image_info.colorType())); FML_DLOG(ERROR) << decode_error; return DecompressResult{.decode_error = decode_error}; } auto bitmap = std::make_shared<SkBitmap>(); bitmap->setInfo(image_info); auto bitmap_allocator = std::make_shared<ImpellerAllocator>(allocator); if (descriptor->is_compressed()) { if (!bitmap->tryAllocPixels(bitmap_allocator.get())) { std::string decode_error( "Could not allocate intermediate for image decompression."); FML_DLOG(ERROR) << decode_error; return DecompressResult{.decode_error = decode_error}; } // Decode the image into the image generator's closest supported size. if (!descriptor->get_pixels(bitmap->pixmap())) { std::string decode_error("Could not decompress image."); FML_DLOG(ERROR) << decode_error; return DecompressResult{.decode_error = decode_error}; } } else { auto temp_bitmap = std::make_shared<SkBitmap>(); temp_bitmap->setInfo(base_image_info); auto pixel_ref = SkMallocPixelRef::MakeWithData( base_image_info, descriptor->row_bytes(), descriptor->data()); temp_bitmap->setPixelRef(pixel_ref, 0, 0); if (!bitmap->tryAllocPixels(bitmap_allocator.get())) { std::string decode_error( "Could not allocate intermediate for pixel conversion."); FML_DLOG(ERROR) << decode_error; return DecompressResult{.decode_error = decode_error}; } temp_bitmap->readPixels(bitmap->pixmap()); bitmap->setImmutable(); } if (bitmap->dimensions() == target_size) { auto buffer = bitmap_allocator->GetDeviceBuffer(); if (!buffer) { return DecompressResult{.decode_error = "Unable to get device buffer"}; } return DecompressResult{.device_buffer = buffer, .sk_bitmap = bitmap, .image_info = bitmap->info()}; } //---------------------------------------------------------------------------- /// 2. If the decoded image isn't the requested target size, resize it. /// TRACE_EVENT0("impeller", "DecodeScale"); const auto scaled_image_info = image_info.makeDimensions(target_size); auto scaled_bitmap = std::make_shared<SkBitmap>(); auto scaled_allocator = std::make_shared<ImpellerAllocator>(allocator); scaled_bitmap->setInfo(scaled_image_info); if (!scaled_bitmap->tryAllocPixels(scaled_allocator.get())) { std::string decode_error( "Could not allocate scaled bitmap for image decompression."); FML_DLOG(ERROR) << decode_error; return DecompressResult{.decode_error = decode_error}; } if (!bitmap->pixmap().scalePixels( scaled_bitmap->pixmap(), SkSamplingOptions(SkFilterMode::kLinear, SkMipmapMode::kNone))) { FML_LOG(ERROR) << "Could not scale decoded bitmap data."; } scaled_bitmap->setImmutable(); auto buffer = scaled_allocator->GetDeviceBuffer(); if (!buffer) { return DecompressResult{.decode_error = "Unable to get device buffer"}; } return DecompressResult{.device_buffer = buffer, .sk_bitmap = scaled_bitmap, .image_info = scaled_bitmap->info()}; } /// Only call this method if the GPU is available. 
static std::pair<sk_sp<DlImage>, std::string> UnsafeUploadTextureToPrivate( const std::shared_ptr<impeller::Context>& context, const std::shared_ptr<impeller::DeviceBuffer>& buffer, const SkImageInfo& image_info) { const auto pixel_format = impeller::skia_conversions::ToPixelFormat(image_info.colorType()); if (!pixel_format) { std::string decode_error(impeller::SPrintF( "Unsupported pixel format (SkColorType=%d)", image_info.colorType())); FML_DLOG(ERROR) << decode_error; return std::make_pair(nullptr, decode_error); } impeller::TextureDescriptor texture_descriptor; texture_descriptor.storage_mode = impeller::StorageMode::kDevicePrivate; texture_descriptor.format = pixel_format.value(); texture_descriptor.size = {image_info.width(), image_info.height()}; texture_descriptor.mip_count = texture_descriptor.size.MipCount(); texture_descriptor.compression_type = impeller::CompressionType::kLossy; auto dest_texture = context->GetResourceAllocator()->CreateTexture(texture_descriptor); if (!dest_texture) { std::string decode_error("Could not create Impeller texture."); FML_DLOG(ERROR) << decode_error; return std::make_pair(nullptr, decode_error); } dest_texture->SetLabel( impeller::SPrintF("ui.Image(%p)", dest_texture.get()).c_str()); auto command_buffer = context->CreateCommandBuffer(); if (!command_buffer) { std::string decode_error( "Could not create command buffer for mipmap generation."); FML_DLOG(ERROR) << decode_error; return std::make_pair(nullptr, decode_error); } command_buffer->SetLabel("Mipmap Command Buffer"); auto blit_pass = command_buffer->CreateBlitPass(); if (!blit_pass) { std::string decode_error( "Could not create blit pass for mipmap generation."); FML_DLOG(ERROR) << decode_error; return std::make_pair(nullptr, decode_error); } blit_pass->SetLabel("Mipmap Blit Pass"); blit_pass->AddCopy(impeller::DeviceBuffer::AsBufferView(buffer), dest_texture); if (texture_descriptor.size.MipCount() > 1) { blit_pass->GenerateMipmap(dest_texture); } blit_pass->EncodeCommands(context->GetResourceAllocator()); if (!context->GetCommandQueue()->Submit({command_buffer}).ok()) { std::string decode_error("Failed to submit blit pass command buffer."); FML_DLOG(ERROR) << decode_error; return std::make_pair(nullptr, decode_error); } return std::make_pair( impeller::DlImageImpeller::Make(std::move(dest_texture)), std::string()); } std::pair<sk_sp<DlImage>, std::string> ImageDecoderImpeller::UploadTextureToPrivate( const std::shared_ptr<impeller::Context>& context, const std::shared_ptr<impeller::DeviceBuffer>& buffer, const SkImageInfo& image_info, const std::shared_ptr<SkBitmap>& bitmap, const std::shared_ptr<fml::SyncSwitch>& gpu_disabled_switch) { TRACE_EVENT0("impeller", __FUNCTION__); if (!context) { return std::make_pair(nullptr, "No Impeller context is available"); } if (!buffer) { return std::make_pair(nullptr, "No Impeller device buffer is available"); } std::pair<sk_sp<DlImage>, std::string> result; gpu_disabled_switch->Execute( fml::SyncSwitch::Handlers() .SetIfFalse([&result, context, buffer, image_info] { result = UnsafeUploadTextureToPrivate(context, buffer, image_info); }) .SetIfTrue([&result, context, bitmap, gpu_disabled_switch] { // create_mips is false because we already know the GPU is disabled. 
result = UploadTextureToStorage(context, bitmap, gpu_disabled_switch, impeller::StorageMode::kHostVisible, /*create_mips=*/false); })); return result; } std::pair<sk_sp<DlImage>, std::string> ImageDecoderImpeller::UploadTextureToStorage( const std::shared_ptr<impeller::Context>& context, std::shared_ptr<SkBitmap> bitmap, const std::shared_ptr<fml::SyncSwitch>& gpu_disabled_switch, impeller::StorageMode storage_mode, bool create_mips) { TRACE_EVENT0("impeller", __FUNCTION__); if (!context) { return std::make_pair(nullptr, "No Impeller context is available"); } if (!bitmap) { return std::make_pair(nullptr, "No texture bitmap is available"); } const auto image_info = bitmap->info(); const auto pixel_format = impeller::skia_conversions::ToPixelFormat(image_info.colorType()); if (!pixel_format) { std::string decode_error(impeller::SPrintF( "Unsupported pixel format (SkColorType=%d)", image_info.colorType())); FML_DLOG(ERROR) << decode_error; return std::make_pair(nullptr, decode_error); } impeller::TextureDescriptor texture_descriptor; texture_descriptor.storage_mode = storage_mode; texture_descriptor.format = pixel_format.value(); texture_descriptor.size = {image_info.width(), image_info.height()}; texture_descriptor.mip_count = create_mips ? texture_descriptor.size.MipCount() : 1; auto texture = context->GetResourceAllocator()->CreateTexture(texture_descriptor); if (!texture) { std::string decode_error("Could not create Impeller texture."); FML_DLOG(ERROR) << decode_error; return std::make_pair(nullptr, decode_error); } auto mapping = std::make_shared<fml::NonOwnedMapping>( reinterpret_cast<const uint8_t*>(bitmap->getAddr(0, 0)), // data texture_descriptor.GetByteSizeOfBaseMipLevel(), // size [bitmap](auto, auto) mutable { bitmap.reset(); } // proc ); if (!texture->SetContents(mapping)) { std::string decode_error("Could not copy contents into Impeller texture."); FML_DLOG(ERROR) << decode_error; return std::make_pair(nullptr, decode_error); } texture->SetLabel(impeller::SPrintF("ui.Image(%p)", texture.get()).c_str()); if (texture_descriptor.mip_count > 1u && create_mips) { std::optional<std::string> decode_error; // The only platform that needs mipmapping unconditionally is GL. // GL based platforms never disable GPU access. // This is only really needed for iOS. 
gpu_disabled_switch->Execute(fml::SyncSwitch::Handlers().SetIfFalse( [context, &texture, &decode_error] { auto command_buffer = context->CreateCommandBuffer(); if (!command_buffer) { decode_error = "Could not create command buffer for mipmap generation."; return; } command_buffer->SetLabel("Mipmap Command Buffer"); auto blit_pass = command_buffer->CreateBlitPass(); if (!blit_pass) { decode_error = "Could not create blit pass for mipmap generation."; return; } blit_pass->SetLabel("Mipmap Blit Pass"); blit_pass->GenerateMipmap(texture); blit_pass->EncodeCommands(context->GetResourceAllocator()); if (!context->GetCommandQueue()->Submit({command_buffer}).ok()) { decode_error = "Failed to submit blit pass command buffer."; return; } command_buffer->WaitUntilScheduled(); })); if (decode_error.has_value()) { FML_DLOG(ERROR) << decode_error.value(); return std::make_pair(nullptr, decode_error.value()); } } return std::make_pair(impeller::DlImageImpeller::Make(std::move(texture)), std::string()); } // |ImageDecoder| void ImageDecoderImpeller::Decode(fml::RefPtr<ImageDescriptor> descriptor, uint32_t target_width, uint32_t target_height, const ImageResult& p_result) { FML_DCHECK(descriptor); FML_DCHECK(p_result); // Wrap the result callback so that it can be invoked from any thread. auto raw_descriptor = descriptor.get(); raw_descriptor->AddRef(); ImageResult result = [p_result, // raw_descriptor, // ui_runner = runners_.GetUITaskRunner() // ](auto image, auto decode_error) { ui_runner->PostTask([raw_descriptor, p_result, image, decode_error]() { raw_descriptor->Release(); p_result(std::move(image), decode_error); }); }; concurrent_task_runner_->PostTask( [raw_descriptor, // context = context_.get(), // target_size = SkISize::Make(target_width, target_height), // io_runner = runners_.GetIOTaskRunner(), // result, supports_wide_gamut = supports_wide_gamut_, // gpu_disabled_switch = gpu_disabled_switch_]() { if (!context) { result(nullptr, "No Impeller context is available"); return; } auto max_size_supported = context->GetResourceAllocator()->GetMaxTextureSizeSupported(); // Always decompress on the concurrent runner. auto bitmap_result = DecompressTexture( raw_descriptor, target_size, max_size_supported, supports_wide_gamut, context->GetResourceAllocator()); if (!bitmap_result.device_buffer) { result(nullptr, bitmap_result.decode_error); return; } auto upload_texture_and_invoke_result = [result, context, bitmap_result, gpu_disabled_switch]() { sk_sp<DlImage> image; std::string decode_error; if (!kShouldUseMallocDeviceBuffer && context->GetCapabilities()->SupportsBufferToTextureBlits()) { std::tie(image, decode_error) = UploadTextureToPrivate( context, bitmap_result.device_buffer, bitmap_result.image_info, bitmap_result.sk_bitmap, gpu_disabled_switch); result(image, decode_error); } else { std::tie(image, decode_error) = UploadTextureToStorage( context, bitmap_result.sk_bitmap, gpu_disabled_switch, impeller::StorageMode::kDevicePrivate, /*create_mips=*/true); result(image, decode_error); } }; // TODO(jonahwilliams): // https://github.com/flutter/flutter/issues/123058 Technically we // don't need to post tasks to the io runner, but without this // forced serialization we can end up overloading the GPU and/or // competing with raster workloads. 
io_runner->PostTask(upload_texture_and_invoke_result); }); } ImpellerAllocator::ImpellerAllocator( std::shared_ptr<impeller::Allocator> allocator) : allocator_(std::move(allocator)) {} std::shared_ptr<impeller::DeviceBuffer> ImpellerAllocator::GetDeviceBuffer() const { return buffer_; } bool ImpellerAllocator::allocPixelRef(SkBitmap* bitmap) { if (!bitmap) { return false; } const SkImageInfo& info = bitmap->info(); if (kUnknown_SkColorType == info.colorType() || info.width() < 0 || info.height() < 0 || !info.validRowBytes(bitmap->rowBytes())) { return false; } impeller::DeviceBufferDescriptor descriptor; descriptor.storage_mode = impeller::StorageMode::kHostVisible; descriptor.size = ((bitmap->height() - 1) * bitmap->rowBytes()) + (bitmap->width() * bitmap->bytesPerPixel()); std::shared_ptr<impeller::DeviceBuffer> device_buffer = kShouldUseMallocDeviceBuffer ? std::make_shared<MallocDeviceBuffer>(descriptor) : allocator_->CreateBuffer(descriptor); if (!device_buffer) { return false; } struct ImpellerPixelRef final : public SkPixelRef { ImpellerPixelRef(int w, int h, void* s, size_t r) : SkPixelRef(w, h, s, r) {} ~ImpellerPixelRef() override {} }; auto pixel_ref = sk_sp<SkPixelRef>( new ImpellerPixelRef(info.width(), info.height(), device_buffer->OnGetContents(), bitmap->rowBytes())); bitmap->setPixelRef(std::move(pixel_ref), 0, 0); buffer_ = std::move(device_buffer); return true; } } // namespace flutter
engine/lib/ui/painting/image_decoder_impeller.cc/0
{ "file_path": "engine/lib/ui/painting/image_decoder_impeller.cc", "repo_id": "engine", "token_count": 9293 }
256
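A sketch of the `dart:ui` decode path that reaches `ImageDecoderImpeller` when Impeller is the backend: describe the encoded bytes, request a resized decode, and pull the first frame. Asking for a capped `targetWidth` is what drives the decompress-then-resize logic shown above. The byte source is an assumption; the `dart:ui` calls are standard.

```dart
import 'dart:typed_data';
import 'dart:ui' as ui;

Future<ui.Image> decodeResized(Uint8List encodedBytes) async {
  final ui.ImmutableBuffer buffer =
      await ui.ImmutableBuffer.fromUint8List(encodedBytes);
  final ui.ImageDescriptor descriptor =
      await ui.ImageDescriptor.encoded(buffer);

  // Decode to at most 300 pixels wide, preserving aspect ratio.
  final ui.Codec codec = await descriptor.instantiateCodec(targetWidth: 300);
  final ui.FrameInfo frame = await codec.getNextFrame();

  codec.dispose();
  descriptor.dispose();
  buffer.dispose();
  return frame.image;
}
```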
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_UI_PAINTING_IMAGE_ENCODING_SKIA_H_ #define FLUTTER_LIB_UI_PAINTING_IMAGE_ENCODING_SKIA_H_ #include "flutter/common/task_runners.h" #include "flutter/display_list/image/dl_image.h" #include "flutter/fml/synchronization/sync_switch.h" #include "flutter/lib/ui/snapshot_delegate.h" namespace flutter { void ConvertImageToRasterSkia( const sk_sp<DlImage>& dl_image, std::function<void(sk_sp<SkImage>)> encode_task, const fml::RefPtr<fml::TaskRunner>& raster_task_runner, const fml::RefPtr<fml::TaskRunner>& io_task_runner, const fml::WeakPtr<GrDirectContext>& resource_context, const fml::TaskRunnerAffineWeakPtr<SnapshotDelegate>& snapshot_delegate, const std::shared_ptr<const fml::SyncSwitch>& is_gpu_disabled_sync_switch); } // namespace flutter #endif // FLUTTER_LIB_UI_PAINTING_IMAGE_ENCODING_SKIA_H_
engine/lib/ui/painting/image_encoding_skia.h/0
{ "file_path": "engine/lib/ui/painting/image_encoding_skia.h", "repo_id": "engine", "token_count": 390 }
257
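A sketch of the Dart entry point served by the raster-conversion helper declared above when an image is GPU-resident (an assumption about the exact call path): reading a `ui.Image` back as bytes. PNG is just an example format; raw RGBA formats are also supported.

```dart
import 'dart:typed_data';
import 'dart:ui' as ui;

Future<Uint8List?> encodeToPng(ui.Image image) async {
  final ByteData? byteData =
      await image.toByteData(format: ui.ImageByteFormat.png);
  return byteData?.buffer.asUint8List();
}
```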
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_UI_PAINTING_MATRIX_H_ #define FLUTTER_LIB_UI_PAINTING_MATRIX_H_ #include "third_party/skia/include/core/SkM44.h" #include "third_party/skia/include/core/SkMatrix.h" #include "third_party/tonic/typed_data/typed_list.h" namespace flutter { SkM44 ToSkM44(const tonic::Float64List& matrix4); SkMatrix ToSkMatrix(const tonic::Float64List& matrix4); tonic::Float64List ToMatrix4(const SkMatrix& sk_matrix); } // namespace flutter #endif // FLUTTER_LIB_UI_PAINTING_MATRIX_H_
engine/lib/ui/painting/matrix.h/0
{ "file_path": "engine/lib/ui/painting/matrix.h", "repo_id": "engine", "token_count": 249 }
258
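A sketch of the Dart-side shape these converters consume: a 16-element, column-major `Float64List`, as accepted by `Canvas.transform`, which crosses into native code through converters like `ToSkM44` (the exact call site is an assumption). The translation values are example data.

```dart
import 'dart:typed_data';
import 'dart:ui' as ui;

void translateCanvas(ui.Canvas canvas, double dx, double dy) {
  // Column-major 4x4 identity with a translation in the last column.
  final Float64List matrix4 = Float64List.fromList(<double>[
    1, 0, 0, 0, // column 0
    0, 1, 0, 0, // column 1
    0, 0, 1, 0, // column 2
    dx, dy, 0, 1, // column 3: translation
  ]);
  canvas.transform(matrix4);
}
```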
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_UI_PAINTING_RRECT_H_ #define FLUTTER_LIB_UI_PAINTING_RRECT_H_ #include "third_party/dart/runtime/include/dart_api.h" #include "third_party/skia/include/core/SkRRect.h" #include "third_party/tonic/converter/dart_converter.h" namespace flutter { class RRect { public: SkRRect sk_rrect; bool is_null; }; } // namespace flutter namespace tonic { template <> struct DartConverter<flutter::RRect> { using NativeType = flutter::RRect; using FfiType = Dart_Handle; static constexpr const char* kFfiRepresentation = "Handle"; static constexpr const char* kDartRepresentation = "Object"; static constexpr bool kAllowedInLeafCall = false; static NativeType FromDart(Dart_Handle handle); static NativeType FromArguments(Dart_NativeArguments args, int index, Dart_Handle& exception); static NativeType FromFfi(FfiType val) { return FromDart(val); } static const char* GetFfiRepresentation() { return kFfiRepresentation; } static const char* GetDartRepresentation() { return kDartRepresentation; } static bool AllowedInLeafCall() { return kAllowedInLeafCall; } }; } // namespace tonic #endif // FLUTTER_LIB_UI_PAINTING_RRECT_H_
engine/lib/ui/painting/rrect.h/0
{ "file_path": "engine/lib/ui/painting/rrect.h", "repo_id": "engine", "token_count": 523 }
259
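A sketch of the Dart-side `ui.RRect` that the converter above receives when a rounded rectangle crosses into native code (for example via `Canvas.drawRRect`). The geometry and color values are example data.

```dart
import 'dart:ui' as ui;

void drawRoundedBox(ui.Canvas canvas) {
  final ui.RRect rrect = ui.RRect.fromRectAndRadius(
    const ui.Rect.fromLTWH(10, 10, 200, 100),
    const ui.Radius.circular(16),
  );
  canvas.drawRRect(rrect, ui.Paint()..color = const ui.Color(0xFF4CAF50));
}
```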
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_UI_PLUGINS_CALLBACK_CACHE_H_ #define FLUTTER_LIB_UI_PLUGINS_CALLBACK_CACHE_H_ #include <map> #include <memory> #include <mutex> #include <string> #include "flutter/fml/macros.h" #include "third_party/dart/runtime/include/dart_api.h" namespace flutter { struct DartCallbackRepresentation { std::string name; std::string class_name; std::string library_path; }; class DartCallbackCache { public: static void SetCachePath(const std::string& path); static std::string GetCachePath() { return cache_path_; } static int64_t GetCallbackHandle(const std::string& name, const std::string& class_name, const std::string& library_path); static Dart_Handle GetCallback(int64_t handle); static std::unique_ptr<DartCallbackRepresentation> GetCallbackInformation( int64_t handle); static void LoadCacheFromDisk(); private: static Dart_Handle LookupDartClosure(const std::string& name, const std::string& class_name, const std::string& library_path); static void SaveCacheToDisk(); static std::mutex mutex_; static std::string cache_path_; static std::map<int64_t, DartCallbackRepresentation> cache_; FML_DISALLOW_IMPLICIT_CONSTRUCTORS(DartCallbackCache); }; } // namespace flutter #endif // FLUTTER_LIB_UI_PLUGINS_CALLBACK_CACHE_H_
engine/lib/ui/plugins/callback_cache.h/0
{ "file_path": "engine/lib/ui/plugins/callback_cache.h", "repo_id": "engine", "token_count": 631 }
260
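A sketch of the Dart API backed by the callback cache above: `PluginUtilities` turns a top-level or static function into a stable handle that can be persisted (for example, for background execution) and resolved later. The callback itself is an example.

```dart
import 'dart:ui' as ui;

// Must be a top-level or static function to be representable by name.
void backgroundEntryPoint() {
  print('running in the background');
}

ui.CallbackHandle? storeHandle() {
  // Returns null if the function cannot be represented as a handle.
  return ui.PluginUtilities.getCallbackHandle(backgroundEntryPoint);
}

Function? restoreCallback(ui.CallbackHandle handle) {
  // Returns null if the handle is not present in the cache.
  return ui.PluginUtilities.getCallbackFromHandle(handle);
}
```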
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. part of dart.ui; /// Whether to use the italic type variation of glyphs in the font. /// /// Some modern fonts allow this to be selected in a more fine-grained manner. /// See [FontVariation.italic] for details. /// /// Italic type is distinct from slanted glyphs. To control the slant of a /// glyph, consider the [FontVariation.slant] font feature. enum FontStyle { /// Use the upright ("Roman") glyphs. normal, /// Use glyphs that have a more pronounced angle and typically a cursive style /// ("italic type"). italic, } /// The thickness of the glyphs used to draw the text. /// /// Fonts are typically weighted on a 9-point scale, which, for historical /// reasons, uses the names 100 to 900. In Flutter, these are named `w100` to /// `w900` and have the following conventional meanings: /// /// * [w100]: Thin, the thinnest font weight. /// /// * [w200]: Extra light. /// /// * [w300]: Light. /// /// * [w400]: Normal. The constant [FontWeight.normal] is an alias for this value. /// /// * [w500]: Medium. /// /// * [w600]: Semi-bold. /// /// * [w700]: Bold. The constant [FontWeight.bold] is an alias for this value. /// /// * [w800]: Extra-bold. /// /// * [w900]: Black, the thickest font weight. /// /// For example, the font named "Roboto Medium" is typically exposed as a font /// with the name "Roboto" and the weight [FontWeight.w500]. /// /// Some modern fonts allow the weight to be adjusted in arbitrary increments. /// See [FontVariation.weight] for details. class FontWeight { const FontWeight._(this.index, this.value); /// The encoded integer value of this font weight. final int index; /// The thickness value of this font weight. final int value; /// Thin, the least thick. static const FontWeight w100 = FontWeight._(0, 100); /// Extra-light. static const FontWeight w200 = FontWeight._(1, 200); /// Light. static const FontWeight w300 = FontWeight._(2, 300); /// Normal / regular / plain. static const FontWeight w400 = FontWeight._(3, 400); /// Medium. static const FontWeight w500 = FontWeight._(4, 500); /// Semi-bold. static const FontWeight w600 = FontWeight._(5, 600); /// Bold. static const FontWeight w700 = FontWeight._(6, 700); /// Extra-bold. static const FontWeight w800 = FontWeight._(7, 800); /// Black, the most thick. static const FontWeight w900 = FontWeight._(8, 900); /// The default font weight. static const FontWeight normal = w400; /// A commonly used font weight that is heavier than normal. static const FontWeight bold = w700; /// A list of all the font weights. static const List<FontWeight> values = <FontWeight>[ w100, w200, w300, w400, w500, w600, w700, w800, w900 ]; /// Linearly interpolates between two font weights. /// /// Rather than using fractional weights, the interpolation rounds to the /// nearest weight. /// /// For a smoother animation of font weight, consider using /// [FontVariation.weight] if the font in question supports it. /// /// If both `a` and `b` are null, then this method will return null. Otherwise, /// any null values for `a` or `b` are interpreted as equivalent to [normal] /// (also known as [w400]). 
/// /// The `t` argument represents position on the timeline, with 0.0 meaning /// that the interpolation has not started, returning `a` (or something /// equivalent to `a`), 1.0 meaning that the interpolation has finished, /// returning `b` (or something equivalent to `b`), and values in between /// meaning that the interpolation is at the relevant point on the timeline /// between `a` and `b`. The interpolation can be extrapolated beyond 0.0 and /// 1.0, so negative values and values greater than 1.0 are valid (and can /// easily be generated by curves such as [Curves.elasticInOut]). The result /// is clamped to the range [w100]–[w900]. /// /// Values for `t` are usually obtained from an [Animation<double>], such as /// an [AnimationController]. static FontWeight? lerp(FontWeight? a, FontWeight? b, double t) { if (a == null && b == null) { return null; } return values[_lerpInt((a ?? normal).index, (b ?? normal).index, t).round().clamp(0, 8)]; } @override String toString() { return const <int, String>{ 0: 'FontWeight.w100', 1: 'FontWeight.w200', 2: 'FontWeight.w300', 3: 'FontWeight.w400', 4: 'FontWeight.w500', 5: 'FontWeight.w600', 6: 'FontWeight.w700', 7: 'FontWeight.w800', 8: 'FontWeight.w900', }[index]!; } } /// A feature tag and value that affect the selection of glyphs in a font. /// /// Different fonts support different features. Consider using a tool /// such as <https://wakamaifondue.com/> to examine your fonts to /// determine what features are available. /// /// {@tool sample} /// This example shows usage of several OpenType font features, /// including Small Caps (selected manually using the "smcp" code), /// old-style figures, fractional ligatures, and stylistic sets. /// /// ** See code in examples/api/lib/ui/text/font_feature.0.dart ** /// {@end-tool} /// /// Some fonts also support continuous font variations; see the [FontVariation] /// class. /// /// See also: /// /// * <https://en.wikipedia.org/wiki/List_of_typographic_features>, /// Wikipedia's description of these typographic features. /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/featuretags>, /// Microsoft's registry of these features. class FontFeature { /// Creates a [FontFeature] object, which can be added to a [TextStyle] to /// change how the engine selects glyphs when rendering text. /// /// `feature` is the four-character tag that identifies the feature. /// These tags are specified by font formats such as OpenType. /// /// `value` is the value that the feature will be set to. The behavior /// of the value depends on the specific feature. Many features are /// flags whose value can be 1 (when enabled) or 0 (when disabled). /// /// See <https://docs.microsoft.com/en-us/typography/opentype/spec/featuretags> const FontFeature( this.feature, [ this.value = 1 ] ) : assert(feature.length == 4, 'Feature tag must be exactly four characters long.'), assert(value >= 0, 'Feature value must be zero or a positive integer.'); /// Create a [FontFeature] object that enables the feature with the given tag. const FontFeature.enable(String feature) : this(feature, 1); /// Create a [FontFeature] object that disables the feature with the given tag. const FontFeature.disable(String feature) : this(feature, 0); // Features below should be alphabetic by feature tag. This makes it // easier to determine when a feature is missing so that we avoid // adding duplicates. 
// // The full list is extremely long, and many of the features are // language-specific, or indeed force-enabled for particular locales // by HarfBuzz, so we don't even attempt to be comprehensive here. // Features listed below are those we deemed "interesting enough" to // have their own constructor, mostly on the basis of whether we // could find a font where the feature had a useful effect that // could be demonstrated. // Start of feature tag list. // ------------------------------------------------------------------------ /// Access alternative glyphs. (`aalt`) /// /// This feature selects the given glyph variant for glyphs in the span. /// /// {@tool sample} /// The Raleway font supports several alternate glyphs. The code /// below shows how specific glyphs can be selected. With `aalt` set /// to zero, the default, the normal glyphs are used. With a /// non-zero value, Raleway substitutes small caps for lower case /// letters. With value 2, the lowercase "a" changes to a stemless /// "a", whereas the lowercase "t" changes to a vertical bar instead /// of having a curve. By targeting specific letters in the text /// (using [widgets.Text.rich]), the desired rendering for each glyph can be /// achieved. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_aalt.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_alternative.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#aalt> const FontFeature.alternative(this.value) : feature = 'aalt'; /// Use alternative ligatures to represent fractions. (`afrc`) /// /// When this feature is enabled (and the font supports it), /// sequences of digits separated by U+002F SOLIDUS character (/) or /// U+2044 FRACTION SLASH (⁄) are replaced by ligatures that /// represent the corresponding fraction. These ligatures may differ /// from those used by the [FontFeature.fractions] feature. /// /// This feature overrides all other features. /// /// {@tool sample} /// The Ubuntu Mono font supports the `afrc` feature. It causes digits /// before slashes to become superscripted and digits after slashes to become /// subscripted. This contrasts to the effect seen with [FontFeature.fractions]. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_afrc.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_alternative_fractions.0.dart ** /// {@end-tool} /// /// See also: /// /// * [FontFeature.fractions], which has a similar (but different) effect. /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#afrc> const FontFeature.alternativeFractions() : feature = 'afrc', value = 1; /// Enable contextual alternates. (`calt`) /// /// With this feature enabled, specific glyphs may be replaced by /// alternatives based on nearby text. /// /// {@tool sample} /// The Barriecito font supports the `calt` feature. It causes some /// letters in close proximity to other instances of themselves to /// use different glyphs, to give the appearance of more variation /// in the glyphs, rather than having each letter always use a /// particular glyph. 
/// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_calt.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_contextual_alternates.0.dart ** /// {@end-tool} /// /// See also: /// /// * [FontFeature.randomize], which is more a rarely supported but more /// powerful way to get a similar effect. /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#calt> const FontFeature.contextualAlternates() : feature = 'calt', value = 1; /// Enable case-sensitive forms. (`case`) /// /// Some glyphs, for example parentheses or operators, are typically /// designed to fit nicely with mixed case, or even predominantly /// lowercase, text. When these glyphs are placed near strings of /// capital letters, they appear a little off-center. /// /// This feature, when supported by the font, causes these glyphs to /// be shifted slightly, or otherwise adjusted, so as to form a more /// aesthetically pleasing combination with capital letters. /// /// {@tool sample} /// The Piazzolla font supports the `case` feature. It causes /// parentheses, brackets, braces, guillemets, slashes, bullets, and /// some other glyphs (not shown below) to be shifted up slightly so /// that capital letters appear centered in comparison. When the /// feature is disabled, those glyphs are optimized for use with /// lowercase letters, and so capital letters appear to ride higher /// relative to the punctuation marks. /// /// The difference is very subtle. It may be most obvious when /// examining the square brackets compared to the capital A. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_case.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_case_sensitive_forms.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#case> const FontFeature.caseSensitiveForms() : feature = 'case', value = 1; /// Select a character variant. (`cv01` through `cv99`) /// /// Fonts may have up to 99 character variant sets, numbered 1 /// through 99, each of which can be independently enabled or /// disabled. /// /// Related character variants are typically grouped into stylistic /// sets, controlled by the [FontFeature.stylisticSet] feature /// (`ssXX`). /// /// {@tool sample} /// The Source Code Pro font supports the `cvXX` feature for several /// characters. In the example below, variants 1 (`cv01`), 2 /// (`cv02`), and 4 (`cv04`) are selected. Variant 1 changes the /// rendering of the "a" character, variant 2 changes the lowercase /// "g" character, and variant 4 changes the lowercase "i" and "l" /// characters. There are also variants (not shown here) that /// control the rendering of various greek characters such as beta /// and theta. /// /// Notably, this can be contrasted with the stylistic sets, where /// the set which affects the "a" character also affects beta, and /// the set which affects the "g" character also affects theta and /// delta. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_cvXX.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_character_variant.0.dart ** /// {@end-tool} /// /// See also: /// /// * [FontFeature.stylisticSet], which allows for groups of characters /// variants to be selected at once, as opposed to individual character variants. 
/// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99> factory FontFeature.characterVariant(int value) { assert(value >= 1); assert(value <= 99); return FontFeature('cv${value.toString().padLeft(2, "0")}'); } /// Display digits as denominators. (`dnom`) /// /// This is typically used automatically by the font rendering /// system as part of the implementation of `frac` for the denominator /// part of fractions (see [FontFeature.fractions]). /// /// {@tool sample} /// The Piazzolla font supports the `dnom` feature. It causes /// the digits to be rendered smaller and near the bottom of the EM box. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_dnom.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_denominator.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#dnom> const FontFeature.denominator() : feature = 'dnom', value = 1; /// Use ligatures to represent fractions. (`afrc`) /// /// When this feature is enabled (and the font supports it), /// sequences of digits separated by U+002F SOLIDUS character (/) or /// U+2044 FRACTION SLASH (⁄) are replaced by ligatures that /// represent the corresponding fraction. /// /// This feature may imply the [FontFeature.numerators] and /// [FontFeature.denominator] features. /// /// {@tool sample} /// The Ubuntu Mono font supports the `frac` feature. It causes /// digits around slashes to be turned into dedicated fraction /// glyphs. This contrasts to the effect seen with /// [FontFeature.alternativeFractions]. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_frac.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_fractions.0.dart ** /// {@end-tool} /// /// See also: /// /// * [FontFeature.alternativeFractions], which has a similar (but different) effect. /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_fj#frac> const FontFeature.fractions() : feature = 'frac', value = 1; /// Use historical forms. (`hist`) /// /// Some fonts have alternatives for letters whose forms have changed /// through the ages. In the Latin alphabet, this is common for /// example with the long-form "s" or the Fraktur "k". This feature enables /// those alternative glyphs. /// /// This does not enable legacy ligatures, only single-character alternatives. /// To enable historical ligatures, use [FontFeature.historicalLigatures]. /// /// This feature may override other glyph-substitution features. /// /// {@tool sample} /// The Cardo font supports the `hist` feature specifically for the /// letter "s": it changes occurrences of that letter for the glyph /// used by U+017F LATIN SMALL LETTER LONG S. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_historical.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_historical_forms.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_fj#hist> const FontFeature.historicalForms() : feature = 'hist', value = 1; /// Use historical ligatures. (`hlig`) /// /// Some fonts support ligatures that have fallen out of favor today, /// but were historically in common use. This feature enables those /// ligatures. /// /// For example, the "long s" glyph was historically typeset with /// characters such as "t" and "h" as a single ligature. 
/// /// This does not enable the legacy forms, only ligatures. See /// [FontFeature.historicalForms] to enable single characters to be /// replaced with their historical alternatives. Combining both is /// usually desired since the ligatures typically apply specifically /// to characters that have historical forms as well. For example, /// the historical forms feature might replace the "s" character /// with the "long s" (ſ) character, while the historical ligatures /// feature might specifically apply to cases where "long s" is /// followed by other characters such as "t". In such cases, without /// the historical forms being enabled, the ligatures would only /// apply when the "long s" is used explicitly. /// /// This feature may override other glyph-substitution features. /// /// {@tool sample} /// The Cardo font supports the `hlig` feature. It has legacy /// ligatures for "VI" and "NT", and various ligatures involving the /// "long s". In the example below, both historical forms (`hist 1`) /// and historical ligatures (`hlig 1`) are enabled, so, for /// instance, "fish" becomes "fiſh" which is then rendered using a /// ligature for the last two characters. /// /// Similarly, the word "business" is turned into "buſineſſ" by /// `hist`, and the `ſi` and `ſſ` pairs are ligated by `hlig`. /// Observe in particular the position of the dot of the "i" in /// "business" in the various combinations of these features. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_historical.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_historical_ligatures.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_fj#hlig> const FontFeature.historicalLigatures() : feature = 'hlig', value = 1; /// Use lining figures. (`lnum`) /// /// Some fonts have digits that, like lowercase latin letters, have /// both descenders and ascenders. In some situations, especially in /// conjunction with capital letters, this leads to an aesthetically /// questionable irregularity. Lining figures, on the other hand, /// have a uniform height, and align with the baseline and the /// height of capital letters. Conceptually, they can be thought of /// as "capital digits". /// /// This feature may conflict with [FontFeature.oldstyleFigures]. /// /// {@tool sample} /// The Sorts Mill Goudy font supports the `lnum` feature. It causes /// digits to fit more seamlessly with capital letters. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_lnum.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_lining_figures.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_ko#lnum> const FontFeature.liningFigures() : feature = 'lnum', value = 1; /// Use locale-specific glyphs. (`locl`) /// /// Some characters, most notably those in the Unicode Han /// Unification blocks, vary in presentation based on the locale in /// use. For example, the ideograph for "grass" (U+8349, 草) has a /// broken top line in Traditional Chinese, but a solid top line in /// Simplified Chinese, Japanese, Korean, and Vietnamese. This kind /// of variation also exists with other alphabets, for example /// Cyrillic characters as used in the Bulgarian and Serbian /// alphabets vary from their Russian counterparts. 
/// /// A particular font may default to the forms for the locale for /// which it was constructed, but still support alternative forms /// for other locales. When this feature is enabled, the locale (as /// specified using [painting.TextStyle.locale], for instance) is /// used to determine which glyphs to use when locale-specific /// alternatives exist. Disabling this feature causes the font /// rendering to ignore locale information and only use the default /// glyphs. /// /// This feature is enabled by default. Using /// `FontFeature.localeAware(enable: false)` disables the /// locale-awareness. (So does not specifying the locale in the /// first place, of course.) /// /// {@tool sample} /// The Noto Sans CJK font supports the `locl` feature for CJK characters. /// In this example, the `localeAware` feature is not explicitly used, as it is /// enabled by default. This example instead shows how to set the locale, /// thus demonstrating how Noto Sans adapts the glyph shapes to the locale. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_locl.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_locale_aware.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_ko#locl> /// * <https://en.wikipedia.org/wiki/Han_unification> /// * <https://en.wikipedia.org/wiki/Cyrillic_script> const FontFeature.localeAware({ bool enable = true }) : feature = 'locl', value = enable ? 1 : 0; /// Display alternative glyphs for numerals (alternate annotation forms). (`nalt`) /// /// Replaces glyphs used in numbering lists (e.g. 1, 2, 3...; or a, b, c...) with notational /// variants that might be more typographically interesting. /// /// Fonts sometimes support multiple alternatives, and the argument /// selects the set to use (a positive integer, or 0 to disable the /// feature). The default set if none is specified is 1. /// /// {@tool sample} /// The Gothic A1 font supports several notational variant sets via /// the `nalt` feature. /// /// Set 1 changes the spacing of the glyphs. Set 2 parenthesizes the /// latin letters and reduces the numerals to subscripts. Set 3 /// circles the glyphs. Set 4 parenthesizes the digits. Set 5 uses /// reverse-video circles for the digits. Set 7 superscripts the /// digits. /// /// The code below shows how to select set 3. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_nalt.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_notational_forms.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_ko#nalt> const FontFeature.notationalForms([this.value = 1]) : feature = 'nalt', assert(value >= 0); /// Display digits as numerators. (`numr`) /// /// This is typically used automatically by the font rendering /// system as part of the implementation of `frac` for the numerator /// part of fractions (see [FontFeature.fractions]). /// /// {@tool sample} /// The Piazzolla font supports the `numr` feature. It causes /// the digits to be rendered smaller and near the top of the EM box. 
/// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_numr.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_numerators.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_ko#numr> const FontFeature.numerators() : feature = 'numr', value = 1; /// Use old style figures. (`onum`) /// /// Some fonts have variants of the figures (e.g. the digit 9) that, /// when this feature is enabled, render with descenders under the /// baseline instead of being entirely above the baseline. If the /// default digits are lining figures, this allows the selection of /// digits that fit better with mixed case (uppercase and lowercase) /// text. /// /// This overrides [FontFeature.slashedZero] and may conflict with /// [FontFeature.liningFigures]. /// /// {@tool sample} /// The Piazzolla font supports the `onum` feature. It causes /// digits to extend below the baseline. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_onum.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_oldstyle_figures.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_ko#onum> /// * <https://en.wikipedia.org/wiki/Text_figures> const FontFeature.oldstyleFigures() : feature = 'onum', value = 1; /// Use ordinal forms for alphabetic glyphs. (`ordn`) /// /// Some fonts have variants of the alphabetic glyphs intended for /// use after numbers when expressing ordinals, as in "1st", "2nd", /// "3rd". This feature enables those alternative glyphs. /// /// This may override other features that substitute glyphs. /// /// {@tool sample} /// The Piazzolla font supports the `ordn` feature. It causes /// alphabetic glyphs to become smaller and superscripted. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_ordn.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_ordinal_forms.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_ko#ordn> const FontFeature.ordinalForms() : feature = 'ordn', value = 1; /// Use proportional (varying width) figures. (`pnum`) /// /// For fonts that have both proportional and tabular (monospace) figures, /// this enables the proportional figures. /// /// This is mutually exclusive with [FontFeature.tabularFigures]. /// /// The default behavior varies from font to font. /// /// {@tool sample} /// The Kufam font supports the `pnum` feature. It causes the digits /// to become proportionally-sized, rather than all being the same /// width. In this font this is especially noticeable with the digit /// "1": normally, the 1 has very noticeable serifs in this /// sans-serif font, but with the proportional figures enabled, /// the digit becomes much narrower. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_pnum.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_proportional_figures.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#pnum> const FontFeature.proportionalFigures() : feature = 'pnum', value = 1; /// Randomize the alternate forms used in text.
(`rand`) /// /// For example, this can be used with suitably-prepared handwriting fonts to /// vary the forms used for each character, so that, for instance, the word /// "cross-section" would be rendered with two different "c"s, two different "o"s, /// and three different "s"s. /// /// Contextual alternates ([FontFeature.contextualAlternates]) /// provide a similar effect in some fonts, without using /// randomness. /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#rand> const FontFeature.randomize() : feature = 'rand', value = 1; /// Enable stylistic alternates. (`salt`) /// /// Some fonts have alternative forms that are not tied to a /// particular purpose (such as being historical forms, or /// contextually relevant alternatives, or ligatures, etc). This /// font feature enables these purely stylistic alternatives. /// /// This may override other features that substitute glyphs. /// /// {@tool sample} /// The Source Code Pro font supports the `salt` feature. It causes /// some glyphs to be rendered differently, for example the "a" and /// "g" glyphs change from their typographically common /// double-storey forms to simpler single-storey forms, the dollar /// sign's line changes from discontinuous to continuous (and is /// angled), and the "0" rendering changes from a center dot to a /// slash. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_salt.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_stylistic_alternates.0.dart ** /// {@end-tool} /// /// See also: /// /// * [FontFeature.contextualAlternates], which enables alternates specific to certain contexts. /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#salt> const FontFeature.stylisticAlternates() : feature = 'salt', value = 1; /// Use scientific inferiors. (`sinf`) /// /// Some fonts have variants of the figures (e.g. the digit 2) that, /// when this feature is enabled, render in a manner more /// appropriate for subscripted digits ("inferiors") used in /// scientific contexts, e.g. the subscripts in chemical formulae. /// /// This may override other features that substitute glyphs. /// /// {@tool sample} /// The Piazzolla font supports the `sinf` feature. It causes /// digits to be smaller and subscripted. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_sinf.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_scientific_inferiors.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#sinf> const FontFeature.scientificInferiors() : feature = 'sinf', value = 1; /// Select a stylistic set. (`ss01` through `ss20`) /// /// Fonts may have up to 20 stylistic sets, numbered 1 through 20, /// each of which can be independently enabled or disabled. /// /// For more fine-grained control, in some fonts individual /// character variants can also be controlled by the /// [FontFeature.characterVariant] feature (`cvXX`). /// /// {@tool sample} /// The Source Code Pro font supports the `ssXX` feature for several /// sets. In the example below, stylistic sets 2 (`ss02`), 3 /// (`ss03`), and 4 (`ss04`) are selected. Stylistic set 2 changes /// the rendering of the "a" character and the beta character, /// stylistic set 3 changes the lowercase "g", theta, and delta /// characters, and stylistic set 4 changes the lowercase "i" and /// "l" characters.
/// /// This font also supports character variants (see /// [FontFeature.characterVariant]). /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_ssXX_1.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_stylistic_set.0.dart ** /// {@end-tool} /// /// {@tool sample} /// The Piazzolla font supports the `ssXX` feature for more /// elaborate stylistic effects. Set 1 turns some Latin characters /// into Roman numerals, set 2 enables some ASCII characters to be /// used to create pretty arrows, and so forth. /// /// _These_ stylistic sets do _not_ correspond to character variants. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_ssXX_2.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_stylistic_set.1.dart ** /// {@end-tool} /// /// See also: /// /// * [FontFeature.characterVariant], which allows for individual character /// variants to be selected, as opposed to entire sets. /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx> factory FontFeature.stylisticSet(int value) { assert(value >= 1); assert(value <= 20); return FontFeature('ss${value.toString().padLeft(2, "0")}'); } /// Enable subscripts. (`subs`) /// /// This feature causes some fonts to change some glyphs to their subscripted form. /// /// It typically does not affect all glyphs, and so is not appropriate for generally causing /// all text to be subscripted. /// /// This may override other features that substitute glyphs. /// /// {@tool sample} /// The Piazzolla font supports the `subs` feature. It causes /// digits to be smaller and subscripted. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_subs.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_subscripts.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#subs> /// * [FontFeature.scientificInferiors], which is similar but intended specifically for /// subscripts used in scientific contexts. /// * [FontFeature.superscripts], which is similar but for superscripting. const FontFeature.subscripts() : feature = 'subs', value = 1; /// Enable superscripts. (`sups`) /// /// This feature causes some fonts to change some glyphs to their /// superscripted form. This may be more than just changing their /// position. For example, digits might change to lining figures /// (see [FontFeature.liningFigures]) in addition to being raised /// and shrunk. /// /// It typically does not affect all glyphs, and so is not /// appropriate for generally causing all text to be superscripted. /// /// This may override other features that substitute glyphs. /// /// {@tool sample} /// The Sorts Mill Goudy font supports the `sups` feature. It causes /// digits to be smaller, superscripted, and changes them to lining /// figures (so they are all the same height). /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_sups.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_superscripts.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#sups> /// * [FontFeature.subscripts], which is similar but for subscripting. const FontFeature.superscripts() : feature = 'sups', value = 1; /// Enable swash glyphs. (`swsh`) /// /// Some fonts have beautiful flourishes on some characters.
These /// come in many forms, such as exaggerated serifs, long tails, long /// entry strokes, or other forms of decorative extensions to the /// base character. /// /// This feature enables the rendering of these flourishes. Some /// fonts have many swashes per character; the argument, if /// specified, selects which swash to use (0 disables them /// altogether). /// /// Some fonts have an absurd number of alternative swashes. For /// example, Adobe's Poetica famously has 63 different ampersand /// forms available through this feature! /// /// {@tool sample} /// The BioRhyme Expanded font supports the `swsh` feature specifically /// for the capital "Q" and "R" glyphs and the ampersand. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_swsh.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_swash.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#swsh> /// * <https://en.wikipedia.org/wiki/Swash_(typography)> const FontFeature.swash([this.value = 1]) : feature = 'swsh', assert(value >= 0); /// Use tabular (monospace) figures. (`tnum`) /// /// For fonts that have both proportional (varying width) and tabular figures, /// this enables the tabular figures. Tabular figures are monospaced (all the /// same width), so that they align in tables of figures. /// /// This is mutually exclusive with [FontFeature.proportionalFigures]. /// /// The default behavior varies from font to font. /// /// {@tool sample} /// The Piazzolla font supports the `tnum` feature. It causes the /// digits to become uniformly-sized, rather than having variable /// widths. In this font this is especially noticeable with the /// digit "1"; with tabular figures enabled, the "1" digit is more /// widely spaced. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_tnum.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_tabular_figures.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tnum> const FontFeature.tabularFigures() : feature = 'tnum', value = 1; /// Use the slashed zero. (`zero`) /// /// Some fonts contain both a circular zero and a zero with a slash. This /// enables the use of the latter form. /// /// This is overridden by [FontFeature.oldstyleFigures]. /// /// {@tool sample} /// The Source Code Pro font supports the `zero` feature. It causes the /// zero digit to be drawn with a slash rather than the default rendering, /// which in this case has a dot through the zero rather than a slash. /// /// ![](https://flutter.github.io/assets-for-api-docs/assets/dart-ui/font_feature_zero.png) /// /// ** See code in examples/api/lib/ui/text/font_feature.font_feature_slashed_zero.0.dart ** /// {@end-tool} /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/features_uz#zero> const FontFeature.slashedZero() : feature = 'zero', value = 1; // ------------------------------------------------------------------------ // End of feature tags list. /// The tag that identifies the effect of this feature. Must consist of 4 /// ASCII characters (typically lowercase letters). /// /// These features are defined in a registry maintained by Microsoft: /// <https://docs.microsoft.com/en-us/typography/opentype/spec/featuretags> final String feature; /// The value assigned to this feature. /// /// Must be a positive integer. 
Many features are Boolean values that accept /// values of either 0 (feature is disabled) or 1 (feature is enabled). Other /// features have a bound range of values (which may be documented in these /// API docs for features that have dedicated constructors, and are generally /// documented in the official registry). In some cases the precise supported /// range depends on the font. /// /// See also: /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/featurelist> final int value; static const int _kEncodedSize = 8; void _encode(ByteData byteData) { assert(feature.codeUnits.every((int c) => c >= 0x20 && c <= 0x7F)); for (int i = 0; i < 4; i++) { byteData.setUint8(i, feature.codeUnitAt(i)); } byteData.setInt32(4, value, _kFakeHostEndian); } @override bool operator ==(Object other) { if (other.runtimeType != runtimeType) { return false; } return other is FontFeature && other.feature == feature && other.value == value; } @override int get hashCode => Object.hash(feature, value); @override String toString() => "FontFeature('$feature', $value)"; } /// An axis tag and value that can be used to customize variable fonts. /// /// Some fonts are variable fonts that can generate a range of different /// font faces by altering the values of the font's design axes. /// /// For example: /// /// ```dart /// const TextStyle(fontVariations: <ui.FontVariation>[ui.FontVariation('wght', 800.0)]) /// ``` /// /// Font variations are distinct from font features, as exposed by the /// [FontFeature] class. Where features can be enabled or disabled in a discrete /// manner, font variations provide a continuous axis of control. /// /// See also: /// /// * <https://learn.microsoft.com/en-us/typography/opentype/spec/dvaraxisreg#registered-axis-tags>, /// which lists registered axis tags. /// /// * <https://docs.microsoft.com/en-us/typography/opentype/spec/otvaroverview>, /// an overview of the font variations technology. class FontVariation { /// Creates a [FontVariation] object, which can be added to a [TextStyle] to /// change the variable attributes of a font. /// /// `axis` is the four-character tag that identifies the design axis. /// OpenType lists the [currently registered axis /// tags](https://docs.microsoft.com/en-us/typography/opentype/spec/dvaraxisreg). /// /// `value` is the value that the axis will be set to. The behavior /// depends on how the font implements the axis. const FontVariation( this.axis, this.value, ) : assert(axis.length == 4, 'Axis tag must be exactly four characters long.'), assert(value >= -32768.0 && value < 32768.0, 'Value must be representable as a signed 16.16 fixed-point number, i.e. it must be in this range: -32768.0 ≤ value < 32768.0'); // Constructors below should be alphabetic by axis tag. This makes it easier // to determine when an axis is missing so that we avoid adding duplicates. // Start of axis tag list. // ------------------------------------------------------------------------ /// Variable font style. (`ital`) /// /// Varies the style of glyphs in the font between normal and italic. /// /// Values must in the range 0.0 (meaning normal, or Roman, as in /// [FontStyle.normal]) to 1.0 (meaning fully italic, as in /// [FontStyle.italic]). /// /// This is distinct from [FontVariation.slant], which leans the characters /// without changing the font style. 
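/// /// For example, the fully italic design could be requested like this (a /// minimal sketch in the style of the class-level example above; the /// `TextStyle` shown is the framework's [painting.TextStyle], which accepts a /// `fontVariations` list): /// /// ```dart /// const TextStyle(fontVariations: <ui.FontVariation>[ui.FontVariation.italic(1.0)]) /// ```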
/// /// See also: /// /// * <https://learn.microsoft.com/en-us/typography/opentype/spec/dvaraxistag_ital> const FontVariation.italic(this.value) : assert(value >= 0.0), assert(value <= 1.0), axis = 'ital'; /// Optical size optimization. (`opsz`) /// /// Changes the rendering of the font to be optimized for the given text size. /// Normally, the optical size of the font will be derived from the font size. /// /// This feature could be used when the text represents a particular physical /// font size, for example text in the representation of a hardcopy magazine, /// which does not correspond to the actual font size being used to render the /// text. By setting the optical size explicitly, font variations that might /// be applied as the text is zoomed will be fixed at the size being /// represented by the text. /// /// This feature could also be used to smooth animations. If a font varies its /// rendering as the font size is adjusted, it may appear to "quiver" (or, one /// might even say, "flutter") if the font size is animated. By setting a /// fixed optical size, the rendering can be fixed to one particular style as /// the text size animates. /// /// Values must be greater than zero, and are interpreted as points. A point /// is 1/72 of an inch, or 1.333 logical pixels (96/72). /// /// See also: /// /// * <https://learn.microsoft.com/en-us/typography/opentype/spec/dvaraxistag_opsz> const FontVariation.opticalSize(this.value) : assert(value > 0.0), axis = 'opsz'; /// Variable font slant. (`slnt`) /// /// Varies the slant of glyphs in the font. /// /// Values must be greater than -90.0 and less than +90.0, and represent the /// angle in _counter-clockwise_ degrees relative to "normal", at 0.0. /// /// For example, to lean the glyphs forward by 45 degrees, one would use /// `FontVariation.slant(-45.0)`. /// /// This is distinct from [FontVariation.italic], in that slant leans the /// characters without changing the font style. /// /// See also: /// /// * <https://learn.microsoft.com/en-us/typography/opentype/spec/dvaraxistag_slnt> const FontVariation.slant(this.value) : assert(value > -90.0), assert(value < 90.0), axis = 'slnt'; /// Variable font width. (`wdth`) /// /// Varies the width of glyphs in the font. /// /// Values must be greater than zero, with no upper limit. 100.0 represents /// the "normal" width. Smaller values are "condensed", greater values are /// "extended". /// /// See also: /// /// * <https://learn.microsoft.com/en-us/typography/opentype/spec/dvaraxistag_wdth> const FontVariation.width(this.value) : assert(value >= 0.0), axis = 'wdth'; /// Variable font weight. (`wght`) /// /// Varies the stroke thickness of the font, similar to [FontWeight] but on a /// continuous axis. /// /// Values must be in the range 1..1000, and are to be interpreted in a manner /// consistent with the values of [FontWeight]. For instance, `400` is the /// "normal" weight, and `700` is "bold". /// /// See also: /// /// * <https://learn.microsoft.com/en-us/typography/opentype/spec/dvaraxistag_wght> const FontVariation.weight(this.value) : assert(value >= 1), assert(value <= 1000), axis = 'wght'; // ------------------------------------------------------------------------ // End of axis tags list. /// The tag that identifies the design axis. /// /// An axis tag must consist of 4 ASCII characters. final String axis; /// The value assigned to this design axis. /// /// The range of usable values depends on the specification of the axis.
/// /// While this property is represented as a [double] in this API /// ([binary64](https://en.wikipedia.org/wiki/Double-precision_floating-point_format)), /// fonts use the fixed-point 16.16 format to represent the value of font /// variations. This means that the actual range is -32768.0 to approximately /// 32767.999985 and in principle the smallest increment between two values is /// approximately 0.000015 (1/65536). /// /// Unfortunately for technical reasons the value is first converted to the /// [binary32 floating point /// format](https://en.wikipedia.org/wiki/Single-precision_floating-point_format), /// which only has 24 bits of precision. This means that for values outside /// the range -256.0 to 256.0, the smallest increment is larger than what is /// technically supported by OpenType. At the extreme edge of the range, the /// smallest increment is only approximately ±0.002. final double value; static const int _kEncodedSize = 8; void _encode(ByteData byteData) { assert(axis.codeUnits.every((int c) => c >= 0x20 && c <= 0x7F)); for (int i = 0; i < 4; i++) { byteData.setUint8(i, axis.codeUnitAt(i)); } byteData.setFloat32(4, value, _kFakeHostEndian); } @override bool operator ==(Object other) { if (other.runtimeType != runtimeType) { return false; } return other is FontVariation && other.axis == axis && other.value == value; } @override int get hashCode => Object.hash(axis, value); /// Linearly interpolates between two font variations. /// /// If the two variations have different axis tags, the interpolation switches /// abruptly from one to the other at t=0.5. Otherwise, the value is /// interpolated (see [lerpDouble]). /// /// The value is not clamped to the valid values of the axis tag, but it is /// clamped to the valid range of font variation values in general (the range /// of signed 16.16 fixed point numbers). /// /// The `t` argument represents position on the timeline, with 0.0 meaning /// that the interpolation has not started, returning `a` (or something /// equivalent to `a`), 1.0 meaning that the interpolation has finished, /// returning `b` (or something equivalent to `b`), and values in between /// meaning that the interpolation is at the relevant point on the timeline /// between `a` and `b`. The interpolation can be extrapolated beyond 0.0 and /// 1.0, so negative values and values greater than 1.0 are valid (and can /// easily be generated by curves such as [Curves.elasticInOut]). /// /// Values for `t` are usually obtained from an [Animation<double>], such as /// an [AnimationController]. static FontVariation? lerp(FontVariation? a, FontVariation? b, double t) { if (a?.axis != b?.axis || (a == null && b == null)) { return t < 0.5 ? a : b; } return FontVariation( a!.axis, clampDouble(lerpDouble(a.value, b!.value, t)!, -32768.0, 32768.0 - 1.0/65536.0), ); } @override String toString() => "FontVariation('$axis', $value)"; } /// The measurements of a character (or a sequence of visually connected /// characters) within a paragraph. /// /// See also: /// /// * [Paragraph.getGlyphInfoAt], which finds the [GlyphInfo] associated with /// a code unit in the text. /// * [Paragraph.getClosestGlyphInfoForOffset], which finds the [GlyphInfo] of /// the glyph(s) onscreen that's closest to the given [Offset]. final class GlyphInfo { /// Creates a [GlyphInfo] with the specified values.
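/// /// In typical use, a [GlyphInfo] is not constructed directly but is obtained /// from a laid-out [Paragraph], for example (a minimal sketch; `paragraph` is /// assumed to be a [Paragraph] on which [Paragraph.layout] has already been /// called): /// /// ```dart /// final GlyphInfo? firstGlyph = paragraph.getGlyphInfoAt(0); /// ```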
GlyphInfo(this.graphemeClusterLayoutBounds, this.graphemeClusterCodeUnitRange, this.writingDirection); GlyphInfo._(double left, double top, double right, double bottom, int graphemeStart, int graphemeEnd, bool isLTR) : graphemeClusterLayoutBounds = Rect.fromLTRB(left, top, right, bottom), graphemeClusterCodeUnitRange = TextRange(start: graphemeStart, end: graphemeEnd), writingDirection = isLTR ? TextDirection.ltr : TextDirection.rtl; /// The layout bounding rect of the associated character, in the paragraph's /// coordinates. /// /// This is **not** a tight bounding box that encloses the character's outline. /// The vertical extent reported is derived from the font metrics (instead of /// glyph metrics), and the horizontal extent is the horizontal advance of the /// character. final Rect graphemeClusterLayoutBounds; /// The UTF-16 range of the associated character in the text. final TextRange graphemeClusterCodeUnitRange; /// The writing direction within the [GlyphInfo]. final TextDirection writingDirection; @override bool operator ==(Object other) { if (identical(this, other)) { return true; } return other is GlyphInfo && graphemeClusterLayoutBounds == other.graphemeClusterLayoutBounds && graphemeClusterCodeUnitRange == other.graphemeClusterCodeUnitRange && writingDirection == other.writingDirection; } @override int get hashCode => Object.hash(graphemeClusterLayoutBounds, graphemeClusterCodeUnitRange, writingDirection); @override String toString() => 'Glyph($graphemeClusterLayoutBounds, textRange: $graphemeClusterCodeUnitRange, direction: $writingDirection)'; } /// Whether and how to align text horizontally. // The order of this enum must match the order of the values in RenderStyleConstants.h's ETextAlign. enum TextAlign { /// Align the text on the left edge of the container. left, /// Align the text on the right edge of the container. right, /// Align the text in the center of the container. center, /// Stretch lines of text that end with a soft line break to fill the width of /// the container. /// /// Lines that end with hard line breaks are aligned towards the [start] edge. justify, /// Align the text on the leading edge of the container. /// /// For left-to-right text ([TextDirection.ltr]), this is the left edge. /// /// For right-to-left text ([TextDirection.rtl]), this is the right edge. start, /// Align the text on the trailing edge of the container. /// /// For left-to-right text ([TextDirection.ltr]), this is the right edge. /// /// For right-to-left text ([TextDirection.rtl]), this is the left edge. end, } /// A horizontal line used for aligning text. enum TextBaseline { /// The horizontal line used to align the bottom of glyphs for alphabetic characters. alphabetic, /// The horizontal line used to align ideographic characters. ideographic, } /// A linear decoration to draw near the text. class TextDecoration { const TextDecoration._(this._mask); /// Creates a decoration that paints the union of all the given decorations. factory TextDecoration.combine(List<TextDecoration> decorations) { int mask = 0; for (final TextDecoration decoration in decorations) { mask |= decoration._mask; } return TextDecoration._(mask); } final int _mask; /// Whether this decoration will paint at least as much decoration as the given decoration. 
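/// /// For example (a minimal sketch; `underlinedAndStruck` is just a local /// variable for illustration), a combined decoration contains each of its /// parts, but an individual part does not contain the combination: /// /// ```dart /// final TextDecoration underlinedAndStruck = TextDecoration.combine( /// <TextDecoration>[TextDecoration.underline, TextDecoration.lineThrough], /// ); /// assert(underlinedAndStruck.contains(TextDecoration.underline)); /// assert(!TextDecoration.underline.contains(underlinedAndStruck)); /// ```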
bool contains(TextDecoration other) { return (_mask | other._mask) == _mask; } /// Do not draw a decoration static const TextDecoration none = TextDecoration._(0x0); /// Draw a line underneath each line of text static const TextDecoration underline = TextDecoration._(0x1); /// Draw a line above each line of text static const TextDecoration overline = TextDecoration._(0x2); /// Draw a line through each line of text static const TextDecoration lineThrough = TextDecoration._(0x4); @override bool operator ==(Object other) { return other is TextDecoration && other._mask == _mask; } @override int get hashCode => _mask.hashCode; @override String toString() { if (_mask == 0) { return 'TextDecoration.none'; } final List<String> values = <String>[]; if (_mask & underline._mask != 0) { values.add('underline'); } if (_mask & overline._mask != 0) { values.add('overline'); } if (_mask & lineThrough._mask != 0) { values.add('lineThrough'); } if (values.length == 1) { return 'TextDecoration.${values[0]}'; } return 'TextDecoration.combine([${values.join(", ")}])'; } } /// The style in which to draw a text decoration enum TextDecorationStyle { /// Draw a solid line solid, /// Draw two lines double, /// Draw a dotted line dotted, /// Draw a dashed line dashed, /// Draw a sinusoidal line wavy } /// {@macro dart.ui.textLeadingDistribution} enum TextLeadingDistribution { /// Distributes the [leading](https://en.wikipedia.org/wiki/Leading) /// of the text proportionally above and below the text, to the font's /// ascent/descent ratio. /// /// {@template dart.ui.leading} /// The leading of a text run is defined as /// `TextStyle.height * TextStyle.fontSize - TextStyle.fontSize`. When /// [TextStyle.height] is not set, the text run uses the leading specified by /// the font instead. /// {@endtemplate} proportional, /// Distributes the ["leading"](https://en.wikipedia.org/wiki/Leading) /// of the text evenly above and below the text (i.e. evenly above the /// font's ascender and below the descender). /// /// {@macro dart.ui.leading} /// /// The leading can become negative when [TextStyle.height] is smaller than /// 1.0. /// /// This is the default strategy used by CSS, known as /// ["half-leading"](https://www.w3.org/TR/css-inline-3/#half-leading). even, } /// {@template dart.ui.textHeightBehavior} /// Defines how to apply [TextStyle.height] over and under text. /// /// [TextHeightBehavior.applyHeightToFirstAscent] and /// [TextHeightBehavior.applyHeightToLastDescent] represent whether the /// [TextStyle.height] modifier will be applied to the corresponding metric. By /// default both properties are true, and [TextStyle.height] is applied as /// normal. When set to false, the font's default ascent will be used. /// /// [TextHeightBehavior.leadingDistribution] determines how the /// leading is distributed over and under text. This /// property applies before [TextHeightBehavior.applyHeightToFirstAscent] and /// [TextHeightBehavior.applyHeightToLastDescent]. /// /// {@endtemplate} class TextHeightBehavior { /// Creates a new TextHeightBehavior object. /// /// * applyHeightToFirstAscent: When true, the [TextStyle.height] modifier /// will be applied to the ascent of the first line. When false, the font's /// default ascent will be used. /// * applyHeightToLastDescent: When true, the [TextStyle.height] modifier /// will be applied to the descent of the last line. When false, the font's /// default descent will be used. /// * leadingDistribution: How the leading is distributed over and under /// text. 
/// /// All properties default to true (height modifications applied as normal). const TextHeightBehavior({ this.applyHeightToFirstAscent = true, this.applyHeightToLastDescent = true, this.leadingDistribution = TextLeadingDistribution.proportional, }); /// Creates a new TextHeightBehavior object from an encoded form. /// /// See [_encode] for the creation of the encoded form. const TextHeightBehavior._fromEncoded(int encoded, this.leadingDistribution) : applyHeightToFirstAscent = (encoded & 0x1) == 0, applyHeightToLastDescent = (encoded & 0x2) == 0; /// Whether to apply the [TextStyle.height] modifier to the ascent of the first /// line in the paragraph. /// /// When true, the [TextStyle.height] modifier will be applied to the ascent /// of the first line. When false, the font's default ascent will be used and /// the [TextStyle.height] will have no effect on the ascent of the first line. /// /// This property only has effect if a non-null [TextStyle.height] is specified. /// /// Defaults to true (height modifications applied as normal). final bool applyHeightToFirstAscent; /// Whether to apply the [TextStyle.height] modifier to the descent of the last /// line in the paragraph. /// /// When true, the [TextStyle.height] modifier will be applied to the descent /// of the last line. When false, the font's default descent will be used and /// the [TextStyle.height] will have no effect on the descent of the last line. /// /// This property only has effect if a non-null [TextStyle.height] is specified. /// /// Defaults to true (height modifications applied as normal). final bool applyHeightToLastDescent; /// {@template dart.ui.textLeadingDistribution} /// How the ["leading"](https://en.wikipedia.org/wiki/Leading) is distributed /// over and under the text. /// /// Does not affect layout when [TextStyle.height] is not specified. The /// leading can become negative, for example, when [TextLeadingDistribution.even] /// is used with a [TextStyle.height] much smaller than 1.0. /// {@endtemplate} /// /// Defaults to [TextLeadingDistribution.proportional]. final TextLeadingDistribution leadingDistribution; /// Returns an encoded int representation of this object (excluding /// [leadingDistribution]). int _encode() { return (applyHeightToFirstAscent ? 0 : 1 << 0) | (applyHeightToLastDescent ? 0 : 1 << 1); } @override bool operator ==(Object other) { if (other.runtimeType != runtimeType) { return false; } return other is TextHeightBehavior && other.applyHeightToFirstAscent == applyHeightToFirstAscent && other.applyHeightToLastDescent == applyHeightToLastDescent && other.leadingDistribution == leadingDistribution; } @override int get hashCode { return Object.hash( applyHeightToFirstAscent, applyHeightToLastDescent, leadingDistribution.index, ); } @override String toString() { return 'TextHeightBehavior(' 'applyHeightToFirstAscent: $applyHeightToFirstAscent, ' 'applyHeightToLastDescent: $applyHeightToLastDescent, ' 'leadingDistribution: $leadingDistribution' ')'; } } /// Determines if lists [a] and [b] are deep equivalent. /// /// Returns true if the lists are both null, or if they are both non-null, have /// the same length, and contain the same elements in the same order. Returns /// false otherwise. bool _listEquals<T>(List<T>? a, List<T>?
b) { if (a == null) { return b == null; } if (b == null || a.length != b.length) { return false; } for (int index = 0; index < a.length; index += 1) { if (a[index] != b[index]) { return false; } } return true; } // This encoding must match the C++ version of ParagraphBuilder::pushStyle. // // The encoded array buffer has 9 elements; only the first 8 are populated below. // // - Element 0: A bit field where the ith bit indicates whether the ith element // has a non-null value. Bit 8 indicates whether |decorationThickness| is // non-null, and bits 9 to 13 indicate whether |fontFamily|, |fontSize|, // |letterSpacing|, |wordSpacing|, and |height| are non-null, respectively. // Bit 0 indicates the [TextLeadingDistribution] of the text style. // // - Element 1: The |color| in ARGB with 8 bits per channel. // // - Element 2: A bit field indicating which text decorations are present in // the |textDecoration| list. The ith bit is set if there's a TextDecoration // with enum index i in the list. // // - Element 3: The |decorationColor| in ARGB with 8 bits per channel. // // - Element 4: The bit field of the |decorationStyle|. // // - Element 5: The index of the |fontWeight|. // // - Element 6: The enum index of the |fontStyle|. // // - Element 7: The enum index of the |textBaseline|. // Int32List _encodeTextStyle( Color? color, TextDecoration? decoration, Color? decorationColor, TextDecorationStyle? decorationStyle, double? decorationThickness, FontWeight? fontWeight, FontStyle? fontStyle, TextBaseline? textBaseline, String? fontFamily, List<String>? fontFamilyFallback, double? fontSize, double? letterSpacing, double? wordSpacing, double? height, Locale? locale, Paint? background, Paint? foreground, List<Shadow>? shadows, List<FontFeature>? fontFeatures, List<FontVariation>? fontVariations, ) { final Int32List result = Int32List(9); // The 0th bit of result[0] is reserved for leadingDistribution. if (color != null) { result[0] |= 1 << 1; result[1] = color.value; } if (decoration != null) { result[0] |= 1 << 2; result[2] = decoration._mask; } if (decorationColor != null) { result[0] |= 1 << 3; result[3] = decorationColor.value; } if (decorationStyle != null) { result[0] |= 1 << 4; result[4] = decorationStyle.index; } if (fontWeight != null) { result[0] |= 1 << 5; result[5] = fontWeight.index; } if (fontStyle != null) { result[0] |= 1 << 6; result[6] = fontStyle.index; } if (textBaseline != null) { result[0] |= 1 << 7; result[7] = textBaseline.index; } if (decorationThickness != null) { result[0] |= 1 << 8; } if (fontFamily != null || (fontFamilyFallback != null && fontFamilyFallback.isNotEmpty)) { result[0] |= 1 << 9; // Passed separately to native. } if (fontSize != null) { result[0] |= 1 << 10; // Passed separately to native. } if (letterSpacing != null) { result[0] |= 1 << 11; // Passed separately to native. } if (wordSpacing != null) { result[0] |= 1 << 12; // Passed separately to native. } if (height != null) { result[0] |= 1 << 13; // Passed separately to native. } if (locale != null) { result[0] |= 1 << 14; // Passed separately to native. } if (background != null) { result[0] |= 1 << 15; // Passed separately to native. } if (foreground != null) { result[0] |= 1 << 16; // Passed separately to native. } if (shadows != null) { result[0] |= 1 << 17; // Passed separately to native. } if (fontFeatures != null) { result[0] |= 1 << 18; // Passed separately to native. } if (fontVariations != null) { result[0] |= 1 << 19; // Passed separately to native. } return result; } /// An opaque object that determines the size, position, and rendering of text.
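/// /// For example, a text style can be constructed and then pushed onto a /// [ParagraphBuilder] with [ParagraphBuilder.pushStyle] (a minimal sketch; the /// color and size are arbitrary placeholder values): /// /// ```dart /// final ui.TextStyle style = ui.TextStyle( /// color: const ui.Color(0xFFFF9000), /// fontSize: 14.0, /// ); /// ```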
/// /// See also: /// /// * [TextStyle](https://api.flutter.dev/flutter/painting/TextStyle-class.html), the class in the [painting] library. /// class TextStyle { /// Creates a new TextStyle object. /// /// * `color`: The color to use when painting the text. If this is specified, `foreground` must be null. /// * `decoration`: The decorations to paint near the text (e.g., an underline). /// * `decorationColor`: The color in which to paint the text decorations. /// * `decorationStyle`: The style in which to paint the text decorations (e.g., dashed). /// * `decorationThickness`: The thickness of the decoration as a multiplier on the thickness specified by the font. /// * `fontWeight`: The typeface thickness to use when painting the text (e.g., bold). /// * `fontStyle`: The typeface variant to use when drawing the letters (e.g., italics). /// * `fontFamily`: The name of the font to use when painting the text (e.g., Roboto). If a `fontFamilyFallback` is /// provided and `fontFamily` is not, then the first font family in `fontFamilyFallback` will take the position of /// the preferred font family. When a higher priority font cannot be found or does not contain a glyph, a lower /// priority font will be used. /// * `fontFamilyFallback`: An ordered list of the names of the fonts to fallback on when a glyph cannot /// be found in a higher priority font. When the `fontFamily` is null, the first font family in this list /// is used as the preferred font. Internally, the 'fontFamily` is concatenated to the front of this list. /// When no font family is provided through 'fontFamilyFallback' (null or empty) or `fontFamily`, then the /// platform default font will be used. /// * `fontSize`: The size of glyphs (in logical pixels) to use when painting the text. /// * `letterSpacing`: The amount of space (in logical pixels) to add between each letter. /// * `wordSpacing`: The amount of space (in logical pixels) to add at each sequence of white-space (i.e. between each word). /// * `textBaseline`: The common baseline that should be aligned between this text span and its parent text span, or, for the root text spans, with the line box. /// * `height`: The height of this text span, as a multiplier of the font size. Omitting `height` will allow the line height /// to take the height as defined by the font, which may not be exactly the height of the fontSize. /// * `leadingDistribution`: When `height` is specified, how the extra vertical space should be distributed over and under the text. Defaults /// to the paragraph's [TextHeightBehavior] if left unspecified. /// * `locale`: The locale used to select region-specific glyphs. /// * `background`: The paint drawn as a background for the text. /// * `foreground`: The paint used to draw the text. If this is specified, `color` must be null. /// * `fontFeatures`: The font features that should be applied to the text. /// * `fontVariations`: The font variations that should be applied to the text. TextStyle({ Color? color, TextDecoration? decoration, Color? decorationColor, TextDecorationStyle? decorationStyle, double? decorationThickness, FontWeight? fontWeight, FontStyle? fontStyle, TextBaseline? textBaseline, String? fontFamily, List<String>? fontFamilyFallback, double? fontSize, double? letterSpacing, double? wordSpacing, double? height, TextLeadingDistribution? leadingDistribution, Locale? locale, Paint? background, Paint? foreground, List<Shadow>? shadows, List<FontFeature>? fontFeatures, List<FontVariation>? 
fontVariations, }) : assert(color == null || foreground == null, 'Cannot provide both a color and a foreground\n' 'The color argument is just a shorthand for "foreground: Paint()..color = color".' ), _encoded = _encodeTextStyle( color, decoration, decorationColor, decorationStyle, decorationThickness, fontWeight, fontStyle, textBaseline, fontFamily, fontFamilyFallback, fontSize, letterSpacing, wordSpacing, height, locale, background, foreground, shadows, fontFeatures, fontVariations, ), _leadingDistribution = leadingDistribution, _fontFamily = fontFamily ?? '', _fontFamilyFallback = fontFamilyFallback, _fontSize = fontSize, _letterSpacing = letterSpacing, _wordSpacing = wordSpacing, _height = height, _decorationThickness = decorationThickness, _locale = locale, _background = background, _foreground = foreground, _shadows = shadows, _fontFeatures = fontFeatures, _fontVariations = fontVariations; final Int32List _encoded; final String _fontFamily; final List<String>? _fontFamilyFallback; final double? _fontSize; final double? _letterSpacing; final double? _wordSpacing; final double? _height; final double? _decorationThickness; final Locale? _locale; final Paint? _background; final Paint? _foreground; final List<Shadow>? _shadows; final List<FontFeature>? _fontFeatures; final List<FontVariation>? _fontVariations; final TextLeadingDistribution? _leadingDistribution; @override bool operator ==(Object other) { if (identical(this, other)) { return true; } return other is TextStyle && other._leadingDistribution == _leadingDistribution && other._fontFamily == _fontFamily && other._fontSize == _fontSize && other._letterSpacing == _letterSpacing && other._wordSpacing == _wordSpacing && other._height == _height && other._decorationThickness == _decorationThickness && other._locale == _locale && other._background == _background && other._foreground == _foreground && _listEquals<int>(other._encoded, _encoded) && _listEquals<Shadow>(other._shadows, _shadows) && _listEquals<String>(other._fontFamilyFallback, _fontFamilyFallback) && _listEquals<FontFeature>(other._fontFeatures, _fontFeatures) && _listEquals<FontVariation>(other._fontVariations, _fontVariations); } @override int get hashCode { final List<Shadow>? shadows = _shadows; final List<FontFeature>? fontFeatures = _fontFeatures; final List<FontVariation>? fontVariations = _fontVariations; return Object.hash( Object.hashAll(_encoded), _leadingDistribution, _fontFamily, _fontFamilyFallback, _fontSize, _letterSpacing, _wordSpacing, _height, _locale, _background, _foreground, shadows == null ? null : Object.hashAll(shadows), _decorationThickness, fontFeatures == null ? null : Object.hashAll(fontFeatures), fontVariations == null ? null : Object.hashAll(fontVariations), ); } @override String toString() { final List<String>? fontFamilyFallback = _fontFamilyFallback; return 'TextStyle(' 'color: ${ _encoded[0] & 0x00002 == 0x00002 ? Color(_encoded[1]) : "unspecified"}, ' 'decoration: ${ _encoded[0] & 0x00004 == 0x00004 ? TextDecoration._(_encoded[2]) : "unspecified"}, ' 'decorationColor: ${ _encoded[0] & 0x00008 == 0x00008 ? Color(_encoded[3]) : "unspecified"}, ' 'decorationStyle: ${ _encoded[0] & 0x00010 == 0x00010 ? TextDecorationStyle.values[_encoded[4]] : "unspecified"}, ' // The decorationThickness is not in encoded order in order to keep it near the other decoration properties. 'decorationThickness: ${_encoded[0] & 0x00100 == 0x00100 ? _decorationThickness : "unspecified"}, ' 'fontWeight: ${ _encoded[0] & 0x00020 == 0x00020 ? 
FontWeight.values[_encoded[5]] : "unspecified"}, ' 'fontStyle: ${ _encoded[0] & 0x00040 == 0x00040 ? FontStyle.values[_encoded[6]] : "unspecified"}, ' 'textBaseline: ${ _encoded[0] & 0x00080 == 0x00080 ? TextBaseline.values[_encoded[7]] : "unspecified"}, ' 'fontFamily: ${ _encoded[0] & 0x00200 == 0x00200 && _fontFamily != '' ? _fontFamily : "unspecified"}, ' 'fontFamilyFallback: ${ _encoded[0] & 0x00200 == 0x00200 && fontFamilyFallback != null && fontFamilyFallback.isNotEmpty ? fontFamilyFallback : "unspecified"}, ' 'fontSize: ${ _encoded[0] & 0x00400 == 0x00400 ? _fontSize : "unspecified"}, ' 'letterSpacing: ${ _encoded[0] & 0x00800 == 0x00800 ? "${_letterSpacing}x" : "unspecified"}, ' 'wordSpacing: ${ _encoded[0] & 0x01000 == 0x01000 ? "${_wordSpacing}x" : "unspecified"}, ' 'height: ${ _encoded[0] & 0x02000 == 0x02000 ? "${_height}x" : "unspecified"}, ' 'leadingDistribution: ${_leadingDistribution ?? "unspecified"}, ' 'locale: ${ _encoded[0] & 0x04000 == 0x04000 ? _locale : "unspecified"}, ' 'background: ${ _encoded[0] & 0x08000 == 0x08000 ? _background : "unspecified"}, ' 'foreground: ${ _encoded[0] & 0x10000 == 0x10000 ? _foreground : "unspecified"}, ' 'shadows: ${ _encoded[0] & 0x20000 == 0x20000 ? _shadows : "unspecified"}, ' 'fontFeatures: ${ _encoded[0] & 0x40000 == 0x40000 ? _fontFeatures : "unspecified"}, ' 'fontVariations: ${ _encoded[0] & 0x80000 == 0x80000 ? _fontVariations : "unspecified"}' ')'; } } // This encoding must match the C++ version ParagraphBuilder::build. // // The encoded array buffer has 6 elements. // // - Element 0: A bit mask indicating which fields are non-null. // Bit 0 is unused. Bits 1-n are set if the corresponding index in the // encoded array is non-null. The remaining bits represent fields that // are passed separately from the array. // // - Element 1: The enum index of the |textAlign|. // // - Element 2: The enum index of the |textDirection|. // // - Element 3: The index of the |fontWeight|. // // - Element 4: The enum index of the |fontStyle|. // // - Element 5: The value of |maxLines|. // // - Element 6: The encoded value of |textHeightBehavior|, except its leading // distribution. Int32List _encodeParagraphStyle( TextAlign? textAlign, TextDirection? textDirection, int? maxLines, String? fontFamily, double? fontSize, double? height, TextHeightBehavior? textHeightBehavior, FontWeight? fontWeight, FontStyle? fontStyle, StrutStyle? strutStyle, String? ellipsis, Locale? locale, ) { final Int32List result = Int32List(7); // also update paragraph_builder.cc if (textAlign != null) { result[0] |= 1 << 1; result[1] = textAlign.index; } if (textDirection != null) { result[0] |= 1 << 2; result[2] = textDirection.index; } if (fontWeight != null) { result[0] |= 1 << 3; result[3] = fontWeight.index; } if (fontStyle != null) { result[0] |= 1 << 4; result[4] = fontStyle.index; } if (maxLines != null) { result[0] |= 1 << 5; result[5] = maxLines; } if (textHeightBehavior != null) { result[0] |= 1 << 6; result[6] = textHeightBehavior._encode(); } if (fontFamily != null) { result[0] |= 1 << 7; // Passed separately to native. } if (fontSize != null) { result[0] |= 1 << 8; // Passed separately to native. } if (height != null) { result[0] |= 1 << 9; // Passed separately to native. } if (strutStyle != null) { result[0] |= 1 << 10; // Passed separately to native. } if (ellipsis != null) { result[0] |= 1 << 11; // Passed separately to native. } if (locale != null) { result[0] |= 1 << 12; // Passed separately to native. 
} return result; } /// An opaque object that determines the configuration used by /// [ParagraphBuilder] to position lines within a [Paragraph] of text. class ParagraphStyle { /// Creates a new ParagraphStyle object. /// /// * `textAlign`: The alignment of the text within the lines of the /// paragraph. If the last line is ellipsized (see `ellipsis` below), the /// alignment is applied to that line after it has been truncated but before /// the ellipsis has been added. /// See: https://github.com/flutter/flutter/issues/9819 /// /// * `textDirection`: The directionality of the text, left-to-right (e.g. /// Norwegian) or right-to-left (e.g. Hebrew). This controls the overall /// directionality of the paragraph, as well as the meaning of /// [TextAlign.start] and [TextAlign.end] in the `textAlign` field. /// /// * `maxLines`: The maximum number of lines painted. Lines beyond this /// number are silently dropped. For example, if `maxLines` is 1, then only /// one line is rendered. If `maxLines` is null, but `ellipsis` is not null, /// then lines after the first one that overflows the width constraints are /// dropped. The width constraints are those set in the /// [ParagraphConstraints] object passed to the [Paragraph.layout] method. /// /// * `fontFamily`: The name of the font family to apply when painting the text, /// in the absence of a `textStyle` being attached to the span. /// /// * `fontSize`: The fallback size of glyphs (in logical pixels) to /// use when painting the text. This is used when there is no [TextStyle]. /// /// * `height`: The fallback height of the spans as a multiplier of the font /// size. The fallback height is used when no height is provided through /// [TextStyle.height]. Omitting `height` here and in [TextStyle] will allow /// the line height to take the height as defined by the font, which may not /// be exactly the height of the `fontSize`. /// /// * `textHeightBehavior`: Specifies how the `height` multiplier is /// applied to ascent of the first line and the descent of the last line. /// /// * `leadingDistribution`: Specifies how the extra vertical space added by /// the `height` multiplier should be distributed over and under the text. /// Defaults to [TextLeadingDistribution.proportional]. /// /// * `fontWeight`: The typeface thickness to use when painting the text /// (e.g., bold). /// /// * `fontStyle`: The typeface variant to use when drawing the letters (e.g., /// italics). /// /// * `strutStyle`: The properties of the strut. Strut defines a set of minimum /// vertical line height related metrics and can be used to obtain more /// advanced line spacing behavior. /// /// * `ellipsis`: String used to ellipsize overflowing text. If `maxLines` is /// not null, then the `ellipsis`, if any, is applied to the last rendered /// line, if that line overflows the width constraints. If `maxLines` is /// null, then the `ellipsis` is applied to the first line that overflows /// the width constraints, and subsequent lines are dropped. The width /// constraints are those set in the [ParagraphConstraints] object passed to /// the [Paragraph.layout] method. The empty string and the null value are /// considered equivalent and turn off this behavior. /// /// * `locale`: The locale used to select region-specific glyphs. ParagraphStyle({ TextAlign? textAlign, TextDirection? textDirection, int? maxLines, String? fontFamily, double? fontSize, double? height, TextHeightBehavior? textHeightBehavior, FontWeight? fontWeight, FontStyle? fontStyle, StrutStyle? strutStyle, String? 
ellipsis, Locale? locale, }) : _encoded = _encodeParagraphStyle( textAlign, textDirection, maxLines, fontFamily, fontSize, height, textHeightBehavior, fontWeight, fontStyle, strutStyle, ellipsis, locale, ), _fontFamily = fontFamily, _fontSize = fontSize, _height = height, _strutStyle = strutStyle, _ellipsis = ellipsis, _locale = locale, _leadingDistribution = textHeightBehavior?.leadingDistribution ?? TextLeadingDistribution.proportional; final Int32List _encoded; final String? _fontFamily; final double? _fontSize; final double? _height; final StrutStyle? _strutStyle; final String? _ellipsis; final Locale? _locale; final TextLeadingDistribution _leadingDistribution; @override bool operator ==(Object other) { if (identical(this, other)) { return true; } if (other.runtimeType != runtimeType) { return false; } return other is ParagraphStyle && other._fontFamily == _fontFamily && other._fontSize == _fontSize && other._height == _height && other._strutStyle == _strutStyle && other._ellipsis == _ellipsis && other._locale == _locale && other._leadingDistribution == _leadingDistribution && _listEquals<int>(other._encoded, _encoded); } @override int get hashCode => Object.hash(Object.hashAll(_encoded), _fontFamily, _fontSize, _height, _ellipsis, _locale, _leadingDistribution); @override String toString() { return 'ParagraphStyle(' 'textAlign: ${ _encoded[0] & 0x002 == 0x002 ? TextAlign.values[_encoded[1]] : "unspecified"}, ' 'textDirection: ${ _encoded[0] & 0x004 == 0x004 ? TextDirection.values[_encoded[2]] : "unspecified"}, ' 'fontWeight: ${ _encoded[0] & 0x008 == 0x008 ? FontWeight.values[_encoded[3]] : "unspecified"}, ' 'fontStyle: ${ _encoded[0] & 0x010 == 0x010 ? FontStyle.values[_encoded[4]] : "unspecified"}, ' 'maxLines: ${ _encoded[0] & 0x020 == 0x020 ? _encoded[5] : "unspecified"}, ' 'textHeightBehavior: ${ _encoded[0] & 0x040 == 0x040 ? TextHeightBehavior._fromEncoded(_encoded[6], _leadingDistribution).toString() : "unspecified"}, ' 'fontFamily: ${ _encoded[0] & 0x080 == 0x080 ? _fontFamily : "unspecified"}, ' 'fontSize: ${ _encoded[0] & 0x100 == 0x100 ? _fontSize : "unspecified"}, ' 'height: ${ _encoded[0] & 0x200 == 0x200 ? "${_height}x" : "unspecified"}, ' 'strutStyle: ${ _encoded[0] & 0x400 == 0x400 ? _strutStyle : "unspecified"}, ' 'ellipsis: ${ _encoded[0] & 0x800 == 0x800 ? '"$_ellipsis"' : "unspecified"}, ' 'locale: ${ _encoded[0] & 0x1000 == 0x1000 ? _locale : "unspecified"}' ')'; } } // Serialize strut properties into ByteData. This encoding errs towards // compactness. The first 8 bits is a bitmask that records which properties are // null. The rest of the values are encoded in the same order encountered in the // bitmask. The final returned value truncates any unused bytes at the end. For // ease of decoding, all 8 bit integers are stored before any 32 bit integers. // // We serialize this more thoroughly than ParagraphStyle because it is // much more likely that the strut is empty/null and we wish to add // minimal overhead for non-strut cases. ByteData _encodeStrut( String? fontFamily, List<String>? fontFamilyFallback, double? fontSize, double? height, TextLeadingDistribution? leadingDistribution, double? leading, FontWeight? fontWeight, FontStyle? fontStyle, bool? 
forceStrutHeight) { if (fontFamily == null && fontSize == null && height == null && leadingDistribution == null && leading == null && fontWeight == null && fontStyle == null && forceStrutHeight == null) { return ByteData(0); } final ByteData data = ByteData(16); // Max size is 16 bytes int bitmask = 0; // 8 bit mask int byteCount = 1; if (fontWeight != null) { bitmask |= 1 << 0; data.setInt8(byteCount, fontWeight.index); byteCount += 1; } if (fontStyle != null) { bitmask |= 1 << 1; data.setInt8(byteCount, fontStyle.index); byteCount += 1; } if (fontFamily != null || (fontFamilyFallback != null && fontFamilyFallback.isNotEmpty)) { bitmask |= 1 << 2; // passed separately to native } // The 3rd bit (0-indexed) is reserved for leadingDistribution. if (fontSize != null) { bitmask |= 1 << 4; data.setFloat32(byteCount, fontSize, _kFakeHostEndian); byteCount += 4; } if (height != null) { bitmask |= 1 << 5; data.setFloat32(byteCount, height, _kFakeHostEndian); byteCount += 4; } if (leading != null) { bitmask |= 1 << 6; data.setFloat32(byteCount, leading, _kFakeHostEndian); byteCount += 4; } if (forceStrutHeight ?? false) { bitmask |= 1 << 7; } data.setInt8(0, bitmask); assert(byteCount <= 16); assert(bitmask >> 8 == 0, 'strut bitmask overflow: $bitmask'); return ByteData.view(data.buffer, 0, byteCount); } /// See also: /// /// * [StrutStyle](https://api.flutter.dev/flutter/painting/StrutStyle-class.html), the class in the [painting] library. /// class StrutStyle { /// Creates a new StrutStyle object. /// /// * `fontFamily`: The name of the font to use when painting the text (e.g., /// Roboto). /// /// * `fontFamilyFallback`: An ordered list of font family names that will be /// searched for when the font in `fontFamily` cannot be found. /// /// * `fontSize`: The size of glyphs (in logical pixels) to use when painting /// the text. /// /// * `height`: The minimum height of the line boxes, as a multiplier of the /// font size. The lines of the paragraph will be at least /// `(height + leading) * fontSize` tall when `fontSize` is not null. Omitting /// `height` will allow the minimum line height to take the height as defined /// by the font, which may not be exactly the height of the `fontSize`. When /// `fontSize` is null, there is no minimum line height. Tall glyphs due to /// baseline alignment or large [TextStyle.fontSize] may cause the actual line /// height after layout to be taller than specified here. The `fontSize` must /// be provided for this property to take effect. /// /// * `leading`: The minimum amount of leading between lines as a multiple of /// the font size. `fontSize` must be provided for this property to take /// effect. The leading added by this property is distributed evenly over /// and under the text, regardless of `leadingDistribution`. /// /// * `leadingDistribution`: how the extra vertical space added by the /// `height` multiplier should be distributed over and under the text, /// independent of `leading` (which is always distributed evenly over and /// under text). Defaults to the paragraph's [TextHeightBehavior]'s leading /// distribution. /// /// * `fontWeight`: The typeface thickness to use when painting the text /// (e.g., bold). /// /// * `fontStyle`: The typeface variant to use when drawing the letters (e.g., /// italics). /// /// * `forceStrutHeight`: When true, the paragraph will force all lines to be exactly /// `(height + leading) * fontSize` tall from baseline to baseline. 
/// [TextStyle] is no longer able to influence the line height, and any tall /// glyphs may overlap with lines above. If a `fontFamily` is specified, the /// total ascent of the first line will be the min of the `Ascent + half-leading` /// of the `fontFamily` and `(height + leading) * fontSize`. Otherwise, it /// will be determined by the Ascent + half-leading of the first text. StrutStyle({ String? fontFamily, List<String>? fontFamilyFallback, double? fontSize, double? height, TextLeadingDistribution? leadingDistribution, double? leading, FontWeight? fontWeight, FontStyle? fontStyle, bool? forceStrutHeight, }) : _encoded = _encodeStrut( fontFamily, fontFamilyFallback, fontSize, height, leadingDistribution, leading, fontWeight, fontStyle, forceStrutHeight, ), _leadingDistribution = leadingDistribution, _fontFamily = fontFamily, _fontFamilyFallback = fontFamilyFallback; final ByteData _encoded; // Most of the data for strut is encoded. final String? _fontFamily; final List<String>? _fontFamilyFallback; final TextLeadingDistribution? _leadingDistribution; bool get _enabled => _encoded.lengthInBytes > 0; @override bool operator ==(Object other) { if (identical(this, other)) { return true; } if (other.runtimeType != runtimeType) { return false; } return other is StrutStyle && other._fontFamily == _fontFamily && other._leadingDistribution == _leadingDistribution && _listEquals<String>(other._fontFamilyFallback, _fontFamilyFallback) && _listEquals<int>(other._encoded.buffer.asInt8List(), _encoded.buffer.asInt8List()); } @override int get hashCode => Object.hash(Object.hashAll(_encoded.buffer.asInt8List()), _fontFamily, _leadingDistribution); } /// A direction in which text flows. /// /// Some languages are written from the left to the right (for example, English, /// Tamil, or Chinese), while others are written from the right to the left (for /// example Aramaic, Hebrew, or Urdu). Some are also written in a mixture, for /// example Arabic is mostly written right-to-left, with numerals written /// left-to-right. /// /// The text direction must be provided to APIs that render text or lay out /// boxes horizontally, so that they can determine which direction to start in: /// either right-to-left, [TextDirection.rtl]; or left-to-right, /// [TextDirection.ltr]. /// /// ## Design discussion /// /// Flutter is designed to address the needs of applications written in any of /// the world's currently-used languages, whether they use a right-to-left or /// left-to-right writing direction. Flutter does not support other writing /// modes, such as vertical text or boustrophedon text, as these are rarely used /// in computer programs. /// /// It is common when developing user interface frameworks to pick a default /// text direction — typically left-to-right, the direction most familiar to the /// engineers working on the framework — because this simplifies the development /// of applications on the platform. Unfortunately, this frequently results in /// the platform having unexpected left-to-right biases or assumptions, as /// engineers will typically miss places where they need to support /// right-to-left text. This then results in bugs that only manifest in /// right-to-left environments. /// /// In an effort to minimize the extent to which Flutter experiences this /// category of issues, the lowest levels of the Flutter framework do not have a /// default text reading direction. 
Any time a reading direction is necessary, /// for example when text is to be displayed, or when a /// writing-direction-dependent value is to be interpreted, the reading /// direction must be explicitly specified. Where possible, such as in `switch` /// statements, the right-to-left case is listed first, to avoid the impression /// that it is an afterthought. /// /// At the higher levels (specifically starting at the widgets library), an /// ambient [Directionality] is introduced, which provides a default. Thus, for /// instance, a [widgets.Text] widget in the scope of a [MaterialApp] widget /// does not need to be given an explicit writing direction. The /// [Directionality.of] static method can be used to obtain the ambient text /// direction for a particular [BuildContext]. /// /// ### Known left-to-right biases in Flutter /// /// Despite the design intent described above, certain left-to-right biases have /// nonetheless crept into Flutter's design. These include: /// /// * The [Canvas] origin is at the top left, and the x-axis increases in a /// left-to-right direction. /// /// * The default localization in the widgets and material libraries is /// American English, which is left-to-right. /// /// ### Visual properties vs directional properties /// /// Many classes in the Flutter framework are offered in two versions, a /// visually-oriented variant, and a text-direction-dependent variant. For /// example, [EdgeInsets] is described in terms of top, left, right, and bottom, /// while [EdgeInsetsDirectional] is described in terms of top, start, end, and /// bottom, where start and end correspond to right and left in right-to-left /// text and left and right in left-to-right text. /// /// There are distinct use cases for each of these variants. /// /// Text-direction-dependent variants are useful when developing user interfaces /// that should "flip" with the text direction. For example, a paragraph of text /// in English will typically be left-aligned and a quote will be indented from /// the left, while in Arabic it will be right-aligned and indented from the /// right. Both of these cases are described by the direction-dependent /// [TextAlign.start] and [EdgeInsetsDirectional.start]. /// /// In contrast, the visual variants are useful when the text direction is known /// and not affected by the reading direction. For example, an application /// giving driving directions might show a "turn left" arrow on the left and a /// "turn right" arrow on the right — and would do so whether the application /// was localized to French (left-to-right) or Hebrew (right-to-left). /// /// In practice, it is also expected that many developers will only be /// targeting one language, and in that case it may be simpler to think in /// visual terms. // The order of this enum must match the order of the values in TextDirection.h's TextDirection. enum TextDirection { /// The text flows from right to left (e.g. Arabic, Hebrew). rtl, /// The text flows from left to right (e.g., English, French). ltr, } /// A rectangle enclosing a run of text. /// /// This is similar to [Rect] but includes an inherent [TextDirection]. class TextBox { /// Creates an object that describes a box containing text. const TextBox.fromLTRBD( this.left, this.top, this.right, this.bottom, this.direction, ); /// The left edge of the text box, irrespective of direction. /// /// To get the leading edge (which may depend on the [direction]), consider [start]. final double left; /// The top edge of the text box. 
final double top; /// The right edge of the text box, irrespective of direction. /// /// To get the trailing edge (which may depend on the [direction]), consider [end]. final double right; /// The bottom edge of the text box. final double bottom; /// The direction in which text inside this box flows. final TextDirection direction; /// Returns a rect of the same size as this box. Rect toRect() => Rect.fromLTRB(left, top, right, bottom); /// The [left] edge of the box for left-to-right text; the [right] edge of the box for right-to-left text. /// /// See also: /// /// * [direction], which specifies the text direction. double get start { return (direction == TextDirection.ltr) ? left : right; } /// The [right] edge of the box for left-to-right text; the [left] edge of the box for right-to-left text. /// /// See also: /// /// * [direction], which specifies the text direction. double get end { return (direction == TextDirection.ltr) ? right : left; } @override bool operator ==(Object other) { if (identical(this, other)) { return true; } if (other.runtimeType != runtimeType) { return false; } return other is TextBox && other.left == left && other.top == top && other.right == right && other.bottom == bottom && other.direction == direction; } @override int get hashCode => Object.hash(left, top, right, bottom, direction); @override String toString() => 'TextBox.fromLTRBD(${left.toStringAsFixed(1)}, ${top.toStringAsFixed(1)}, ${right.toStringAsFixed(1)}, ${bottom.toStringAsFixed(1)}, $direction)'; } /// A way to disambiguate a [TextPosition] when its offset could match two /// different locations in the rendered string. /// /// For example, at an offset where the rendered text wraps, there are two /// visual positions that the offset could represent: one prior to the line /// break (at the end of the first line) and one after the line break (at the /// start of the second line). A text affinity disambiguates between these two /// cases. /// /// This affects only line breaks caused by wrapping, not explicit newline /// characters. For newline characters, the position is fully specified by the /// offset alone, and there is no ambiguity. /// /// [TextAffinity] also affects bidirectional text at the interface between LTR /// and RTL text. Consider the following string, where the lowercase letters /// will be displayed as LTR and the uppercase letters RTL: "helloHELLO". When /// rendered, the string would appear visually as "helloOLLEH". An offset of 5 /// would be ambiguous without a corresponding [TextAffinity]. Looking at the /// string in code, the offset represents the position just after the "o" and /// just before the "H". When rendered, this offset could be either in the /// middle of the string to the right of the "o" or at the end of the string to /// the right of the "H". enum TextAffinity { /// The position has affinity for the upstream side of the text position, i.e. /// in the direction of the beginning of the string. /// /// In the example of an offset at the place where text is wrapping, upstream /// indicates the end of the first line. /// /// In the bidirectional text example "helloHELLO", an offset of 5 with /// [TextAffinity] upstream would appear in the middle of the rendered text, /// just to the right of the "o". See the definition of [TextAffinity] for the /// full example. upstream, /// The position has affinity for the downstream side of the text position, /// i.e. in the direction of the end of the string. 
/// /// In the example of an offset at the place where text is wrapping, /// downstream indicates the beginning of the second line. /// /// In the bidirectional text example "helloHELLO", an offset of 5 with /// [TextAffinity] downstream would appear at the end of the rendered text, /// just to the right of the "H". See the definition of [TextAffinity] for the /// full example. downstream, } /// A position in a string of text. /// /// A TextPosition can be used to describe a caret position in between /// characters. The [offset] points to the position between `offset - 1` and /// `offset` characters of the string, and the [affinity] is used to describe /// which character this position affiliates with. /// /// One use case is when rendered text is forced to wrap. In this case, the offset /// where the wrap occurs could visually appear either at the end of the first /// line or the beginning of the second line. The second way is with /// bidirectional text. An offset at the interface between two different text /// directions could have one of two locations in the rendered text. /// /// See the documentation for [TextAffinity] for more information on how /// TextAffinity disambiguates situations like these. class TextPosition { /// Creates an object representing a particular position in a string. /// /// The arguments must not be null (so the [offset] argument is required). const TextPosition({ required this.offset, this.affinity = TextAffinity.downstream, }); /// The index of the character that immediately follows the position in the /// string representation of the text. /// /// For example, given the string `'Hello'`, offset 0 represents the cursor /// being before the `H`, while offset 5 represents the cursor being just /// after the `o`. final int offset; /// Disambiguates cases where the position in the string given by [offset] /// could represent two different visual positions in the rendered text. For /// example, this can happen when text is forced to wrap, or when one string /// of text is rendered with multiple text directions. /// /// See the documentation for [TextAffinity] for more information on how /// TextAffinity disambiguates situations like these. final TextAffinity affinity; @override bool operator ==(Object other) { if (other.runtimeType != runtimeType) { return false; } return other is TextPosition && other.offset == offset && other.affinity == affinity; } @override int get hashCode => Object.hash(offset, affinity); @override String toString() { return 'TextPosition(offset: $offset, affinity: $affinity)'; } } /// A range of characters in a string of text. class TextRange { /// Creates a text range. /// /// The [start] and [end] arguments must not be null. Both the [start] and /// [end] must either be greater than or equal to zero or both exactly -1. /// /// The text included in the range includes the character at [start], but not /// the one at [end]. /// /// Instead of creating an empty text range, consider using the [empty] /// constant. const TextRange({ required this.start, required this.end, }) : assert(start >= -1), assert(end >= -1); /// A text range that starts and ends at offset. /// /// The [offset] argument must be non-null and greater than or equal to -1. const TextRange.collapsed(int offset) : assert(offset >= -1), start = offset, end = offset; /// A text range that contains nothing and is not in the text. static const TextRange empty = TextRange(start: -1, end: -1); /// The index of the first character in the range. 
/// /// If [start] and [end] are both -1, the text range is empty. final int start; /// The next index after the characters in this range. /// /// If [start] and [end] are both -1, the text range is empty. final int end; /// Whether this range represents a valid position in the text. bool get isValid => start >= 0 && end >= 0; /// Whether this range is empty (but still potentially placed inside the text). bool get isCollapsed => start == end; /// Whether the start of this range precedes the end. bool get isNormalized => end >= start; /// The text before this range. String textBefore(String text) { assert(isNormalized); return text.substring(0, start); } /// The text after this range. String textAfter(String text) { assert(isNormalized); return text.substring(end); } /// The text inside this range. String textInside(String text) { assert(isNormalized); return text.substring(start, end); } @override bool operator ==(Object other) { if (identical(this, other)) { return true; } return other is TextRange && other.start == start && other.end == end; } @override int get hashCode => Object.hash( start.hashCode, end.hashCode, ); @override String toString() => 'TextRange(start: $start, end: $end)'; } /// Layout constraints for [Paragraph] objects. /// /// Instances of this class are typically used with [Paragraph.layout]. /// /// The only constraint that can be specified is the [width]. See the discussion /// at [width] for more details. class ParagraphConstraints { /// Creates constraints for laying out a paragraph. /// /// The [width] argument must not be null. const ParagraphConstraints({ required this.width, }); /// The width the paragraph should use when computing the positions of glyphs. /// /// If possible, the paragraph will select a soft line break prior to reaching /// this width. If no soft line break is available, the paragraph will select /// a hard line break prior to reaching this width. If that would force a line /// break without any characters having been placed (i.e. if the next /// character to be laid out does not fit within the given width constraint) /// then the next character is allowed to overflow the width constraint and a /// forced line break is placed after it (even if an explicit line break /// follows). /// /// The width influences how ellipses are applied. See the discussion at /// [ParagraphStyle.new] for more details. /// /// This width is also used to position glyphs according to the [TextAlign] /// alignment described in the [ParagraphStyle] used when building the /// [Paragraph] with a [ParagraphBuilder]. final double width; @override bool operator ==(Object other) { if (other.runtimeType != runtimeType) { return false; } return other is ParagraphConstraints && other.width == width; } @override int get hashCode => width.hashCode; @override String toString() => 'ParagraphConstraints(width: $width)'; } /// Defines various ways to vertically bound the boxes returned by /// [Paragraph.getBoxesForRange]. /// /// See [BoxWidthStyle] for a similar property to control width. enum BoxHeightStyle { /// Provide tight bounding boxes that fit heights per run. This style may result /// in uneven bounding boxes that do not nicely connect with adjacent boxes. tight, /// The height of the boxes will be the maximum height of all runs in the /// line. All boxes in the same line will be the same height. /// /// This does not guarantee that the boxes will cover the entire vertical height of the line /// when there is additional line spacing.
/// /// See [BoxHeightStyle.includeLineSpacingTop], [BoxHeightStyle.includeLineSpacingMiddle], /// and [BoxHeightStyle.includeLineSpacingBottom] for styles that will cover /// the entire line. max, /// Extends the top and bottom edge of the bounds to fully cover any line /// spacing. /// /// The top and bottom of each box will cover half of the /// space above and half of the space below the line. /// /// {@template dart.ui.boxHeightStyle.includeLineSpacing} /// The top edge of each line should be the same as the bottom edge /// of the line above. There should be no gaps in vertical coverage given any /// amount of line spacing. Line spacing is not included above the first line /// and below the last line due to no additional space present there. /// {@endtemplate} includeLineSpacingMiddle, /// Extends the top edge of the bounds to fully cover any line spacing. /// /// The line spacing will be added to the top of the box. /// /// {@macro dart.ui.boxHeightStyle.includeLineSpacing} includeLineSpacingTop, /// Extends the bottom edge of the bounds to fully cover any line spacing. /// /// The line spacing will be added to the bottom of the box. /// /// {@macro dart.ui.boxHeightStyle.includeLineSpacing} includeLineSpacingBottom, /// Calculate box heights based on the metrics of this paragraph's [StrutStyle]. /// /// Boxes based on the strut will have consistent heights throughout the /// entire paragraph. The top edge of each line will align with the bottom /// edge of the previous line. It is possible for glyphs to extend outside /// these boxes. strut, } /// Defines various ways to horizontally bound the boxes returned by /// [Paragraph.getBoxesForRange]. /// /// See [BoxHeightStyle] for a similar property to control height. enum BoxWidthStyle { /// Provide tight bounding boxes that fit widths to the runs of each line /// independently. tight, /// Adds up to two additional boxes as needed at the beginning and/or end /// of each line so that the widths of the boxes in line are the same width /// as the widest line in the paragraph. /// /// The additional boxes on each line are only added when the relevant box /// at the relevant edge of that line does not span the maximum width of /// the paragraph. max, } /// Where to vertically align the placeholder relative to the surrounding text. /// /// Used by [ParagraphBuilder.addPlaceholder]. enum PlaceholderAlignment { /// Match the baseline of the placeholder with the baseline. /// /// The [TextBaseline] to use must be specified and non-null when using this /// alignment mode. baseline, /// Align the bottom edge of the placeholder with the baseline such that the /// placeholder sits on top of the baseline. /// /// The [TextBaseline] to use must be specified and non-null when using this /// alignment mode. aboveBaseline, /// Align the top edge of the placeholder with the baseline specified /// such that the placeholder hangs below the baseline. /// /// The [TextBaseline] to use must be specified and non-null when using this /// alignment mode. belowBaseline, /// Align the top edge of the placeholder with the top edge of the text. /// /// When the placeholder is very tall, the extra space will hang from /// the top and extend through the bottom of the line. top, /// Align the bottom edge of the placeholder with the bottom edge of the text. /// /// When the placeholder is very tall, the extra space will rise from the /// bottom and extend through the top of the line. bottom, /// Align the middle of the placeholder with the middle of the text. 
/// /// When the placeholder is very tall, the extra space will grow equally /// from the top and bottom of the line. middle, } /// [LineMetrics] stores the measurements and statistics of a single line in the /// paragraph. /// /// The measurements here are for the line as a whole, and represent the maximum /// extent of the line instead of per-run or per-glyph metrics. For more detailed /// metrics, see [TextBox] and [Paragraph.getBoxesForRange]. /// /// [LineMetrics] should be obtained directly from the [Paragraph.computeLineMetrics] /// method. class LineMetrics { /// Creates a [LineMetrics] object with only the specified values. LineMetrics({ required this.hardBreak, required this.ascent, required this.descent, required this.unscaledAscent, required this.height, required this.width, required this.left, required this.baseline, required this.lineNumber, }); LineMetrics._( this.hardBreak, this.ascent, this.descent, this.unscaledAscent, this.height, this.width, this.left, this.baseline, this.lineNumber, ); /// True if this line ends with an explicit line break (e.g. '\n') or is the end /// of the paragraph. False otherwise. final bool hardBreak; /// The rise from the [baseline] as calculated from the font and style for this line. /// /// This is the final computed ascent and can be impacted by the strut, height, scaling, /// as well as outlying runs that are very tall. /// /// The [ascent] is provided as a positive value, even though it is typically defined /// in fonts as negative. This is to ensure the sign of operations with these /// metrics directly reflects the intended sign of the value. For example, /// the y coordinate of the top edge of the line is `baseline - ascent`. final double ascent; /// The drop from the [baseline] as calculated from the font and style for this line. /// /// This is the final computed descent and can be impacted by the strut, height, scaling, /// as well as outlying runs that are very tall. /// /// The y coordinate of the bottom edge of the line is `baseline + descent`. final double descent; /// The rise from the [baseline] as calculated from the font and style for this line /// ignoring the [TextStyle.height]. /// /// The [unscaledAscent] is provided as a positive value, even though it is typically /// defined in fonts as negative. This is to ensure the sign of operations with /// these metrics directly reflects the intended sign of the value. final double unscaledAscent; /// Total height of the line from the top edge to the bottom edge. /// /// This is equivalent to `round(ascent + descent)`. This value is provided /// separately due to rounding causing sub-pixel differences from the unrounded /// values. final double height; /// Width of the line from the left edge of the leftmost glyph to the right /// edge of the rightmost glyph. /// /// This is not the same as the width of the paragraph. /// /// See also: /// /// * [Paragraph.width], the max width passed in during layout. /// * [Paragraph.longestLine], the width of the longest line in the paragraph. final double width; /// The x coordinate of the left edge of the line. /// /// The right edge can be obtained with `left + width`. final double left; /// The y coordinate of the baseline for this line from the top of the paragraph. /// /// The bottom edge of the paragraph up to and including this line may be obtained /// through `baseline + descent`. final double baseline; /// The number of this line in the overall paragraph, with the first line being /// index zero.
/// /// For example, the first line is line 0, second line is line 1. final int lineNumber; @override bool operator ==(Object other) { if (other.runtimeType != runtimeType) { return false; } return other is LineMetrics && other.hardBreak == hardBreak && other.ascent == ascent && other.descent == descent && other.unscaledAscent == unscaledAscent && other.height == height && other.width == width && other.left == left && other.baseline == baseline && other.lineNumber == lineNumber; } @override int get hashCode => Object.hash(hardBreak, ascent, descent, unscaledAscent, height, width, left, baseline, lineNumber); @override String toString() { return 'LineMetrics(hardBreak: $hardBreak, ' 'ascent: $ascent, ' 'descent: $descent, ' 'unscaledAscent: $unscaledAscent, ' 'height: $height, ' 'width: $width, ' 'left: $left, ' 'baseline: $baseline, ' 'lineNumber: $lineNumber)'; } } /// A paragraph of text. /// /// A paragraph retains the size and position of each glyph in the text and can /// be efficiently resized and painted. /// /// To create a [Paragraph] object, use a [ParagraphBuilder]. /// /// Paragraphs can be displayed on a [Canvas] using the [Canvas.drawParagraph] /// method. abstract class Paragraph { /// The amount of horizontal space this paragraph occupies. /// /// Valid only after [layout] has been called. double get width; /// The amount of vertical space this paragraph occupies. /// /// Valid only after [layout] has been called. double get height; /// The distance from the left edge of the leftmost glyph to the right edge of /// the rightmost glyph in the paragraph. /// /// Valid only after [layout] has been called. double get longestLine; /// The minimum width that this paragraph could be without failing to paint /// its contents within itself. /// /// Valid only after [layout] has been called. double get minIntrinsicWidth; /// Returns the smallest width beyond which increasing the width never /// decreases the height. /// /// Valid only after [layout] has been called. double get maxIntrinsicWidth; /// The distance from the top of the paragraph to the alphabetic /// baseline of the first line, in logical pixels. double get alphabeticBaseline; /// The distance from the top of the paragraph to the ideographic /// baseline of the first line, in logical pixels. double get ideographicBaseline; /// True if there is more vertical content, but the text was truncated, either /// because we reached `maxLines` lines of text or because the `maxLines` was /// null, `ellipsis` was not null, and one of the lines exceeded the width /// constraint. /// /// See the discussion of the `maxLines` and `ellipsis` arguments at /// [ParagraphStyle.new]. bool get didExceedMaxLines; /// Computes the size and position of each glyph in the paragraph. /// /// The [ParagraphConstraints] control how wide the text is allowed to be. void layout(ParagraphConstraints constraints); /// Returns a list of text boxes that enclose the given text range. /// /// The [boxHeightStyle] and [boxWidthStyle] parameters allow customization /// of how the boxes are bound vertically and horizontally. Both style /// parameters default to the tight option, which will provide close-fitting /// boxes and will not account for any line spacing. /// /// Coordinates of the TextBox are relative to the upper-left corner of the paragraph, /// where positive y values indicate down. /// /// The [boxHeightStyle] and [boxWidthStyle] parameters must not be null. /// /// See [BoxHeightStyle] and [BoxWidthStyle] for full descriptions of each option. 
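///
/// For example, a sketch of highlighting a selection (illustrative only;
/// `paragraph` is assumed to be a laid-out [Paragraph], and `canvas` and
/// `highlightPaint` are assumed to exist at the call site):
///
/// ```dart
/// final List<TextBox> boxes = paragraph.getBoxesForRange(
///   0,
///   5,
///   boxHeightStyle: BoxHeightStyle.max,
/// );
/// for (final TextBox box in boxes) {
///   canvas.drawRect(box.toRect(), highlightPaint);
/// }
/// ```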
List<TextBox> getBoxesForRange(int start, int end, {BoxHeightStyle boxHeightStyle = BoxHeightStyle.tight, BoxWidthStyle boxWidthStyle = BoxWidthStyle.tight}); /// Returns a list of text boxes that enclose all placeholders in the paragraph. /// /// The boxes are returned in the same order as the placeholders were passed in /// through [ParagraphBuilder.addPlaceholder]. /// /// Coordinates of the [TextBox] are relative to the upper-left corner of the paragraph, /// where positive y values indicate down. List<TextBox> getBoxesForPlaceholders(); /// Returns the text position closest to the given offset. /// /// This method always returns a [TextPosition] for any given [offset], even /// when the [offset] is not close to any text, or when the paragraph is empty. /// This is useful for determining the text to select when the user drags the /// text selection handle. /// /// See also: /// /// * [getClosestGlyphInfoForOffset], which returns more information about /// the closest character to an [Offset]. TextPosition getPositionForOffset(Offset offset); /// Returns the [GlyphInfo] of the glyph closest to the given `offset` in the /// paragraph coordinate system, or null if the text is empty, or is /// entirely clipped or ellipsized away. /// /// This method first finds the line closest to `offset.dy`, and then returns /// the [GlyphInfo] of the closest glyph(s) within that line. GlyphInfo? getClosestGlyphInfoForOffset(Offset offset); /// Returns the [GlyphInfo] located at the given UTF-16 `codeUnitOffset` in /// the paragraph, or null if the given `codeUnitOffset` is out of the visible /// lines or is ellipsized. GlyphInfo? getGlyphInfoAt(int codeUnitOffset); /// Returns the [TextRange] of the word at the given [TextPosition]. /// /// Characters not part of a word, such as spaces, symbols, and punctuation, /// have word breaks on both sides. In such cases, this method will return /// (offset, offset+1). Word boundaries are defined more precisely in Unicode /// Standard Annex #29 http://www.unicode.org/reports/tr29/#Word_Boundaries /// /// The [TextPosition] is treated as a caret position; its [TextPosition.affinity] /// is used to determine which character this position points to. For example, /// the word boundary at `TextPosition(offset: 5, affinity: TextAffinity.upstream)` /// of the `string = 'Hello word'` will return range (0, 5) because the position /// points to the character 'o' instead of the space. TextRange getWordBoundary(TextPosition position); /// Returns the [TextRange] of the line at the given [TextPosition]. /// /// The newline (if any) is returned as part of the range. /// /// Not valid until after layout. /// /// This can potentially be expensive, since it needs to compute the line /// metrics, so use it sparingly. TextRange getLineBoundary(TextPosition position); /// Returns the full list of [LineMetrics] that describe in detail the various /// metrics of each laid out line. /// /// Not valid until after layout. /// /// This can potentially return a large amount of data, so it is not recommended /// to repeatedly call this. Instead, cache the results. List<LineMetrics> computeLineMetrics(); /// Returns the [LineMetrics] for the line at `lineNumber`, or null if the /// given `lineNumber` is greater than or equal to [numberOfLines]. LineMetrics? getLineMetricsAt(int lineNumber); /// The total number of visible lines in the paragraph. /// /// Returns a non-negative number. If `maxLines` is non-null, the value of /// [numberOfLines] never exceeds `maxLines`.
int get numberOfLines; /// Returns the line number of the line that contains the code unit that /// `codeUnitOffset` points to. /// /// This method returns null if the given `codeUnitOffset` is out of bounds, or /// is logically after the last visible codepoint. This includes the case where /// its codepoint belongs to a visible line, but the text layout library /// replaced it with an ellipsis. /// /// If the target code unit points to a control character that introduces /// mandatory line breaks (most notably the line feed character `LF`, typically /// represented in strings as the escape sequence "\n"), to conform to /// [the unicode rules](https://unicode.org/reports/tr14/#LB4), the control /// character itself is always considered to be at the end of "current" line /// rather than the beginning of the new line. int? getLineNumberAt(int codeUnitOffset); /// Release the resources used by this object. The object is no longer usable /// after this method is called. void dispose(); /// Whether this reference to the underlying picture is [dispose]d. /// /// This only returns a valid value if asserts are enabled, and must not be /// used otherwise. bool get debugDisposed; } base class _NativeParagraph extends NativeFieldWrapperClass1 implements Paragraph { /// This class is created by the engine, and should not be instantiated /// or extended directly. /// /// To create a [Paragraph] object, use a [ParagraphBuilder]. _NativeParagraph._(); bool _needsLayout = true; @override @Native<Double Function(Pointer<Void>)>(symbol: 'Paragraph::width', isLeaf: true) external double get width; @override @Native<Double Function(Pointer<Void>)>(symbol: 'Paragraph::height', isLeaf: true) external double get height; @override @Native<Double Function(Pointer<Void>)>(symbol: 'Paragraph::longestLine', isLeaf: true) external double get longestLine; @override @Native<Double Function(Pointer<Void>)>(symbol: 'Paragraph::minIntrinsicWidth', isLeaf: true) external double get minIntrinsicWidth; @override @Native<Double Function(Pointer<Void>)>(symbol: 'Paragraph::maxIntrinsicWidth', isLeaf: true) external double get maxIntrinsicWidth; @override @Native<Double Function(Pointer<Void>)>(symbol: 'Paragraph::alphabeticBaseline', isLeaf: true) external double get alphabeticBaseline; @override @Native<Double Function(Pointer<Void>)>(symbol: 'Paragraph::ideographicBaseline', isLeaf: true) external double get ideographicBaseline; @override @Native<Bool Function(Pointer<Void>)>(symbol: 'Paragraph::didExceedMaxLines', isLeaf: true) external bool get didExceedMaxLines; @override void layout(ParagraphConstraints constraints) { _layout(constraints.width); assert(() { _needsLayout = false; return true; }()); } @Native<Void Function(Pointer<Void>, Double)>(symbol: 'Paragraph::layout', isLeaf: true) external void _layout(double width); List<TextBox> _decodeTextBoxes(Float32List encoded) { final int count = encoded.length ~/ 5; final List<TextBox> boxes = <TextBox>[]; int position = 0; for (int index = 0; index < count; index += 1) { boxes.add(TextBox.fromLTRBD( encoded[position++], encoded[position++], encoded[position++], encoded[position++], TextDirection.values[encoded[position++].toInt()], )); } return boxes; } @override List<TextBox> getBoxesForRange(int start, int end, {BoxHeightStyle boxHeightStyle = BoxHeightStyle.tight, BoxWidthStyle boxWidthStyle = BoxWidthStyle.tight}) { return _decodeTextBoxes(_getBoxesForRange(start, end, boxHeightStyle.index, boxWidthStyle.index)); } // See paragraph.cc for the layout of this return 
value. @Native<Handle Function(Pointer<Void>, Uint32, Uint32, Uint32, Uint32)>(symbol: 'Paragraph::getRectsForRange') external Float32List _getBoxesForRange(int start, int end, int boxHeightStyle, int boxWidthStyle); @override List<TextBox> getBoxesForPlaceholders() { return _decodeTextBoxes(_getBoxesForPlaceholders()); } @Native<Handle Function(Pointer<Void>)>(symbol: 'Paragraph::getRectsForPlaceholders') external Float32List _getBoxesForPlaceholders(); @override TextPosition getPositionForOffset(Offset offset) { final List<int> encoded = _getPositionForOffset(offset.dx, offset.dy); return TextPosition(offset: encoded[0], affinity: TextAffinity.values[encoded[1]]); } @Native<Handle Function(Pointer<Void>, Double, Double)>(symbol: 'Paragraph::getPositionForOffset') external List<int> _getPositionForOffset(double dx, double dy); @override GlyphInfo? getGlyphInfoAt(int codeUnitOffset) => _getGlyphInfoAt(codeUnitOffset, GlyphInfo._); @Native<Handle Function(Pointer<Void>, Uint32, Handle)>(symbol: 'Paragraph::getGlyphInfoAt') external GlyphInfo? _getGlyphInfoAt(int codeUnitOffset, Function constructor); @override GlyphInfo? getClosestGlyphInfoForOffset(Offset offset) => _getClosestGlyphInfoForOffset(offset.dx, offset.dy, GlyphInfo._); @Native<Handle Function(Pointer<Void>, Double, Double, Handle)>(symbol: 'Paragraph::getClosestGlyphInfo') external GlyphInfo? _getClosestGlyphInfoForOffset(double dx, double dy, Function constructor); @override TextRange getWordBoundary(TextPosition position) { final int characterPosition; switch (position.affinity) { case TextAffinity.upstream: characterPosition = position.offset - 1; case TextAffinity.downstream: characterPosition = position.offset; } final List<int> boundary = _getWordBoundary(characterPosition); return TextRange(start: boundary[0], end: boundary[1]); } @Native<Handle Function(Pointer<Void>, Uint32)>(symbol: 'Paragraph::getWordBoundary') external List<int> _getWordBoundary(int offset); @override TextRange getLineBoundary(TextPosition position) { final List<int> boundary = _getLineBoundary(position.offset); final TextRange line = TextRange(start: boundary[0], end: boundary[1]); final List<int> nextBoundary = _getLineBoundary(position.offset + 1); final TextRange nextLine = TextRange(start: nextBoundary[0], end: nextBoundary[1]); // If there is no next line, because we're at the end of the field, return line. if (!nextLine.isValid) { return line; } // _getLineBoundary only considers the offset and assumes that the // TextAffinity is upstream. In the case that TextPosition is just after a // word wrap (downstream), we need to return the line for the next offset. if (position.affinity == TextAffinity.downstream && line != nextLine && position.offset == line.end && line.end == nextLine.start) { return TextRange(start: nextBoundary[0], end: nextBoundary[1]); } return line; } @Native<Handle Function(Pointer<Void>, Uint32)>(symbol: 'Paragraph::getLineBoundary') external List<int> _getLineBoundary(int offset); // Redirecting the paint function in this way solves some dependency problems // in the C++ code. If we straighten out the C++ dependencies, we can remove // this indirection. 
@Native<Void Function(Pointer<Void>, Pointer<Void>, Double, Double)>(symbol: 'Paragraph::paint') external void _paint(_NativeCanvas canvas, double x, double y); @override List<LineMetrics> computeLineMetrics() { final Float64List encoded = _computeLineMetrics(); final int count = encoded.length ~/ 9; int position = 0; final List<LineMetrics> metrics = <LineMetrics>[ for (int index = 0; index < count; index += 1) LineMetrics( hardBreak: encoded[position++] != 0, ascent: encoded[position++], descent: encoded[position++], unscaledAscent: encoded[position++], height: encoded[position++], width: encoded[position++], left: encoded[position++], baseline: encoded[position++], lineNumber: encoded[position++].toInt(), ) ]; return metrics; } @Native<Handle Function(Pointer<Void>)>(symbol: 'Paragraph::computeLineMetrics') external Float64List _computeLineMetrics(); @override LineMetrics? getLineMetricsAt(int lineNumber) => _getLineMetricsAt(lineNumber, LineMetrics._); @Native<Handle Function(Pointer<Void>, Uint32, Handle)>(symbol: 'Paragraph::getLineMetricsAt') external LineMetrics? _getLineMetricsAt(int lineNumber, Function constructor); @override @Native<Uint32 Function(Pointer<Void>)>(symbol: 'Paragraph::getNumberOfLines') external int get numberOfLines; @override int? getLineNumberAt(int codeUnitOffset) { final int lineNumber = _getLineNumber(codeUnitOffset); return lineNumber < 0 ? null : lineNumber; } @Native<Int32 Function(Pointer<Void>, Uint32)>(symbol: 'Paragraph::getLineNumberAt') external int _getLineNumber(int codeUnitOffset); @override void dispose() { assert(!_disposed); assert(() { _disposed = true; return true; }()); _dispose(); } /// This can't be a leaf call because the native function calls Dart API /// (Dart_SetNativeInstanceField). @Native<Void Function(Pointer<Void>)>(symbol: 'Paragraph::dispose') external void _dispose(); bool _disposed = false; @override bool get debugDisposed { bool? disposed; assert(() { disposed = _disposed; return true; }()); return disposed ?? (throw StateError('$runtimeType.debugDisposed is only available when asserts are enabled.')); } @override String toString() { String? result; assert(() { if (_disposed && _needsLayout) { result = 'Paragraph(DISPOSED while dirty)'; } if (_disposed && !_needsLayout) { result = 'Paragraph(DISPOSED)'; } return true; }()); if (result != null) { return result!; } if (_needsLayout) { return 'Paragraph(dirty)'; } return 'Paragraph()'; } } /// Builds a [Paragraph] containing text with the given styling information. /// /// To set the paragraph's alignment, truncation, and ellipsizing behavior, pass /// an appropriately-configured [ParagraphStyle] object to the /// [ParagraphBuilder.new] constructor. /// /// Then, call combinations of [pushStyle], [addText], and [pop] to add styled /// text to the object. /// /// Finally, call [build] to obtain the constructed [Paragraph] object. After /// this point, the builder is no longer usable. /// /// After constructing a [Paragraph], call [Paragraph.layout] on it and then /// paint it with [Canvas.drawParagraph]. abstract class ParagraphBuilder { /// Creates a new [ParagraphBuilder] object, which is used to create a /// [Paragraph]. factory ParagraphBuilder(ParagraphStyle style) = _NativeParagraphBuilder; /// The number of placeholders currently in the paragraph. int get placeholderCount; /// The scales of the placeholders in the paragraph. List<double> get placeholderScales; /// Applies the given style to the added text until [pop] is called. /// /// See [pop] for details. 
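///
/// For example, a sketch of nesting styles (illustrative only; `builder` is
/// assumed to be an existing [ParagraphBuilder]):
///
/// ```dart
/// builder
///   ..pushStyle(TextStyle(fontWeight: FontWeight.bold))
///   ..addText('bold ')
///   ..pushStyle(TextStyle(fontStyle: FontStyle.italic))
///   ..addText('bold italic ')
///   ..pop() // Removes the italic style; bold remains in effect.
///   ..addText('still bold');
/// ```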
void pushStyle(TextStyle style); /// Ends the effect of the most recent call to [pushStyle]. /// /// Internally, the paragraph builder maintains a stack of text styles. Text /// added to the paragraph is affected by all the styles in the stack. Calling /// [pop] removes the topmost style in the stack, leaving the remaining styles /// in effect. void pop(); /// Adds the given text to the paragraph. /// /// The text will be styled according to the current stack of text styles. void addText(String text); /// Adds an inline placeholder space to the paragraph. /// /// The paragraph will contain a rectangular space with no text of the dimensions /// specified. /// /// The `width` and `height` parameters specify the size of the placeholder rectangle. /// /// The `alignment` parameter specifies how the placeholder rectangle will be vertically /// aligned with the surrounding text. When [PlaceholderAlignment.baseline], /// [PlaceholderAlignment.aboveBaseline], and [PlaceholderAlignment.belowBaseline] /// alignment modes are used, the baseline needs to be set with the `baseline`. /// When using [PlaceholderAlignment.baseline], `baselineOffset` indicates the distance /// of the baseline down from the top of the rectangle. The default `baselineOffset` /// is the `height`. /// /// Examples: /// /// * For a 30x50 placeholder with the bottom edge aligned with the bottom of the text, use: /// `addPlaceholder(30, 50, PlaceholderAlignment.bottom);` /// * For a 30x50 placeholder that is vertically centered around the text, use: /// `addPlaceholder(30, 50, PlaceholderAlignment.middle);`. /// * For a 30x50 placeholder that sits completely on top of the alphabetic baseline, use: /// `addPlaceholder(30, 50, PlaceholderAlignment.aboveBaseline, baseline: TextBaseline.alphabetic)`. /// * For a 30x50 placeholder with 40 pixels above and 10 pixels below the alphabetic baseline, use: /// `addPlaceholder(30, 50, PlaceholderAlignment.baseline, baseline: TextBaseline.alphabetic, baselineOffset: 40)`. /// /// Lines are permitted to break around each placeholder. /// /// Decorations will be drawn based on the font defined in the most recently /// pushed [TextStyle]. The decorations are drawn as if unicode text were present /// in the placeholder space, and will draw the same regardless of the height and /// alignment of the placeholder. To hide or manually adjust decorations to fit, /// a text style with the desired decoration behavior should be pushed before /// adding a placeholder. /// /// Any decorations drawn through a placeholder will exist on the same canvas/layer /// as the text. This means any content drawn on top of the space reserved by /// the placeholder will be drawn over the decoration, possibly obscuring the /// decoration. /// /// Placeholders are represented by a unicode 0xFFFC "object replacement character" /// in the text buffer. For each placeholder, one object replacement character is /// added on to the text buffer. /// /// The `scale` parameter will scale the `width` and `height` by the specified amount, /// and keep track of the scale. The scales of placeholders added can be accessed /// through [placeholderScales]. This is primarily used for accessibility scaling. void addPlaceholder(double width, double height, PlaceholderAlignment alignment, { double scale = 1.0, double? baselineOffset, TextBaseline? baseline, }); /// Applies the given paragraph style and returns a [Paragraph] containing the /// added text and associated styling. 
/// /// After calling this function, the paragraph builder object is invalid and /// cannot be used further. Paragraph build(); } base class _NativeParagraphBuilder extends NativeFieldWrapperClass1 implements ParagraphBuilder { _NativeParagraphBuilder(ParagraphStyle style) : _defaultLeadingDistribution = style._leadingDistribution { List<String>? strutFontFamilies; final StrutStyle? strutStyle = style._strutStyle; final ByteData? encodedStrutStyle; if (strutStyle != null && strutStyle._enabled) { final String? fontFamily = strutStyle._fontFamily; strutFontFamilies = <String>[ if (fontFamily != null) fontFamily, ...?strutStyle._fontFamilyFallback, ]; assert(TextLeadingDistribution.values.length <= 2); final TextLeadingDistribution leadingDistribution = strutStyle._leadingDistribution ?? style._leadingDistribution; encodedStrutStyle = strutStyle._encoded; int bitmask = encodedStrutStyle.getInt8(0); bitmask |= (leadingDistribution.index) << 3; encodedStrutStyle.setInt8(0, bitmask); } else { encodedStrutStyle = null; } _constructor( style._encoded, encodedStrutStyle, style._fontFamily ?? '', strutFontFamilies, style._fontSize ?? 0, style._height ?? 0, style._ellipsis ?? '', _encodeLocale(style._locale), ); } @Native<Void Function(Handle, Handle, Handle, Handle, Handle, Double, Double, Handle, Handle)>(symbol: 'ParagraphBuilder::Create') external void _constructor( Int32List encoded, ByteData? strutData, String fontFamily, List<Object?>? strutFontFamily, double fontSize, double height, String ellipsis, String locale, ); @override int get placeholderCount => _placeholderCount; int _placeholderCount = 0; @override List<double> get placeholderScales => _placeholderScales; final List<double> _placeholderScales = <double>[]; final TextLeadingDistribution _defaultLeadingDistribution; @override void pushStyle(TextStyle style) { final List<String> fullFontFamilies = <String>[]; fullFontFamilies.add(style._fontFamily); final List<String>? fontFamilyFallback = style._fontFamilyFallback; if (fontFamilyFallback != null) { fullFontFamilies.addAll(fontFamilyFallback); } final Int32List encoded = style._encoded; final TextLeadingDistribution finalLeadingDistribution = style._leadingDistribution ?? _defaultLeadingDistribution; // ensure the enum can be represented using 1 bit. assert(TextLeadingDistribution.values.length <= 2); // Use the leading distribution from the paragraph's style if it's not // explicitly set in `style`. encoded[0] |= finalLeadingDistribution.index << 0; ByteData? encodedFontFeatures; final List<FontFeature>? fontFeatures = style._fontFeatures; if (fontFeatures != null) { encodedFontFeatures = ByteData(fontFeatures.length * FontFeature._kEncodedSize); int byteOffset = 0; for (final FontFeature feature in fontFeatures) { feature._encode(ByteData.view(encodedFontFeatures.buffer, byteOffset, FontFeature._kEncodedSize)); byteOffset += FontFeature._kEncodedSize; } } ByteData? encodedFontVariations; final List<FontVariation>? fontVariations = style._fontVariations; if (fontVariations != null) { encodedFontVariations = ByteData(fontVariations.length * FontVariation._kEncodedSize); int byteOffset = 0; for (final FontVariation variation in fontVariations) { variation._encode(ByteData.view(encodedFontVariations.buffer, byteOffset, FontVariation._kEncodedSize)); byteOffset += FontVariation._kEncodedSize; } } _pushStyle( encoded, fullFontFamilies, style._fontSize ?? 0, style._letterSpacing ?? 0, style._wordSpacing ?? 0, style._height ?? 0, style._decorationThickness ?? 
0, _encodeLocale(style._locale), style._background?._objects, style._background?._data, style._foreground?._objects, style._foreground?._data, Shadow._encodeShadows(style._shadows), encodedFontFeatures, encodedFontVariations, ); } @Native< Void Function( Pointer<Void>, Handle, Handle, Double, Double, Double, Double, Double, Handle, Handle, Handle, Handle, Handle, Handle, Handle, Handle)>(symbol: 'ParagraphBuilder::pushStyle') external void _pushStyle( Int32List encoded, List<Object?> fontFamilies, double fontSize, double letterSpacing, double wordSpacing, double height, double decorationThickness, String locale, List<Object?>? backgroundObjects, ByteData? backgroundData, List<Object?>? foregroundObjects, ByteData? foregroundData, ByteData shadowsData, ByteData? fontFeaturesData, ByteData? fontVariationsData, ); static String _encodeLocale(Locale? locale) => locale?.toString() ?? ''; @override @Native<Void Function(Pointer<Void>)>(symbol: 'ParagraphBuilder::pop', isLeaf: true) external void pop(); @override void addText(String text) { final String? error = _addText(text); if (error != null) { throw ArgumentError(error); } } @Native<Handle Function(Pointer<Void>, Handle)>(symbol: 'ParagraphBuilder::addText') external String? _addText(String text); @override void addPlaceholder(double width, double height, PlaceholderAlignment alignment, { double scale = 1.0, double? baselineOffset, TextBaseline? baseline, }) { // Require a baseline to be specified if using a baseline-based alignment. assert(!(alignment == PlaceholderAlignment.aboveBaseline || alignment == PlaceholderAlignment.belowBaseline || alignment == PlaceholderAlignment.baseline) || baseline != null); // Default the baselineOffset to height if null. This will place the placeholder // fully above the baseline, similar to [PlaceholderAlignment.aboveBaseline]. baselineOffset = baselineOffset ?? height; _addPlaceholder(width * scale, height * scale, alignment.index, baselineOffset * scale, (baseline ?? TextBaseline.alphabetic).index); _placeholderCount++; _placeholderScales.add(scale); } @Native<Void Function(Pointer<Void>, Double, Double, Uint32, Double, Uint32)>(symbol: 'ParagraphBuilder::addPlaceholder') external void _addPlaceholder(double width, double height, int alignment, double baselineOffset, int baseline); @override Paragraph build() { final _NativeParagraph paragraph = _NativeParagraph._(); _build(paragraph); return paragraph; } @Native<Void Function(Pointer<Void>, Handle)>(symbol: 'ParagraphBuilder::build') external void _build(_NativeParagraph outParagraph); @override String toString() => 'ParagraphBuilder'; } /// Loads a font from a buffer and makes it available for rendering text. /// /// * `list`: A list of bytes containing the font file. /// * `fontFamily`: The family name used to identify the font in text styles. /// If this is not provided, then the family name will be extracted from the font file. Future<void> loadFontFromList(Uint8List list, {String? fontFamily}) { return _futurize( (_Callback<void> callback) { _loadFontFromList(list, callback, fontFamily ?? 
''); return null; } ).then((_) => _sendFontChangeMessage()); } final ByteData _fontChangeMessage = utf8.encode( json.encode(<String, Object?>{'type': 'fontsChange'}) ).buffer.asByteData(); FutureOr<void> _sendFontChangeMessage() async { const String kSystemChannelName = 'flutter/system'; if (PlatformDispatcher.instance.onPlatformMessage != null) { _invoke3<String, ByteData?, PlatformMessageResponseCallback>( PlatformDispatcher.instance.onPlatformMessage, PlatformDispatcher.instance._onPlatformMessageZone, kSystemChannelName, _fontChangeMessage, (ByteData? responseData) { }, ); } else { channelBuffers.push(kSystemChannelName, _fontChangeMessage, (ByteData? responseData) { }); } } @Native<Void Function(Handle, Handle, Handle)>(symbol: 'FontCollection::LoadFontFromList') external void _loadFontFromList(Uint8List list, _Callback<void> callback, String fontFamily);
engine/lib/ui/text.dart/0
{ "file_path": "engine/lib/ui/text.dart", "repo_id": "engine", "token_count": 46277 }
261
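// Illustrative sketch (not part of the engine sources above): driving the
// ParagraphBuilder API that _NativeParagraphBuilder implements, via the public
// dart:ui surface. The style values, text, and placeholder sizes are made up
// for the example.
import 'dart:ui' as ui;

ui.Paragraph buildSampleParagraph() {
  final ui.ParagraphBuilder builder = ui.ParagraphBuilder(
    ui.ParagraphStyle(fontSize: 14, maxLines: 2, ellipsis: '…'),
  );
  builder.pushStyle(ui.TextStyle(fontWeight: ui.FontWeight.bold));
  builder.addText('Hello, ');
  builder.pop();
  builder.addText('world');
  // Reserves inline space; baseline-aligned placeholders must supply a baseline.
  builder.addPlaceholder(
    24,
    24,
    ui.PlaceholderAlignment.baseline,
    baseline: ui.TextBaseline.alphabetic,
  );
  final ui.Paragraph paragraph = builder.build();
  paragraph.layout(const ui.ParagraphConstraints(width: 200));
  return paragraph;
}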
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/lib/ui/window/key_data.h" #include <cstring> namespace flutter { static_assert(sizeof(KeyData) == kBytesPerKeyField * kKeyDataFieldCount, "KeyData has the wrong size"); void KeyData::Clear() { memset(this, 0, sizeof(KeyData)); } } // namespace flutter
engine/lib/ui/window/key_data.cc/0
{ "file_path": "engine/lib/ui/window/key_data.cc", "repo_id": "engine", "token_count": 156 }
262
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_UI_WINDOW_PLATFORM_MESSAGE_RESPONSE_DART_PORT_H_ #define FLUTTER_LIB_UI_WINDOW_PLATFORM_MESSAGE_RESPONSE_DART_PORT_H_ #include "flutter/fml/message_loop.h" #include "flutter/lib/ui/window/platform_message_response.h" #include "third_party/tonic/dart_persistent_value.h" namespace flutter { /// A \ref PlatformMessageResponse that will respond over a Dart port. class PlatformMessageResponseDartPort : public PlatformMessageResponse { FML_FRIEND_MAKE_REF_COUNTED(PlatformMessageResponseDartPort); public: // Callable on any thread. void Complete(std::unique_ptr<fml::Mapping> data) override; void CompleteEmpty() override; protected: explicit PlatformMessageResponseDartPort(Dart_Port send_port, int64_t identifier, const std::string& channel); Dart_Port send_port_; int64_t identifier_; const std::string channel_; }; } // namespace flutter #endif // FLUTTER_LIB_UI_WINDOW_PLATFORM_MESSAGE_RESPONSE_DART_PORT_H_
engine/lib/ui/window/platform_message_response_dart_port.h/0
{ "file_path": "engine/lib/ui/window/platform_message_response_dart_port.h", "repo_id": "engine", "token_count": 470 }
263
# Web-specific analysis options. # # As of today the web code contains quite a few deviations from the repo-wide # analysis options due to having been migrated from google3. The ultimate goal # is to clean up our code and delete this file. include: ../../analysis_options.yaml linter: rules: avoid_dynamic_calls: false avoid_print: false avoid_setters_without_getters: false library_private_types_in_public_api: false no_default_cases: false prefer_relative_imports: false public_member_api_docs: false unawaited_futures: true use_setters_to_change_properties: false
engine/lib/web_ui/analysis_options.yaml/0
{ "file_path": "engine/lib/web_ui/analysis_options.yaml", "repo_id": "engine", "token_count": 197 }
264
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:ffi' as ffi; import 'dart:io' as io; import 'package:path/path.dart' as pathlib; import 'exceptions.dart'; /// Contains various environment variables, such as common file paths and command-line options. Environment get environment { return _environment ??= Environment(); } Environment? _environment; /// Contains various environment variables, such as common file paths and command-line options. class Environment { factory Environment() { final bool isMacosArm = ffi.Abi.current() == ffi.Abi.macosArm64; final io.File dartExecutable = io.File(io.Platform.resolvedExecutable); final io.File self = io.File.fromUri(io.Platform.script); final io.Directory engineSrcDir = self.parent.parent.parent.parent.parent; final io.Directory engineToolsDir = io.Directory(pathlib.join(engineSrcDir.path, 'flutter', 'tools')); final io.Directory outDir = io.Directory(pathlib.join(engineSrcDir.path, 'out')); final io.Directory wasmReleaseOutDir = io.Directory(pathlib.join(outDir.path, 'wasm_release')); final io.Directory wasmProfileOutDir = io.Directory(pathlib.join(outDir.path, 'wasm_profile')); final io.Directory wasmDebugUnoptOutDir = io.Directory(pathlib.join(outDir.path, 'wasm_debug_unopt')); final io.Directory hostDebugUnoptDir = io.Directory(pathlib.join(outDir.path, 'host_debug_unopt')); final io.Directory dartSdkDir = dartExecutable.parent.parent; final io.Directory webUiRootDir = io.Directory( pathlib.join(engineSrcDir.path, 'flutter', 'lib', 'web_ui')); for (final io.Directory expectedDirectory in <io.Directory>[ engineSrcDir, webUiRootDir ]) { if (!expectedDirectory.existsSync()) { throw ToolExit('$expectedDirectory does not exist.'); } } return Environment._( self: self, isMacosArm: isMacosArm, webUiRootDir: webUiRootDir, engineSrcDir: engineSrcDir, engineToolsDir: engineToolsDir, outDir: outDir, wasmReleaseOutDir: wasmReleaseOutDir, wasmProfileOutDir: wasmProfileOutDir, wasmDebugUnoptOutDir: wasmDebugUnoptOutDir, hostDebugUnoptDir: hostDebugUnoptDir, dartSdkDir: dartSdkDir, ); } Environment._({ required this.self, required this.isMacosArm, required this.webUiRootDir, required this.engineSrcDir, required this.engineToolsDir, required this.outDir, required this.wasmReleaseOutDir, required this.wasmProfileOutDir, required this.wasmDebugUnoptOutDir, required this.hostDebugUnoptDir, required this.dartSdkDir, }); /// The Dart script that's currently running. final io.File self; /// Whether the environment is a macOS arm environment. final bool isMacosArm; /// Path to the "web_ui" package sources. final io.Directory webUiRootDir; /// Path to the engine's "src" directory. final io.Directory engineSrcDir; /// Path to the engine's "tools" directory. final io.Directory engineToolsDir; /// Path to the engine's "out" directory. /// /// This is where you'll find the ninja output, such as the Dart SDK. final io.Directory outDir; /// The output directory for the wasm_release build. /// /// We build CanvasKit in release mode to reduce code size. final io.Directory wasmReleaseOutDir; /// The output directory for the wasm_profile build. final io.Directory wasmProfileOutDir; /// The output directory for the wasm_debug build. final io.Directory wasmDebugUnoptOutDir; /// The output directory for the host_debug_unopt build. final io.Directory hostDebugUnoptDir; /// The root of the Dart SDK. 
final io.Directory dartSdkDir; /// The "dart" executable file. String get dartExecutable => pathlib.join(dartSdkDir.path, 'bin', 'dart'); /// Path to dartaotruntime for running aot snapshots String get dartAotRuntimePath => pathlib.join(dartSdkDir.path, 'bin', 'dartaotruntime'); /// The "pub" executable file. String get pubExecutable => pathlib.join(dartSdkDir.path, 'bin', 'pub'); /// The path to dart2wasm pre-compiled snapshot String get dart2wasmSnapshotPath => pathlib.join(dartSdkDir.path, 'bin', 'snapshots', 'dart2wasm_product.snapshot'); /// The path to dart2wasm.dart file String get dart2wasmScriptPath => pathlib.join( engineSrcDir.path, 'third_party', 'dart', 'pkg', 'dart2wasm', 'bin', 'dart2wasm.dart' ); /// Path to where github.com/flutter/engine is checked out inside the engine workspace. io.Directory get flutterDirectory => io.Directory(pathlib.join(engineSrcDir.path, 'flutter')); io.Directory get webSdkRootDir => io.Directory(pathlib.join( flutterDirectory.path, 'web_sdk', )); /// Path to the "web_engine_tester" package. io.Directory get webEngineTesterRootDir => io.Directory(pathlib.join( webSdkRootDir.path, 'web_engine_tester', )); /// Path to the "build" directory, generated by "package:build_runner". /// /// This is where compiled test output goes. io.Directory get webUiBuildDir => io.Directory(pathlib.join( outDir.path, 'web_tests', )); io.Directory get webTestsArtifactsDir => io.Directory(pathlib.join( webUiBuildDir.path, 'artifacts', )); /// Path to the ".dart_tool" directory, generated by various Dart tools. io.Directory get webUiDartToolDir => io.Directory(pathlib.join( webUiRootDir.path, '.dart_tool', )); /// Path to the ".dart_tool" directory living under `engine/src/flutter`. /// /// This is a designated area for tool downloads which can be used by /// multiple platforms. For exampe: Flutter repo for e2e tests. io.Directory get engineDartToolDir => io.Directory(pathlib.join( engineSrcDir.path, 'flutter', '.dart_tool', )); /// Path to the "dev" directory containing engine developer tools and /// configuration files. io.Directory get webUiDevDir => io.Directory(pathlib.join( webUiRootDir.path, 'dev', )); /// Path to the "test" directory containing web engine tests. io.Directory get webUiTestDir => io.Directory(pathlib.join( webUiRootDir.path, 'test', )); /// Path to the "lib" directory containing web engine code. io.Directory get webUiLibDir => io.Directory(pathlib.join( webUiRootDir.path, 'lib', )); /// Path to the base directory to be used by Skia Gold. io.Directory get webUiSkiaGoldDirectory => io.Directory(pathlib.join( webUiDartToolDir.path, 'skia_gold', )); /// Directory to add test results which would later be uploaded to a gcs /// bucket by LUCI. io.Directory get webUiTestResultsDirectory => io.Directory(pathlib.join( webUiDartToolDir.path, 'test_results', )); }
engine/lib/web_ui/dev/environment.dart/0
{ "file_path": "engine/lib/web_ui/dev/environment.dart", "repo_id": "engine", "token_count": 2536 }
265
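// Hypothetical usage sketch (not from the repo): how a felt tool step might
// read the Environment accessors defined above. The relative import and the
// printToolPaths name are assumptions made for this example.
import 'dart:io' as io;

import 'environment.dart';

void printToolPaths() {
  // `environment` is the lazily-initialized global defined in this file.
  print('dart executable:    ${environment.dartExecutable}');
  print('dart2wasm snapshot: ${environment.dart2wasmSnapshotPath}');
  final io.Directory results = environment.webUiTestResultsDirectory;
  if (!results.existsSync()) {
    results.createSync(recursive: true);
  }
}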
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:convert'; import 'dart:io' as io; import 'package:path/path.dart' as pathlib; import 'package:pool/pool.dart'; import '../environment.dart'; import '../exceptions.dart'; import '../felt_config.dart'; import '../pipeline.dart'; import '../utils.dart' show AnsiColors, FilePath, ProcessManager, cleanup, getBundleBuildDirectory, startProcess; /// Compiles a web test bundle into web_ui/build/test_bundles/<bundle-name>. class CompileBundleStep implements PipelineStep { CompileBundleStep({ required this.bundle, required this.isVerbose, this.testFiles, }); final TestBundle bundle; final bool isVerbose; final Set<FilePath>? testFiles; // Maximum number of concurrent compile processes to use. static final int _compileConcurrency = int.parse(io.Platform.environment['FELT_COMPILE_CONCURRENCY'] ?? '8'); final Pool compilePool = Pool(_compileConcurrency); @override String get description => 'compile_bundle'; @override bool get isSafeToInterrupt => true; @override Future<void> interrupt() async { await cleanup(); } io.Directory get testSetDirectory => io.Directory( pathlib.join(environment.webUiTestDir.path, bundle.testSet.directory) ); io.Directory get outputBundleDirectory => getBundleBuildDirectory(bundle); List<FilePath> _findTestFiles() { final io.Directory testDirectory = testSetDirectory; if (!testDirectory.existsSync()) { throw ToolExit('Test directory "${testDirectory.path}" for bundle ${bundle.name.ansiMagenta} does not exist.'); } return testDirectory .listSync(recursive: true) .whereType<io.File>() .where((io.File f) => f.path.endsWith('_test.dart')) .map<FilePath>((io.File f) => FilePath.fromWebUi( pathlib.relative(f.path, from: environment.webUiRootDir.path))) .toList(); } TestCompiler _createCompiler(CompileConfiguration config) { switch (config.compiler) { case Compiler.dart2js: return Dart2JSCompiler( testSetDirectory, outputBundleDirectory, renderer: config.renderer, isVerbose: isVerbose, ); case Compiler.dart2wasm: return Dart2WasmCompiler( testSetDirectory, outputBundleDirectory, renderer: config.renderer, isVerbose: isVerbose, ); } } @override Future<void> run() async { print('Compiling test bundle ${bundle.name.ansiMagenta}...'); final List<FilePath> allTests = _findTestFiles(); final List<TestCompiler> compilers = bundle.compileConfigs.map( (CompileConfiguration config) => _createCompiler(config) ).toList(); final Stopwatch stopwatch = Stopwatch()..start(); final String testSetDirectoryPath = testSetDirectory.path; // Clear out old bundle compilations, if they exist if (outputBundleDirectory.existsSync()) { outputBundleDirectory.deleteSync(recursive: true ); } final List<Future<MapEntry<String, CompileResult>>> pendingResults = <Future<MapEntry<String, CompileResult>>>[]; for (final TestCompiler compiler in compilers) { for (final FilePath testFile in allTests) { final String relativePath = pathlib.relative( testFile.absolute, from: testSetDirectoryPath); final Future<MapEntry<String, CompileResult>> result = compilePool.withResource(() async { if (testFiles != null && !testFiles!.contains(testFile)) { return MapEntry<String, CompileResult>(relativePath, CompileResult.filtered); } final bool success = await compiler.compileTest(testFile); const int maxTestNameLength = 80; final String truncatedPath = relativePath.length > maxTestNameLength ? 
relativePath.replaceRange(maxTestNameLength - 3, relativePath.length, '...') : relativePath; final String expandedPath = truncatedPath.padRight(maxTestNameLength); io.stdout.write('\r ${success ? expandedPath.ansiGreen : expandedPath.ansiRed}'); return success ? MapEntry<String, CompileResult>(relativePath, CompileResult.success) : MapEntry<String, CompileResult>(relativePath, CompileResult.compilationFailure); }); pendingResults.add(result); } } final Map<String, CompileResult> results = Map<String, CompileResult>.fromEntries(await Future.wait(pendingResults)); stopwatch.stop(); final String resultsJson = const JsonEncoder.withIndent(' ').convert(<String, dynamic>{ 'name': bundle.name, 'directory': bundle.testSet.directory, 'builds': bundle.compileConfigs.map( (CompileConfiguration config) => <String, dynamic>{ 'compiler': config.compiler.name, 'renderer': config.renderer.name, }).toList(), 'compileTimeInMs': stopwatch.elapsedMilliseconds, 'results': results.map((String k, CompileResult v) => MapEntry<String, String>(k, v.name)), }); final io.File outputResultsFile = io.File(pathlib.join( outputBundleDirectory.path, 'results.json', )); outputResultsFile.writeAsStringSync(resultsJson); final List<String> failedFiles = <String>[]; results.forEach((String fileName, CompileResult result) { if (result == CompileResult.compilationFailure) { failedFiles.add(fileName); } }); if (failedFiles.isEmpty) { print('\rCompleted compilation of ${bundle.name.ansiMagenta} in ${stopwatch.elapsedMilliseconds}ms.'.padRight(82)); } else { print('\rThe bundle ${bundle.name.ansiMagenta} compiled with some failures in ${stopwatch.elapsedMilliseconds}ms.'); print('Compilation failures:'); for (final String fileName in failedFiles) { print(' $fileName'); } throw ToolExit('Failed to compile ${bundle.name.ansiMagenta}.'); } } } enum CompileResult { success, compilationFailure, filtered, } abstract class TestCompiler { TestCompiler( this.inputTestSetDirectory, this.outputTestBundleDirectory, { required this.renderer, required this.isVerbose, } ); final io.Directory inputTestSetDirectory; final io.Directory outputTestBundleDirectory; final Renderer renderer; final bool isVerbose; Future<bool> compileTest(FilePath input); } class Dart2JSCompiler extends TestCompiler { Dart2JSCompiler( super.inputTestSetDirectory, super.outputTestBundleDirectory, { required super.renderer, required super.isVerbose, } ); @override Future<bool> compileTest(FilePath input) async { final String relativePath = pathlib.relative( input.absolute, from: inputTestSetDirectory.path ); final String targetFileName = pathlib.join( outputTestBundleDirectory.path, '$relativePath.browser_test.dart.js', ); final io.Directory outputDirectory = io.File(targetFileName).parent; if (!outputDirectory.existsSync()) { outputDirectory.createSync(recursive: true); } final List<String> arguments = <String>[ 'compile', 'js', '--no-minify', '--disable-inlining', '--enable-asserts', // We do not want to auto-select a renderer in tests. As of today, tests // are designed to run in one specific mode. So instead, we specify the // renderer explicitly. '-DFLUTTER_WEB_AUTO_DETECT=false', '-DFLUTTER_WEB_USE_SKIA=${renderer == Renderer.canvaskit}', '-DFLUTTER_WEB_USE_SKWASM=${renderer == Renderer.skwasm}', '-O2', '-o', targetFileName, // target path. relativePath, // current path. 
]; final ProcessManager process = await startProcess( environment.dartExecutable, arguments, workingDirectory: inputTestSetDirectory.path, failureIsSuccess: true, evalOutput: !isVerbose, ); final int exitCode = await process.wait(); if (exitCode != 0) { io.stderr.writeln('ERROR: Failed to compile test $input. ' 'Dart2js exited with exit code $exitCode'); return false; } else { return true; } } } class Dart2WasmCompiler extends TestCompiler { Dart2WasmCompiler( super.inputTestSetDirectory, super.outputTestBundleDirectory, { required super.renderer, required super.isVerbose, } ); @override Future<bool> compileTest(FilePath input) async { final String relativePath = pathlib.relative( input.absolute, from: inputTestSetDirectory.path ); final String targetFileName = pathlib.join( outputTestBundleDirectory.path, '$relativePath.browser_test.dart.wasm', ); final io.Directory outputDirectory = io.File(targetFileName).parent; if (!outputDirectory.existsSync()) { outputDirectory.createSync(recursive: true); } final List<String> arguments = <String>[ environment.dart2wasmSnapshotPath, '--dart-sdk=${environment.dartSdkDir.path}', '--enable-asserts', // We do not want to auto-select a renderer in tests. As of today, tests // are designed to run in one specific mode. So instead, we specify the // renderer explicitly. '-DFLUTTER_WEB_AUTO_DETECT=false', '-DFLUTTER_WEB_USE_SKIA=${renderer == Renderer.canvaskit}', '-DFLUTTER_WEB_USE_SKWASM=${renderer == Renderer.skwasm}', if (renderer == Renderer.skwasm) ...<String>[ '--import-shared-memory', '--shared-memory-max-pages=32768', ], relativePath, // current path. targetFileName, // target path. ]; final ProcessManager process = await startProcess( environment.dartAotRuntimePath, arguments, workingDirectory: inputTestSetDirectory.path, failureIsSuccess: true, evalOutput: !isVerbose, ); final int exitCode = await process.wait(); if (exitCode != 0) { io.stderr.writeln('ERROR: Failed to compile test $input. ' 'dart2wasm exited with exit code $exitCode'); return false; } else { return true; } } }
engine/lib/web_ui/dev/steps/compile_bundle_step.dart/0
{ "file_path": "engine/lib/web_ui/dev/steps/compile_bundle_step.dart", "repo_id": "engine", "token_count": 3911 }
266
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // This is a little helper function that helps us start the fetch and compilation // of an emscripten wasm module in parallel with the fetch of its script. export const createWasmInstantiator = (url) => { const modulePromise = WebAssembly.compileStreaming(fetch(url)); return (imports, successCallback) => { (async () => { const module = await modulePromise; const instance = await WebAssembly.instantiate(module, imports); successCallback(instance, module); })(); return {}; }; }
engine/lib/web_ui/flutter_js/src/instantiate_wasm.js/0
{ "file_path": "engine/lib/web_ui/flutter_js/src/instantiate_wasm.js", "repo_id": "engine", "token_count": 201 }
267
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. part of ui; // ignore_for_file: avoid_classes_with_only_static_members /// Helper functions for Dart Plugin Registrants. class DartPluginRegistrant { /// Makes sure that the Dart Plugin Registrant has been called for this /// isolate. This can safely be executed multiple times on the same isolate, /// but should not be called on the Root isolate. static void ensureInitialized() { throw UnimplementedError( '`ensureInitialized` is not implemented on the web.'); } }
engine/lib/web_ui/lib/natives.dart/0
{ "file_path": "engine/lib/web_ui/lib/natives.dart", "repo_id": "engine", "token_count": 181 }
268
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:typed_data'; import 'package:ui/src/engine/vector_math.dart'; import 'package:ui/ui.dart' as ui; import '../color_filter.dart'; import '../util.dart'; import 'canvaskit_api.dart'; import 'image_filter.dart'; import 'native_memory.dart'; /// Owns a [SkColorFilter] and manages its lifecycle. /// /// See also: /// /// * [CkPaint.colorFilter], which uses a [ManagedSkColorFilter] to manage /// the lifecycle of its [SkColorFilter]. class ManagedSkColorFilter { ManagedSkColorFilter(CkColorFilter ckColorFilter) : colorFilter = ckColorFilter { _ref = UniqueRef<SkColorFilter>(this, colorFilter._initRawColorFilter(), 'ColorFilter'); } final CkColorFilter colorFilter; late final UniqueRef<SkColorFilter> _ref; SkColorFilter get skiaObject => _ref.nativeObject; @override int get hashCode => colorFilter.hashCode; @override bool operator ==(Object other) { if (runtimeType != other.runtimeType) { return false; } return other is ManagedSkColorFilter && other.colorFilter == colorFilter; } @override String toString() => colorFilter.toString(); } /// CanvasKit implementation of [ui.ColorFilter]. abstract class CkColorFilter implements CkManagedSkImageFilterConvertible { const CkColorFilter(); /// Converts this color filter into an image filter. /// /// Passes the ownership of the returned [SkImageFilter] to the caller. It is /// the caller's responsibility to manage the lifecycle of the returned value. SkImageFilter initRawImageFilter() { final SkColorFilter skColorFilter = _initRawColorFilter(); final SkImageFilter result = canvasKit.ImageFilter.MakeColorFilter(skColorFilter, null); // The underlying SkColorFilter is now owned by the SkImageFilter, so we // need to drop the reference to allow it to be collected. skColorFilter.delete(); return result; } /// Creates a Skia object based on the properties of this color filter. /// /// Passes the ownership of the returned [SkColorFilter] to the caller. It is /// the caller's responsibility to manage the lifecycle of the returned value. SkColorFilter _initRawColorFilter(); @override void imageFilter(SkImageFilterBorrow borrow) { // Since ColorFilter has a const constructor it cannot store dynamically // created Skia objects. Therefore a new SkImageFilter is created every time // it's used. However, once used it's no longer needed, so it's deleted // immediately to free memory. final SkImageFilter skImageFilter = initRawImageFilter(); borrow(skImageFilter); skImageFilter.delete(); } @override Matrix4 get transform => Matrix4.identity(); } /// A reusable identity transform matrix. /// /// WARNING: DO NOT MUTATE THIS MATRIX! It is a shared global singleton. Float32List _identityTransform = _computeIdentityTransform(); Float32List _computeIdentityTransform() { final Float32List result = Float32List(20); const List<int> translationIndices = <int>[0, 6, 12, 18]; for (final int i in translationIndices) { result[i] = 1; } _identityTransform = result; return result; } SkColorFilter createSkColorFilterFromColorAndBlendMode(ui.Color color, ui.BlendMode blendMode) { /// Return the identity matrix when the color opacity is 0. Replicates /// effect of applying no filter if (color.opacity == 0) { return canvasKit.ColorFilter.MakeMatrix(_identityTransform); } final SkColorFilter? 
filter = canvasKit.ColorFilter.MakeBlend( toSharedSkColor1(color), toSkBlendMode(blendMode), ); if (filter == null) { throw ArgumentError('Invalid parameters for blend mode ColorFilter'); } return filter; } class CkBlendModeColorFilter extends CkColorFilter { const CkBlendModeColorFilter(this.color, this.blendMode); final ui.Color color; final ui.BlendMode blendMode; @override SkColorFilter _initRawColorFilter() { return createSkColorFilterFromColorAndBlendMode(color, blendMode); } @override int get hashCode => Object.hash(color, blendMode); @override bool operator ==(Object other) { if (runtimeType != other.runtimeType) { return false; } return other is CkBlendModeColorFilter && other.color == color && other.blendMode == blendMode; } @override String toString() => 'ColorFilter.mode($color, $blendMode)'; } class CkMatrixColorFilter extends CkColorFilter { const CkMatrixColorFilter(this.matrix); final List<double> matrix; /// Flutter documentation says the translation column of the color matrix /// is specified in unnormalized 0..255 space. CanvasKit expects the /// translation values to be normalized to 0..1 space. /// /// See [https://api.flutter.dev/flutter/dart-ui/ColorFilter/ColorFilter.matrix.html]. Float32List get _normalizedMatrix { assert(matrix.length == 20, 'Color Matrix must have 20 entries.'); final Float32List result = Float32List(20); const List<int> translationIndices = <int>[4, 9, 14, 19]; for (int i = 0; i < 20; i++) { if (translationIndices.contains(i)) { result[i] = matrix[i] / 255.0; } else { result[i] = matrix[i]; } } return result; } @override SkColorFilter _initRawColorFilter() { return canvasKit.ColorFilter.MakeMatrix(_normalizedMatrix); } @override int get hashCode => Object.hashAll(matrix); @override bool operator ==(Object other) { return runtimeType == other.runtimeType && other is CkMatrixColorFilter && listEquals<double>(matrix, other.matrix); } @override String toString() => 'ColorFilter.matrix($matrix)'; } class CkLinearToSrgbGammaColorFilter extends CkColorFilter { const CkLinearToSrgbGammaColorFilter(); @override SkColorFilter _initRawColorFilter() => canvasKit.ColorFilter.MakeLinearToSRGBGamma(); @override bool operator ==(Object other) => runtimeType == other.runtimeType; @override int get hashCode => runtimeType.hashCode; @override String toString() => 'ColorFilter.linearToSrgbGamma()'; } class CkSrgbToLinearGammaColorFilter extends CkColorFilter { const CkSrgbToLinearGammaColorFilter(); @override SkColorFilter _initRawColorFilter() => canvasKit.ColorFilter.MakeSRGBToLinearGamma(); @override bool operator ==(Object other) => runtimeType == other.runtimeType; @override int get hashCode => runtimeType.hashCode; @override String toString() => 'ColorFilter.srgbToLinearGamma()'; } class CkComposeColorFilter extends CkColorFilter { const CkComposeColorFilter(this.outer, this.inner); final ManagedSkColorFilter? outer; final ManagedSkColorFilter inner; @override SkColorFilter _initRawColorFilter() => canvasKit.ColorFilter.MakeCompose(outer?.skiaObject, inner.skiaObject); @override bool operator ==(Object other) { if (other is! CkComposeColorFilter) { return false; } final CkComposeColorFilter filter = other; return filter.outer == outer && filter.inner == inner; } @override int get hashCode => Object.hash(outer, inner); @override String toString() => 'ColorFilter.compose($outer, $inner)'; } /// Convert the current [ColorFilter] to a CkColorFilter. 
/// /// This workaround allows ColorFilter to be const constructible and /// efficiently comparable, so that widgets can check for ColorFilter equality to /// avoid repainting. CkColorFilter? createCkColorFilter(EngineColorFilter colorFilter) { switch (colorFilter.type) { case ColorFilterType.mode: if (colorFilter.color == null || colorFilter.blendMode == null) { return null; } return CkBlendModeColorFilter(colorFilter.color!, colorFilter.blendMode!); case ColorFilterType.matrix: if (colorFilter.matrix == null) { return null; } assert(colorFilter.matrix!.length == 20, 'Color Matrix must have 20 entries.'); return CkMatrixColorFilter(colorFilter.matrix!); case ColorFilterType.linearToSrgbGamma: return const CkLinearToSrgbGammaColorFilter(); case ColorFilterType.srgbToLinearGamma: return const CkSrgbToLinearGammaColorFilter(); default: throw StateError('Unknown mode ${colorFilter.type} for ColorFilter.'); } }
engine/lib/web_ui/lib/src/engine/canvaskit/color_filter.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/canvaskit/color_filter.dart", "repo_id": "engine", "token_count": 2774 }
269
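// Standalone restatement of the normalization documented in
// CkMatrixColorFilter above: the translation column of the 5x4 color matrix
// (indices 4, 9, 14, 19) arrives in 0..255 space and is divided by 255 before
// being passed to CanvasKit's MakeMatrix. Illustrative only.
import 'dart:typed_data';

Float32List normalizeColorMatrix(List<double> matrix) {
  assert(matrix.length == 20, 'Color Matrix must have 20 entries.');
  const List<int> translationIndices = <int>[4, 9, 14, 19];
  final Float32List result = Float32List(20);
  for (int i = 0; i < 20; i++) {
    result[i] = translationIndices.contains(i) ? matrix[i] / 255.0 : matrix[i];
  }
  return result;
}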
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:meta/meta.dart'; import 'package:ui/src/engine/util.dart'; import 'package:ui/ui.dart' as ui; import '../../engine.dart' show PlatformViewManager; import '../vector_math.dart'; import 'embedded_views.dart'; import 'picture.dart'; import 'rasterizer.dart'; /// If `true`, draws the computed bounds for platform views and pictures to /// help debug issues with the overlay optimization. bool debugOverlayOptimizationBounds = false; /// A [Rendering] is a concrete description of how a Flutter scene will be /// rendered in a web browser. /// /// A [Rendering] is a sequence containing two types of entities: /// * Render canvases: which contain rasterized CkPictures, and /// * Platform views: being HTML content that is to be composited along with /// the Flutter content. class Rendering { final List<RenderingEntity> entities = <RenderingEntity>[]; void add(RenderingEntity entity) { entities.add(entity); } /// Returns [true] if this is equibalent to [other] for use in rendering. bool equalsForRendering(Rendering other) { if (other.entities.length != entities.length) { return false; } for (int i = 0; i < entities.length; i++) { if (!entities[i].equalsForRendering(other.entities[i])) { return false; } } return true; } /// A list of just the canvases in the rendering. List<RenderingRenderCanvas> get canvases => entities.whereType<RenderingRenderCanvas>().toList(); @override String toString() => entities.toString(); } /// An element of a [Rendering]. Either a render canvas or a platform view. sealed class RenderingEntity { /// Returns [true] if this entity is equal to [other] for use in a rendering. /// /// For example, all [RenderingRenderCanvas] objects are equal to each other /// for purposes of rendering since any canvas in that place in the rendering /// will be equivalent. Platform views are only equal if they are for the same /// view id. bool equalsForRendering(RenderingEntity other); } class RenderingRenderCanvas extends RenderingEntity { RenderingRenderCanvas(); /// The [pictures] which should be rendered in this canvas. final List<CkPicture> pictures = <CkPicture>[]; /// The [DisplayCanvas] that will be used to display [pictures]. /// /// This is set by the view embedder. DisplayCanvas? displayCanvas; /// Adds the [picture] to the pictures that should be rendered in this canvas. void add(CkPicture picture) { pictures.add(picture); } @override bool equalsForRendering(RenderingEntity other) { return other is RenderingRenderCanvas; } @override String toString() { return '$RenderingRenderCanvas(${pictures.length} pictures)'; } } /// A platform view to be rendered. class RenderingPlatformView extends RenderingEntity { RenderingPlatformView(this.viewId); /// The [viewId] of the platform view to render. final int viewId; @override bool equalsForRendering(RenderingEntity other) { return other is RenderingPlatformView && other.viewId == viewId; } @override String toString() { return '$RenderingPlatformView($viewId)'; } /// The bounds that were computed for this platform view when creating the /// optimized rendering. This is only set in debug mode. ui.Rect? debugComputedBounds; } // Computes the bounds of the platform view from its associated parameters. 
@visibleForTesting ui.Rect computePlatformViewBounds(EmbeddedViewParams params) { ui.Rect currentClipBounds = ui.Rect.largest; Matrix4 currentTransform = Matrix4.identity(); for (final Mutator mutator in params.mutators.reversed) { switch (mutator.type) { case MutatorType.clipRect: final ui.Rect transformedClipBounds = transformRectWithMatrix(currentTransform, mutator.rect!); currentClipBounds = currentClipBounds.intersect(transformedClipBounds); case MutatorType.clipRRect: final ui.Rect transformedClipBounds = transformRectWithMatrix(currentTransform, mutator.rrect!.outerRect); currentClipBounds = currentClipBounds.intersect(transformedClipBounds); case MutatorType.clipPath: final ui.Rect transformedClipBounds = transformRectWithMatrix( currentTransform, mutator.path!.getBounds()); currentClipBounds.intersect(transformedClipBounds); case MutatorType.transform: currentTransform = currentTransform.multiplied(mutator.matrix!); case MutatorType.opacity: // Doesn't effect bounds. continue; } } // The width and height are in physical pixels already, so apply the inverse // scale since the transform already applied the scaling. final ui.Rect rawBounds = ui.Rect.fromLTWH( params.offset.dx, params.offset.dy, params.size.width, params.size.height, ); final ui.Rect transformedBounds = transformRectWithMatrix(currentTransform, rawBounds); return transformedBounds.intersect(currentClipBounds); } /// Returns the optimized [Rendering] for a sequence of [pictures] and /// [platformViews]. /// /// [paramsForViews] is required to compute the bounds of the platform views. Rendering createOptimizedRendering( List<CkPicture> pictures, List<int> platformViews, Map<int, EmbeddedViewParams> paramsForViews, ) { assert(pictures.length == platformViews.length + 1); final Rendering result = Rendering(); // The first render canvas is required due to the pseudo-platform view "V_0" // which is defined as a platform view that comes before all Flutter drawing // commands and intersects with everything. RenderingRenderCanvas currentRenderCanvas = RenderingRenderCanvas(); // This line essentially unwinds the first iteration of the following loop. // Since "V_0" intersects with all subsequent pictures, then the first picture // it intersects with is "P_0", so we create a new render canvas and add "P_0" // to it. if (!pictures[0].cullRect.isEmpty) { currentRenderCanvas.add(pictures[0]); } for (int i = 0; i < platformViews.length; i++) { final RenderingPlatformView platformView = RenderingPlatformView(platformViews[i]); if (PlatformViewManager.instance.isVisible(platformViews[i])) { final ui.Rect platformViewBounds = computePlatformViewBounds(paramsForViews[platformViews[i]]!); if (debugOverlayOptimizationBounds) { platformView.debugComputedBounds = platformViewBounds; } bool intersectsWithCurrentPictures = false; for (final CkPicture picture in currentRenderCanvas.pictures) { if (picture.cullRect.overlaps(platformViewBounds)) { intersectsWithCurrentPictures = true; break; } } if (intersectsWithCurrentPictures) { result.add(currentRenderCanvas); currentRenderCanvas = RenderingRenderCanvas(); } } result.add(platformView); if (!pictures[i + 1].cullRect.isEmpty) { currentRenderCanvas.add(pictures[i + 1]); } } if (currentRenderCanvas.pictures.isNotEmpty) { result.add(currentRenderCanvas); } return result; }
engine/lib/web_ui/lib/src/engine/canvaskit/overlay_scene_optimizer.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/canvaskit/overlay_scene_optimizer.dart", "repo_id": "engine", "token_count": 2411 }
270
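// Simplified sketch of the grouping rule createOptimizedRendering applies
// above: pictures accumulate into the current render canvas until a visible
// platform view's bounds overlap one of them, at which point the canvas is
// flushed and a new one begins. Inputs are reduced to plain rect lists and the
// empty-cull-rect and visibility checks are omitted; the invariant
// pictureBounds.length == viewBounds.length + 1 matches the assert in the
// real code.
import 'dart:ui' as ui;

List<List<int>> groupPictureIndices(
    List<ui.Rect> pictureBounds, List<ui.Rect> viewBounds) {
  assert(pictureBounds.length == viewBounds.length + 1);
  final List<List<int>> canvases = <List<int>>[];
  List<int> current = <int>[0];
  for (int i = 0; i < viewBounds.length; i++) {
    final bool overlapsCurrent =
        current.any((int p) => pictureBounds[p].overlaps(viewBounds[i]));
    if (overlapsCurrent) {
      canvases.add(current);
      current = <int>[];
    }
    // The platform view itself is emitted between canvases in the real rendering.
    current.add(i + 1);
  }
  if (current.isNotEmpty) {
    canvases.add(current);
  }
  return canvases;
}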
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:typed_data'; import 'package:ui/ui.dart' as ui; import 'canvaskit_api.dart'; import 'native_memory.dart'; class CkVertices implements ui.Vertices { factory CkVertices( ui.VertexMode mode, List<ui.Offset> positions, { List<ui.Offset>? textureCoordinates, List<ui.Color>? colors, List<int>? indices, }) { if (textureCoordinates != null && textureCoordinates.length != positions.length) { throw ArgumentError( '"positions" and "textureCoordinates" lengths must match.'); } if (colors != null && colors.length != positions.length) { throw ArgumentError('"positions" and "colors" lengths must match.'); } if (indices != null && indices.any((int i) => i < 0 || i >= positions.length)) { throw ArgumentError( '"indices" values must be valid indices in the positions list.'); } return CkVertices._( toSkVertexMode(mode), toFlatSkPoints(positions), textureCoordinates != null ? toFlatSkPoints(textureCoordinates) : null, colors != null ? toFlatColors(colors) : null, indices != null ? toUint16List(indices) : null, ); } factory CkVertices.raw( ui.VertexMode mode, Float32List positions, { Float32List? textureCoordinates, Int32List? colors, Uint16List? indices, }) { if (textureCoordinates != null && textureCoordinates.length != positions.length) { throw ArgumentError( '"positions" and "textureCoordinates" lengths must match.'); } if (colors != null && colors.length * 2 != positions.length) { throw ArgumentError('"positions" and "colors" lengths must match.'); } if (indices != null && indices.any((int i) => i < 0 || i >= positions.length)) { throw ArgumentError( '"indices" values must be valid indices in the positions list.'); } Uint32List? unsignedColors; if (colors != null) { unsignedColors = colors.buffer.asUint32List(colors.offsetInBytes, colors.length); } return CkVertices._( toSkVertexMode(mode), positions, textureCoordinates, unsignedColors, indices, ); } CkVertices._( this._mode, this._positions, this._textureCoordinates, this._colors, this._indices, ) { final SkVertices skVertices = canvasKit.MakeVertices( _mode, _positions, _textureCoordinates, _colors, _indices, ); _ref = UniqueRef<SkVertices>(this, skVertices, 'Vertices'); } final SkVertexMode _mode; final Float32List _positions; final Float32List? _textureCoordinates; final Uint32List? _colors; final Uint16List? _indices; late final UniqueRef<SkVertices> _ref; SkVertices get skiaObject => _ref.nativeObject; @override void dispose() { _ref.dispose(); } @override bool get debugDisposed => _ref.isDisposed; }
engine/lib/web_ui/lib/src/engine/canvaskit/vertices.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/canvaskit/vertices.dart", "repo_id": "engine", "token_count": 1206 }
271
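// Illustration of the length invariants CkVertices.raw enforces above, via the
// public dart:ui constructor: positions hold x,y pairs, colors (when given)
// need one entry per point, and indices must reference valid points.
import 'dart:typed_data';
import 'dart:ui' as ui;

ui.Vertices makeTriangle() {
  final Float32List positions = Float32List.fromList(<double>[
    0, 0, // point 0
    100, 0, // point 1
    50, 80, // point 2
  ]);
  // 3 colors for 6 position floats satisfies colors.length * 2 == positions.length.
  final Int32List colors =
      Int32List.fromList(<int>[0xFFFF0000, 0xFF00FF00, 0xFF0000FF]);
  final Uint16List indices = Uint16List.fromList(<int>[0, 1, 2]);
  return ui.Vertices.raw(
    ui.VertexMode.triangles,
    positions,
    colors: colors,
    indices: indices,
  );
}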
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:ui/ui.dart' as ui; import '../dom.dart'; import '../svg.dart'; import '../util.dart'; import 'path_to_svg_clip.dart'; import 'surface.dart'; import 'surface_stats.dart'; /// Mixin used by surfaces that clip their contents using an overflowing DOM /// element. mixin _DomClip on PersistedContainerSurface { /// The dedicated child container element that's separate from the /// [rootElement] is used to compensate for the coordinate system shift /// introduced by the [rootElement] translation. @override DomElement? get childContainer => _childContainer; DomElement? _childContainer; @override void adoptElements(_DomClip oldSurface) { super.adoptElements(oldSurface); _childContainer = oldSurface._childContainer; oldSurface._childContainer = null; } @override DomElement createElement() { final DomElement element = defaultCreateElement('flt-clip'); _childContainer = createDomElement('flt-clip-interior'); if (debugExplainSurfaceStats) { // This creates an additional interior element. Count it too. surfaceStatsFor(this).allocatedDomNodeCount++; } _childContainer!.style.position = 'absolute'; element.append(_childContainer!); return element; } @override void discard() { super.discard(); // Do not detach the child container from the root. It is permanently // attached. The elements are reused together and are detached from the DOM // together. _childContainer = null; } void applyOverflow(DomElement element, ui.Clip? clipBehaviour) { if (!debugShowClipLayers) { // Hide overflow in production mode. When debugging we want to see the // clipped picture in full. if (clipBehaviour != ui.Clip.none) { element.style ..overflow = 'hidden' ..zIndex = '0'; } } else { // Display the outline of the clipping region. When debugShowClipLayers is // `true` we don't hide clip overflow (see above). This outline helps // visualizing clip areas. element.style.boxShadow = 'inset 0 0 10px green'; } } } /// A surface that creates a rectangular clip. class PersistedClipRect extends PersistedContainerSurface with _DomClip implements ui.ClipRectEngineLayer { PersistedClipRect(PersistedClipRect? super.oldLayer, this.rect, this.clipBehavior); final ui.Clip? clipBehavior; final ui.Rect rect; @override void recomputeTransformAndClip() { transform = parent!.transform; if (clipBehavior != ui.Clip.none) { localClipBounds = rect; } else { localClipBounds = null; } projectedClip = null; } @override DomElement createElement() { return super.createElement()..setAttribute('clip-type', 'rect'); } @override void apply() { rootElement!.style ..left = '${rect.left}px' ..top = '${rect.top}px' ..width = '${rect.right - rect.left}px' ..height = '${rect.bottom - rect.top}px'; applyOverflow(rootElement!, clipBehavior); // Translate the child container in the opposite direction to compensate for // the shift in the coordinate system introduced by the translation of the // rootElement. Clipping in Flutter has no effect on the coordinate system. childContainer!.style ..left = '${-rect.left}px' ..top = '${-rect.top}px'; } @override void update(PersistedClipRect oldSurface) { super.update(oldSurface); if (rect != oldSurface.rect || clipBehavior != oldSurface.clipBehavior) { localClipBounds = null; apply(); } } @override bool get isClipping => true; } /// A surface that creates a rounded rectangular clip. 
class PersistedClipRRect extends PersistedContainerSurface with _DomClip implements ui.ClipRRectEngineLayer { PersistedClipRRect(ui.EngineLayer? oldLayer, this.rrect, this.clipBehavior) : super(oldLayer as PersistedSurface?); final ui.RRect rrect; // TODO(yjbanov): can this be controlled in the browser? final ui.Clip? clipBehavior; @override void recomputeTransformAndClip() { transform = parent!.transform; if (clipBehavior != ui.Clip.none) { localClipBounds = rrect.outerRect; } else { localClipBounds = null; } projectedClip = null; } @override DomElement createElement() { return super.createElement()..setAttribute('clip-type', 'rrect'); } @override void apply() { final DomCSSStyleDeclaration style = rootElement!.style; style ..left = '${rrect.left}px' ..top = '${rrect.top}px' ..width = '${rrect.width}px' ..height = '${rrect.height}px' ..borderTopLeftRadius = '${rrect.tlRadiusX}px' ..borderTopRightRadius = '${rrect.trRadiusX}px' ..borderBottomRightRadius = '${rrect.brRadiusX}px' ..borderBottomLeftRadius = '${rrect.blRadiusX}px'; applyOverflow(rootElement!, clipBehavior); // Translate the child container in the opposite direction to compensate for // the shift in the coordinate system introduced by the translation of the // rootElement. Clipping in Flutter has no effect on the coordinate system. childContainer!.style ..left = '${-rrect.left}px' ..top = '${-rrect.top}px'; } @override void update(PersistedClipRRect oldSurface) { super.update(oldSurface); if (rrect != oldSurface.rrect || clipBehavior != oldSurface.clipBehavior) { localClipBounds = null; apply(); } } @override bool get isClipping => true; } /// A surface that clips it's children. class PersistedClipPath extends PersistedContainerSurface implements ui.ClipPathEngineLayer { PersistedClipPath( PersistedClipPath? super.oldLayer, this.clipPath, this.clipBehavior); final ui.Path clipPath; final ui.Clip clipBehavior; DomElement? _clipElement; @override DomElement createElement() { return defaultCreateElement('flt-clippath'); } @override void recomputeTransformAndClip() { super.recomputeTransformAndClip(); if (clipBehavior != ui.Clip.none) { localClipBounds ??= clipPath.getBounds(); } else { localClipBounds = null; } } @override void apply() { _clipElement?.remove(); _clipElement = createSvgClipDef(childContainer!, clipPath); childContainer!.append(_clipElement!); } @override void update(PersistedClipPath oldSurface) { super.update(oldSurface); if (oldSurface.clipPath != clipPath) { localClipBounds = null; oldSurface._clipElement?.remove(); apply(); } else { _clipElement = oldSurface._clipElement; } oldSurface._clipElement = null; } @override void discard() { _clipElement?.remove(); _clipElement = null; super.discard(); } @override bool get isClipping => true; } /// Creates an svg clipPath and applies it to [element]. SVGSVGElement createSvgClipDef(DomElement element, ui.Path clipPath) { final ui.Rect pathBounds = clipPath.getBounds(); final SVGSVGElement svgClipPath = pathToSvgClipPath(clipPath, scaleX: 1.0 / pathBounds.right, scaleY: 1.0 / pathBounds.bottom); setClipPath(element, createSvgClipUrl()); // We need to set width and height for the clipElement to cover the // bounds of the path since browsers such as Safari and Edge // seem to incorrectly intersect the element bounding rect with // the clip path. Chrome and Firefox don't perform intersect instead they // use the path itself as source of truth. element.style ..width = '${pathBounds.right}px' ..height = '${pathBounds.bottom}px'; return svgClipPath; }
engine/lib/web_ui/lib/src/engine/html/clip.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/html/clip.dart", "repo_id": "engine", "token_count": 2751 }
272
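// Illustrative helper (not in the engine): the CSS values PersistedClipRect.apply
// writes above for a given rect. The outer clip element takes the rect's
// geometry, and the interior container gets the negated offset so child
// painting coordinates are unaffected by the clip's translation.
import 'dart:ui' as ui;

Map<String, String> clipRectCss(ui.Rect rect) {
  return <String, String>{
    'clip.left': '${rect.left}px',
    'clip.top': '${rect.top}px',
    'clip.width': '${rect.right - rect.left}px',
    'clip.height': '${rect.bottom - rect.top}px',
    'interior.left': '${-rect.left}px',
    'interior.top': '${-rect.top}px',
  };
}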
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:math' as math; import 'dart:typed_data'; import 'conic.dart'; import 'cubic.dart'; import 'path_iterator.dart'; import 'path_ref.dart'; import 'path_utils.dart'; /// Computes winding number and onCurveCount for a path and point. class PathWinding { PathWinding(this.pathRef, this.x, this.y) { _walkPath(); } final PathRef pathRef; final double x; final double y; int _w = 0; int _onCurveCount = 0; int get w => _w; int get onCurveCount => _onCurveCount; /// Buffer used for max(iterator result, chopped 3 cubics). final Float32List _buffer = Float32List(8 + 10); /// Iterates through path and computes winding. void _walkPath() { final PathIterator iter = PathIterator(pathRef, true); int verb; while ((verb = iter.next(_buffer)) != SPath.kDoneVerb) { switch (verb) { case SPath.kMoveVerb: case SPath.kCloseVerb: break; case SPath.kLineVerb: _computeLineWinding(); case SPath.kQuadVerb: _computeQuadWinding(); case SPath.kConicVerb: _computeConicWinding(pathRef.conicWeights![iter.conicWeightIndex]); case SPath.kCubicVerb: _computeCubicWinding(); } } } void _computeLineWinding() { final double x0 = _buffer[0]; final double startY = _buffer[1]; double y0 = startY; final double x1 = _buffer[2]; final double endY = _buffer[3]; double y1 = endY; final double dy = y1 - y0; int dir = 1; // Swap so that y0 <= y1 holds. if (y0 > y1) { final double temp = y0; y0 = y1; y1 = temp; dir = -1; } // If point is outside top/bottom bounds, winding is 0. if (y < y0 || y > y1) { return; } if (_checkOnCurve(x, y, x0, startY, x1, endY)) { _onCurveCount++; return; } if (y == y1) { return; } // c = ax*by − ay*bx where a is the line and b is line formed from start // to the given point(x,y). final double crossProduct = (x1 - x0) * (y - startY) - dy * (x - x0); if (crossProduct == 0) { // zero cross means the point is on the line, and since the case where // y of the query point is at the end point is handled above, we can be // sure that we're on the line (excluding the end point) here. if (x != x1 || y != endY) { _onCurveCount++; } dir = 0; } else if (SPath.scalarSignedAsInt(crossProduct) == dir) { // Direction of cross product and line the same. dir = 0; } _w += dir; } // Check if point starts the line, handle special case for horizontal lines // where and point except the end point is considered on curve. static bool _checkOnCurve(double x, double y, double startX, double startY, double endX, double endY) { if (startY == endY) { // Horizontal line. return SPath.between(startX, x, endX) && x != endX; } else { return x == startX && y == startY; } } void _computeQuadWinding() { // Check if we need to chop quadratic at extrema to compute 2 separate // windings. 
int n = 0; if (!_isQuadMonotonic(_buffer)) { n = _chopQuadAtExtrema(_buffer); } int winding = _computeMonoQuadWinding( _buffer[0], _buffer[1], _buffer[2], _buffer[3], _buffer[4], _buffer[5]); if (n > 0) { winding += _computeMonoQuadWinding(_buffer[4], _buffer[5], _buffer[6], _buffer[7], _buffer[8], _buffer[9]); } _w += winding; } int _computeMonoQuadWinding( double x0, double y0, double x1, double y1, double x2, double y2) { int dir = 1; final double startY = y0; final double endY = y2; if (y0 > y2) { final double temp = y0; y0 = y2; y2 = temp; dir = -1; } if (y < y0 || y > y2) { return 0; } if (_checkOnCurve(x, y, x0, startY, x2, endY)) { _onCurveCount++; return 0; } if (y == y2) { return 0; } final QuadRoots quadRoots = QuadRoots(); final int n = quadRoots.findRoots( startY - 2 * y1 + endY, 2 * (y1 - startY), startY - y); assert(n <= 1); double xt; if (0 == n) { // zero roots are returned only when y0 == y xt = dir == 1 ? x0 : x2; } else { final double t = quadRoots.root0!; final double C = x0; final double A = x2 - 2 * x1 + C; final double B = 2 * (x1 - C); xt = polyEval(A, B, C, t); } if (SPath.nearlyEqual(xt, x)) { if (x != x2 || y != endY) { // don't test end points; they're start points _onCurveCount += 1; return 0; } } return xt < x ? dir : 0; } /// Chops a non-monotonic quadratic curve, returns subdivisions and writes /// result into [buffer]. static int _chopQuadAtExtrema(Float32List buffer) { final double x0 = buffer[0]; final double y0 = buffer[1]; final double x1 = buffer[2]; final double y1 = buffer[3]; final double x2 = buffer[4]; final double y2 = buffer[5]; final double? tValueAtExtrema = validUnitDivide(y0 - y1, y0 - y1 - y1 + y2); if (tValueAtExtrema != null) { // Chop quad at t value by interpolating along p0-p1 and p1-p2. final double p01x = x0 + (tValueAtExtrema * (x1 - x0)); final double p01y = y0 + (tValueAtExtrema * (y1 - y0)); final double p12x = x1 + (tValueAtExtrema * (x2 - x1)); final double p12y = y1 + (tValueAtExtrema * (y2 - y1)); final double cx = p01x + (tValueAtExtrema * (p12x - p01x)); final double cy = p01y + (tValueAtExtrema * (p12y - p01y)); buffer[2] = p01x; buffer[3] = p01y; buffer[4] = cx; buffer[5] = cy; buffer[6] = p12x; buffer[7] = p12y; buffer[8] = x2; buffer[9] = y2; return 1; } // if we get here, we need to force output to be monotonic, even though // we couldn't compute a unit divide value (probably underflow). buffer[3] = (y0 - y1).abs() < (y1 - y2).abs() ? y0 : y2; return 0; } static bool _isQuadMonotonic(Float32List quad) { final double y0 = quad[1]; final double y1 = quad[3]; final double y2 = quad[5]; if (y0 == y1) { return true; } if (y0 < y1) { return y1 <= y2; } else { return y1 >= y2; } } void _computeConicWinding(double weight) { final Conic conic = Conic(_buffer[0], _buffer[1], _buffer[2], _buffer[3], _buffer[4], _buffer[5], weight); // If the data points are very large, the conic may not be monotonic but may also // fail to chop. Then, the chopper does not split the original conic in two. 
final bool isMono = _isQuadMonotonic(_buffer); final List<Conic> conics = <Conic>[]; conic.chopAtYExtrema(conics); _computeMonoConicWinding(conics[0]); if (!isMono && conics.length == 2) { _computeMonoConicWinding(conics[1]); } } void _computeMonoConicWinding(Conic conic) { double y0 = conic.p0y; double y2 = conic.p2y; int dir = 1; if (y0 > y2) { final double swap = y0; y0 = y2; y2 = swap; dir = -1; } if (y < y0 || y > y2) { return; } if (_checkOnCurve(x, y, conic.p0x, conic.p0y, conic.p2x, conic.p2y)) { _onCurveCount += 1; return; } if (y == y2) { return; } double A = conic.p2y; double B = conic.p1y * conic.fW - y * conic.fW + y; double C = conic.p0y; // A = a + c - 2*(b*w - yCept*w + yCept) A += C - 2 * B; // B = b*w - w * yCept + yCept - a B -= C; C -= y; final QuadRoots quadRoots = QuadRoots(); final int n = quadRoots.findRoots(A, 2 * B, C); assert(n <= 1); double xt; if (0 == n) { // zero roots are returned only when y0 == y // Need [0] if dir == 1 // and [2] if dir == -1 xt = dir == 1 ? conic.p0x : conic.p2x; } else { final double root = quadRoots.root0!; xt = Conic.evalNumerator(conic.p0x, conic.p1x, conic.p2x, conic.fW, root) / Conic.evalDenominator(conic.fW, root); } if (SPath.nearlyEqual(xt, x)) { if (x != conic.p2x || y != conic.p2y) { // don't test end points; they're start points _onCurveCount += 1; return; } } _w += xt < x ? dir : 0; } void _computeCubicWinding() { final int n = chopCubicAtYExtrema(_buffer, _buffer); for (int i = 0; i <= n; ++i) { _windingMonoCubic(i * 3 * 2); } } void _windingMonoCubic(int bufferIndex) { final int bufferStartPos = bufferIndex; final double px0 = _buffer[bufferIndex++]; final double py0 = _buffer[bufferIndex++]; final double px1 = _buffer[bufferIndex++]; bufferIndex++; final double px2 = _buffer[bufferIndex++]; bufferIndex++; final double px3 = _buffer[bufferIndex++]; final double py3 = _buffer[bufferIndex++]; double y0 = py0; double y3 = py3; int dir = 1; if (y0 > y3) { final double swap = y0; y0 = y3; y3 = swap; dir = -1; } if (y < y0 || y > y3) { return; } if (_checkOnCurve(x, y, px0, py0, px3, py3)) { _onCurveCount += 1; return; } if (y == y3) { return; } // Quickly reject or accept final double min = math.min(px0, math.min(px1, math.min(px2, px3))); final double max = math.max(px0, math.max(px1, math.max(px2, px3))); if (x < min) { return; } if (x > max) { _w += dir; return; } // Compute the actual x(t) value. final double? t = chopMonoAtY(_buffer, bufferStartPos, y); if (t == null) { return; } final double xt = evalCubicPts(px0, px1, px2, px3, t); if (SPath.nearlyEqual(xt, x)) { if (x != px3 || y != py3) { // don't test end points; they're start points _onCurveCount += 1; return; } } _w += xt < x ? dir : 0; } }
engine/lib/web_ui/lib/src/engine/html/path/path_windings.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/html/path/path_windings.dart", "repo_id": "engine", "token_count": 4665 }
273
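// Condensed restatement of the straight-line case handled by
// _computeLineWinding above: a segment contributes +1 or -1 to the winding
// number when the query point's y lies inside the segment's vertical span and
// the cross product places the point on the winding side. The on-curve
// bookkeeping is omitted for brevity.
int lineWindingContribution(
    double x0, double y0, double x1, double y1, double x, double y) {
  int dir = 1;
  double top = y0;
  double bottom = y1;
  if (top > bottom) {
    top = y1;
    bottom = y0;
    dir = -1;
  }
  if (y < top || y >= bottom) {
    return 0; // Outside the span; the max-y endpoint is excluded.
  }
  // c = ax*by - ay*bx, with a the segment and b the vector to the query point.
  final double cross = (x1 - x0) * (y - y0) - (y1 - y0) * (x - x0);
  if (cross == 0 || cross.sign.toInt() == dir) {
    return 0; // On the line, or on the non-winding side.
  }
  return dir;
}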
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:typed_data'; import 'package:ui/ui.dart' as ui; import '../../browser_detection.dart'; import 'shader_builder.dart'; /// Provides common shaders used for gradients and drawVertices APIs. abstract final class VertexShaders { static final Uint16List vertexIndicesForRect = Uint16List.fromList(<int>[0, 1, 2, 2, 3, 0]); /// Cached vertex shaders. static String? _baseVertexShader; static String? _textureVertexShader; /// Creates a vertex shader transforms pixel space [Vertices.positions] to /// final clipSpace -1..1 coordinates with inverted Y Axis. /// #version 300 es /// layout (location=0) in vec4 position; /// layout (location=1) in vec4 color; /// uniform mat4 u_ctransform; /// uniform vec4 u_scale; /// uniform vec4 u_shift; /// out vec4 vColor; /// void main() { /// gl_Position = ((u_ctransform * position) * u_scale) + u_shift; /// v_color = color.zyxw; /// } static String writeBaseVertexShader() { if (_baseVertexShader == null) { final ShaderBuilder builder = ShaderBuilder(webGLVersion); builder.addIn(ShaderType.kVec4, name: 'position'); builder.addIn(ShaderType.kVec4, name: 'color'); builder.addUniform(ShaderType.kMat4, name: 'u_ctransform'); builder.addUniform(ShaderType.kVec4, name: 'u_scale'); builder.addUniform(ShaderType.kVec4, name: 'u_shift'); builder.addOut(ShaderType.kVec4, name: 'v_color'); final ShaderMethod method = builder.addMethod('main'); method.addStatement( 'gl_Position = ((u_ctransform * position) * u_scale) + u_shift;'); method.addStatement('v_color = color.zyxw;'); _baseVertexShader = builder.build(); } return _baseVertexShader!; } static String writeTextureVertexShader() { if (_textureVertexShader == null) { final ShaderBuilder builder = ShaderBuilder(webGLVersion); builder.addIn(ShaderType.kVec4, name: 'position'); builder.addUniform(ShaderType.kMat4, name: 'u_ctransform'); builder.addUniform(ShaderType.kVec4, name: 'u_scale'); builder.addUniform(ShaderType.kVec4, name: 'u_textransform'); builder.addUniform(ShaderType.kVec4, name: 'u_shift'); builder.addOut(ShaderType.kVec2, name: 'v_texcoord'); final ShaderMethod method = builder.addMethod('main'); method.addStatement( 'gl_Position = ((u_ctransform * position) * u_scale) + u_shift;'); method.addStatement('v_texcoord = vec2((u_textransform.z + position.x) * u_textransform.x, ' '((u_textransform.w + position.y) * u_textransform.y));'); _textureVertexShader = builder.build(); } return _textureVertexShader!; } } abstract final class FragmentShaders { static String writeTextureFragmentShader( bool isWebGl2, ui.TileMode? tileModeX, ui.TileMode? tileModeY) { final ShaderBuilder builder = ShaderBuilder.fragment(webGLVersion); builder.floatPrecision = ShaderPrecision.kMedium; builder.addIn(ShaderType.kVec2, name: 'v_texcoord'); builder.addUniform(ShaderType.kSampler2D, name: 'u_texture'); final ShaderMethod method = builder.addMethod('main'); if (isWebGl2 || tileModeX == null || tileModeY == null || (tileModeX == ui.TileMode.clamp && tileModeY == ui.TileMode.clamp)) { method.addStatement('${builder.fragmentColor.name} = ' '${builder.texture2DFunction}(u_texture, v_texcoord);'); } else { // Repeat and mirror are not supported for webgl1. Write code to // adjust texture coordinate. // // This will write u and v floats, clamp/repeat and mirror the value and // pass it to sampler. 
method.addTileStatements('v_texcoord.x', 'u', tileModeX); method.addTileStatements('v_texcoord.y', 'v', tileModeY); method.addStatement('vec2 uv = vec2(u, v);'); method.addStatement('${builder.fragmentColor.name} = ' '${builder.texture2DFunction}(u_texture, uv);'); } return builder.build(); } }
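The two builder classes above only emit GLSL source strings, so their output can be inspected directly. Below is a minimal usage sketch, not engine code: the relative import path and the debug-dump function name are assumptions for illustration, and `webGLVersion` is read internally by the builders.

import 'package:ui/ui.dart' as ui;
import 'vertex_shaders.dart'; // assumed to be co-located with the file above

// Hypothetical helper: dumps the GLSL generated for textured drawing when the
// context is WebGL 1 and the X axis uses repeat tiling (the emulated path above).
void debugDumpTextureShaders() {
  final String vertexSource = VertexShaders.writeTextureVertexShader();
  final String fragmentSource = FragmentShaders.writeTextureFragmentShader(
    false, // isWebGl2: pretend we only have WebGL 1
    ui.TileMode.repeated, // tileModeX: forces the emulated tiling branch
    ui.TileMode.clamp, // tileModeY
  );
  print(vertexSource);
  print(fragmentSource);
}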
engine/lib/web_ui/lib/src/engine/html/shaders/vertex_shaders.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/html/shaders/vertex_shaders.dart", "repo_id": "engine", "token_count": 1659 }
274
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import '../dom.dart'; /// Listener for DOM events that prevents the default browser behavior. final DomEventListener preventDefaultListener = createDomEventListener((DomEvent event) { event.preventDefault(); });
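A hedged sketch of how this shared listener is typically wired up. The helper name is made up for illustration, the import paths are assumptions, and the exact `addEventListener` signature on the engine's DOM wrappers is assumed to mirror the standard DOM call.

import 'dom.dart'; // engine DOM wrappers, assumed import path
import 'mouse/prevent_default.dart'; // the file above, assumed path

// Hypothetical helper: stops the browser from opening its own context menu on
// right-click by routing 'contextmenu' events through preventDefaultListener.
void disableContextMenu(DomElement element) {
  element.addEventListener('contextmenu', preventDefaultListener);
}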
engine/lib/web_ui/lib/src/engine/mouse/prevent_default.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/mouse/prevent_default.dart", "repo_id": "engine", "token_count": 95 }
275
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:ui/ui.dart' as ui; import 'initialization.dart'; const bool _debugLogPointerConverter = false; /// The state of the pointer of a specific device (e.g. finger, mouse). class _PointerDeviceState { _PointerDeviceState(this.x, this.y); /// The identifier used in the framework hit test. int? get pointer => _pointer; int? _pointer; static int _pointerCount = 0; void startNewPointer() { _pointerCount += 1; _pointer = _pointerCount; } double x; double y; } class _GlobalPointerState { _GlobalPointerState() { assert(() { registerHotRestartListener(reset); return true; }()); } // Map from browser pointer identifiers to PointerEvent pointer identifiers. final Map<int, _PointerDeviceState> pointers = <int, _PointerDeviceState>{}; /// This field is used to keep track of button state. /// /// To normalize pointer events, when we receive a pointer down followed by a /// pointer up, we synthesize a move event. To make sure that the button state /// is correct for the move regardless of the button state at the time of the /// up event, we store it on down, hover, and move events. int activeButtons = 0; _PointerDeviceState ensurePointerDeviceState(int device, double x, double y) { return pointers.putIfAbsent( device, () => _PointerDeviceState(x, y), ); } /// Resets all pointer states. /// /// This method is invoked during hot restart to make sure we have a clean /// converter after hot restart. void reset() { pointers.clear(); _PointerDeviceState._pointerCount = 0; activeButtons = 0; } } /// Converter to convert web pointer data into a form that the framework can /// understand. /// /// This converter calculates the pointer location delta and pointer identifier /// for each pointer. Both are required by the framework to correctly trigger /// gesture activity. It also attempts to sanitize the pointer data input /// sequence by always synthesizing an add pointer data prior to a hover or /// down event if the pointer has not previously been added. /// /// For example: /// before: /// hover -> down -> move -> up /// after: /// add(synthesize) -> hover -> down -> move -> up /// /// before: /// down -> move -> up /// after: /// add(synthesize) -> down -> move -> up class PointerDataConverter { PointerDataConverter(); // This is made static because the state of pointer devices is global. This // matches how the framework currently handles the state of pointer devices. // // See: https://github.com/flutter/flutter/blob/023e5addaa6e8e294a200cf754afaa1656f14aa6/packages/flutter/lib/src/rendering/binding.dart#L47-L47 static final _GlobalPointerState globalPointerState = _GlobalPointerState(); ui.PointerData _generateCompletePointerData({ required int viewId, required Duration timeStamp, required ui.PointerChange change, required ui.PointerDeviceKind kind, ui.PointerSignalKind?
signalKind, required int device, required double physicalX, required double physicalY, required int buttons, required bool obscured, required double pressure, required double pressureMin, required double pressureMax, required double distance, required double distanceMax, required double size, required double radiusMajor, required double radiusMinor, required double radiusMin, required double radiusMax, required double orientation, required double tilt, required int platformData, required double scrollDeltaX, required double scrollDeltaY, required double scale, }) { assert(globalPointerState.pointers.containsKey(device)); final _PointerDeviceState state = globalPointerState.pointers[device]!; final double deltaX = physicalX - state.x; final double deltaY = physicalY - state.y; state.x = physicalX; state.y = physicalY; return ui.PointerData( viewId: viewId, timeStamp: timeStamp, change: change, kind: kind, signalKind: signalKind, device: device, pointerIdentifier: state.pointer ?? 0, physicalX: physicalX, physicalY: physicalY, physicalDeltaX: deltaX, physicalDeltaY: deltaY, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ); } bool _locationHasChanged(int device, double physicalX, double physicalY) { assert(globalPointerState.pointers.containsKey(device)); final _PointerDeviceState state = globalPointerState.pointers[device]!; return state.x != physicalX || state.y != physicalY; } ui.PointerData _synthesizePointerData({ required int viewId, required Duration timeStamp, required ui.PointerChange change, required ui.PointerDeviceKind kind, required int device, required double physicalX, required double physicalY, required int buttons, required bool obscured, required double pressure, required double pressureMin, required double pressureMax, required double distance, required double distanceMax, required double size, required double radiusMajor, required double radiusMinor, required double radiusMin, required double radiusMax, required double orientation, required double tilt, required int platformData, required double scrollDeltaX, required double scrollDeltaY, required double scale, }) { assert(globalPointerState.pointers.containsKey(device)); final _PointerDeviceState state = globalPointerState.pointers[device]!; final double deltaX = physicalX - state.x; final double deltaY = physicalY - state.y; state.x = physicalX; state.y = physicalY; return ui.PointerData( viewId: viewId, timeStamp: timeStamp, change: change, kind: kind, // All the pointer data except scroll should not have a signal kind, and // there is no use case for synthetic scroll event. We should be // safe to default it to ui.PointerSignalKind.none. signalKind: ui.PointerSignalKind.none, device: device, pointerIdentifier: state.pointer ?? 
0, physicalX: physicalX, physicalY: physicalY, physicalDeltaX: deltaX, physicalDeltaY: deltaY, buttons: buttons, obscured: obscured, synthesized: true, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ); } /// Converts the given html pointer event metrics into a sequence of framework-compatible /// pointer data and stores it into [result] void convert( List<ui.PointerData> result, { required int viewId, Duration timeStamp = Duration.zero, ui.PointerChange change = ui.PointerChange.cancel, ui.PointerDeviceKind kind = ui.PointerDeviceKind.touch, ui.PointerSignalKind? signalKind, int device = 0, double physicalX = 0.0, double physicalY = 0.0, int buttons = 0, bool obscured = false, double pressure = 0.0, double pressureMin = 0.0, double pressureMax = 0.0, double distance = 0.0, double distanceMax = 0.0, double size = 0.0, double radiusMajor = 0.0, double radiusMinor = 0.0, double radiusMin = 0.0, double radiusMax = 0.0, double orientation = 0.0, double tilt = 0.0, int platformData = 0, double scrollDeltaX = 0.0, double scrollDeltaY = 0.0, double scale = 1.0, }) { if (_debugLogPointerConverter) { print('>> view=$viewId device=$device change=$change buttons=$buttons'); } final bool isDown = buttons != 0; if (signalKind == null || signalKind == ui.PointerSignalKind.none) { switch (change) { case ui.PointerChange.add: assert(!globalPointerState.pointers.containsKey(device)); globalPointerState.ensurePointerDeviceState(device, physicalX, physicalY); assert(!_locationHasChanged(device, physicalX, physicalY)); result.add( _generateCompletePointerData( viewId: viewId, timeStamp: timeStamp, change: change, kind: kind, signalKind: signalKind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); case ui.PointerChange.hover: final bool alreadyAdded = globalPointerState.pointers.containsKey(device); globalPointerState.ensurePointerDeviceState(device, physicalX, physicalY); assert(!isDown); if (!alreadyAdded) { // Synthesizes an add pointer data. 
result.add( _synthesizePointerData( viewId: viewId, timeStamp: timeStamp, change: ui.PointerChange.add, kind: kind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); } result.add( _generateCompletePointerData( viewId: viewId, timeStamp: timeStamp, change: change, kind: kind, signalKind: signalKind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); globalPointerState.activeButtons = buttons; case ui.PointerChange.down: final bool alreadyAdded = globalPointerState.pointers.containsKey(device); final _PointerDeviceState state = globalPointerState.ensurePointerDeviceState( device, physicalX, physicalY); assert(isDown); state.startNewPointer(); if (!alreadyAdded) { // Synthesizes an add pointer data. result.add( _synthesizePointerData( viewId: viewId, timeStamp: timeStamp, change: ui.PointerChange.add, kind: kind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); } if (_locationHasChanged(device, physicalX, physicalY)) { assert(alreadyAdded); // Synthesize a hover of the pointer to the down location before // sending the down event, if necessary. 
result.add( _synthesizePointerData( viewId: viewId, timeStamp: timeStamp, change: ui.PointerChange.hover, kind: kind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: 0, obscured: obscured, pressure: 0.0, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); } result.add( _generateCompletePointerData( viewId: viewId, timeStamp: timeStamp, change: change, kind: kind, signalKind: signalKind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); globalPointerState.activeButtons = buttons; case ui.PointerChange.move: assert(globalPointerState.pointers.containsKey(device)); assert(isDown); result.add( _generateCompletePointerData( viewId: viewId, timeStamp: timeStamp, change: change, kind: kind, signalKind: signalKind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); globalPointerState.activeButtons = buttons; case ui.PointerChange.up: case ui.PointerChange.cancel: assert(globalPointerState.pointers.containsKey(device)); final _PointerDeviceState state = globalPointerState.pointers[device]!; assert(!isDown); // Cancel events can have different coordinates due to various // reasons (window lost focus which is accompanied by window // movement, or PointerEvent simply always gives 0). Instead of // caring about the coordinates, we want to cancel the pointers as // soon as possible. if (change == ui.PointerChange.cancel) { physicalX = state.x; physicalY = state.y; } if (_locationHasChanged(device, physicalX, physicalY)) { // Synthesize a move of the pointer to the up location before // sending the up event, if necessary. 
result.add( _synthesizePointerData( viewId: viewId, timeStamp: timeStamp, change: ui.PointerChange.move, kind: kind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: globalPointerState.activeButtons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); } result.add( _generateCompletePointerData( viewId: viewId, timeStamp: timeStamp, change: change, kind: kind, signalKind: signalKind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); if (kind == ui.PointerDeviceKind.touch) { // The browser sends a new device ID for each touch gesture. To // avoid memory leaks, we send a "remove" event when the gesture is // over (i.e. when "up" or "cancel" is received). result.add( _synthesizePointerData( viewId: viewId, timeStamp: timeStamp, change: ui.PointerChange.remove, kind: kind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: 0, obscured: obscured, pressure: 0.0, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); globalPointerState.pointers.remove(device); } case ui.PointerChange.remove: assert(globalPointerState.pointers.containsKey(device)); final _PointerDeviceState state = globalPointerState.pointers[device]!; assert(!isDown); result.add( _generateCompletePointerData( viewId: viewId, timeStamp: timeStamp, change: change, kind: kind, signalKind: signalKind, device: device, physicalX: state.x, physicalY: state.y, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); globalPointerState.pointers.remove(device); case ui.PointerChange.panZoomStart: case ui.PointerChange.panZoomUpdate: case ui.PointerChange.panZoomEnd: // Pointer pan/zoom events are not generated on web. assert(false); } } else { switch (signalKind) { case ui.PointerSignalKind.scroll: case ui.PointerSignalKind.scrollInertiaCancel: case ui.PointerSignalKind.scale: final bool alreadyAdded = globalPointerState.pointers.containsKey(device); globalPointerState.ensurePointerDeviceState(device, physicalX, physicalY); if (!alreadyAdded) { // Synthesizes an add pointer data. 
result.add( _synthesizePointerData( viewId: viewId, timeStamp: timeStamp, change: ui.PointerChange.add, kind: kind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); } if (_locationHasChanged(device, physicalX, physicalY)) { // Synthesize a hover/move of the pointer to the scroll location // before sending the scroll event, if necessary, so that clients // don't have to worry about native ordering of hover and scroll // events. if (isDown) { result.add( _synthesizePointerData( viewId: viewId, timeStamp: timeStamp, change: ui.PointerChange.move, kind: kind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); } else { result.add( _synthesizePointerData( viewId: viewId, timeStamp: timeStamp, change: ui.PointerChange.hover, kind: kind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); } } result.add( _generateCompletePointerData( viewId: viewId, timeStamp: timeStamp, change: change, kind: kind, signalKind: signalKind, device: device, physicalX: physicalX, physicalY: physicalY, buttons: buttons, obscured: obscured, pressure: pressure, pressureMin: pressureMin, pressureMax: pressureMax, distance: distance, distanceMax: distanceMax, size: size, radiusMajor: radiusMajor, radiusMinor: radiusMinor, radiusMin: radiusMin, radiusMax: radiusMax, orientation: orientation, tilt: tilt, platformData: platformData, scrollDeltaX: scrollDeltaX, scrollDeltaY: scrollDeltaY, scale: scale, ) ); case ui.PointerSignalKind.none: assert(false); // This branch should already have 'none' filtered out. case ui.PointerSignalKind.unknown: // Ignore unknown signals. break; } } } }
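A self-contained sketch (not part of the engine) exercising the sanitization described in the class comment: a hover for a device that was never added should be expanded into a synthesized `add` packet followed by the `hover` itself. Note that the converter's device state is global and static, so the device stays registered after this call. The function name and import path are illustrative assumptions.

import 'package:ui/ui.dart' as ui;
import 'pointer_converter.dart'; // assumed to be co-located with the file above

void demonstrateSynthesizedAdd() {
  final PointerDataConverter converter = PointerDataConverter();
  final List<ui.PointerData> result = <ui.PointerData>[];
  converter.convert(
    result,
    viewId: 0,
    change: ui.PointerChange.hover,
    kind: ui.PointerDeviceKind.mouse,
    device: 1,
    physicalX: 10.0,
    physicalY: 20.0,
  );
  // Expected: [add (synthesized), hover] — two packets for one browser event.
  assert(result.length == 2);
  assert(result.first.change == ui.PointerChange.add && result.first.synthesized);
  assert(result.last.change == ui.PointerChange.hover);
}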
engine/lib/web_ui/lib/src/engine/pointer_converter.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/pointer_converter.dart", "repo_id": "engine", "token_count": 13710 }
276
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import '../dom.dart'; import 'semantics.dart'; /// The method used to represent a label of a leaf node in the DOM. /// /// This is required by some screen readers and web crawlers. /// /// Container nodes only use `aria-label`, even if [domText] is chosen. This is /// because screen readers treat container nodes as "groups" of other nodes, and /// respect the `aria-label` without a [DomText] node. Crawlers typically do not /// need this information, as they primarily scan visible text, which is /// communicated in semantics as leaf text and heading nodes. enum LeafLabelRepresentation { /// Represents the label as an `aria-label` attribute. ariaLabel, /// Represents the label as a [DomText] node. domText, } /// Renders [SemanticsObject.label] and/or [SemanticsObject.value] to the semantics DOM. /// /// The value is not always rendered. Some semantics nodes correspond to /// interactive controls. In such cases the value is reported via that element's /// `value` attribute rather than rendering it separately. class LabelAndValue extends RoleManager { LabelAndValue(SemanticsObject semanticsObject, PrimaryRoleManager owner, { required this.labelRepresentation }) : super(Role.labelAndValue, semanticsObject, owner); /// Configures the representation of the label in the DOM. final LeafLabelRepresentation labelRepresentation; @override void update() { final String? computedLabel = _computeLabel(); if (computedLabel == null) { _oldLabel = null; _cleanUpDom(); return; } _updateLabel(computedLabel); } DomText? _domText; String? _oldLabel; void _updateLabel(String label) { if (label == _oldLabel) { return; } _oldLabel = label; final bool needsDomText = labelRepresentation == LeafLabelRepresentation.domText && !semanticsObject.hasChildren; _domText?.remove(); if (needsDomText) { owner.removeAttribute('aria-label'); final DomText domText = domDocument.createTextNode(label); _domText = domText; semanticsObject.element.appendChild(domText); } else { owner.setAttribute('aria-label', label); _domText = null; } } /// Computes the final label to be assigned to the node. /// /// The label is a concatenation of tooltip, label, hint, and value, whichever /// combination is present. String? _computeLabel() { // If the node is incrementable the value is reported to the browser via // the respective role manager. We do not need to also render it again here. final bool shouldDisplayValue = !semanticsObject.isIncrementable && semanticsObject.hasValue; return computeDomSemanticsLabel( tooltip: semanticsObject.hasTooltip ? semanticsObject.tooltip : null, label: semanticsObject.hasLabel ? semanticsObject.label : null, hint: semanticsObject.hint, value: shouldDisplayValue ? semanticsObject.value : null, ); } void _cleanUpDom() { owner.removeAttribute('aria-label'); _domText?.remove(); } @override void dispose() { super.dispose(); _cleanUpDom(); } } String? computeDomSemanticsLabel({ String? tooltip, String? label, String? hint, String? value, }) { final String? labelHintValue = _computeLabelHintValue(label: label, hint: hint, value: value); if (tooltip == null && labelHintValue == null) { return null; } final StringBuffer combinedValue = StringBuffer(); if (tooltip != null) { combinedValue.write(tooltip); // Separate the tooltip from the rest via a line-break (if the rest exists).
if (labelHintValue != null) { combinedValue.writeln(); } } if (labelHintValue != null) { combinedValue.write(labelHintValue); } return combinedValue.isNotEmpty ? combinedValue.toString() : null; } String? _computeLabelHintValue({ String? label, String? hint, String? value, }) { final String combinedValue = <String?>[label, hint, value] .whereType<String>() // poor man's null filter .where((String element) => element.trim().isNotEmpty) .join(' '); return combinedValue.isNotEmpty ? combinedValue : null; }
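Since `computeDomSemanticsLabel` and `_computeLabelHintValue` are pure string helpers, their behavior can be shown directly. The expected outputs below follow from the code above (tooltip separated by a line break, remaining parts joined by single spaces, blank parts dropped); the function name is made up for illustration and assumes label_and_value.dart is imported.

// Illustrative expectations only, not engine code.
void labelCombinationExamples() {
  // The tooltip is separated from label/hint/value by a newline.
  final String? combined = computeDomSemanticsLabel(
    tooltip: 'Submit the form',
    label: 'Submit',
    value: 'enabled',
  );
  print(combined); // "Submit the form" + '\n' + "Submit enabled"

  // Whitespace-only parts are filtered out, so the result collapses to null.
  print(computeDomSemanticsLabel(label: '   ')); // null
}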
engine/lib/web_ui/lib/src/engine/semantics/label_and_value.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/semantics/label_and_value.dart", "repo_id": "engine", "token_count": 1338 }
277
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @DefaultAsset('skwasm') // The web_sdk/sdk_rewriter.dart uses this directive. // ignore: unnecessary_library_directive library skwasm_impl; import 'dart:ffi'; export 'skwasm_impl/canvas.dart'; export 'skwasm_impl/codecs.dart'; export 'skwasm_impl/dart_js_conversion.dart'; export 'skwasm_impl/filters.dart'; export 'skwasm_impl/font_collection.dart'; export 'skwasm_impl/image.dart'; export 'skwasm_impl/memory.dart'; export 'skwasm_impl/paint.dart'; export 'skwasm_impl/paragraph.dart'; export 'skwasm_impl/path.dart'; export 'skwasm_impl/path_metrics.dart'; export 'skwasm_impl/picture.dart'; export 'skwasm_impl/raw/raw_canvas.dart'; export 'skwasm_impl/raw/raw_filters.dart'; export 'skwasm_impl/raw/raw_fonts.dart'; export 'skwasm_impl/raw/raw_geometry.dart'; export 'skwasm_impl/raw/raw_image.dart'; export 'skwasm_impl/raw/raw_memory.dart'; export 'skwasm_impl/raw/raw_paint.dart'; export 'skwasm_impl/raw/raw_path.dart'; export 'skwasm_impl/raw/raw_path_metrics.dart'; export 'skwasm_impl/raw/raw_picture.dart'; export 'skwasm_impl/raw/raw_shaders.dart'; export 'skwasm_impl/raw/raw_skdata.dart'; export 'skwasm_impl/raw/raw_skstring.dart'; export 'skwasm_impl/raw/raw_surface.dart'; export 'skwasm_impl/raw/raw_vertices.dart'; export 'skwasm_impl/raw/skwasm_module.dart'; export 'skwasm_impl/raw/text/raw_line_metrics.dart'; export 'skwasm_impl/raw/text/raw_paragraph.dart'; export 'skwasm_impl/raw/text/raw_paragraph_builder.dart'; export 'skwasm_impl/raw/text/raw_paragraph_style.dart'; export 'skwasm_impl/raw/text/raw_strut_style.dart'; export 'skwasm_impl/raw/text/raw_text_style.dart'; export 'skwasm_impl/renderer.dart'; export 'skwasm_impl/shaders.dart'; export 'skwasm_impl/surface.dart'; export 'skwasm_impl/vertices.dart';
engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl.dart", "repo_id": "engine", "token_count": 777 }
278
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:ffi'; typedef RawRect = Pointer<Float>; typedef RawIRect = Pointer<Int32>; typedef RawRRect = Pointer<Float>; typedef RawPointArray = Pointer<Float>; typedef RawRSTransformArray = Pointer<Float>; typedef RawMatrix33 = Pointer<Float>; typedef RawMatrix44 = Pointer<Float>; typedef RawColorArray = Pointer<Uint32>;
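These typedefs are plain `Pointer<Float>`/`Pointer<Int32>` views over WASM memory, so filling one is ordinary FFI work. The sketch below is illustrative only: it assumes the SkRect-style left/top/right/bottom ordering and uses `package:ffi`'s `calloc`, whereas the engine itself allocates through its own helpers (see skwasm_impl/memory.dart and raw_memory.dart), and it assumes raw_geometry.dart is imported.

import 'dart:ffi';
import 'package:ffi/ffi.dart'; // assumption: a general-purpose allocator is acceptable here

// Hypothetical helper: packs an LTRB rectangle into a freshly allocated
// RawRect (four consecutive 32-bit floats). The caller owns the memory.
RawRect allocateRawRect(double left, double top, double right, double bottom) {
  final RawRect rect = calloc<Float>(4);
  rect[0] = left;
  rect[1] = top;
  rect[2] = right;
  rect[3] = bottom;
  return rect; // Release with calloc.free(rect) when done.
}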
engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/raw_geometry.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/raw_geometry.dart", "repo_id": "engine", "token_count": 169 }
279
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @DefaultAsset('skwasm') library skwasm_impl; import 'dart:ffi'; import 'package:ui/src/engine/skwasm/skwasm_impl.dart'; final class RawParagraphStyle extends Opaque {} typedef ParagraphStyleHandle = Pointer<RawParagraphStyle>; @Native<ParagraphStyleHandle Function()>(symbol: 'paragraphStyle_create', isLeaf: true) external ParagraphStyleHandle paragraphStyleCreate(); @Native<Void Function(ParagraphStyleHandle)>(symbol: 'paragraphStyle_dispose', isLeaf: true) external void paragraphStyleDispose(ParagraphStyleHandle handle); @Native<Void Function(ParagraphStyleHandle, Int)>(symbol: 'paragraphStyle_setTextAlign', isLeaf: true) external void paragraphStyleSetTextAlign(ParagraphStyleHandle handle, int textAlign); @Native<Void Function(ParagraphStyleHandle, Int)>(symbol: 'paragraphStyle_setTextDirection', isLeaf: true) external void paragraphStyleSetTextDirection(ParagraphStyleHandle handle, int textDirection); @Native<Void Function(ParagraphStyleHandle, Size)>(symbol: 'paragraphStyle_setMaxLines', isLeaf: true) external void paragraphStyleSetMaxLines(ParagraphStyleHandle handle, int maxLines); @Native<Void Function(ParagraphStyleHandle, Float)>(symbol: 'paragraphStyle_setHeight', isLeaf: true) external void paragraphStyleSetHeight(ParagraphStyleHandle handle, double height); @Native<Void Function( ParagraphStyleHandle, Bool, Bool, )>(symbol: 'paragraphStyle_setTextHeightBehavior', isLeaf: true) external void paragraphStyleSetTextHeightBehavior( ParagraphStyleHandle handle, bool applyHeightToFirstAscent, bool applyHeightToLastDescent, ); @Native<Void Function(ParagraphStyleHandle, SkStringHandle)>(symbol: 'paragraphStyle_setEllipsis', isLeaf: true) external void paragraphStyleSetEllipsis(ParagraphStyleHandle handle, SkStringHandle ellipsis); @Native<Void Function(ParagraphStyleHandle, StrutStyleHandle)>(symbol: 'paragraphStyle_setStrutStyle', isLeaf: true) external void paragraphStyleSetStrutStyle(ParagraphStyleHandle handle, StrutStyleHandle strutStyle); @Native<Void Function(ParagraphStyleHandle, TextStyleHandle)>(symbol: 'paragraphStyle_setTextStyle', isLeaf: true) external void paragraphStyleSetTextStyle(ParagraphStyleHandle handle, TextStyleHandle textStyle); @Native<Void Function(ParagraphStyleHandle, Bool)>(symbol: 'paragraphStyle_setApplyRoundingHack', isLeaf: true) external void paragraphStyleSetApplyRoundingHack(ParagraphStyleHandle handle, bool applyRoundingHack);
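A hedged sketch composing the bindings declared above into a typical create/configure/dispose sequence. It assumes, without verifying here, that the native side interprets the int arguments as the corresponding `ui.TextAlign`/`ui.TextDirection` enum indices, which is how the higher-level skwasm paragraph code passes them; the helper name is made up for illustration.

import 'package:ui/ui.dart' as ui;
// assumes the binding file above (raw_paragraph_style.dart) is imported

ParagraphStyleHandle buildCenteredParagraphStyle() {
  final ParagraphStyleHandle style = paragraphStyleCreate();
  paragraphStyleSetTextAlign(style, ui.TextAlign.center.index); // assumed encoding
  paragraphStyleSetTextDirection(style, ui.TextDirection.ltr.index); // assumed encoding
  paragraphStyleSetMaxLines(style, 2);
  paragraphStyleSetHeight(style, 1.5);
  return style; // Must eventually be released with paragraphStyleDispose(style).
}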
engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/text/raw_paragraph_style.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/text/raw_paragraph_style.dart", "repo_id": "engine", "token_count": 751 }
280
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:math' as math; import 'package:meta/meta.dart'; import 'package:ui/ui.dart' as ui; import '../dom.dart'; import 'canvas_paragraph.dart'; import 'layout_fragmenter.dart'; import 'line_breaker.dart'; import 'measurement.dart'; import 'paragraph.dart'; import 'ruler.dart'; import 'text_direction.dart'; /// A single canvas2d context to use for all text measurements. @visibleForTesting final DomCanvasRenderingContext2D textContext = // We don't use this canvas to draw anything, so let's make it as small as // possible to save memory. createDomCanvasElement(width: 0, height: 0).context2D; /// The last font used in the [textContext]. String? _lastContextFont; /// Performs layout on a [CanvasParagraph]. /// /// It uses a [DomCanvasElement] to measure text. class TextLayoutService { TextLayoutService(this.paragraph); final CanvasParagraph paragraph; // *** Results of layout *** // // Look at the Paragraph class for documentation of the following properties. double width = -1.0; double height = 0.0; ParagraphLine? longestLine; double minIntrinsicWidth = 0.0; double maxIntrinsicWidth = 0.0; double alphabeticBaseline = -1.0; double ideographicBaseline = -1.0; bool didExceedMaxLines = false; final List<ParagraphLine> lines = <ParagraphLine>[]; /// The bounds that contain the text painted inside this paragraph. ui.Rect get paintBounds => _paintBounds; ui.Rect _paintBounds = ui.Rect.zero; late final Spanometer spanometer = Spanometer(paragraph); late final LayoutFragmenter layoutFragmenter = LayoutFragmenter(paragraph.plainText, paragraph.spans); /// Performs the layout on a paragraph given the [constraints]. /// /// The function starts by resetting all layout-related properties. Then it /// starts looping through the paragraph to calculate all layout metrics. /// /// It uses a [Spanometer] to perform measurements within spans of the /// paragraph. It also uses [LineBuilders] to generate [ParagraphLine]s as /// it iterates through the paragraph. /// /// The main loop keeps going until: /// /// 1. The end of the paragraph is reached (i.e. LineBreakType.endOfText). /// 2. Enough lines have been computed to satisfy [maxLines]. /// 3. An ellipsis is appended because of an overflow. void performLayout(ui.ParagraphConstraints constraints) { // Reset results from previous layout. width = constraints.width; height = 0.0; longestLine = null; minIntrinsicWidth = 0.0; maxIntrinsicWidth = 0.0; didExceedMaxLines = false; lines.clear(); LineBuilder currentLine = LineBuilder.first(paragraph, spanometer, maxWidth: constraints.width); final List<LayoutFragment> fragments = layoutFragmenter.fragment()..forEach(spanometer.measureFragment); outerLoop: for (int i = 0; i < fragments.length; i++) { final LayoutFragment fragment = fragments[i]; currentLine.addFragment(fragment); while (currentLine.isOverflowing) { if (currentLine.canHaveEllipsis) { currentLine.insertEllipsis(); lines.add(currentLine.build()); didExceedMaxLines = true; break outerLoop; } if (currentLine.isBreakable) { currentLine.revertToLastBreakOpportunity(); } else { // The line can't be legally broken, so the last fragment (that caused // the line to overflow) needs to be force-broken. 
currentLine.forceBreakLastFragment(); } i += currentLine.appendZeroWidthFragments(fragments, startFrom: i + 1); lines.add(currentLine.build()); currentLine = currentLine.nextLine(); } if (currentLine.isHardBreak) { lines.add(currentLine.build()); currentLine = currentLine.nextLine(); } } final int? maxLines = paragraph.paragraphStyle.maxLines; if (maxLines != null && lines.length > maxLines) { didExceedMaxLines = true; lines.removeRange(maxLines, lines.length); } // ***************************************************************** // // *** PARAGRAPH BASELINE & HEIGHT & LONGEST LINE & PAINT BOUNDS *** // // ***************************************************************** // double boundsLeft = double.infinity; double boundsRight = double.negativeInfinity; for (final ParagraphLine line in lines) { height += line.height; if (alphabeticBaseline == -1.0) { alphabeticBaseline = line.baseline; ideographicBaseline = alphabeticBaseline * baselineRatioHack; } final double longestLineWidth = longestLine?.width ?? 0.0; if (longestLineWidth < line.width) { longestLine = line; } final double left = line.left; if (left < boundsLeft) { boundsLeft = left; } final double right = left + line.width; if (right > boundsRight) { boundsRight = right; } } _paintBounds = ui.Rect.fromLTRB( boundsLeft, 0, boundsRight, height, ); // **************************** // // *** FRAGMENT POSITIONING *** // // **************************** // // We have to perform justification alignment first so that we can position // fragments correctly later. if (lines.isNotEmpty) { final bool shouldJustifyParagraph = width.isFinite && paragraph.paragraphStyle.textAlign == ui.TextAlign.justify; if (shouldJustifyParagraph) { // Don't apply justification to the last line. for (int i = 0; i < lines.length - 1; i++) { for (final LayoutFragment fragment in lines[i].fragments) { fragment.justifyTo(paragraphWidth: width); } } } } lines.forEach(_positionLineFragments); // ******************************** // // *** MAX/MIN INTRINSIC WIDTHS *** // // ******************************** // // TODO(mdebbar): Handle maxLines https://github.com/flutter/flutter/issues/91254 double runningMinIntrinsicWidth = 0; double runningMaxIntrinsicWidth = 0; for (final LayoutFragment fragment in fragments) { runningMinIntrinsicWidth += fragment.widthExcludingTrailingSpaces; // Max intrinsic width includes the width of trailing spaces. runningMaxIntrinsicWidth += fragment.widthIncludingTrailingSpaces; switch (fragment.type) { case LineBreakType.prohibited: break; case LineBreakType.opportunity: minIntrinsicWidth = math.max(minIntrinsicWidth, runningMinIntrinsicWidth); runningMinIntrinsicWidth = 0; case LineBreakType.mandatory: case LineBreakType.endOfText: minIntrinsicWidth = math.max(minIntrinsicWidth, runningMinIntrinsicWidth); maxIntrinsicWidth = math.max(maxIntrinsicWidth, runningMaxIntrinsicWidth); runningMinIntrinsicWidth = 0; runningMaxIntrinsicWidth = 0; } } } ui.TextDirection get _paragraphDirection => paragraph.paragraphStyle.effectiveTextDirection; /// Positions the fragments taking into account their directions and the /// paragraph's direction. void _positionLineFragments(ParagraphLine line) { ui.TextDirection previousDirection = _paragraphDirection; double startOffset = 0.0; int? 
sandwichStart; int sequenceStart = 0; for (int i = 0; i <= line.fragments.length; i++) { if (i < line.fragments.length) { final LayoutFragment fragment = line.fragments[i]; if (fragment.fragmentFlow == FragmentFlow.previous) { sandwichStart = null; continue; } if (fragment.fragmentFlow == FragmentFlow.sandwich) { sandwichStart ??= i; continue; } assert(fragment.fragmentFlow == FragmentFlow.ltr || fragment.fragmentFlow == FragmentFlow.rtl); final ui.TextDirection currentDirection = fragment.fragmentFlow == FragmentFlow.ltr ? ui.TextDirection.ltr : ui.TextDirection.rtl; if (currentDirection == previousDirection) { sandwichStart = null; continue; } } // We've reached a fragment that'll flip the text direction. Let's // position the sequence that we've been traversing. if (sandwichStart == null) { // Position fragments in range [sequenceStart:i) startOffset += _positionFragmentRange( line: line, start: sequenceStart, end: i, direction: previousDirection, startOffset: startOffset, ); } else { // Position fragments in range [sequenceStart:sandwichStart) startOffset += _positionFragmentRange( line: line, start: sequenceStart, end: sandwichStart, direction: previousDirection, startOffset: startOffset, ); // Position fragments in range [sandwichStart:i) startOffset += _positionFragmentRange( line: line, start: sandwichStart, end: i, direction: _paragraphDirection, startOffset: startOffset, ); } sequenceStart = i; sandwichStart = null; if (i < line.fragments.length){ previousDirection = line.fragments[i].textDirection!; } } } double _positionFragmentRange({ required ParagraphLine line, required int start, required int end, required ui.TextDirection direction, required double startOffset, }) { assert(start <= end); double cumulativeWidth = 0.0; // The bodies of the two for loops below must remain identical. The only // difference is the looping direction. One goes from start to end, while // the other goes from end to start. if (direction == _paragraphDirection) { for (int i = start; i < end; i++) { cumulativeWidth += _positionOneFragment(line, i, startOffset + cumulativeWidth, direction); } } else { for (int i = end - 1; i >= start; i--) { cumulativeWidth += _positionOneFragment(line, i, startOffset + cumulativeWidth, direction); } } return cumulativeWidth; } double _positionOneFragment( ParagraphLine line, int i, double startOffset, ui.TextDirection direction, ) { final LayoutFragment fragment = line.fragments[i]; fragment.setPosition(startOffset: startOffset, textDirection: direction); return fragment.widthIncludingTrailingSpaces; } List<ui.TextBox> getBoxesForPlaceholders() { final List<ui.TextBox> boxes = <ui.TextBox>[]; for (final ParagraphLine line in lines) { for (final LayoutFragment fragment in line.fragments) { if (fragment.isPlaceholder) { boxes.add(fragment.toTextBox()); } } } return boxes; } List<ui.TextBox> getBoxesForRange( int start, int end, ui.BoxHeightStyle boxHeightStyle, ui.BoxWidthStyle boxWidthStyle, ) { // Zero-length ranges and invalid ranges return an empty list. if (start >= end || start < 0 || end < 0) { return <ui.TextBox>[]; } final int length = paragraph.plainText.length; // Ranges that are out of bounds should return an empty list. 
if (start > length || end > length) { return <ui.TextBox>[]; } final List<ui.TextBox> boxes = <ui.TextBox>[]; for (final ParagraphLine line in lines) { if (line.overlapsWith(start, end)) { for (final LayoutFragment fragment in line.fragments) { if (!fragment.isPlaceholder && fragment.overlapsWith(start, end)) { boxes.add(fragment.toTextBox(start: start, end: end)); } } } } return boxes; } ui.TextPosition getPositionForOffset(ui.Offset offset) { // After layout, each line has boxes that contain enough information to make // it possible to do hit testing. Once we find the box, we look inside that // box to find where exactly the `offset` is located. final ParagraphLine? line = _findLineForY(offset.dy); if (line == null) { return const ui.TextPosition(offset: 0); } // [offset] is to the left of the line. if (offset.dx <= line.left) { return ui.TextPosition( offset: line.startIndex, ); } // [offset] is to the right of the line. if (offset.dx >= line.left + line.widthWithTrailingSpaces) { return ui.TextPosition( offset: line.endIndex - line.trailingNewlines, affinity: ui.TextAffinity.upstream, ); } final double dx = offset.dx - line.left; for (final LayoutFragment fragment in line.fragments) { if (fragment.left <= dx && dx <= fragment.right) { return fragment.getPositionForX(dx - fragment.left); } } // Is this ever reachable? return ui.TextPosition(offset: line.startIndex); } ui.GlyphInfo? getClosestGlyphInfo(ui.Offset offset) { final ParagraphLine? line = _findLineForY(offset.dy); if (line == null) { return null; } final LayoutFragment? fragment = line.closestFragmentAtOffset(offset.dx - line.left); if (fragment == null) { return null; } final double dx = offset.dx; final bool closestGraphemeStartInFragment = !fragment.hasLeadingBrokenGrapheme || dx <= fragment.line.left || fragment.line.left + fragment.line.width <= dx || switch (fragment.textDirection!) { // If dx is closer to the trailing edge, no need to check other fragments. ui.TextDirection.ltr => dx >= line.left + (fragment.left + fragment.right) / 2, ui.TextDirection.rtl => dx <= line.left + (fragment.left + fragment.right) / 2, }; final ui.GlyphInfo candidate1 = fragment.getClosestCharacterBox(dx); if (closestGraphemeStartInFragment) { return candidate1; } final bool searchLeft = switch (fragment.textDirection!) { ui.TextDirection.ltr => true, ui.TextDirection.rtl => false, }; final ui.GlyphInfo? candidate2 = fragment.line.closestFragmentTo(fragment, searchLeft)?.getClosestCharacterBox(dx); if (candidate2 == null) { return candidate1; } final double distance1 = math.min( (candidate1.graphemeClusterLayoutBounds.left - dx).abs(), (candidate1.graphemeClusterLayoutBounds.right - dx).abs(), ); final double distance2 = math.min( (candidate2.graphemeClusterLayoutBounds.left - dx).abs(), (candidate2.graphemeClusterLayoutBounds.right - dx).abs(), ); return distance2 > distance1 ? candidate1 : candidate2; } ParagraphLine? _findLineForY(double y) { if (lines.isEmpty) { return null; } // We could do a binary search here but it's not worth it because the number // of line is typically low, and each iteration is a cheap comparison of // doubles. for (final ParagraphLine line in lines) { if (y <= line.height) { return line; } y -= line.height; } return lines.last; } } /// Builds instances of [ParagraphLine] for the given [paragraph]. /// /// Usage of this class starts by calling [LineBuilder.first] to start building /// the first line of the paragraph. /// /// Then fragments can be added by calling [addFragment]. 
/// /// After adding a fragment, one can use [isOverflowing] to determine whether /// the added fragment caused the line to overflow or not. /// /// Once the line is complete, it can be built by calling [build] to generate /// a [ParagraphLine] instance. /// /// To start building the next line, simply call [nextLine] to get a new /// [LineBuilder] for the next line. class LineBuilder { LineBuilder._( this.paragraph, this.spanometer, { required this.maxWidth, required this.lineNumber, required this.accumulatedHeight, required List<LayoutFragment> fragments, }) : _fragments = fragments { _recalculateMetrics(); } /// Creates a [LineBuilder] for the first line in a paragraph. factory LineBuilder.first( CanvasParagraph paragraph, Spanometer spanometer, { required double maxWidth, }) { return LineBuilder._( paragraph, spanometer, maxWidth: maxWidth, lineNumber: 0, accumulatedHeight: 0.0, fragments: <LayoutFragment>[], ); } final List<LayoutFragment> _fragments; List<LayoutFragment>? _fragmentsForNextLine; int get startIndex { assert(_fragments.isNotEmpty || _fragmentsForNextLine!.isNotEmpty); return isNotEmpty ? _fragments.first.start : _fragmentsForNextLine!.first.start; } int get endIndex { assert(_fragments.isNotEmpty || _fragmentsForNextLine!.isNotEmpty); return isNotEmpty ? _fragments.last.end : _fragmentsForNextLine!.first.start; } final double maxWidth; final CanvasParagraph paragraph; final Spanometer spanometer; final int lineNumber; /// The accumulated height of all preceding lines, excluding the current line. final double accumulatedHeight; /// The width of the line so far, excluding trailing white space. double width = 0.0; /// The width of the line so far, including trailing white space. double widthIncludingSpace = 0.0; double get _widthExcludingLastFragment => _fragments.length > 1 ? widthIncludingSpace - _fragments.last.widthIncludingTrailingSpaces : 0; /// The distance from the top of the line to the alphabetic baseline. double ascent = 0.0; /// The distance from the bottom of the line to the alphabetic baseline. double descent = 0.0; /// The height of the line so far. double get height => ascent + descent; int _lastBreakableFragment = -1; int _breakCount = 0; /// Whether this line can be legally broken into more than one line. bool get isBreakable { if (_fragments.isEmpty) { return false; } if (_fragments.last.isBreak) { // We need one more break other than the last one. return _breakCount > 1; } return _breakCount > 0; } /// Returns true if the line can't be legally broken any further. bool get isNotBreakable => !isBreakable; int _spaceCount = 0; int _trailingSpaces = 0; bool get isEmpty => _fragments.isEmpty; bool get isNotEmpty => _fragments.isNotEmpty; bool get isHardBreak => _fragments.isNotEmpty && _fragments.last.isHardBreak; /// The horizontal offset necessary for the line to be correctly aligned. double get alignOffset { final double emptySpace = maxWidth - width; final ui.TextAlign textAlign = paragraph.paragraphStyle.effectiveTextAlign; switch (textAlign) { case ui.TextAlign.center: return emptySpace / 2.0; case ui.TextAlign.right: return emptySpace; case ui.TextAlign.start: return _paragraphDirection == ui.TextDirection.rtl ? emptySpace : 0.0; case ui.TextAlign.end: return _paragraphDirection == ui.TextDirection.rtl ? 0.0 : emptySpace; default: return 0.0; } } bool get isOverflowing => width > maxWidth; bool get canHaveEllipsis { if (paragraph.paragraphStyle.ellipsis == null) { return false; } final int? 
maxLines = paragraph.paragraphStyle.maxLines; return (maxLines == null) || (maxLines == lineNumber + 1); } bool get _canAppendEmptyFragments { if (isHardBreak) { // Can't append more fragments to this line if it has a hard break. return false; } if (_fragmentsForNextLine?.isNotEmpty ?? false) { // If we already have fragments prepared for the next line, then we can't // append more fragments to this line. return false; } return true; } ui.TextDirection get _paragraphDirection => paragraph.paragraphStyle.effectiveTextDirection; void addFragment(LayoutFragment fragment) { _updateMetrics(fragment); if (fragment.isBreak) { _lastBreakableFragment = _fragments.length; } _fragments.add(fragment); } /// Updates the [LineBuilder]'s metrics to take into account the new [fragment]. void _updateMetrics(LayoutFragment fragment) { _spaceCount += fragment.trailingSpaces; if (fragment.isSpaceOnly) { _trailingSpaces += fragment.trailingSpaces; } else { _trailingSpaces = fragment.trailingSpaces; width = widthIncludingSpace + fragment.widthExcludingTrailingSpaces; } widthIncludingSpace += fragment.widthIncludingTrailingSpaces; if (fragment.isPlaceholder) { _adjustPlaceholderAscentDescent(fragment); } if (fragment.isBreak) { _breakCount++; } ascent = math.max(ascent, fragment.ascent); descent = math.max(descent, fragment.descent); } void _adjustPlaceholderAscentDescent(LayoutFragment fragment) { final PlaceholderSpan placeholder = fragment.span as PlaceholderSpan; final double ascent, descent; switch (placeholder.alignment) { case ui.PlaceholderAlignment.top: // The placeholder is aligned to the top of text, which means it has the // same `ascent` as the remaining text. We only need to extend the // `descent` enough to fit the placeholder. ascent = this.ascent; descent = placeholder.height - this.ascent; case ui.PlaceholderAlignment.bottom: // The opposite of `top`. The `descent` is the same, but we extend the // `ascent`. ascent = placeholder.height - this.descent; descent = this.descent; case ui.PlaceholderAlignment.middle: final double textMidPoint = height / 2; final double placeholderMidPoint = placeholder.height / 2; final double diff = placeholderMidPoint - textMidPoint; ascent = this.ascent + diff; descent = this.descent + diff; case ui.PlaceholderAlignment.aboveBaseline: ascent = placeholder.height; descent = 0.0; case ui.PlaceholderAlignment.belowBaseline: ascent = 0.0; descent = placeholder.height; case ui.PlaceholderAlignment.baseline: ascent = placeholder.baselineOffset; descent = placeholder.height - ascent; } // Update the metrics of the fragment to reflect the calculated ascent and // descent. fragment.setMetrics(spanometer, ascent: ascent, descent: descent, widthExcludingTrailingSpaces: fragment.widthExcludingTrailingSpaces, widthIncludingTrailingSpaces: fragment.widthIncludingTrailingSpaces, ); } void _recalculateMetrics() { width = 0; widthIncludingSpace = 0; ascent = 0; descent = 0; _spaceCount = 0; _trailingSpaces = 0; _breakCount = 0; _lastBreakableFragment = -1; for (int i = 0; i < _fragments.length; i++) { _updateMetrics(_fragments[i]); if (_fragments[i].isBreak) { _lastBreakableFragment = i; } } } void forceBreakLastFragment({ double? availableWidth, bool allowEmptyLine = false }) { assert(isNotEmpty); availableWidth ??= maxWidth; assert(widthIncludingSpace > availableWidth); _fragmentsForNextLine ??= <LayoutFragment>[]; // When the line has fragments other than the last one, we can always allow // the last fragment to be empty (i.e. completely removed from the line). 
final bool hasOtherFragments = _fragments.length > 1; final bool allowLastFragmentToBeEmpty = hasOtherFragments || allowEmptyLine; final LayoutFragment lastFragment = _fragments.last; if (lastFragment.isPlaceholder) { // Placeholder can't be force-broken. Either keep all of it in the line or // move it to the next line. if (allowLastFragmentToBeEmpty) { _fragmentsForNextLine!.insert(0, _fragments.removeLast()); _recalculateMetrics(); } return; } spanometer.currentSpan = lastFragment.span; final double lineWidthWithoutLastFragment = widthIncludingSpace - lastFragment.widthIncludingTrailingSpaces; final double availableWidthForFragment = availableWidth - lineWidthWithoutLastFragment; final int forceBreakEnd = lastFragment.end - lastFragment.trailingNewlines; final int breakingPoint = spanometer.forceBreak( lastFragment.start, forceBreakEnd, availableWidth: availableWidthForFragment, allowEmpty: allowLastFragmentToBeEmpty, ); if (breakingPoint == forceBreakEnd) { // The entire fragment remained intact. Let's keep everything as is. return; } _fragments.removeLast(); _recalculateMetrics(); final List<LayoutFragment?> split = lastFragment.split(breakingPoint); final LayoutFragment? first = split.first; if (first != null) { spanometer.measureFragment(first); addFragment(first); } final LayoutFragment? second = split.last; if (second != null) { spanometer.measureFragment(second); _fragmentsForNextLine!.insert(0, second); } } void insertEllipsis() { assert(canHaveEllipsis); assert(isOverflowing); final String ellipsisText = paragraph.paragraphStyle.ellipsis!; _fragmentsForNextLine = <LayoutFragment>[]; spanometer.currentSpan = _fragments.last.span; double ellipsisWidth = spanometer.measureText(ellipsisText); double availableWidth = math.max(0, maxWidth - ellipsisWidth); while (_widthExcludingLastFragment > availableWidth) { _fragmentsForNextLine!.insert(0, _fragments.removeLast()); _recalculateMetrics(); spanometer.currentSpan = _fragments.last.span; ellipsisWidth = spanometer.measureText(ellipsisText); availableWidth = maxWidth - ellipsisWidth; } final LayoutFragment lastFragment = _fragments.last; forceBreakLastFragment(availableWidth: availableWidth, allowEmptyLine: true); final EllipsisFragment ellipsisFragment = EllipsisFragment( endIndex, lastFragment.span, ); ellipsisFragment.setMetrics(spanometer, ascent: lastFragment.ascent, descent: lastFragment.descent, widthExcludingTrailingSpaces: ellipsisWidth, widthIncludingTrailingSpaces: ellipsisWidth, ); addFragment(ellipsisFragment); } void revertToLastBreakOpportunity() { assert(isBreakable); // The last fragment in the line may or may not be breakable. Regardless, // it needs to be removed. // // We need to find the latest breakable fragment in the line (other than the // last fragment). Such breakable fragment is guaranteed to be found because // the line `isBreakable`. // Start from the end and skip the last fragment. int i = _fragments.length - 2; while (!_fragments[i].isBreak) { i--; } _fragmentsForNextLine = _fragments.getRange(i + 1, _fragments.length).toList(); _fragments.removeRange(i + 1, _fragments.length); _recalculateMetrics(); } /// Appends as many zero-width fragments as this line allows. /// /// Returns the number of fragments that were appended. 
int appendZeroWidthFragments(List<LayoutFragment> fragments, {required int startFrom}) { int i = startFrom; while (_canAppendEmptyFragments && i < fragments.length && fragments[i].widthExcludingTrailingSpaces == 0) { addFragment(fragments[i]); i++; } return i - startFrom; } /// Builds the [ParagraphLine] instance that represents this line. ParagraphLine build() { if (_fragmentsForNextLine == null) { _fragmentsForNextLine = _fragments.getRange(_lastBreakableFragment + 1, _fragments.length).toList(); _fragments.removeRange(_lastBreakableFragment + 1, _fragments.length); } final int trailingNewlines = isEmpty ? 0 : _fragments.last.trailingNewlines; final ParagraphLine line = ParagraphLine( lineNumber: lineNumber, startIndex: startIndex, endIndex: endIndex, trailingNewlines: trailingNewlines, trailingSpaces: _trailingSpaces, spaceCount: _spaceCount, hardBreak: isHardBreak, width: width, widthWithTrailingSpaces: widthIncludingSpace, left: alignOffset, height: height, baseline: accumulatedHeight + ascent, ascent: ascent, descent: descent, fragments: _fragments, textDirection: _paragraphDirection, paragraph: paragraph, ); for (final LayoutFragment fragment in _fragments) { fragment.line = line; } return line; } /// Creates a new [LineBuilder] to build the next line in the paragraph. LineBuilder nextLine() { return LineBuilder._( paragraph, spanometer, maxWidth: maxWidth, lineNumber: lineNumber + 1, accumulatedHeight: accumulatedHeight + height, fragments: _fragmentsForNextLine ?? <LayoutFragment>[], ); } } /// Responsible for taking measurements within spans of a paragraph. /// /// Can't perform measurements across spans. To measure across spans, multiple /// measurements have to be taken. /// /// Before performing any measurement, the [currentSpan] has to be set. Once /// it's set, the [Spanometer] updates the underlying [context] so that /// subsequent measurements use the correct styles. class Spanometer { Spanometer(this.paragraph); final CanvasParagraph paragraph; static final RulerHost _rulerHost = RulerHost(); static final Map<TextHeightStyle, TextHeightRuler> _rulers = <TextHeightStyle, TextHeightRuler>{}; @visibleForTesting static Map<TextHeightStyle, TextHeightRuler> get rulers => _rulers; /// Clears the cache of rulers that are used for measuring text height and /// baseline metrics. static void clearRulersCache() { _rulers.forEach((TextHeightStyle style, TextHeightRuler ruler) { ruler.dispose(); }); _rulers.clear(); } double? get letterSpacing => currentSpan.style.letterSpacing; TextHeightRuler? _currentRuler; ParagraphSpan? _currentSpan; ParagraphSpan get currentSpan => _currentSpan!; set currentSpan(ParagraphSpan? span) { // Update the font string if it's different from the last applied font // string. // // Also, we need to update the font string even if the span isn't changing. // That's because `textContext` is shared across all spanometers. if (span != null) { final String newCssFontString = span.style.cssFontString; if (_lastContextFont != newCssFontString) { _lastContextFont = newCssFontString; textContext.font = newCssFontString; } } if (span == _currentSpan) { return; } _currentSpan = span; if (span == null) { _currentRuler = null; return; } // Update the height ruler. // If the ruler doesn't exist in the cache, create a new one and cache it. final TextHeightStyle heightStyle = span.style.heightStyle; TextHeightRuler? 
ruler = _rulers[heightStyle]; if (ruler == null) { ruler = TextHeightRuler(heightStyle, _rulerHost); _rulers[heightStyle] = ruler; } _currentRuler = ruler; } /// Whether the spanometer is ready to take measurements. bool get isReady => _currentSpan != null; /// The distance from the top of the current span to the alphabetic baseline. double get ascent => _currentRuler!.alphabeticBaseline; /// The distance from the bottom of the current span to the alphabetic baseline. double get descent => height - ascent; /// The line height of the current span. double get height => _currentRuler!.height; double measureText(String text) { return measureSubstring(textContext, text, 0, text.length); } double measureRange(int start, int end) { assert(_currentSpan != null); // Make sure the range is within the current span. assert(start >= currentSpan.start && start <= currentSpan.end); assert(end >= currentSpan.start && end <= currentSpan.end); return _measure(start, end); } void measureFragment(LayoutFragment fragment) { if (fragment.isPlaceholder) { final PlaceholderSpan placeholder = fragment.span as PlaceholderSpan; // The ascent/descent values of the placeholder fragment will be finalized // later when the line is built. fragment.setMetrics(this, ascent: placeholder.height, descent: 0, widthExcludingTrailingSpaces: placeholder.width, widthIncludingTrailingSpaces: placeholder.width, ); } else { currentSpan = fragment.span; final double widthExcludingTrailingSpaces = _measure(fragment.start, fragment.end - fragment.trailingSpaces); final double widthIncludingTrailingSpaces = _measure(fragment.start, fragment.end - fragment.trailingNewlines); fragment.setMetrics(this, ascent: ascent, descent: descent, widthExcludingTrailingSpaces: widthExcludingTrailingSpaces, widthIncludingTrailingSpaces: widthIncludingTrailingSpaces, ); } } /// In a continuous, unbreakable block of text from [start] to [end], finds /// the point where text should be broken to fit in the given [availableWidth]. /// /// The [start] and [end] indices have to be within the same text span. /// /// When [allowEmpty] is true, the result is guaranteed to be at least one /// character after [start]. But if [allowEmpty] is false and there isn't /// enough [availableWidth] to fit the first character, then [start] is /// returned. /// /// See also: /// - [LineBuilder.forceBreak]. int forceBreak( int start, int end, { required double availableWidth, required bool allowEmpty, }) { assert(_currentSpan != null); // Make sure the range is within the current span. assert(start >= currentSpan.start && start <= currentSpan.end); assert(end >= currentSpan.start && end <= currentSpan.end); if (availableWidth <= 0.0) { return allowEmpty ? start : start + 1; } int low = start; int high = end; while (high - low > 1) { final int mid = (low + high) ~/ 2; final double width = _measure(start, mid); if (width < availableWidth) { low = mid; } else if (width > availableWidth) { high = mid; } else { low = high = mid; } } if (low == start && !allowEmpty) { low++; } return low; } double _measure(int start, int end) { assert(_currentSpan != null); // Make sure the range is within the current span. assert(start >= currentSpan.start && start <= currentSpan.end); assert(end >= currentSpan.start && end <= currentSpan.end); return measureSubstring( textContext, paragraph.plainText, start, end, letterSpacing: letterSpacing, ); } }
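// A minimal standalone sketch of the binary search performed by
// `Spanometer.forceBreak` above. The `measure` callback is an assumption that
// stands in for the spanometer's per-span text measurement; the function is
// illustrative and not part of the engine API. When `allowEmpty` is false the
// result is always at least `start + 1`, so the fragment is never left empty;
// when it is true, `start` may be returned if even the first character does
// not fit.
int sketchForceBreak(
  double Function(int start, int end) measure,
  int start,
  int end, {
  required double availableWidth,
  required bool allowEmpty,
}) {
  if (availableWidth <= 0.0) {
    return allowEmpty ? start : start + 1;
  }

  // Binary search for the largest break point whose measured width still fits
  // within the available width.
  int low = start;
  int high = end;
  while (high - low > 1) {
    final int mid = (low + high) ~/ 2;
    final double width = measure(start, mid);
    if (width < availableWidth) {
      low = mid;
    } else if (width > availableWidth) {
      high = mid;
    } else {
      low = high = mid;
    }
  }

  if (low == start && !allowEmpty) {
    low++;
  }
  return low;
}

// For example, with a constant 7px-per-character measure and a 20px budget,
// only two characters fit:
//   sketchForceBreak((int s, int e) => 7.0 * (e - s), 0, 10,
//       availableWidth: 20.0, allowEmpty: false) == 2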
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:async'; import 'dart:math' as math; import 'dart:typed_data'; import 'package:meta/meta.dart'; import 'package:ui/ui.dart' as ui; import '../browser_detection.dart'; import '../dom.dart'; import '../mouse/prevent_default.dart'; import '../platform_dispatcher.dart'; import '../safe_browser_api.dart'; import '../semantics.dart'; import '../services.dart'; import '../text/paragraph.dart'; import '../util.dart'; import 'autofill_hint.dart'; import 'composition_aware_mixin.dart'; import 'input_action.dart'; import 'input_type.dart'; import 'text_capitalization.dart'; /// Make the content editable span visible to facilitate debugging. bool _debugVisibleTextEditing = false; /// Set this to `true` to print when text input commands are scheduled and run. bool _debugPrintTextInputCommands = false; /// The `keyCode` of the "Enter" key. const int _kReturnKeyCode = 13; /// Offset in pixels to place an element outside of the screen. const int offScreenOffset = -9999; /// Blink and Webkit engines, bring an overlay on top of the text field when it /// is autofilled. bool browserHasAutofillOverlay() => browserEngine == BrowserEngine.blink || browserEngine == BrowserEngine.webkit; /// `transparentTextEditing` class is configured to make the autofill overlay /// transparent. const String transparentTextEditingClass = 'transparentTextEditing'; void _emptyCallback(dynamic _) {} /// The default [HostNode] that hosts all DOM required for text editing when a11y is not enabled. @visibleForTesting // TODO(mdebbar): There could be multiple views with multiple text editing hosts. // https://github.com/flutter/flutter/issues/137344 DomElement get defaultTextEditingRoot => EnginePlatformDispatcher.instance.implicitView!.dom.textEditingHost; /// These style attributes are constant throughout the life time of an input /// element. /// /// They are assigned once during the creation of the DOM element. void _setStaticStyleAttributes(DomHTMLElement domElement) { domElement.classList.add(HybridTextEditing.textEditingClass); final DomCSSStyleDeclaration elementStyle = domElement.style; elementStyle // Prevent (forced-colors: active) from making our invisible text fields visible. // For more details, see: https://developer.mozilla.org/en-US/docs/Web/CSS/forced-color-adjust ..setProperty('forced-color-adjust', 'none') ..whiteSpace = 'pre-wrap' ..alignContent = 'center' ..position = 'absolute' ..top = '0' ..left = '0' ..padding = '0' ..opacity = '1' ..color = 'transparent' ..backgroundColor = 'transparent' ..background = 'transparent' // This property makes the input's blinking cursor transparent. ..caretColor = 'transparent' ..outline = 'none' ..border = 'none' ..resize = 'none' ..textShadow = 'none' ..overflow = 'hidden' ..transformOrigin = '0 0 0'; if (browserHasAutofillOverlay()) { domElement.classList.add(transparentTextEditingClass); } if (_debugVisibleTextEditing) { elementStyle ..color = 'purple' ..outline = '1px solid purple'; } } /// Sets attributes to hide autofill elements. /// /// These style attributes are constant throughout the life time of an input /// element. /// /// They are assigned once during the creation of the DOM element. 
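///
/// For example, the hidden submit button that gets appended to an autofill
/// form is kept off-screen:
///
/// ```dart
/// _styleAutofillElements(submitButton, isOffScreen: true);
/// ```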
void _styleAutofillElements( DomHTMLElement domElement, { bool isOffScreen = false, bool shouldHideElement = true, bool shouldDisablePointerEvents = false, }) { final DomCSSStyleDeclaration elementStyle = domElement.style; elementStyle ..whiteSpace = 'pre-wrap' ..alignContent = 'center' ..padding = '0' ..opacity = '1' ..color = 'transparent' ..backgroundColor = 'transparent' ..background = 'transparent' ..outline = 'none' ..border = 'none' ..resize = 'none' ..textShadow = 'transparent' ..transformOrigin = '0 0 0'; if (isOffScreen) { elementStyle ..top = '${offScreenOffset}px' ..left = '${offScreenOffset}px'; } if (shouldHideElement) { elementStyle ..width = '0' ..height = '0'; } if (shouldDisablePointerEvents) { elementStyle.pointerEvents = 'none'; } if (browserHasAutofillOverlay()) { domElement.classList.add(transparentTextEditingClass); } /// This property makes the input's blinking cursor transparent. elementStyle.setProperty('caret-color', 'transparent'); } /// Form that contains all the fields in the same AutofillGroup. /// /// An [EngineAutofillForm] will only be constructed when autofill is enabled /// (the default) on the current input field. See the [fromFrameworkMessage] /// static method. class EngineAutofillForm { EngineAutofillForm({ required this.formElement, this.elements, this.items, this.formIdentifier = '', this.insertionReferenceNode, }); final DomHTMLFormElement formElement; final Map<String, DomHTMLElement>? elements; final Map<String, AutofillInfo>? items; final DomHTMLElement? insertionReferenceNode; /// Identifier for the form. /// /// It is constructed by concatenating unique ids of input elements on the /// form. /// /// It is used for storing the form until submission. /// See [formsOnTheDom]. final String formIdentifier; /// Creates an [EngineAutofillFrom] from the JSON representation of a Flutter /// framework `TextInputConfiguration` object. /// /// The `focusedElementAutofill` argument corresponds to the "autofill" field /// in a `TextInputConfiguration`. Not having this field indicates autofill /// is explicitly disabled on the text field by the developer. /// /// The `fields` argument corresponds to the "fields" field in a /// `TextInputConfiguration`. /// /// Returns null if autofill is disabled for the input field. static EngineAutofillForm? fromFrameworkMessage( Map<String, dynamic>? focusedElementAutofill, List<dynamic>? fields, ) { // Autofill value will be null if the developer explicitly disables it on // the input field. if (focusedElementAutofill == null) { return null; } // If there is only one text field in the autofill model, `fields` will be // null. `focusedElementAutofill` contains the information about the one // text field. final Map<String, DomHTMLElement> elements = <String, DomHTMLElement>{}; final Map<String, AutofillInfo> items = <String, AutofillInfo>{}; final DomHTMLFormElement formElement = createDomHTMLFormElement(); final bool isSafariDesktopStrategy = textEditing.strategy is SafariDesktopTextEditingStrategy; DomHTMLElement? insertionReferenceNode; // Validation is in the framework side. formElement.noValidate = true; formElement.method = 'post'; formElement.action = '#'; formElement.addEventListener('submit', preventDefaultListener); // We need to explicitly disable pointer events on the form in Safari Desktop, // so that we don't have pointer event collisions if users hover over or click // into the invisible autofill elements within the form. 
_styleAutofillElements(formElement, shouldDisablePointerEvents: isSafariDesktopStrategy); // We keep the ids in a list then sort them later, in case the text fields' // locations are re-ordered on the framework side. final List<String> ids = List<String>.empty(growable: true); // The focused text editing element will not be created here. final AutofillInfo focusedElement = AutofillInfo.fromFrameworkMessage(focusedElementAutofill); if (fields != null) { bool fieldIsFocusedElement = false; for (final Map<String, dynamic> field in fields.cast<Map<String, dynamic>>()) { final Map<String, dynamic> autofillInfo = field.readJson('autofill'); final AutofillInfo autofill = AutofillInfo.fromFrameworkMessage( autofillInfo, textCapitalization: TextCapitalizationConfig.fromInputConfiguration( field.readString('textCapitalization'), ), ); ids.add(autofill.uniqueIdentifier); if (autofill.uniqueIdentifier != focusedElement.uniqueIdentifier) { final EngineInputType engineInputType = EngineInputType.fromName( field.readJson('inputType').readString('name'), ); final DomHTMLElement htmlElement = engineInputType.createDomElement(); autofill.editingState.applyToDomElement(htmlElement); autofill.applyToDomElement(htmlElement); // Safari Desktop does not respect elements that are invisible (or // have no size) and that leads to issues with autofill only partially // working (ref: https://github.com/flutter/flutter/issues/71275). // Thus, we have to make sure that the elements remain invisible to users, // but not to Safari for autofill to work. Since these elements are // sized and placed on the DOM, we also have to disable pointer events. _styleAutofillElements(htmlElement, shouldHideElement: !isSafariDesktopStrategy, shouldDisablePointerEvents: isSafariDesktopStrategy); items[autofill.uniqueIdentifier] = autofill; elements[autofill.uniqueIdentifier] = htmlElement; formElement.append(htmlElement); // We want to track the node in the position directly after our focused // element, so we can later insert that element in the correct position // right before this node. if(fieldIsFocusedElement){ insertionReferenceNode = htmlElement; fieldIsFocusedElement = false; } } else { // current field is the focused element that we create elsewhere fieldIsFocusedElement = true; } } } else { // There is one input element in the form. ids.add(focusedElement.uniqueIdentifier); } ids.sort(); final StringBuffer idBuffer = StringBuffer(); // Add a separator between element identifiers. for (final String id in ids) { if (idBuffer.length > 0) { idBuffer.write('*'); } idBuffer.write(id); } final String formIdentifier = idBuffer.toString(); // If a form with the same Autofill elements is already on the dom, remove // it from DOM. final DomHTMLFormElement? form = formsOnTheDom[formIdentifier]; form?.remove(); // In order to submit the form when Framework sends a `TextInput.commit` // message, we add a submit button to the form. final DomHTMLInputElement submitButton = createDomHTMLInputElement(); _styleAutofillElements(submitButton, isOffScreen: true); submitButton.className = 'submitBtn'; submitButton.type = 'submit'; formElement.append(submitButton); // If the focused node is at the end of the form, we'll default to inserting // it before the submit field. 
insertionReferenceNode ??= submitButton; return EngineAutofillForm( formElement: formElement, elements: elements, items: items, formIdentifier: formIdentifier, insertionReferenceNode: insertionReferenceNode ); } void placeForm(DomHTMLElement mainTextEditingElement) { // Since we're disabling pointer events on the form to fix Safari autofill, // we need to explicitly set pointer events on the active input element in // order to calculate the correct pointer event offsets. // See: https://github.com/flutter/flutter/issues/136006 if(textEditing.strategy is SafariDesktopTextEditingStrategy) { mainTextEditingElement.style.pointerEvents = 'all'; } formElement.insertBefore(mainTextEditingElement, insertionReferenceNode); defaultTextEditingRoot.append(formElement); } void storeForm() { formsOnTheDom[formIdentifier] = formElement; _styleAutofillElements(formElement, isOffScreen: true); } /// Listens to `onInput` event on the form fields. /// /// Registering to the listeners could have been done in the constructor. /// On the other hand, overall for text editing there is already a lifecycle /// for subscriptions: All the subscriptions of the DOM elements are to the /// `subscriptions` property of [DefaultTextEditingStrategy]. /// [TextEditingStrategy] manages all subscription lifecyle. All /// listeners with no exceptions are added during /// [TextEditingStrategy.addEventHandlers] method call and all /// listeners are removed during [TextEditingStrategy.disable] method call. List<DomSubscription> addInputEventListeners() { final Iterable<String> keys = elements!.keys; final List<DomSubscription> subscriptions = <DomSubscription>[]; void addSubscriptionForKey(String key) { final DomElement element = elements![key]!; subscriptions.add( DomSubscription(element, 'input', (DomEvent e) { if (items![key] == null) { throw StateError( 'AutofillInfo must have a valid uniqueIdentifier.'); } else { final AutofillInfo autofillInfo = items![key]!; handleChange(element, autofillInfo); } })); } keys.forEach(addSubscriptionForKey); return subscriptions; } void handleChange(DomElement domElement, AutofillInfo autofillInfo) { final EditingState newEditingState = EditingState.fromDomElement( domElement as DomHTMLElement); _sendAutofillEditingState(autofillInfo.uniqueIdentifier, newEditingState); } /// Sends the 'TextInputClient.updateEditingStateWithTag' message to the framework. void _sendAutofillEditingState(String? tag, EditingState editingState) { EnginePlatformDispatcher.instance.invokeOnPlatformMessage( 'flutter/textinput', const JSONMethodCodec().encodeMethodCall( MethodCall( 'TextInputClient.updateEditingStateWithTag', <dynamic>[ 0, <String?, dynamic>{tag: editingState.toFlutter()} ], ), ), _emptyCallback, ); } } /// Autofill related values. /// /// These values are to be used when a text field have autofill enabled. @visibleForTesting class AutofillInfo { AutofillInfo({ required this.editingState, required this.uniqueIdentifier, required this.autofillHint, required this.textCapitalization, this.placeholder, }); factory AutofillInfo.fromFrameworkMessage(Map<String, dynamic> autofill, {TextCapitalizationConfig textCapitalization = const TextCapitalizationConfig.defaultCapitalization()}) { final String uniqueIdentifier = autofill.readString('uniqueIdentifier'); final List<dynamic>? hintsList = autofill.tryList('hints'); final String? firstHint = (hintsList == null || hintsList.isEmpty) ? 
null : hintsList.first as String; final EditingState editingState = EditingState.fromFrameworkMessage(autofill.readJson('editingValue')); return AutofillInfo( uniqueIdentifier: uniqueIdentifier, autofillHint: (firstHint != null) ? BrowserAutofillHints.instance.flutterToEngine(firstHint) : null, editingState: editingState, placeholder: autofill.tryString('hintText'), textCapitalization: textCapitalization, ); } /// The current text and selection state of a text field. final EditingState editingState; /// Unique value set by the developer or generated by the framework. /// /// Used as id of the text field. /// /// An example an id generated by the framework: `EditableText-285283643`. final String uniqueIdentifier; /// Information on how should autofilled text capitalized. /// /// For example for [TextCapitalization.characters] each letter is converted /// to upper case. /// /// This value is not necessary for autofilling the focused element since /// [DefaultTextEditingStrategy.inputConfiguration] already has this /// information. /// /// On the other hand for the multi element forms, for the input elements /// other the focused field, we need to use this information. final TextCapitalizationConfig textCapitalization; /// The type of information expected in the field, specified by the developer. /// /// Used as a guidance to the browser as to the type of information expected /// in the field. /// See: https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes/autocomplete final String? autofillHint; /// The optional hint text placed on the view that typically suggests what /// sort of input the field accepts, for example "enter your password here". /// /// If the developer does not specify any [autofillHints], the [placeholder] /// can be a useful indication to the platform autofill service as to what /// information is expected in this field. final String? placeholder; void applyToDomElement(DomHTMLElement domElement, {bool focusedElement = false}) { final String? autofillHint = this.autofillHint; final String? placeholder = this.placeholder; if (domInstanceOfString(domElement, 'HTMLInputElement')) { final DomHTMLInputElement element = domElement as DomHTMLInputElement; if (placeholder != null) { element.placeholder = placeholder; } if (autofillHint != null) { element.name = autofillHint; element.id = autofillHint; if (autofillHint.contains('password')) { element.type = 'password'; } else { element.type = 'text'; } } element.autocomplete = autofillHint ?? 'on'; } else if (domInstanceOfString(domElement, 'HTMLTextAreaElement')) { final DomHTMLTextAreaElement element = domElement as DomHTMLTextAreaElement; if (placeholder != null) { element.placeholder = placeholder; } if (autofillHint != null) { element.name = autofillHint; element.id = autofillHint; } element.setAttribute('autocomplete', autofillHint ?? 'on'); } } } /// Replaces a range of text in the original string with the text given in the /// replacement string. 
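///
/// For example (a minimal illustration; the strings are made up):
///
/// ```dart
/// // Replaces "brown" (the range [4, 9)) with "red".
/// _replace('the brown fox', 'red', const ui.TextRange(start: 4, end: 9));
/// // => 'the red fox'
/// ```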
String _replace(String originalText, String replacementText, ui.TextRange replacedRange) { assert(replacedRange.isValid); assert(replacedRange.start <= originalText.length && replacedRange.end <= originalText.length); final ui.TextRange normalizedRange = ui.TextRange(start: math.min(replacedRange.start, replacedRange.end), end: math.max(replacedRange.start, replacedRange.end)); return normalizedRange.textBefore(originalText) + replacementText + normalizedRange.textAfter(originalText); } /// The change between the last editing state and the current editing state /// of a text field. /// /// This is packaged into a JSON and sent to the framework /// to be processed into a concrete [TextEditingDelta]. class TextEditingDeltaState { TextEditingDeltaState({ this.oldText = '', this.deltaText = '', this.deltaStart = -1, this.deltaEnd = -1, this.baseOffset, this.extentOffset, this.composingOffset, this.composingExtent, }); /// Infers the correct delta values based on information from the new editing state /// and the last editing state. /// /// For a deletion, the length and the direction of the deletion (backward or forward) /// are calculated by comparing the new and last editing states. /// If the deletion is backward, the length is susbtracted from the [deltaEnd] /// that we set when beforeinput was fired to determine the [deltaStart]. /// If the deletion is forward, [deltaStart] is set to the new editing state baseOffset /// and [deltaEnd] is set to [deltaStart] incremented by the length of the deletion. /// /// For a replacement at a selection we set the [deltaStart] to be the beginning of the selection /// from the last editing state. /// /// For the composing region we check if a composing range was captured by the compositionupdate event, /// we have a non empty [deltaText], and that we did not have an active selection. An active selection /// would mean we are not composing. /// /// We then verify that the delta we collected results in the text contained within the new editing state /// when applied to the last editing state. If it is not then we use our new editing state as the source of truth, /// and use regex to find the correct [deltaStart] and [deltaEnd]. static TextEditingDeltaState inferDeltaState(EditingState newEditingState, EditingState? lastEditingState, TextEditingDeltaState lastTextEditingDeltaState) { final TextEditingDeltaState newTextEditingDeltaState = lastTextEditingDeltaState.copyWith(); final bool previousSelectionWasCollapsed = lastEditingState?.baseOffset == lastEditingState?.extentOffset; final bool isTextBeingRemoved = newTextEditingDeltaState.deltaText.isEmpty && newTextEditingDeltaState.deltaEnd != -1; final bool isTextBeingChangedAtActiveSelection = newTextEditingDeltaState.deltaText.isNotEmpty && !previousSelectionWasCollapsed; if (isTextBeingRemoved) { // When text is deleted outside of the composing region or is cut using the native toolbar, // we calculate the length of the deleted text by comparing the new and old editing state lengths. // If the deletion is backward, the length is subtracted from the [deltaEnd] // that we set when beforeinput was fired to determine the [deltaStart]. // If the deletion is forward, [deltaStart] is set to the new editing state baseOffset // and [deltaEnd] is set to [deltaStart] incremented by the length of the deletion. 
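//
// As a concrete illustration (the values are made up for the example): a
// backspace at the end of 'abcd' leaves 'abc' with the caret at 3. The
// beforeinput handler recorded deltaEnd = 4, the deleted length is
// 4 - 3 = 1, and because the base offset changed the deletion is backward,
// so deltaStart becomes 4 - 1 = 3, i.e. the delta removes the range [3, 4).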
final int deletedLength = newTextEditingDeltaState.oldText.length - newEditingState.text!.length; final bool backwardDeletion = newEditingState.baseOffset != lastEditingState?.baseOffset; if (backwardDeletion) { newTextEditingDeltaState.deltaStart = newTextEditingDeltaState.deltaEnd - deletedLength; } else { // Forward deletion newTextEditingDeltaState.deltaStart = newEditingState.baseOffset!; newTextEditingDeltaState.deltaEnd = newTextEditingDeltaState.deltaStart + deletedLength; } } else if (isTextBeingChangedAtActiveSelection) { final bool isPreviousSelectionInverted = lastEditingState!.baseOffset! > lastEditingState.extentOffset!; // When a selection of text is replaced by a copy/paste operation we set the starting range // of the delta to be the beginning of the selection of the previous editing state. newTextEditingDeltaState.deltaStart = isPreviousSelectionInverted ? lastEditingState.extentOffset! : lastEditingState.baseOffset!; } // If we are composing then set the delta range to the composing region we // captured in compositionupdate. final bool isCurrentlyComposing = newTextEditingDeltaState.composingOffset != null && newTextEditingDeltaState.composingOffset != newTextEditingDeltaState.composingExtent; if (newTextEditingDeltaState.deltaText.isNotEmpty && previousSelectionWasCollapsed && isCurrentlyComposing) { newTextEditingDeltaState.deltaStart = newTextEditingDeltaState.composingOffset!; } final bool isDeltaRangeEmpty = newTextEditingDeltaState.deltaStart == -1 && newTextEditingDeltaState.deltaStart == newTextEditingDeltaState.deltaEnd; if (!isDeltaRangeEmpty) { // To verify the range of our delta we should compare the newEditingState's // text with the delta applied to the oldText. If they differ then capture // the correct delta range from the newEditingState's text value. // // We can assume the deltaText for additions and replacements to the text value // are accurate. What may not be accurate is the range of the delta. // // We can think of the newEditingState as our source of truth. // // This verification is needed for cases such as the insertion of a period // after a double space, and the insertion of an accented character through // a native composing menu. final ui.TextRange replacementRange = ui.TextRange(start: newTextEditingDeltaState.deltaStart, end: newTextEditingDeltaState.deltaEnd); final String textAfterDelta = _replace( newTextEditingDeltaState.oldText, newTextEditingDeltaState.deltaText, replacementRange); final bool isDeltaVerified = textAfterDelta == newEditingState.text!; if (!isDeltaVerified) { // 1. Find all matches for deltaText. // 2. Apply matches/replacement to oldText until oldText matches the // new editing state's text value. final bool isPeriodInsertion = newTextEditingDeltaState.deltaText.contains('.'); final RegExp deltaTextPattern = RegExp(RegExp.escape(newTextEditingDeltaState.deltaText)); for (final Match match in deltaTextPattern.allMatches(newEditingState.text!)) { String textAfterMatch; int actualEnd; final bool isMatchWithinOldTextBounds = match.start >= 0 && match.end <= newTextEditingDeltaState.oldText.length; if (!isMatchWithinOldTextBounds) { actualEnd = match.start + newTextEditingDeltaState.deltaText.length - 1; textAfterMatch = _replace( newTextEditingDeltaState.oldText, newTextEditingDeltaState.deltaText, ui.TextRange( start: match.start, end: actualEnd, ), ); } else { actualEnd = actualEnd = isPeriodInsertion? 
match.end - 1 : match.end; textAfterMatch = _replace( newTextEditingDeltaState.oldText, newTextEditingDeltaState.deltaText, ui.TextRange( start: match.start, end: actualEnd, ), ); } if (textAfterMatch == newEditingState.text!) { newTextEditingDeltaState.deltaStart = match.start; newTextEditingDeltaState.deltaEnd = actualEnd; break; } } } } // Update selection of the delta using information from the new editing state. newTextEditingDeltaState.baseOffset = newEditingState.baseOffset; newTextEditingDeltaState.extentOffset = newEditingState.extentOffset; return newTextEditingDeltaState; } /// The text before the text field was updated. String oldText; /// The text that is being inserted/replaced into the text field. /// This will be an empty string for deletions and non text updates /// such as selection updates. String deltaText; /// The position in the text field where the change begins. /// /// Has a default value of -1 to signify an empty range. int deltaStart; /// The position in the text field where the change ends. /// /// Has a default value of -1 to signify an empty range. int deltaEnd; /// The updated starting position of the selection in the text field. int? baseOffset; /// The updated terminating position of the selection in the text field. int? extentOffset; /// The starting position of the composing region. int? composingOffset; /// The terminating position of the composing region. int? composingExtent; Map<String, dynamic> toFlutter() => <String, dynamic>{ 'deltas': <Map<String, dynamic>>[ <String, dynamic>{ 'oldText': oldText, 'deltaText': deltaText, 'deltaStart': deltaStart, 'deltaEnd': deltaEnd, 'selectionBase': baseOffset, 'selectionExtent': extentOffset, 'composingBase': composingOffset, 'composingExtent': composingExtent }, ], }; TextEditingDeltaState copyWith({ String? oldText, String? deltaText, int? deltaStart, int? deltaEnd, int? baseOffset, int? extentOffset, int? composingOffset, int? composingExtent, }) { return TextEditingDeltaState( oldText: oldText ?? this.oldText, deltaText: deltaText ?? this.deltaText, deltaStart: deltaStart ?? this.deltaStart, deltaEnd: deltaEnd ?? this.deltaEnd, baseOffset: baseOffset ?? this.baseOffset, extentOffset: extentOffset ?? this.extentOffset, composingOffset: composingOffset ?? this.composingOffset, composingExtent: composingExtent ?? this.composingExtent, ); } } /// The current text and selection state of a text field. class EditingState { EditingState({ this.text, int? baseOffset, int? extentOffset, this.composingBaseOffset = -1, this.composingExtentOffset = -1 }) : // Don't allow negative numbers. baseOffset = math.max(0, baseOffset ?? 0), // Don't allow negative numbers. extentOffset = math.max(0, extentOffset ?? 0); /// Creates an [EditingState] instance using values from an editing state Map /// coming from Flutter. /// /// The `editingState` Map has the following structure: /// ```json /// { /// "text": "The text here", /// "selectionBase": 0, /// "selectionExtent": 0, /// "selectionAffinity": "TextAffinity.upstream", /// "selectionIsDirectional": false, /// "composingBase": -1, /// "composingExtent": -1 /// } /// ``` /// /// Flutter Framework can send the [selectionBase] and [selectionExtent] as /// -1, if so 0 assigned to the [baseOffset] and [extentOffset]. -1 is not a /// valid selection range for input DOM elements. factory EditingState.fromFrameworkMessage( Map<String, dynamic> flutterEditingState) { final String? 
text = flutterEditingState.tryString('text'); final int selectionBase = flutterEditingState.readInt('selectionBase'); final int selectionExtent = flutterEditingState.readInt('selectionExtent'); final int? composingBase = flutterEditingState.tryInt('composingBase'); final int? composingExtent = flutterEditingState.tryInt('composingExtent'); return EditingState( text: text, baseOffset: selectionBase, extentOffset: selectionExtent, composingBaseOffset: composingBase ?? -1, composingExtentOffset: composingExtent ?? -1 ); } /// Creates an [EditingState] instance using values from the editing element /// in the DOM. /// /// [domElement] can be a [InputElement] or a [TextAreaElement] depending on /// the [InputType] of the text field. factory EditingState.fromDomElement(DomHTMLElement? domElement) { if (domInstanceOfString(domElement, 'HTMLInputElement')) { final DomHTMLInputElement element = domElement! as DomHTMLInputElement; if (element.selectionDirection == 'backward') { return EditingState( text: element.value, baseOffset: element.selectionEnd?.toInt(), extentOffset: element.selectionStart?.toInt()); } else { return EditingState( text: element.value, baseOffset: element.selectionStart?.toInt(), extentOffset: element.selectionEnd?.toInt()); } } else if (domInstanceOfString(domElement, 'HTMLTextAreaElement')) { final DomHTMLTextAreaElement element = domElement! as DomHTMLTextAreaElement; if (element.selectionDirection == 'backward') { return EditingState( text: element.value, baseOffset: element.selectionEnd?.toInt(), extentOffset: element.selectionStart?.toInt()); } else { return EditingState( text: element.value, baseOffset: element.selectionStart?.toInt(), extentOffset: element.selectionEnd?.toInt()); } } else { throw UnsupportedError('Initialized with unsupported input type'); } } // Pick the smallest selection index for base. int get minOffset => math.min(baseOffset ?? 0, extentOffset ?? 0); // Pick the greatest selection index for extent. int get maxOffset => math.max(baseOffset ?? 0, extentOffset ?? 0); EditingState copyWith({ String? text, int? baseOffset, int? extentOffset, int? composingBaseOffset, int? composingExtentOffset, }) { return EditingState( text: text ?? this.text, baseOffset: baseOffset ?? this.baseOffset, extentOffset: extentOffset ?? this.extentOffset, composingBaseOffset: composingBaseOffset ?? this.composingBaseOffset, composingExtentOffset: composingExtentOffset ?? this.composingExtentOffset, ); } /// The counterpart of [EditingState.fromFrameworkMessage]. It generates a Map that /// can be sent to Flutter. // TODO(mdebbar): Should we get `selectionAffinity` and other properties from flutter's editing state? Map<String, dynamic> toFlutter() => <String, dynamic>{ 'text': text, 'selectionBase': baseOffset, 'selectionExtent': extentOffset, 'composingBase': composingBaseOffset, 'composingExtent': composingExtentOffset, }; /// The current text being edited. final String? text; /// The offset at which the text selection originates. final int? baseOffset; /// The offset at which the text selection terminates. final int? extentOffset; /// The offset at which [CompositionAwareMixin.composingText] begins, if any. final int composingBaseOffset; /// The offset at which [CompositionAwareMixin.composingText] terminates, if any. final int composingExtentOffset; /// Whether the current editing state is valid or not. bool get isValid => baseOffset! >= 0 && extentOffset! 
>= 0; @override int get hashCode => Object.hash( text, baseOffset, extentOffset, composingBaseOffset, composingExtentOffset ); @override bool operator ==(Object other) { if (identical(this, other)) { return true; } if (runtimeType != other.runtimeType) { return false; } return other is EditingState && other.text == text && other.minOffset == minOffset && other.maxOffset == maxOffset && other.composingBaseOffset == composingBaseOffset && other.composingExtentOffset == composingExtentOffset; } @override String toString() { String result = super.toString(); assert(() { result = 'EditingState("$text", base:$baseOffset, extent:$extentOffset, composingBase:$composingBaseOffset, composingExtent:$composingExtentOffset)'; return true; }()); return result; } /// Sets the selection values of a DOM element using this [EditingState]. /// /// [domElement] can be a [InputElement] or a [TextAreaElement] depending on /// the [InputType] of the text field. /// /// This should only be used by focused elements only, because only focused /// elements can have their text selection range set. Attempting to set /// selection range on a non-focused element will cause it to request focus. /// /// See also: /// /// * [applyTextToDomElement], which is used for non-focused elements. void applyToDomElement(DomHTMLElement? domElement) { if (domInstanceOfString(domElement, 'HTMLInputElement')) { final DomHTMLInputElement element = domElement! as DomHTMLInputElement; element.value = text; element.setSelectionRange(minOffset, maxOffset); } else if (domInstanceOfString(domElement, 'HTMLTextAreaElement')) { final DomHTMLTextAreaElement element = domElement! as DomHTMLTextAreaElement; element.value = text; element.setSelectionRange(minOffset, maxOffset); } else { throw UnsupportedError('Unsupported DOM element type: <${domElement?.tagName}> (${domElement.runtimeType})'); } } /// Applies the [text] to the [domElement]. /// /// This is used by non-focused elements. /// /// See also: /// /// * [applyToDomElement], which is used for focused elements. void applyTextToDomElement(DomHTMLElement? domElement) { if (domInstanceOfString(domElement, 'HTMLInputElement')) { final DomHTMLInputElement element = domElement! as DomHTMLInputElement; element.value = text; } else if (domInstanceOfString(domElement, 'HTMLTextAreaElement')) { final DomHTMLTextAreaElement element = domElement! as DomHTMLTextAreaElement; element.value = text; } else { throw UnsupportedError('Unsupported DOM element type'); } } } /// Controls the appearance of the input control being edited. /// /// For example, [inputType] determines whether we should use `<input>` or /// `<textarea>` as a backing DOM element. /// /// This corresponds to Flutter's [TextInputConfiguration]. class InputConfiguration { InputConfiguration({ this.inputType = EngineInputType.text, this.inputAction = 'TextInputAction.done', this.obscureText = false, this.readOnly = false, this.autocorrect = true, this.textCapitalization = const TextCapitalizationConfig.defaultCapitalization(), this.autofill, this.autofillGroup, this.enableDeltaModel = false, }); InputConfiguration.fromFrameworkMessage( Map<String, dynamic> flutterInputConfiguration) : inputType = EngineInputType.fromName( flutterInputConfiguration.readJson('inputType').readString('name'), isDecimal: flutterInputConfiguration.readJson('inputType').tryBool('decimal') ?? false, isMultiline: flutterInputConfiguration.readJson('inputType').tryBool('isMultiline') ?? false, ), inputAction = flutterInputConfiguration.tryString('inputAction') ?? 
'TextInputAction.done', obscureText = flutterInputConfiguration.tryBool('obscureText') ?? false, readOnly = flutterInputConfiguration.tryBool('readOnly') ?? false, autocorrect = flutterInputConfiguration.tryBool('autocorrect') ?? true, textCapitalization = TextCapitalizationConfig.fromInputConfiguration( flutterInputConfiguration.readString('textCapitalization'), ), autofill = flutterInputConfiguration.containsKey('autofill') ? AutofillInfo.fromFrameworkMessage( flutterInputConfiguration.readJson('autofill')) : null, autofillGroup = EngineAutofillForm.fromFrameworkMessage( flutterInputConfiguration.tryJson('autofill'), flutterInputConfiguration.tryList('fields'), ), enableDeltaModel = flutterInputConfiguration.tryBool('enableDeltaModel') ?? false; /// The type of information being edited in the input control. final EngineInputType inputType; /// The default action for the input field. final String inputAction; /// Whether the text field can be edited or not. /// /// Defaults to false. final bool readOnly; /// Whether to hide the text being edited. final bool obscureText; /// Whether to enable autocorrection. /// /// Definition of autocorrect can be found in: /// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/input /// /// For future manual tests, note that autocorrect is an attribute only /// supported by Safari. final bool autocorrect; final bool enableDeltaModel; final AutofillInfo? autofill; final EngineAutofillForm? autofillGroup; final TextCapitalizationConfig textCapitalization; } typedef OnChangeCallback = void Function(EditingState? editingState, TextEditingDeltaState? editingDeltaState); typedef OnActionCallback = void Function(String? inputAction); /// Provides HTML DOM functionality for editable text. /// /// A concrete implementation is picked at runtime based on the current /// operating system, web browser, and accessibility mode. abstract class TextEditingStrategy { void initializeTextEditing( InputConfiguration inputConfig, { required OnChangeCallback onChange, required OnActionCallback onAction, }); /// Sets the initial placement of the DOM element on the UI. /// /// The element must be located exactly in the same place with the editable /// widget. However, its contents and cursor will be invisible. /// /// Users can interact with the element and use the functionality of the /// right-click menu, such as copy, paste, cut, select, translate, etc. void initializeElementPlacement(); /// Register event listeners to the DOM element. /// /// These event listener will be removed in [disable]. void addEventHandlers(); /// Update the element's position. /// /// The position will be updated everytime Flutter Framework sends /// 'TextInput.setEditableSizeAndTransform' message. void updateElementPlacement(EditableTextGeometry geometry); /// Set editing state of the element. /// /// This includes text and selection relelated states. The editing state will /// be updated everytime Flutter Framework sends 'TextInput.setEditingState' /// message. void setEditingState(EditingState editingState); /// Set style to the native DOM element used for text editing. void updateElementStyle(EditableTextStyle style); /// Disables the element so it's no longer used for text editing. /// /// Calling [disable] also removes any registered event listeners. void disable(); } /// A [TextEditingStrategy] that places its [domElement] assuming no /// prior transform or sizing is applied to it. /// /// This implementation is used by text editables when semantics is not /// enabled. 
With semantics enabled the placement is provided by the semantics /// tree. class GloballyPositionedTextEditingStrategy extends DefaultTextEditingStrategy { GloballyPositionedTextEditingStrategy(super.owner); @override void placeElement() { geometry?.applyToDomElement(activeDomElement); if (hasAutofillGroup) { placeForm(); // Set the last editing state if it exists, this is critical for a // users ongoing work to continue uninterrupted when there is an update to // the transform. lastEditingState?.applyToDomElement(domElement); // On Chrome, when a form is focused, it opens an autofill menu // immediately. // Flutter framework sends `setEditableSizeAndTransform` for informing // the engine about the location of the text field. This call will // arrive after `show` call. // Therefore on Chrome we place the element when // `setEditableSizeAndTransform` method is called and focus on the form // only after placing it to the correct position. Hence autofill menu // does not appear on top-left of the page. // Refocus on the elements after applying the geometry. focusedFormElement!.focus(); activeDomElement.focus(); } } } /// A [TextEditingStrategy] for Safari Desktop Browser. /// /// It places its [domElement] assuming no prior transform or sizing is applied /// to it. /// /// In case of an autofill enabled form, it does not append the form element /// to the DOM, until the geometry information is updated. /// /// This implementation is used by text editables when semantics is not /// enabled. With semantics enabled the placement is provided by the semantics /// tree. class SafariDesktopTextEditingStrategy extends DefaultTextEditingStrategy { SafariDesktopTextEditingStrategy(super.owner); /// Appending an element on the DOM for Safari Desktop Browser. /// /// This method is only called when geometry information is updated by /// 'TextInput.setEditableSizeAndTransform' message. /// /// This method is similar to the [GloballyPositionedTextEditingStrategy]. /// The only part different: this method does not call `super.placeElement()`, /// which in current state calls `domElement.focus()`. /// /// Making an extra `focus` request causes flickering in Safari. @override void placeElement() { geometry?.applyToDomElement(activeDomElement); if (hasAutofillGroup) { // We listen to pointerdown events on the Flutter View element and programatically // focus our inputs. However, these inputs are focused before the pointerdown // events conclude. Thus, the browser triggers a blur event immediately after // focusing these inputs. This causes issues with Safari Desktop's autofill // dialog (ref: https://github.com/flutter/flutter/issues/127960). // In order to guarantee that we only focus after the pointerdown event concludes, // we wrap the form autofill placement and focus logic in a zero-duration Timer. // This ensures that our input doesn't have instantaneous focus/blur events // occur on it and fixes the autofill dialog bug as a result. Timer(Duration.zero, () { placeForm(); // On Safari Desktop, when a form is focused, it opens an autofill menu // immediately. // Flutter framework sends `setEditableSizeAndTransform` for informing // the engine about the location of the text field. This call may arrive // after the first `show` call, depending on the text input widget's // implementation. Therefore form is placed, when // `setEditableSizeAndTransform` method is called and focus called on the // form only after placing it to the correct position and only once after // that. 
Calling focus multiple times causes flickering. focusedFormElement!.focus(); // Set the last editing state if it exists, this is critical for a // users ongoing work to continue uninterrupted when there is an update to // the transform. // If domElement is not focused cursor location will not be correct. activeDomElement.focus(); lastEditingState?.applyToDomElement(activeDomElement); }); } } @override void initializeElementPlacement() { if (geometry != null) { placeElement(); } activeDomElement.focus(); } } /// Class implementing the default editing strategies for text editing. /// /// This class uses a DOM element to provide text editing capabilities. /// /// The backing DOM element could be one of: /// /// 1. `<input>`. /// 2. `<textarea>`. /// 3. `<span contenteditable="true">`. /// /// This class includes all the default behaviour for an editing element as /// well as the common properties such as [domElement]. /// /// Strategies written for different form factors and browsers should extend /// this class instead of extending the interface [TextEditingStrategy]. In /// particular, a concrete implementation is expected to override /// [placeElement] that places the DOM element accordingly. The default /// implementation of [placeElement] does not position the element. /// /// Unless a formfactor/browser requires specific implementation for a specific /// strategy the methods in this class should be used. abstract class DefaultTextEditingStrategy with CompositionAwareMixin implements TextEditingStrategy { DefaultTextEditingStrategy(this.owner); final HybridTextEditing owner; bool isEnabled = false; /// The DOM element used for editing, if any. DomHTMLElement? domElement; /// Same as [domElement] but null-checked. /// /// This must only be called in places that know for sure that a DOM element /// is currently available for editing. DomHTMLElement get activeDomElement { assert( domElement != null, 'The DOM element of this text editing strategy is not currently active.', ); return domElement!; } late InputConfiguration inputConfiguration; EditingState? lastEditingState; TextEditingDeltaState? _editingDeltaState; TextEditingDeltaState get editingDeltaState { _editingDeltaState ??= TextEditingDeltaState(oldText: lastEditingState!.text!); return _editingDeltaState!; } /// Styles associated with the editable text. EditableTextStyle? style; /// Size and transform of the editable text on the page. EditableTextGeometry? geometry; OnChangeCallback? onChange; OnActionCallback? onAction; final List<DomSubscription> subscriptions = <DomSubscription>[]; bool get hasAutofillGroup => inputConfiguration.autofillGroup != null; /// Whether the focused input element is part of a form. bool get appendedToForm => _appendedToForm; bool _appendedToForm = false; DomHTMLFormElement? get focusedFormElement => inputConfiguration.autofillGroup?.formElement; @override void initializeTextEditing( InputConfiguration inputConfig, { required OnChangeCallback onChange, required OnActionCallback onAction, }) { assert(!isEnabled); domElement = inputConfig.inputType.createDomElement(); applyConfiguration(inputConfig); _setStaticStyleAttributes(activeDomElement); style?.applyToDomElement(activeDomElement); if (!hasAutofillGroup) { // If there is an Autofill Group the `FormElement`, it will be appended to the // DOM later, when the first location information arrived. // Otherwise, on Blink based Desktop browsers, the autofill menu appears // on top left of the screen. 
defaultTextEditingRoot.append(activeDomElement); _appendedToForm = false; } initializeElementPlacement(); isEnabled = true; this.onChange = onChange; this.onAction = onAction; } void applyConfiguration(InputConfiguration config) { inputConfiguration = config; if (config.readOnly) { activeDomElement.setAttribute('readonly', 'readonly'); } else { activeDomElement.removeAttribute('readonly'); } if (config.obscureText) { activeDomElement.setAttribute('type', 'password'); } if (config.inputType.inputmodeAttribute == 'none') { activeDomElement.setAttribute('inputmode', 'none'); } final EngineInputAction action = EngineInputAction.fromName(config.inputAction); action.configureInputAction(activeDomElement); final AutofillInfo? autofill = config.autofill; if (autofill != null) { autofill.applyToDomElement(activeDomElement, focusedElement: true); } else { activeDomElement.setAttribute('autocomplete', 'off'); } final String autocorrectValue = config.autocorrect ? 'on' : 'off'; activeDomElement.setAttribute('autocorrect', autocorrectValue); } @override void initializeElementPlacement() { placeElement(); } @override void addEventHandlers() { if (inputConfiguration.autofillGroup != null) { subscriptions .addAll(inputConfiguration.autofillGroup!.addInputEventListeners()); } // Subscribe to text and selection changes. subscriptions.add(DomSubscription(activeDomElement, 'input', handleChange)); subscriptions.add(DomSubscription(activeDomElement, 'keydown', maybeSendAction)); subscriptions.add(DomSubscription(domDocument, 'selectionchange', handleChange)); activeDomElement.addEventListener('beforeinput', createDomEventListener(handleBeforeInput)); addCompositionEventHandlers(activeDomElement); // Refocus on the activeDomElement after blur, so that user can keep editing the // text field. subscriptions.add(DomSubscription(activeDomElement, 'blur', (_) { activeDomElement.focus(); })); preventDefaultForMouseEvents(); } @override void updateElementPlacement(EditableTextGeometry textGeometry) { geometry = textGeometry; if (isEnabled) { // On updates, we shouldn't go through the entire placeElement() flow if // we are in the middle of IME composition, otherwise we risk interrupting it. // Geometry updates occur when a multiline input expands or contracts. If // we are in the middle of composition, we should just update the geometry. // See: https://github.com/flutter/flutter/issues/98817 if (composingText != null) { geometry?.applyToDomElement(activeDomElement); } else { placeElement(); } } } @override void updateElementStyle(EditableTextStyle textStyle) { style = textStyle; if (isEnabled) { textStyle.applyToDomElement(activeDomElement); } } @override void disable() { assert(isEnabled); isEnabled = false; lastEditingState = null; _editingDeltaState = null; style = null; geometry = null; for (int i = 0; i < subscriptions.length; i++) { subscriptions[i].cancel(); } subscriptions.clear(); removeCompositionEventHandlers(activeDomElement); // If focused element is a part of a form, it needs to stay on the DOM // until the autofill context of the form is finalized. // More details on `TextInput.finishAutofillContext` call. if (_appendedToForm && inputConfiguration.autofillGroup?.formElement != null) { // Subscriptions are removed, listeners won't be triggered. activeDomElement.blur(); _styleAutofillElements(activeDomElement, isOffScreen: true); inputConfiguration.autofillGroup?.storeForm(); } else { activeDomElement.remove(); } domElement = null; } @override void setEditingState(EditingState? 
editingState) { lastEditingState = editingState; if (!isEnabled || !editingState!.isValid) { return; } lastEditingState!.applyToDomElement(domElement); } void placeElement() { activeDomElement.focus(); } void placeForm() { inputConfiguration.autofillGroup!.placeForm(activeDomElement); _appendedToForm = true; } void handleChange(DomEvent event) { assert(isEnabled); EditingState newEditingState = EditingState.fromDomElement(activeDomElement); newEditingState = determineCompositionState(newEditingState); TextEditingDeltaState? newTextEditingDeltaState; if (inputConfiguration.enableDeltaModel) { editingDeltaState.composingOffset = newEditingState.composingBaseOffset; editingDeltaState.composingExtent = newEditingState.composingExtentOffset; newTextEditingDeltaState = TextEditingDeltaState.inferDeltaState(newEditingState, lastEditingState, editingDeltaState); } if (newEditingState != lastEditingState) { lastEditingState = newEditingState; _editingDeltaState = newTextEditingDeltaState; onChange!(lastEditingState, _editingDeltaState); } // Flush delta state. _editingDeltaState = null; } void handleBeforeInput(DomEvent event) { // In some cases the beforeinput event is not fired such as when the selection // of a text field is updated. In this case only the oninput event is fired. // We still want a delta generated in these cases so we can properly update // the selection. We begin to set the deltaStart and deltaEnd in beforeinput // because a change in the selection will not have a delta range, it will only // have a baseOffset and extentOffset. If these are set inside of inferDeltaState // then the method will incorrectly report a deltaStart and deltaEnd for a non // text update delta. final String? eventData = getJsProperty<void>(event, 'data') as String?; final String? inputType = getJsProperty<void>(event, 'inputType') as String?; if (inputType != null) { final bool isSelectionInverted = lastEditingState!.baseOffset! > lastEditingState!.extentOffset!; final int deltaOffset = isSelectionInverted ? lastEditingState!.baseOffset! : lastEditingState!.extentOffset!; if (inputType.contains('delete')) { // The deltaStart is set in handleChange because there is where we get access // to the new selection baseOffset which is our new deltaStart. editingDeltaState.deltaText = ''; editingDeltaState.deltaEnd = deltaOffset; } else if (inputType == 'insertLineBreak'){ // event.data is null on a line break, so we manually set deltaText as a line break by setting it to '\n'. editingDeltaState.deltaText = '\n'; editingDeltaState.deltaStart = deltaOffset; editingDeltaState.deltaEnd = deltaOffset; } else if (eventData != null) { // When event.data is not null we will begin by considering this delta as an insertion // at the selection extentOffset. This may change due to logic in handleChange to handle // composition and other IME behaviors. editingDeltaState.deltaText = eventData; editingDeltaState.deltaStart = deltaOffset; editingDeltaState.deltaEnd = deltaOffset; } } } void maybeSendAction(DomEvent e) { if (domInstanceOfString(e, 'KeyboardEvent')) { final DomKeyboardEvent event = e as DomKeyboardEvent; if (event.keyCode == _kReturnKeyCode) { onAction!(inputConfiguration.inputAction); // Prevent the browser from inserting a new line when it's not a multiline input. if (inputConfiguration.inputType is! MultilineInputType) { event.preventDefault(); } } } } /// Enables the element so it can be used to edit text. 
/// /// Register [callback] so that it gets invoked whenever any change occurs in /// the text editing element. /// /// Changes could be: /// - Text changes, or /// - Selection changes. void enable( InputConfiguration inputConfig, { required OnChangeCallback onChange, required OnActionCallback onAction, }) { assert(!isEnabled); initializeTextEditing(inputConfig, onChange: onChange, onAction: onAction); addEventHandlers(); if (lastEditingState != null) { setEditingState(lastEditingState); } // Re-focuses after setting editing state. activeDomElement.focus(); } /// Prevent default behavior for mouse down, up and move. /// /// When normal mouse events are not prevented, mouse selection /// conflicts with selection sent from the framework, which creates /// flickering during selection by mouse. /// /// On mobile browsers, mouse events are sent after a touch event, /// see: https://bugs.chromium.org/p/chromium/issues/detail?id=119216#c11. void preventDefaultForMouseEvents() { subscriptions.add( DomSubscription(activeDomElement, 'mousedown', (DomEvent event) { event.preventDefault(); })); subscriptions.add( DomSubscription(activeDomElement, 'mouseup', (DomEvent event) { event.preventDefault(); })); subscriptions.add( DomSubscription(activeDomElement, 'mousemove', (DomEvent event) { event.preventDefault(); })); } } /// IOS/Safari behaviour for text editing. /// /// In iOS, the virtual keyboard might shifts the screen up to make input /// visible depending on the location of the focused input element. /// /// Due to this [initializeElementPlacement] and [updateElementPlacement] /// strategies are different. /// /// [disable] is also different since the [_positionInputElementTimer] /// also needs to be cleaned. /// /// inputmodeAttribute needs to be set for mobile devices. Due to this /// [initializeTextEditing] is different. class IOSTextEditingStrategy extends GloballyPositionedTextEditingStrategy { IOSTextEditingStrategy(super.owner); /// Timer that times when to set the location of the input text. /// /// This is only used for iOS. In iOS, virtual keyboard shifts the screen. /// There is no callback to know if the keyboard is up and how much the screen /// has shifted. Therefore instead of listening to the shift and passing this /// information to Flutter Framework, we are trying to stop the shift. /// /// In iOS, the virtual keyboard shifts the screen up if the focused input /// element is under the keyboard or very close to the keyboard. Before the /// focus is called we are positioning it offscreen. The location of the input /// in iOS is set to correct place, 100ms after focus. We use this timer for /// timing this delay. Timer? _positionInputElementTimer; static const Duration _delayBeforePlacement = Duration(milliseconds: 100); /// This interval between the blur subscription and callback is considered to /// be fast. /// /// This is only used for iOS. The blur callback may trigger as soon as the /// creation of the subscription. Occasionally in this case, the virtual /// keyboard will quickly show and hide again. /// /// Less than this interval allows the virtual keyboard to keep showing up /// instead of hiding rapidly. static const Duration _blurFastCallbackInterval = Duration(milliseconds: 200); /// Whether or not the input element can be positioned at this point in time. /// /// This is currently only used in iOS. It's set to false before focusing the /// input field, and set back to true after a short timer. 
We do this because /// if the input field is positioned before focus, it could be pushed to an /// incorrect position by the virtual keyboard. /// /// See: /// /// * [_delayBeforePlacement] which controls how long to wait before /// positioning the input field. bool _canPosition = true; @override void initializeTextEditing( InputConfiguration inputConfig, { required OnChangeCallback onChange, required OnActionCallback onAction, }) { super.initializeTextEditing(inputConfig, onChange: onChange, onAction: onAction); inputConfig.inputType.configureInputMode(activeDomElement); if (hasAutofillGroup) { placeForm(); } inputConfig.textCapitalization.setAutocapitalizeAttribute(activeDomElement); } @override void initializeElementPlacement() { /// Position the element outside of the page before focusing on it. This is /// useful for not triggering a scroll when iOS virtual keyboard is /// coming up. activeDomElement.style.transform = 'translate(${offScreenOffset}px, ${offScreenOffset}px)'; _canPosition = false; } @override void addEventHandlers() { if (inputConfiguration.autofillGroup != null) { subscriptions .addAll(inputConfiguration.autofillGroup!.addInputEventListeners()); } // Subscribe to text and selection changes. subscriptions.add(DomSubscription(activeDomElement, 'input', handleChange)); subscriptions.add(DomSubscription(activeDomElement, 'keydown', maybeSendAction)); subscriptions.add(DomSubscription(domDocument, 'selectionchange', handleChange)); activeDomElement.addEventListener('beforeinput', createDomEventListener(handleBeforeInput)); addCompositionEventHandlers(activeDomElement); // Position the DOM element after it is focused. subscriptions.add(DomSubscription(activeDomElement, 'focus', (_) { // Cancel previous timer if exists. _schedulePlacement(); })); _addTapListener(); // Record start time of blur subscription. final Stopwatch blurWatch = Stopwatch()..start(); // On iOS, blur is trigerred in the following cases: // // 1. The browser app is sent to the background (or the tab is changed). In // this case, the window loses focus (see [windowHasFocus]), // so we close the input connection with the framework. // 2. The user taps on another focusable element. In this case, we refocus // the input field and wait for the framework to manage the focus change. // 3. The virtual keyboard is closed by tapping "done". We can't detect this // programmatically, so we end up refocusing the input field. This is // okay because the virtual keyboard will hide, and as soon as the user // taps the text field again, the virtual keyboard will come up. // 4. Safari sometimes sends a blur event immediately after activating the // input field. In this case, we want to keep the focus on the input field. // In order to detect this, we measure how much time has passed since the // input field was activated. If the time is too short, we re-focus the // input element. subscriptions.add(DomSubscription(activeDomElement, 'blur', (_) { final bool isFastCallback = blurWatch.elapsed < _blurFastCallbackInterval; if (windowHasFocus && isFastCallback) { activeDomElement.focus(); } else { owner.sendTextConnectionClosedToFrameworkIfAny(); } })); } @override void updateElementPlacement(EditableTextGeometry textGeometry) { geometry = textGeometry; if (isEnabled && _canPosition) { placeElement(); } } @override void disable() { super.disable(); _positionInputElementTimer?.cancel(); _positionInputElementTimer = null; } /// On iOS long press works differently than a single tap. 
/// /// On a normal tap the virtual keyboard comes up and users can enter text /// using the keyboard. /// /// The long press on the other hand focuses on the element without bringing /// up the virtual keyboard. It allows the users to modify the field by using /// copy/cut/select/paste etc. /// /// After a long press [domElement] is positioned to the correct place. If the /// user later single-tap on the [domElement] the virtual keyboard will come /// and might shift the page up. /// /// In order to prevent this shift, on a `click` event the position of the /// element is again set somewhere outside of the page and /// [_positionInputElementTimer] timer is restarted. The element will be /// placed to its correct position after [_delayBeforePlacement]. void _addTapListener() { subscriptions.add(DomSubscription(activeDomElement, 'click', (_) { // Check if the element is already positioned. If not this does not fall // under `The user was using the long press, now they want to enter text // via keyboard` journey. if (_canPosition) { // Re-place the element somewhere outside of the screen. initializeElementPlacement(); // Re-configure the timer to place the element. _schedulePlacement(); } })); } void _schedulePlacement() { _positionInputElementTimer?.cancel(); _positionInputElementTimer = Timer(_delayBeforePlacement, () { _canPosition = true; placeElement(); }); } @override void placeElement() { activeDomElement.focus(); geometry?.applyToDomElement(activeDomElement); } } /// Android behaviour for text editing. /// /// inputmodeAttribute needs to be set for mobile devices. Due to this /// [initializeTextEditing] is different. /// /// Keyboard acts differently than other devices. [addEventHandlers] handles /// this case as an extra. class AndroidTextEditingStrategy extends GloballyPositionedTextEditingStrategy { AndroidTextEditingStrategy(super.owner); @override void initializeTextEditing( InputConfiguration inputConfig, { required OnChangeCallback onChange, required OnActionCallback onAction, }) { super.initializeTextEditing(inputConfig, onChange: onChange, onAction: onAction); inputConfig.inputType.configureInputMode(activeDomElement); if (hasAutofillGroup) { placeForm(); } else { defaultTextEditingRoot.append(activeDomElement); } inputConfig.textCapitalization.setAutocapitalizeAttribute( activeDomElement); } @override void addEventHandlers() { if (inputConfiguration.autofillGroup != null) { subscriptions .addAll(inputConfiguration.autofillGroup!.addInputEventListeners()); } // Subscribe to text and selection changes. subscriptions.add( DomSubscription(activeDomElement, 'input', handleChange)); subscriptions.add( DomSubscription(activeDomElement, 'keydown', maybeSendAction)); subscriptions.add( DomSubscription(domDocument, 'selectionchange', handleChange)); activeDomElement.addEventListener('beforeinput', createDomEventListener(handleBeforeInput)); addCompositionEventHandlers(activeDomElement); subscriptions.add( DomSubscription(activeDomElement, 'blur', (_) { if (windowHasFocus) { // Chrome on Android will hide the onscreen keyboard when you tap outside // the text box. Instead, we want the framework to tell us to hide the // keyboard via `TextInput.clearClient` or `TextInput.hide`. Therefore // refocus as long as [windowHasFocus] is true. 
activeDomElement.focus(); } else { owner.sendTextConnectionClosedToFrameworkIfAny(); } })); preventDefaultForMouseEvents(); } @override void placeElement() { activeDomElement.focus(); geometry?.applyToDomElement(activeDomElement); } } /// Firefox behaviour for text editing. /// /// Selections are different in Firefox. [addEventHandlers] strategy is /// impelemented diefferently in Firefox. class FirefoxTextEditingStrategy extends GloballyPositionedTextEditingStrategy { FirefoxTextEditingStrategy(super.owner); @override void initializeTextEditing( InputConfiguration inputConfig, { required OnChangeCallback onChange, required OnActionCallback onAction, }) { super.initializeTextEditing(inputConfig, onChange: onChange, onAction: onAction); if (hasAutofillGroup) { placeForm(); } } @override void addEventHandlers() { if (inputConfiguration.autofillGroup != null) { subscriptions .addAll(inputConfiguration.autofillGroup!.addInputEventListeners()); } // Subscribe to text and selection changes. subscriptions.add( DomSubscription(activeDomElement, 'input', handleChange)); subscriptions.add( DomSubscription( activeDomElement, 'keydown', maybeSendAction)); activeDomElement.addEventListener('beforeinput', createDomEventListener(handleBeforeInput)); addCompositionEventHandlers(activeDomElement); // Detects changes in text selection. // // In Firefox, when cursor moves, neither selectionChange nor onInput // events are triggered. We are listening to keyup event. Selection start, // end values are used to decide if the text cursor moved. // // Specific keycodes are not checked since users/applications can bind // their own keys to move the text cursor. // Decides if the selection has changed (cursor moved) compared to the // previous values. // // After each keyup, the start/end values of the selection is compared to // the previously saved editing state. subscriptions.add( DomSubscription( activeDomElement, 'keyup', (DomEvent event) { handleChange(event); })); // In Firefox the context menu item "Select All" does not work without // listening to onSelect. On the other browsers onSelectionChange is // enough for covering "Select All" functionality. subscriptions.add( DomSubscription( activeDomElement, 'select', handleChange)); // Refocus on the activeDomElement after blur, so that user can keep editing the // text field. subscriptions.add( DomSubscription( activeDomElement, 'blur', (_) { _postponeFocus(); })); preventDefaultForMouseEvents(); } void _postponeFocus() { // Firefox does not focus on the editing element if we call the focus // inside the blur event, therefore we postpone the focus. // Calling focus inside a Timer for `0` milliseconds guarantee that it is // called after blur event propagation is completed. Timer(Duration.zero, () { activeDomElement.focus(); }); } @override void placeElement() { activeDomElement.focus(); geometry?.applyToDomElement(activeDomElement); // Set the last editing state if it exists, this is critical for a // users ongoing work to continue uninterrupted when there is an update to // the transform. lastEditingState?.applyToDomElement(activeDomElement); } } /// Base class for all `TextInput` commands sent through the `flutter/textinput` /// channel. @immutable abstract class TextInputCommand { const TextInputCommand(); /// Executes the logic for this command. void run(HybridTextEditing textEditing); } /// Responds to the 'TextInput.setClient' message. 
class TextInputSetClient extends TextInputCommand { const TextInputSetClient({ required this.clientId, required this.configuration, }); final int clientId; final InputConfiguration configuration; @override void run(HybridTextEditing textEditing) { final bool clientIdChanged = textEditing._clientId != null && textEditing._clientId != clientId; if (clientIdChanged && textEditing.isEditing) { // We're connecting a new client. Any pending command for the previous client // are irrelevant at this point. textEditing.stopEditing(); } textEditing._clientId = clientId; textEditing.configuration = configuration; } } /// Creates the text editing strategy used in non-a11y mode. DefaultTextEditingStrategy createDefaultTextEditingStrategy(HybridTextEditing textEditing) { DefaultTextEditingStrategy strategy; if(operatingSystem == OperatingSystem.iOs) { strategy = IOSTextEditingStrategy(textEditing); } else if(operatingSystem == OperatingSystem.android) { strategy = AndroidTextEditingStrategy(textEditing); } else if(browserEngine == BrowserEngine.webkit) { strategy = SafariDesktopTextEditingStrategy(textEditing); } else if(browserEngine == BrowserEngine.firefox) { strategy = FirefoxTextEditingStrategy(textEditing); } else { strategy = GloballyPositionedTextEditingStrategy(textEditing); } return strategy; } /// Responds to the 'TextInput.updateConfig' message. class TextInputUpdateConfig extends TextInputCommand { const TextInputUpdateConfig(); @override void run(HybridTextEditing textEditing) { textEditing.strategy.applyConfiguration(textEditing.configuration!); } } /// Responds to the 'TextInput.setEditingState' message. class TextInputSetEditingState extends TextInputCommand { const TextInputSetEditingState({ required this.state, }); final EditingState state; @override void run(HybridTextEditing textEditing) { textEditing.strategy.setEditingState(state); } } /// Responds to the 'TextInput.show' message. class TextInputShow extends TextInputCommand { const TextInputShow(); @override void run(HybridTextEditing textEditing) { if (!textEditing.isEditing) { textEditing._startEditing(); } } } /// Responds to the 'TextInput.setEditableSizeAndTransform' message. class TextInputSetEditableSizeAndTransform extends TextInputCommand { const TextInputSetEditableSizeAndTransform({ required this.geometry, }); final EditableTextGeometry geometry; @override void run(HybridTextEditing textEditing) { textEditing.strategy.updateElementPlacement(geometry); } } /// Responds to the 'TextInput.setStyle' message. class TextInputSetStyle extends TextInputCommand { const TextInputSetStyle({ required this.style, }); final EditableTextStyle style; @override void run(HybridTextEditing textEditing) { textEditing.strategy.updateElementStyle(style); } } /// Responds to the 'TextInput.clearClient' message. class TextInputClearClient extends TextInputCommand { const TextInputClearClient(); @override void run(HybridTextEditing textEditing) { if (textEditing.isEditing) { textEditing.stopEditing(); } } } /// Responds to the 'TextInput.hide' message. class TextInputHide extends TextInputCommand { const TextInputHide(); @override void run(HybridTextEditing textEditing) { if (textEditing.isEditing) { textEditing.stopEditing(); } } } class TextInputSetMarkedTextRect extends TextInputCommand { const TextInputSetMarkedTextRect(); @override void run(HybridTextEditing textEditing) { // No-op: this message is currently only used on iOS to implement // UITextInput.firstRecForRange. 
} } class TextInputSetCaretRect extends TextInputCommand { const TextInputSetCaretRect(); @override void run(HybridTextEditing textEditing) { // No-op: not supported on this platform. } } class TextInputRequestAutofill extends TextInputCommand { const TextInputRequestAutofill(); @override void run(HybridTextEditing textEditing) { // No-op: not supported on this platform. } } class TextInputFinishAutofillContext extends TextInputCommand { const TextInputFinishAutofillContext({ required this.saveForm, }); final bool saveForm; @override void run(HybridTextEditing textEditing) { // Close the text editing connection. Form is finalizing. textEditing.sendTextConnectionClosedToFrameworkIfAny(); if (saveForm) { saveForms(); } // Clean the forms from DOM after submitting them. cleanForms(); } } /// Submits the forms currently attached to the DOM. /// /// Browser will save the information entered to the form. /// /// Called when the form is finalized with save option `true`. /// See: https://github.com/flutter/flutter/blob/bf9f3a3dcfea3022f9cf2dfc3ab10b120b48b19d/packages/flutter/lib/src/services/text_input.dart#L1277 void saveForms() { formsOnTheDom.forEach((String identifier, DomHTMLFormElement form) { final DomHTMLInputElement submitBtn = form.getElementsByClassName('submitBtn').first as DomHTMLInputElement; submitBtn.click(); }); } /// Removes the forms from the DOM. /// /// Called when the form is finalized. void cleanForms() { for (final DomHTMLFormElement form in formsOnTheDom.values) { form.remove(); } formsOnTheDom.clear(); } /// Translates the message-based communication between the framework and the /// engine [implementation]. /// /// This class is meant to be used as a singleton. class TextEditingChannel { TextEditingChannel(this.implementation); /// Supplies the implementation that responds to the channel messages. final HybridTextEditing implementation; /// Handles "flutter/textinput" platform messages received from the framework. void handleTextInput( ByteData? data, ui.PlatformMessageResponseCallback? callback) { const JSONMethodCodec codec = JSONMethodCodec(); final MethodCall call = codec.decodeMethodCall(data); final TextInputCommand command; switch (call.method) { case 'TextInput.setClient': command = TextInputSetClient( clientId: call.arguments[0] as int, configuration: InputConfiguration.fromFrameworkMessage(call.arguments[1] as Map<String, dynamic>), ); case 'TextInput.updateConfig': // Set configuration eagerly because it contains data about the text // field used to flush the command queue. However, delaye applying the // configuration because the strategy may not be available yet. 
implementation.configuration = InputConfiguration.fromFrameworkMessage( call.arguments as Map<String, dynamic> ); command = const TextInputUpdateConfig(); case 'TextInput.setEditingState': command = TextInputSetEditingState( state: EditingState.fromFrameworkMessage( call.arguments as Map<String, dynamic> ), ); case 'TextInput.show': command = const TextInputShow(); case 'TextInput.setEditableSizeAndTransform': command = TextInputSetEditableSizeAndTransform( geometry: EditableTextGeometry.fromFrameworkMessage( call.arguments as Map<String, dynamic> ), ); case 'TextInput.setStyle': command = TextInputSetStyle( style: EditableTextStyle.fromFrameworkMessage( call.arguments as Map<String, dynamic>, ), ); case 'TextInput.clearClient': command = const TextInputClearClient(); case 'TextInput.hide': command = const TextInputHide(); case 'TextInput.requestAutofill': // There's no API to request autofill on the web. Instead we let the // browser show autofill options automatically, if available. We // therefore simply ignore this message. command = const TextInputRequestAutofill(); case 'TextInput.finishAutofillContext': command = TextInputFinishAutofillContext( saveForm: call.arguments as bool, ); case 'TextInput.setMarkedTextRect': command = const TextInputSetMarkedTextRect(); case 'TextInput.setCaretRect': command = const TextInputSetCaretRect(); default: EnginePlatformDispatcher.instance.replyToPlatformMessage(callback, null); return; } implementation.acceptCommand(command, () { EnginePlatformDispatcher.instance .replyToPlatformMessage(callback, codec.encodeSuccessEnvelope(true)); }); } /// Sends the 'TextInputClient.updateEditingState' message to the framework. void updateEditingState(int? clientId, EditingState? editingState) { EnginePlatformDispatcher.instance.invokeOnPlatformMessage( 'flutter/textinput', const JSONMethodCodec().encodeMethodCall( MethodCall('TextInputClient.updateEditingState', <dynamic>[ clientId, editingState!.toFlutter(), ]), ), _emptyCallback, ); } /// Sends the 'TextInputClient.updateEditingStateWithDeltas' message to the framework. void updateEditingStateWithDelta(int? clientId, TextEditingDeltaState? editingDeltaState) { EnginePlatformDispatcher.instance.invokeOnPlatformMessage( 'flutter/textinput', const JSONMethodCodec().encodeMethodCall( MethodCall('TextInputClient.updateEditingStateWithDeltas', <dynamic>[ clientId, editingDeltaState!.toFlutter(), ]), ), _emptyCallback, ); } /// Sends the 'TextInputClient.performAction' message to the framework. void performAction(int? clientId, String? inputAction) { EnginePlatformDispatcher.instance.invokeOnPlatformMessage( 'flutter/textinput', const JSONMethodCodec().encodeMethodCall( MethodCall( 'TextInputClient.performAction', <dynamic>[clientId, inputAction], ), ), _emptyCallback, ); } /// Sends the 'TextInputClient.onConnectionClosed' message to the framework. void onConnectionClosed(int? clientId) { EnginePlatformDispatcher.instance.invokeOnPlatformMessage( 'flutter/textinput', const JSONMethodCodec().encodeMethodCall( MethodCall( 'TextInputClient.onConnectionClosed', <dynamic>[clientId], ), ), _emptyCallback, ); } } /// Text editing singleton. final HybridTextEditing textEditing = HybridTextEditing(); /// Map for storing forms left attached on the DOM. /// /// Used for keeping the form elements on the DOM until user confirms to /// save or cancel them. 
/// /// See: https://github.com/flutter/flutter/blob/bf9f3a3dcfea3022f9cf2dfc3ab10b120b48b19d/packages/flutter/lib/src/services/text_input.dart#L1277 final Map<String, DomHTMLFormElement> formsOnTheDom = <String, DomHTMLFormElement>{}; /// Should be used as a singleton to provide support for text editing in /// Flutter Web. /// /// The approach is "hybrid" because it relies on Flutter for /// displaying, and HTML for user interactions: /// /// - HTML's contentEditable feature handles typing and text changes. /// - HTML's selection API handles selection changes and cursor movements. class HybridTextEditing { /// Private constructor so this class can be a singleton. /// /// The constructor also decides which text editing strategy to use depending /// on the operating system and browser engine. HybridTextEditing(); late final TextEditingChannel channel = TextEditingChannel(this); /// A CSS class name used to identify all elements used for text editing. @visibleForTesting static const String textEditingClass = 'flt-text-editing'; int? _clientId; /// Flag which shows if there is an ongoing editing. /// /// Also used to define if a keyboard is needed. bool isEditing = false; InputConfiguration? configuration; DefaultTextEditingStrategy? debugTextEditingStrategyOverride; /// Supplies the DOM element used for editing. late final DefaultTextEditingStrategy strategy = debugTextEditingStrategyOverride ?? (EngineSemantics.instance.semanticsEnabled ? SemanticsTextEditingStrategy.ensureInitialized(this) : createDefaultTextEditingStrategy(this)); void acceptCommand(TextInputCommand command, ui.VoidCallback callback) { if (_debugPrintTextInputCommands) { print('flutter/textinput channel command: ${command.runtimeType}'); } command.run(this); callback(); } void _startEditing() { assert(!isEditing); isEditing = true; strategy.enable( configuration!, onChange: (EditingState? editingState, TextEditingDeltaState? editingDeltaState) { if (configuration!.enableDeltaModel) { channel.updateEditingStateWithDelta(_clientId, editingDeltaState); } else { channel.updateEditingState(_clientId, editingState); } }, onAction: (String? inputAction) { channel.performAction(_clientId, inputAction); }, ); } void stopEditing() { assert(isEditing); isEditing = false; strategy.disable(); } void sendTextConnectionClosedToFrameworkIfAny() { if (isEditing) { stopEditing(); channel.onConnectionClosed(_clientId); } } } /// Information on the font and alignment of a text editing element. /// /// This information is received via TextInput.setStyle message. class EditableTextStyle { EditableTextStyle({ required this.textDirection, required this.fontSize, required this.textAlign, required this.fontFamily, required this.fontWeight, }); factory EditableTextStyle.fromFrameworkMessage( Map<String, dynamic> flutterStyle) { assert(flutterStyle.containsKey('fontSize')); assert(flutterStyle.containsKey('fontFamily')); assert(flutterStyle.containsKey('textAlignIndex')); assert(flutterStyle.containsKey('textDirectionIndex')); final int textAlignIndex = flutterStyle['textAlignIndex'] as int; final int textDirectionIndex = flutterStyle['textDirectionIndex'] as int; final int? fontWeightIndex = flutterStyle['fontWeightIndex'] as int?; // Convert [fontWeightIndex] to its CSS equivalent value. final String fontWeight = fontWeightIndex != null ? 
fontWeightIndexToCss(fontWeightIndex: fontWeightIndex) : 'normal'; // Also convert [textAlignIndex] and [textDirectionIndex] to their // corresponding enum values in [ui.TextAlign] and [ui.TextDirection] // respectively. return EditableTextStyle( fontSize: flutterStyle.tryDouble('fontSize'), fontFamily: flutterStyle.tryString('fontFamily'), textAlign: ui.TextAlign.values[textAlignIndex], textDirection: ui.TextDirection.values[textDirectionIndex], fontWeight: fontWeight, ); } /// This information will be used for changing the style of the hidden input /// element, which will match it's size to the size of the editable widget. final double? fontSize; final String fontWeight; final String? fontFamily; final ui.TextAlign textAlign; final ui.TextDirection textDirection; String? get align => textAlignToCssValue(textAlign, textDirection); String get cssFont => '$fontWeight ${fontSize}px ${canonicalizeFontFamily(fontFamily)}'; void applyToDomElement(DomHTMLElement domElement) { domElement.style ..textAlign = align! ..font = cssFont; } } /// Describes the location and size of the editing element on the screen. /// /// This information is received via "TextInput.setEditableSizeAndTransform" /// message from the framework. @immutable class EditableTextGeometry { const EditableTextGeometry({ required this.width, required this.height, required this.globalTransform, }); /// Parses the geometry from a message sent by the framework. factory EditableTextGeometry.fromFrameworkMessage( Map<String, dynamic> encodedGeometry, ) { assert(encodedGeometry.containsKey('width')); assert(encodedGeometry.containsKey('height')); assert(encodedGeometry.containsKey('transform')); final List<double> transformList = List<double>.from(encodedGeometry.readList('transform').map( (final dynamic e) => (e as num).toDouble() )); return EditableTextGeometry( width: encodedGeometry.readDouble('width'), height: encodedGeometry.readDouble('height'), globalTransform: Float32List.fromList(transformList), ); } /// The width of the editable in local coordinates, i.e. before applying [globalTransform]. final double width; /// The height of the editable in local coordinates, i.e. before applying [globalTransform]. final double height; /// The aggregate transform rooted at the global (screen) coordinate system /// that places and sizes the editable. /// /// For correct sizing this transform must be applied to the [width] and /// [height] fields. final Float32List globalTransform; /// Applies this geometry to the DOM element. /// /// This assumes that the parent of the [domElement] has identity transform /// applied to it (i.e. the default). If the parent has a non-identity /// transform applied, this method will misplace the [domElement]. For /// example, if the editable DOM element is nested inside the semantics /// tree the semantics tree provides the placement parameters, in which /// case this method should not be used. void applyToDomElement(DomHTMLElement domElement) { final String cssTransform = float64ListToCssTransform(globalTransform); domElement.style ..width = '${width}px' ..height = '${height}px' ..transform = cssTransform; } }
engine/lib/web_ui/lib/src/engine/text_editing/text_editing.dart/0
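The delta pipeline in the strategies above — `handleBeforeInput` seeding `deltaStart`/`deltaEnd`/`deltaText`, and `handleChange` refining them through `TextEditingDeltaState.inferDeltaState` — ultimately comes down to diffing the previous and current text values. The standalone Dart sketch below illustrates that common-prefix/common-suffix idea in isolation; `SimpleTextDelta` and `inferSimpleDelta` are hypothetical names used only for illustration and are not the engine's actual implementation, which additionally has to account for composing (IME) regions and selection-only changes.

```dart
// Minimal sketch of delta inference by diffing two text values.
class SimpleTextDelta {
  SimpleTextDelta(this.deltaStart, this.deltaEnd, this.deltaText);

  final int deltaStart;   // First index in the old text that changed.
  final int deltaEnd;     // Index one past the last changed character in the old text.
  final String deltaText; // Replacement text; empty for a pure deletion.

  @override
  String toString() => 'SimpleTextDelta($deltaStart, $deltaEnd, "$deltaText")';
}

SimpleTextDelta inferSimpleDelta(String oldText, String newText) {
  // Longest common prefix.
  int prefix = 0;
  while (prefix < oldText.length &&
      prefix < newText.length &&
      oldText[prefix] == newText[prefix]) {
    prefix++;
  }
  // Longest common suffix that does not overlap the prefix.
  int suffix = 0;
  while (suffix < oldText.length - prefix &&
      suffix < newText.length - prefix &&
      oldText[oldText.length - 1 - suffix] ==
          newText[newText.length - 1 - suffix]) {
    suffix++;
  }
  return SimpleTextDelta(
    prefix,
    oldText.length - suffix,
    newText.substring(prefix, newText.length - suffix),
  );
}

void main() {
  // Insertion: "helo" -> "hello" replaces the empty old range [3, 3) with "l".
  print(inferSimpleDelta('helo', 'hello')); // SimpleTextDelta(3, 3, "l")
  // Deletion: "hello" -> "helo" replaces the old range [3, 4) with "".
  print(inferSimpleDelta('hello', 'helo')); // SimpleTextDelta(3, 4, "")
}
```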
{ "file_path": "engine/lib/web_ui/lib/src/engine/text_editing/text_editing.dart", "repo_id": "engine", "token_count": 29399 }
282
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:async'; import 'dart:typed_data'; import 'package:meta/meta.dart'; import 'package:ui/ui.dart' as ui; import 'package:ui/ui_web/src/ui_web.dart' as ui_web; import '../engine.dart' show DimensionsProvider, registerHotRestartListener, renderer; import 'browser_detection.dart'; import 'configuration.dart'; import 'display.dart'; import 'dom.dart'; import 'initialization.dart'; import 'js_interop/js_app.dart'; import 'mouse/context_menu.dart'; import 'mouse/cursor.dart'; import 'navigation/history.dart'; import 'platform_dispatcher.dart'; import 'pointer_binding.dart'; import 'semantics.dart'; import 'services.dart'; import 'text_editing/text_editing.dart'; import 'util.dart'; import 'view_embedder/dom_manager.dart'; import 'view_embedder/embedding_strategy/embedding_strategy.dart'; import 'view_embedder/global_html_attributes.dart'; import 'view_embedder/style_manager.dart'; typedef _HandleMessageCallBack = Future<bool> Function(); /// When set to true, all platform messages will be printed to the console. const bool debugPrintPlatformMessages = false; /// The view ID for the implicit flutter view provided by the platform. const int kImplicitViewId = 0; int _nextViewId = kImplicitViewId + 1; /// Represents all views in the Flutter Web Engine. /// /// In addition to everything defined in [ui.FlutterView], this class adds /// a few web-specific properties. base class EngineFlutterView implements ui.FlutterView { /// Creates a [ui.FlutterView] that can be used in multi-view mode. /// /// The [hostElement] parameter specifies the container in the DOM into which /// the Flutter view will be rendered. factory EngineFlutterView( EnginePlatformDispatcher platformDispatcher, DomElement hostElement, { JsViewConstraints? viewConstraints, } ) = _EngineFlutterViewImpl; EngineFlutterView._( this.viewId, this.platformDispatcher, // This is nullable to accommodate the legacy `EngineFlutterWindow`. In // multi-view mode, the host element is required for each view (as reflected // by the public `EngineFlutterView` constructor). DomElement? hostElement, { JsViewConstraints? viewConstraints, } ) : _jsViewConstraints = viewConstraints, embeddingStrategy = EmbeddingStrategy.create(hostElement: hostElement), dimensionsProvider = DimensionsProvider.create(hostElement: hostElement) { // The embeddingStrategy will take care of cleaning up the rootElement on // hot restart. embeddingStrategy.attachViewRoot(dom.rootElement); pointerBinding = PointerBinding(this); _resizeSubscription = onResize.listen(_didResize); _globalHtmlAttributes.applyAttributes( viewId: viewId, autoDetectRenderer: FlutterConfiguration.flutterWebAutoDetect, rendererTag: renderer.rendererTag, buildMode: buildMode, ); registerHotRestartListener(dispose); } static EngineFlutterWindow implicit( EnginePlatformDispatcher platformDispatcher, DomElement? hostElement, ) => EngineFlutterWindow._(platformDispatcher, hostElement); @override final int viewId; @override final EnginePlatformDispatcher platformDispatcher; /// Abstracts all the DOM manipulations required to embed a Flutter view in a user-supplied `hostElement`. final EmbeddingStrategy embeddingStrategy; late final StreamSubscription<ui.Size?> _resizeSubscription; final ViewConfiguration _viewConfiguration = const ViewConfiguration(); /// Whether this [EngineFlutterView] has been disposed or not. 
bool isDisposed = false; /// Disposes of the [EngineFlutterView] instance and undoes all of its DOM /// tree and any event listeners. @mustCallSuper void dispose() { if (isDisposed) { return; } isDisposed = true; _resizeSubscription.cancel(); dimensionsProvider.close(); pointerBinding.dispose(); dom.rootElement.remove(); // TODO(harryterkelsen): What should we do about this in multi-view? renderer.clearFragmentProgramCache(); semantics.reset(); } @override void render(ui.Scene scene, {ui.Size? size}) { assert(!isDisposed, 'Trying to render a disposed EngineFlutterView.'); if (size != null) { resize(size); } platformDispatcher.render(scene, this); } @override void updateSemantics(ui.SemanticsUpdate update) { assert(!isDisposed, 'Trying to update semantics on a disposed EngineFlutterView.'); semantics.updateSemantics(update); } // TODO(yjbanov): How should this look like for multi-view? // https://github.com/flutter/flutter/issues/137445 late final AccessibilityAnnouncements accessibilityAnnouncements = AccessibilityAnnouncements(hostElement: dom.announcementsHost); late final GlobalHtmlAttributes _globalHtmlAttributes = GlobalHtmlAttributes( rootElement: dom.rootElement, hostElement: embeddingStrategy.hostElement, ); late final MouseCursor mouseCursor = MouseCursor(dom.rootElement); late final ContextMenu contextMenu = ContextMenu(dom.rootElement); late final DomManager dom = DomManager(devicePixelRatio: devicePixelRatio); late final PointerBinding pointerBinding; @override ViewConstraints get physicalConstraints { final double dpr = devicePixelRatio; final ui.Size currentLogicalSize = physicalSize / dpr; return ViewConstraints.fromJs(_jsViewConstraints, currentLogicalSize) * dpr; } final JsViewConstraints? _jsViewConstraints; late final EngineSemanticsOwner semantics = EngineSemanticsOwner(dom.semanticsHost); @override ui.Size get physicalSize { return _physicalSize ??= _computePhysicalSize(); } /// Resizes the `rootElement` to `newPhysicalSize` by changing its CSS style. /// /// This is used by the [render] method, when the framework sends new dimensions /// for the current Flutter View. /// /// Dimensions from the framework are constrained by the [physicalConstraints] /// that can be configured by the user when adding a view to the app. /// /// In practice, this method changes the size of the `rootElement` of the app /// so it can push/shrink inside its `hostElement`. That way, a Flutter app /// can change the layout of the container page. /// /// ``` /// <p>Some HTML content...</p> /// +--- (div) hostElement ------------------------------------+ /// | +--- rootElement ---------------------+ | /// | | | | /// | | | container | /// | | size applied to *this* | must be able | /// | | | to reflow | /// | | | | /// | +-------------------------------------+ | /// +----------------------------------------------------------+ /// <p>More HTML content...</p> /// ``` /// /// The `hostElement` needs to be styled in a way that allows its size to flow /// with its contents. Things like `max-height: 100px; overflow: hidden` will /// work as expected (by hiding the overflowing part of the flutter app), but /// if in that case flutter is not made aware of that max-height with /// `physicalConstraints`, it will end up rendering more pixels that are visible /// on the screen, with a possible hit to performance. /// /// TL;DR: The `viewConstraints` of a Flutter view, must take into consideration /// the CSS box-model restrictions imposed on its `hostElement` (especially when /// hiding `overflow`). 
Flutter does not attempt to interpret the styles of /// `hostElement` to compute its `physicalConstraints`, only its current size. void resize(ui.Size newPhysicalSize) { // The browser uses CSS, and CSS operates in logical sizes. final ui.Size logicalSize = newPhysicalSize / devicePixelRatio; dom.rootElement.style ..width = '${logicalSize.width}px' ..height = '${logicalSize.height}px'; // Force an update of the physicalSize so it's ready for the renderer. _computePhysicalSize(); } /// Lazily populated and cleared at the end of the frame. ui.Size? _physicalSize; ui.Size? debugPhysicalSizeOverride; /// Computes the physical size of the view. /// /// This function is expensive. It triggers browser layout if there are /// pending DOM writes. ui.Size _computePhysicalSize() { ui.Size? physicalSizeOverride; assert(() { physicalSizeOverride = debugPhysicalSizeOverride; return true; }()); return physicalSizeOverride ?? dimensionsProvider.computePhysicalSize(); } /// Forces the view to recompute its physical size. Useful for tests. void debugForceResize() { _physicalSize = _computePhysicalSize(); } @override ViewPadding get viewInsets => _viewInsets; ViewPadding _viewInsets = ui.ViewPadding.zero as ViewPadding; @override ViewPadding get viewPadding => _viewConfiguration.viewPadding; @override ViewPadding get systemGestureInsets => _viewConfiguration.systemGestureInsets; @override ViewPadding get padding => _viewConfiguration.padding; @override ui.GestureSettings get gestureSettings => _viewConfiguration.gestureSettings; @override List<ui.DisplayFeature> get displayFeatures => _viewConfiguration.displayFeatures; @override EngineFlutterDisplay get display => EngineFlutterDisplay.instance; @override double get devicePixelRatio => display.devicePixelRatio; @visibleForTesting final DimensionsProvider dimensionsProvider; Stream<ui.Size?> get onResize => dimensionsProvider.onResize; /// Called immediately after the view has been resized. /// /// When there is a text editing going on in mobile devices, do not change /// the physicalSize, change the [window.viewInsets]. See: /// https://api.flutter.dev/flutter/dart-ui/FlutterView/viewInsets.html /// https://api.flutter.dev/flutter/dart-ui/FlutterView/physicalSize.html /// /// Note: always check for rotations for a mobile device. Update the physical /// size if the change is caused by a rotation. void _didResize(ui.Size? newSize) { StyleManager.scaleSemanticsHost(dom.semanticsHost, devicePixelRatio); final ui.Size newPhysicalSize = _computePhysicalSize(); final bool isEditingOnMobile = isMobile && !_isRotation(newPhysicalSize) && textEditing.isEditing; if (isEditingOnMobile) { _computeOnScreenKeyboardInsets(true); } else { _physicalSize = newPhysicalSize; // When physical size changes this value has to be recalculated. _computeOnScreenKeyboardInsets(false); } platformDispatcher.invokeOnMetricsChanged(); } /// Uses the previous physical size and current innerHeight/innerWidth /// values to decide if a device is rotating. /// /// During a rotation the height and width values will (almost) swap place. /// Values can slightly differ due to space occupied by the browser header. /// For example the following values are collected for Pixel 3 rotation: /// /// height: 658 width: 393 /// new height: 313 new width: 738 /// /// The following values are from a changed caused by virtual keyboard. /// /// height: 658 width: 393 /// height: 368 width: 393 bool _isRotation(ui.Size newPhysicalSize) { // This method compares the new dimensions with the previous ones. 
// Return false if the previous dimensions are not set. if (_physicalSize != null) { // First confirm both height and width are effected. if (_physicalSize!.height != newPhysicalSize.height && _physicalSize!.width != newPhysicalSize.width) { // If prior to rotation height is bigger than width it should be the // opposite after the rotation and vice versa. if ((_physicalSize!.height > _physicalSize!.width && newPhysicalSize.height < newPhysicalSize.width) || (_physicalSize!.width > _physicalSize!.height && newPhysicalSize.width < newPhysicalSize.height)) { // Rotation detected return true; } } } return false; } void _computeOnScreenKeyboardInsets(bool isEditingOnMobile) { _viewInsets = dimensionsProvider.computeKeyboardInsets( _physicalSize!.height, isEditingOnMobile, ); } } final class _EngineFlutterViewImpl extends EngineFlutterView { _EngineFlutterViewImpl( EnginePlatformDispatcher platformDispatcher, DomElement hostElement, { JsViewConstraints? viewConstraints, } ) : super._(_nextViewId++, platformDispatcher, hostElement, viewConstraints: viewConstraints); } /// The Web implementation of [ui.SingletonFlutterWindow]. final class EngineFlutterWindow extends EngineFlutterView implements ui.SingletonFlutterWindow { EngineFlutterWindow._( EnginePlatformDispatcher platformDispatcher, DomElement? hostElement, ) : super._(kImplicitViewId, platformDispatcher, hostElement) { if (ui_web.isCustomUrlStrategySet) { _browserHistory = createHistoryForExistingState(ui_web.urlStrategy); } } @override void dispose() { super.dispose(); _browserHistory?.dispose(); } @override ui.VoidCallback? get onMetricsChanged => platformDispatcher.onMetricsChanged; @override set onMetricsChanged(ui.VoidCallback? callback) { platformDispatcher.onMetricsChanged = callback; } @override ui.Locale get locale => platformDispatcher.locale; @override List<ui.Locale> get locales => platformDispatcher.locales; @override ui.Locale? computePlatformResolvedLocale(List<ui.Locale> supportedLocales) { return platformDispatcher.computePlatformResolvedLocale(supportedLocales); } @override ui.VoidCallback? get onLocaleChanged => platformDispatcher.onLocaleChanged; @override set onLocaleChanged(ui.VoidCallback? callback) { platformDispatcher.onLocaleChanged = callback; } @override String get initialLifecycleState => platformDispatcher.initialLifecycleState; @override double get textScaleFactor => platformDispatcher.textScaleFactor; @override bool get nativeSpellCheckServiceDefined => platformDispatcher.nativeSpellCheckServiceDefined; @override bool get brieflyShowPassword => platformDispatcher.brieflyShowPassword; @override bool get alwaysUse24HourFormat => platformDispatcher.alwaysUse24HourFormat; @override ui.VoidCallback? get onTextScaleFactorChanged => platformDispatcher.onTextScaleFactorChanged; @override set onTextScaleFactorChanged(ui.VoidCallback? callback) { platformDispatcher.onTextScaleFactorChanged = callback; } @override ui.Brightness get platformBrightness => platformDispatcher.platformBrightness; @override ui.VoidCallback? get onPlatformBrightnessChanged => platformDispatcher.onPlatformBrightnessChanged; @override set onPlatformBrightnessChanged(ui.VoidCallback? callback) { platformDispatcher.onPlatformBrightnessChanged = callback; } @override String? get systemFontFamily => platformDispatcher.systemFontFamily; @override ui.VoidCallback? get onSystemFontFamilyChanged => platformDispatcher.onSystemFontFamilyChanged; @override set onSystemFontFamilyChanged(ui.VoidCallback? 
callback) { platformDispatcher.onSystemFontFamilyChanged = callback; } @override ui.FrameCallback? get onBeginFrame => platformDispatcher.onBeginFrame; @override set onBeginFrame(ui.FrameCallback? callback) { platformDispatcher.onBeginFrame = callback; } @override ui.VoidCallback? get onDrawFrame => platformDispatcher.onDrawFrame; @override set onDrawFrame(ui.VoidCallback? callback) { platformDispatcher.onDrawFrame = callback; } @override ui.TimingsCallback? get onReportTimings => platformDispatcher.onReportTimings; @override set onReportTimings(ui.TimingsCallback? callback) { platformDispatcher.onReportTimings = callback; } @override ui.PointerDataPacketCallback? get onPointerDataPacket => platformDispatcher.onPointerDataPacket; @override set onPointerDataPacket(ui.PointerDataPacketCallback? callback) { platformDispatcher.onPointerDataPacket = callback; } @override ui.KeyDataCallback? get onKeyData => platformDispatcher.onKeyData; @override set onKeyData(ui.KeyDataCallback? callback) { platformDispatcher.onKeyData = callback; } @override String get defaultRouteName => platformDispatcher.defaultRouteName; @override void scheduleFrame() => platformDispatcher.scheduleFrame(); @override bool get semanticsEnabled => platformDispatcher.semanticsEnabled; @override ui.VoidCallback? get onSemanticsEnabledChanged => platformDispatcher.onSemanticsEnabledChanged; @override set onSemanticsEnabledChanged(ui.VoidCallback? callback) { platformDispatcher.onSemanticsEnabledChanged = callback; } @override ui.FrameData get frameData => const ui.FrameData.webOnly(); @override ui.VoidCallback? get onFrameDataChanged => null; @override set onFrameDataChanged(ui.VoidCallback? callback) {} @override ui.AccessibilityFeatures get accessibilityFeatures => platformDispatcher.accessibilityFeatures; @override ui.VoidCallback? get onAccessibilityFeaturesChanged => platformDispatcher.onAccessibilityFeaturesChanged; @override set onAccessibilityFeaturesChanged(ui.VoidCallback? callback) { platformDispatcher.onAccessibilityFeaturesChanged = callback; } @override void sendPlatformMessage( String name, ByteData? data, ui.PlatformMessageResponseCallback? callback, ) { platformDispatcher.sendPlatformMessage(name, data, callback); } @override ui.PlatformMessageCallback? get onPlatformMessage => platformDispatcher.onPlatformMessage; @override set onPlatformMessage(ui.PlatformMessageCallback? callback) { platformDispatcher.onPlatformMessage = callback; } @override void setIsolateDebugName(String name) => ui.PlatformDispatcher.instance.setIsolateDebugName(name); /// Handles the browser history integration to allow users to use the back /// button, etc. BrowserHistory get browserHistory { return _browserHistory ??= createHistoryForExistingState(_urlStrategyForInitialization); } ui_web.UrlStrategy? get _urlStrategyForInitialization { // Prevent any further customization of URL strategy. ui_web.preventCustomUrlStrategy(); return ui_web.urlStrategy; } BrowserHistory? _browserHistory; // Must be either SingleEntryBrowserHistory or MultiEntriesBrowserHistory. Future<void> _useSingleEntryBrowserHistory() async { // Recreate the browser history mode that's appropriate for the existing // history state. // // If it happens to be a single-entry one, then there's nothing further to do. // // But if it's a multi-entry one, it will be torn down below and replaced // with a single-entry history. 
// // See: https://github.com/flutter/flutter/issues/79241 _browserHistory ??= createHistoryForExistingState(_urlStrategyForInitialization); if (_browserHistory is SingleEntryBrowserHistory) { return; } // At this point, we know that `_browserHistory` is a non-null // `MultiEntriesBrowserHistory` instance. final ui_web.UrlStrategy? strategy = _browserHistory?.urlStrategy; await _browserHistory?.tearDown(); _browserHistory = SingleEntryBrowserHistory(urlStrategy: strategy); } Future<void> _useMultiEntryBrowserHistory() async { // Recreate the browser history mode that's appropriate for the existing // history state. // // If it happens to be a multi-entry one, then there's nothing further to do. // // But if it's a single-entry one, it will be torn down below and replaced // with a multi-entry history. // // See: https://github.com/flutter/flutter/issues/79241 _browserHistory ??= createHistoryForExistingState(_urlStrategyForInitialization); if (_browserHistory is MultiEntriesBrowserHistory) { return; } // At this point, we know that `_browserHistory` is a non-null // `SingleEntryBrowserHistory` instance. final ui_web.UrlStrategy? strategy = _browserHistory?.urlStrategy; await _browserHistory?.tearDown(); _browserHistory = MultiEntriesBrowserHistory(urlStrategy: strategy); } @visibleForTesting Future<void> debugInitializeHistory( ui_web.UrlStrategy? strategy, { required bool useSingle, }) async { await _browserHistory?.tearDown(); ui_web.urlStrategy = strategy; if (useSingle) { _browserHistory = SingleEntryBrowserHistory(urlStrategy: strategy); } else { _browserHistory = MultiEntriesBrowserHistory(urlStrategy: strategy); } } Future<void> resetHistory() async { await _browserHistory?.tearDown(); _browserHistory = null; ui_web.debugResetCustomUrlStrategy(); } Future<void> _endOfTheLine = Future<void>.value(); Future<bool> _waitInTheLine(_HandleMessageCallBack callback) async { final Future<void> currentPosition = _endOfTheLine; final Completer<void> completer = Completer<void>(); _endOfTheLine = completer.future; await currentPosition; bool result = false; try { result = await callback(); } finally { completer.complete(); } return result; } Future<bool> handleNavigationMessage(ByteData? data) async { return _waitInTheLine(() async { final MethodCall decoded = const JSONMethodCodec().decodeMethodCall(data); final Map<String, dynamic>? arguments = decoded.arguments as Map<String, dynamic>?; switch (decoded.method) { case 'selectMultiEntryHistory': await _useMultiEntryBrowserHistory(); return true; case 'selectSingleEntryHistory': await _useSingleEntryBrowserHistory(); return true; // the following cases assert that arguments are not null case 'routeUpdated': // deprecated assert(arguments != null); await _useSingleEntryBrowserHistory(); browserHistory.setRouteName(arguments!.tryString('routeName')); return true; case 'routeInformationUpdated': assert(arguments != null); final String? uriString = arguments!.tryString('uri'); final String path; if (uriString != null) { final Uri uri = Uri.parse(uriString); // Need to remove scheme and authority. path = Uri.decodeComponent( Uri( path: uri.path.isEmpty ? '/' : uri.path, queryParameters: uri.queryParametersAll.isEmpty ? null : uri.queryParametersAll, fragment: uri.fragment.isEmpty ? null : uri.fragment, ).toString(), ); } else { path = arguments.tryString('location')!; } browserHistory.setRouteName( path, state: arguments['state'], replace: arguments.tryBool('replace') ?? 
false, ); return true; } return false; }); } // TODO(mdebbar): Deprecate this and remove it. // https://github.com/flutter/flutter/issues/127395 void debugOverrideDevicePixelRatio(double? value) { assert(() { printWarning( 'The window.debugOverrideDevicePixelRatio API is deprecated and will ' 'be removed in a future release. Please use ' '`debugOverrideDevicePixelRatio` from `dart:ui_web` instead.', ); return true; }()); display.debugOverrideDevicePixelRatio(value); } // TODO(mdebbar): Deprecate this and remove it. // https://github.com/flutter/flutter/issues/127395 ui.Size? get webOnlyDebugPhysicalSizeOverride { assert(() { printWarning( 'The webOnlyDebugPhysicalSizeOverride API is deprecated and will be ' 'removed in a future release. Please use ' '`SingletonFlutterWindow.debugPhysicalSizeOverride` from `dart:ui_web` ' 'instead.', ); return true; }()); return debugPhysicalSizeOverride; } // TODO(mdebbar): Deprecate this and remove it. // https://github.com/flutter/flutter/issues/127395 set webOnlyDebugPhysicalSizeOverride(ui.Size? value) { assert(() { printWarning( 'The webOnlyDebugPhysicalSizeOverride API is deprecated and will be ' 'removed in a future release. Please use ' '`SingletonFlutterWindow.debugPhysicalSizeOverride` from `dart:ui_web` ' 'instead.', ); return true; }()); debugPhysicalSizeOverride = value; } } /// The window singleton. /// /// `dart:ui` window delegates to this value. However, this value has a wider /// API surface, providing Web-specific functionality that the standard /// `dart:ui` version does not. EngineFlutterWindow get window { assert( _window != null, 'Trying to access the implicit FlutterView, but it is not available.\n' 'Note: the implicit FlutterView is not available in multi-view mode.', ); return _window!; } EngineFlutterWindow? _window; /// Initializes the [window] (aka the implicit view), if it's not already /// initialized. EngineFlutterWindow ensureImplicitViewInitialized({ DomElement? hostElement, }) { if (_window == null) { _window = EngineFlutterView.implicit( EnginePlatformDispatcher.instance, hostElement, ); EnginePlatformDispatcher.instance.viewManager.registerView(_window!); } return _window!; } /// The Web implementation of [ui.ViewPadding]. class ViewPadding implements ui.ViewPadding { const ViewPadding({ required this.left, required this.top, required this.right, required this.bottom, }); @override final double left; @override final double top; @override final double right; @override final double bottom; } class ViewConstraints implements ui.ViewConstraints { const ViewConstraints({ this.minWidth = 0.0, this.maxWidth = double.infinity, this.minHeight = 0.0, this.maxHeight = double.infinity, }); ViewConstraints.tight(ui.Size size) : minWidth = size.width, maxWidth = size.width, minHeight = size.height, maxHeight = size.height; /// Converts JsViewConstraints into ViewConstraints. /// /// Since JsViewConstraints are expressed by the user, in logical pixels, this /// conversion uses logical pixels for the current size as well. /// /// The resulting ViewConstraints object will be multiplied by devicePixelRatio /// later to compute the physicalViewConstraints, which is what the framework /// uses. factory ViewConstraints.fromJs( JsViewConstraints? 
constraints, ui.Size currentLogicalSize) { if (constraints == null) { return ViewConstraints.tight(currentLogicalSize); } return ViewConstraints( minWidth: _computeMinConstraintValue(constraints.minWidth, currentLogicalSize.width), minHeight: _computeMinConstraintValue(constraints.minHeight, currentLogicalSize.height), maxWidth: _computeMaxConstraintValue(constraints.maxWidth, currentLogicalSize.width), maxHeight: _computeMaxConstraintValue(constraints.maxHeight, currentLogicalSize.height), ); } @override final double minWidth; @override final double maxWidth; @override final double minHeight; @override final double maxHeight; @override bool isSatisfiedBy(ui.Size size) { return (minWidth <= size.width) && (size.width <= maxWidth) && (minHeight <= size.height) && (size.height <= maxHeight); } @override bool get isTight => minWidth >= maxWidth && minHeight >= maxHeight; ViewConstraints operator*(double factor) { return ViewConstraints( minWidth: minWidth * factor, maxWidth: maxWidth * factor, minHeight: minHeight * factor, maxHeight: maxHeight * factor, ); } @override ViewConstraints operator/(double factor) { return ViewConstraints( minWidth: minWidth / factor, maxWidth: maxWidth / factor, minHeight: minHeight / factor, maxHeight: maxHeight / factor, ); } @override bool operator ==(Object other) { if (identical(this, other)) { return true; } if (other.runtimeType != runtimeType) { return false; } return other is ViewConstraints && other.minWidth == minWidth && other.maxWidth == maxWidth && other.minHeight == minHeight && other.maxHeight == maxHeight; } @override int get hashCode => Object.hash(minWidth, maxWidth, minHeight, maxHeight); @override String toString() { if (minWidth == double.infinity && minHeight == double.infinity) { return 'ViewConstraints(biggest)'; } if (minWidth == 0 && maxWidth == double.infinity && minHeight == 0 && maxHeight == double.infinity) { return 'ViewConstraints(unconstrained)'; } String describe(double min, double max, String dim) { if (min == max) { return '$dim=${min.toStringAsFixed(1)}'; } return '${min.toStringAsFixed(1)}<=$dim<=${max.toStringAsFixed(1)}'; } final String width = describe(minWidth, maxWidth, 'w'); final String height = describe(minHeight, maxHeight, 'h'); return 'ViewConstraints($width, $height)'; } } // Computes the "min" value for a constraint that takes into account user `desired` // configuration and the actual available value. // // Returns the `desired` value unless it is `null`, in which case it returns the // `available` value. double _computeMinConstraintValue(double? desired, double available) { assert(desired == null || desired >= 0, 'Minimum constraint must be >= 0 if set.'); assert(desired == null || desired.isFinite, 'Minimum constraint must be finite.'); return desired ?? available; } // Computes the "max" value for a constraint that takes into account user `desired` // configuration and the `available` size. // // Returns the `desired` value unless it is `null`, in which case it returns the // `available` value. // // A `desired` value of `Infinity` or `Number.POSITIVE_INFINITY` (from JS) means // "unconstrained". // // This method allows returning values larger than `available`, so the Flutter // app is able to stretch its container up to a certain value, without being // fully unconstrained. double _computeMaxConstraintValue(double? desired, double available) { assert(desired == null || desired >= 0, 'Maximum constraint must be >= 0 if set.'); return desired ?? available; }
engine/lib/web_ui/lib/src/engine/window.dart/0
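`_didResize` and `_isRotation` in window.dart distinguish a device rotation (where `physicalSize` should be updated) from a resize caused by the on-screen keyboard (where only `viewInsets` should change). Below is a minimal standalone sketch of that heuristic, under the assumption that a rotation is a resize in which both dimensions change and the portrait/landscape ordering flips; the names are illustrative and are not part of the engine API.

```dart
// Minimal sketch of the rotation-vs-keyboard resize heuristic.
class ViewSize {
  const ViewSize(this.width, this.height);
  final double width;
  final double height;
}

bool looksLikeRotation(ViewSize? previous, ViewSize next) {
  if (previous == null) {
    return false; // No earlier size to compare against yet.
  }
  // Both dimensions must change...
  final bool bothChanged =
      previous.width != next.width && previous.height != next.height;
  // ...and the portrait/landscape orientation must flip.
  final bool orientationFlipped =
      (previous.height > previous.width) != (next.height > next.width);
  return bothChanged && orientationFlipped;
}

void main() {
  // Values from the doc comment above (Pixel 3): width 393, height 658.
  const ViewSize portrait = ViewSize(393, 658);
  // Rotation: both dimensions change and the larger/smaller sides swap.
  print(looksLikeRotation(portrait, const ViewSize(738, 313))); // true
  // Virtual keyboard: only the height shrinks, so it is not a rotation.
  print(looksLikeRotation(portrait, const ViewSize(393, 368))); // false
}
```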
{ "file_path": "engine/lib/web_ui/lib/src/engine/window.dart", "repo_id": "engine", "token_count": 10211 }
283
name: ui publish_to: none # Keep the SDK version range in sync with pubspecs under web_sdk environment: sdk: '>=3.2.0-0 <4.0.0' dependencies: js: 0.6.4 meta: ^1.7.0 web_locale_keymap: path: ../../third_party/web_locale_keymap web_unicode: path: ../../third_party/web_unicode web_test_fonts: path: ../../third_party/web_test_fonts dev_dependencies: archive: 3.4.2 args: any async: any convert: any crypto: any html: 0.15.4 http: 1.1.0 http_multi_server: any image: 3.0.1 matcher: 0.12.16 package_config: any path: 1.8.0 pool: any quiver: 3.2.1 shelf: any shelf_packages_handler: any shelf_static: any shelf_web_socket: any stack_trace: any stream_channel: 2.1.1 test: 1.24.8 test_api: any test_core: any typed_data: any uuid: 4.1.0 watcher: 1.1.0 web_socket_channel: any webdriver: 3.0.3 webkit_inspection_protocol: any yaml: 3.0.0 web_test_utils: path: ../../web_sdk/web_test_utils web_engine_tester: path: ../../web_sdk/web_engine_tester skia_gold_client: path: ../../testing/skia_gold_client dependency_overrides: engine_repo_tools: path: ../../tools/pkg/engine_repo_tools
engine/lib/web_ui/pubspec.yaml/0
{ "file_path": "engine/lib/web_ui/pubspec.yaml", "repo_id": "engine", "token_count": 533 }
284
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "export.h" #include "third_party/skia/include/core/SkString.h" SKWASM_EXPORT SkString* skString_allocate(size_t length) { return new SkString(length); } SKWASM_EXPORT char* skString_getData(SkString* string) { return string->data(); } SKWASM_EXPORT void skString_free(SkString* string) { return delete string; } SKWASM_EXPORT std::u16string* skString16_allocate(size_t length) { std::u16string* string = new std::u16string(); string->resize(length); return string; } SKWASM_EXPORT char16_t* skString16_getData(std::u16string* string) { return string->data(); } SKWASM_EXPORT void skString16_free(std::u16string* string) { delete string; }
engine/lib/web_ui/skwasm/string.cpp/0
{ "file_path": "engine/lib/web_ui/skwasm/string.cpp", "repo_id": "engine", "token_count": 299 }
285
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import 'package:ui/ui.dart' as ui; import 'common.dart'; void main() { internalBootstrapBrowserTest(() => testMain); } const ui.Rect region = ui.Rect.fromLTRB(0, 0, 500, 250); void testMain() { group('ColorFilter', () { setUpCanvasKitTest(withImplicitView: true); test('ColorFilter.matrix applies a color filter', () async { final LayerSceneBuilder builder = LayerSceneBuilder(); builder.pushOffset(0, 0); // Draw a red circle and apply it to the scene. final CkPictureRecorder recorder = CkPictureRecorder(); final CkCanvas canvas = recorder.beginRecording(region); canvas.drawCircle( const ui.Offset(75, 125), 50, CkPaint()..color = const ui.Color.fromARGB(255, 255, 0, 0), ); final CkPicture redCircle = recorder.endRecording(); builder.addPicture(ui.Offset.zero, redCircle); // Apply a "greyscale" color filter. builder.pushColorFilter(const ui.ColorFilter.matrix(<double>[ 0.2126, 0.7152, 0.0722, 0, 0, // 0.2126, 0.7152, 0.0722, 0, 0, // 0.2126, 0.7152, 0.0722, 0, 0, // 0, 0, 0, 1, 0, // ])); // Draw another red circle and apply it to the scene. // This one should be grey since we have the color filter. final CkPictureRecorder recorder2 = CkPictureRecorder(); final CkCanvas canvas2 = recorder2.beginRecording(region); canvas2.drawCircle( const ui.Offset(425, 125), 50, CkPaint()..color = const ui.Color.fromARGB(255, 255, 0, 0), ); final CkPicture greyCircle = recorder2.endRecording(); builder.addPicture(ui.Offset.zero, greyCircle); await matchSceneGolden('canvaskit_colorfilter.png', builder.build(), region: region); }); test('invertColors inverts the colors', () async { final LayerSceneBuilder builder = LayerSceneBuilder(); builder.pushOffset(0, 0); // Draw a red circle and apply it to the scene. final CkPictureRecorder recorder = CkPictureRecorder(); final CkCanvas canvas = recorder.beginRecording(region); canvas.drawCircle( const ui.Offset(75, 125), 50, CkPaint()..color = const ui.Color.fromARGB(255, 255, 0, 0), ); final CkPicture redCircle = recorder.endRecording(); builder.addPicture(ui.Offset.zero, redCircle); // Draw another red circle with invertColors. final CkPictureRecorder recorder2 = CkPictureRecorder(); final CkCanvas canvas2 = recorder2.beginRecording(region); canvas2.drawCircle( const ui.Offset(425, 125), 50, CkPaint() ..color = const ui.Color.fromARGB(255, 255, 0, 0) ..invertColors = true, ); final CkPicture invertedCircle = recorder2.endRecording(); builder.addPicture(ui.Offset.zero, invertedCircle); await matchSceneGolden('canvaskit_invertcolors.png', builder.build(), region: region); }); test('ColorFilter.matrix works for inverse matrix', () async { final LayerSceneBuilder builder = LayerSceneBuilder(); builder.pushOffset(0, 0); // Draw a red, green, and blue square with the inverted color matrix. 
builder.pushColorFilter(const ui.ColorFilter.matrix(<double>[ -1, 0, 0, 0, 255, // 0, -1, 0, 0, 255, // 0, 0, -1, 0, 255, // 0, 0, 0, 1, 0, // ])); final CkPictureRecorder recorder = CkPictureRecorder(); final CkCanvas canvas = recorder.beginRecording(region); canvas.drawRect(const ui.Rect.fromLTWH(50, 50, 100, 100), CkPaint()..color = const ui.Color.fromARGB(255, 255, 0, 0)); canvas.drawRect(const ui.Rect.fromLTWH(200, 50, 100, 100), CkPaint()..color = const ui.Color.fromARGB(255, 0, 255, 0)); canvas.drawRect(const ui.Rect.fromLTWH(350, 50, 100, 100), CkPaint()..color = const ui.Color.fromARGB(255, 0, 0, 255)); final CkPicture invertedSquares = recorder.endRecording(); builder.addPicture(ui.Offset.zero, invertedSquares); await matchSceneGolden('canvaskit_inverse_colormatrix.png', builder.build(), region: region); }); test('ColorFilter color with 0 opacity', () async { final LayerSceneBuilder builder = LayerSceneBuilder(); builder.pushOffset(0,0); final CkPictureRecorder recorder = CkPictureRecorder(); final CkCanvas canvas = recorder.beginRecording(region); canvas.drawCircle( const ui.Offset(75, 125), 50, CkPaint()..color = const ui.Color.fromARGB(255, 255, 0, 0), ); final CkPicture redCircle1 = recorder.endRecording(); builder.addPicture(ui.Offset.zero, redCircle1); builder.pushColorFilter(ui.ColorFilter.mode(const ui.Color(0x00000000).withOpacity(0), ui.BlendMode.srcOver)); // Draw another red circle and apply it to the scene. // This one should also be red with the color filter doing nothing final CkPictureRecorder recorder2 = CkPictureRecorder(); final CkCanvas canvas2 = recorder2.beginRecording(region); canvas2.drawCircle( const ui.Offset(425, 125), 50, CkPaint()..color = const ui.Color.fromARGB(255, 255, 0, 0), ); final CkPicture redCircle2 = recorder2.endRecording(); builder.addPicture(ui.Offset.zero, redCircle2); await matchSceneGolden('canvaskit_transparent_colorfilter.png', builder.build(), region: region); }); // TODO(hterkelsen): https://github.com/flutter/flutter/issues/71520 }, skip: isSafari || isFirefox); }
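// --- Editorial sketch (appended by the editor, not part of the original
// test). A minimal helper showing the arithmetic behind the 5x4 matrices
// used above: as documented for ui.ColorFilter.matrix, the matrix is given
// in row-major order and operates on unnormalized 0-255 color components, so
// the greyscale matrix maps pure red (255, 0, 0, 255) to roughly
// (54, 54, 54, 255) and the inverse matrix maps it to cyan (0, 255, 255, 255).
// The helper name is hypothetical.
List<double> applyColorMatrix(List<double> matrix, List<double> rgba) {
  assert(matrix.length == 20 && rgba.length == 4);
  return List<double>.generate(4, (int row) {
    final int i = row * 5;
    // Each output component is a weighted sum of the inputs plus a bias term.
    return matrix[i] * rgba[0] +
        matrix[i + 1] * rgba[1] +
        matrix[i + 2] * rgba[2] +
        matrix[i + 3] * rgba[3] +
        matrix[i + 4];
  });
}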
engine/lib/web_ui/test/canvaskit/color_filter_golden_test.dart/0
{ "file_path": "engine/lib/web_ui/test/canvaskit/color_filter_golden_test.dart", "repo_id": "engine", "token_count": 2339 }
286
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import 'package:ui/ui.dart' as ui; import 'package:ui/ui_web/src/ui_web.dart' as ui_web; import '../common/matchers.dart'; import 'common.dart'; void main() { internalBootstrapBrowserTest(() => testMain); } void testMain() { group('CanvasKit', () { setUpCanvasKitTest(withImplicitView: true); late LayerScene scene; setUp(() { // Create a scene to use in tests. final CkPicture picture = paintPicture(const ui.Rect.fromLTRB(0, 0, 60, 60), (CkCanvas canvas) { canvas.drawRect(const ui.Rect.fromLTRB(0, 0, 60, 60), CkPaint()..style = ui.PaintingStyle.fill); }); final LayerSceneBuilder sb = LayerSceneBuilder(); sb.addPicture(ui.Offset.zero, picture); scene = sb.build(); }); test('can render into arbitrary views', () async { await CanvasKitRenderer.instance.renderScene(scene, implicitView); final EngineFlutterView anotherView = EngineFlutterView( EnginePlatformDispatcher.instance, createDomElement('another-view')); EnginePlatformDispatcher.instance.viewManager.registerView(anotherView); await CanvasKitRenderer.instance.renderScene(scene, anotherView); }); test('will error if trying to render into an unregistered view', () async { final EngineFlutterView unregisteredView = EngineFlutterView( EnginePlatformDispatcher.instance, createDomElement('unregistered-view')); expect( () => CanvasKitRenderer.instance.renderScene(scene, unregisteredView), throwsAssertionError, ); }); test('will dispose the Rasterizer for a disposed view', () async { final EngineFlutterView view = EngineFlutterView( EnginePlatformDispatcher.instance, createDomElement('multi-view')); EnginePlatformDispatcher.instance.viewManager.registerView(view); expect( CanvasKitRenderer.instance.debugGetRasterizerForView(view), isNotNull, ); EnginePlatformDispatcher.instance.viewManager .disposeAndUnregisterView(view.viewId); expect( CanvasKitRenderer.instance.debugGetRasterizerForView(view), isNull, ); }); // Issue https://github.com/flutter/flutter/issues/142094 test('does not reset platform view factories when disposing a view', () async { expect(PlatformViewManager.instance.knowsViewType('self-test'), isFalse); final EngineFlutterView view = EngineFlutterView( EnginePlatformDispatcher.instance, createDomElement('multi-view')); EnginePlatformDispatcher.instance.viewManager.registerView(view); expect( CanvasKitRenderer.instance.debugGetRasterizerForView(view), isNotNull, ); EnginePlatformDispatcher.instance.viewManager .disposeAndUnregisterView(view.viewId); expect( CanvasKitRenderer.instance.debugGetRasterizerForView(view), isNull, ); expect( PlatformViewManager.instance.knowsViewType( ui_web.PlatformViewRegistry.defaultVisibleViewType), isTrue); expect( PlatformViewManager.instance.knowsViewType( ui_web.PlatformViewRegistry.defaultInvisibleViewType), isTrue); }); }); }
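// --- Editorial note (appended; an observation about the tests above, not
// part of the original file). These tests exercise the expectation that
// CanvasKitRenderer keeps one Rasterizer per registered EngineFlutterView:
// rendering into an unregistered view throws an assertion error, disposing a
// registered view also disposes its Rasterizer, and doing so must not clear
// the globally registered platform view factories.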
engine/lib/web_ui/test/canvaskit/multi_view_test.dart/0
{ "file_path": "engine/lib/web_ui/test/canvaskit/multi_view_test.dart", "repo_id": "engine", "token_count": 1371 }
287
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:typed_data'; /// A 1x1 fully transparent PNG image. final Uint8List kTransparentImage = Uint8List.fromList(<int>[ 0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 0x00, 0x00, 0x00, 0x0d, 0x49, 0x48, 0x44, 0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x04, 0x00, 0x00, 0x00, 0xb5, 0x1c, 0x0c, 0x02, 0x00, 0x00, 0x00, 0x0b, 0x49, 0x44, 0x41, 0x54, 0x78, 0xda, 0x63, 0x64, 0x60, 0x00, 0x00, 0x00, 0x06, 0x00, 0x02, 0x30, 0x81, 0xd0, 0x2f, 0x00, 0x00, 0x00, 0x00, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82, ]); /// A 4x4 PNG image sample. final Uint8List k4x4PngImage = Uint8List.fromList(<int>[ 0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a, 0x00, 0x00, 0x00, 0x0d, 0x49, 0x48, 0x44, 0x52, 0x00, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x04, 0x08, 0x06, 0x00, 0x00, 0x00, 0xa9, 0xf1, 0x9e, 0x7e, 0x00, 0x00, 0x00, 0x13, 0x49, 0x44, 0x41, 0x54, 0x78, 0xda, 0x63, 0xfc, 0xcf, 0xc0, 0x50, 0xcf, 0x80, 0x04, 0x18, 0x49, 0x17, 0x00, 0x00, 0xf2, 0xae, 0x05, 0xfd, 0x52, 0x01, 0xc2, 0xde, 0x00, 0x00, 0x00, 0x00, 0x49, 0x45, 0x4e, 0x44, 0xae, 0x42, 0x60, 0x82, ]); /// An animated GIF image with 3 1x1 pixel frames (a red, green, and blue /// frames). The GIF animates forever, and each frame has a 100ms delay. final Uint8List kAnimatedGif = Uint8List.fromList(<int> [ 0x47, 0x49, 0x46, 0x38, 0x39, 0x61, 0x01, 0x00, 0x01, 0x00, 0xa1, 0x03, 0x00, 0x00, 0x00, 0xff, 0xff, 0x00, 0x00, 0x00, 0xff, 0x00, 0xff, 0xff, 0xff, 0x21, 0xff, 0x0b, 0x4e, 0x45, 0x54, 0x53, 0x43, 0x41, 0x50, 0x45, 0x32, 0x2e, 0x30, 0x03, 0x01, 0x00, 0x00, 0x00, 0x21, 0xf9, 0x04, 0x00, 0x0a, 0x00, 0xff, 0x00, 0x2c, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, 0x02, 0x02, 0x4c, 0x01, 0x00, 0x21, 0xf9, 0x04, 0x00, 0x0a, 0x00, 0xff, 0x00, 0x2c, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, 0x02, 0x02, 0x54, 0x01, 0x00, 0x21, 0xf9, 0x04, 0x00, 0x0a, 0x00, 0xff, 0x00, 0x2c, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x01, 0x00, 0x00, 0x02, 0x02, 0x44, 0x01, 0x00, 0x3b, ]);
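// --- Editorial sketch (appended by the editor, not part of the original
// file). A small helper, assuming the standard PNG layout (8-byte signature
// followed by the IHDR chunk), that reads back the dimensions encoded in the
// PNG fixtures above; handy for sanity-checking new test images. The helper
// name is hypothetical, it applies only to the PNG samples (not the GIF),
// and it intentionally does no validation.
({int width, int height}) debugReadPngSize(Uint8List png) {
  final ByteData header = ByteData.sublistView(png);
  // Width and height are 4-byte big-endian integers at fixed offsets 16 and
  // 20, inside the IHDR chunk that immediately follows the PNG signature.
  return (width: header.getUint32(16), height: header.getUint32(20));
}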
engine/lib/web_ui/test/canvaskit/test_data.dart/0
{ "file_path": "engine/lib/web_ui/test/canvaskit/test_data.dart", "repo_id": "engine", "token_count": 1285 }
288
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:async'; import 'dart:typed_data'; import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import '../common/test_initialization.dart'; void main() { internalBootstrapBrowserTest(() => testMain); } Future<void> testMain() async { setUpUnitTests(); group('message handler', () { const String testText = 'test text'; late ClipboardMessageHandler clipboardMessageHandler; MockClipboardAPICopyStrategy clipboardAPICopyStrategy = MockClipboardAPICopyStrategy(); MockClipboardAPIPasteStrategy clipboardAPIPasteStrategy = MockClipboardAPIPasteStrategy(); setUp(() { clipboardMessageHandler = ClipboardMessageHandler(); clipboardAPICopyStrategy = MockClipboardAPICopyStrategy(); clipboardAPIPasteStrategy = MockClipboardAPIPasteStrategy(); clipboardMessageHandler.copyToClipboardStrategy = clipboardAPICopyStrategy; clipboardMessageHandler.pasteFromClipboardStrategy = clipboardAPIPasteStrategy; }); test('set data successful', () async { clipboardAPICopyStrategy.testResult = true; const MethodCodec codec = JSONMethodCodec(); final Completer<bool> completer = Completer<bool>(); void callback(ByteData? data) { completer.complete(codec.decodeEnvelope(data!) as bool); } clipboardMessageHandler.setDataMethodCall( const MethodCall('Clipboard.setData', <String, dynamic>{ 'text': testText, }), callback); expect(await completer.future, isTrue); }); test('set data error', () async { clipboardAPICopyStrategy.testResult = false; const MethodCodec codec = JSONMethodCodec(); final Completer<ByteData> completer = Completer<ByteData>(); void callback(ByteData? data) { completer.complete(data!); } clipboardMessageHandler.setDataMethodCall( const MethodCall('Clipboard.setData', <String, dynamic>{ 'text': testText, }), callback); final ByteData result = await completer.future; expect( () =>codec.decodeEnvelope(result), throwsA(const TypeMatcher<PlatformException>() .having((PlatformException e) => e.code, 'code', equals('copy_fail')))); }); test('get data successful', () async { clipboardAPIPasteStrategy.testResult = testText; const MethodCodec codec = JSONMethodCodec(); final Completer<Map<String, dynamic>> completer = Completer<Map<String, dynamic>>(); void callback(ByteData? data) { completer.complete(codec.decodeEnvelope(data!) as Map<String, dynamic>); } clipboardMessageHandler.getDataMethodCall(callback); final Map<String, dynamic> result = await completer.future; expect(result['text'], testText); }); test('has strings true', () async { clipboardAPIPasteStrategy.testResult = testText; const MethodCodec codec = JSONMethodCodec(); final Completer<Map<String, dynamic>> completer = Completer<Map<String, dynamic>>(); void callback(ByteData? data) { completer.complete(codec.decodeEnvelope(data!) as Map<String, dynamic>); } clipboardMessageHandler.hasStringsMethodCall(callback); final Map<String, dynamic> result = await completer.future; expect(result['value'], isTrue); }); test('has strings false', () async { clipboardAPIPasteStrategy.testResult = ''; const MethodCodec codec = JSONMethodCodec(); final Completer<Map<String, dynamic>> completer = Completer<Map<String, dynamic>>(); void callback(ByteData? data) { completer.complete(codec.decodeEnvelope(data!) 
as Map<String, dynamic>); } clipboardMessageHandler.hasStringsMethodCall(callback); final Map<String, dynamic> result = await completer.future; expect(result['value'], isFalse); }); test('has strings error', () async { clipboardAPIPasteStrategy.errors = true; const MethodCodec codec = JSONMethodCodec(); final Completer<Map<String, dynamic>> completer = Completer<Map<String, dynamic>>(); void callback(ByteData? data) { completer.complete(codec.decodeEnvelope(data!) as Map<String, dynamic>); } clipboardMessageHandler.hasStringsMethodCall(callback); final Map<String, dynamic> result = await completer.future; expect(result['value'], isFalse); }); }); } class MockClipboardAPICopyStrategy implements ClipboardAPICopyStrategy { bool testResult = true; @override Future<bool> setData(String? text) { return Future<bool>.value(testResult); } } class MockClipboardAPIPasteStrategy implements ClipboardAPIPasteStrategy { String testResult = ''; // Whether getData's Future will resolve with an error. bool errors = false; @override Future<String> getData() { if (errors) { return Future<String>.error(Error()); } return Future<String>.value(testResult); } }
engine/lib/web_ui/test/engine/clipboard_test.dart/0
{ "file_path": "engine/lib/web_ui/test/engine/clipboard_test.dart", "repo_id": "engine", "token_count": 1898 }
289
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:meta/meta.dart' show isTest; import 'package:quiver/testing/async.dart'; import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import 'package:ui/ui.dart' as ui; import '../common/keyboard_test_common.dart'; const int kLocationLeft = 1; const int kLocationRight = 2; const int kLocationNumpad = 3; final int kPhysicalKeyA = kWebToPhysicalKey['KeyA']!; final int kPhysicalKeyE = kWebToPhysicalKey['KeyE']!; final int kPhysicalKeyL = kWebToPhysicalKey['KeyL']!; final int kPhysicalKeyU = kWebToPhysicalKey['KeyU']!; final int kPhysicalDigit1 = kWebToPhysicalKey['Digit1']!; final int kPhysicalNumpad1 = kWebToPhysicalKey['Numpad1']!; final int kPhysicalShiftLeft = kWebToPhysicalKey['ShiftLeft']!; final int kPhysicalShiftRight = kWebToPhysicalKey['ShiftRight']!; final int kPhysicalMetaLeft = kWebToPhysicalKey['MetaLeft']!; final int kPhysicalCapsLock = kWebToPhysicalKey['CapsLock']!; final int kPhysicalScrollLock = kWebToPhysicalKey['ScrollLock']!; final int kPhysicalBracketLeft = kWebToPhysicalKey['BracketLeft']!; // A web-specific physical key when code is empty. const int kPhysicalEmptyCode = 0x1700000000; const int kLogicalKeyA = 0x00000000061; const int kLogicalKeyL = 0x0000000006C; const int kLogicalKeyU = 0x00000000075; const int kLogicalDigit1 = 0x00000000031; final int kLogicalNumpad1 = kWebLogicalLocationMap['1']![kLocationNumpad]!; final int kLogicalShiftLeft = kWebLogicalLocationMap['Shift']![kLocationLeft]!; final int kLogicalShiftRight = kWebLogicalLocationMap['Shift']![kLocationRight]!; final int kLogicalCtrlLeft = kWebLogicalLocationMap['Control']![kLocationLeft]!; final int kLogicalAltLeft = kWebLogicalLocationMap['Alt']![kLocationLeft]!; final int kLogicalMetaLeft = kWebLogicalLocationMap['Meta']![kLocationLeft]!; final int kLogicalCapsLock = kWebToLogicalKey['CapsLock']!; final int kLogicalScrollLock = kWebToLogicalKey['ScrollLock']!; final int kLogicalProcess = kWebToLogicalKey['Process']!; const int kWebKeyIdPlane = 0x1700000000; final int kLogicalBracketLeft = kPhysicalBracketLeft + kWebKeyIdPlane; // Dead key algorithm. 
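// --- Editorial note (hedged; inferred from the constants above and from the
// 'Dead keys are distinguishable' test below, not an authoritative
// description of the converter). When the browser reports key: 'Dead' there
// is no character from which a logical key can be derived, so the converter
// appears to synthesize one in the web plane (kWebKeyIdPlane) from the
// physical key combined with the active modifier state; that is why the test
// asserts that Alt+E, Alt+U and Alt+Shift+E yield three distinct logical ids
// (kLogicalAltE, kLogicalAltU, kLogicalAltShiftE).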
void main() { internalBootstrapBrowserTest(() => testMain); } void testMain() { test('KeyData.toString', () { expect(const ui.KeyData( type: ui.KeyEventType.down, physical: 0x700e5, logical: 0x61, character: 'A', timeStamp: Duration.zero, synthesized: false, ).toString(), 'KeyData(Key Down, physical: 0x700e5, logical: 0x61 (Unicode), character: "A" (0x41))'); expect(const ui.KeyData( type: ui.KeyEventType.up, physical: 0x700e6, logical: 0x100000061, character: '\n', timeStamp: Duration.zero, synthesized: true, ).toString(), r'KeyData(Key Up, physical: 0x700e6, logical: 0x100000061 (Unprintable), character: "\n" (0x0a), synthesized)'); expect(const ui.KeyData( type: ui.KeyEventType.repeat, physical: 0x700e7, logical: 0x9900000071, character: null, timeStamp: Duration.zero, synthesized: false, ).toString(), 'KeyData(Key Repeat, physical: 0x700e7, logical: 0x9900000071, character: <none>)'); }); test('Single key press, repeat, and release', () { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); // Only handle down events return key.type == ui.KeyEventType.down; }, OperatingSystem.linux); converter.handleEvent(keyDownEvent('KeyA', 'a')..timeStamp = 1); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 1), type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); converter.handleEvent(keyRepeatedDownEvent('KeyA', 'a')..timeStamp = 1.5); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 1, microseconds: 500), type: ui.KeyEventType.repeat, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); expect(MockKeyboardEvent.lastDefaultPrevented, isFalse); converter.handleEvent(keyRepeatedDownEvent('KeyA', 'a')..timeStamp = 1500); expectKeyData(keyDataList.last, timeStamp: const Duration(seconds: 1, milliseconds: 500), type: ui.KeyEventType.repeat, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); expect(MockKeyboardEvent.lastDefaultPrevented, isFalse); converter.handleEvent(keyUpEvent('KeyA', 'a')..timeStamp = 2000.5); expectKeyData(keyDataList.last, timeStamp: const Duration(seconds: 2, microseconds: 500), type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: null, ); expect(MockKeyboardEvent.lastDefaultPrevented, isFalse); }); test('Special cases', () { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); // Only handle down events return key.type == ui.KeyEventType.down; }, OperatingSystem.windows); // en-in.win, with AltGr converter.handleEvent(keyDownEvent('KeyL', 'l̥', kCtrl | kAlt)..timeStamp = 1); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 1), type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyL, logical: kLogicalKeyL, character: 'l̥', ); }); test('Release modifier during a repeated sequence', () { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); // Only handle down events return key.type == ui.KeyEventType.down; }, OperatingSystem.linux); 
converter.handleEvent(keyDownEvent('ShiftLeft', 'Shift', kShift, kLocationLeft)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftLeft, logical: kLogicalShiftLeft, character: null, ); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); converter.handleEvent(keyDownEvent('KeyA', 'A', kShift)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'A', ); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); converter.handleEvent(keyRepeatedDownEvent('KeyA', 'A', kShift)); expectKeyData(keyDataList.last, type: ui.KeyEventType.repeat, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'A', ); expect(MockKeyboardEvent.lastDefaultPrevented, isFalse); converter.handleEvent(keyUpEvent('ShiftLeft', 'Shift', 0, kLocationLeft)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftLeft, logical: kLogicalShiftLeft, character: null, ); expect(MockKeyboardEvent.lastDefaultPrevented, isFalse); converter.handleEvent(keyRepeatedDownEvent('KeyA', 'a')); expectKeyData(keyDataList.last, type: ui.KeyEventType.repeat, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); expect(MockKeyboardEvent.lastDefaultPrevented, isFalse); converter.handleEvent(keyRepeatedDownEvent('KeyA', 'a')); expectKeyData(keyDataList.last, type: ui.KeyEventType.repeat, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); expect(MockKeyboardEvent.lastDefaultPrevented, isFalse); converter.handleEvent(keyUpEvent('KeyA', 'a')); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: null, ); expect(MockKeyboardEvent.lastDefaultPrevented, isFalse); }); test('Distinguish between left and right modifiers', () { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); converter.handleEvent(keyDownEvent('ShiftLeft', 'Shift', kShift, kLocationLeft)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftLeft, logical: kLogicalShiftLeft, character: null, ); converter.handleEvent(keyDownEvent('ShiftRight', 'Shift', kShift, kLocationRight)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftRight, logical: kLogicalShiftRight, character: null, ); converter.handleEvent(keyUpEvent('ShiftLeft', 'Shift', kShift, kLocationLeft)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftLeft, logical: kLogicalShiftLeft, character: null, ); converter.handleEvent(keyUpEvent('ShiftRight', 'Shift', 0, kLocationRight)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftRight, logical: kLogicalShiftRight, character: null, ); }); test('Treat modifiers at standard locations as if at left', () { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData 
key) { keyDataList.add(key); return true; }, OperatingSystem.linux); converter.handleEvent(keyDownEvent('', 'Shift', kShift)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalEmptyCode, logical: kLogicalShiftLeft, character: null, ); converter.handleEvent(keyUpEvent('', 'Shift', kShift)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalEmptyCode, logical: kLogicalShiftLeft, character: null, ); converter.handleEvent(keyDownEvent('', 'Control', kCtrl)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalEmptyCode, logical: kLogicalCtrlLeft, character: null, ); converter.handleEvent(keyUpEvent('', 'Control', kCtrl)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalEmptyCode, logical: kLogicalCtrlLeft, character: null, ); converter.handleEvent(keyDownEvent('', 'Alt', kAlt)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalEmptyCode, logical: kLogicalAltLeft, character: null, ); converter.handleEvent(keyUpEvent('', 'Alt', kAlt)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalEmptyCode, logical: kLogicalAltLeft, character: null, ); converter.handleEvent(keyDownEvent('', 'Meta', kMeta)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalEmptyCode, logical: kLogicalMetaLeft, character: null, ); converter.handleEvent(keyUpEvent('', 'Meta', kMeta)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalEmptyCode, logical: kLogicalMetaLeft, character: null, ); }); test('Distinguish between normal and numpad digits', () { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); converter.handleEvent(keyDownEvent('Digit1', '1')); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalDigit1, logical: kLogicalDigit1, character: '1', ); converter.handleEvent(keyDownEvent('Numpad1', '1', 0, kLocationNumpad)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalNumpad1, logical: kLogicalNumpad1, character: '1', ); converter.handleEvent(keyUpEvent('Digit1', '1')); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalDigit1, logical: kLogicalDigit1, character: null, ); converter.handleEvent(keyUpEvent('Numpad1', '1', 0, kLocationNumpad)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalNumpad1, logical: kLogicalNumpad1, character: null, ); }); test('Dead keys are distinguishable', () { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); // The absolute values of the following logical keys are not guaranteed. 
const int kLogicalAltE = 0x1740070008; const int kLogicalAltU = 0x1740070018; const int kLogicalAltShiftE = 0x1760070008; // The values must be distinguishable. expect(kLogicalAltE, isNot(equals(kLogicalAltU))); expect(kLogicalAltE, isNot(equals(kLogicalAltShiftE))); converter.handleEvent(keyDownEvent('AltLeft', 'Alt', kAlt, kLocationLeft)); converter.handleEvent(keyDownEvent('KeyE', 'Dead', kAlt)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyE, logical: kLogicalAltE, character: null, ); converter.handleEvent(keyUpEvent('KeyE', 'Dead', kAlt)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyE, logical: kLogicalAltE, character: null, ); converter.handleEvent(keyDownEvent('KeyU', 'Dead', kAlt)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyU, logical: kLogicalAltU, character: null, ); converter.handleEvent(keyUpEvent('KeyU', 'Dead', kAlt)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyU, logical: kLogicalAltU, character: null, ); converter.handleEvent(keyDownEvent('ShiftLeft', 'Shift', kAlt | kShift, kLocationLeft)); // This does not actually produce a Dead key on macOS (US layout); just for // testing. converter.handleEvent(keyDownEvent('KeyE', 'Dead', kAlt | kShift)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyE, logical: kLogicalAltShiftE, character: null, ); converter.handleEvent(keyUpEvent('AltLeft', 'Alt', kShift, kLocationLeft)); converter.handleEvent(keyUpEvent('KeyE', 'e', kShift)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyE, logical: kLogicalAltShiftE, character: null, ); converter.handleEvent(keyUpEvent('ShiftLeft', 'Shift', 0, kLocationLeft)); }); test('Duplicate down is preceded with synthesized up', () { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); converter.handleEvent(keyDownEvent('ShiftLeft', 'Shift', kShift, kLocationLeft)); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); // A KeyUp of ShiftLeft is missed. 
keyDataList.clear(); converter.handleEvent(keyDownEvent('ShiftLeft', 'Shift', kShift, kLocationLeft)); expect(keyDataList, hasLength(2)); expectKeyData(keyDataList.first, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftLeft, logical: kLogicalShiftLeft, character: null, synthesized: true, ); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftLeft, logical: kLogicalShiftLeft, character: null, ); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); keyDataList.clear(); converter.handleEvent(keyUpEvent('ShiftLeft', 'Shift', 0, kLocationLeft)); expect(keyDataList, hasLength(1)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftLeft, logical: kLogicalShiftLeft, character: null, ); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); }); test('Duplicate down is preceded with synthesized up using registered logical key', () { // Regression test for https://github.com/flutter/flutter/issues/126247. final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); // This test simulates the use of 'BracketLeft' on a french keyboard, see: // https://github.com/flutter/flutter/issues/126247#issuecomment-1856112566. converter.handleEvent(keyDownEvent('BracketLeft', 'Dead')); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); expectKeyData(keyDataList.first, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalBracketLeft, logical: kLogicalBracketLeft, character: null, ); // A KeyUp of BracketLeft is missed. keyDataList.clear(); converter.handleEvent(keyDownEvent('BracketLeft', 'Process')); expect(keyDataList, hasLength(2)); expectKeyData(keyDataList.first, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalBracketLeft, logical: kLogicalBracketLeft, character: null, synthesized: true, ); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalBracketLeft, logical: kLogicalProcess, character: null, ); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); }); test('Duplicate ups are skipped', () { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); // A KeyDown of ShiftRight is missed due to loss of focus. 
converter.handleEvent(keyUpEvent('ShiftRight', 'Shift', 0, kLocationRight)); expect(keyDataList, hasLength(1)); expect(keyDataList[0].physical, 0); expect(keyDataList[0].logical, 0); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); }); test('Conflict from multiple keyboards do not crash', () { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); // Same layout converter.handleEvent(keyDownEvent('KeyA', 'a')); converter.handleEvent(keyDownEvent('KeyA', 'a')); converter.handleEvent(keyUpEvent('KeyA', 'a')); converter.handleEvent(keyUpEvent('KeyA', 'a')); // Different layout converter.handleEvent(keyDownEvent('KeyA', 'a')); converter.handleEvent(keyDownEvent('KeyA', 'u')); converter.handleEvent(keyUpEvent('KeyA', 'u')); converter.handleEvent(keyUpEvent('KeyA', 'a')); // Passes if there's no crash, and states are reset after everything is released. keyDataList.clear(); converter.handleEvent(keyDownEvent('KeyA', 'a')); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); converter.handleEvent(keyDownEvent('KeyU', 'u')); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyU, logical: kLogicalKeyU, character: 'u', ); }); for (final OperatingSystem system in <OperatingSystem>[OperatingSystem.macOs, OperatingSystem.iOs]) { testFakeAsync('CapsLock down synthesizes an immediate cancel on $system', (FakeAsync async) { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, system); // A KeyDown of ShiftRight is missed due to loss of focus. 
converter.handleEvent(keyDownEvent('CapsLock', 'CapsLock')); expect(keyDataList, hasLength(1)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalCapsLock, logical: kLogicalCapsLock, character: null, ); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); keyDataList.clear(); async.elapse(const Duration(microseconds: 1)); expect(keyDataList, hasLength(1)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalCapsLock, logical: kLogicalCapsLock, character: null, synthesized: true, ); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); keyDataList.clear(); converter.handleEvent(keyUpEvent('CapsLock', 'CapsLock')); expect(keyDataList, hasLength(1)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalCapsLock, logical: kLogicalCapsLock, character: null, ); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); keyDataList.clear(); async.elapse(const Duration(microseconds: 1)); expect(keyDataList, hasLength(1)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalCapsLock, logical: kLogicalCapsLock, character: null, synthesized: true, ); expect(MockKeyboardEvent.lastDefaultPrevented, isTrue); keyDataList.clear(); // Another key down works converter.handleEvent(keyDownEvent('CapsLock', 'CapsLock')); expect(keyDataList, hasLength(1)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalCapsLock, logical: kLogicalCapsLock, character: null, ); keyDataList.clear(); // Schedules are canceled after disposal converter.dispose(); async.elapse(const Duration(seconds: 10)); expect(keyDataList, isEmpty); }); } testFakeAsync('CapsLock behaves normally on non-macOS', (FakeAsync async) { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); converter.handleEvent(keyDownEvent('CapsLock', 'CapsLock')); expect(keyDataList, hasLength(1)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalCapsLock, logical: kLogicalCapsLock, character: null, ); keyDataList.clear(); async.elapse(const Duration(seconds: 10)); expect(keyDataList, isEmpty); converter.handleEvent(keyUpEvent('CapsLock', 'CapsLock')); expect(keyDataList, hasLength(1)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalCapsLock, logical: kLogicalCapsLock, character: null, ); keyDataList.clear(); async.elapse(const Duration(seconds: 10)); expect(keyDataList, isEmpty); converter.handleEvent(keyDownEvent('CapsLock', 'CapsLock')); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalCapsLock, logical: kLogicalCapsLock, character: null, ); converter.handleEvent(keyUpEvent('CapsLock', 'CapsLock')); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalCapsLock, logical: kLogicalCapsLock, character: null, ); }); for (final OperatingSystem system in <OperatingSystem>[OperatingSystem.macOs, OperatingSystem.iOs]) { testFakeAsync('Key guards: key down events are guarded on $system', (FakeAsync async) { 
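// --- Editorial note (hedged; summarizes what the assertions below check,
// not the converter internals). On macOS and iOS the browser may never
// deliver the keyup of a letter key pressed as part of a Meta shortcut, so
// the converter "guards" such keydowns: if no real keyup arrives, a
// synthesized up event is emitted about two seconds later (timeStamp 2200
// for the keydown at 200 below). Later tests show the guard being refreshed
// by repeat events and canceled by a real keyup.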
final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, system); converter.handleEvent(keyDownEvent('MetaLeft', 'Meta', kMeta, kLocationLeft)..timeStamp = 100); async.elapse(const Duration(milliseconds: 100)); converter.handleEvent(keyDownEvent('KeyA', 'a', kMeta)..timeStamp = 200); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 200), type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); keyDataList.clear(); // Key Up of KeyA is omitted due to being a shortcut. async.elapse(const Duration(milliseconds: 2500)); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 2200), type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: null, synthesized: true, ); keyDataList.clear(); converter.handleEvent(keyUpEvent('MetaLeft', 'Meta', 0, kLocationLeft)..timeStamp = 2700); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 2700), type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalMetaLeft, logical: kLogicalMetaLeft, character: null, ); async.elapse(const Duration(milliseconds: 100)); // Key A states are cleared converter.handleEvent(keyDownEvent('KeyA', 'a')..timeStamp = 2800); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 2800), type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); async.elapse(const Duration(milliseconds: 100)); converter.handleEvent(keyUpEvent('KeyA', 'a')..timeStamp = 2900); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 2900), type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: null, ); }); } testFakeAsync('Key guards: key repeated down events refreshes guards', (FakeAsync async) { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.macOs); converter.handleEvent(keyDownEvent('MetaLeft', 'Meta', kMeta, kLocationLeft)..timeStamp = 100); async.elapse(const Duration(milliseconds: 100)); converter.handleEvent(keyDownEvent('KeyA', 'a', kMeta)..timeStamp = 200); async.elapse(const Duration(milliseconds: 400)); converter.handleEvent(keyRepeatedDownEvent('KeyA', 'a', kMeta)..timeStamp = 600); async.elapse(const Duration(milliseconds: 50)); converter.handleEvent(keyRepeatedDownEvent('KeyA', 'a', kMeta)..timeStamp = 650); async.elapse(const Duration(milliseconds: 50)); converter.handleEvent(keyRepeatedDownEvent('KeyA', 'a', kMeta)..timeStamp = 700); // Key Up of KeyA is omitted due to being a shortcut. 
async.elapse(const Duration(milliseconds: 2000)); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 2700), type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: null, synthesized: true, ); keyDataList.clear(); converter.handleEvent(keyUpEvent('MetaLeft', 'Meta', 0, kLocationLeft)..timeStamp = 3200); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 3200), type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalMetaLeft, logical: kLogicalMetaLeft, character: null, ); async.elapse(const Duration(milliseconds: 100)); // Key A states are cleared converter.handleEvent(keyDownEvent('KeyA', 'a')..timeStamp = 3300); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 3300), type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); async.elapse(const Duration(milliseconds: 100)); converter.handleEvent(keyUpEvent('KeyA', 'a')..timeStamp = 3400); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 3400), type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: null, ); }); testFakeAsync('Key guards: cleared by keyups', (FakeAsync async) { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.macOs); converter.handleEvent(keyDownEvent('MetaLeft', 'Meta', kMeta, kLocationLeft)..timeStamp = 100); async.elapse(const Duration(milliseconds: 100)); converter.handleEvent(keyDownEvent('KeyA', 'a', kCtrl)..timeStamp = 200); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 200), type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); keyDataList.clear(); async.elapse(const Duration(milliseconds: 500)); converter.handleEvent(keyUpEvent('MetaLeft', 'Meta', 0, kLocationLeft)..timeStamp = 700); async.elapse(const Duration(milliseconds: 100)); converter.handleEvent(keyUpEvent('KeyA', 'a')..timeStamp = 800); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 800), type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: null, ); keyDataList.clear(); async.elapse(const Duration(milliseconds: 2000)); expect(keyDataList, isEmpty); // Key A states are cleared converter.handleEvent(keyDownEvent('KeyA', 'a')..timeStamp = 2800); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 2800), type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); async.elapse(const Duration(milliseconds: 100)); converter.handleEvent(keyUpEvent('KeyA', 'a')..timeStamp = 2900); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 2900), type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: null, ); }); testFakeAsync('Key guards: key down events are not guarded on non-macOS', (FakeAsync async) { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); 
converter.handleEvent(keyDownEvent('MetaLeft', 'Meta', kMeta, kLocationLeft)..timeStamp = 100); async.elapse(const Duration(milliseconds: 100)); converter.handleEvent(keyDownEvent('KeyA', 'a', kMeta)..timeStamp = 200); expectKeyData(keyDataList.last, timeStamp: const Duration(milliseconds: 200), type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); keyDataList.clear(); async.elapse(const Duration(milliseconds: 2500)); expect(keyDataList, isEmpty); }); testFakeAsync('Lock flags of other keys', (FakeAsync async) { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); converter.handleEvent(keyDownEvent('ScrollLock', 'ScrollLock')); expect(keyDataList, hasLength(1)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalScrollLock, logical: kLogicalScrollLock, character: null, ); keyDataList.clear(); async.elapse(const Duration(seconds: 10)); expect(keyDataList, isEmpty); converter.handleEvent(keyUpEvent('ScrollLock', 'ScrollLock')); expect(keyDataList, hasLength(1)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalScrollLock, logical: kLogicalScrollLock, character: null, ); keyDataList.clear(); converter.handleEvent(keyDownEvent('ScrollLock', 'ScrollLock')); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalScrollLock, logical: kLogicalScrollLock, character: null, ); converter.handleEvent(keyUpEvent('ScrollLock', 'ScrollLock')); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalScrollLock, logical: kLogicalScrollLock, character: null, ); }); test('Deduce modifier key up from modifier field', () { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); converter.handleEvent(keyDownEvent('ShiftRight', 'Shift', kShift, kLocationRight)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftRight, logical: kLogicalShiftRight, character: null, ); converter.handleEvent(keyDownEvent('ShiftLeft', 'Shift', kShift, kLocationLeft)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftLeft, logical: kLogicalShiftLeft, character: null, ); keyDataList.clear(); // The release of the shift keys are omitted converter.handleEvent(keyDownEvent('KeyA', 'a')); expect(keyDataList, hasLength(3)); expectKeyData(keyDataList[0], type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftLeft, logical: kLogicalShiftLeft, character: null, synthesized: true, ); expectKeyData(keyDataList[1], type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftRight, logical: kLogicalShiftRight, character: null, synthesized: true, ); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalKeyA, logical: kLogicalKeyA, character: 'a', ); }); // Regression test for https://github.com/flutter/flutter/issues/99297. 
// // On Linux Chrome, when holding ShiftLeft and pressing MetaLeft (Win key), // the MetaLeft down event has metaKey true, while the Meta up event has // metaKey false. This violates the definition of metaKey, and does not happen // in nearly any other cases for any other keys. test('Ignore inconsistent modifier flag of the current modifier', () { final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); converter.handleEvent(keyDownEvent('ShiftLeft', 'Shift', kShift, kLocationLeft)); expectKeyData(keyDataList.last, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftLeft, logical: kLogicalShiftLeft, character: null, ); keyDataList.clear(); converter.handleEvent(keyDownEvent('MetaLeft', 'Meta', kShift /* No kMeta here! */, kLocationLeft)); // Only a MetaLeft down event, no synthesized MetaLeft up events. expect(keyDataList, hasLength(1)); expectKeyData(keyDataList.first, type: ui.KeyEventType.down, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalMetaLeft, logical: kLogicalMetaLeft, character: null, ); keyDataList.clear(); converter.handleEvent(keyUpEvent('MetaLeft', 'Meta', kShift | kMeta /* Yes, kMeta here! */, kLocationLeft)); // Only a MetaLeft down event, no synthesized MetaLeft up events. expect(keyDataList, hasLength(1)); expectKeyData(keyDataList.first, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalMetaLeft, logical: kLogicalMetaLeft, character: null, ); keyDataList.clear(); converter.handleEvent(keyUpEvent('ShiftLeft', 'Shift', 0, kLocationLeft)); expectKeyData(keyDataList.last, type: ui.KeyEventType.up, deviceType: ui.KeyEventDeviceType.keyboard, physical: kPhysicalShiftLeft, logical: kLogicalShiftLeft, character: null, ); keyDataList.clear(); }); test('Ignore DOM event when event.key is null', () { // Regression test for https://github.com/flutter/flutter/issues/114620. final List<ui.KeyData> keyDataList = <ui.KeyData>[]; final KeyboardConverter converter = KeyboardConverter((ui.KeyData key) { keyDataList.add(key); return true; }, OperatingSystem.linux); converter.handleEvent(keyDownEvent(null, null)); converter.handleEvent(keyUpEvent(null, null)); // Invalid key events are ignored. expect(keyDataList, isEmpty); }); } // Flags used for the `modifiers` argument of `key***Event` functions. const int kAlt = 0x1; const int kCtrl = 0x2; const int kShift = 0x4; const int kMeta = 0x8; // Utility functions to make code more concise. // // To add timeStamp , use syntax `..timeStamp = `. MockKeyboardEvent keyDownEvent(String? code, String? key, [int modifiers = 0, int location = 0]) { return MockKeyboardEvent( type: 'keydown', code: code, key: key, altKey: modifiers & kAlt != 0, ctrlKey: modifiers & kCtrl != 0, shiftKey: modifiers & kShift != 0, metaKey: modifiers & kMeta != 0, location: location, ); } MockKeyboardEvent keyUpEvent(String? code, String? 
key, [int modifiers = 0, int location = 0]) { return MockKeyboardEvent( type: 'keyup', code: code, key: key, altKey: modifiers & kAlt != 0, ctrlKey: modifiers & kCtrl != 0, shiftKey: modifiers & kShift != 0, metaKey: modifiers & kMeta != 0, location: location, ); } MockKeyboardEvent keyRepeatedDownEvent(String code, String key, [int modifiers = 0, int location = 0]) { return MockKeyboardEvent( type: 'keydown', code: code, key: key, altKey: modifiers & kAlt != 0, ctrlKey: modifiers & kCtrl != 0, shiftKey: modifiers & kShift != 0, metaKey: modifiers & kMeta != 0, repeat: true, location: location, ); } void expectKeyData( ui.KeyData target, { required ui.KeyEventType type, required ui.KeyEventDeviceType deviceType, required int physical, required int logical, required String? character, Duration? timeStamp, bool synthesized = false, }) { expect(target.type, type); expect(target.physical, physical); expect(target.logical, logical); expect(target.character, character); expect(target.synthesized, synthesized); if (timeStamp != null) { expect(target.timeStamp, equals(timeStamp)); } } typedef FakeAsyncTest = void Function(FakeAsync); @isTest void testFakeAsync(String description, FakeAsyncTest fn) { test(description, () { FakeAsync().run(fn); }); }
engine/lib/web_ui/test/engine/keyboard_converter_test.dart/0
{ "file_path": "engine/lib/web_ui/test/engine/keyboard_converter_test.dart", "repo_id": "engine", "token_count": 16688 }
290
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:js_interop'; import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import 'package:ui/ui_web/src/ui_web.dart' as ui_web; import '../common/spy.dart'; @JS('window._flutter_internal_on_benchmark') external set jsBenchmarkValueCallback(JSAny? object); void main() { internalBootstrapBrowserTest(() => testMain); } void testMain() { group('$Profiler', () { _profilerTests(); }); group('$Instrumentation', () { _instrumentationTests(); }); } void _profilerTests() { final List<String> warnings = <String>[]; late void Function(String) oldPrintWarning; setUpAll(() { oldPrintWarning = printWarning; printWarning = (String warning) { warnings.add(warning); }; }); setUp(() { warnings.clear(); Profiler.isBenchmarkMode = true; Profiler.ensureInitialized(); }); tearDownAll(() { printWarning = oldPrintWarning; }); tearDown(() { jsBenchmarkValueCallback = null; ui_web.benchmarkValueCallback = null; Profiler.isBenchmarkMode = false; }); test('works when there is no listener', () { expect(() => Profiler.instance.benchmark('foo', 123), returnsNormally); }); test('can listen to benchmarks', () { final List<BenchmarkDatapoint> data = <BenchmarkDatapoint>[]; ui_web.benchmarkValueCallback = (String name, double value) { data.add((name, value)); }; Profiler.instance.benchmark('foo', 123); expect(data, <BenchmarkDatapoint>[('foo', 123)]); data.clear(); Profiler.instance.benchmark('bar', 0.0125); expect(data, <BenchmarkDatapoint>[('bar', 0.0125)]); data.clear(); // Remove listener and make sure nothing breaks and the data isn't being // sent to the old callback anymore. ui_web.benchmarkValueCallback = null; expect(() => Profiler.instance.benchmark('baz', 99.999), returnsNormally); expect(data, isEmpty); }); // TODO(mdebbar): Remove this group once the JS API is removed. // https://github.com/flutter/flutter/issues/127395 group('[JS API]', () { test('can listen to benchmarks', () { final List<BenchmarkDatapoint> data = <BenchmarkDatapoint>[]; jsBenchmarkValueCallback = (String name, double value) { data.add((name, value)); }.toJS; Profiler.instance.benchmark('foo', 123); expect(warnings, hasLength(1)); expect(warnings.single, contains('deprecated')); expect(warnings.single, contains('benchmarkValueCallback')); expect(warnings.single, contains('dart:ui_web')); warnings.clear(); expect(data, <BenchmarkDatapoint>[('foo', 123)]); data.clear(); Profiler.instance.benchmark('bar', 0.0125); expect(data, <BenchmarkDatapoint>[('bar', 0.0125)]); data.clear(); // Remove listener and make sure nothing breaks and the data isn't being // sent to the old callback anymore. jsBenchmarkValueCallback = null; expect(() => Profiler.instance.benchmark('baz', 99.999), returnsNormally); expect(data, isEmpty); }); test('throws on wrong listener type', () { final List<BenchmarkDatapoint> data = <BenchmarkDatapoint>[]; // Wrong callback signature. jsBenchmarkValueCallback = (double value) { data.add(('bad', value)); }.toJS; expect( () => Profiler.instance.benchmark('foo', 123), // dart2js throws a NoSuchMethodError, dart2wasm throws a TypeError here. // Just make sure it throws an error in this case. throwsA(isA<Error>()), ); expect(data, isEmpty); // Not even a callback. 
jsBenchmarkValueCallback = 'string'.toJS; expect( () => Profiler.instance.benchmark('foo', 123), // dart2js throws a TypeError, while dart2wasm throws an explicit // exception. throwsA(anything), ); }); test('can be combined with ui_web API', () { final List<BenchmarkDatapoint> uiWebData = <BenchmarkDatapoint>[]; final List<BenchmarkDatapoint> jsData = <BenchmarkDatapoint>[]; ui_web.benchmarkValueCallback = (String name, double value) { uiWebData.add((name, value)); }; jsBenchmarkValueCallback = (String name, double value) { jsData.add((name, value)); }.toJS; Profiler.instance.benchmark('foo', 123); expect(warnings, hasLength(1)); expect(warnings.single, contains('deprecated')); expect(warnings.single, contains('benchmarkValueCallback')); expect(warnings.single, contains('dart:ui_web')); warnings.clear(); expect(uiWebData, <BenchmarkDatapoint>[('foo', 123)]); expect(jsData, <BenchmarkDatapoint>[('foo', 123)]); uiWebData.clear(); jsData.clear(); Profiler.instance.benchmark('bar', 0.0125); expect(uiWebData, <BenchmarkDatapoint>[('bar', 0.0125)]); expect(jsData, <BenchmarkDatapoint>[('bar', 0.0125)]); uiWebData.clear(); jsData.clear(); ui_web.benchmarkValueCallback = null; jsBenchmarkValueCallback = null; expect(() => Profiler.instance.benchmark('baz', 99.999), returnsNormally); expect(uiWebData, isEmpty); expect(jsData, isEmpty); }); }); } void _instrumentationTests() { setUp(() { Instrumentation.enabled = false; }); tearDown(() { Instrumentation.enabled = false; }); test('when disabled throws instead of initializing', () { expect(() => Instrumentation.instance, throwsStateError); }); test('when disabled throws instead of incrementing counter', () { Instrumentation.enabled = true; final Instrumentation instrumentation = Instrumentation.instance; Instrumentation.enabled = false; expect(() => instrumentation.incrementCounter('test'), throwsStateError); }); test('when enabled increments counter', () { final ZoneSpy spy = ZoneSpy(); spy.run(() { Instrumentation.enabled = true; final Instrumentation instrumentation = Instrumentation.instance; expect(instrumentation.debugPrintTimer, isNull); instrumentation.incrementCounter('foo'); expect(instrumentation.debugPrintTimer, isNotNull); instrumentation.incrementCounter('foo'); instrumentation.incrementCounter('bar'); expect(spy.printLog, isEmpty); expect(instrumentation.debugPrintTimer, isNotNull); spy.fakeAsync.elapse(const Duration(seconds: 2)); expect(instrumentation.debugPrintTimer, isNull); expect(spy.printLog, hasLength(1)); expect( spy.printLog.single, 'Engine counters:\n' ' bar: 1\n' ' foo: 2\n', ); }); }); } typedef BenchmarkDatapoint = (String, double);
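// --- Editorial sketch (appended by the editor, not part of the original
// test). A minimal example of the pattern exercised above: in benchmark mode
// an embedder can collect engine benchmark values by assigning
// ui_web.benchmarkValueCallback, and unsubscribe by setting it back to null.
// The function name and the sink parameter are hypothetical.
void collectEngineBenchmarks(List<BenchmarkDatapoint> sink) {
  ui_web.benchmarkValueCallback = (String name, double value) {
    // Each datapoint is a (name, value) record, matching the typedef above.
    sink.add((name, value));
  };
}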
engine/lib/web_ui/test/engine/profiler_test.dart/0
{ "file_path": "engine/lib/web_ui/test/engine/profiler_test.dart", "repo_id": "engine", "token_count": 2561 }
291
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. @TestOn('chrome || safari || firefox') library; import 'dart:typed_data'; import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart' hide window; import 'package:ui/ui.dart' as ui; import '../../common/test_initialization.dart'; import 'semantics_tester.dart'; final InputConfiguration singlelineConfig = InputConfiguration(); final InputConfiguration multilineConfig = InputConfiguration( inputType: EngineInputType.multiline, inputAction: 'TextInputAction.newline', ); EngineSemantics semantics() => EngineSemantics.instance; EngineSemanticsOwner owner() => EnginePlatformDispatcher.instance.implicitView!.semantics; const MethodCodec codec = JSONMethodCodec(); DateTime _testTime = DateTime(2021, 4, 16); void main() { internalBootstrapBrowserTest(() => testMain); } void testMain() { setUpAll(() async { await bootstrapAndRunApp(withImplicitView: true); }); setUp(() { EngineSemantics.debugResetSemantics(); }); group('$SemanticsTextEditingStrategy pre-initialization tests', () { setUp(() { semantics() ..debugOverrideTimestampFunction(() => _testTime) ..semanticsEnabled = true; }); tearDown(() { semantics().semanticsEnabled = false; }); test('Calling dispose() pre-initialization will not throw an error', () { final SemanticsObject textFieldSemantics = createTextFieldSemantics( value: 'hi', isFocused: true, ); final TextField textField = textFieldSemantics.primaryRole! as TextField; // ensureInitialized() isn't called prior to calling dispose() here. // Since we are conditionally calling dispose() on our // SemanticsTextEditingStrategy._instance, we shouldn't expect an error. // ref: https://github.com/flutter/engine/pull/40146 expect(() => textField.dispose(), returnsNormally); }); }); group('$SemanticsTextEditingStrategy', () { late HybridTextEditing testTextEditing; late SemanticsTextEditingStrategy strategy; setUp(() { testTextEditing = HybridTextEditing(); SemanticsTextEditingStrategy.ensureInitialized(testTextEditing); strategy = SemanticsTextEditingStrategy.instance; testTextEditing.debugTextEditingStrategyOverride = strategy; testTextEditing.configuration = singlelineConfig; semantics() ..debugOverrideTimestampFunction(() => _testTime) ..semanticsEnabled = true; }); tearDown(() { semantics().semanticsEnabled = false; }); test('renders a text field', () { createTextFieldSemantics(value: 'hello'); expectSemanticsTree(owner(), ''' <sem style="$rootSemanticStyle"> <input value="hello" /> </sem>'''); }); // TODO(yjbanov): this test will need to be adjusted for Safari when we add // Safari testing. 
test('sends a didGainAccessibilityFocus/didLoseAccessibilityFocus action when browser requests focus/blur', () async { final SemanticsActionLogger logger = SemanticsActionLogger(); createTextFieldSemantics(value: 'hello'); final DomElement textField = owner().semanticsHost .querySelector('input[data-semantics-role="text-field"]')!; expect(owner().semanticsHost.ownerDocument?.activeElement, isNot(textField)); textField.focus(); expect(owner().semanticsHost.ownerDocument?.activeElement, textField); expect(await logger.idLog.first, 0); expect(await logger.actionLog.first, ui.SemanticsAction.didGainAccessibilityFocus); textField.blur(); expect(owner().semanticsHost.ownerDocument?.activeElement, isNot(textField)); expect(await logger.idLog.first, 0); expect(await logger.actionLog.first, ui.SemanticsAction.didLoseAccessibilityFocus); }, // TODO(yjbanov): https://github.com/flutter/flutter/issues/46638 // TODO(yjbanov): https://github.com/flutter/flutter/issues/50590 skip: browserEngine != BrowserEngine.blink); test('Syncs semantic state from framework', () { expect(owner().semanticsHost.ownerDocument?.activeElement, domDocument.body); int changeCount = 0; int actionCount = 0; strategy.enable( singlelineConfig, onChange: (_, __) { changeCount++; }, onAction: (_) { actionCount++; }, ); // Create final SemanticsObject textFieldSemantics = createTextFieldSemantics( value: 'hello', label: 'greeting', isFocused: true, rect: const ui.Rect.fromLTWH(0, 0, 10, 15), ); final TextField textField = textFieldSemantics.primaryRole! as TextField; expect(owner().semanticsHost.ownerDocument?.activeElement, strategy.domElement); expect(textField.editableElement, strategy.domElement); expect(textField.activeEditableElement.getAttribute('aria-label'), 'greeting'); expect(textField.activeEditableElement.style.width, '10px'); expect(textField.activeEditableElement.style.height, '15px'); // Update createTextFieldSemantics( value: 'bye', label: 'farewell', rect: const ui.Rect.fromLTWH(0, 0, 12, 17), ); expect(owner().semanticsHost.ownerDocument?.activeElement, domDocument.body); expect(strategy.domElement, null); expect(textField.activeEditableElement.getAttribute('aria-label'), 'farewell'); expect(textField.activeEditableElement.style.width, '12px'); expect(textField.activeEditableElement.style.height, '17px'); strategy.disable(); // There was no user interaction with the <input> element, // so we should expect no engine-to-framework feedback. expect(changeCount, 0); expect(actionCount, 0); }); test( 'Does not overwrite text value and selection editing state on semantic updates', () { strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); final SemanticsObject textFieldSemantics = createTextFieldSemantics( value: 'hello', textSelectionBase: 1, textSelectionExtent: 3, isFocused: true, rect: const ui.Rect.fromLTWH(0, 0, 10, 15)); final TextField textField = textFieldSemantics.primaryRole! 
as TextField; final DomHTMLInputElement editableElement = textField.activeEditableElement as DomHTMLInputElement; expect(editableElement, strategy.domElement); expect(editableElement.value, ''); expect(editableElement.selectionStart, 0); expect(editableElement.selectionEnd, 0); strategy.disable(); }); test( 'Updates editing state when receiving framework messages from the text input channel', () { expect(owner().semanticsHost.ownerDocument?.activeElement, domDocument.body); strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); final SemanticsObject textFieldSemantics = createTextFieldSemantics( value: 'hello', textSelectionBase: 1, textSelectionExtent: 3, isFocused: true, rect: const ui.Rect.fromLTWH(0, 0, 10, 15)); final TextField textField = textFieldSemantics.primaryRole! as TextField; final DomHTMLInputElement editableElement = textField.activeEditableElement as DomHTMLInputElement; // No updates expected on semantic updates expect(editableElement, strategy.domElement); expect(editableElement.value, ''); expect(editableElement.selectionStart, 0); expect(editableElement.selectionEnd, 0); // Update from framework const MethodCall setEditingState = MethodCall('TextInput.setEditingState', <String, dynamic>{ 'text': 'updated', 'selectionBase': 2, 'selectionExtent': 3, }); sendFrameworkMessage(codec.encodeMethodCall(setEditingState), testTextEditing); // Editing state should now be updated expect(editableElement.value, 'updated'); expect(editableElement.selectionStart, 2); expect(editableElement.selectionEnd, 3); strategy.disable(); }); test('Gives up focus after DOM blur', () { expect(owner().semanticsHost.ownerDocument?.activeElement, domDocument.body); strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); final SemanticsObject textFieldSemantics = createTextFieldSemantics( value: 'hello', isFocused: true, ); final TextField textField = textFieldSemantics.primaryRole! as TextField; expect(textField.editableElement, strategy.domElement); expect(owner().semanticsHost.ownerDocument?.activeElement, strategy.domElement); // The input should not refocus after blur. textField.activeEditableElement.blur(); expect(owner().semanticsHost.ownerDocument?.activeElement, domDocument.body); strategy.disable(); }); test('Does not dispose and recreate dom elements in persistent mode', () { strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); // It doesn't create a new DOM element. expect(strategy.domElement, isNull); // During the semantics update the DOM element is created and is focused on. final SemanticsObject textFieldSemantics = createTextFieldSemantics( value: 'hello', isFocused: true, ); expect(strategy.domElement, isNotNull); expect(owner().semanticsHost.ownerDocument?.activeElement, strategy.domElement); strategy.disable(); expect(strategy.domElement, isNull); // It doesn't remove the DOM element. final TextField textField = textFieldSemantics.primaryRole! as TextField; expect(owner().semanticsHost.contains(textField.editableElement), isTrue); // Editing element is not enabled. 
expect(strategy.isEnabled, isFalse); expect(owner().semanticsHost.ownerDocument?.activeElement, domDocument.body); }); test('Refocuses when setting editing state', () { strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); createTextFieldSemantics( value: 'hello', isFocused: true, ); expect(strategy.domElement, isNotNull); expect(owner().semanticsHost.ownerDocument?.activeElement, strategy.domElement); // Blur the element without telling the framework. strategy.activeDomElement.blur(); expect(owner().semanticsHost.ownerDocument?.activeElement, domDocument.body); // The input will have focus after editing state is set and semantics updated. strategy.setEditingState(EditingState(text: 'foo')); // NOTE: at this point some browsers, e.g. some versions of Safari will // have set the focus on the editing element as a result of setting // the test selection range. Other browsers require an explicit call // to `element.focus()` for the element to acquire focus. So far, // this discrepancy hasn't caused issues, so we're not checking for // any particular focus state between setEditingState and // createTextFieldSemantics. However, this is something for us to // keep in mind in case this causes issues in the future. createTextFieldSemantics( value: 'hello', isFocused: true, ); expect(owner().semanticsHost.ownerDocument?.activeElement, strategy.domElement); strategy.disable(); }); test('Works in multi-line mode', () { strategy.enable( multilineConfig, onChange: (_, __) {}, onAction: (_) {}, ); createTextFieldSemantics( value: 'hello', isFocused: true, isMultiline: true, ); final DomHTMLTextAreaElement textArea = strategy.domElement! as DomHTMLTextAreaElement; expect(owner().semanticsHost.ownerDocument?.activeElement, strategy.domElement); strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); textArea.blur(); expect(owner().semanticsHost.ownerDocument?.activeElement, domDocument.body); strategy.disable(); // It doesn't remove the textarea from the DOM. expect(owner().semanticsHost.contains(textArea), isTrue); // Editing element is not enabled. expect(strategy.isEnabled, isFalse); }); test('Does not position or size its DOM element', () { strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); // Send width and height that are different from semantics values on // purpose. final EditableTextGeometry geometry = EditableTextGeometry( height: 12, width: 13, globalTransform: Matrix4.translationValues(14, 15, 0).storage, ); testTextEditing.acceptCommand( TextInputSetEditableSizeAndTransform(geometry: geometry), () {}, ); createTextFieldSemantics( value: 'hello', isFocused: true, ); // Checks that the placement attributes come from semantics and not from // EditableTextGeometry. void checkPlacementIsSetBySemantics() { expect(strategy.activeDomElement.style.transform, ''); expect(strategy.activeDomElement.style.width, '100px'); expect(strategy.activeDomElement.style.height, '50px'); } checkPlacementIsSetBySemantics(); strategy.placeElement(); checkPlacementIsSetBySemantics(); }); Map<int, SemanticsObject> createTwoFieldSemantics(SemanticsTester builder, {int? 
focusFieldId}) { builder.updateNode( id: 0, children: <SemanticsNodeUpdate>[ builder.updateNode( id: 1, isTextField: true, value: 'Hello', isFocused: focusFieldId == 1, rect: const ui.Rect.fromLTRB(0, 0, 50, 10), ), builder.updateNode( id: 2, isTextField: true, value: 'World', isFocused: focusFieldId == 2, rect: const ui.Rect.fromLTRB(0, 20, 50, 10), ), ], ); return builder.apply(); } test('Changes focus from one text field to another through a semantics update', () { strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); // Switch between the two fields a few times. for (int i = 0; i < 5; i++) { final SemanticsTester tester = SemanticsTester(owner()); createTwoFieldSemantics(tester, focusFieldId: 1); expect(tester.apply().length, 3); expect(owner().semanticsHost.ownerDocument?.activeElement, tester.getTextField(1).editableElement); expect(strategy.domElement, tester.getTextField(1).editableElement); createTwoFieldSemantics(tester, focusFieldId: 2); expect(tester.apply().length, 3); expect(owner().semanticsHost.ownerDocument?.activeElement, tester.getTextField(2).editableElement); expect(strategy.domElement, tester.getTextField(2).editableElement); } }); }, skip: isIosSafari); group('$SemanticsTextEditingStrategy in iOS', () { late HybridTextEditing testTextEditing; late SemanticsTextEditingStrategy strategy; setUp(() { testTextEditing = HybridTextEditing(); SemanticsTextEditingStrategy.ensureInitialized(testTextEditing); strategy = SemanticsTextEditingStrategy.instance; testTextEditing.debugTextEditingStrategyOverride = strategy; testTextEditing.configuration = singlelineConfig; debugBrowserEngineOverride = BrowserEngine.webkit; debugOperatingSystemOverride = OperatingSystem.iOs; semantics() ..debugOverrideTimestampFunction(() => _testTime) ..semanticsEnabled = true; }); tearDown(() { debugBrowserEngineOverride = null; debugOperatingSystemOverride = null; semantics().semanticsEnabled = false; }); test('does not render a text field', () { expect(owner().semanticsHost.querySelector('flt-semantics[role="textbox"]'), isNull); createTextFieldSemanticsForIos(value: 'hello'); expect(owner().semanticsHost.querySelector('flt-semantics[role="textbox"]'), isNotNull); }); test('tap detection works', () async { final SemanticsActionLogger logger = SemanticsActionLogger(); createTextFieldSemanticsForIos(value: 'hello'); final DomElement textField = owner().semanticsHost .querySelector('flt-semantics[role="textbox"]')!; simulateTap(textField); expect(await logger.idLog.first, 0); expect(await logger.actionLog.first, ui.SemanticsAction.tap); }); test('Syncs semantic state from framework', () { expect(owner().semanticsHost.ownerDocument?.activeElement, domDocument.body); int changeCount = 0; int actionCount = 0; strategy.enable( singlelineConfig, onChange: (_, __) { changeCount++; }, onAction: (_) { actionCount++; }, ); // Create final SemanticsObject textFieldSemantics = createTextFieldSemanticsForIos( value: 'hello', label: 'greeting', isFocused: true, rect: const ui.Rect.fromLTWH(0, 0, 10, 15), ); final TextField textField = textFieldSemantics.primaryRole! 
as TextField; expect(owner().semanticsHost.ownerDocument?.activeElement, strategy.domElement); expect(textField.editableElement, strategy.domElement); expect(textField.activeEditableElement.getAttribute('aria-label'), 'greeting'); expect(textField.activeEditableElement.style.width, '10px'); expect(textField.activeEditableElement.style.height, '15px'); // Update createTextFieldSemanticsForIos( value: 'bye', label: 'farewell', rect: const ui.Rect.fromLTWH(0, 0, 12, 17), ); final DomElement textBox = owner().semanticsHost.querySelector('flt-semantics[role="textbox"]')!; expect(strategy.domElement, null); expect(owner().semanticsHost.ownerDocument?.activeElement, textBox); expect(textBox.getAttribute('aria-label'), 'farewell'); strategy.disable(); // There was no user interaction with the <input> element, // so we should expect no engine-to-framework feedback. expect(changeCount, 0); expect(actionCount, 0); }); test( 'Does not overwrite text value and selection editing state on semantic updates', () { strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); final SemanticsObject textFieldSemantics = createTextFieldSemanticsForIos( value: 'hello', textSelectionBase: 1, textSelectionExtent: 3, isFocused: true, rect: const ui.Rect.fromLTWH(0, 0, 10, 15)); final TextField textField = textFieldSemantics.primaryRole! as TextField; final DomHTMLInputElement editableElement = textField.activeEditableElement as DomHTMLInputElement; expect(editableElement, strategy.domElement); expect(editableElement.value, ''); expect(editableElement.selectionStart, 0); expect(editableElement.selectionEnd, 0); strategy.disable(); }); test( 'Updates editing state when receiving framework messages from the text input channel', () { expect(owner().semanticsHost.ownerDocument?.activeElement, domDocument.body); strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); final SemanticsObject textFieldSemantics = createTextFieldSemanticsForIos( value: 'hello', textSelectionBase: 1, textSelectionExtent: 3, isFocused: true, rect: const ui.Rect.fromLTWH(0, 0, 10, 15)); final TextField textField = textFieldSemantics.primaryRole! as TextField; final DomHTMLInputElement editableElement = textField.activeEditableElement as DomHTMLInputElement; // No updates expected on semantic updates expect(editableElement, strategy.domElement); expect(editableElement.value, ''); expect(editableElement.selectionStart, 0); expect(editableElement.selectionEnd, 0); // Update from framework const MethodCall setEditingState = MethodCall('TextInput.setEditingState', <String, dynamic>{ 'text': 'updated', 'selectionBase': 2, 'selectionExtent': 3, }); sendFrameworkMessage(codec.encodeMethodCall(setEditingState), testTextEditing); // Editing state should now be updated // expect(editableElement.value, 'updated'); expect(editableElement.selectionStart, 2); expect(editableElement.selectionEnd, 3); strategy.disable(); }); test('Gives up focus after DOM blur', () { expect(owner().semanticsHost.ownerDocument?.activeElement, domDocument.body); strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); final SemanticsObject textFieldSemantics = createTextFieldSemanticsForIos( value: 'hello', isFocused: true, ); final TextField textField = textFieldSemantics.primaryRole! as TextField; expect(textField.editableElement, strategy.domElement); expect(owner().semanticsHost.ownerDocument?.activeElement, strategy.domElement); // The input should not refocus after blur. 
textField.activeEditableElement.blur(); final DomElement textBox = owner().semanticsHost.querySelector('flt-semantics[role="textbox"]')!; expect(owner().semanticsHost.ownerDocument?.activeElement, textBox); strategy.disable(); }); test('Disposes and recreates dom elements in persistent mode', () { strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); // It doesn't create a new DOM element. expect(strategy.domElement, isNull); // During the semantics update the DOM element is created and is focused on. final SemanticsObject textFieldSemantics = createTextFieldSemanticsForIos( value: 'hello', isFocused: true, ); expect(strategy.domElement, isNotNull); expect(owner().semanticsHost.ownerDocument?.activeElement, strategy.domElement); strategy.disable(); expect(strategy.domElement, isNull); // It removes the DOM element. final TextField textField = textFieldSemantics.primaryRole! as TextField; expect(owner().semanticsHost.contains(textField.editableElement), isFalse); // Editing element is not enabled. expect(strategy.isEnabled, isFalse); // Focus is on the semantic object final DomElement textBox = owner().semanticsHost.querySelector('flt-semantics[role="textbox"]')!; expect(owner().semanticsHost.ownerDocument?.activeElement, textBox); }); test('Refocuses when setting editing state', () { strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); createTextFieldSemanticsForIos( value: 'hello', isFocused: true, ); expect(strategy.domElement, isNotNull); expect(owner().semanticsHost.ownerDocument?.activeElement, strategy.domElement); // Blur the element without telling the framework. strategy.activeDomElement.blur(); final DomElement textBox = owner().semanticsHost.querySelector('flt-semantics[role="textbox"]')!; expect(owner().semanticsHost.ownerDocument?.activeElement, textBox); // The input will have focus after editing state is set and semantics updated. strategy.setEditingState(EditingState(text: 'foo')); // NOTE: at this point some browsers, e.g. some versions of Safari will // have set the focus on the editing element as a result of setting // the test selection range. Other browsers require an explicit call // to `element.focus()` for the element to acquire focus. So far, // this discrepancy hasn't caused issues, so we're not checking for // any particular focus state between setEditingState and // createTextFieldSemantics. However, this is something for us to // keep in mind in case this causes issues in the future. createTextFieldSemanticsForIos( value: 'hello', isFocused: true, ); expect(owner().semanticsHost.ownerDocument?.activeElement, strategy.domElement); strategy.disable(); }); test('Works in multi-line mode', () { strategy.enable( multilineConfig, onChange: (_, __) {}, onAction: (_) {}, ); createTextFieldSemanticsForIos( value: 'hello', isFocused: true, isMultiline: true, ); final DomHTMLTextAreaElement textArea = strategy.domElement! as DomHTMLTextAreaElement; expect(owner().semanticsHost.ownerDocument?.activeElement, strategy.domElement); strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); expect(owner().semanticsHost.contains(textArea), isTrue); textArea.blur(); final DomElement textBox = owner().semanticsHost.querySelector('flt-semantics[role="textbox"]')!; expect(owner().semanticsHost.ownerDocument?.activeElement, textBox); strategy.disable(); // It removes the textarea from the DOM. expect(owner().semanticsHost.contains(textArea), isFalse); // Editing element is not enabled. 
expect(strategy.isEnabled, isFalse); }); test('Does not position or size its DOM element', () { strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); // Send width and height that are different from semantics values on // purpose. final Matrix4 transform = Matrix4.translationValues(14, 15, 0); final EditableTextGeometry geometry = EditableTextGeometry( height: 12, width: 13, globalTransform: transform.storage, ); const ui.Rect semanticsRect = ui.Rect.fromLTRB(0, 0, 100, 50); testTextEditing.acceptCommand( TextInputSetEditableSizeAndTransform(geometry: geometry), () {}, ); createTextFieldSemanticsForIos( value: 'hello', isFocused: true, ); // Checks that the placement attributes come from semantics and not from // EditableTextGeometry. void checkPlacementIsSetBySemantics() { expect(strategy.activeDomElement.style.transform, isNot(equals(transform.toString()))); expect(strategy.activeDomElement.style.width, '${semanticsRect.width}px'); expect(strategy.activeDomElement.style.height, '${semanticsRect.height}px'); } checkPlacementIsSetBySemantics(); strategy.placeElement(); checkPlacementIsSetBySemantics(); }); test('Changes focus from one text field to another through a semantics update', () { strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); // Switch between the two fields a few times. for (int i = 0; i < 1; i++) { final SemanticsTester tester = SemanticsTester(owner()); createTwoFieldSemanticsForIos(tester, focusFieldId: 1); expect(tester.apply().length, 3); expect(owner().semanticsHost.ownerDocument?.activeElement, tester.getTextField(1).editableElement); expect(strategy.domElement, tester.getTextField(1).editableElement); createTwoFieldSemanticsForIos(tester, focusFieldId: 2); expect(tester.apply().length, 3); expect(owner().semanticsHost.ownerDocument?.activeElement, tester.getTextField(2).editableElement); expect(strategy.domElement, tester.getTextField(2).editableElement); } }); test('input transform is correct', () async { strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); createTextFieldSemanticsForIos( value: 'hello', isFocused: true, ); expect(strategy.activeDomElement.style.transform, 'translate(${offScreenOffset}px, ${offScreenOffset}px)'); // See [_delayBeforePlacement]. await Future<void>.delayed(const Duration(milliseconds: 120) , (){}); expect(strategy.activeDomElement.style.transform, ''); }); test('disposes the editable element, if there is one', () { strategy.enable( singlelineConfig, onChange: (_, __) {}, onAction: (_) {}, ); SemanticsObject textFieldSemantics = createTextFieldSemanticsForIos( value: 'hello', ); TextField textField = textFieldSemantics.primaryRole! as TextField; expect(textField.editableElement, isNull); textField.dispose(); expect(textField.editableElement, isNull); textFieldSemantics = createTextFieldSemanticsForIos( value: 'hi', isFocused: true, ); textField = textFieldSemantics.primaryRole! 
as TextField; expect(textField.editableElement, isNotNull); textField.dispose(); expect(textField.editableElement, isNull); }); }, skip: !isSafari); } SemanticsObject createTextFieldSemantics({ required String value, String label = '', bool isFocused = false, bool isMultiline = false, ui.Rect rect = const ui.Rect.fromLTRB(0, 0, 100, 50), int textSelectionBase = 0, int textSelectionExtent = 0, }) { final SemanticsTester tester = SemanticsTester(owner()); tester.updateNode( id: 0, label: label, value: value, isTextField: true, isFocused: isFocused, isMultiline: isMultiline, hasTap: true, rect: rect, textDirection: ui.TextDirection.ltr, textSelectionBase: textSelectionBase, textSelectionExtent: textSelectionExtent ); tester.apply(); return tester.getSemanticsObject(0); } void simulateTap(DomElement element) { element.dispatchEvent(createDomPointerEvent( 'pointerdown', <Object?, Object?>{ 'clientX': 125, 'clientY': 248, }, )); element.dispatchEvent(createDomPointerEvent( 'pointerup', <Object?, Object?>{ 'clientX': 126, 'clientY': 248, }, )); } /// An editable DOM element won't be created on iOS unless a tap is detected. /// This function mimics the workflow by simulating a tap and sending a second /// semantic update. SemanticsObject createTextFieldSemanticsForIos({ required String value, String label = '', bool isFocused = false, bool isMultiline = false, ui.Rect rect = const ui.Rect.fromLTRB(0, 0, 100, 50), int textSelectionBase = 0, int textSelectionExtent = 0, }) { final SemanticsObject textFieldSemantics = createTextFieldSemantics( value: value, isFocused: isFocused, label: label, isMultiline: isMultiline, rect: rect, textSelectionBase: textSelectionBase, textSelectionExtent: textSelectionExtent, ); if (isFocused) { final TextField textField = textFieldSemantics.primaryRole! as TextField; simulateTap(textField.semanticsObject.element); return createTextFieldSemantics( value: value, isFocused: isFocused, label: label, isMultiline: isMultiline, rect: rect, textSelectionBase: textSelectionBase, textSelectionExtent: textSelectionExtent, ); } return textFieldSemantics; } /// See [createTextFieldSemanticsForIos]. Map<int, SemanticsObject> createTwoFieldSemanticsForIos(SemanticsTester builder, {int? focusFieldId}) { builder.updateNode( id: 0, children: <SemanticsNodeUpdate>[ builder.updateNode( id: 1, isTextField: true, value: 'Hello', label: 'Hello', isFocused: false, rect: const ui.Rect.fromLTWH(0, 0, 10, 10), ), builder.updateNode( id: 2, isTextField: true, value: 'World', label: 'World', isFocused: false, rect: const ui.Rect.fromLTWH(20, 20, 10, 10), ), ], ); builder.apply(); final String label = focusFieldId == 1 ? 'Hello' : 'World'; final DomElement textBox = owner().semanticsHost.querySelector('flt-semantics[aria-label="$label"]')!; simulateTap(textBox); builder.updateNode( id: 0, children: <SemanticsNodeUpdate>[ builder.updateNode( id: 1, isTextField: true, value: 'Hello', label: 'Hello', isFocused: focusFieldId == 1, rect: const ui.Rect.fromLTWH(0, 0, 10, 10), ), builder.updateNode( id: 2, isTextField: true, value: 'World', label: 'World', isFocused: focusFieldId == 2, rect: const ui.Rect.fromLTWH(20, 20, 10, 10), ), ], ); return builder.apply(); } /// Emulates sending of a message by the framework to the engine. void sendFrameworkMessage(ByteData? message, HybridTextEditing testTextEditing) { testTextEditing.channel.handleTextInput(message, (ByteData? data) {}); }
engine/lib/web_ui/test/engine/semantics/text_field_test.dart/0
{ "file_path": "engine/lib/web_ui/test/engine/semantics/text_field_test.dart", "repo_id": "engine", "token_count": 13054 }
292
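The text-field semantics tests above all follow the same enable/update/disable lifecycle of SemanticsTextEditingStrategy. The condensed sketch below restates that flow in one place. It is an illustration rather than a standalone program: it assumes the test file's own bootstrap plus the helpers defined at the bottom of that file (createTextFieldSemantics, singlelineConfig).

import 'package:ui/src/engine.dart' hide window;

void textFieldLifecycleSketch() {
  // Wire the strategy to a text-editing host, as the tests' setUp does.
  final HybridTextEditing testTextEditing = HybridTextEditing();
  SemanticsTextEditingStrategy.ensureInitialized(testTextEditing);
  final SemanticsTextEditingStrategy strategy =
      SemanticsTextEditingStrategy.instance;
  testTextEditing.debugTextEditingStrategyOverride = strategy;
  testTextEditing.configuration = singlelineConfig;

  strategy.enable(
    singlelineConfig,
    onChange: (_, __) {},
    onAction: (_) {},
  );

  // A focused text-field node creates and focuses the backing DOM element.
  final SemanticsObject textFieldSemantics =
      createTextFieldSemantics(value: 'hello', isFocused: true);
  final TextField textField = textFieldSemantics.primaryRole! as TextField;
  assert(textField.editableElement == strategy.domElement);

  // Editing state sent by the framework is written straight into the element.
  strategy.setEditingState(EditingState(text: 'updated'));

  // Disabling detaches the strategy; whether the DOM element survives differs
  // between the desktop and iOS code paths, as the two test groups show.
  strategy.disable();
}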
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:async'; import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import 'package:ui/ui.dart' as ui show Size; void main() { internalBootstrapBrowserTest(() => doTests); } void doTests() { group('computePhysicalSize', () { late FullPageDimensionsProvider provider; setUp(() { provider = FullPageDimensionsProvider(); }); test('returns visualViewport physical size (width * dpr)', () { const double dpr = 2.5; EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(dpr); final ui.Size expected = ui.Size(domWindow.visualViewport!.width! * dpr, domWindow.visualViewport!.height! * dpr); final ui.Size computed = provider.computePhysicalSize(); expect(computed, expected); }); }); group('computeKeyboardInsets', () { late FullPageDimensionsProvider provider; setUp(() { provider = FullPageDimensionsProvider(); }); test('from viewport physical size (simulated keyboard)', () { // Simulate a 100px tall keyboard showing... const double dpr = 2.5; EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(dpr); const double keyboardGap = 100; final double physicalHeight = (domWindow.visualViewport!.height! + keyboardGap) * dpr; const double expectedBottom = keyboardGap * dpr; final ViewPadding computed = provider.computeKeyboardInsets(physicalHeight, false); expect(computed.top, 0); expect(computed.right, 0); expect(computed.bottom, expectedBottom); expect(computed.left, 0); }); }); group('onResize Stream', () { // Needed to synthesize "resize" events final DomEventTarget resizeEventTarget = domWindow.visualViewport ?? domWindow; late FullPageDimensionsProvider provider; setUp(() { provider = FullPageDimensionsProvider(); }); test('funnels resize events on resizeEventTarget', () { final Future<Object?> event = provider.onResize.first; final Future<List<Object?>> events = provider.onResize.take(3).toList(); resizeEventTarget.dispatchEvent(createDomEvent('Event', 'resize')); resizeEventTarget.dispatchEvent(createDomEvent('Event', 'resize')); resizeEventTarget.dispatchEvent(createDomEvent('Event', 'resize')); expect(event, completes); expect(events, completes); expect(events, completion(hasLength(3))); }); test('closed by onHotRestart', () { // Register an onDone listener for the stream final Completer<bool> completer = Completer<bool>(); provider.onResize.listen(null, onDone: () { completer.complete(true); }); // Should close the stream provider.close(); resizeEventTarget.dispatchEvent(createDomEvent('Event', 'resize')); expect(provider.onResize.isEmpty, completion(isTrue)); expect(completer.future, completion(isTrue)); }); }); }
engine/lib/web_ui/test/engine/view_embedder/dimensions_provider/full_page_dimensions_provider_test.dart/0
{ "file_path": "engine/lib/web_ui/test/engine/view_embedder/dimensions_provider/full_page_dimensions_provider_test.dart", "repo_id": "engine", "token_count": 1131 }
293
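The test above pins down the core arithmetic of FullPageDimensionsProvider: the physical size is the visualViewport extent in CSS pixels multiplied by the device pixel ratio. A small sketch of that computation, assuming the same browser-hosted test environment (it needs a real domWindow.visualViewport to be present):

import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui show Size;

void printPhysicalSize() {
  const double dpr = 2.5;
  EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(dpr);

  final FullPageDimensionsProvider provider = FullPageDimensionsProvider();
  final ui.Size physical = provider.computePhysicalSize();

  // For a 1000x600 CSS-pixel viewport this prints Size(2500.0, 1500.0):
  // each dimension is the visualViewport extent multiplied by dpr.
  print(physical);
}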
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart' as engine; import 'package:ui/ui.dart' hide TextStyle; import '../common/test_initialization.dart'; import 'screenshot.dart'; void main() { internalBootstrapBrowserTest(() => testMain); } /// Tests context save/restore. Future<void> testMain() async { const double screenWidth = 600.0; const double screenHeight = 800.0; const Rect screenRect = Rect.fromLTWH(0, 0, screenWidth, screenHeight); setUpUnitTests( setUpTestViewDimensions: false, ); // Regression test for https://github.com/flutter/flutter/issues/49429 // Should clip with correct transform. test('Clips image with oval clip path', () async { final engine.RecordingCanvas rc = engine.RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300)); final engine.SurfacePaint paint = Paint() as engine.SurfacePaint ..color = const Color(0xFF00FF00) ..style = PaintingStyle.fill; rc.save(); final Path ovalPath = Path(); ovalPath.addOval(const Rect.fromLTWH(100, 30, 200, 100)); rc.clipPath(ovalPath); rc.translate(-500, -500); rc.save(); rc.translate(500, 500); rc.drawPath(ovalPath, paint); // The line below was causing SaveClipStack to incorrectly set // transform before path painting. rc.translate(-1000, -1000); rc.save(); rc.restore(); rc.restore(); rc.restore(); // The rectangle should paint without clipping since we restored // context. rc.drawRect(const Rect.fromLTWH(0, 0, 4, 200), paint); await canvasScreenshot(rc, 'context_save_restore_transform', canvasRect: screenRect); }); test('Should restore clip path', () async { final engine.RecordingCanvas rc = engine.RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300)); final Paint goodPaint = Paint() ..color = const Color(0x8000FF00) ..style = PaintingStyle.fill; final Paint badPaint = Paint() ..color = const Color(0xFFFF0000) ..style = PaintingStyle.fill; rc.save(); final Path ovalPath = Path(); ovalPath.addOval(const Rect.fromLTWH(100, 30, 200, 100)); rc.clipPath(ovalPath); rc.translate(-500, -500); rc.save(); rc.restore(); // The rectangle should be clipped against oval. rc.drawRect(const Rect.fromLTWH(0, 0, 300, 300), badPaint as engine.SurfacePaint); rc.restore(); // The rectangle should paint without clipping since we restored // context. rc.drawRect(const Rect.fromLTWH(0, 0, 200, 200), goodPaint as engine.SurfacePaint); await canvasScreenshot(rc, 'context_save_restore_clip', canvasRect: screenRect); }); }
engine/lib/web_ui/test/html/canvas_context_golden_test.dart/0
{ "file_path": "engine/lib/web_ui/test/html/canvas_context_golden_test.dart", "repo_id": "engine", "token_count": 1015 }
294
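The golden test above protects the save/clip/restore bookkeeping of the HTML RecordingCanvas: a clip applied inside a save/restore pair must not leak into later draws. A minimal sketch of that behavior using only calls that appear in the test, with the screenshot plumbing omitted:

import 'package:ui/src/engine.dart' as engine;
import 'package:ui/ui.dart' hide TextStyle;

void clipRestoreSketch() {
  final engine.RecordingCanvas rc =
      engine.RecordingCanvas(const Rect.fromLTRB(0, 0, 400, 300));
  final engine.SurfacePaint paint = Paint() as engine.SurfacePaint
    ..color = const Color(0xFF00FF00)
    ..style = PaintingStyle.fill;

  rc.save();
  rc.clipPath(Path()..addOval(const Rect.fromLTWH(100, 30, 200, 100)));
  // Drawn while the oval clip is active, so it is clipped.
  rc.drawRect(const Rect.fromLTWH(0, 0, 300, 300), paint);
  rc.restore();

  // Drawn after restore(), so the earlier clip no longer applies.
  rc.drawRect(const Rect.fromLTWH(0, 0, 50, 200), paint);
}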
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import 'package:ui/ui.dart'; import '../../common/test_initialization.dart'; import '../screenshot.dart'; const Rect region = Rect.fromLTWH(0, 0, 500, 100); void main() { internalBootstrapBrowserTest(() => testMain); } SurfacePaint makePaint() => Paint() as SurfacePaint; Future<void> testMain() async { setUpUnitTests( emulateTesterEnvironment: false, setUpTestViewDimensions: false, ); setUpAll(() async { debugShowClipLayers = true; }); setUp(() async { SurfaceSceneBuilder.debugForgetFrameScene(); }); group('Add picture to scene', () { test('draw growing picture across frames', () async { final SurfaceSceneBuilder builder = SurfaceSceneBuilder(); builder.pushClipRect( const Rect.fromLTRB(0, 0, 100, 100), ); _drawTestPicture(builder, 100, false); builder.pop(); final DomElement elm1 = builder .build() .webOnlyRootElement!; domDocument.body!.append(elm1); // Now draw picture again but at larger size. final SurfaceSceneBuilder builder2 = SurfaceSceneBuilder(); builder2.pushClipRect( const Rect.fromLTRB(0, 0, 100, 100), ); // Now draw the picture at original target size, which will use a // different code path that should normally not have width/height set // on image element. _drawTestPicture(builder2, 20, false); builder2.pop(); elm1.remove(); await sceneScreenshot(builder2, 'canvas_draw_picture_acrossframes', region: region); }); test('draw growing picture across frames clipped', () async { final SurfaceSceneBuilder builder = SurfaceSceneBuilder(); builder.pushClipRect( const Rect.fromLTRB(0, 0, 100, 100), ); _drawTestPicture(builder, 100, true); builder.pop(); final DomElement elm1 = builder .build() .webOnlyRootElement!; domDocument.body!.append(elm1); // Now draw picture again but at larger size. final SurfaceSceneBuilder builder2 = SurfaceSceneBuilder(); builder2.pushClipRect( const Rect.fromLTRB(0, 0, 100, 100), ); _drawTestPicture(builder2, 20, true); builder2.pop(); elm1.remove(); await sceneScreenshot(builder2, 'canvas_draw_picture_acrossframes_clipped', region: region); }); test('PictureInPicture', () async { final SurfaceSceneBuilder builder = SurfaceSceneBuilder(); final Picture greenRectPicture = _drawGreenRectIntoPicture(); final EnginePictureRecorder recorder = PictureRecorder() as EnginePictureRecorder; final RecordingCanvas canvas = recorder.beginRecording(const Rect.fromLTRB(0, 0, 100, 100)); canvas.drawPicture(greenRectPicture); builder.addPicture(const Offset(10, 10), recorder.endRecording()); await sceneScreenshot(builder, 'canvas_draw_picture_in_picture_rect', region: region); }); }); } HtmlImage? 
sharedImage; void _drawTestPicture(SceneBuilder builder, double targetSize, bool clipped) { sharedImage ??= _createRealTestImage(); final EnginePictureRecorder recorder = PictureRecorder() as EnginePictureRecorder; final RecordingCanvas canvas = recorder.beginRecording(const Rect.fromLTRB(0, 0, 100, 100)); canvas.debugEnforceArbitraryPaint(); if (clipped) { canvas.clipRRect( RRect.fromLTRBR(0, 0, targetSize, targetSize, const Radius.circular(4))); } canvas.drawImageRect(sharedImage!, const Rect.fromLTWH(0, 0, 20, 20), Rect.fromLTWH(0, 0, targetSize, targetSize), makePaint()); final Picture picture = recorder.endRecording(); builder.addPicture( Offset.zero, picture, ); } Picture _drawGreenRectIntoPicture() { final EnginePictureRecorder recorder = PictureRecorder() as EnginePictureRecorder; final RecordingCanvas canvas = recorder.beginRecording(const Rect.fromLTRB(0, 0, 100, 100)); canvas.drawRect(const Rect.fromLTWH(20, 20, 50, 50), makePaint()..color = const Color(0xFF00FF00)); return recorder.endRecording(); } const String _base64Encoded20x20TestImage = 'iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAIAAAAC64paAAAACXBIWXMAAC4jAAAuIwF4pT92AAAA' 'B3RJTUUH5AMFFBksg4i3gQAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdGggR0lNUFeBDhcAAAAj' 'SURBVDjLY2TAC/7jlWVioACMah4ZmhnxpyHG0QAb1UyZZgBjWAIm/clP0AAAAABJRU5ErkJggg=='; HtmlImage _createRealTestImage() { return HtmlImage( createDomHTMLImageElement() ..src = 'data:text/plain;base64,$_base64Encoded20x20TestImage', 20, 20, ); }
engine/lib/web_ui/test/html/drawing/canvas_draw_picture_golden_test.dart/0
{ "file_path": "engine/lib/web_ui/test/html/drawing/canvas_draw_picture_golden_test.dart", "repo_id": "engine", "token_count": 1831 }
295
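The drawPicture golden test above ends by nesting one recorded Picture inside another before handing it to the scene builder. A stripped-down sketch of that picture-in-picture flow, assuming the same test bootstrap and omitting the golden screenshot comparison:

import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart';

void pictureInPictureSketch() {
  // Record an inner picture containing a single green rectangle.
  final EnginePictureRecorder innerRecorder =
      PictureRecorder() as EnginePictureRecorder;
  final RecordingCanvas innerCanvas =
      innerRecorder.beginRecording(const Rect.fromLTRB(0, 0, 100, 100));
  innerCanvas.drawRect(const Rect.fromLTWH(20, 20, 50, 50),
      (Paint() as SurfacePaint)..color = const Color(0xFF00FF00));
  final Picture innerPicture = innerRecorder.endRecording();

  // Replay the inner picture inside an outer recording.
  final EnginePictureRecorder outerRecorder =
      PictureRecorder() as EnginePictureRecorder;
  final RecordingCanvas outerCanvas =
      outerRecorder.beginRecording(const Rect.fromLTRB(0, 0, 100, 100));
  outerCanvas.drawPicture(innerPicture);

  // The composed picture is attached to the scene at an offset.
  final SurfaceSceneBuilder builder = SurfaceSceneBuilder();
  builder.addPicture(const Offset(10, 10), outerRecorder.endRecording());
  builder.build();
}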
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import 'package:ui/ui.dart' hide window; import '../../common/test_initialization.dart'; import 'helper.dart'; const Rect bounds = Rect.fromLTWH(0, 0, 800, 600); void main() { internalBootstrapBrowserTest(() => testMain); } Future<void> testMain() async { setUpUnitTests( withImplicitView: true, emulateTesterEnvironment: false, setUpTestViewDimensions: false, ); test('paints multiple shadows', () { final BitmapCanvas canvas = BitmapCanvas(bounds, RenderStrategy()); final CanvasParagraph paragraph = rich( EngineParagraphStyle(fontFamily: 'Roboto'), (CanvasParagraphBuilder builder) { builder.pushStyle(EngineTextStyle.only( fontSize: 32.0, color: blue, shadows: <Shadow>[ const Shadow(color: red, blurRadius:2.0, offset: Offset(4.0, 2.0)), const Shadow(color: green, blurRadius: 3.0), ], )); builder.addText('Lorem '); builder.pushStyle(EngineTextStyle.only( color: green, background: Paint()..color = yellow, shadows: <Shadow>[ const Shadow(blurRadius: 10.0), ], )); builder.addText('ipsum'); builder.pop(); builder.addText('dolor.'); }, )..layout(constrain(double.infinity)); canvas.drawParagraph(paragraph, Offset.zero); return takeScreenshot(canvas, bounds, 'canvas_paragraph_shadows'); }); }
engine/lib/web_ui/test/html/paragraph/shadows_golden_test.dart/0
{ "file_path": "engine/lib/web_ui/test/html/paragraph/shadows_golden_test.dart", "repo_id": "engine", "token_count": 694 }
296
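The golden test above styles shadowed text through the engine-internal paragraph builder (EngineParagraphStyle, EngineTextStyle, BitmapCanvas). The same multi-shadow run can be expressed with the public dart:ui API, which is what application code normally uses. A hedged public-API equivalent of the first styled span drawn above; the concrete color values stand in for the test helper's blue/red/green constants:

import 'package:ui/ui.dart' as ui;

ui.Paragraph buildShadowedParagraph() {
  final ui.ParagraphBuilder builder =
      ui.ParagraphBuilder(ui.ParagraphStyle(fontFamily: 'Roboto'));
  builder.pushStyle(ui.TextStyle(
    fontSize: 32.0,
    color: const ui.Color(0xFF0000FF),
    shadows: <ui.Shadow>[
      // A red shadow offset down-right, plus a green halo with no offset.
      const ui.Shadow(
          color: ui.Color(0xFFFF0000),
          blurRadius: 2.0,
          offset: ui.Offset(4.0, 2.0)),
      const ui.Shadow(color: ui.Color(0xFF00FF00), blurRadius: 3.0),
    ],
  ));
  builder.addText('Lorem ipsum');
  builder.pop();
  return builder.build()
    ..layout(const ui.ParagraphConstraints(width: double.infinity));
}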
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:math' as math; import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import 'package:ui/ui.dart' hide TextStyle; import '../../common/test_initialization.dart'; import '../screenshot.dart'; // TODO(yjbanov): unskip Firefox tests when Firefox implements WebGL in headless mode. // https://github.com/flutter/flutter/issues/86623 void main() { internalBootstrapBrowserTest(() => testMain); } Future<void> testMain() async { setUpUnitTests( setUpTestViewDimensions: false, ); test('Should draw linear gradient using rectangle.', () async { final RecordingCanvas rc = RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500)); const Rect shaderRect = Rect.fromLTRB(50, 50, 300, 300); final SurfacePaint paint = SurfacePaint()..shader = Gradient.linear( Offset(shaderRect.left, shaderRect.top), Offset(shaderRect.right, shaderRect.bottom), const <Color>[Color(0xFFcfdfd2), Color(0xFF042a85)]); rc.drawRect(shaderRect, paint); expect(rc.renderStrategy.hasArbitraryPaint, isTrue); await canvasScreenshot(rc, 'linear_gradient_rect'); }); test('Should blend linear gradient with alpha channel correctly.', () async { const Rect canvasRect = Rect.fromLTRB(0, 0, 500, 500); final RecordingCanvas rc = RecordingCanvas(canvasRect); final SurfacePaint backgroundPaint = SurfacePaint() ..style = PaintingStyle.fill ..color = const Color(0xFFFF0000); rc.drawRect(canvasRect, backgroundPaint); const Rect shaderRect = Rect.fromLTRB(50, 50, 300, 300); final SurfacePaint paint = SurfacePaint()..shader = Gradient.linear( Offset(shaderRect.left, shaderRect.top), Offset(shaderRect.right, shaderRect.bottom), const <Color>[Color(0x00000000), Color(0xFF0000FF)]); rc.drawRect(shaderRect, paint); expect(rc.renderStrategy.hasArbitraryPaint, isTrue); await canvasScreenshot(rc, 'linear_gradient_rect_alpha'); }); test('Should draw linear gradient with transform.', () async { final RecordingCanvas rc = RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500)); final List<double> angles = <double>[0.0, 90.0, 180.0]; double yOffset = 0; for (final double angle in angles) { final Rect shaderRect = Rect.fromLTWH(50, 50 + yOffset, 100, 100); final Matrix4 matrix = Matrix4.identity(); matrix.translate(shaderRect.left, shaderRect.top); matrix.multiply(Matrix4 .rotationZ((angle / 180) * math.pi)); final Matrix4 post = Matrix4.identity(); post.translate(-shaderRect.left, -shaderRect.top); matrix.multiply(post); final SurfacePaint paint = SurfacePaint() ..shader = Gradient.linear( Offset(shaderRect.left, shaderRect.top), Offset(shaderRect.right, shaderRect.bottom), const <Color>[Color(0xFFFF0000), Color(0xFF042a85)], null, TileMode.clamp, matrix.toFloat64()); rc.drawRect(shaderRect, SurfacePaint() ..color = const Color(0xFF000000)); rc.drawOval(shaderRect, paint); yOffset += 120; } expect(rc.renderStrategy.hasArbitraryPaint, isTrue); await canvasScreenshot(rc, 'linear_gradient_oval_matrix'); }, skip: isFirefox); // Regression test for https://github.com/flutter/flutter/issues/50010 test('Should draw linear gradient using rounded rect.', () async { final RecordingCanvas rc = RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500)); const Rect shaderRect = Rect.fromLTRB(50, 50, 300, 300); final SurfacePaint paint = SurfacePaint()..shader = Gradient.linear( Offset(shaderRect.left, shaderRect.top), Offset(shaderRect.right, 
shaderRect.bottom), const <Color>[Color(0xFFcfdfd2), Color(0xFF042a85)]); rc.drawRRect(RRect.fromRectAndRadius(shaderRect, const Radius.circular(16)), paint); expect(rc.renderStrategy.hasArbitraryPaint, isTrue); await canvasScreenshot(rc, 'linear_gradient_rounded_rect'); }); test('Should draw tiled repeated linear gradient with transform.', () async { final RecordingCanvas rc = RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500)); final List<double> angles = <double>[0.0, 30.0, 210.0]; double yOffset = 0; for (final double angle in angles) { final Rect shaderRect = Rect.fromLTWH(50, 50 + yOffset, 100, 100); final SurfacePaint paint = SurfacePaint() ..shader = Gradient.linear( Offset(shaderRect.left, shaderRect.top), Offset(shaderRect.left + shaderRect.width / 2, shaderRect.top), const <Color>[Color(0xFFFF0000), Color(0xFF042a85)], null, TileMode.repeated, Matrix4 .rotationZ((angle / 180) * math.pi) .toFloat64()); rc.drawRect(shaderRect, SurfacePaint() ..color = const Color(0xFF000000)); rc.drawOval(shaderRect, paint); yOffset += 120; } expect(rc.renderStrategy.hasArbitraryPaint, isTrue); await canvasScreenshot(rc, 'linear_gradient_tiled_repeated_rect'); }, skip: isFirefox); test('Should draw tiled mirrored linear gradient with transform.', () async { final RecordingCanvas rc = RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500)); final List<double> angles = <double>[0.0, 30.0, 210.0]; double yOffset = 0; for (final double angle in angles) { final Rect shaderRect = Rect.fromLTWH(50, 50 + yOffset, 100, 100); final SurfacePaint paint = SurfacePaint() ..shader = Gradient.linear( Offset(shaderRect.left, shaderRect.top), Offset(shaderRect.left + shaderRect.width / 2, shaderRect.top), const <Color>[Color(0xFFFF0000), Color(0xFF042a85)], null, TileMode.mirror, Matrix4 .rotationZ((angle / 180) * math.pi) .toFloat64()); rc.drawRect(shaderRect, SurfacePaint() ..color = const Color(0xFF000000)); rc.drawOval(shaderRect, paint); yOffset += 120; } expect(rc.renderStrategy.hasArbitraryPaint, isTrue); await canvasScreenshot(rc, 'linear_gradient_tiled_mirrored_rect'); }, skip: isFirefox); }
engine/lib/web_ui/test/html/shaders/linear_gradient_golden_test.dart/0
{ "file_path": "engine/lib/web_ui/test/html/shaders/linear_gradient_golden_test.dart", "repo_id": "engine", "token_count": 2567 }
297
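The gradient tests above rotate a linear gradient by composing a translate-rotate-translate matrix and passing it to Gradient.linear. The helper below sketches that construction with the same calls; Matrix4 here is the engine's vector-math type re-exported by package:ui/src/engine.dart, and the colors are the ones used in the test:

import 'dart:math' as math;

import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' hide TextStyle;

SurfacePaint rotatedLinearGradientPaint(Rect rect, double degrees) {
  // Rotate about the rect's top-left corner: translate to the pivot,
  // rotate, then translate back.
  final Matrix4 matrix = Matrix4.identity();
  matrix.translate(rect.left, rect.top);
  matrix.multiply(Matrix4.rotationZ((degrees / 180) * math.pi));
  final Matrix4 post = Matrix4.identity();
  post.translate(-rect.left, -rect.top);
  matrix.multiply(post);

  return SurfacePaint()
    ..shader = Gradient.linear(
      Offset(rect.left, rect.top),
      Offset(rect.right, rect.bottom),
      const <Color>[Color(0xFFFF0000), Color(0xFF042a85)],
      null,
      TileMode.clamp,
      matrix.toFloat64(),
    );
}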
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import 'package:ui/ui.dart'; import '../paragraph/helper.dart'; void main() { internalBootstrapBrowserTest(() => testMain); } Future<void> testMain() async { group('$BidiFragmenter', () { test('empty string', () { expect(split(''), <_Bidi>[ _Bidi('', null, ffPrevious), ]); }); test('basic cases', () { expect(split('Lorem 11 $rtlWord1 22 ipsum'), <_Bidi>[ _Bidi('Lorem', ltr, ffLtr), _Bidi(' ', null, ffSandwich), _Bidi('11', ltr, ffPrevious), _Bidi(' ', null, ffSandwich), _Bidi(rtlWord1, rtl, ffRtl), _Bidi(' ', null, ffSandwich), _Bidi('22', ltr, ffPrevious), _Bidi(' ', null, ffSandwich), _Bidi('ipsum', ltr, ffLtr), ]); }); test('text and digits', () { expect(split('Lorem11 ${rtlWord1}22 33ipsum44dolor ${rtlWord2}55$rtlWord1'), <_Bidi>[ _Bidi('Lorem11', ltr, ffLtr), _Bidi(' ', null, ffSandwich), _Bidi(rtlWord1, rtl, ffRtl), _Bidi('22', ltr, ffPrevious), _Bidi(' ', null, ffSandwich), _Bidi('33ipsum44dolor', ltr, ffLtr), _Bidi(' ', null, ffSandwich), _Bidi(rtlWord2, rtl, ffRtl), _Bidi('55', ltr, ffPrevious), _Bidi(rtlWord1, rtl, ffRtl), ]); }); test('Mashriqi digits', () { expect(split('foo ١١ ٢٢ bar'), <_Bidi>[ _Bidi('foo', ltr, ffLtr), _Bidi(' ', null, ffSandwich), _Bidi('١١', ltr, ffRtl), _Bidi(' ', null, ffSandwich), _Bidi('٢٢', ltr, ffRtl), _Bidi(' ', null, ffSandwich), _Bidi('bar', ltr, ffLtr), ]); expect(split('$rtlWord1 ١١ ٢٢ $rtlWord2'), <_Bidi>[ _Bidi(rtlWord1, rtl, ffRtl), _Bidi(' ', null, ffSandwich), _Bidi('١١', ltr, ffRtl), _Bidi(' ', null, ffSandwich), _Bidi('٢٢', ltr, ffRtl), _Bidi(' ', null, ffSandwich), _Bidi(rtlWord2, rtl, ffRtl), ]); }); test('spaces', () { expect(split(' '), <_Bidi>[ _Bidi(' ', null, ffSandwich), ]); }); test('symbols', () { expect(split('Calculate 2.2 + 4.5 and write the result'), <_Bidi>[ _Bidi('Calculate', ltr, ffLtr), _Bidi(' ', null, ffSandwich), _Bidi('2', ltr, ffPrevious), _Bidi('.', null, ffSandwich), _Bidi('2', ltr, ffPrevious), _Bidi(' + ', null, ffSandwich), _Bidi('4', ltr, ffPrevious), _Bidi('.', null, ffSandwich), _Bidi('5', ltr, ffPrevious), _Bidi(' ', null, ffSandwich), _Bidi('and', ltr, ffLtr), _Bidi(' ', null, ffSandwich), _Bidi('write', ltr, ffLtr), _Bidi(' ', null, ffSandwich), _Bidi('the', ltr, ffLtr), _Bidi(' ', null, ffSandwich), _Bidi('result', ltr, ffLtr), ]); expect(split('Calculate $rtlWord1 2.2 + 4.5 and write the result'), <_Bidi>[ _Bidi('Calculate', ltr, ffLtr), _Bidi(' ', null, ffSandwich), _Bidi(rtlWord1, rtl, ffRtl), _Bidi(' ', null, ffSandwich), _Bidi('2', ltr, ffPrevious), _Bidi('.', null, ffSandwich), _Bidi('2', ltr, ffPrevious), _Bidi(' + ', null, ffSandwich), _Bidi('4', ltr, ffPrevious), _Bidi('.', null, ffSandwich), _Bidi('5', ltr, ffPrevious), _Bidi(' ', null, ffSandwich), _Bidi('and', ltr, ffLtr), _Bidi(' ', null, ffSandwich), _Bidi('write', ltr, ffLtr), _Bidi(' ', null, ffSandwich), _Bidi('the', ltr, ffLtr), _Bidi(' ', null, ffSandwich), _Bidi('result', ltr, ffLtr), ]); expect(split('12 + 24 = 36'), <_Bidi>[ _Bidi('12', ltr, ffPrevious), _Bidi(' + ', null, ffSandwich), _Bidi('24', ltr, ffPrevious), _Bidi(' = ', null, ffSandwich), _Bidi('36', ltr, ffPrevious), ]); }); test('handles new lines', () { expect(split('Lorem\n12\nipsum \n'), <_Bidi>[ _Bidi('Lorem', ltr, ffLtr), _Bidi('\n', null, ffSandwich), _Bidi('12', ltr, ffPrevious), _Bidi('\n', 
null, ffSandwich), _Bidi('ipsum', ltr, ffLtr), _Bidi(' \n', null, ffSandwich), ]); expect(split('$rtlWord1\n $rtlWord2 \n'), <_Bidi>[ _Bidi(rtlWord1, rtl, ffRtl), _Bidi('\n ', null, ffSandwich), _Bidi(rtlWord2, rtl, ffRtl), _Bidi(' \n', null, ffSandwich), ]); }); test('surrogates', () { expect(split('A\u{1F600}'), <_Bidi>[ _Bidi('A', ltr, ffLtr), _Bidi('\u{1F600}', null, ffSandwich), ]); }); }); } /// Holds information about how a bidi region was split from a string. class _Bidi { _Bidi(this.text, this.textDirection, this.fragmentFlow); factory _Bidi.fromBidiFragment(String text, BidiFragment bidiFragment) { return _Bidi( text.substring(bidiFragment.start, bidiFragment.end), bidiFragment.textDirection, bidiFragment.fragmentFlow, ); } final String text; final TextDirection? textDirection; final FragmentFlow fragmentFlow; @override int get hashCode => Object.hash(text, textDirection); @override bool operator ==(Object other) { return other is _Bidi && other.text == text && other.textDirection == textDirection && other.fragmentFlow == fragmentFlow; } @override String toString() { return '"$text" ($textDirection | $fragmentFlow)'; } } List<_Bidi> split(String text) { return <_Bidi>[ for (final BidiFragment bidiFragment in BidiFragmenter(text).fragment()) _Bidi.fromBidiFragment(text, bidiFragment) ]; }
engine/lib/web_ui/test/html/text/text_direction_test.dart/0
{ "file_path": "engine/lib/web_ui/test/html/text/text_direction_test.dart", "repo_id": "engine", "token_count": 2955 }
298
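The test above drives BidiFragmenter, the engine-internal splitter that breaks a string into directional runs before line breaking. A tiny sketch of calling it directly; because the API lives in package:ui/src/engine.dart, it is only meaningful inside the web_ui test environment:

import 'package:ui/src/engine.dart';

void printBidiRuns() {
  const String text = 'Lorem 11 ipsum';
  for (final BidiFragment fragment in BidiFragmenter(text).fragment()) {
    // Each fragment carries its range, resolved direction (null for neutral
    // text such as spaces), and how it flows relative to its neighbors.
    print('"${text.substring(fragment.start, fragment.end)}" -> '
        '${fragment.textDirection} / ${fragment.fragmentFlow}');
  }
}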
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/ui.dart'; import '../common/test_initialization.dart'; void main() { internalBootstrapBrowserTest(() => testMain); } Future<void> testMain() async { setUpUnitTests(); test('Gradient.radial with no focal point', () { expect( Gradient.radial( Offset.zero, 5.0, <Color>[const Color(0xFFFFFFFF), const Color(0xFFFFFFFF)], <double>[0.0, 1.0], TileMode.mirror), isNotNull, ); }); // this is just a radial gradient, focal point is discarded. test('radial center and focal == Offset.zero and focalRadius == 0.0 is ok', () { expect( () => Gradient.radial( Offset.zero, 0.0, <Color>[const Color(0xFFFFFFFF), const Color(0xFFFFFFFF)], <double>[0.0, 1.0], TileMode.mirror, null, Offset.zero, ), isNotNull); }); test('radial center != focal and focalRadius == 0.0 is ok', () { expect( () => Gradient.radial( Offset.zero, 0.0, <Color>[const Color(0xFFFFFFFF), const Color(0xFFFFFFFF)], <double>[0.0, 1.0], TileMode.mirror, null, const Offset(2.0, 2.0), ), isNotNull); }); // this would result in div/0 on skia side. test('radial center and focal == Offset.zero and focalRadius != 0.0 assert', () { expect( () => Gradient.radial( Offset.zero, 0.0, <Color>[const Color(0xFFFFFFFF), const Color(0xFFFFFFFF)], <double>[0.0, 1.0], TileMode.mirror, null, Offset.zero, 1.0, ), throwsA(const TypeMatcher<AssertionError>()), ); }); }
engine/lib/web_ui/test/ui/gradient_test.dart/0
{ "file_path": "engine/lib/web_ui/test/ui/gradient_test.dart", "repo_id": "engine", "token_count": 1026 }
299
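The test above fixes the focal-point rules for Gradient.radial: the focal point may coincide with the center only while focalRadius stays 0.0, because center == focal with a non-zero focal radius would divide by zero on the Skia side. A sketch of a valid two-point (conical) configuration that stays clear of that case; the offsets, radius, and colors are arbitrary:

import 'package:ui/ui.dart';

Gradient makeConicalGradient() {
  return Gradient.radial(
    const Offset(50, 50), // center
    40.0, // radius
    const <Color>[Color(0xFFFFFFFF), Color(0xFF000000)],
    const <double>[0.0, 1.0],
    TileMode.clamp,
    null, // no matrix
    const Offset(30, 30), // focal point distinct from the center
    5.0, // non-zero focal radius is fine because focal != center
  );
}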
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/ui.dart' as ui; import '../common/test_initialization.dart'; void main() { internalBootstrapBrowserTest(() => testMain); } Future<void> testMain() async { setUpUnitTests( emulateTesterEnvironment: false, setUpTestViewDimensions: false, ); test('blanks are equal to each other', () { final ui.TextStyle a = ui.TextStyle(); final ui.TextStyle b = ui.TextStyle(); expect(a, b); expect(a.hashCode, b.hashCode); }); test('each property individually equal', () { for (final String property in _populatorsA.keys) { final _TextStylePropertyPopulator populator = _populatorsA[property]!; final _TestTextStyleBuilder aBuilder = _TestTextStyleBuilder(); populator(aBuilder); final ui.TextStyle a = aBuilder.build(); final _TestTextStyleBuilder bBuilder = _TestTextStyleBuilder(); populator(bBuilder); final ui.TextStyle b = bBuilder.build(); expect(reason: '$property property is equal', a, b); expect(reason: '$property hashCode is equal', a.hashCode, b.hashCode); } }); test('each property individually not equal', () { for (final String property in _populatorsA.keys) { final _TextStylePropertyPopulator populatorA = _populatorsA[property]!; final _TestTextStyleBuilder aBuilder = _TestTextStyleBuilder(); populatorA(aBuilder); final ui.TextStyle a = aBuilder.build(); final _TextStylePropertyPopulator populatorB = _populatorsB[property]!; final _TestTextStyleBuilder bBuilder = _TestTextStyleBuilder(); populatorB(bBuilder); final ui.TextStyle b = bBuilder.build(); expect(reason: '$property property is not equal', a, isNot(b)); expect(reason: '$property hashCode is not equal', a.hashCode, isNot(b.hashCode)); } }); // `color` and `foreground` cannot be used at the same time, so each test skips // one or the other to be able to test all variations. 
for (final String skipProperty in const <String>['color', 'foreground']) { test('all properties (except $skipProperty) altogether equal', () { final _TestTextStyleBuilder aBuilder = _TestTextStyleBuilder(); final _TestTextStyleBuilder bBuilder = _TestTextStyleBuilder(); for (final String property in _populatorsA.keys) { if (property == skipProperty) { continue; } final _TextStylePropertyPopulator populator = _populatorsA[property]!; populator(aBuilder); populator(bBuilder); } final ui.TextStyle a = aBuilder.build(); final ui.TextStyle b = bBuilder.build(); expect(a, b); expect(a.hashCode, b.hashCode); }); test('all properties (except $skipProperty) altogether not equal', () { final _TestTextStyleBuilder aBuilder = _TestTextStyleBuilder(); final _TestTextStyleBuilder bBuilder = _TestTextStyleBuilder(); for (final String property in _populatorsA.keys) { if (property == skipProperty) { continue; } final _TextStylePropertyPopulator populatorA = _populatorsA[property]!; populatorA(aBuilder); final _TextStylePropertyPopulator populatorB = _populatorsB[property]!; populatorB(bBuilder); } final ui.TextStyle a = aBuilder.build(); final ui.TextStyle b = bBuilder.build(); expect(a, isNot(b)); expect(a.hashCode, isNot(b.hashCode)); }); } test('toString() with color', () { final _TestTextStyleBuilder builder = _TestTextStyleBuilder(); for (final String property in _populatorsA.keys) { if (property == 'foreground') { continue; } final _TextStylePropertyPopulator populator = _populatorsA[property]!; populator(builder); } final ui.TextStyle style = builder.build(); expect( style.toString(), 'TextStyle(' 'color: Color(0xff000000), ' 'decoration: TextDecoration.none, ' 'decorationColor: Color(0xffaa0000), ' 'decorationStyle: TextDecorationStyle.solid, ' 'decorationThickness: ${1.0}, ' 'fontWeight: FontWeight.w400, ' 'fontStyle: FontStyle.normal, ' 'textBaseline: TextBaseline.alphabetic, ' 'fontFamily: Arial, ' 'fontFamilyFallback: [Roboto], ' 'fontSize: 12.0, ' 'letterSpacing: 1.2x, ' 'wordSpacing: 2.3x, ' 'height: 13.0x, ' 'leadingDistribution: TextLeadingDistribution.proportional, ' 'locale: en_US, ' 'background: Paint(), ' 'foreground: unspecified, ' 'shadows: [TextShadow(Color(0xff000000), Offset(0.0, 0.0), ${0.0})], ' "fontFeatures: [FontFeature('case', 1)], " "fontVariations: [FontVariation('ital', 0.1)]" ')', ); }); test('toString() with foreground', () { final _TestTextStyleBuilder builder = _TestTextStyleBuilder(); for (final String property in _populatorsA.keys) { if (property == 'color') { continue; } final _TextStylePropertyPopulator populator = _populatorsA[property]!; populator(builder); } final ui.TextStyle style = builder.build(); expect( style.toString(), 'TextStyle(' 'color: unspecified, ' 'decoration: TextDecoration.none, ' 'decorationColor: Color(0xffaa0000), ' 'decorationStyle: TextDecorationStyle.solid, ' 'decorationThickness: ${1.0}, ' 'fontWeight: FontWeight.w400, ' 'fontStyle: FontStyle.normal, ' 'textBaseline: TextBaseline.alphabetic, ' 'fontFamily: Arial, ' 'fontFamilyFallback: [Roboto], ' 'fontSize: 12.0, ' 'letterSpacing: 1.2x, ' 'wordSpacing: 2.3x, ' 'height: 13.0x, ' 'leadingDistribution: TextLeadingDistribution.proportional, ' 'locale: en_US, ' 'background: Paint(), ' 'foreground: Paint(), ' 'shadows: [TextShadow(Color(0xff000000), Offset(0.0, 0.0), ${0.0})], ' "fontFeatures: [FontFeature('case', 1)], " "fontVariations: [FontVariation('ital', 0.1)]" ')', ); }); } typedef _TextStylePropertyPopulator = void Function(_TestTextStyleBuilder builder); // Paint equality is based on 
identity, so all the paints below are different, // even though they express the same paint. final ui.Paint _backgroundA = ui.Paint(); final ui.Paint _foregroundA = ui.Paint(); final ui.Paint _backgroundB = ui.Paint(); final ui.Paint _foregroundB = ui.Paint(); // Intentionally do not use const List expressions to make sure Object.hashAll is used to compute hashCode final Map<String, _TextStylePropertyPopulator> _populatorsA = <String, _TextStylePropertyPopulator>{ 'color': (_TestTextStyleBuilder builder) { builder.color = const ui.Color(0xff000000); }, 'decoration': (_TestTextStyleBuilder builder) { builder.decoration = ui.TextDecoration.none; }, 'decorationColor': (_TestTextStyleBuilder builder) { builder.decorationColor = const ui.Color(0xffaa0000); }, 'decorationStyle': (_TestTextStyleBuilder builder) { builder.decorationStyle = ui.TextDecorationStyle.solid; }, 'decorationThickness': (_TestTextStyleBuilder builder) { builder.decorationThickness = 1.0; }, 'fontWeight': (_TestTextStyleBuilder builder) { builder.fontWeight = ui.FontWeight.w400; }, 'fontStyle': (_TestTextStyleBuilder builder) { builder.fontStyle = ui.FontStyle.normal; }, 'textBaseline': (_TestTextStyleBuilder builder) { builder.textBaseline = ui.TextBaseline.alphabetic; }, 'fontFamily': (_TestTextStyleBuilder builder) { builder.fontFamily = 'Arial'; }, 'fontFamilyFallback': (_TestTextStyleBuilder builder) { builder.fontFamilyFallback = <String>['Roboto']; }, 'fontSize': (_TestTextStyleBuilder builder) { builder.fontSize = 12; }, 'letterSpacing': (_TestTextStyleBuilder builder) { builder.letterSpacing = 1.2; }, 'wordSpacing': (_TestTextStyleBuilder builder) { builder.wordSpacing = 2.3; }, 'height': (_TestTextStyleBuilder builder) { builder.height = 13; }, 'leadingDistribution': (_TestTextStyleBuilder builder) { builder.leadingDistribution = ui.TextLeadingDistribution.proportional; }, 'locale': (_TestTextStyleBuilder builder) { builder.locale = const ui.Locale('en', 'US'); }, 'background': (_TestTextStyleBuilder builder) { builder.background = _backgroundA; }, 'foreground': (_TestTextStyleBuilder builder) { builder.foreground = _foregroundA; }, 'shadows': (_TestTextStyleBuilder builder) { builder.shadows = <ui.Shadow>[const ui.Shadow()]; }, 'fontFeatures': (_TestTextStyleBuilder builder) { builder.fontFeatures = <ui.FontFeature>[const ui.FontFeature.caseSensitiveForms()]; }, 'fontVariations': (_TestTextStyleBuilder builder) { builder.fontVariations = <ui.FontVariation>[ const ui.FontVariation.italic(0.1)]; }, }; // Intentionally do not use const List expressions to make sure Object.hashAll is used to compute hashCode final Map<String, _TextStylePropertyPopulator> _populatorsB = <String, _TextStylePropertyPopulator>{ 'color': (_TestTextStyleBuilder builder) { builder.color = const ui.Color(0xffbb0000); }, 'decoration': (_TestTextStyleBuilder builder) { builder.decoration = ui.TextDecoration.lineThrough; }, 'decorationColor': (_TestTextStyleBuilder builder) { builder.decorationColor = const ui.Color(0xffcc0000); }, 'decorationStyle': (_TestTextStyleBuilder builder) { builder.decorationStyle = ui.TextDecorationStyle.dotted; }, 'decorationThickness': (_TestTextStyleBuilder builder) { builder.decorationThickness = 1.4; }, 'fontWeight': (_TestTextStyleBuilder builder) { builder.fontWeight = ui.FontWeight.w600; }, 'fontStyle': (_TestTextStyleBuilder builder) { builder.fontStyle = ui.FontStyle.italic; }, 'textBaseline': (_TestTextStyleBuilder builder) { builder.textBaseline = ui.TextBaseline.ideographic; }, 'fontFamily': 
(_TestTextStyleBuilder builder) { builder.fontFamily = 'Noto'; }, 'fontFamilyFallback': (_TestTextStyleBuilder builder) { builder.fontFamilyFallback = <String>['Verdana']; }, 'fontSize': (_TestTextStyleBuilder builder) { builder.fontSize = 12.1; }, 'letterSpacing': (_TestTextStyleBuilder builder) { builder.letterSpacing = 1.25; }, 'wordSpacing': (_TestTextStyleBuilder builder) { builder.wordSpacing = 2.35; }, 'height': (_TestTextStyleBuilder builder) { builder.height = 13.1; }, 'leadingDistribution': (_TestTextStyleBuilder builder) { builder.leadingDistribution = ui.TextLeadingDistribution.even; }, 'locale': (_TestTextStyleBuilder builder) { builder.locale = const ui.Locale('fr', 'CA'); }, 'background': (_TestTextStyleBuilder builder) { builder.background = _backgroundB; }, 'foreground': (_TestTextStyleBuilder builder) { builder.foreground = _foregroundB; }, 'shadows': (_TestTextStyleBuilder builder) { builder.shadows = <ui.Shadow>[const ui.Shadow(blurRadius: 5)]; }, 'fontFeatures': (_TestTextStyleBuilder builder) { builder.fontFeatures = <ui.FontFeature>[const ui.FontFeature.alternative(2)]; }, 'fontVariations': (_TestTextStyleBuilder builder) { builder.fontVariations = <ui.FontVariation>[ const ui.FontVariation.italic(0.4)]; }, }; class _TestTextStyleBuilder { ui.Color? color; ui.TextDecoration? decoration; ui.Color? decorationColor; ui.TextDecorationStyle? decorationStyle; double? decorationThickness; ui.FontWeight? fontWeight; ui.FontStyle? fontStyle; ui.TextBaseline? textBaseline; String? fontFamily; List<String>? fontFamilyFallback; double? fontSize; double? letterSpacing; double? wordSpacing; double? height; ui.TextLeadingDistribution? leadingDistribution; ui.Locale? locale; ui.Paint? background; ui.Paint? foreground; List<ui.Shadow>? shadows; List<ui.FontFeature>? fontFeatures; List<ui.FontVariation>? fontVariations; ui.TextStyle build() { return ui.TextStyle( color: color, decoration: decoration, decorationColor: decorationColor, decorationStyle: decorationStyle, decorationThickness: decorationThickness, fontWeight: fontWeight, fontStyle: fontStyle, textBaseline: textBaseline, fontFamily: fontFamily, fontFamilyFallback: fontFamilyFallback, fontSize: fontSize, letterSpacing: letterSpacing, wordSpacing: wordSpacing, height: height, leadingDistribution: leadingDistribution, locale: locale, background: background, foreground: foreground, shadows: shadows, fontFeatures: fontFeatures, fontVariations: fontVariations, ); } }
engine/lib/web_ui/test/ui/text_style_test.dart/0
{ "file_path": "engine/lib/web_ui/test/ui/text_style_test.dart", "repo_id": "engine", "token_count": 4444 }
300
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef FLUTTER_RUNTIME_DART_SERVICE_ISOLATE_H_
#define FLUTTER_RUNTIME_DART_SERVICE_ISOLATE_H_

#include <functional>
#include <mutex>
#include <set>
#include <string>

#include "flutter/fml/compiler_specific.h"
#include "third_party/dart/runtime/include/dart_api.h"

namespace flutter {

//------------------------------------------------------------------------------
/// @brief      Utility methods for interacting with the DartVM managed service
///             isolate present in debug and profile runtime modes.
///
class DartServiceIsolate {
 public:
  //----------------------------------------------------------------------------
  /// The handle used to refer to callbacks registered with the service
  /// isolate.
  ///
  using CallbackHandle = ptrdiff_t;

  //----------------------------------------------------------------------------
  /// A callback made by the Dart VM when the VM Service is ready. The argument
  /// indicates the VM Service URI.
  ///
  using DartVMServiceServerStateCallback =
      std::function<void(const std::string& vm_service_uri)>;

  //----------------------------------------------------------------------------
  /// @brief      Start the service isolate. This call may only be made in the
  ///             Dart VM initiated isolate creation callback. It is only valid
  ///             to make this call when the VM explicitly requests the
  ///             creation of the service isolate. The VM does this by
  ///             specifying the script URI to be
  ///             `DART_VM_SERVICE_ISOLATE_NAME`. The isolate to be designated
  ///             as the service isolate must already be created (but not
  ///             running) when this call is made.
  ///
  /// @param[in]  server_ip                    The service protocol IP address.
  /// @param[in]  server_port                  The service protocol port.
  /// @param[in]  embedder_tag_handler         The library tag handler.
  /// @param[in]  disable_origin_check         Whether websocket origin checks
  ///                                          should be disabled.
  /// @param[in]  disable_service_auth_codes   Whether service auth codes
  ///                                          should be disabled.
  /// @param[in]  enable_service_port_fallback Whether fallback to port 0
  ///                                          should be enabled when the bind
  ///                                          fails.
  /// @param      error                        The error when this method
  ///                                          returns false. This string must
  ///                                          be freed by the caller using
  ///                                          `free`.
  ///
  /// @return     If the startup was successful. Refer to the `error` for
  ///             details on failure.
  ///
  static bool Startup(const std::string& server_ip,
                      intptr_t server_port,
                      Dart_LibraryTagHandler embedder_tag_handler,
                      bool disable_origin_check,
                      bool disable_service_auth_codes,
                      bool enable_service_port_fallback,
                      char** error);

  //----------------------------------------------------------------------------
  /// @brief      Add a callback that will get invoked when the VM Service
  ///             starts up. If the VM Service has already started before this
  ///             call is made, the callback is invoked immediately.
  ///
  ///             This method is thread safe.
  ///
  /// @param[in]  callback  The callback with information about the VM Service.
  ///
  /// @return     A handle for the callback that can be used later in
  ///             `RemoveServerStatusCallback`.
  ///
  [[nodiscard]] static CallbackHandle AddServerStatusCallback(
      const DartVMServiceServerStateCallback& callback);

  //----------------------------------------------------------------------------
  /// @brief      Removes a callback previously registered via
  ///             `AddServerStatusCallback`.
  ///
  ///             This method is thread safe.
  ///
  /// @param[in]  handle  The handle returned by `AddServerStatusCallback`.
  ///
  /// @return     If the callback was unregistered. This may fail if there was
  ///             no such callback with that handle.
  ///
  static bool RemoveServerStatusCallback(CallbackHandle handle);

 private:
  // Native entries.
  static void NotifyServerState(Dart_NativeArguments args);
  static void Shutdown(Dart_NativeArguments args);

  static std::mutex callbacks_mutex_;
  static std::set<std::unique_ptr<DartVMServiceServerStateCallback>>
      callbacks_;
};

}  // namespace flutter

#endif  // FLUTTER_RUNTIME_DART_SERVICE_ISOLATE_H_
engine/runtime/dart_service_isolate.h/0
{ "file_path": "engine/runtime/dart_service_isolate.h", "repo_id": "engine", "token_count": 1764 }
301
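The header above only declares the callback surface of the service isolate, so the following is a minimal, hedged sketch of how engine-side code might observe the VM Service URI. It assumes the Flutter engine headers are on the include path, and the logging is only a stand-in for whatever a real caller would do with the URI; this is not an officially documented embedder entry point.

// Illustrative only: registers a VM Service status callback and removes it
// later. The functions here are hypothetical helpers, not engine API.
#include <iostream>
#include <string>

#include "flutter/runtime/dart_service_isolate.h"

namespace {

flutter::DartServiceIsolate::CallbackHandle ObserveVMService() {
  // Per the header comment, the callback fires with the VM Service URI once
  // the service isolate is up, or immediately if it is already running.
  return flutter::DartServiceIsolate::AddServerStatusCallback(
      [](const std::string& vm_service_uri) {
        std::cout << "VM Service listening at " << vm_service_uri << std::endl;
      });
}

void StopObservingVMService(
    flutter::DartServiceIsolate::CallbackHandle handle) {
  // RemoveServerStatusCallback returns false if no callback with this handle
  // was ever registered.
  if (!flutter::DartServiceIsolate::RemoveServerStatusCallback(handle)) {
    std::cerr << "No callback registered for this handle." << std::endl;
  }
}

}  // namespace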
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef FLUTTER_RUNTIME_EMBEDDER_RESOURCES_H_
#define FLUTTER_RUNTIME_EMBEDDER_RESOURCES_H_

namespace flutter {
namespace runtime {

struct ResourcesEntry {
  const char* path_;
  const char* resource_;
  int length_;
};

}  // namespace runtime
}  // namespace flutter

namespace flutter {

class EmbedderResources {
 public:
  explicit EmbedderResources(runtime::ResourcesEntry* resources_table);

  static const int kNoSuchInstance;

  int ResourceLookup(const char* path, const char** resource);
  const char* Path(int idx);

 private:
  runtime::ResourcesEntry* At(int idx);

  runtime::ResourcesEntry* resources_table_;
};

}  // namespace flutter

#endif  // FLUTTER_RUNTIME_EMBEDDER_RESOURCES_H_
engine/runtime/embedder_resources.h/0
{ "file_path": "engine/runtime/embedder_resources.h", "repo_id": "engine", "token_count": 283 }
302
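EmbedderResources is only declared above, so this sketch shows one plausible way to feed it a resource table and interpret a lookup. The null-path sentinel terminating the table and the reading of ResourceLookup's return value as the resource length (with kNoSuchInstance on a miss) are assumptions inferred from the declaration, not guarantees from the header.

// Hypothetical usage of EmbedderResources; table layout and return-value
// semantics are assumptions, as noted above.
#include <cstdio>

#include "flutter/runtime/embedder_resources.h"

namespace {

// A static table of embedded resources. The trailing null entry is assumed to
// act as the end-of-table sentinel.
flutter::runtime::ResourcesEntry kResources[] = {
    {"hello.txt", "Hello, embedder!", 16},
    {nullptr, nullptr, 0},
};

void PrintResource(const char* path) {
  flutter::EmbedderResources resources(kResources);
  const char* resource = nullptr;
  int length = resources.ResourceLookup(path, &resource);
  if (length == flutter::EmbedderResources::kNoSuchInstance) {
    std::printf("No resource at %s\n", path);
    return;
  }
  std::printf("%s -> %.*s\n", path, length, resource);
}

}  // namespace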
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_RUNTIME_RUNTIME_CONTROLLER_H_ #define FLUTTER_RUNTIME_RUNTIME_CONTROLLER_H_ #include <memory> #include <vector> #include "flutter/assets/asset_manager.h" #include "flutter/common/task_runners.h" #include "flutter/flow/layers/layer_tree.h" #include "flutter/fml/macros.h" #include "flutter/fml/mapping.h" #include "flutter/lib/ui/io_manager.h" #include "flutter/lib/ui/painting/image_generator_registry.h" #include "flutter/lib/ui/text/font_collection.h" #include "flutter/lib/ui/ui_dart_state.h" #include "flutter/lib/ui/volatile_path_tracker.h" #include "flutter/lib/ui/window/platform_configuration.h" #include "flutter/lib/ui/window/pointer_data_packet.h" #include "flutter/runtime/dart_vm.h" #include "flutter/runtime/platform_data.h" #include "flutter/runtime/platform_isolate_manager.h" #include "rapidjson/document.h" #include "rapidjson/stringbuffer.h" namespace flutter { class Scene; class RuntimeDelegate; class View; class Window; //------------------------------------------------------------------------------ /// Represents an instance of a running root isolate with window bindings. In /// normal operation, a single instance of this object is owned by the engine /// per shell. This object may only be created, used, and collected on the UI /// task runner. Window state queried by the root isolate is stored by this /// object. In cold-restart scenarios, the engine may collect this before /// installing a new runtime controller in its place. The Clone method may be /// used by the engine to copy the currently accumulated window state so it can /// be referenced by the new runtime controller. /// /// When `RuntimeController` is created, it takes some time before the root /// isolate becomes ready. Operation during this gap is stored by /// `RuntimeController` and flushed to the Dart VM when the isolate becomes /// ready before the entrypoint function. See `PlatformData`. /// class RuntimeController : public PlatformConfigurationClient { public: //---------------------------------------------------------------------------- /// @brief Creates a new instance of a runtime controller. This is /// usually only done by the engine instance associated with the /// shell. /// /// @param client The runtime delegate. This is /// usually the `Engine` instance. /// @param vm A reference to a running Dart VM. /// The runtime controller must be /// collected before the VM is /// destroyed (this order is /// guaranteed by the shell). /// @param[in] idle_notification_callback The idle notification callback. /// This allows callers to run native /// code in isolate scope when the VM /// is about to be notified that the /// engine is going to be idle. /// @param[in] platform_data The window data (if exists). /// @param[in] isolate_create_callback The isolate create callback. This /// allows callers to run native code /// in isolate scope on the UI task /// runner as soon as the root isolate /// has been created. /// @param[in] isolate_shutdown_callback The isolate shutdown callback. /// This allows callers to run native /// code in isolate scoped on the UI /// task runner just as the root /// isolate is about to be torn down. /// @param[in] persistent_isolate_data Unstructured persistent read-only /// data that the root isolate can /// access in a synchronous manner. 
/// @param[in] context Engine-owned state which is /// accessed by the root dart isolate. /// RuntimeController( RuntimeDelegate& p_client, DartVM* vm, fml::RefPtr<const DartSnapshot> p_isolate_snapshot, const std::function<void(int64_t)>& idle_notification_callback, const PlatformData& platform_data, const fml::closure& isolate_create_callback, const fml::closure& isolate_shutdown_callback, std::shared_ptr<const fml::Mapping> p_persistent_isolate_data, const UIDartState::Context& context); //---------------------------------------------------------------------------- /// @brief Create a RuntimeController that shares as many resources as /// possible with the calling RuntimeController such that together /// they occupy less memory. /// @return A RuntimeController with a running isolate. /// @see RuntimeController::RuntimeController /// std::unique_ptr<RuntimeController> Spawn( RuntimeDelegate& p_client, const std::string& advisory_script_uri, const std::string& advisory_script_entrypoint, const std::function<void(int64_t)>& idle_notification_callback, const fml::closure& isolate_create_callback, const fml::closure& isolate_shutdown_callback, const std::shared_ptr<const fml::Mapping>& persistent_isolate_data, fml::WeakPtr<IOManager> io_manager, fml::WeakPtr<ImageDecoder> image_decoder, fml::WeakPtr<ImageGeneratorRegistry> image_generator_registry, fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate) const; // |PlatformConfigurationClient| ~RuntimeController() override; //---------------------------------------------------------------------------- /// @brief Launches the isolate using the window data associated with /// this runtime controller. Before this call, the Dart isolate /// has not been initialized. On successful return, the caller can /// assume that the isolate is in the /// `DartIsolate::Phase::Running` phase. /// /// This call will fail if a root isolate is already running. To /// re-create an isolate with the window data associated with this /// runtime controller, `Clone` this runtime controller and /// Launch an isolate in that runtime controller instead. /// /// @param[in] settings The per engine instance settings. /// @param[in] root_isolate_create_callback A callback invoked before the /// root isolate has launched the Dart /// program, but after it has been /// created. This is called without /// isolate scope, and after any root /// isolate callback in the settings. /// @param[in] dart_entrypoint The dart entrypoint. If /// `std::nullopt` or empty, `main` will /// be attempted. /// @param[in] dart_entrypoint_library The dart entrypoint library. If /// `std::nullopt` or empty, the core /// library will be attempted. /// @param[in] dart_entrypoint_args Arguments passed as a List<String> /// to Dart's entrypoint function. /// @param[in] isolate_configuration The isolate configuration /// /// @return If the isolate could be launched and guided to the /// `DartIsolate::Phase::Running` phase. /// [[nodiscard]] bool LaunchRootIsolate( const Settings& settings, const fml::closure& root_isolate_create_callback, std::optional<std::string> dart_entrypoint, std::optional<std::string> dart_entrypoint_library, const std::vector<std::string>& dart_entrypoint_args, std::unique_ptr<IsolateConfiguration> isolate_configuration); //---------------------------------------------------------------------------- /// @brief Clone the runtime controller. 
Launching an isolate with a /// cloned runtime controller will use the same snapshots and /// copies all window data to the new instance. This is usually /// only used in the debug runtime mode to support the /// cold-restart scenario. /// /// @return A clone of the existing runtime controller. /// std::unique_ptr<RuntimeController> Clone() const; //---------------------------------------------------------------------------- /// @brief Notify the isolate that a new view is available. /// /// A view must be added before other methods can refer to it, /// including the implicit view. Adding a view that already exists /// triggers an assertion. /// /// @param[in] view_id The ID of the new view. /// @param[in] viewport_metrics The initial viewport metrics for the view. /// bool AddView(int64_t view_id, const ViewportMetrics& view_metrics); //---------------------------------------------------------------------------- /// @brief Notify the isolate that a view is no longer available. /// /// Removing a view that does not exist triggers an assertion. /// /// The implicit view (kFlutterImplicitViewId) should never be /// removed. Doing so triggers an assertion. /// /// @param[in] view_id The ID of the view. /// bool RemoveView(int64_t view_id); //---------------------------------------------------------------------------- /// @brief Forward the specified viewport metrics to the running isolate. /// If the isolate is not running, these metrics will be saved and /// flushed to the isolate when it starts. /// /// @param[in] view_id The ID for the view that `metrics` describes. /// @param[in] metrics The window's viewport metrics. /// /// @return If the window metrics were forwarded to the running isolate. /// bool SetViewportMetrics(int64_t view_id, const ViewportMetrics& metrics); //---------------------------------------------------------------------------- /// @brief Forward the specified display metrics to the running isolate. /// If the isolate is not running, these metrics will be saved and /// flushed to the isolate when it starts. /// /// @param[in] displays The available displays. bool SetDisplays(const std::vector<DisplayData>& displays); //---------------------------------------------------------------------------- /// @brief Forward the specified locale data to the running isolate. If /// the isolate is not running, this data will be saved and /// flushed to the isolate when it starts running. /// /// @deprecated The persistent isolate data must be used for this purpose /// instead. /// /// @param[in] locale_data The locale data. This should consist of groups of /// 4 strings, each group representing a single locale. /// /// @return If the locale data was forwarded to the running isolate. /// bool SetLocales(const std::vector<std::string>& locale_data); //---------------------------------------------------------------------------- /// @brief Forward the user settings data to the running isolate. If the /// isolate is not running, this data will be saved and flushed to /// the isolate when it starts running. /// /// @deprecated The persistent isolate data must be used for this purpose /// instead. /// /// @param[in] data The user settings data. /// /// @return If the user settings data was forwarded to the running /// isolate. /// bool SetUserSettingsData(const std::string& data); //---------------------------------------------------------------------------- /// @brief Forward the initial lifecycle state data to the running /// isolate. 
If the isolate is not running, this data will be /// saved and flushed to the isolate when it starts running. /// After the isolate starts running, the current lifecycle /// state is pushed to it via the "flutter/lifecycle" channel. /// /// @deprecated The persistent isolate data must be used for this purpose /// instead. /// /// @param[in] data The lifecycle state data. /// /// @return If the lifecycle state data was forwarded to the running /// isolate. /// bool SetInitialLifecycleState(const std::string& data); //---------------------------------------------------------------------------- /// @brief Notifies the running isolate about whether the semantics tree /// should be generated or not. If the isolate is not running, /// this preference will be saved and flushed to the isolate when /// it starts running. /// /// @param[in] enabled Indicates whether to generate the semantics tree. /// /// @return If the semantics tree generation preference was forwarded to /// the running isolate. /// bool SetSemanticsEnabled(bool enabled); //---------------------------------------------------------------------------- /// @brief Forward the preference of accessibility features that must be /// enabled in the semantics tree to the running isolate. If the /// isolate is not running, this data will be saved and flushed to /// the isolate when it starts running. /// /// @param[in] flags The accessibility features that must be generated in /// the semantics tree. /// /// @return If the preference of accessibility features was forwarded to /// the running isolate. /// bool SetAccessibilityFeatures(int32_t flags); //---------------------------------------------------------------------------- /// @brief Notifies the running isolate that it should start generating a /// new frame. /// /// @see `Engine::BeginFrame` for more context. /// /// @param[in] frame_time The point at which the current frame interval /// began. May be used by animation interpolators, /// physics simulations, etc. /// /// @return If notification to begin frame rendering was delivered to the /// running isolate. /// bool BeginFrame(fml::TimePoint frame_time, uint64_t frame_number); //---------------------------------------------------------------------------- /// @brief Dart code cannot fully measure the time it takes for a /// specific frame to be rendered. This is because Dart code only /// runs on the UI task runner. That is only a small part of the /// overall frame workload. The raster task runner frame workload /// is executed on a thread where Dart code cannot run (and hence /// instrument). Besides, due to the pipelined nature of rendering /// in Flutter, there may be multiple frame workloads being /// processed at any given time. However, for non-Timeline based /// profiling, it is useful for trace collection and processing to /// happen in Dart. To do this, the raster task runner frame /// workloads need to be instrumented separately. After a set /// number of these profiles have been gathered, they need to be /// reported back to Dart code. The engine reports this extra /// instrumentation information back to Dart code running on the /// engine by invoking this method at predefined intervals. /// /// @see `Engine::ReportTimings`, `FrameTiming` /// /// @param[in] timings Collection of `FrameTiming::kCount` * `n` timestamps /// for `n` frames whose timings have not been reported /// yet. A collection of integers is reported here for /// easier conversions to Dart objects. 
The timestamps /// are measured against the system monotonic clock /// measured in microseconds. /// bool ReportTimings(std::vector<int64_t> timings); //---------------------------------------------------------------------------- /// @brief Notify the Dart VM that no frame workloads are expected on the /// UI task runner till the specified deadline. The VM uses this /// opportunity to perform garbage collection operations is a /// manner that interferes as little as possible with frame /// rendering. /// /// NotifyIdle is advisory. The VM may or may not run a garbage collection /// when this is called, and will eventually perform garbage collections even /// if it is not called or it is called with insufficient deadlines. /// /// The garbage collection mechanism and its thresholds are internal /// implementation details and absolutely no guarantees are made about the /// threshold discussed below. This discussion is also an oversimplification /// but hopefully serves to calibrate expectations about GC behavior: /// * When the Dart VM and its root isolate are initialized, the memory /// consumed upto that point are treated as a baseline. /// * A fixed percentage of the memory consumed (~20%) over the baseline is /// treated as the hard threshold. /// * The memory in play is divided into old space and new space. The new /// space is typically very small and fills up rapidly. /// * The baseline plus the threshold is considered the old space while the /// small new space is a separate region (typically a few pages). /// * The total old space size minus the max new space size is treated as the /// soft threshold. /// * In a world where there is no call to NotifyIdle, when the total /// allocation exceeds the soft threshold, a concurrent mark is initiated in /// the VM. There is a “small” pause that occurs when the concurrent mark is /// initiated and another pause when the mark concludes and a sweep is /// initiated. /// * If the total allocations exceeds the hard threshold, a “big” /// stop-the-world pause is initiated. /// * If after either the sweep after the concurrent mark, or, the /// stop-the-world pause, the consumption returns to be below the soft /// threshold, the dance begins anew. /// * If after both the “small” and “big” pauses, memory usage is still over /// the hard threshold, i.e, the objects are still reachable, that amount of /// memory is treated as the new baseline and a fixed percentage of the new /// baseline over the new baseline is now the new hard threshold. /// * Updating the baseline will continue till memory for the updated old /// space can be allocated from the operating system. These allocations will /// typically fail due to address space exhaustion on 32-bit systems and /// page table exhaustion on 64-bit systems. /// * NotifyIdle initiates the concurrent mark preemptively. The deadline is /// used by the VM to determine if the corresponding sweep can be performed /// within the deadline. This way, jank due to “small” pauses can be /// ameliorated. /// * There is no ability to stop a “big” pause on reaching the hard threshold /// in the old space. The best you can do is release (by making them /// unreachable) objects eagerly so that the are marked as unreachable in /// the concurrent mark initiated by either reaching the soft threshold or /// an explicit NotifyIdle. /// * If you are running out of memory, its because too many large objects /// were allocation and remained reachable such that the old space kept /// growing till it could grow no more. 
/// * At the edges of allocation thresholds, failures can occur gracefully if /// the instigating allocation was made in the Dart VM or rather gracelessly /// if the allocation is made by some native component. /// /// @see `Dart_TimelineGetMicros` /// /// @bug The `deadline` argument must be converted to `std::chrono` /// instead of a raw integer. /// /// @param[in] deadline The deadline is used by the VM to determine if the /// corresponding sweep can be performed within the deadline. /// /// @return If the idle notification was forwarded to the running isolate. /// virtual bool NotifyIdle(fml::TimeDelta deadline); //---------------------------------------------------------------------------- /// @brief Notify the Dart VM that the attached flutter view has been /// destroyed. This gives the Dart VM to perform some cleanup /// activities e.g: perform garbage collection to free up any /// unused memory. /// /// NotifyDestroyed is advisory. The VM may or may not perform any clean up /// activities. /// virtual bool NotifyDestroyed(); //---------------------------------------------------------------------------- /// @brief Returns if the root isolate is running. The isolate must be /// transitioned to the running phase manually. The isolate can /// stop running if it terminates execution on its own. /// /// @return True if root isolate running, False otherwise. /// virtual bool IsRootIsolateRunning(); //---------------------------------------------------------------------------- /// @brief Dispatch the specified platform message to running root /// isolate. /// /// @param[in] message The message to dispatch to the isolate. /// /// @return If the message was dispatched to the running root isolate. /// This may fail is an isolate is not running. /// virtual bool DispatchPlatformMessage( std::unique_ptr<PlatformMessage> message); //---------------------------------------------------------------------------- /// @brief Dispatch the specified pointer data message to the running /// root isolate. /// /// @param[in] packet The pointer data message to dispatch to the isolate. /// /// @return If the pointer data message was dispatched. This may fail is /// an isolate is not running. /// bool DispatchPointerDataPacket(const PointerDataPacket& packet); //---------------------------------------------------------------------------- /// @brief Dispatch the semantics action to the specified accessibility /// node. /// /// @param[in] node_id The identified of the accessibility node. /// @param[in] action The semantics action to perform on the specified /// accessibility node. /// @param[in] args Optional data that applies to the specified action. /// /// @return If the semantics action was dispatched. This may fail if an /// isolate is not running. /// bool DispatchSemanticsAction(int32_t node_id, SemanticsAction action, fml::MallocMapping args); //---------------------------------------------------------------------------- /// @brief Gets the main port identifier of the root isolate. /// /// @return The main port identifier. If no root isolate is running, /// returns `ILLEGAL_PORT`. /// Dart_Port GetMainPort(); //---------------------------------------------------------------------------- /// @brief Gets the debug name of the root isolate. But default, the /// debug name of the isolate is derived from its advisory script /// URI, advisory main entrypoint and its main port name. 
For /// example, "main.dart$main-1234" where the script URI is /// "main.dart", the entrypoint is "main" and the port name /// "1234". Once launched, the isolate may re-christen itself /// using a name it selects via `setIsolateDebugName` in /// `window.dart`. This name is purely advisory and only used by /// instrumentation and reporting purposes. /// /// @return The debug name of the root isolate. /// std::string GetIsolateName(); //---------------------------------------------------------------------------- /// @brief Returns if the root isolate has any live receive ports. /// /// @return True if there are live receive ports, False otherwise. Return /// False if the root isolate is not running as well. /// bool HasLivePorts(); //---------------------------------------------------------------------------- /// @brief Get the last error encountered by the microtask queue. /// /// @return The last error encountered by the microtask queue. /// tonic::DartErrorHandleType GetLastError(); //---------------------------------------------------------------------------- /// @brief Get the service ID of the root isolate if the root isolate is /// running. /// /// @return The root isolate service id. /// std::optional<std::string> GetRootIsolateServiceID() const; //---------------------------------------------------------------------------- /// @brief Get the return code specified by the root isolate (if one is /// present). /// /// @return The root isolate return code if the isolate has specified one. /// std::optional<uint32_t> GetRootIsolateReturnCode(); //---------------------------------------------------------------------------- /// @brief Get an identifier that represents the Dart isolate group the /// root isolate is in. /// /// @return The root isolate group identifier, zero if one can't /// be established. uint64_t GetRootIsolateGroup() const; //-------------------------------------------------------------------------- /// @brief Loads the Dart shared library into the Dart VM. When the /// Dart library is loaded successfully, the Dart future /// returned by the originating loadLibrary() call completes. /// /// The Dart compiler may generate separate shared libraries /// files called 'loading units' when libraries are imported /// as deferred. Each of these shared libraries are identified /// by a unique loading unit id. Callers should open and resolve /// a SymbolMapping from the shared library. The Mappings should /// be moved into this method, as ownership will be assumed by the /// dart root isolate after successful loading and released after /// shutdown of the root isolate. The loading unit may not be /// used after isolate shutdown. If loading fails, the mappings /// will be released. /// /// This method is paired with a RequestDartDeferredLibrary /// invocation that provides the embedder with the loading unit id /// of the deferred library to load. /// /// /// @param[in] loading_unit_id The unique id of the deferred library's /// loading unit, as passed in by /// RequestDartDeferredLibrary. /// /// @param[in] snapshot_data Dart snapshot data of the loading unit's /// shared library. /// /// @param[in] snapshot_data Dart snapshot instructions of the loading /// unit's shared library. 
/// void LoadDartDeferredLibrary( intptr_t loading_unit_id, std::unique_ptr<const fml::Mapping> snapshot_data, std::unique_ptr<const fml::Mapping> snapshot_instructions); //-------------------------------------------------------------------------- /// @brief Indicates to the dart VM that the request to load a deferred /// library with the specified loading unit id has failed. /// /// The dart future returned by the initiating loadLibrary() call /// will complete with an error. /// /// @param[in] loading_unit_id The unique id of the deferred library's /// loading unit, as passed in by /// RequestDartDeferredLibrary. /// /// @param[in] error_message The error message that will appear in the /// dart Future. /// /// @param[in] transient A transient error is a failure due to /// temporary conditions such as no network. /// Transient errors allow the dart VM to /// re-request the same deferred library and /// loading_unit_id again. Non-transient /// errors are permanent and attempts to /// re-request the library will instantly /// complete with an error. virtual void LoadDartDeferredLibraryError(intptr_t loading_unit_id, const std::string error_message, bool transient); // |PlatformConfigurationClient| void RequestDartDeferredLibrary(intptr_t loading_unit_id) override; // |PlatformConfigurationClient| std::shared_ptr<const fml::Mapping> GetPersistentIsolateData() override; const fml::WeakPtr<IOManager>& GetIOManager() const { return context_.io_manager; } virtual DartVM* GetDartVM() const { return vm_; } const fml::RefPtr<const DartSnapshot>& GetIsolateSnapshot() const { return isolate_snapshot_; } const PlatformData& GetPlatformData() const { return platform_data_; } const fml::RefPtr<SkiaUnrefQueue>& GetSkiaUnrefQueue() const { return context_.unref_queue; } const fml::TaskRunnerAffineWeakPtr<SnapshotDelegate>& GetSnapshotDelegate() const { return context_.snapshot_delegate; } std::weak_ptr<const DartIsolate> GetRootIsolate() const { return root_isolate_; } std::shared_ptr<PlatformIsolateManager> GetPlatformIsolateManager() override { return platform_isolate_manager_; } //-------------------------------------------------------------------------- /// @brief Shuts down all registered platform isolates. Must be called /// from the platform thread. /// void ShutdownPlatformIsolates(); protected: /// Constructor for Mocks. RuntimeController(RuntimeDelegate& p_client, const TaskRunners& task_runners); private: struct Locale { Locale(std::string language_code_, std::string country_code_, std::string script_code_, std::string variant_code_); ~Locale(); std::string language_code; std::string country_code; std::string script_code; std::string variant_code; }; RuntimeDelegate& client_; DartVM* const vm_; fml::RefPtr<const DartSnapshot> isolate_snapshot_; std::function<void(int64_t)> idle_notification_callback_; PlatformData platform_data_; std::weak_ptr<DartIsolate> root_isolate_; std::weak_ptr<DartIsolate> spawning_isolate_; std::optional<uint32_t> root_isolate_return_code_; const fml::closure isolate_create_callback_; const fml::closure isolate_shutdown_callback_; std::shared_ptr<const fml::Mapping> persistent_isolate_data_; UIDartState::Context context_; std::shared_ptr<PlatformIsolateManager> platform_isolate_manager_ = std::shared_ptr<PlatformIsolateManager>(new PlatformIsolateManager()); bool has_flushed_runtime_state_ = false; // Tracks the views that have been called `Render` during a frame. 
// // If all views that have been registered by `AddView` have been called // `Render`, then the runtime controller notifies the client of the end of // frame immediately, allowing the client to submit the views to the pipeline // a bit earlier than having to wait for the end of `BeginFrame`. See also // `Animator::OnAllViewsRendered`. // // This mechanism fixes https://github.com/flutter/flutter/issues/144584 with // option 2 and // https://github.com/flutter/engine/pull/51186#issuecomment-1977820525 with // option a in most cases, except if there are multiple views and only part of // them are rendered. // TODO(dkwingsmt): Fix these problems for all cases. std::unordered_set<uint64_t> rendered_views_during_frame_; void MarkAsFrameBorder(); void CheckIfAllViewsRendered(); PlatformConfiguration* GetPlatformConfigurationIfAvailable(); bool FlushRuntimeStateToIsolate(); // |PlatformConfigurationClient| std::string DefaultRouteName() override; // |PlatformConfigurationClient| void ScheduleFrame() override; // |PlatformConfigurationClient| void EndWarmUpFrame() override; // |PlatformConfigurationClient| void Render(int64_t view_id, Scene* scene, double width, double height) override; // |PlatformConfigurationClient| void UpdateSemantics(SemanticsUpdate* update) override; // |PlatformConfigurationClient| void HandlePlatformMessage(std::unique_ptr<PlatformMessage> message) override; // |PlatformConfigurationClient| FontCollection& GetFontCollection() override; // |PlatformConfigurationClient| std::shared_ptr<AssetManager> GetAssetManager() override; // |PlatformConfigurationClient| void UpdateIsolateDescription(const std::string isolate_name, int64_t isolate_port) override; // |PlatformConfigurationClient| void SetNeedsReportTimings(bool value) override; // |PlatformConfigurationClient| std::unique_ptr<std::vector<std::string>> ComputePlatformResolvedLocale( const std::vector<std::string>& supported_locale_data) override; // |PlatformConfigurationClient| void SendChannelUpdate(std::string name, bool listening) override; // |PlatformConfigurationClient| double GetScaledFontSize(double unscaled_font_size, int configuration_id) const override; FML_DISALLOW_COPY_AND_ASSIGN(RuntimeController); }; } // namespace flutter #endif // FLUTTER_RUNTIME_RUNTIME_CONTROLLER_H_
engine/runtime/runtime_controller.h/0
{ "file_path": "engine/runtime/runtime_controller.h", "repo_id": "engine", "token_count": 11982 }
303
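The RuntimeController documentation above stresses that NotifyIdle and NotifyDestroyed are advisory hints to the Dart VM. The sketch below illustrates the calling pattern those comments imply for code that already owns a RuntimeController; the helper functions and the 4 ms leftover-budget threshold are illustrative assumptions, not engine API.

// Illustrative helpers around the advisory GC hints documented above. The
// controller must outlive these calls; the 4 ms figure is an arbitrary
// example, not a value taken from the engine.
#include "flutter/fml/time/time_delta.h"
#include "flutter/runtime/runtime_controller.h"

namespace {

// Give the VM a chance to run a concurrent mark/sweep while the UI thread is
// expected to stay idle until the next frame begins.
void HintIdleUntilNextFrame(flutter::RuntimeController& controller,
                            fml::TimeDelta time_left_in_frame) {
  if (time_left_in_frame > fml::TimeDelta::FromMilliseconds(4)) {
    // Advisory only: the VM may or may not collect within this deadline.
    controller.NotifyIdle(time_left_in_frame);
  }
}

// When the attached view goes away, let the VM reclaim what it can.
void HintViewDestroyed(flutter::RuntimeController& controller) {
  controller.NotifyDestroyed();  // Also advisory; cleanup is best-effort.
}

}  // namespace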
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef FLUTTER_SHELL_COMMON_BASE64_H_
#define FLUTTER_SHELL_COMMON_BASE64_H_

#include <cstddef>

namespace flutter {

struct Base64 {
 public:
  enum class Error {
    kNone,
    kBadPadding,
    kBadChar,
  };

  /**
     Base64 encodes src into dst.

     @param dst a pointer to a buffer large enough to receive the result.

     @return the required length of dst for encoding.
  */
  static size_t Encode(const void* src, size_t length, void* dst);

  /**
     Returns the length of the buffer that needs to be allocated to encode
     srcDataLength bytes.
  */
  static size_t EncodedSize(size_t srcDataLength) {
    // Take the ceiling of division by 3 to find the number of 3-byte groups
    // that need to be encoded. Each group takes 4 bytes to be represented in
    // base64.
    return ((srcDataLength + 2) / 3) * 4;
  }

  /**
     Base64 decodes src into dst.

     This can be called once with 'dst' nullptr to get the required size,
     then again with an allocated 'dst' pointer to do the actual decoding.

     @param dst nullptr or a pointer to a buffer large enough to receive the
     result

     @param dstLength assigned the length dst is required to be. Must not be
     nullptr.
  */
  [[nodiscard]] static Error Decode(const void* src,
                                    size_t srcLength,
                                    void* dst,
                                    size_t* dstLength);
};

}  // namespace flutter

#endif  // FLUTTER_SHELL_COMMON_BASE64_H_
engine/shell/common/base64.h/0
{ "file_path": "engine/shell/common/base64.h", "repo_id": "engine", "token_count": 626 }
304
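As a usage illustration of the two-phase decode flow documented in base64.h, here is a hedged round-trip sketch: size the output with EncodedSize, encode, query the decoded size with a null destination, then decode for real. The payload and the error-handling style are arbitrary choices; only the Base64 calls come from the header above.

// Round-trips a small buffer through Base64::Encode / Base64::Decode,
// following the sizing conventions documented in base64.h.
#include <string>
#include <vector>

#include "flutter/shell/common/base64.h"

namespace {

bool RoundTrip(const std::string& payload) {
  // Encode: EncodedSize() gives the buffer length Encode() requires.
  std::string encoded(flutter::Base64::EncodedSize(payload.size()), '\0');
  size_t encoded_size =
      flutter::Base64::Encode(payload.data(), payload.size(), encoded.data());
  encoded.resize(encoded_size);

  // Decode, phase 1: pass a null dst to learn the required buffer size.
  size_t decoded_size = 0;
  if (flutter::Base64::Decode(encoded.data(), encoded.size(), nullptr,
                              &decoded_size) != flutter::Base64::Error::kNone) {
    return false;
  }

  // Decode, phase 2: decode into an appropriately sized buffer.
  std::vector<char> decoded(decoded_size);
  if (flutter::Base64::Decode(encoded.data(), encoded.size(), decoded.data(),
                              &decoded_size) != flutter::Base64::Error::kNone) {
    return false;
  }

  return std::string(decoded.data(), decoded_size) == payload;
}

}  // namespace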
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/shell/common/engine.h" #include <cstring> #include "flutter/runtime/dart_vm_lifecycle.h" #include "flutter/shell/common/thread_host.h" #include "flutter/testing/fixture_test.h" #include "flutter/testing/testing.h" #include "fml/mapping.h" #include "gmock/gmock.h" #include "lib/ui/text/font_collection.h" #include "rapidjson/document.h" #include "rapidjson/stringbuffer.h" #include "rapidjson/writer.h" #include "runtime/isolate_configuration.h" #include "shell/common/run_configuration.h" namespace flutter { namespace { class FontManifestAssetResolver : public AssetResolver { public: FontManifestAssetResolver() {} bool IsValid() const override { return true; } bool IsValidAfterAssetManagerChange() const override { return true; } AssetResolver::AssetResolverType GetType() const override { return AssetResolver::AssetResolverType::kApkAssetProvider; } mutable size_t mapping_call_count = 0u; std::unique_ptr<fml::Mapping> GetAsMapping( const std::string& asset_name) const override { mapping_call_count++; if (asset_name == "FontManifest.json") { return std::make_unique<fml::DataMapping>("[{},{},{}]"); } return nullptr; } std::vector<std::unique_ptr<fml::Mapping>> GetAsMappings( const std::string& asset_pattern, const std::optional<std::string>& subdir) const override { return {}; }; bool operator==(const AssetResolver& other) const override { auto mapping = GetAsMapping("FontManifest.json"); return memcmp(other.GetAsMapping("FontManifest.json")->GetMapping(), mapping->GetMapping(), mapping->GetSize()) == 0; } }; class MockDelegate : public Engine::Delegate { public: MOCK_METHOD(void, OnEngineUpdateSemantics, (SemanticsNodeUpdates, CustomAccessibilityActionUpdates), (override)); MOCK_METHOD(void, OnEngineHandlePlatformMessage, (std::unique_ptr<PlatformMessage>), (override)); MOCK_METHOD(void, OnPreEngineRestart, (), (override)); MOCK_METHOD(void, OnRootIsolateCreated, (), (override)); MOCK_METHOD(void, UpdateIsolateDescription, (const std::string, int64_t), (override)); MOCK_METHOD(void, SetNeedsReportTimings, (bool), (override)); MOCK_METHOD(std::unique_ptr<std::vector<std::string>>, ComputePlatformResolvedLocale, (const std::vector<std::string>&), (override)); MOCK_METHOD(void, RequestDartDeferredLibrary, (intptr_t), (override)); MOCK_METHOD(fml::TimePoint, GetCurrentTimePoint, (), (override)); MOCK_METHOD(const std::shared_ptr<PlatformMessageHandler>&, GetPlatformMessageHandler, (), (const, override)); MOCK_METHOD(void, OnEngineChannelUpdate, (std::string, bool), (override)); MOCK_METHOD(double, GetScaledFontSize, (double font_size, int configuration_id), (const, override)); }; class MockResponse : public PlatformMessageResponse { public: MOCK_METHOD(void, Complete, (std::unique_ptr<fml::Mapping> data), (override)); MOCK_METHOD(void, CompleteEmpty, (), (override)); }; class MockRuntimeDelegate : public RuntimeDelegate { public: MOCK_METHOD(std::string, DefaultRouteName, (), (override)); MOCK_METHOD(void, ScheduleFrame, (bool), (override)); MOCK_METHOD(void, OnAllViewsRendered, (), (override)); MOCK_METHOD(void, Render, (int64_t, std::unique_ptr<flutter::LayerTree>, float), (override)); MOCK_METHOD(void, UpdateSemantics, (SemanticsNodeUpdates, CustomAccessibilityActionUpdates), (override)); MOCK_METHOD(void, HandlePlatformMessage, (std::unique_ptr<PlatformMessage>), (override)); MOCK_METHOD(FontCollection&, 
GetFontCollection, (), (override)); MOCK_METHOD(std::shared_ptr<AssetManager>, GetAssetManager, (), (override)); MOCK_METHOD(void, OnRootIsolateCreated, (), (override)); MOCK_METHOD(void, UpdateIsolateDescription, (const std::string, int64_t), (override)); MOCK_METHOD(void, SetNeedsReportTimings, (bool), (override)); MOCK_METHOD(std::unique_ptr<std::vector<std::string>>, ComputePlatformResolvedLocale, (const std::vector<std::string>&), (override)); MOCK_METHOD(void, RequestDartDeferredLibrary, (intptr_t), (override)); MOCK_METHOD(std::weak_ptr<PlatformMessageHandler>, GetPlatformMessageHandler, (), (const, override)); MOCK_METHOD(void, SendChannelUpdate, (std::string, bool), (override)); MOCK_METHOD(double, GetScaledFontSize, (double font_size, int configuration_id), (const, override)); }; class MockRuntimeController : public RuntimeController { public: MockRuntimeController(RuntimeDelegate& client, const TaskRunners& p_task_runners) : RuntimeController(client, p_task_runners) {} MOCK_METHOD(bool, IsRootIsolateRunning, (), (override)); MOCK_METHOD(bool, DispatchPlatformMessage, (std::unique_ptr<PlatformMessage>), (override)); MOCK_METHOD(void, LoadDartDeferredLibraryError, (intptr_t, const std::string, bool), (override)); MOCK_METHOD(DartVM*, GetDartVM, (), (const, override)); MOCK_METHOD(bool, NotifyIdle, (fml::TimeDelta), (override)); }; class MockFontCollection : public FontCollection { public: MOCK_METHOD(void, RegisterFonts, (const std::shared_ptr<AssetManager>& asset_manager), (override)); }; std::unique_ptr<PlatformMessage> MakePlatformMessage( const std::string& channel, const std::map<std::string, std::string>& values, const fml::RefPtr<PlatformMessageResponse>& response) { rapidjson::Document document; auto& allocator = document.GetAllocator(); document.SetObject(); for (const auto& pair : values) { rapidjson::Value key(pair.first.c_str(), strlen(pair.first.c_str()), allocator); rapidjson::Value value(pair.second.c_str(), strlen(pair.second.c_str()), allocator); document.AddMember(key, value, allocator); } rapidjson::StringBuffer buffer; rapidjson::Writer<rapidjson::StringBuffer> writer(buffer); document.Accept(writer); const uint8_t* data = reinterpret_cast<const uint8_t*>(buffer.GetString()); std::unique_ptr<PlatformMessage> message = std::make_unique<PlatformMessage>( channel, fml::MallocMapping::Copy(data, buffer.GetSize()), response); return message; } class EngineTest : public testing::FixtureTest { public: EngineTest() : thread_host_("EngineTest", ThreadHost::Type::kPlatform | ThreadHost::Type::kIo | ThreadHost::Type::kUi | ThreadHost::Type::kRaster), task_runners_({ "EngineTest", thread_host_.platform_thread->GetTaskRunner(), // platform thread_host_.raster_thread->GetTaskRunner(), // raster thread_host_.ui_thread->GetTaskRunner(), // ui thread_host_.io_thread->GetTaskRunner() // io }) {} void PostUITaskSync(const std::function<void()>& function) { fml::AutoResetWaitableEvent latch; task_runners_.GetUITaskRunner()->PostTask([&] { function(); latch.Signal(); }); latch.Wait(); } protected: void SetUp() override { settings_ = CreateSettingsForFixture(); dispatcher_maker_ = [](PointerDataDispatcher::Delegate&) { return nullptr; }; } MockDelegate delegate_; PointerDataDispatcherMaker dispatcher_maker_; ThreadHost thread_host_; TaskRunners task_runners_; Settings settings_; std::unique_ptr<Animator> animator_; fml::WeakPtr<IOManager> io_manager_; std::unique_ptr<RuntimeController> runtime_controller_; std::shared_ptr<fml::ConcurrentTaskRunner> image_decoder_task_runner_; 
fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate_; }; } // namespace TEST_F(EngineTest, Create) { PostUITaskSync([this] { auto engine = std::make_unique<Engine>( /*delegate=*/delegate_, /*dispatcher_maker=*/dispatcher_maker_, /*image_decoder_task_runner=*/image_decoder_task_runner_, /*task_runners=*/task_runners_, /*settings=*/settings_, /*animator=*/std::move(animator_), /*io_manager=*/io_manager_, /*font_collection=*/std::make_shared<FontCollection>(), /*runtime_controller=*/std::move(runtime_controller_), /*gpu_disabled_switch=*/std::make_shared<fml::SyncSwitch>()); EXPECT_TRUE(engine); }); } TEST_F(EngineTest, DispatchPlatformMessageUnknown) { PostUITaskSync([this] { MockRuntimeDelegate client; auto mock_runtime_controller = std::make_unique<MockRuntimeController>(client, task_runners_); EXPECT_CALL(*mock_runtime_controller, IsRootIsolateRunning()) .WillRepeatedly(::testing::Return(false)); auto engine = std::make_unique<Engine>( /*delegate=*/delegate_, /*dispatcher_maker=*/dispatcher_maker_, /*image_decoder_task_runner=*/image_decoder_task_runner_, /*task_runners=*/task_runners_, /*settings=*/settings_, /*animator=*/std::move(animator_), /*io_manager=*/io_manager_, /*font_collection=*/std::make_shared<FontCollection>(), /*runtime_controller=*/std::move(mock_runtime_controller), /*gpu_disabled_switch=*/std::make_shared<fml::SyncSwitch>()); fml::RefPtr<PlatformMessageResponse> response = fml::MakeRefCounted<MockResponse>(); std::unique_ptr<PlatformMessage> message = std::make_unique<PlatformMessage>("foo", response); engine->DispatchPlatformMessage(std::move(message)); }); } TEST_F(EngineTest, DispatchPlatformMessageInitialRoute) { PostUITaskSync([this] { MockRuntimeDelegate client; auto mock_runtime_controller = std::make_unique<MockRuntimeController>(client, task_runners_); EXPECT_CALL(*mock_runtime_controller, IsRootIsolateRunning()) .WillRepeatedly(::testing::Return(false)); auto engine = std::make_unique<Engine>( /*delegate=*/delegate_, /*dispatcher_maker=*/dispatcher_maker_, /*image_decoder_task_runner=*/image_decoder_task_runner_, /*task_runners=*/task_runners_, /*settings=*/settings_, /*animator=*/std::move(animator_), /*io_manager=*/io_manager_, /*font_collection=*/std::make_shared<FontCollection>(), /*runtime_controller=*/std::move(mock_runtime_controller), /*gpu_disabled_switch=*/std::make_shared<fml::SyncSwitch>()); fml::RefPtr<PlatformMessageResponse> response = fml::MakeRefCounted<MockResponse>(); std::map<std::string, std::string> values{ {"method", "setInitialRoute"}, {"args", "test_initial_route"}, }; std::unique_ptr<PlatformMessage> message = MakePlatformMessage("flutter/navigation", values, response); engine->DispatchPlatformMessage(std::move(message)); EXPECT_EQ(engine->InitialRoute(), "test_initial_route"); }); } TEST_F(EngineTest, DispatchPlatformMessageInitialRouteIgnored) { PostUITaskSync([this] { MockRuntimeDelegate client; auto mock_runtime_controller = std::make_unique<MockRuntimeController>(client, task_runners_); EXPECT_CALL(*mock_runtime_controller, IsRootIsolateRunning()) .WillRepeatedly(::testing::Return(true)); EXPECT_CALL(*mock_runtime_controller, DispatchPlatformMessage(::testing::_)) .WillRepeatedly(::testing::Return(true)); auto engine = std::make_unique<Engine>( /*delegate=*/delegate_, /*dispatcher_maker=*/dispatcher_maker_, /*image_decoder_task_runner=*/image_decoder_task_runner_, /*task_runners=*/task_runners_, /*settings=*/settings_, /*animator=*/std::move(animator_), /*io_manager=*/io_manager_, 
/*font_collection=*/std::make_shared<FontCollection>(), /*runtime_controller=*/std::move(mock_runtime_controller), /*gpu_disabled_switch=*/std::make_shared<fml::SyncSwitch>()); fml::RefPtr<PlatformMessageResponse> response = fml::MakeRefCounted<MockResponse>(); std::map<std::string, std::string> values{ {"method", "setInitialRoute"}, {"args", "test_initial_route"}, }; std::unique_ptr<PlatformMessage> message = MakePlatformMessage("flutter/navigation", values, response); engine->DispatchPlatformMessage(std::move(message)); EXPECT_EQ(engine->InitialRoute(), ""); }); } TEST_F(EngineTest, SpawnSharesFontLibrary) { PostUITaskSync([this] { MockRuntimeDelegate client; auto mock_runtime_controller = std::make_unique<MockRuntimeController>(client, task_runners_); auto vm_ref = DartVMRef::Create(settings_); EXPECT_CALL(*mock_runtime_controller, GetDartVM()) .WillRepeatedly(::testing::Return(vm_ref.get())); auto engine = std::make_unique<Engine>( /*delegate=*/delegate_, /*dispatcher_maker=*/dispatcher_maker_, /*image_decoder_task_runner=*/image_decoder_task_runner_, /*task_runners=*/task_runners_, /*settings=*/settings_, /*animator=*/std::move(animator_), /*io_manager=*/io_manager_, /*font_collection=*/std::make_shared<FontCollection>(), /*runtime_controller=*/std::move(mock_runtime_controller), /*gpu_disabled_switch=*/std::make_shared<fml::SyncSwitch>()); auto spawn = engine->Spawn(delegate_, dispatcher_maker_, settings_, nullptr, std::string(), io_manager_, snapshot_delegate_, nullptr); EXPECT_TRUE(spawn != nullptr); EXPECT_EQ(&engine->GetFontCollection(), &spawn->GetFontCollection()); }); } TEST_F(EngineTest, SpawnWithCustomInitialRoute) { PostUITaskSync([this] { MockRuntimeDelegate client; auto mock_runtime_controller = std::make_unique<MockRuntimeController>(client, task_runners_); auto vm_ref = DartVMRef::Create(settings_); EXPECT_CALL(*mock_runtime_controller, GetDartVM()) .WillRepeatedly(::testing::Return(vm_ref.get())); auto engine = std::make_unique<Engine>( /*delegate=*/delegate_, /*dispatcher_maker=*/dispatcher_maker_, /*image_decoder_task_runner=*/image_decoder_task_runner_, /*task_runners=*/task_runners_, /*settings=*/settings_, /*animator=*/std::move(animator_), /*io_manager=*/io_manager_, /*font_collection=*/std::make_shared<FontCollection>(), /*runtime_controller=*/std::move(mock_runtime_controller), /*gpu_disabled_switch=*/std::make_shared<fml::SyncSwitch>()); auto spawn = engine->Spawn(delegate_, dispatcher_maker_, settings_, nullptr, "/foo", io_manager_, snapshot_delegate_, nullptr); EXPECT_TRUE(spawn != nullptr); ASSERT_EQ("/foo", spawn->InitialRoute()); }); } TEST_F(EngineTest, SpawnWithCustomSettings) { PostUITaskSync([this] { MockRuntimeDelegate client; auto mock_runtime_controller = std::make_unique<MockRuntimeController>(client, task_runners_); auto vm_ref = DartVMRef::Create(settings_); EXPECT_CALL(*mock_runtime_controller, GetDartVM()) .WillRepeatedly(::testing::Return(vm_ref.get())); auto engine = std::make_unique<Engine>( /*delegate=*/delegate_, /*dispatcher_maker=*/dispatcher_maker_, /*image_decoder_task_runner=*/image_decoder_task_runner_, /*task_runners=*/task_runners_, /*settings=*/settings_, /*animator=*/std::move(animator_), /*io_manager=*/io_manager_, /*font_collection=*/std::make_shared<FontCollection>(), /*runtime_controller=*/std::move(mock_runtime_controller), /*gpu_disabled_switch=*/std::make_shared<fml::SyncSwitch>()); Settings custom_settings = settings_; custom_settings.persistent_isolate_data = std::make_shared<fml::DataMapping>("foo"); auto spawn = 
engine->Spawn(delegate_, dispatcher_maker_, custom_settings, nullptr, std::string(), io_manager_, snapshot_delegate_, nullptr); EXPECT_TRUE(spawn != nullptr); auto new_persistent_isolate_data = const_cast<RuntimeController*>(spawn->GetRuntimeController()) ->GetPersistentIsolateData(); EXPECT_EQ(custom_settings.persistent_isolate_data->GetMapping(), new_persistent_isolate_data->GetMapping()); EXPECT_EQ(custom_settings.persistent_isolate_data->GetSize(), new_persistent_isolate_data->GetSize()); }); } TEST_F(EngineTest, PassesLoadDartDeferredLibraryErrorToRuntime) { PostUITaskSync([this] { intptr_t error_id = 123; const std::string error_message = "error message"; MockRuntimeDelegate client; auto mock_runtime_controller = std::make_unique<MockRuntimeController>(client, task_runners_); EXPECT_CALL(*mock_runtime_controller, IsRootIsolateRunning()) .WillRepeatedly(::testing::Return(true)); EXPECT_CALL(*mock_runtime_controller, LoadDartDeferredLibraryError(error_id, error_message, true)) .Times(1); auto engine = std::make_unique<Engine>( /*delegate=*/delegate_, /*dispatcher_maker=*/dispatcher_maker_, /*image_decoder_task_runner=*/image_decoder_task_runner_, /*task_runners=*/task_runners_, /*settings=*/settings_, /*animator=*/std::move(animator_), /*io_manager=*/io_manager_, /*font_collection=*/std::make_shared<FontCollection>(), /*runtime_controller=*/std::move(mock_runtime_controller), /*gpu_disabled_switch=*/std::make_shared<fml::SyncSwitch>()); engine->LoadDartDeferredLibraryError(error_id, error_message, true); }); } TEST_F(EngineTest, SpawnedEngineInheritsAssetManager) { PostUITaskSync([this] { MockRuntimeDelegate client; auto mock_runtime_controller = std::make_unique<MockRuntimeController>(client, task_runners_); auto vm_ref = DartVMRef::Create(settings_); EXPECT_CALL(*mock_runtime_controller, GetDartVM()) .WillRepeatedly(::testing::Return(vm_ref.get())); // auto mock_font_collection = std::make_shared<MockFontCollection>(); // EXPECT_CALL(*mock_font_collection, RegisterFonts(::testing::_)) // .WillOnce(::testing::Return()); auto engine = std::make_unique<Engine>( /*delegate=*/delegate_, /*dispatcher_maker=*/dispatcher_maker_, /*image_decoder_task_runner=*/image_decoder_task_runner_, /*task_runners=*/task_runners_, /*settings=*/settings_, /*animator=*/std::move(animator_), /*io_manager=*/io_manager_, /*font_collection=*/std::make_shared<FontCollection>(), /*runtime_controller=*/std::move(mock_runtime_controller), /*gpu_disabled_switch=*/std::make_shared<fml::SyncSwitch>()); EXPECT_EQ(engine->GetAssetManager(), nullptr); auto asset_manager = std::make_shared<AssetManager>(); asset_manager->PushBack(std::make_unique<FontManifestAssetResolver>()); engine->UpdateAssetManager(asset_manager); EXPECT_EQ(engine->GetAssetManager(), asset_manager); auto spawn = engine->Spawn(delegate_, dispatcher_maker_, settings_, nullptr, std::string(), io_manager_, snapshot_delegate_, nullptr); EXPECT_TRUE(spawn != nullptr); EXPECT_EQ(engine->GetAssetManager(), spawn->GetAssetManager()); }); } TEST_F(EngineTest, UpdateAssetManagerWithEqualManagers) { PostUITaskSync([this] { MockRuntimeDelegate client; auto mock_runtime_controller = std::make_unique<MockRuntimeController>(client, task_runners_); auto vm_ref = DartVMRef::Create(settings_); EXPECT_CALL(*mock_runtime_controller, GetDartVM()) .WillRepeatedly(::testing::Return(vm_ref.get())); auto mock_font_collection = std::make_shared<MockFontCollection>(); EXPECT_CALL(*mock_font_collection, RegisterFonts(::testing::_)) .WillOnce(::testing::Return()); auto engine = 
std::make_unique<Engine>( /*delegate=*/delegate_, /*dispatcher_maker=*/dispatcher_maker_, /*image_decoder_task_runner=*/image_decoder_task_runner_, /*task_runners=*/task_runners_, /*settings=*/settings_, /*animator=*/std::move(animator_), /*io_manager=*/io_manager_, /*font_collection=*/mock_font_collection, /*runtime_controller=*/std::move(mock_runtime_controller), /*gpu_disabled_switch=*/std::make_shared<fml::SyncSwitch>()); EXPECT_EQ(engine->GetAssetManager(), nullptr); auto asset_manager = std::make_shared<AssetManager>(); asset_manager->PushBack(std::make_unique<FontManifestAssetResolver>()); auto asset_manager_2 = std::make_shared<AssetManager>(); asset_manager_2->PushBack(std::make_unique<FontManifestAssetResolver>()); EXPECT_NE(asset_manager, asset_manager_2); EXPECT_TRUE(*asset_manager == *asset_manager_2); engine->UpdateAssetManager(asset_manager); EXPECT_EQ(engine->GetAssetManager(), asset_manager); engine->UpdateAssetManager(asset_manager_2); // Didn't change because they're equivalent. EXPECT_EQ(engine->GetAssetManager(), asset_manager); }); } } // namespace flutter
engine/shell/common/engine_unittests.cc/0
{ "file_path": "engine/shell/common/engine_unittests.cc", "repo_id": "engine", "token_count": 8702 }
305
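// The Spawn tests in engine_unittests.cc above all follow one pattern: stub the
// runtime controller's collaborators with GoogleMock, construct an Engine, then
// assert that the spawned Engine shares (or overrides) specific state. The
// snippet below is a minimal, self-contained sketch of just that stubbing
// pattern using only GoogleMock/GoogleTest; the Counter/MockCounter types are
// hypothetical stand-ins for illustration, not engine classes.
#include "gmock/gmock.h"
#include "gtest/gtest.h"

namespace {

class Counter {
 public:
  virtual ~Counter() = default;
  virtual int Value() const = 0;
};

class MockCounter : public Counter {
 public:
  MOCK_METHOD(int, Value, (), (const, override));
};

TEST(MockingPatternSketch, WillRepeatedlyReturnsStubbedValue) {
  ::testing::NiceMock<MockCounter> counter;
  // Mirrors EXPECT_CALL(*mock_runtime_controller, GetDartVM())
  //             .WillRepeatedly(::testing::Return(vm_ref.get()))
  // in the engine tests: every call observes the same canned value.
  EXPECT_CALL(counter, Value()).WillRepeatedly(::testing::Return(42));
  EXPECT_EQ(counter.Value(), 42);
  EXPECT_EQ(counter.Value(), 42);
}

}  // namespace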
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #define FML_USED_ON_EMBEDDER #include "flutter/shell/common/rasterizer.h" #include <memory> #include <optional> #include "flutter/flow/frame_timings.h" #include "flutter/fml/synchronization/count_down_latch.h" #include "flutter/fml/time/time_point.h" #include "flutter/shell/common/thread_host.h" #include "flutter/testing/testing.h" #include "third_party/skia/include/core/SkColorSpace.h" #include "third_party/skia/include/core/SkSurface.h" #include "third_party/skia/include/gpu/GrTypes.h" #include "third_party/skia/include/gpu/ganesh/SkSurfaceGanesh.h" #include "gmock/gmock.h" using testing::_; using testing::ByMove; using testing::NiceMock; using testing::Return; using testing::ReturnRef; namespace flutter { namespace { constexpr float kDevicePixelRatio = 2.0f; constexpr int64_t kImplicitViewId = 0; std::vector<std::unique_ptr<LayerTreeTask>> SingleLayerTreeList( int64_t view_id, std::unique_ptr<LayerTree> layer_tree, float pixel_ratio) { std::vector<std::unique_ptr<LayerTreeTask>> tasks; tasks.push_back(std::make_unique<LayerTreeTask>( view_id, std::move(layer_tree), pixel_ratio)); return tasks; } class MockDelegate : public Rasterizer::Delegate { public: MOCK_METHOD(void, OnFrameRasterized, (const FrameTiming& frame_timing), (override)); MOCK_METHOD(fml::Milliseconds, GetFrameBudget, (), (override)); MOCK_METHOD(fml::TimePoint, GetLatestFrameTargetTime, (), (const, override)); MOCK_METHOD(const TaskRunners&, GetTaskRunners, (), (const, override)); MOCK_METHOD(const fml::RefPtr<fml::RasterThreadMerger>, GetParentRasterThreadMerger, (), (const, override)); MOCK_METHOD(std::shared_ptr<const fml::SyncSwitch>, GetIsGpuDisabledSyncSwitch, (), (const, override)); MOCK_METHOD(const Settings&, GetSettings, (), (const, override)); MOCK_METHOD(bool, ShouldDiscardLayerTree, (int64_t, const flutter::LayerTree&), (override)); }; class MockSurface : public Surface { public: MOCK_METHOD(bool, IsValid, (), (override)); MOCK_METHOD(std::unique_ptr<SurfaceFrame>, AcquireFrame, (const SkISize& size), (override)); MOCK_METHOD(SkMatrix, GetRootTransformation, (), (const, override)); MOCK_METHOD(GrDirectContext*, GetContext, (), (override)); MOCK_METHOD(std::unique_ptr<GLContextResult>, MakeRenderContextCurrent, (), (override)); MOCK_METHOD(bool, ClearRenderContext, (), (override)); MOCK_METHOD(bool, AllowsDrawingWhenGpuDisabled, (), (const, override)); }; class MockExternalViewEmbedder : public ExternalViewEmbedder { public: MOCK_METHOD(DlCanvas*, GetRootCanvas, (), (override)); MOCK_METHOD(void, CancelFrame, (), (override)); MOCK_METHOD( void, BeginFrame, (GrDirectContext * context, const fml::RefPtr<fml::RasterThreadMerger>& raster_thread_merger), (override)); MOCK_METHOD(void, PrepareFlutterView, (int64_t flutter_view_id, SkISize frame_size, double device_pixel_ratio), (override)); MOCK_METHOD(void, PrerollCompositeEmbeddedView, (int64_t view_id, std::unique_ptr<EmbeddedViewParams> params), (override)); MOCK_METHOD( PostPrerollResult, PostPrerollAction, (const fml::RefPtr<fml::RasterThreadMerger>& raster_thread_merger), (override)); MOCK_METHOD(DlCanvas*, CompositeEmbeddedView, (int64_t view_id), (override)); MOCK_METHOD(void, SubmitFlutterView, (GrDirectContext * context, const std::shared_ptr<impeller::AiksContext>& aiks_context, std::unique_ptr<SurfaceFrame> frame), (override)); MOCK_METHOD( void, EndFrame, (bool should_resubmit_frame, const 
fml::RefPtr<fml::RasterThreadMerger>& raster_thread_merger), (override)); MOCK_METHOD(bool, SupportsDynamicThreadMerging, (), (override)); }; } // namespace TEST(RasterizerTest, create) { NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); auto rasterizer = std::make_unique<Rasterizer>(delegate); EXPECT_TRUE(rasterizer != nullptr); } static std::unique_ptr<FrameTimingsRecorder> CreateFinishedBuildRecorder( fml::TimePoint timestamp) { std::unique_ptr<FrameTimingsRecorder> recorder = std::make_unique<FrameTimingsRecorder>(); recorder->RecordVsync(timestamp, timestamp); recorder->RecordBuildStart(timestamp); recorder->RecordBuildEnd(timestamp); return recorder; } static std::unique_ptr<FrameTimingsRecorder> CreateFinishedBuildRecorder() { return CreateFinishedBuildRecorder(fml::TimePoint::Now()); } TEST(RasterizerTest, drawEmptyPipeline) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." + test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); ON_CALL(delegate, GetTaskRunners()).WillByDefault(ReturnRef(task_runners)); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); rasterizer->Setup(std::move(surface)); fml::AutoResetWaitableEvent latch; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); rasterizer->Draw(pipeline); latch.Signal(); }); latch.Wait(); } TEST(RasterizerTest, drawWithExternalViewEmbedderExternalViewEmbedderSubmitFrameCalled) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); EXPECT_CALL(delegate, OnFrameRasterized(_)); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); std::shared_ptr<NiceMock<MockExternalViewEmbedder>> external_view_embedder = std::make_shared<NiceMock<MockExternalViewEmbedder>>(); rasterizer->SetExternalViewEmbedder(external_view_embedder); SurfaceFrame::FramebufferInfo framebuffer_info; framebuffer_info.supports_readback = true; auto surface_frame = std::make_unique<SurfaceFrame>( /*surface=*/ nullptr, framebuffer_info, /*submit_callback=*/[](const SurfaceFrame&, DlCanvas*) { return true; }, /*frame_size=*/SkISize::Make(800, 600)); EXPECT_CALL(*surface, AllowsDrawingWhenGpuDisabled()).WillOnce(Return(true)); EXPECT_CALL(*surface, AcquireFrame(SkISize())) .WillOnce(Return(ByMove(std::move(surface_frame)))); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); EXPECT_CALL(*external_view_embedder, BeginFrame(/*context=*/nullptr, /*raster_thread_merger=*/ fml::RefPtr<fml::RasterThreadMerger>(nullptr))) .Times(1); EXPECT_CALL(*external_view_embedder, PrepareFlutterView(/*flutter_view_id=*/kImplicitViewId, /*frame_size=*/SkISize(), /*device_pixel_ratio=*/2.0)) .Times(1); EXPECT_CALL(*external_view_embedder, SubmitFlutterView).Times(1); EXPECT_CALL( *external_view_embedder, EndFrame(/*should_resubmit_frame=*/false, /*raster_thread_merger=*/fml::RefPtr<fml::RasterThreadMerger>( nullptr))) .Times(1); rasterizer->Setup(std::move(surface)); fml::AutoResetWaitableEvent latch; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); auto layer_tree = std::make_unique<LayerTree>(/*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder()); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); rasterizer->Draw(pipeline); latch.Signal(); }); latch.Wait(); } TEST( RasterizerTest, drawWithExternalViewEmbedderAndThreadMergerNotMergedExternalViewEmbedderSubmitFrameNotCalled) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); EXPECT_CALL(delegate, OnFrameRasterized(_)); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); std::shared_ptr<NiceMock<MockExternalViewEmbedder>> external_view_embedder = std::make_shared<NiceMock<MockExternalViewEmbedder>>(); rasterizer->SetExternalViewEmbedder(external_view_embedder); EXPECT_CALL(*external_view_embedder, SupportsDynamicThreadMerging) .WillRepeatedly(Return(true)); SurfaceFrame::FramebufferInfo framebuffer_info; framebuffer_info.supports_readback = true; auto surface_frame = std::make_unique<SurfaceFrame>( /*surface=*/ nullptr, framebuffer_info, /*submit_callback=*/[](const SurfaceFrame&, DlCanvas*) { return true; }, /*frame_size=*/SkISize::Make(800, 600)); EXPECT_CALL(*surface, AllowsDrawingWhenGpuDisabled()).WillOnce(Return(true)); EXPECT_CALL(*surface, AcquireFrame(SkISize())) .WillOnce(Return(ByMove(std::move(surface_frame)))); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); EXPECT_CALL(*external_view_embedder, BeginFrame(/*context=*/nullptr, /*raster_thread_merger=*/_)) .Times(1); EXPECT_CALL(*external_view_embedder, PrepareFlutterView(/*flutter_view_id=*/kImplicitViewId, /*frame_size=*/SkISize(), /*device_pixel_ratio=*/2.0)) .Times(1); EXPECT_CALL(*external_view_embedder, SubmitFlutterView).Times(0); EXPECT_CALL(*external_view_embedder, EndFrame(/*should_resubmit_frame=*/false, /*raster_thread_merger=*/_)) .Times(1); rasterizer->Setup(std::move(surface)); fml::AutoResetWaitableEvent latch; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); auto layer_tree = std::make_unique<LayerTree>( /*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder()); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); rasterizer->Draw(pipeline); latch.Signal(); }); latch.Wait(); } TEST( RasterizerTest, drawWithExternalViewEmbedderAndThreadsMergedExternalViewEmbedderSubmitFrameCalled) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); fml::MessageLoop::EnsureInitializedForCurrentThread(); TaskRunners task_runners("test", fml::MessageLoop::GetCurrent().GetTaskRunner(), fml::MessageLoop::GetCurrent().GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); EXPECT_CALL(delegate, OnFrameRasterized(_)); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); std::shared_ptr<NiceMock<MockExternalViewEmbedder>> external_view_embedder = std::make_shared<NiceMock<MockExternalViewEmbedder>>(); rasterizer->SetExternalViewEmbedder(external_view_embedder); SurfaceFrame::FramebufferInfo framebuffer_info; framebuffer_info.supports_readback = true; auto surface_frame = std::make_unique<SurfaceFrame>( /*surface=*/ nullptr, framebuffer_info, /*submit_callback=*/[](const SurfaceFrame&, DlCanvas*) { return true; }, /*frame_size=*/SkISize::Make(800, 600)); EXPECT_CALL(*surface, AllowsDrawingWhenGpuDisabled()).WillOnce(Return(true)); EXPECT_CALL(*surface, AcquireFrame(SkISize())) .WillOnce(Return(ByMove(std::move(surface_frame)))); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); EXPECT_CALL(*external_view_embedder, SupportsDynamicThreadMerging) .WillRepeatedly(Return(true)); EXPECT_CALL(*external_view_embedder, BeginFrame(/*context=*/nullptr, /*raster_thread_merger=*/_)) .Times(1); EXPECT_CALL(*external_view_embedder, PrepareFlutterView(/*flutter_view_id=*/kImplicitViewId, /*frame_size=*/SkISize(), /*device_pixel_ratio=*/2.0)) .Times(1); EXPECT_CALL(*external_view_embedder, SubmitFlutterView).Times(1); EXPECT_CALL(*external_view_embedder, EndFrame(/*should_resubmit_frame=*/false, /*raster_thread_merger=*/_)) .Times(1); rasterizer->Setup(std::move(surface)); auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); auto layer_tree = std::make_unique<LayerTree>(/*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder()); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); rasterizer->Draw(pipeline); } TEST(RasterizerTest, drawLastLayerTreeWithThreadsMergedExternalViewEmbedderAndEndFrameCalled) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); fml::MessageLoop::EnsureInitializedForCurrentThread(); TaskRunners task_runners("test", fml::MessageLoop::GetCurrent().GetTaskRunner(), fml::MessageLoop::GetCurrent().GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); EXPECT_CALL(delegate, OnFrameRasterized(_)); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); std::shared_ptr<NiceMock<MockExternalViewEmbedder>> external_view_embedder = std::make_shared<NiceMock<MockExternalViewEmbedder>>(); rasterizer->SetExternalViewEmbedder(external_view_embedder); SurfaceFrame::FramebufferInfo framebuffer_info; framebuffer_info.supports_readback = true; auto surface_frame1 = std::make_unique<SurfaceFrame>( /*surface=*/ nullptr, framebuffer_info, /*submit_callback=*/[](const SurfaceFrame&, DlCanvas*) { return true; }, /*frame_size=*/SkISize::Make(800, 600)); auto surface_frame2 = std::make_unique<SurfaceFrame>( /*surface=*/ nullptr, framebuffer_info, /*submit_callback=*/[](const SurfaceFrame&, DlCanvas*) { return true; }, /*frame_size=*/SkISize::Make(800, 600)); EXPECT_CALL(*surface, AllowsDrawingWhenGpuDisabled()) .WillRepeatedly(Return(true)); // Prepare two frames for Draw() and DrawLastLayerTrees(). EXPECT_CALL(*surface, AcquireFrame(SkISize())) .WillOnce(Return(ByMove(std::move(surface_frame1)))) .WillOnce(Return(ByMove(std::move(surface_frame2)))); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); EXPECT_CALL(*external_view_embedder, SupportsDynamicThreadMerging) .WillRepeatedly(Return(true)); EXPECT_CALL(*external_view_embedder, BeginFrame(/*context=*/nullptr, /*raster_thread_merger=*/_)) .Times(2); EXPECT_CALL(*external_view_embedder, PrepareFlutterView(/*flutter_view_id=*/kImplicitViewId, /*frame_size=*/SkISize(), /*device_pixel_ratio=*/2.0)) .Times(2); EXPECT_CALL(*external_view_embedder, SubmitFlutterView).Times(2); EXPECT_CALL(*external_view_embedder, EndFrame(/*should_resubmit_frame=*/false, /*raster_thread_merger=*/_)) .Times(2); rasterizer->Setup(std::move(surface)); auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); auto layer_tree = std::make_unique<LayerTree>(/*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder()); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); // The Draw() will respectively call BeginFrame(), SubmitFlutterView() and // EndFrame() one time. ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); rasterizer->Draw(pipeline); // The DrawLastLayerTrees() will respectively call BeginFrame(), // SubmitFlutterView() and EndFrame() one more time, totally 2 times. rasterizer->DrawLastLayerTrees(CreateFinishedBuildRecorder()); } TEST(RasterizerTest, externalViewEmbedderDoesntEndFrameWhenNoSurfaceIsSet) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); auto rasterizer = std::make_unique<Rasterizer>(delegate); std::shared_ptr<NiceMock<MockExternalViewEmbedder>> external_view_embedder = std::make_shared<NiceMock<MockExternalViewEmbedder>>(); rasterizer->SetExternalViewEmbedder(external_view_embedder); EXPECT_CALL( *external_view_embedder, EndFrame(/*should_resubmit_frame=*/false, /*raster_thread_merger=*/fml::RefPtr<fml::RasterThreadMerger>( nullptr))) .Times(0); fml::AutoResetWaitableEvent latch; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); auto layer_tree = std::make_unique<LayerTree>( /*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder()); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); rasterizer->Draw(pipeline); latch.Signal(); }); latch.Wait(); } TEST(RasterizerTest, externalViewEmbedderDoesntEndFrameWhenNotUsedThisFrame) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); auto is_gpu_disabled_sync_switch = std::make_shared<const fml::SyncSwitch>(false); ON_CALL(delegate, GetIsGpuDisabledSyncSwitch()) .WillByDefault(Return(is_gpu_disabled_sync_switch)); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); std::shared_ptr<NiceMock<MockExternalViewEmbedder>> external_view_embedder = std::make_shared<NiceMock<MockExternalViewEmbedder>>(); rasterizer->SetExternalViewEmbedder(external_view_embedder); rasterizer->Setup(std::move(surface)); EXPECT_CALL(*external_view_embedder, BeginFrame(/*context=*/nullptr, /*raster_thread_merger=*/_)) .Times(0); EXPECT_CALL(*external_view_embedder, PrepareFlutterView(/*flutter_view_id=*/kImplicitViewId, /*frame_size=*/SkISize(), /*device_pixel_ratio=*/2.0)) .Times(0); EXPECT_CALL( *external_view_embedder, EndFrame(/*should_resubmit_frame=*/false, /*raster_thread_merger=*/fml::RefPtr<fml::RasterThreadMerger>( nullptr))) .Times(0); fml::AutoResetWaitableEvent latch; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); auto layer_tree = std::make_unique<LayerTree>( /*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder()); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); // Always discard the layer tree. ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(true)); DrawStatus status = rasterizer->Draw(pipeline); EXPECT_EQ(status, DrawStatus::kDone); EXPECT_EQ(rasterizer->GetLastDrawStatus(kImplicitViewId), DrawSurfaceStatus::kDiscarded); latch.Signal(); }); latch.Wait(); } TEST(RasterizerTest, externalViewEmbedderDoesntEndFrameWhenPipelineIsEmpty) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); std::shared_ptr<NiceMock<MockExternalViewEmbedder>> external_view_embedder = std::make_shared<NiceMock<MockExternalViewEmbedder>>(); rasterizer->SetExternalViewEmbedder(external_view_embedder); rasterizer->Setup(std::move(surface)); EXPECT_CALL( *external_view_embedder, EndFrame(/*should_resubmit_frame=*/false, /*raster_thread_merger=*/fml::RefPtr<fml::RasterThreadMerger>( nullptr))) .Times(0); fml::AutoResetWaitableEvent latch; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); DrawStatus status = rasterizer->Draw(pipeline); EXPECT_EQ(status, DrawStatus::kPipelineEmpty); latch.Signal(); }); latch.Wait(); } TEST(RasterizerTest, drawMultipleViewsWithExternalViewEmbedder) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." + test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); EXPECT_CALL(delegate, OnFrameRasterized(_)); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); std::shared_ptr<NiceMock<MockExternalViewEmbedder>> external_view_embedder = std::make_shared<NiceMock<MockExternalViewEmbedder>>(); rasterizer->SetExternalViewEmbedder(external_view_embedder); EXPECT_CALL(*external_view_embedder, SupportsDynamicThreadMerging) .WillRepeatedly(Return(false)); EXPECT_CALL(*surface, AllowsDrawingWhenGpuDisabled()).WillOnce(Return(true)); EXPECT_CALL(*surface, AcquireFrame(SkISize())).Times(2); ON_CALL(*surface, AcquireFrame).WillByDefault([](const SkISize& size) { SurfaceFrame::FramebufferInfo framebuffer_info; framebuffer_info.supports_readback = true; return std::make_unique<SurfaceFrame>( /*surface=*/ nullptr, framebuffer_info, /*submit_callback=*/[](const SurfaceFrame&, DlCanvas*) { return true; }, /*frame_size=*/SkISize::Make(800, 600)); }); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); EXPECT_CALL(*external_view_embedder, BeginFrame(/*context=*/nullptr, /*raster_thread_merger=*/_)) .Times(1); EXPECT_CALL( *external_view_embedder, PrepareFlutterView(/*flutter_view_id=*/0, /*frame_size=*/SkISize(), 
/*device_pixel_ratio=*/1.5)) .Times(1); EXPECT_CALL( *external_view_embedder, PrepareFlutterView(/*flutter_view_id=*/1, /*frame_size=*/SkISize(), /*device_pixel_ratio=*/2.0)) .Times(1); EXPECT_CALL(*external_view_embedder, SubmitFlutterView).Times(2); EXPECT_CALL(*external_view_embedder, EndFrame(/*should_resubmit_frame=*/false, /*raster_thread_merger=*/_)) .Times(1); rasterizer->Setup(std::move(surface)); fml::AutoResetWaitableEvent latch; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); std::vector<std::unique_ptr<LayerTreeTask>> tasks; tasks.push_back(std::make_unique<LayerTreeTask>( 0, std::make_unique<LayerTree>(LayerTree::Config(), SkISize()), 1.5)); tasks.push_back(std::make_unique<LayerTreeTask>( 1, std::make_unique<LayerTree>(LayerTree::Config(), SkISize()), 2.0)); auto layer_tree_item = std::make_unique<FrameItem>( std::move(tasks), CreateFinishedBuildRecorder()); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); rasterizer->Draw(pipeline); latch.Signal(); }); latch.Wait(); } TEST(RasterizerTest, drawWithGpuEnabledAndSurfaceAllowsDrawingWhenGpuDisabledDoesAcquireFrame) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." + test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); EXPECT_CALL(delegate, OnFrameRasterized(_)); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); auto is_gpu_disabled_sync_switch = std::make_shared<const fml::SyncSwitch>(false); SurfaceFrame::FramebufferInfo framebuffer_info; framebuffer_info.supports_readback = true; auto surface_frame = std::make_unique<SurfaceFrame>( /*surface=*/ nullptr, /*framebuffer_info=*/framebuffer_info, /*submit_callback=*/[](const SurfaceFrame&, DlCanvas*) { return true; }, /*frame_size=*/SkISize::Make(800, 600)); EXPECT_CALL(*surface, AllowsDrawingWhenGpuDisabled()).WillOnce(Return(true)); ON_CALL(delegate, GetIsGpuDisabledSyncSwitch()) .WillByDefault(Return(is_gpu_disabled_sync_switch)); EXPECT_CALL(delegate, GetIsGpuDisabledSyncSwitch()).Times(0); EXPECT_CALL(*surface, AcquireFrame(SkISize())) .WillOnce(Return(ByMove(std::move(surface_frame)))); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); rasterizer->Setup(std::move(surface)); fml::AutoResetWaitableEvent latch; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); auto layer_tree = std::make_unique<LayerTree>( /*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder()); PipelineProduceResult result = 
pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); rasterizer->Draw(pipeline); latch.Signal(); }); latch.Wait(); } TEST( RasterizerTest, drawWithGpuDisabledAndSurfaceAllowsDrawingWhenGpuDisabledDoesAcquireFrame) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." + test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); EXPECT_CALL(delegate, OnFrameRasterized(_)); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); auto is_gpu_disabled_sync_switch = std::make_shared<const fml::SyncSwitch>(true); SurfaceFrame::FramebufferInfo framebuffer_info; framebuffer_info.supports_readback = true; auto surface_frame = std::make_unique<SurfaceFrame>( /*surface=*/ nullptr, /*framebuffer_info=*/framebuffer_info, /*submit_callback=*/[](const SurfaceFrame&, DlCanvas*) { return true; }, /*frame_size=*/SkISize::Make(800, 600)); EXPECT_CALL(*surface, AllowsDrawingWhenGpuDisabled()).WillOnce(Return(true)); ON_CALL(delegate, GetIsGpuDisabledSyncSwitch()) .WillByDefault(Return(is_gpu_disabled_sync_switch)); EXPECT_CALL(delegate, GetIsGpuDisabledSyncSwitch()).Times(0); EXPECT_CALL(*surface, AcquireFrame(SkISize())) .WillOnce(Return(ByMove(std::move(surface_frame)))); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); rasterizer->Setup(std::move(surface)); fml::AutoResetWaitableEvent latch; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); auto layer_tree = std::make_unique<LayerTree>( /*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder()); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); DrawStatus status = rasterizer->Draw(pipeline); EXPECT_EQ(status, DrawStatus::kDone); latch.Signal(); }); latch.Wait(); } TEST( RasterizerTest, drawWithGpuEnabledAndSurfaceDisallowsDrawingWhenGpuDisabledDoesAcquireFrame) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); EXPECT_CALL(delegate, OnFrameRasterized(_)); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); auto is_gpu_disabled_sync_switch = std::make_shared<const fml::SyncSwitch>(false); SurfaceFrame::FramebufferInfo framebuffer_info; framebuffer_info.supports_readback = true; auto surface_frame = std::make_unique<SurfaceFrame>( /*surface=*/ nullptr, /*framebuffer_info=*/framebuffer_info, /*submit_callback=*/[](const SurfaceFrame&, DlCanvas*) { return true; }, /*frame_size=*/SkISize::Make(800, 600)); EXPECT_CALL(*surface, AllowsDrawingWhenGpuDisabled()).WillOnce(Return(false)); EXPECT_CALL(delegate, GetIsGpuDisabledSyncSwitch()) .WillOnce(Return(is_gpu_disabled_sync_switch)); EXPECT_CALL(*surface, AcquireFrame(SkISize())) .WillOnce(Return(ByMove(std::move(surface_frame)))); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); rasterizer->Setup(std::move(surface)); fml::AutoResetWaitableEvent latch; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); auto layer_tree = std::make_unique<LayerTree>( /*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder()); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); DrawStatus status = rasterizer->Draw(pipeline); EXPECT_EQ(status, DrawStatus::kDone); latch.Signal(); }); latch.Wait(); } TEST( RasterizerTest, drawWithGpuDisabledAndSurfaceDisallowsDrawingWhenGpuDisabledDoesntAcquireFrame) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); EXPECT_CALL(delegate, OnFrameRasterized(_)).Times(0); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); auto is_gpu_disabled_sync_switch = std::make_shared<const fml::SyncSwitch>(true); SurfaceFrame::FramebufferInfo framebuffer_info; framebuffer_info.supports_readback = true; auto surface_frame = std::make_unique<SurfaceFrame>( /*surface=*/ nullptr, /*framebuffer_info=*/framebuffer_info, /*submit_callback=*/[](const SurfaceFrame&, DlCanvas*) { return true; }, /*frame_size=*/SkISize::Make(800, 600)); EXPECT_CALL(*surface, AllowsDrawingWhenGpuDisabled()).WillOnce(Return(false)); EXPECT_CALL(delegate, GetIsGpuDisabledSyncSwitch()) .WillOnce(Return(is_gpu_disabled_sync_switch)); EXPECT_CALL(*surface, AcquireFrame(SkISize())).Times(0); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); rasterizer->Setup(std::move(surface)); fml::AutoResetWaitableEvent latch; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); auto layer_tree = std::make_unique<LayerTree>( /*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder()); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); DrawStatus status = rasterizer->Draw(pipeline); EXPECT_EQ(status, DrawStatus::kGpuUnavailable); latch.Signal(); }); latch.Wait(); } TEST( RasterizerTest, FrameTimingRecorderShouldStartRecordingRasterTimeBeforeSurfaceAcquireFrame) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); EXPECT_CALL(delegate, OnFrameRasterized(_)) .WillOnce([&](const FrameTiming& frame_timing) { fml::TimePoint now = fml::TimePoint::Now(); fml::TimePoint raster_start = frame_timing.Get(FrameTiming::kRasterStart); EXPECT_TRUE(now - raster_start < fml::TimeDelta::FromSecondsF(1)); }); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); auto is_gpu_disabled_sync_switch = std::make_shared<const fml::SyncSwitch>(false); ON_CALL(delegate, GetIsGpuDisabledSyncSwitch()) .WillByDefault(Return(is_gpu_disabled_sync_switch)); ON_CALL(*surface, AcquireFrame(SkISize())) .WillByDefault(::testing::Invoke([] { return nullptr; })); EXPECT_CALL(*surface, AcquireFrame(SkISize())); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillOnce(Return(ByMove(std::make_unique<GLContextDefaultResult>(true)))); rasterizer->Setup(std::move(surface)); fml::AutoResetWaitableEvent latch; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); auto layer_tree = std::make_unique<LayerTree>( /*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder()); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); DrawStatus status = rasterizer->Draw(pipeline); EXPECT_EQ(status, DrawStatus::kDone); EXPECT_EQ(rasterizer->GetLastDrawStatus(kImplicitViewId), DrawSurfaceStatus::kFailed); latch.Signal(); }); latch.Wait(); } TEST(RasterizerTest, drawLayerTreeWithCorrectFrameTimingWhenPipelineIsMoreAvailable) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); ON_CALL(delegate, GetTaskRunners()).WillByDefault(ReturnRef(task_runners)); fml::AutoResetWaitableEvent latch; std::unique_ptr<Rasterizer> rasterizer; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { rasterizer = std::make_unique<Rasterizer>(delegate); latch.Signal(); }); latch.Wait(); auto surface = std::make_unique<NiceMock<MockSurface>>(); EXPECT_CALL(*surface, AllowsDrawingWhenGpuDisabled()) .WillRepeatedly(Return(true)); ON_CALL(*surface, AcquireFrame(SkISize())) .WillByDefault(::testing::Invoke([] { SurfaceFrame::FramebufferInfo framebuffer_info; framebuffer_info.supports_readback = true; return std::make_unique<SurfaceFrame>( /*surface=*/ nullptr, framebuffer_info, /*submit_callback=*/ [](const SurfaceFrame& frame, DlCanvas*) { return true; }, /*frame_size=*/SkISize::Make(800, 600)); })); ON_CALL(*surface, MakeRenderContextCurrent()) .WillByDefault(::testing::Invoke( [] { return std::make_unique<GLContextDefaultResult>(true); })); fml::CountDownLatch count_down_latch(2); auto first_timestamp = fml::TimePoint::Now(); auto second_timestamp = first_timestamp + fml::TimeDelta::FromMilliseconds(8); std::vector<fml::TimePoint> timestamps = {first_timestamp, second_timestamp}; int frame_rasterized_count = 0; EXPECT_CALL(delegate, OnFrameRasterized(_)) .Times(2) .WillRepeatedly([&](const FrameTiming& frame_timing) { EXPECT_EQ(timestamps[frame_rasterized_count], frame_timing.Get(FrameTiming::kVsyncStart)); EXPECT_EQ(timestamps[frame_rasterized_count], frame_timing.Get(FrameTiming::kBuildStart)); EXPECT_EQ(timestamps[frame_rasterized_count], frame_timing.Get(FrameTiming::kBuildFinish)); frame_rasterized_count++; count_down_latch.CountDown(); }); thread_host.raster_thread->GetTaskRunner()->PostTask([&] { rasterizer->Setup(std::move(surface)); auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); for (int i = 0; i < 2; i++) { auto layer_tree = std::make_unique<LayerTree>( /*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder(timestamps[i])); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); EXPECT_EQ(result.is_first_item, i == 0); } // Although we only call 'Rasterizer::Draw' once, it will be called twice // finally because there are two items in the pipeline. ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); rasterizer->Draw(pipeline); }); count_down_latch.Wait(); thread_host.raster_thread->GetTaskRunner()->PostTask([&] { rasterizer.reset(); latch.Signal(); }); latch.Wait(); } TEST(RasterizerTest, TeardownFreesResourceCache) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); auto rasterizer = std::make_unique<Rasterizer>(delegate); auto surface = std::make_unique<NiceMock<MockSurface>>(); auto context = GrDirectContext::MakeMock(nullptr); context->setResourceCacheLimit(0); EXPECT_CALL(*surface, MakeRenderContextCurrent()) .WillRepeatedly([]() -> std::unique_ptr<GLContextResult> { return std::make_unique<GLContextDefaultResult>(true); }); EXPECT_CALL(*surface, GetContext()).WillRepeatedly(Return(context.get())); rasterizer->Setup(std::move(surface)); EXPECT_EQ(context->getResourceCacheLimit(), 0ul); rasterizer->SetResourceCacheMaxBytes(10000000, false); EXPECT_EQ(context->getResourceCacheLimit(), 10000000ul); EXPECT_EQ(context->getResourceCachePurgeableBytes(), 0ul); int count = 0; size_t bytes = 0; context->getResourceCacheUsage(&count, &bytes); EXPECT_EQ(bytes, 0ul); auto image_info = SkImageInfo::MakeN32Premul(500, 500, SkColorSpace::MakeSRGB()); auto sk_surface = SkSurfaces::RenderTarget(context.get(), skgpu::Budgeted::kYes, image_info); EXPECT_TRUE(sk_surface); SkPaint paint; sk_surface->getCanvas()->drawPaint(paint); context->flushAndSubmit(GrSyncCpu::kYes); EXPECT_EQ(context->getResourceCachePurgeableBytes(), 0ul); sk_surface.reset(); context->getResourceCacheUsage(&count, &bytes); EXPECT_GT(bytes, 0ul); EXPECT_GT(context->getResourceCachePurgeableBytes(), 0ul); rasterizer->Teardown(); EXPECT_EQ(context->getResourceCachePurgeableBytes(), 0ul); } TEST(RasterizerTest, TeardownNoSurface) { std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." + test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); EXPECT_CALL(delegate, GetTaskRunners()) .WillRepeatedly(ReturnRef(task_runners)); auto rasterizer = std::make_unique<Rasterizer>(delegate); EXPECT_TRUE(rasterizer); rasterizer->Teardown(); } TEST(RasterizerTest, presentationTimeSetWhenVsyncTargetInFuture) { GTEST_SKIP() << "eglPresentationTime is disabled due to " "https://github.com/flutter/flutter/issues/112503"; #if false std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); ON_CALL(delegate, GetTaskRunners()).WillByDefault(ReturnRef(task_runners)); fml::AutoResetWaitableEvent latch; std::unique_ptr<Rasterizer> rasterizer; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { rasterizer = std::make_unique<Rasterizer>(delegate); latch.Signal(); }); latch.Wait(); const auto millis_16 = fml::TimeDelta::FromMilliseconds(16); const auto first_timestamp = fml::TimePoint::Now() + millis_16; auto second_timestamp = first_timestamp + millis_16; std::vector<fml::TimePoint> timestamps = {first_timestamp, second_timestamp}; int frames_submitted = 0; fml::CountDownLatch submit_latch(2); auto surface = std::make_unique<MockSurface>(); ON_CALL(*surface, AllowsDrawingWhenGpuDisabled()).WillByDefault(Return(true)); ON_CALL(*surface, AcquireFrame(SkISize())) .WillByDefault(::testing::Invoke([&] { SurfaceFrame::FramebufferInfo framebuffer_info; framebuffer_info.supports_readback = true; return std::make_unique<SurfaceFrame>( /*surface=*/nullptr, framebuffer_info, /*submit_callback=*/ [&](const SurfaceFrame& frame, DlCanvas*) { const auto pres_time = *frame.submit_info().presentation_time; const auto diff = pres_time - first_timestamp; int num_frames_submitted = frames_submitted++; EXPECT_EQ(diff.ToMilliseconds(), num_frames_submitted * millis_16.ToMilliseconds()); submit_latch.CountDown(); return true; }, /*frame_size=*/SkISize::Make(800, 600)); })); ON_CALL(*surface, MakeRenderContextCurrent()) .WillByDefault(::testing::Invoke( [] { return std::make_unique<GLContextDefaultResult>(true); })); thread_host.raster_thread->GetTaskRunner()->PostTask([&] { rasterizer->Setup(std::move(surface)); auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); for (int i = 0; i < 2; i++) { auto layer_tree = std::make_unique<LayerTree>( /*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder(timestamps[i])); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); EXPECT_EQ(result.is_first_item, i == 0); } // Although we only call 'Rasterizer::Draw' once, it will be called twice // finally because there are two items in the pipeline. ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); rasterizer->Draw(pipeline); }); submit_latch.Wait(); thread_host.raster_thread->GetTaskRunner()->PostTask([&] { rasterizer.reset(); latch.Signal(); }); latch.Wait(); #endif // false } TEST(RasterizerTest, presentationTimeNotSetWhenVsyncTargetInPast) { GTEST_SKIP() << "eglPresentationTime is disabled due to " "https://github.com/flutter/flutter/issues/112503"; #if false std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); ThreadHost thread_host("io.flutter.test." 
+ test_name + ".", ThreadHost::Type::kPlatform | ThreadHost::Type::kRaster | ThreadHost::Type::kIo | ThreadHost::Type::kUi); TaskRunners task_runners("test", thread_host.platform_thread->GetTaskRunner(), thread_host.raster_thread->GetTaskRunner(), thread_host.ui_thread->GetTaskRunner(), thread_host.io_thread->GetTaskRunner()); NiceMock<MockDelegate> delegate; Settings settings; ON_CALL(delegate, GetSettings()).WillByDefault(ReturnRef(settings)); ON_CALL(delegate, GetTaskRunners()).WillByDefault(ReturnRef(task_runners)); fml::AutoResetWaitableEvent latch; std::unique_ptr<Rasterizer> rasterizer; thread_host.raster_thread->GetTaskRunner()->PostTask([&] { rasterizer = std::make_unique<Rasterizer>(delegate); latch.Signal(); }); latch.Wait(); const auto millis_16 = fml::TimeDelta::FromMilliseconds(16); const auto first_timestamp = fml::TimePoint::Now() - millis_16; fml::CountDownLatch submit_latch(1); auto surface = std::make_unique<MockSurface>(); ON_CALL(*surface, AllowsDrawingWhenGpuDisabled()).WillByDefault(Return(true)); ON_CALL(*surface, AcquireFrame(SkISize())) .WillByDefault(::testing::Invoke([&] { SurfaceFrame::FramebufferInfo framebuffer_info; framebuffer_info.supports_readback = true; return std::make_unique<SurfaceFrame>( /*surface=*/nullptr, framebuffer_info, /*submit_callback=*/ [&](const SurfaceFrame& frame, DlCanvas*) { const std::optional<fml::TimePoint> pres_time = frame.submit_info().presentation_time; EXPECT_EQ(pres_time, std::nullopt); submit_latch.CountDown(); return true; }, /*frame_size=*/SkISize::Make(800, 600)); })); ON_CALL(*surface, MakeRenderContextCurrent()) .WillByDefault(::testing::Invoke( [] { return std::make_unique<GLContextDefaultResult>(true); })); thread_host.raster_thread->GetTaskRunner()->PostTask([&] { rasterizer->Setup(std::move(surface)); auto pipeline = std::make_shared<FramePipeline>(/*depth=*/10); auto layer_tree = std::make_unique<LayerTree>( /*config=*/LayerTree::Config(), /*frame_size=*/SkISize()); auto layer_tree_item = std::make_unique<FrameItem>( SingleLayerTreeList(kImplicitViewId, std::move(layer_tree), kDevicePixelRatio), CreateFinishedBuildRecorder(first_timestamp)); PipelineProduceResult result = pipeline->Produce().Complete(std::move(layer_tree_item)); EXPECT_TRUE(result.success); EXPECT_EQ(result.is_first_item, true); ON_CALL(delegate, ShouldDiscardLayerTree).WillByDefault(Return(false)); rasterizer->Draw(pipeline); }); submit_latch.Wait(); thread_host.raster_thread->GetTaskRunner()->PostTask([&] { rasterizer.reset(); latch.Signal(); }); latch.Wait(); #endif // false } } // namespace flutter
engine/shell/common/rasterizer_unittests.cc/0
{ "file_path": "engine/shell/common/rasterizer_unittests.cc", "repo_id": "engine", "token_count": 25863 }
306
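// Each Draw test in rasterizer_unittests.cc above posts the rasterization work
// to the raster task runner and blocks the test thread on an
// fml::AutoResetWaitableEvent until the posted task calls Signal(). The
// standalone sketch below reproduces that signal-and-wait control flow with the
// C++ standard library only, so the pattern is visible without the fml
// dependencies; it is an illustration of the test structure, not engine code.
#include <condition_variable>
#include <iostream>
#include <mutex>
#include <thread>

namespace {

// A tiny stand-in for fml::AutoResetWaitableEvent.
class AutoResetEvent {
 public:
  void Signal() {
    std::lock_guard<std::mutex> lock(mutex_);
    signaled_ = true;
    cv_.notify_one();
  }

  void Wait() {
    std::unique_lock<std::mutex> lock(mutex_);
    cv_.wait(lock, [&] { return signaled_; });
    signaled_ = false;  // Auto-reset so the event can be reused.
  }

 private:
  std::mutex mutex_;
  std::condition_variable cv_;
  bool signaled_ = false;
};

}  // namespace

int main() {
  AutoResetEvent latch;
  // "Post" the drawing work to another thread, much like the tests post to
  // thread_host.raster_thread->GetTaskRunner().
  std::thread raster_thread([&] {
    std::cout << "draw on raster thread\n";
    latch.Signal();
  });
  latch.Wait();  // The test thread blocks until the posted task completes.
  raster_thread.join();
  return 0;
}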
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_SHELL_COMMON_SHELL_TEST_H_ #define FLUTTER_SHELL_COMMON_SHELL_TEST_H_ #include "flutter/shell/common/shell.h" #include <memory> #include "flutter/common/graphics/persistent_cache.h" #include "flutter/common/settings.h" #include "flutter/flow/layers/container_layer.h" #include "flutter/fml/build_config.h" #include "flutter/fml/macros.h" #include "flutter/fml/time/time_point.h" #include "flutter/lib/ui/volatile_path_tracker.h" #include "flutter/lib/ui/window/platform_message.h" #include "flutter/shell/common/run_configuration.h" #include "flutter/shell/common/shell_test_external_view_embedder.h" #include "flutter/shell/common/shell_test_platform_view.h" #include "flutter/shell/common/thread_host.h" #include "flutter/shell/common/vsync_waiters_test.h" #include "flutter/testing/elf_loader.h" #include "flutter/testing/fixture_test.h" #include "flutter/testing/test_dart_native_resolver.h" namespace flutter { namespace testing { // The signature of ViewContent::builder. using LayerTreeBuilder = std::function<void(std::shared_ptr<ContainerLayer> root)>; struct ViewContent; // Defines the content to be rendered to all views of a frame in PumpOneFrame. using FrameContent = std::map<int64_t, ViewContent>; // Defines the content to be rendered to a view in PumpOneFrame. struct ViewContent { flutter::ViewportMetrics viewport_metrics; // Given the root layer, this callback builds the layer tree to be rasterized // in PumpOneFrame. LayerTreeBuilder builder; // Build a frame with no views. This is useful when PumpOneFrame is used just // to schedule the frame while the frame content is defined by other means. static FrameContent NoViews(); // Build a frame with a single implicit view with the specific size and no // content. static FrameContent DummyView(double width = 1, double height = 1); // Build a frame with a single implicit view with the specific viewport // metrics and no content. static FrameContent DummyView(flutter::ViewportMetrics viewport_metrics); // Build a frame with a single implicit view with the specific size and // content. static FrameContent ImplicitView(double width, double height, LayerTreeBuilder builder); }; class ShellTest : public FixtureTest { public: struct Config { // Required. const Settings& settings; // Defaults to GetTaskRunnersForFixture(). std::optional<TaskRunners> task_runners = {}; bool is_gpu_disabled = false; // Defaults to calling ShellTestPlatformView::Create with the provided // arguments. 
Shell::CreateCallback<PlatformView> platform_view_create_callback; }; ShellTest(); Settings CreateSettingsForFixture() override; std::unique_ptr<Shell> CreateShell( const Settings& settings, std::optional<TaskRunners> task_runners = {}); std::unique_ptr<Shell> CreateShell(const Config& config); void DestroyShell(std::unique_ptr<Shell> shell); void DestroyShell(std::unique_ptr<Shell> shell, const TaskRunners& task_runners); TaskRunners GetTaskRunnersForFixture(); fml::TimePoint GetLatestFrameTargetTime(Shell* shell) const; void SendPlatformMessage(Shell* shell, std::unique_ptr<PlatformMessage> message); void SendEnginePlatformMessage(Shell* shell, std::unique_ptr<PlatformMessage> message); static void PlatformViewNotifyCreated( Shell* shell); // This creates the surface static void PlatformViewNotifyDestroyed( Shell* shell); // This destroys the surface static void RunEngine(Shell* shell, RunConfiguration configuration); static void RestartEngine(Shell* shell, RunConfiguration configuration); /// Issue as many VSYNC as needed to flush the UI tasks so far, and reset /// the content of `will_draw_new_frame` to true if it's not nullptr. static void VSyncFlush(Shell* shell, bool* will_draw_new_frame = nullptr); static void SetViewportMetrics(Shell* shell, double width, double height); static void NotifyIdle(Shell* shell, fml::TimeDelta deadline); static void PumpOneFrame(Shell* shell); static void PumpOneFrame(Shell* shell, FrameContent frame_content); static void DispatchFakePointerData(Shell* shell); static void DispatchPointerData(Shell* shell, std::unique_ptr<PointerDataPacket> packet); // Declare |UnreportedTimingsCount|, |GetNeedsReportTimings| and // |SetNeedsReportTimings| inside |ShellTest| mainly for easier friend class // declarations as shell unit tests and Shell are in different name spaces. static bool GetNeedsReportTimings(Shell* shell); static void SetNeedsReportTimings(Shell* shell, bool value); // Declare |StorePersistentCache| inside |ShellTest| so |PersistentCache| can // friend |ShellTest| and allow us to call private |PersistentCache::store| in // unit tests. static void StorePersistentCache(PersistentCache* cache, const SkData& key, const SkData& value); static bool IsAnimatorRunning(Shell* shell); enum ServiceProtocolEnum { kGetSkSLs, kEstimateRasterCacheMemory, kSetAssetBundlePath, kRunInView, kRenderFrameWithRasterStats, }; // Helper method to test private method Shell::OnServiceProtocolGetSkSLs. // (ShellTest is a friend class of Shell.) We'll also make sure that it is // running on the correct task_runner. static void OnServiceProtocol( Shell* shell, ServiceProtocolEnum some_protocol, const fml::RefPtr<fml::TaskRunner>& task_runner, const ServiceProtocol::Handler::ServiceProtocolMap& params, rapidjson::Document* response); std::shared_ptr<txt::FontCollection> GetFontCollection(Shell* shell); // Do not assert |UnreportedTimingsCount| to be positive in any tests. // Otherwise those tests will be flaky as the clearing of unreported timings // is unpredictive. static int UnreportedTimingsCount(Shell* shell); static size_t GetLiveTrackedPathCount( const std::shared_ptr<VolatilePathTracker>& tracker); static void TurnOffGPU(Shell* shell, bool value); private: ThreadHost thread_host_; FML_DISALLOW_COPY_AND_ASSIGN(ShellTest); }; } // namespace testing } // namespace flutter #endif // FLUTTER_SHELL_COMMON_SHELL_TEST_H_
engine/shell/common/shell_test.h/0
{ "file_path": "engine/shell/common/shell_test.h", "repo_id": "engine", "token_count": 2181 }
307
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/shell/common/snapshot_controller_impeller.h" #include <algorithm> #include "flutter/flow/surface.h" #include "flutter/fml/trace_event.h" #include "flutter/impeller/display_list/dl_dispatcher.h" #include "flutter/impeller/display_list/dl_image_impeller.h" #include "flutter/impeller/geometry/size.h" #include "flutter/shell/common/snapshot_controller.h" namespace flutter { sk_sp<DlImage> SnapshotControllerImpeller::MakeRasterSnapshot( sk_sp<DisplayList> display_list, SkISize size) { sk_sp<DlImage> result; GetDelegate().GetIsGpuDisabledSyncSwitch()->Execute( fml::SyncSwitch::Handlers() .SetIfTrue([&] { // Do nothing. }) .SetIfFalse( [&] { result = DoMakeRasterSnapshot(display_list, size); })); return result; } sk_sp<DlImage> SnapshotControllerImpeller::DoMakeRasterSnapshot( const sk_sp<DisplayList>& display_list, SkISize size) { TRACE_EVENT0("flutter", __FUNCTION__); impeller::DlDispatcher dispatcher; display_list->Dispatch(dispatcher); impeller::Picture picture = dispatcher.EndRecordingAsPicture(); auto context = GetDelegate().GetAiksContext(); if (context) { auto max_size = context->GetContext() ->GetResourceAllocator() ->GetMaxTextureSizeSupported(); double scale_factor_x = static_cast<double>(max_size.width) / static_cast<double>(size.width()); double scale_factor_y = static_cast<double>(max_size.height) / static_cast<double>(size.height()); double scale_factor = std::min(1.0, std::min(scale_factor_x, scale_factor_y)); auto render_target_size = impeller::ISize(size.width(), size.height()); // Scale down the render target size to the max supported by the // GPU if necessary. Exceeding the max would otherwise cause a // null result. if (scale_factor < 1.0) { render_target_size.width *= scale_factor; render_target_size.height *= scale_factor; } std::shared_ptr<impeller::Image> image = picture.ToImage(*context, render_target_size); if (image) { return impeller::DlImageImpeller::Make(image->GetTexture(), DlImage::OwningContext::kRaster); } } return nullptr; } sk_sp<SkImage> SnapshotControllerImpeller::ConvertToRasterImage( sk_sp<SkImage> image) { FML_UNREACHABLE(); } } // namespace flutter
engine/shell/common/snapshot_controller_impeller.cc/0
{ "file_path": "engine/shell/common/snapshot_controller_impeller.cc", "repo_id": "engine", "token_count": 1066 }
308
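The snapshot controller above clamps the requested snapshot size to the GPU's maximum supported texture size before rendering, scaling both dimensions by the same factor so the aspect ratio is preserved. Below is a minimal, language-neutral sketch of that clamping arithmetic, written in plain Java rather than against Flutter's C++ types; the 4096x4096 limit in the usage example is an assumed device value.

// Sketch of the render-target clamping performed in DoMakeRasterSnapshot:
// if the requested snapshot exceeds the maximum texture size reported by the
// GPU, shrink both dimensions uniformly; never scale up.
final class SnapshotSizeClamp {
  static int[] clampToMaxTextureSize(int width, int height, int maxWidth, int maxHeight) {
    double scaleX = (double) maxWidth / (double) width;
    double scaleY = (double) maxHeight / (double) height;
    double scale = Math.min(1.0, Math.min(scaleX, scaleY));
    if (scale < 1.0) {
      width = (int) (width * scale);
      height = (int) (height * scale);
    }
    return new int[] {width, height};
  }

  public static void main(String[] args) {
    // A 10000x2000 request on a device with an assumed 4096x4096 limit becomes 4096x819.
    int[] clamped = clampToMaxTextureSize(10000, 2000, 4096, 4096);
    System.out.println(clamped[0] + "x" + clamped[1]);
  }
}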
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/shell/common/vsync_waiter_fallback.h" #include <memory> #include "flutter/fml/logging.h" #include "flutter/fml/message_loop.h" #include "flutter/fml/trace_event.h" namespace flutter { namespace { static fml::TimePoint SnapToNextTick(fml::TimePoint value, fml::TimePoint tick_phase, fml::TimeDelta tick_interval) { fml::TimeDelta offset = (tick_phase - value) % tick_interval; if (offset != fml::TimeDelta::Zero()) { offset = offset + tick_interval; } return value + offset; } } // namespace VsyncWaiterFallback::VsyncWaiterFallback(const TaskRunners& task_runners, bool for_testing) : VsyncWaiter(task_runners), phase_(fml::TimePoint::Now()), for_testing_(for_testing) {} VsyncWaiterFallback::~VsyncWaiterFallback() = default; // |VsyncWaiter| void VsyncWaiterFallback::AwaitVSync() { constexpr fml::TimeDelta kSingleFrameInterval = fml::TimeDelta::FromSecondsF(1.0 / 60.0); auto frame_start_time = SnapToNextTick(fml::TimePoint::Now(), phase_, kSingleFrameInterval); auto frame_target_time = frame_start_time + kSingleFrameInterval; TRACE_EVENT2_INT("flutter", "PlatformVsync", "frame_start_time", frame_start_time.ToEpochDelta().ToMicroseconds(), "frame_target_time", frame_target_time.ToEpochDelta().ToMicroseconds()); std::weak_ptr<VsyncWaiterFallback> weak_this = std::static_pointer_cast<VsyncWaiterFallback>(shared_from_this()); task_runners_.GetUITaskRunner()->PostTaskForTime( [frame_start_time, frame_target_time, weak_this]() { if (auto vsync_waiter = weak_this.lock()) { vsync_waiter->FireCallback(frame_start_time, frame_target_time, !vsync_waiter->for_testing_); } }, frame_start_time); } } // namespace flutter
engine/shell/common/vsync_waiter_fallback.cc/0
{ "file_path": "engine/shell/common/vsync_waiter_fallback.cc", "repo_id": "engine", "token_count": 918 }
309
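VsyncWaiterFallback synthesizes vsync events on a fixed 60 Hz grid anchored at the time the waiter was constructed (its phase). The snapping arithmetic is small enough to show in isolation; the sketch below mirrors SnapToNextTick in plain Java with times in microseconds, and the concrete timestamps in main are arbitrary example values.

// Mirrors VsyncWaiterFallback::SnapToNextTick: given "now", find the next tick
// on a fixed-interval grid anchored at "phase". The remainder is negative (or
// zero) when the phase lies in the past, so adding one interval moves the
// result forward to the next tick rather than back to the previous one.
final class FallbackVsync {
  static final long FRAME_INTERVAL_US = 1_000_000L / 60; // ~16,666 us per frame at 60 Hz

  static long snapToNextTick(long nowUs, long phaseUs, long intervalUs) {
    long offset = (phaseUs - nowUs) % intervalUs;
    if (offset != 0) {
      offset += intervalUs;
    }
    return nowUs + offset;
  }

  public static void main(String[] args) {
    long phaseUs = 0;     // arbitrary anchor
    long nowUs = 40_000;  // 40 ms after the anchor
    long frameStart = snapToNextTick(nowUs, phaseUs, FRAME_INTERVAL_US);
    long frameTarget = frameStart + FRAME_INTERVAL_US;
    System.out.println("frame start: " + frameStart + " us, target: " + frameTarget + " us");
  }
}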
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_SHELL_GPU_GPU_SURFACE_METAL_IMPELLER_H_ #define FLUTTER_SHELL_GPU_GPU_SURFACE_METAL_IMPELLER_H_ #include <Metal/Metal.h> #include "flutter/flow/surface.h" #include "flutter/fml/macros.h" #include "flutter/fml/platform/darwin/scoped_nsobject.h" #include "flutter/impeller/aiks/aiks_context.h" #include "flutter/impeller/renderer/backend/metal/context_mtl.h" #include "flutter/impeller/renderer/renderer.h" #include "flutter/shell/gpu/gpu_surface_metal_delegate.h" #include "third_party/skia/include/gpu/ganesh/mtl/GrMtlTypes.h" namespace flutter { class IMPELLER_CA_METAL_LAYER_AVAILABLE GPUSurfaceMetalImpeller : public Surface { public: GPUSurfaceMetalImpeller(GPUSurfaceMetalDelegate* delegate, const std::shared_ptr<impeller::Context>& context, bool render_to_surface = true); // |Surface| ~GPUSurfaceMetalImpeller(); // |Surface| bool IsValid() override; virtual Surface::SurfaceData GetSurfaceData() const override; private: const GPUSurfaceMetalDelegate* delegate_; const MTLRenderTargetType render_target_type_; std::shared_ptr<impeller::Renderer> impeller_renderer_; std::shared_ptr<impeller::AiksContext> aiks_context_; fml::scoped_nsprotocol<id<MTLTexture>> last_texture_; // TODO(38466): Refactor GPU surface APIs take into account the fact that an // external view embedder may want to render to the root surface. This is a // hack to make avoid allocating resources for the root surface when an // external view embedder is present. bool render_to_surface_ = true; bool disable_partial_repaint_ = false; // Accumulated damage for each framebuffer; Key is address of underlying // MTLTexture for each drawable std::shared_ptr<std::map<uintptr_t, SkIRect>> damage_ = std::make_shared<std::map<uintptr_t, SkIRect>>(); // |Surface| std::unique_ptr<SurfaceFrame> AcquireFrame( const SkISize& frame_size) override; std::unique_ptr<SurfaceFrame> AcquireFrameFromCAMetalLayer( const SkISize& frame_size); std::unique_ptr<SurfaceFrame> AcquireFrameFromMTLTexture( const SkISize& frame_size); // |Surface| SkMatrix GetRootTransformation() const override; // |Surface| GrDirectContext* GetContext() override; // |Surface| std::unique_ptr<GLContextResult> MakeRenderContextCurrent() override; // |Surface| bool AllowsDrawingWhenGpuDisabled() const override; // |Surface| bool EnableRasterCache() const override; // |Surface| std::shared_ptr<impeller::AiksContext> GetAiksContext() const override; FML_DISALLOW_COPY_AND_ASSIGN(GPUSurfaceMetalImpeller); }; } // namespace flutter #endif // FLUTTER_SHELL_GPU_GPU_SURFACE_METAL_IMPELLER_H_
engine/shell/gpu/gpu_surface_metal_impeller.h/0
{ "file_path": "engine/shell/gpu/gpu_surface_metal_impeller.h", "repo_id": "engine", "token_count": 1058 }
310
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/shell/platform/android/android_environment_gl.h" namespace flutter { AndroidEnvironmentGL::AndroidEnvironmentGL() : display_(EGL_NO_DISPLAY) { // Get the display. display_ = eglGetDisplay(EGL_DEFAULT_DISPLAY); if (display_ == EGL_NO_DISPLAY) { return; } // Initialize the display connection. if (eglInitialize(display_, nullptr, nullptr) != EGL_TRUE) { return; } valid_ = true; } AndroidEnvironmentGL::~AndroidEnvironmentGL() { // Disconnect the display if valid. if (display_ != EGL_NO_DISPLAY) { eglTerminate(display_); } } bool AndroidEnvironmentGL::IsValid() const { return valid_; } EGLDisplay AndroidEnvironmentGL::Display() const { return display_; } } // namespace flutter
engine/shell/platform/android/android_environment_gl.cc/0
{ "file_path": "engine/shell/platform/android/android_environment_gl.cc", "repo_id": "engine", "token_count": 300 }
311
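AndroidEnvironmentGL wraps the lifetime of the default EGL display connection: obtain the display, initialize it, and terminate it on destruction. The following is a Java-side sketch of the same sequence using the public android.opengl.EGL14 API; it is an illustration of the EGL calls involved, not part of the Flutter embedding.

import android.opengl.EGL14;
import android.opengl.EGLDisplay;

// Acquire and initialize the default EGL display, mirroring what
// AndroidEnvironmentGL does in native code, and terminate it when released.
final class EglEnvironment {
  private final EGLDisplay display;
  private final boolean valid;

  EglEnvironment() {
    EGLDisplay d = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
    boolean initialized = false;
    if (d != EGL14.EGL_NO_DISPLAY) {
      int[] version = new int[2]; // major/minor written by eglInitialize
      initialized = EGL14.eglInitialize(d, version, 0, version, 1);
    }
    this.display = d;
    this.valid = initialized;
  }

  boolean isValid() {
    return valid;
  }

  EGLDisplay display() {
    return display;
  }

  void release() {
    // Disconnect the display if one was obtained, matching the C++ destructor.
    if (display != EGL14.EGL_NO_DISPLAY) {
      EGL14.eglTerminate(display);
    }
  }
}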
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/shell/platform/android/apk_asset_provider.h" #include <unistd.h> #include <algorithm> #include <sstream> #include <utility> #include "flutter/assets/asset_resolver.h" #include "flutter/fml/logging.h" namespace flutter { class APKAssetMapping : public fml::Mapping { public: explicit APKAssetMapping(AAsset* asset) : asset_(asset) {} ~APKAssetMapping() override { AAsset_close(asset_); } size_t GetSize() const override { return AAsset_getLength(asset_); } const uint8_t* GetMapping() const override { return reinterpret_cast<const uint8_t*>(AAsset_getBuffer(asset_)); } bool IsDontNeedSafe() const override { return !AAsset_isAllocated(asset_); } private: AAsset* const asset_; FML_DISALLOW_COPY_AND_ASSIGN(APKAssetMapping); }; class APKAssetProviderImpl : public APKAssetProviderInternal { public: explicit APKAssetProviderImpl(JNIEnv* env, jobject jassetManager, std::string directory) : java_asset_manager_(env, jassetManager), directory_(std::move(directory)) { asset_manager_ = AAssetManager_fromJava(env, jassetManager); } ~APKAssetProviderImpl() = default; std::unique_ptr<fml::Mapping> GetAsMapping( const std::string& asset_name) const override { std::stringstream ss; ss << directory_.c_str() << "/" << asset_name; AAsset* asset = AAssetManager_open(asset_manager_, ss.str().c_str(), AASSET_MODE_BUFFER); if (!asset) { return nullptr; } return std::make_unique<APKAssetMapping>(asset); }; private: fml::jni::ScopedJavaGlobalRef<jobject> java_asset_manager_; AAssetManager* asset_manager_; const std::string directory_; FML_DISALLOW_COPY_AND_ASSIGN(APKAssetProviderImpl); }; APKAssetProvider::APKAssetProvider(JNIEnv* env, jobject assetManager, std::string directory) : impl_(std::make_shared<APKAssetProviderImpl>(env, assetManager, std::move(directory))) {} APKAssetProvider::APKAssetProvider( std::shared_ptr<APKAssetProviderInternal> impl) : impl_(std::move(impl)) {} // |AssetResolver| bool APKAssetProvider::IsValid() const { return true; } // |AssetResolver| bool APKAssetProvider::IsValidAfterAssetManagerChange() const { return true; } // |AssetResolver| AssetResolver::AssetResolverType APKAssetProvider::GetType() const { return AssetResolver::AssetResolverType::kApkAssetProvider; } // |AssetResolver| std::unique_ptr<fml::Mapping> APKAssetProvider::GetAsMapping( const std::string& asset_name) const { return impl_->GetAsMapping(asset_name); } std::unique_ptr<APKAssetProvider> APKAssetProvider::Clone() const { return std::make_unique<APKAssetProvider>(impl_); } bool APKAssetProvider::operator==(const AssetResolver& other) const { auto other_provider = other.as_apk_asset_provider(); if (!other_provider) { return false; } return impl_ == other_provider->impl_; } } // namespace flutter
engine/shell/platform/android/apk_asset_provider.cc/0
{ "file_path": "engine/shell/platform/android/apk_asset_provider.cc", "repo_id": "engine", "token_count": 1355 }
312
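APKAssetProvider resolves an asset by opening "<directory>/<asset_name>" from the APK through the NDK AAssetManager handed over from Java. The sketch below performs the equivalent lookup from the Java side with android.content.res.AssetManager; the "flutter_assets" directory and the asset name in the usage comment are assumed example values, not something this file defines.

import android.content.Context;
import android.content.res.AssetManager;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;

// Reads "<directory>/<assetName>" out of the APK, the same path construction
// APKAssetProviderImpl performs in GetAsMapping before handing the bytes to
// the engine as a mapping.
final class ApkAssetReader {
  private final AssetManager assetManager;
  private final String directory;

  ApkAssetReader(Context context, String directory) {
    this.assetManager = context.getAssets();
    this.directory = directory;
  }

  byte[] read(String assetName) throws IOException {
    try (InputStream in = assetManager.open(directory + "/" + assetName)) {
      ByteArrayOutputStream out = new ByteArrayOutputStream();
      byte[] buffer = new byte[8192];
      int n;
      while ((n = in.read(buffer)) != -1) {
        out.write(buffer, 0, n);
      }
      return out.toByteArray();
    }
  }
}

// Example usage (directory and asset name are assumed values):
//   byte[] bytes = new ApkAssetReader(context, "flutter_assets").read("AssetManifest.json");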
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package io.flutter.app; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.app.Activity; import android.app.Application; import android.content.Context; import android.content.Intent; import android.content.pm.ActivityInfo; import android.content.pm.ApplicationInfo; import android.content.pm.PackageManager; import android.content.pm.PackageManager.NameNotFoundException; import android.content.res.Configuration; import android.content.res.Resources.NotFoundException; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.util.TypedValue; import android.view.View; import android.view.ViewGroup; import android.view.Window; import android.view.WindowManager.LayoutParams; import io.flutter.Log; import io.flutter.plugin.common.PluginRegistry; import io.flutter.plugin.platform.PlatformPlugin; import io.flutter.util.Preconditions; import io.flutter.view.FlutterMain; import io.flutter.view.FlutterNativeView; import io.flutter.view.FlutterRunArguments; import io.flutter.view.FlutterView; import java.util.ArrayList; /** * Deprecated class that performs the actual work of tying Android {@link android.app.Activity} * instances to Flutter. * * <p>This exists as a dedicated class (as opposed to being integrated directly into {@link * FlutterActivity}) to facilitate applications that don't wish to subclass {@code FlutterActivity}. * The most obvious example of when this may come in handy is if an application wishes to subclass * the Android v4 support library's {@code FragmentActivity}. * * <p><b>Usage:</b> * * <p>To wire this class up to your activity, simply forward the events defined in {@link * FlutterActivityEvents} from your activity to an instance of this class. Optionally, you can make * your activity implement {@link PluginRegistry} and/or {@link * io.flutter.view.FlutterView.Provider} and forward those methods to this class as well. * * @deprecated {@link io.flutter.embedding.android.FlutterActivity} is the new API that now replaces * this class and {@link io.flutter.app.FlutterActivity}. See * https://flutter.dev/go/android-project-migration for more migration details. */ @Deprecated public final class FlutterActivityDelegate implements FlutterActivityEvents, FlutterView.Provider, PluginRegistry { private static final String SPLASH_SCREEN_META_DATA_KEY = "io.flutter.app.android.SplashScreenUntilFirstFrame"; private static final String TAG = "FlutterActivityDelegate"; private static final LayoutParams matchParent = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT); /** * Specifies the mechanism by which Flutter views are created during the operation of a {@code * FlutterActivityDelegate}. * * <p>A delegate's view factory will be consulted during {@link #onCreate(Bundle)}. If it returns * {@code null}, then the delegate will fall back to instantiating a new full-screen {@code * FlutterView}. * * <p>A delegate's native view factory will be consulted during {@link #onCreate(Bundle)}. If it * returns {@code null}, then the delegate will fall back to instantiating a new {@code * FlutterNativeView}. This is useful for applications to override to reuse the FlutterNativeView * held e.g. by a pre-existing background service. 
*/ public interface ViewFactory { FlutterView createFlutterView(Context context); FlutterNativeView createFlutterNativeView(); /** * Hook for subclasses to indicate that the {@code FlutterNativeView} returned by {@link * #createFlutterNativeView()} should not be destroyed when this activity is destroyed. * * @return Whether the FlutterNativeView is retained. */ boolean retainFlutterNativeView(); } private final Activity activity; private final ViewFactory viewFactory; private FlutterView flutterView; private View launchView; public FlutterActivityDelegate(Activity activity, ViewFactory viewFactory) { this.activity = Preconditions.checkNotNull(activity); this.viewFactory = Preconditions.checkNotNull(viewFactory); } @Override public FlutterView getFlutterView() { return flutterView; } // The implementation of PluginRegistry forwards to flutterView. @Override public boolean hasPlugin(String key) { return flutterView.getPluginRegistry().hasPlugin(key); } @Override @SuppressWarnings("unchecked") public <T> T valuePublishedByPlugin(String pluginKey) { return (T) flutterView.getPluginRegistry().valuePublishedByPlugin(pluginKey); } @Override public Registrar registrarFor(String pluginKey) { return flutterView.getPluginRegistry().registrarFor(pluginKey); } @Override public boolean onRequestPermissionsResult( int requestCode, String[] permissions, int[] grantResults) { return flutterView .getPluginRegistry() .onRequestPermissionsResult(requestCode, permissions, grantResults); } @Override public boolean onActivityResult(int requestCode, int resultCode, Intent data) { return flutterView.getPluginRegistry().onActivityResult(requestCode, resultCode, data); } @Override public void onCreate(Bundle savedInstanceState) { Window window = activity.getWindow(); window.addFlags(LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS); window.setStatusBarColor(0x40000000); window.getDecorView().setSystemUiVisibility(PlatformPlugin.DEFAULT_SYSTEM_UI); String[] args = getArgsFromIntent(activity.getIntent()); FlutterMain.ensureInitializationComplete(activity.getApplicationContext(), args); flutterView = viewFactory.createFlutterView(activity); if (flutterView == null) { FlutterNativeView nativeView = viewFactory.createFlutterNativeView(); flutterView = new FlutterView(activity, null, nativeView); flutterView.setLayoutParams(matchParent); activity.setContentView(flutterView); launchView = createLaunchView(); if (launchView != null) { addLaunchView(); } } if (loadIntent(activity.getIntent())) { return; } String appBundlePath = FlutterMain.findAppBundlePath(); if (appBundlePath != null) { runBundle(appBundlePath); } } @Override public void onNewIntent(Intent intent) { // Only attempt to reload the Flutter Dart code during development. Use // the debuggable flag as an indicator that we are in development mode. 
if (!isDebuggable() || !loadIntent(intent)) { flutterView.getPluginRegistry().onNewIntent(intent); } } private boolean isDebuggable() { return (activity.getApplicationInfo().flags & ApplicationInfo.FLAG_DEBUGGABLE) != 0; } @Override public void onPause() { Application app = (Application) activity.getApplicationContext(); if (app instanceof FlutterApplication) { FlutterApplication flutterApp = (FlutterApplication) app; if (activity.equals(flutterApp.getCurrentActivity())) { flutterApp.setCurrentActivity(null); } } if (flutterView != null) { flutterView.onPause(); } } @Override public void onStart() { if (flutterView != null) { flutterView.onStart(); } } @Override public void onResume() { Application app = (Application) activity.getApplicationContext(); if (app instanceof FlutterApplication) { FlutterApplication flutterApp = (FlutterApplication) app; flutterApp.setCurrentActivity(activity); } } @Override public void onStop() { flutterView.onStop(); } @Override public void onPostResume() { if (flutterView != null) { flutterView.onPostResume(); } } @Override public void onDestroy() { Application app = (Application) activity.getApplicationContext(); if (app instanceof FlutterApplication) { FlutterApplication flutterApp = (FlutterApplication) app; if (activity.equals(flutterApp.getCurrentActivity())) { flutterApp.setCurrentActivity(null); } } if (flutterView != null) { final boolean detach = flutterView.getPluginRegistry().onViewDestroy(flutterView.getFlutterNativeView()); if (detach || viewFactory.retainFlutterNativeView()) { // Detach, but do not destroy the FlutterView if a plugin // expressed interest in its FlutterNativeView. flutterView.detach(); } else { flutterView.destroy(); } } } @Override public boolean onBackPressed() { if (flutterView != null) { flutterView.popRoute(); return true; } return false; } @Override public void onUserLeaveHint() { flutterView.getPluginRegistry().onUserLeaveHint(); } @Override public void onWindowFocusChanged(boolean hasFocus) { flutterView.getPluginRegistry().onWindowFocusChanged(hasFocus); } @Override public void onTrimMemory(int level) { // Use a trim level delivered while the application is running so the // framework has a chance to react to the notification. if (level == TRIM_MEMORY_RUNNING_LOW) { flutterView.onMemoryPressure(); } } @Override public void onLowMemory() { flutterView.onMemoryPressure(); } @Override public void onConfigurationChanged(Configuration newConfig) {} private static String[] getArgsFromIntent(Intent intent) { // Before adding more entries to this list, consider that arbitrary // Android applications can generate intents with extra data and that // there are many security-sensitive args in the binary. 
ArrayList<String> args = new ArrayList<>(); if (intent.getBooleanExtra("trace-startup", false)) { args.add("--trace-startup"); } if (intent.getBooleanExtra("start-paused", false)) { args.add("--start-paused"); } if (intent.getBooleanExtra("disable-service-auth-codes", false)) { args.add("--disable-service-auth-codes"); } if (intent.getBooleanExtra("use-test-fonts", false)) { args.add("--use-test-fonts"); } if (intent.getBooleanExtra("enable-dart-profiling", false)) { args.add("--enable-dart-profiling"); } if (intent.getBooleanExtra("enable-software-rendering", false)) { args.add("--enable-software-rendering"); } if (intent.getBooleanExtra("skia-deterministic-rendering", false)) { args.add("--skia-deterministic-rendering"); } if (intent.getBooleanExtra("trace-skia", false)) { args.add("--trace-skia"); } if (intent.getBooleanExtra("trace-systrace", false)) { args.add("--trace-systrace"); } if (intent.hasExtra("trace-to-file")) { args.add("--trace-to-file=" + intent.getStringExtra("trace-to-file")); } if (intent.getBooleanExtra("dump-skp-on-shader-compilation", false)) { args.add("--dump-skp-on-shader-compilation"); } if (intent.getBooleanExtra("cache-sksl", false)) { args.add("--cache-sksl"); } if (intent.getBooleanExtra("purge-persistent-cache", false)) { args.add("--purge-persistent-cache"); } if (intent.getBooleanExtra("verbose-logging", false)) { args.add("--verbose-logging"); } int vmServicePort = intent.getIntExtra("vm-service-port", 0); if (vmServicePort > 0) { args.add("--vm-service-port=" + Integer.toString(vmServicePort)); } else { // TODO(bkonyi): remove once flutter_tools no longer uses this option. // See https://github.com/dart-lang/sdk/issues/50233 vmServicePort = intent.getIntExtra("observatory-port", 0); if (vmServicePort > 0) { args.add("--vm-service-port=" + Integer.toString(vmServicePort)); } } if (intent.getBooleanExtra("endless-trace-buffer", false)) { args.add("--endless-trace-buffer"); } // NOTE: all flags provided with this argument are subject to filtering // based on a list of allowed flags in shell/common/switches.cc. If any // flag provided is not allowed, the process will immediately terminate. if (intent.hasExtra("dart-flags")) { args.add("--dart-flags=" + intent.getStringExtra("dart-flags")); } if (!args.isEmpty()) { String[] argsArray = new String[args.size()]; return args.toArray(argsArray); } return null; } private boolean loadIntent(Intent intent) { String action = intent.getAction(); if (Intent.ACTION_RUN.equals(action)) { String route = intent.getStringExtra("route"); String appBundlePath = intent.getDataString(); if (appBundlePath == null) { // Fall back to the installation path if no bundle path was specified. appBundlePath = FlutterMain.findAppBundlePath(); } if (route != null) { flutterView.setInitialRoute(route); } runBundle(appBundlePath); return true; } return false; } private void runBundle(String appBundlePath) { if (!flutterView.getFlutterNativeView().isApplicationRunning()) { FlutterRunArguments args = new FlutterRunArguments(); args.bundlePath = appBundlePath; args.entrypoint = "main"; flutterView.runFromBundle(args); } } /** * Creates a {@link View} containing the same {@link Drawable} as the one set as the {@code * windowBackground} of the parent activity for use as a launch splash view. * * <p>Returns null if no {@code windowBackground} is set for the activity. 
*/ private View createLaunchView() { if (!showSplashScreenUntilFirstFrame()) { return null; } final Drawable launchScreenDrawable = getLaunchScreenDrawableFromActivityTheme(); if (launchScreenDrawable == null) { return null; } final View view = new View(activity); view.setLayoutParams(matchParent); view.setBackground(launchScreenDrawable); return view; } /** * Extracts a {@link Drawable} from the parent activity's {@code windowBackground}. * * <p>{@code android:windowBackground} is specifically reused instead of a other attributes * because the Android framework can display it fast enough when launching the app as opposed to * anything defined in the Activity subclass. * * <p>Returns null if no {@code windowBackground} is set for the activity. */ @SuppressWarnings("deprecation") private Drawable getLaunchScreenDrawableFromActivityTheme() { TypedValue typedValue = new TypedValue(); if (!activity.getTheme().resolveAttribute(android.R.attr.windowBackground, typedValue, true)) { return null; } if (typedValue.resourceId == 0) { return null; } try { return activity.getResources().getDrawable(typedValue.resourceId); } catch (NotFoundException e) { Log.e(TAG, "Referenced launch screen windowBackground resource does not exist"); return null; } } /** * Let the user specify whether the activity's {@code windowBackground} is a launch screen and * should be shown until the first frame via a <meta-data> tag in the activity. */ private Boolean showSplashScreenUntilFirstFrame() { try { ActivityInfo activityInfo = activity .getPackageManager() .getActivityInfo(activity.getComponentName(), PackageManager.GET_META_DATA); Bundle metadata = activityInfo.metaData; return metadata != null && metadata.getBoolean(SPLASH_SCREEN_META_DATA_KEY); } catch (NameNotFoundException e) { return false; } } /** * Show and then automatically animate out the launch view. * * <p>If a launch screen is defined in the user application's AndroidManifest.xml as the * activity's {@code windowBackground}, display it on top of the {@link FlutterView} and remove * the activity's {@code windowBackground}. * * <p>Fade it out and remove it when the {@link FlutterView} renders its first frame. */ private void addLaunchView() { if (launchView == null) { return; } activity.addContentView(launchView, matchParent); flutterView.addFirstFrameListener( new FlutterView.FirstFrameListener() { @Override public void onFirstFrame() { FlutterActivityDelegate.this .launchView .animate() .alpha(0f) // Use Android's default animation duration. .setListener( new AnimatorListenerAdapter() { @Override public void onAnimationEnd(Animator animation) { // Views added to an Activity's addContentView is always added to its // root FrameLayout. ((ViewGroup) FlutterActivityDelegate.this.launchView.getParent()) .removeView(FlutterActivityDelegate.this.launchView); FlutterActivityDelegate.this.launchView = null; } }); FlutterActivityDelegate.this.flutterView.removeFirstFrameListener(this); } }); // Resets the activity theme from the one containing the launch screen in the window // background to a blank one since the launch screen is now in a view in front of the // FlutterView. // // We can make this configurable if users want it. activity.setTheme(android.R.style.Theme_Black_NoTitleBar); } }
engine/shell/platform/android/io/flutter/app/FlutterActivityDelegate.java/0
{ "file_path": "engine/shell/platform/android/io/flutter/app/FlutterActivityDelegate.java", "repo_id": "engine", "token_count": 6106 }
313
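The delegate's javadoc describes forwarding an activity's lifecycle events to a FlutterActivityDelegate instance instead of subclassing FlutterActivity. Below is a minimal sketch of that wiring against the deprecated v1 embedding; only a subset of the callbacks is forwarded (a real host would also forward onStart, onStop, onPostResume, onActivityResult, and the rest), and returning null from the ViewFactory methods lets the delegate create its own FlutterView and FlutterNativeView.

import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import io.flutter.app.FlutterActivityDelegate;
import io.flutter.view.FlutterNativeView;
import io.flutter.view.FlutterView;

// Sketch of a host activity that delegates to FlutterActivityDelegate rather
// than extending FlutterActivity (v1 embedding only).
public class LegacyHostActivity extends Activity implements FlutterActivityDelegate.ViewFactory {
  private FlutterActivityDelegate delegate;

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    delegate = new FlutterActivityDelegate(this, this);
    delegate.onCreate(savedInstanceState);
  }

  @Override
  protected void onResume() {
    super.onResume();
    delegate.onResume();
  }

  @Override
  protected void onPause() {
    delegate.onPause();
    super.onPause();
  }

  @Override
  protected void onDestroy() {
    delegate.onDestroy();
    super.onDestroy();
  }

  @Override
  public void onBackPressed() {
    // The delegate pops the Flutter route; fall back to the default behavior otherwise.
    if (!delegate.onBackPressed()) {
      super.onBackPressed();
    }
  }

  @Override
  protected void onNewIntent(Intent intent) {
    delegate.onNewIntent(intent);
  }

  // ViewFactory: returning null tells the delegate to construct a full-screen
  // FlutterView and FlutterNativeView itself.
  @Override
  public FlutterView createFlutterView(Context context) {
    return null;
  }

  @Override
  public FlutterNativeView createFlutterNativeView() {
    return null;
  }

  @Override
  public boolean retainFlutterNativeView() {
    return false;
  }
}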
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package io.flutter.embedding.android; import androidx.annotation.CallSuper; import com.google.android.play.core.splitcompat.SplitCompatApplication; import io.flutter.FlutterInjector; import io.flutter.embedding.engine.deferredcomponents.PlayStoreDeferredComponentManager; // TODO(garyq): Add a note about deferred components automatically adding this to manifest via // manifest variable injection once it is implemented. /** * Flutter's extension of {@link SplitCompatApplication} that injects a {@link * PlayStoreDeferredComponentManager} with {@link FlutterInjector} to enable Split AOT Flutter apps. * * <p>To use this class, either have your custom application class extend * FlutterPlayStoreSplitApplication or use it directly in the app's AndroidManifest.xml by adding * the following line: * * <pre>{@code * <manifest * ... * <application * android:name="io.flutter.embedding.android.FlutterPlayStoreSplitApplication" * ...> * </application> * </manifest> * }</pre> * * This class is meant to be used with the Google Play store. Custom non-play store applications do * not need to extend {@link com.google.android.play.core.splitcompat.SplitCompatApplication} and * should inject a custom {@link * io.flutter.embedding.engine.deferredcomponents.DeferredComponentManager} implementation like so: * * <pre>{@code * FlutterInjector.setInstance( * new FlutterInjector.Builder().setDeferredComponentManager(yourCustomManager).build()); * }</pre> */ public class FlutterPlayStoreSplitApplication extends SplitCompatApplication { @Override @CallSuper public void onCreate() { super.onCreate(); // Create and inject a PlayStoreDeferredComponentManager, which is the default manager for // interacting with the Google Play Store. PlayStoreDeferredComponentManager deferredComponentManager = new PlayStoreDeferredComponentManager(this, null); FlutterInjector.setInstance( new FlutterInjector.Builder() .setDeferredComponentManager(deferredComponentManager) .build()); } }
engine/shell/platform/android/io/flutter/embedding/android/FlutterPlayStoreSplitApplication.java/0
{ "file_path": "engine/shell/platform/android/io/flutter/embedding/android/FlutterPlayStoreSplitApplication.java", "repo_id": "engine", "token_count": 667 }
314
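For apps distributed outside the Play Store, the javadoc above says to skip SplitCompatApplication and inject a custom DeferredComponentManager instead. A sketch of that alternative follows; MyComponentManager is a hypothetical placeholder for an app-supplied DeferredComponentManager implementation and does not exist in the engine.

import android.app.Application;
import io.flutter.FlutterInjector;
import io.flutter.embedding.engine.deferredcomponents.DeferredComponentManager;

// Non-Play-Store variant: install a custom deferred component manager into the
// FlutterInjector before any FlutterEngine is created.
public class CustomSplitApplication extends Application {
  @Override
  public void onCreate() {
    super.onCreate();
    // MyComponentManager is a placeholder; supply your own implementation of
    // DeferredComponentManager here.
    DeferredComponentManager manager = new MyComponentManager();
    FlutterInjector.setInstance(
        new FlutterInjector.Builder().setDeferredComponentManager(manager).build());
  }
}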
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package io.flutter.embedding.engine; import android.content.Context; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import io.flutter.FlutterInjector; import io.flutter.embedding.engine.dart.DartExecutor.DartEntrypoint; import io.flutter.embedding.engine.loader.FlutterLoader; import io.flutter.plugin.platform.PlatformViewsController; import java.util.ArrayList; import java.util.List; /** * Represents a collection of {@link io.flutter.embedding.engine.FlutterEngine}s who share resources * to allow them to be created faster and with less memory than calling the {@link * io.flutter.embedding.engine.FlutterEngine}'s constructor multiple times. * * <p>When creating or recreating the first {@link io.flutter.embedding.engine.FlutterEngine} in the * FlutterEngineGroup, the behavior is the same as creating a {@link * io.flutter.embedding.engine.FlutterEngine} via its constructor. When subsequent {@link * io.flutter.embedding.engine.FlutterEngine}s are created, resources from an existing living {@link * io.flutter.embedding.engine.FlutterEngine} is re-used. * * <p>The shared resources are kept until the last surviving {@link * io.flutter.embedding.engine.FlutterEngine} is destroyed. * * <p>Deleting a FlutterEngineGroup doesn't invalidate its existing {@link * io.flutter.embedding.engine.FlutterEngine}s, but it eliminates the possibility to create more * {@link io.flutter.embedding.engine.FlutterEngine}s in that group. */ public class FlutterEngineGroup { /* package */ @VisibleForTesting final List<FlutterEngine> activeEngines = new ArrayList<>(); /** * Create a FlutterEngineGroup whose child engines will share resources. * * <p>Since the FlutterEngineGroup is likely to have a longer lifecycle than any individual * Android component, it's more semantically correct to pass in an application context rather than * the individual Android component's context to minimize the chances of leaks. */ public FlutterEngineGroup(@NonNull Context context) { this(context, null); } /** * Create a FlutterEngineGroup whose child engines will share resources. Use {@code dartVmArgs} to * pass flags to the Dart VM during initialization. * * <p>Since the FlutterEngineGroup is likely to have a longer lifecycle than any individual * Android component, it's more semantically correct to pass in an application context rather than * the individual Android component's context to minimize the chances of leaks. */ public FlutterEngineGroup(@NonNull Context context, @Nullable String[] dartVmArgs) { FlutterLoader loader = FlutterInjector.instance().flutterLoader(); if (!loader.initialized()) { loader.startInitialization(context.getApplicationContext()); loader.ensureInitializationComplete(context.getApplicationContext(), dartVmArgs); } } /** * Creates a {@link io.flutter.embedding.engine.FlutterEngine} in this group and run its {@link * io.flutter.embedding.engine.dart.DartExecutor} with a default entrypoint of the "main" function * in the "lib/main.dart" file. * * <p>If no prior {@link io.flutter.embedding.engine.FlutterEngine} were created in this group, * the initialization cost will be slightly higher than subsequent engines. The very first {@link * io.flutter.embedding.engine.FlutterEngine} created per program, regardless of * FlutterEngineGroup, also incurs the Dart VM creation time. 
* * <p>Subsequent engine creations will share resources with existing engines. However, if all * existing engines were {@link io.flutter.embedding.engine.FlutterEngine#destroy()}ed, the next * engine created will recreate its dependencies. */ public FlutterEngine createAndRunDefaultEngine(@NonNull Context context) { return createAndRunEngine(context, null); } /** * Creates a {@link io.flutter.embedding.engine.FlutterEngine} in this group and run its {@link * io.flutter.embedding.engine.dart.DartExecutor} with the specified {@link DartEntrypoint}. * * <p>If no prior {@link io.flutter.embedding.engine.FlutterEngine} were created in this group, * the initialization cost will be slightly higher than subsequent engines. The very first {@link * io.flutter.embedding.engine.FlutterEngine} created per program, regardless of * FlutterEngineGroup, also incurs the Dart VM creation time. * * <p>Subsequent engine creations will share resources with existing engines. However, if all * existing engines were {@link io.flutter.embedding.engine.FlutterEngine#destroy()}ed, the next * engine created will recreate its dependencies. */ public FlutterEngine createAndRunEngine( @NonNull Context context, @Nullable DartEntrypoint dartEntrypoint) { return createAndRunEngine(context, dartEntrypoint, null); } /** * Creates a {@link io.flutter.embedding.engine.FlutterEngine} in this group and run its {@link * io.flutter.embedding.engine.dart.DartExecutor} with the specified {@link DartEntrypoint} and * the specified {@code initialRoute}. * * <p>If no prior {@link io.flutter.embedding.engine.FlutterEngine} were created in this group, * the initialization cost will be slightly higher than subsequent engines. The very first {@link * io.flutter.embedding.engine.FlutterEngine} created per program, regardless of * FlutterEngineGroup, also incurs the Dart VM creation time. * * <p>Subsequent engine creations will share resources with existing engines. However, if all * existing engines were {@link io.flutter.embedding.engine.FlutterEngine#destroy()}ed, the next * engine created will recreate its dependencies. */ public FlutterEngine createAndRunEngine( @NonNull Context context, @Nullable DartEntrypoint dartEntrypoint, @Nullable String initialRoute) { return createAndRunEngine( new Options(context).setDartEntrypoint(dartEntrypoint).setInitialRoute(initialRoute)); } /** * Creates a {@link io.flutter.embedding.engine.FlutterEngine} in this group and run its {@link * io.flutter.embedding.engine.dart.DartExecutor} with the specified {@link DartEntrypoint}, the * specified {@code initialRoute} and the {@code dartEntrypointArgs}. * * <p>If no prior {@link io.flutter.embedding.engine.FlutterEngine} were created in this group, * the initialization cost will be slightly higher than subsequent engines. The very first {@link * io.flutter.embedding.engine.FlutterEngine} created per program, regardless of * FlutterEngineGroup, also incurs the Dart VM creation time. * * <p>Subsequent engine creations will share resources with existing engines. However, if all * existing engines were {@link io.flutter.embedding.engine.FlutterEngine#destroy()}ed, the next * engine created will recreate its dependencies. 
*/ public FlutterEngine createAndRunEngine(@NonNull Options options) { FlutterEngine engine = null; Context context = options.getContext(); DartEntrypoint dartEntrypoint = options.getDartEntrypoint(); String initialRoute = options.getInitialRoute(); List<String> dartEntrypointArgs = options.getDartEntrypointArgs(); PlatformViewsController platformViewsController = options.getPlatformViewsController(); platformViewsController = platformViewsController != null ? platformViewsController : new PlatformViewsController(); boolean automaticallyRegisterPlugins = options.getAutomaticallyRegisterPlugins(); boolean waitForRestorationData = options.getWaitForRestorationData(); if (dartEntrypoint == null) { dartEntrypoint = DartEntrypoint.createDefault(); } if (activeEngines.size() == 0) { engine = createEngine( context, platformViewsController, automaticallyRegisterPlugins, waitForRestorationData); if (initialRoute != null) { engine.getNavigationChannel().setInitialRoute(initialRoute); } engine.getDartExecutor().executeDartEntrypoint(dartEntrypoint, dartEntrypointArgs); } else { engine = activeEngines .get(0) .spawn( context, dartEntrypoint, initialRoute, dartEntrypointArgs, platformViewsController, automaticallyRegisterPlugins, waitForRestorationData); } activeEngines.add(engine); final FlutterEngine engineToCleanUpOnDestroy = engine; engine.addEngineLifecycleListener( new FlutterEngine.EngineLifecycleListener() { @Override public void onPreEngineRestart() { // No-op. Not interested. } @Override public void onEngineWillDestroy() { activeEngines.remove(engineToCleanUpOnDestroy); } }); return engine; } @VisibleForTesting /* package */ FlutterEngine createEngine( Context context, @NonNull PlatformViewsController platformViewsController, boolean automaticallyRegisterPlugins, boolean waitForRestorationData) { return new FlutterEngine( context, // Context. null, // FlutterLoader. null, // FlutterJNI. platformViewsController, // PlatformViewsController. null, // String[]. The Dart VM has already started, this arguments will have no effect. automaticallyRegisterPlugins, // boolean. waitForRestorationData, // boolean. this); } /** Options that control how a FlutterEngine should be created. */ public static class Options { @NonNull private Context context; @Nullable private DartEntrypoint dartEntrypoint; @Nullable private String initialRoute; @Nullable private List<String> dartEntrypointArgs; @NonNull private PlatformViewsController platformViewsController; private boolean automaticallyRegisterPlugins = true; private boolean waitForRestorationData = false; public Options(@NonNull Context context) { this.context = context; } public Context getContext() { return context; } /** * dartEntrypoint specifies the {@link DartEntrypoint} the new engine should run. It doesn't * need to be the same entrypoint as the current engine but must be built in the same AOT or * snapshot. */ public DartEntrypoint getDartEntrypoint() { return dartEntrypoint; } /** * The name of the initial Flutter `Navigator` `Route` to load. If this is null, it will default * to the "/" route. */ public String getInitialRoute() { return initialRoute; } /** Arguments passed as a list of string to Dart's entrypoint function. */ public List<String> getDartEntrypointArgs() { return dartEntrypointArgs; } /** Manages platform views. 
*/ public PlatformViewsController getPlatformViewsController() { return platformViewsController; } /** * If plugins are automatically registered, then they are registered during the {@link * io.flutter.embedding.engine.FlutterEngine}'s constructor. */ public boolean getAutomaticallyRegisterPlugins() { return automaticallyRegisterPlugins; } /** * The waitForRestorationData flag controls whether the engine delays responding to requests * from the framework for restoration data until that data has been provided to the engine via * {@code RestorationChannel.setRestorationData(byte[] data)}. */ public boolean getWaitForRestorationData() { return waitForRestorationData; } /** * Setter for `dartEntrypoint` property. * * @param dartEntrypoint specifies the {@link DartEntrypoint} the new engine should run. It * doesn't need to be the same entrypoint as the current engine but must be built in the * same AOT or snapshot. */ public Options setDartEntrypoint(DartEntrypoint dartEntrypoint) { this.dartEntrypoint = dartEntrypoint; return this; } /** * Setter for `initialRoute` property. * * @param initialRoute The name of the initial Flutter `Navigator` `Route` to load. If this is * null, it will default to the "/" route. */ public Options setInitialRoute(String initialRoute) { this.initialRoute = initialRoute; return this; } /** * Setter for `dartEntrypointArgs` property. * * @param dartEntrypointArgs Arguments passed as a list of string to Dart's entrypoint function. */ public Options setDartEntrypointArgs(List<String> dartEntrypointArgs) { this.dartEntrypointArgs = dartEntrypointArgs; return this; } /** * Setter for `platformViewsController` property. * * @param platformViewsController Manages platform views. */ public Options setPlatformViewsController( @NonNull PlatformViewsController platformViewsController) { this.platformViewsController = platformViewsController; return this; } /** * Setter for `automaticallyRegisterPlugins` property. * * @param automaticallyRegisterPlugins If plugins are automatically registered, then they are * registered during the execution of {@link io.flutter.embedding.engine.FlutterEngine}'s * constructor. */ public Options setAutomaticallyRegisterPlugins(boolean automaticallyRegisterPlugins) { this.automaticallyRegisterPlugins = automaticallyRegisterPlugins; return this; } /** * Setter for `waitForRestorationData` property. * * @param waitForRestorationData The waitForRestorationData flag controls whether the engine * delays responding to requests from the framework for restoration data until that data has * been provided to the engine via {@code RestorationChannel.setRestorationData(byte[] * data)}. */ public Options setWaitForRestorationData(boolean waitForRestorationData) { this.waitForRestorationData = waitForRestorationData; return this; } } }
engine/shell/platform/android/io/flutter/embedding/engine/FlutterEngineGroup.java/0
{ "file_path": "engine/shell/platform/android/io/flutter/embedding/engine/FlutterEngineGroup.java", "repo_id": "engine", "token_count": 4652 }
315
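A short usage sketch of the group API defined above: the first engine pays the full warm-up cost (including Dart VM startup if needed), while subsequent engines spawn from a live engine in the group and share its resources. The "/settings" route is an arbitrary example value.

import android.content.Context;
import io.flutter.embedding.engine.FlutterEngine;
import io.flutter.embedding.engine.FlutterEngineGroup;
import io.flutter.embedding.engine.dart.DartExecutor.DartEntrypoint;

// Create one group and spawn two engines from it.
final class EngineGroupUsage {
  static FlutterEngine[] spawnTwo(Context appContext) {
    FlutterEngineGroup group = new FlutterEngineGroup(appContext);

    // First engine: full initialization cost.
    FlutterEngine first = group.createAndRunDefaultEngine(appContext);

    // Second engine: spawned from the first, sharing resources.
    FlutterEngine second =
        group.createAndRunEngine(
            new FlutterEngineGroup.Options(appContext)
                .setDartEntrypoint(DartEntrypoint.createDefault())
                .setInitialRoute("/settings"));

    return new FlutterEngine[] {first, second};
  }
}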
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package io.flutter.embedding.engine.mutatorsstack; import android.graphics.Matrix; import android.graphics.Path; import android.graphics.Rect; import android.graphics.RectF; import androidx.annotation.Keep; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import java.util.ArrayList; import java.util.List; /** * The mutator stack containing a list of mutators * * <p>The mutators can be applied to a {@link io.flutter.plugin.platform.PlatformView} to perform a * series mutations. See {@link FlutterMutatorsStack.FlutterMutator} for informations on Mutators. */ @Keep public class FlutterMutatorsStack { /** * The type of a Mutator See {@link FlutterMutatorsStack.FlutterMutator} for informations on * Mutators. */ public enum FlutterMutatorType { CLIP_RECT, CLIP_RRECT, CLIP_PATH, TRANSFORM, OPACITY } /** * A class represents a mutator * * <p>A mutator contains information of a single mutation operation that can be applied to a * {@link io.flutter.plugin.platform.PlatformView}. See {@link * FlutterMutatorsStack.FlutterMutator} for informations on Mutators. */ public class FlutterMutator { @Nullable private Matrix matrix; @Nullable private Rect rect; @Nullable private Path path; @Nullable private float[] radiis; private FlutterMutatorType type; /** * Initialize a clip rect mutator. * * @param rect the rect to be clipped. */ public FlutterMutator(Rect rect) { this.type = FlutterMutatorType.CLIP_RECT; this.rect = rect; } /** * Initialize a clip rrect mutator. * * @param rect the rect of the rrect * @param radiis the radiis of the rrect. Array of 8 values, 4 pairs of [X,Y]. This value cannot * be null. */ public FlutterMutator(Rect rect, float[] radiis) { this.type = FlutterMutatorType.CLIP_RRECT; this.rect = rect; this.radiis = radiis; } /** * Initialize a clip path mutator. * * @param path the path to be clipped. */ public FlutterMutator(Path path) { this.type = FlutterMutatorType.CLIP_PATH; this.path = path; } /** * Initialize a transform mutator. * * @param matrix the transform matrix to apply. */ public FlutterMutator(Matrix matrix) { this.type = FlutterMutatorType.TRANSFORM; this.matrix = matrix; } /** * Get the mutator type. * * @return The type of the mutator. */ public FlutterMutatorType getType() { return type; } /** * Get the rect of the mutator if the {@link #getType()} returns FlutterMutatorType.CLIP_RECT. * * @return the clipping rect if the type is FlutterMutatorType.CLIP_RECT; otherwise null. */ public Rect getRect() { return rect; } /** * Get the path of the mutator if the {@link #getType()} returns FlutterMutatorType.CLIP_PATH. * * @return the clipping path if the type is FlutterMutatorType.CLIP_PATH; otherwise null. */ public Path getPath() { return path; } /** * Get the matrix of the mutator if the {@link #getType()} returns FlutterMutatorType.TRANSFORM. * * @return the matrix if the type is FlutterMutatorType.TRANSFORM; otherwise null. */ public Matrix getMatrix() { return matrix; } } private @NonNull List<FlutterMutator> mutators; private List<Path> finalClippingPaths; private Matrix finalMatrix; /** Initialize the mutator stack. */ public FlutterMutatorsStack() { this.mutators = new ArrayList<FlutterMutator>(); finalMatrix = new Matrix(); finalClippingPaths = new ArrayList<Path>(); } /** * Push a transform {@link FlutterMutatorsStack.FlutterMutator} to the stack. 
* * @param values the transform matrix to be pushed to the stack. The array matches how a {@link * android.graphics.Matrix} is constructed. */ public void pushTransform(float[] values) { Matrix matrix = new Matrix(); matrix.setValues(values); FlutterMutator mutator = new FlutterMutator(matrix); mutators.add(mutator); finalMatrix.preConcat(mutator.getMatrix()); } /** Push a clipRect {@link FlutterMutatorsStack.FlutterMutator} to the stack. */ public void pushClipRect(int left, int top, int right, int bottom) { Rect rect = new Rect(left, top, right, bottom); FlutterMutator mutator = new FlutterMutator(rect); mutators.add(mutator); Path path = new Path(); path.addRect(new RectF(rect), Path.Direction.CCW); path.transform(finalMatrix); finalClippingPaths.add(path); } /** * Push a clipRRect {@link FlutterMutatorsStack.FlutterMutator} to the stack. * * @param left left offset of the rrect. * @param top top offset of the rrect. * @param right right position of the rrect. * @param bottom bottom position of the rrect. * @param radiis the radiis of the rrect. It must be size of 8, including an x and y for each * corner. */ public void pushClipRRect(int left, int top, int right, int bottom, float[] radiis) { Rect rect = new Rect(left, top, right, bottom); FlutterMutator mutator = new FlutterMutator(rect, radiis); mutators.add(mutator); Path path = new Path(); path.addRoundRect(new RectF(rect), radiis, Path.Direction.CCW); path.transform(finalMatrix); finalClippingPaths.add(path); } /** * Get a list of all the raw mutators. The 0 index of the returned list is the top of the stack. */ public List<FlutterMutator> getMutators() { return mutators; } /** * Get a list of all the clipping operations. All the clipping operations -- whether it is clip * rect, clip rrect, or clip path -- are converted into Paths. The paths are also transformed with * the matrix that up to their stack positions. For example: If the stack looks like (from top to * bottom): TransA -&gt; ClipA -&gt; TransB -&gt; ClipB, the final paths will look like * [TransA*ClipA, TransA*TransB*ClipB]. * * <p>Clipping this list to the parent canvas of a view results the final clipping path. */ public List<Path> getFinalClippingPaths() { return finalClippingPaths; } /** * Returns the final matrix. Apply this matrix to the canvas of a view results the final * transformation of the view. */ public Matrix getFinalMatrix() { return finalMatrix; } }
engine/shell/platform/android/io/flutter/embedding/engine/mutatorsstack/FlutterMutatorsStack.java/0
{ "file_path": "engine/shell/platform/android/io/flutter/embedding/engine/mutatorsstack/FlutterMutatorsStack.java", "repo_id": "engine", "token_count": 2354 }
316
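A sketch of how a mutators stack is populated and then consumed, using only the methods defined above; the concrete transform and clip values are arbitrary examples.

import android.graphics.Matrix;
import android.graphics.Path;
import io.flutter.embedding.engine.mutatorsstack.FlutterMutatorsStack;
import java.util.List;

// Push a transform and two clips, then read the accumulated matrix and
// clipping paths that would be applied to a platform view's canvas.
final class MutatorsStackUsage {
  static void build() {
    FlutterMutatorsStack stack = new FlutterMutatorsStack();

    // A translation by (10, 20); pushTransform takes the 9 values of an
    // android.graphics.Matrix.
    Matrix translate = new Matrix();
    translate.setTranslate(10, 20);
    float[] values = new float[9];
    translate.getValues(values);
    stack.pushTransform(values);

    // Clip to a 100x100 rect, then to a rounded rect with uniform 8px corners
    // (8 radii: an x and y pair per corner).
    stack.pushClipRect(0, 0, 100, 100);
    stack.pushClipRRect(0, 0, 100, 100, new float[] {8, 8, 8, 8, 8, 8, 8, 8});

    // The final matrix concatenates every pushed transform; each clip is stored
    // as a Path already transformed by the matrix accumulated at push time.
    Matrix finalMatrix = stack.getFinalMatrix();
    List<Path> clips = stack.getFinalClippingPaths();
  }
}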
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package io.flutter.embedding.engine.plugins.shim; import androidx.annotation.NonNull; import io.flutter.Log; import io.flutter.embedding.engine.FlutterEngine; import io.flutter.embedding.engine.plugins.FlutterPlugin; import io.flutter.embedding.engine.plugins.activity.ActivityAware; import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding; import io.flutter.plugin.common.PluginRegistry; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; /** * A {@link PluginRegistry} that is shimmed to let old plugins use the new Android embedding and * plugin API behind the scenes. * * <p>The following is an example usage of {@code ShimPluginRegistry} within a {@code * FlutterActivity}: * * <pre> * // Create the FlutterEngine that will back the Flutter UI. * FlutterEngineGroup group = new FlutterEngineGroup(context); * FlutterEngine flutterEngine = group.createAndRunDefaultEngine(context); * * // Create a ShimPluginRegistry and wrap the FlutterEngine with the shim. * ShimPluginRegistry shimPluginRegistry = new ShimPluginRegistry(flutterEngine, platformViewsController); * * // Use the GeneratedPluginRegistrant to add every plugin that's in the pubspec. * GeneratedPluginRegistrant.registerWith(shimPluginRegistry); * </pre> */ public class ShimPluginRegistry implements PluginRegistry { private static final String TAG = "ShimPluginRegistry"; private final FlutterEngine flutterEngine; private final Map<String, Object> pluginMap = new HashMap<>(); private final ShimRegistrarAggregate shimRegistrarAggregate; public ShimPluginRegistry(@NonNull FlutterEngine flutterEngine) { this.flutterEngine = flutterEngine; this.shimRegistrarAggregate = new ShimRegistrarAggregate(); this.flutterEngine.getPlugins().add(shimRegistrarAggregate); } @Override @NonNull public Registrar registrarFor(@NonNull String pluginKey) { Log.v(TAG, "Creating plugin Registrar for '" + pluginKey + "'"); if (pluginMap.containsKey(pluginKey)) { throw new IllegalStateException("Plugin key " + pluginKey + " is already in use"); } pluginMap.put(pluginKey, null); ShimRegistrar registrar = new ShimRegistrar(pluginKey, pluginMap); shimRegistrarAggregate.addPlugin(registrar); return registrar; } @Override public boolean hasPlugin(@NonNull String pluginKey) { return pluginMap.containsKey(pluginKey); } @Override @SuppressWarnings("unchecked") public <T> T valuePublishedByPlugin(@NonNull String pluginKey) { return (T) pluginMap.get(pluginKey); } /** * Aggregates all {@link ShimRegistrar}s within one single {@link FlutterPlugin}. * * <p>The reason we need this aggregate is because the new embedding uniquely identifies plugins * by their plugin class, but the plugin shim system represents every plugin with a {@link * ShimRegistrar}. Therefore, every plugin we would register after the first plugin, would * overwrite the previous plugin, because they're all {@link ShimRegistrar} instances. * * <p>{@code ShimRegistrarAggregate} multiplexes {@link FlutterPlugin} and {@link ActivityAware} * calls so that we can register just one {@code ShimRegistrarAggregate} with a {@link * FlutterEngine}, while forwarding the relevant plugin resources to any number of {@link * ShimRegistrar}s within this {@code ShimRegistrarAggregate}. 
*/ private static class ShimRegistrarAggregate implements FlutterPlugin, ActivityAware { private final Set<ShimRegistrar> shimRegistrars = new HashSet<>(); private FlutterPluginBinding flutterPluginBinding; private ActivityPluginBinding activityPluginBinding; public void addPlugin(@NonNull ShimRegistrar shimRegistrar) { shimRegistrars.add(shimRegistrar); if (flutterPluginBinding != null) { shimRegistrar.onAttachedToEngine(flutterPluginBinding); } if (activityPluginBinding != null) { shimRegistrar.onAttachedToActivity(activityPluginBinding); } } @Override public void onAttachedToEngine(@NonNull FlutterPluginBinding binding) { flutterPluginBinding = binding; for (ShimRegistrar shimRegistrar : shimRegistrars) { shimRegistrar.onAttachedToEngine(binding); } } @Override public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) { for (ShimRegistrar shimRegistrar : shimRegistrars) { shimRegistrar.onDetachedFromEngine(binding); } flutterPluginBinding = null; activityPluginBinding = null; } @Override public void onAttachedToActivity(@NonNull ActivityPluginBinding binding) { activityPluginBinding = binding; for (ShimRegistrar shimRegistrar : shimRegistrars) { shimRegistrar.onAttachedToActivity(binding); } } @Override public void onDetachedFromActivityForConfigChanges() { for (ShimRegistrar shimRegistrar : shimRegistrars) { shimRegistrar.onDetachedFromActivity(); } activityPluginBinding = null; } @Override public void onReattachedToActivityForConfigChanges(@NonNull ActivityPluginBinding binding) { activityPluginBinding = binding; for (ShimRegistrar shimRegistrar : shimRegistrars) { shimRegistrar.onReattachedToActivityForConfigChanges(binding); } } @Override public void onDetachedFromActivity() { for (ShimRegistrar shimRegistrar : shimRegistrars) { shimRegistrar.onDetachedFromActivity(); } activityPluginBinding = null; } } }
engine/shell/platform/android/io/flutter/embedding/engine/plugins/shim/ShimPluginRegistry.java/0
{ "file_path": "engine/shell/platform/android/io/flutter/embedding/engine/plugins/shim/ShimPluginRegistry.java", "repo_id": "engine", "token_count": 1924 }
317
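A sketch of hosting a legacy (v1 embedding) plugin on a v2 FlutterEngine via the shim, following the class javadoc; note that the constructor defined in this file takes only the FlutterEngine. The plugin key "com.example.OldPlugin" and the commented OldPlugin.registerWith call are hypothetical placeholders for a real v1 plugin.

import android.content.Context;
import io.flutter.embedding.engine.FlutterEngine;
import io.flutter.embedding.engine.FlutterEngineGroup;
import io.flutter.embedding.engine.plugins.shim.ShimPluginRegistry;
import io.flutter.plugin.common.PluginRegistry;

// Wrap a v2 engine in the shim so an old Registrar-based plugin can attach to it.
final class ShimUsage {
  static void registerLegacyPlugin(Context context) {
    FlutterEngineGroup group = new FlutterEngineGroup(context);
    FlutterEngine engine = group.createAndRunDefaultEngine(context);

    ShimPluginRegistry shim = new ShimPluginRegistry(engine);
    PluginRegistry.Registrar registrar = shim.registrarFor("com.example.OldPlugin");

    // A v1 plugin would now be handed the registrar, e.g.:
    // OldPlugin.registerWith(registrar);
  }
}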
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package io.flutter.embedding.engine.systemchannels; import android.content.pm.ActivityInfo; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import io.flutter.Log; import io.flutter.embedding.engine.dart.DartExecutor; import io.flutter.plugin.common.JSONMethodCodec; import io.flutter.plugin.common.MethodCall; import io.flutter.plugin.common.MethodChannel; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; /** * System channel that receives requests for host platform behavior, e.g., haptic and sound effects, * system chrome configurations, and clipboard interaction. */ public class PlatformChannel { private static final String TAG = "PlatformChannel"; @NonNull public final MethodChannel channel; @Nullable private PlatformMessageHandler platformMessageHandler; @NonNull @VisibleForTesting final MethodChannel.MethodCallHandler parsingMethodCallHandler = new MethodChannel.MethodCallHandler() { @Override public void onMethodCall(@NonNull MethodCall call, @NonNull MethodChannel.Result result) { if (platformMessageHandler == null) { // If no explicit PlatformMessageHandler has been registered then we don't // need to forward this call to an API. Return. return; } String method = call.method; Object arguments = call.arguments; Log.v(TAG, "Received '" + method + "' message."); try { switch (method) { case "SystemSound.play": try { SoundType soundType = SoundType.fromValue((String) arguments); platformMessageHandler.playSystemSound(soundType); result.success(null); } catch (NoSuchFieldException exception) { // The desired sound type does not exist. result.error("error", exception.getMessage(), null); } break; case "HapticFeedback.vibrate": try { HapticFeedbackType feedbackType = HapticFeedbackType.fromValue((String) arguments); platformMessageHandler.vibrateHapticFeedback(feedbackType); result.success(null); } catch (NoSuchFieldException exception) { // The desired feedback type does not exist. result.error("error", exception.getMessage(), null); } break; case "SystemChrome.setPreferredOrientations": try { int androidOrientation = decodeOrientations((JSONArray) arguments); platformMessageHandler.setPreferredOrientations(androidOrientation); result.success(null); } catch (JSONException | NoSuchFieldException exception) { // JSONException: One or more expected fields were either omitted or referenced an // invalid type. // NoSuchFieldException: One or more expected fields were either omitted or // referenced an invalid type. result.error("error", exception.getMessage(), null); } break; case "SystemChrome.setApplicationSwitcherDescription": try { AppSwitcherDescription description = decodeAppSwitcherDescription((JSONObject) arguments); platformMessageHandler.setApplicationSwitcherDescription(description); result.success(null); } catch (JSONException exception) { // One or more expected fields were either omitted or referenced an invalid type. 
result.error("error", exception.getMessage(), null); } break; case "SystemChrome.setEnabledSystemUIOverlays": try { List<SystemUiOverlay> overlays = decodeSystemUiOverlays((JSONArray) arguments); platformMessageHandler.showSystemOverlays(overlays); result.success(null); } catch (JSONException | NoSuchFieldException exception) { // JSONException: One or more expected fields were either omitted or referenced an // invalid type. // NoSuchFieldException: One or more of the overlay names are invalid. result.error("error", exception.getMessage(), null); } break; case "SystemChrome.setEnabledSystemUIMode": try { SystemUiMode mode = decodeSystemUiMode((String) arguments); platformMessageHandler.showSystemUiMode(mode); result.success(null); } catch (JSONException | NoSuchFieldException exception) { // JSONException: One or more expected fields were either omitted or referenced an // invalid type. // NoSuchFieldException: One or more of the overlay names are invalid. result.error("error", exception.getMessage(), null); } break; case "SystemChrome.setSystemUIChangeListener": platformMessageHandler.setSystemUiChangeListener(); result.success(null); break; case "SystemChrome.restoreSystemUIOverlays": platformMessageHandler.restoreSystemUiOverlays(); result.success(null); break; case "SystemChrome.setSystemUIOverlayStyle": try { SystemChromeStyle systemChromeStyle = decodeSystemChromeStyle((JSONObject) arguments); platformMessageHandler.setSystemUiOverlayStyle(systemChromeStyle); result.success(null); } catch (JSONException | NoSuchFieldException exception) { // JSONException: One or more expected fields were either omitted or referenced an // invalid type. // NoSuchFieldException: One or more of the brightness names are invalid. result.error("error", exception.getMessage(), null); } break; case "SystemNavigator.setFrameworkHandlesBack": { boolean frameworkHandlesBack = (boolean) arguments; platformMessageHandler.setFrameworkHandlesBack(frameworkHandlesBack); result.success(null); break; } case "SystemNavigator.pop": platformMessageHandler.popSystemNavigator(); result.success(null); break; case "Clipboard.getData": { String contentFormatName = (String) arguments; ClipboardContentFormat clipboardFormat = null; if (contentFormatName != null) { try { clipboardFormat = ClipboardContentFormat.fromValue(contentFormatName); } catch (NoSuchFieldException exception) { // An unsupported content format was requested. Return failure. 
result.error( "error", "No such clipboard content format: " + contentFormatName, null); } } CharSequence clipboardContent = platformMessageHandler.getClipboardData(clipboardFormat); if (clipboardContent != null) { JSONObject response = new JSONObject(); response.put("text", clipboardContent); result.success(response); } else { result.success(null); } break; } case "Clipboard.setData": { String clipboardContent = ((JSONObject) arguments).getString("text"); platformMessageHandler.setClipboardData(clipboardContent); result.success(null); break; } case "Clipboard.hasStrings": { boolean hasStrings = platformMessageHandler.clipboardHasStrings(); JSONObject response = new JSONObject(); response.put("value", hasStrings); result.success(response); break; } case "Share.invoke": String text = (String) arguments; platformMessageHandler.share(text); result.success(null); break; default: result.notImplemented(); break; } } catch (JSONException e) { result.error("error", "JSON error: " + e.getMessage(), null); } } }; /** * Constructs a {@code PlatformChannel} that connects Android to the Dart code running in {@code * dartExecutor}. * * <p>The given {@code dartExecutor} is permitted to be idle or executing code. * * <p>See {@link DartExecutor}. */ public PlatformChannel(@NonNull DartExecutor dartExecutor) { channel = new MethodChannel(dartExecutor, "flutter/platform", JSONMethodCodec.INSTANCE); channel.setMethodCallHandler(parsingMethodCallHandler); } /** * Sets the {@link PlatformMessageHandler} which receives all events and requests that are parsed * from the underlying platform channel. */ public void setPlatformMessageHandler(@Nullable PlatformMessageHandler platformMessageHandler) { this.platformMessageHandler = platformMessageHandler; } /** Informs Flutter of a change in the SystemUI overlays. */ public void systemChromeChanged(boolean overlaysAreVisible) { Log.v(TAG, "Sending 'systemUIChange' message."); channel.invokeMethod("SystemChrome.systemUIChange", Arrays.asList(overlaysAreVisible)); } // TODO(mattcarroll): add support for IntDef annotations, then add @ScreenOrientation /** * Decodes a series of orientations to an aggregate desired orientation. * * @throws JSONException if {@code encodedOrientations} does not contain expected keys and value * types. * @throws NoSuchFieldException if any given encoded orientation is not a valid orientation name. 
*/ private int decodeOrientations(@NonNull JSONArray encodedOrientations) throws JSONException, NoSuchFieldException { int requestedOrientation = 0x00; int firstRequestedOrientation = 0x00; for (int index = 0; index < encodedOrientations.length(); index += 1) { String encodedOrientation = encodedOrientations.getString(index); DeviceOrientation orientation = DeviceOrientation.fromValue(encodedOrientation); switch (orientation) { case PORTRAIT_UP: requestedOrientation |= 0x01; break; case PORTRAIT_DOWN: requestedOrientation |= 0x04; break; case LANDSCAPE_LEFT: requestedOrientation |= 0x02; break; case LANDSCAPE_RIGHT: requestedOrientation |= 0x08; break; } if (firstRequestedOrientation == 0x00) { firstRequestedOrientation = requestedOrientation; } } switch (requestedOrientation) { case 0x00: return ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED; case 0x01: return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT; case 0x02: return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE; case 0x04: return ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT; case 0x05: return ActivityInfo.SCREEN_ORIENTATION_USER_PORTRAIT; case 0x08: return ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE; case 0x0a: return ActivityInfo.SCREEN_ORIENTATION_USER_LANDSCAPE; case 0x0b: return ActivityInfo.SCREEN_ORIENTATION_USER; case 0x0f: return ActivityInfo.SCREEN_ORIENTATION_FULL_USER; case 0x03: // portraitUp and landscapeLeft case 0x06: // portraitDown and landscapeLeft case 0x07: // portraitUp, portraitDown, and landscapeLeft case 0x09: // portraitUp and landscapeRight case 0x0c: // portraitDown and landscapeRight case 0x0d: // portraitUp, portraitDown, and landscapeRight case 0x0e: // portraitDown, landscapeLeft, and landscapeRight // Android can't describe these cases, so just default to whatever the first // specified value was. switch (firstRequestedOrientation) { case 0x01: return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT; case 0x02: return ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE; case 0x04: return ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT; case 0x08: return ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE; } } // Execution should never get this far, but if it does then we default // to a portrait orientation. return ActivityInfo.SCREEN_ORIENTATION_PORTRAIT; } @NonNull private AppSwitcherDescription decodeAppSwitcherDescription( @NonNull JSONObject encodedDescription) throws JSONException { int color = encodedDescription.getInt("primaryColor"); if (color != 0) { // 0 means color isn't set, use system default color = color | 0xFF000000; // color must be opaque if set } String label = encodedDescription.getString("label"); return new AppSwitcherDescription(color, label); } /** * Decodes a list of JSON-encoded overlays to a list of {@link SystemUiOverlay}. * * @throws JSONException if {@code encodedSystemUiOverlay} does not contain expected keys and * value types. * @throws NoSuchFieldException if any of the given encoded overlay names are invalid. 
*/ @NonNull private List<SystemUiOverlay> decodeSystemUiOverlays(@NonNull JSONArray encodedSystemUiOverlay) throws JSONException, NoSuchFieldException { List<SystemUiOverlay> overlays = new ArrayList<>(); for (int i = 0; i < encodedSystemUiOverlay.length(); ++i) { String encodedOverlay = encodedSystemUiOverlay.getString(i); SystemUiOverlay overlay = SystemUiOverlay.fromValue(encodedOverlay); switch (overlay) { case TOP_OVERLAYS: overlays.add(SystemUiOverlay.TOP_OVERLAYS); break; case BOTTOM_OVERLAYS: overlays.add(SystemUiOverlay.BOTTOM_OVERLAYS); break; } } return overlays; } /** * Decodes an object of JSON-encoded mode to a {@link SystemUiMode}. * * @throws JSONException if {@code encodedSystemUiMode} does not contain expected keys and value * types. * @throws NoSuchFieldException if any of the given encoded mode name is invalid. */ @NonNull private SystemUiMode decodeSystemUiMode(@NonNull String encodedSystemUiMode) throws JSONException, NoSuchFieldException { SystemUiMode mode = SystemUiMode.fromValue(encodedSystemUiMode); switch (mode) { case LEAN_BACK: return SystemUiMode.LEAN_BACK; case IMMERSIVE: return SystemUiMode.IMMERSIVE; case IMMERSIVE_STICKY: return SystemUiMode.IMMERSIVE_STICKY; case EDGE_TO_EDGE: return SystemUiMode.EDGE_TO_EDGE; } // Execution should never ever get this far, but if it does, we default to edge to edge. return SystemUiMode.EDGE_TO_EDGE; } /** * Decodes a JSON-encoded {@code encodedStyle} to a {@link SystemChromeStyle}. * * @throws JSONException if {@code encodedStyle} does not contain expected keys and value types. * @throws NoSuchFieldException if any provided brightness name is invalid. */ @NonNull private SystemChromeStyle decodeSystemChromeStyle(@NonNull JSONObject encodedStyle) throws JSONException, NoSuchFieldException { // TODO(mattcarroll): add color annotation Integer statusBarColor = null; Brightness statusBarIconBrightness = null; Boolean systemStatusBarContrastEnforced = null; // TODO(mattcarroll): add color annotation Integer systemNavigationBarColor = null; Brightness systemNavigationBarIconBrightness = null; // TODO(mattcarroll): add color annotation Integer systemNavigationBarDividerColor = null; Boolean systemNavigationBarContrastEnforced = null; if (!encodedStyle.isNull("statusBarColor")) { statusBarColor = encodedStyle.getInt("statusBarColor"); } if (!encodedStyle.isNull("statusBarIconBrightness")) { statusBarIconBrightness = Brightness.fromValue(encodedStyle.getString("statusBarIconBrightness")); } if (!encodedStyle.isNull("systemStatusBarContrastEnforced")) { systemStatusBarContrastEnforced = encodedStyle.getBoolean("systemStatusBarContrastEnforced"); } if (!encodedStyle.isNull("systemNavigationBarColor")) { systemNavigationBarColor = encodedStyle.getInt("systemNavigationBarColor"); } if (!encodedStyle.isNull("systemNavigationBarIconBrightness")) { systemNavigationBarIconBrightness = Brightness.fromValue(encodedStyle.getString("systemNavigationBarIconBrightness")); } if (!encodedStyle.isNull("systemNavigationBarDividerColor")) { systemNavigationBarDividerColor = encodedStyle.getInt("systemNavigationBarDividerColor"); } if (!encodedStyle.isNull("systemNavigationBarContrastEnforced")) { systemNavigationBarContrastEnforced = encodedStyle.getBoolean("systemNavigationBarContrastEnforced"); } return new SystemChromeStyle( statusBarColor, statusBarIconBrightness, systemStatusBarContrastEnforced, systemNavigationBarColor, systemNavigationBarIconBrightness, systemNavigationBarDividerColor, systemNavigationBarContrastEnforced); } /** * Handler 
that receives platform messages sent from Flutter to Android through a given {@link * PlatformChannel}. * * <p>To register a {@code PlatformMessageHandler} with a {@link PlatformChannel}, see {@link * PlatformChannel#setPlatformMessageHandler(PlatformMessageHandler)}. */ public interface PlatformMessageHandler { /** The Flutter application would like to play the given {@code soundType}. */ void playSystemSound(@NonNull SoundType soundType); /** The Flutter application would like to play the given haptic {@code feedbackType}. */ void vibrateHapticFeedback(@NonNull HapticFeedbackType feedbackType); /** The Flutter application would like to display in the given {@code androidOrientation}. */ // TODO(mattcarroll): add @ScreenOrientation annotation void setPreferredOrientations(int androidOrientation); /** * The Flutter application would like to be displayed in Android's app switcher with the visual * representation described in the given {@code description}. * * <p>See the related Android documentation: * https://developer.android.com/guide/components/activities/recents */ void setApplicationSwitcherDescription(@NonNull AppSwitcherDescription description); /** * The Flutter application would like the Android system to display the given {@code overlays}. * * <p>{@link SystemUiOverlay#TOP_OVERLAYS} refers to system overlays such as the status bar, * while {@link SystemUiOverlay#BOTTOM_OVERLAYS} refers to system overlays such as the * back/home/recents navigation on the bottom of the screen. * * <p>An empty list of {@code overlays} should hide all system overlays. */ void showSystemOverlays(@NonNull List<SystemUiOverlay> overlays); /** * The Flutter application would like the Android system to display the given {@code mode}. * * <p>{@link SystemUiMode#LEAN_BACK} refers to a fullscreen experience that restores system bars * upon tapping anywhere in the application. This tap gesture is not received by the * application. * * <p>{@link SystemUiMode#IMMERSIVE} refers to a fullscreen experience that restores system bars * upon swiping from the edge of the viewport. This swipe gesture is not recived by the * application. * * <p>{@link SystemUiMode#IMMERSIVE_STICKY} refers to a fullscreen experience that restores * system bars upon swiping from the edge of the viewport. This swipe gesture is received by the * application, in contrast to {@link SystemUiMode#IMMERSIVE}. * * <p>{@link SystemUiMode#EDGE_TO_EDGE} refers to a layout configuration that will consume the * full viewport. This full screen experience does not hide status bars. These status bars can * be set to transparent, making the buttons and icons hover over the fullscreen application. */ void showSystemUiMode(@NonNull SystemUiMode mode); /** * The Flutter application would like the Android system to notify the framework when the system * ui visibility has changed. * * <p>This is relevant when using {@link SystemUiMode}s for fullscreen applications, from which * the system overlays can appear or disappear based on user input. */ void setSystemUiChangeListener(); /** * The Flutter application would like to restore the visibility of system overlays to the last * set of overlays sent via {@link #showSystemOverlays(List)} or {@link * #showSystemUiMode(SystemUiMode)}. 
* * <p>If {@link #showSystemOverlays(List)} or {@link #showSystemUiMode(SystemUiMode)} has yet to * be called, then a default system overlay appearance is desired: * * <p>{@code View.SYSTEM_UI_FLAG_LAYOUT_STABLE | View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN } */ void restoreSystemUiOverlays(); /** * The Flutter application would like the system chrome to present itself with the given {@code * systemUiOverlayStyle}, i.e., the given status bar and navigation bar colors and brightness. */ void setSystemUiOverlayStyle(@NonNull SystemChromeStyle systemUiOverlayStyle); /** * The Flutter application would or would not like to handle navigation pop events itself. * * <p>Relevant for registering and unregistering the app's OnBackInvokedCallback for the * Predictive Back feature, for example as in {@link * io.flutter.embedding.android.FlutterActivity}. */ default void setFrameworkHandlesBack(boolean frameworkHandlesBack) {} /** * The Flutter application would like to pop the top item off of the Android app's navigation * back stack. */ void popSystemNavigator(); /** * The Flutter application would like to receive the current data in the clipboard and have it * returned in the given {@code format}. */ @Nullable CharSequence getClipboardData(@Nullable ClipboardContentFormat format); /** * The Flutter application would like to set the current data in the clipboard to the given * {@code text}. */ void setClipboardData(@NonNull String text); /** * The Flutter application would like to know if the clipboard currently contains a string that * can be pasted. */ boolean clipboardHasStrings(); /** * The Flutter application would like to share the given {@code text} using the Android standard * intent action named {@code Intent.ACTION_SEND}. See: * https://developer.android.com/reference/android/content/Intent.html#ACTION_SEND */ void share(@NonNull String text); } /** Types of sounds the Android OS can play on behalf of an application. */ public enum SoundType { CLICK("SystemSoundType.click"), ALERT("SystemSoundType.alert"); @NonNull static SoundType fromValue(@NonNull String encodedName) throws NoSuchFieldException { for (SoundType soundType : SoundType.values()) { if (soundType.encodedName.equals(encodedName)) { return soundType; } } throw new NoSuchFieldException("No such SoundType: " + encodedName); } @NonNull private final String encodedName; SoundType(@NonNull String encodedName) { this.encodedName = encodedName; } } /** The types of haptic feedback that the Android OS can generate on behalf of an application. */ public enum HapticFeedbackType { STANDARD(null), LIGHT_IMPACT("HapticFeedbackType.lightImpact"), MEDIUM_IMPACT("HapticFeedbackType.mediumImpact"), HEAVY_IMPACT("HapticFeedbackType.heavyImpact"), SELECTION_CLICK("HapticFeedbackType.selectionClick"); @NonNull static HapticFeedbackType fromValue(@Nullable String encodedName) throws NoSuchFieldException { for (HapticFeedbackType feedbackType : HapticFeedbackType.values()) { if ((feedbackType.encodedName == null && encodedName == null) || (feedbackType.encodedName != null && feedbackType.encodedName.equals(encodedName))) { return feedbackType; } } throw new NoSuchFieldException("No such HapticFeedbackType: " + encodedName); } @Nullable private final String encodedName; HapticFeedbackType(@Nullable String encodedName) { this.encodedName = encodedName; } } /** The possible desired orientations of a Flutter application. 
*/ public enum DeviceOrientation { PORTRAIT_UP("DeviceOrientation.portraitUp"), PORTRAIT_DOWN("DeviceOrientation.portraitDown"), LANDSCAPE_LEFT("DeviceOrientation.landscapeLeft"), LANDSCAPE_RIGHT("DeviceOrientation.landscapeRight"); @NonNull static DeviceOrientation fromValue(@NonNull String encodedName) throws NoSuchFieldException { for (DeviceOrientation orientation : DeviceOrientation.values()) { if (orientation.encodedName.equals(encodedName)) { return orientation; } } throw new NoSuchFieldException("No such DeviceOrientation: " + encodedName); } @NonNull private String encodedName; DeviceOrientation(@NonNull String encodedName) { this.encodedName = encodedName; } } /** * The set of Android system UI overlays as perceived by the Flutter application. * * <p>Android includes many more overlay options and flags than what is provided by {@code * SystemUiOverlay}. Flutter only requires control over a subset of the overlays and those * overlays are represented by {@code SystemUiOverlay} values. */ public enum SystemUiOverlay { TOP_OVERLAYS("SystemUiOverlay.top"), BOTTOM_OVERLAYS("SystemUiOverlay.bottom"); @NonNull static SystemUiOverlay fromValue(@NonNull String encodedName) throws NoSuchFieldException { for (SystemUiOverlay overlay : SystemUiOverlay.values()) { if (overlay.encodedName.equals(encodedName)) { return overlay; } } throw new NoSuchFieldException("No such SystemUiOverlay: " + encodedName); } @NonNull private String encodedName; SystemUiOverlay(@NonNull String encodedName) { this.encodedName = encodedName; } } /** The set of Android system fullscreen modes as perceived by the Flutter application. */ public enum SystemUiMode { LEAN_BACK("SystemUiMode.leanBack"), IMMERSIVE("SystemUiMode.immersive"), IMMERSIVE_STICKY("SystemUiMode.immersiveSticky"), EDGE_TO_EDGE("SystemUiMode.edgeToEdge"); /** * Returns the SystemUiMode for the provided encoded value. @throws NoSuchFieldException if the * given encoded mode name is invalid. */ @NonNull static SystemUiMode fromValue(@NonNull String encodedName) throws NoSuchFieldException { for (SystemUiMode mode : SystemUiMode.values()) { if (mode.encodedName.equals(encodedName)) { return mode; } } throw new NoSuchFieldException("No such SystemUiMode: " + encodedName); } @NonNull private String encodedName; /** Creates a {@link SystemUiMode} with the given encoded name. */ SystemUiMode(@NonNull String encodedName) { this.encodedName = encodedName; } } /** * The color and label of an application that appears in Android's app switcher, AKA recents * screen. */ public static class AppSwitcherDescription { // TODO(mattcarroll): add color annotation public final int color; @NonNull public final String label; public AppSwitcherDescription(int color, @NonNull String label) { this.color = color; this.label = label; } } /** The color and brightness of system chrome, e.g., status bar and system navigation bar.
*/ public static class SystemChromeStyle { // TODO(mattcarroll): add color annotation @Nullable public final Integer statusBarColor; @Nullable public final Brightness statusBarIconBrightness; @Nullable public final Boolean systemStatusBarContrastEnforced; // TODO(mattcarroll): add color annotation @Nullable public final Integer systemNavigationBarColor; @Nullable public final Brightness systemNavigationBarIconBrightness; // TODO(mattcarroll): add color annotation @Nullable public final Integer systemNavigationBarDividerColor; @Nullable public final Boolean systemNavigationBarContrastEnforced; public SystemChromeStyle( @Nullable Integer statusBarColor, @Nullable Brightness statusBarIconBrightness, @Nullable Boolean systemStatusBarContrastEnforced, @Nullable Integer systemNavigationBarColor, @Nullable Brightness systemNavigationBarIconBrightness, @Nullable Integer systemNavigationBarDividerColor, @Nullable Boolean systemNavigationBarContrastEnforced) { this.statusBarColor = statusBarColor; this.statusBarIconBrightness = statusBarIconBrightness; this.systemStatusBarContrastEnforced = systemStatusBarContrastEnforced; this.systemNavigationBarColor = systemNavigationBarColor; this.systemNavigationBarIconBrightness = systemNavigationBarIconBrightness; this.systemNavigationBarDividerColor = systemNavigationBarDividerColor; this.systemNavigationBarContrastEnforced = systemNavigationBarContrastEnforced; } } public enum Brightness { LIGHT("Brightness.light"), DARK("Brightness.dark"); @NonNull static Brightness fromValue(@NonNull String encodedName) throws NoSuchFieldException { for (Brightness brightness : Brightness.values()) { if (brightness.encodedName.equals(encodedName)) { return brightness; } } throw new NoSuchFieldException("No such Brightness: " + encodedName); } @NonNull private String encodedName; Brightness(@NonNull String encodedName) { this.encodedName = encodedName; } } /** Data formats of clipboard content. */ public enum ClipboardContentFormat { PLAIN_TEXT("text/plain"); @NonNull static ClipboardContentFormat fromValue(@NonNull String encodedName) throws NoSuchFieldException { for (ClipboardContentFormat format : ClipboardContentFormat.values()) { if (format.encodedName.equals(encodedName)) { return format; } } throw new NoSuchFieldException("No such ClipboardContentFormat: " + encodedName); } @NonNull private String encodedName; ClipboardContentFormat(@NonNull String encodedName) { this.encodedName = encodedName; } } }
engine/shell/platform/android/io/flutter/embedding/engine/systemchannels/PlatformChannel.java/0
{ "file_path": "engine/shell/platform/android/io/flutter/embedding/engine/systemchannels/PlatformChannel.java", "repo_id": "engine", "token_count": 12203 }
318
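The switch-heavy decodeOrientations method in PlatformChannel above is easiest to follow as a bitmask computation: each DeviceOrientation contributes one bit, the union of bits selects an ActivityInfo constant, and unions Android cannot represent fall back to the first requested orientation. The following is a small, self-contained sketch of that idea; the class and constant names are hypothetical, the string results merely stand in for ActivityInfo.SCREEN_ORIENTATION_* values, and the real method handles a few additional combinations.

import java.util.List;

// Sketch of the orientation-mask idea behind PlatformChannel#decodeOrientations.
// Illustrative only; returned strings stand in for ActivityInfo constants.
public final class OrientationMask {
  static final int PORTRAIT_UP = 0x01;
  static final int LANDSCAPE_LEFT = 0x02;
  static final int PORTRAIT_DOWN = 0x04;
  static final int LANDSCAPE_RIGHT = 0x08;

  static String decode(List<Integer> requested) {
    int mask = 0;
    int first = 0;
    for (int orientation : requested) {
      mask |= orientation;
      if (first == 0) {
        first = orientation;
      }
    }
    switch (mask) {
      case 0x00: return "UNSPECIFIED";
      case PORTRAIT_UP: return "PORTRAIT";
      case LANDSCAPE_LEFT: return "LANDSCAPE";
      case PORTRAIT_DOWN: return "REVERSE_PORTRAIT";
      case LANDSCAPE_RIGHT: return "REVERSE_LANDSCAPE";
      case PORTRAIT_UP | PORTRAIT_DOWN: return "USER_PORTRAIT";       // 0x05
      case LANDSCAPE_LEFT | LANDSCAPE_RIGHT: return "USER_LANDSCAPE"; // 0x0a
      case 0x0f: return "FULL_USER";
      default:
        // e.g. portraitUp + landscapeLeft (0x03) has no single Android constant,
        // so default to the first orientation that was requested.
        return decode(List.of(first));
    }
  }

  public static void main(String[] args) {
    System.out.println(decode(List.of(PORTRAIT_UP, LANDSCAPE_LEFT))); // PORTRAIT
    System.out.println(decode(List.of(PORTRAIT_UP, PORTRAIT_DOWN)));  // USER_PORTRAIT
  }
}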
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package io.flutter.plugin.common; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import java.nio.ByteBuffer; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; /** * A {@link MethodCodec} using UTF-8 encoded JSON method calls and result envelopes. * * <p>This codec is guaranteed to be compatible with the corresponding <a * href="https://api.flutter.dev/flutter/services/JSONMethodCodec-class.html">JSONMethodCodec</a> on * the Dart side. These parts of the Flutter SDK are evolved synchronously. * * <p>Values supported as methods arguments and result payloads are those supported by {@link * JSONMessageCodec}. */ public final class JSONMethodCodec implements MethodCodec { // This codec must match the Dart codec of the same name in package flutter/services. public static final JSONMethodCodec INSTANCE = new JSONMethodCodec(); private JSONMethodCodec() {} @Override @NonNull public ByteBuffer encodeMethodCall(@NonNull MethodCall methodCall) { try { final JSONObject map = new JSONObject(); map.put("method", methodCall.method); map.put("args", JSONUtil.wrap(methodCall.arguments)); return JSONMessageCodec.INSTANCE.encodeMessage(map); } catch (JSONException e) { throw new IllegalArgumentException("Invalid JSON", e); } } @Override @NonNull public MethodCall decodeMethodCall(@NonNull ByteBuffer message) { try { final Object json = JSONMessageCodec.INSTANCE.decodeMessage(message); if (json instanceof JSONObject) { final JSONObject map = (JSONObject) json; final Object method = map.get("method"); final Object arguments = unwrapNull(map.opt("args")); if (method instanceof String) { return new MethodCall((String) method, arguments); } } throw new IllegalArgumentException("Invalid method call: " + json); } catch (JSONException e) { throw new IllegalArgumentException("Invalid JSON", e); } } @Override @NonNull public ByteBuffer encodeSuccessEnvelope(@Nullable Object result) { return JSONMessageCodec.INSTANCE.encodeMessage(new JSONArray().put(JSONUtil.wrap(result))); } @Override @NonNull public ByteBuffer encodeErrorEnvelope( @NonNull String errorCode, @Nullable String errorMessage, @Nullable Object errorDetails) { return JSONMessageCodec.INSTANCE.encodeMessage( new JSONArray() .put(errorCode) .put(JSONUtil.wrap(errorMessage)) .put(JSONUtil.wrap(errorDetails))); } @Override @NonNull public ByteBuffer encodeErrorEnvelopeWithStacktrace( @NonNull String errorCode, @Nullable String errorMessage, @Nullable Object errorDetails, @Nullable String errorStacktrace) { return JSONMessageCodec.INSTANCE.encodeMessage( new JSONArray() .put(errorCode) .put(JSONUtil.wrap(errorMessage)) .put(JSONUtil.wrap(errorDetails)) .put(JSONUtil.wrap(errorStacktrace))); } @Override @NonNull public Object decodeEnvelope(@NonNull ByteBuffer envelope) { try { final Object json = JSONMessageCodec.INSTANCE.decodeMessage(envelope); if (json instanceof JSONArray) { final JSONArray array = (JSONArray) json; if (array.length() == 1) { return unwrapNull(array.opt(0)); } if (array.length() == 3) { final Object code = array.get(0); final Object message = unwrapNull(array.opt(1)); final Object details = unwrapNull(array.opt(2)); if (code instanceof String && (message == null || message instanceof String)) { throw new FlutterException((String) code, (String) message, details); } } } throw new IllegalArgumentException("Invalid 
envelope: " + json); } catch (JSONException e) { throw new IllegalArgumentException("Invalid JSON", e); } } Object unwrapNull(Object value) { return (value == JSONObject.NULL) ? null : value; } }
engine/shell/platform/android/io/flutter/plugin/common/JSONMethodCodec.java/0
{ "file_path": "engine/shell/platform/android/io/flutter/plugin/common/JSONMethodCodec.java", "repo_id": "engine", "token_count": 1507 }
319
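JSONMethodCodec above relies on a small wire convention for result envelopes: a success reply is a one-element JSON array holding the result, and an error reply is a three-element array of [code, message, details] (with an optional fourth stacktrace element in encodeErrorEnvelopeWithStacktrace). Here is a minimal sketch of reading that convention with plain org.json calls, assuming org.json is on the classpath; the class and method names are hypothetical and this is not the codec itself.

import org.json.JSONArray;
import org.json.JSONException;

// Sketch of the envelope convention used by JSONMethodCodec#decodeEnvelope:
// one element = success, three elements = [code, message, details] error.
public final class EnvelopeSketch {
  static String describe(String envelopeJson) throws JSONException {
    JSONArray envelope = new JSONArray(envelopeJson);
    if (envelope.length() == 1) {
      return "success: " + envelope.opt(0);
    }
    if (envelope.length() == 3) {
      return "error " + envelope.getString(0) + ": " + envelope.opt(1);
    }
    return "malformed envelope";
  }

  public static void main(String[] args) throws JSONException {
    System.out.println(describe("[42]"));                               // success: 42
    System.out.println(describe("[\"bad_args\",\"missing field\",null]")); // error bad_args: missing field
  }
}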
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package io.flutter.plugin.editing; import static io.flutter.Build.API_LEVELS; import android.annotation.SuppressLint; import android.content.Context; import android.graphics.Rect; import android.os.Build; import android.os.Bundle; import android.text.Editable; import android.text.InputType; import android.util.SparseArray; import android.view.KeyEvent; import android.view.View; import android.view.ViewStructure; import android.view.autofill.AutofillId; import android.view.autofill.AutofillManager; import android.view.autofill.AutofillValue; import android.view.inputmethod.EditorInfo; import android.view.inputmethod.InputConnection; import android.view.inputmethod.InputMethodManager; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; import androidx.core.view.inputmethod.EditorInfoCompat; import io.flutter.Log; import io.flutter.embedding.android.KeyboardManager; import io.flutter.embedding.engine.systemchannels.TextInputChannel; import io.flutter.embedding.engine.systemchannels.TextInputChannel.TextEditState; import io.flutter.plugin.platform.PlatformViewsController; import java.util.ArrayList; import java.util.HashMap; /** Android implementation of the text input plugin. */ public class TextInputPlugin implements ListenableEditingState.EditingStateWatcher { private static final String TAG = "TextInputPlugin"; @NonNull private final View mView; @NonNull private final InputMethodManager mImm; @NonNull private final AutofillManager afm; @NonNull private final TextInputChannel textInputChannel; @NonNull private InputTarget inputTarget = new InputTarget(InputTarget.Type.NO_TARGET, 0); @Nullable private TextInputChannel.Configuration configuration; @Nullable private SparseArray<TextInputChannel.Configuration> autofillConfiguration; @NonNull private ListenableEditingState mEditable; private boolean mRestartInputPending; @Nullable private InputConnection lastInputConnection; @NonNull private PlatformViewsController platformViewsController; @Nullable private Rect lastClientRect; private ImeSyncDeferringInsetsCallback imeSyncCallback; // Initialize the "last seen" text editing values to a non-null value. private TextEditState mLastKnownFrameworkTextEditingState; // When true following calls to createInputConnection will return the cached lastInputConnection // if the input // target is a platform view. See the comments on lockPlatformViewInputConnection for more // details. private boolean isInputConnectionLocked; @SuppressLint("NewApi") public TextInputPlugin( @NonNull View view, @NonNull TextInputChannel textInputChannel, @NonNull PlatformViewsController platformViewsController) { mView = view; // Create a default object. mEditable = new ListenableEditingState(null, mView); mImm = (InputMethodManager) view.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); if (Build.VERSION.SDK_INT >= API_LEVELS.API_26) { afm = view.getContext().getSystemService(AutofillManager.class); } else { afm = null; } // Sets up syncing ime insets with the framework, allowing // the Flutter view to grow and shrink to accommodate Android // controlled keyboard animations. 
if (Build.VERSION.SDK_INT >= API_LEVELS.API_30) { imeSyncCallback = new ImeSyncDeferringInsetsCallback(view); imeSyncCallback.install(); } this.textInputChannel = textInputChannel; textInputChannel.setTextInputMethodHandler( new TextInputChannel.TextInputMethodHandler() { @Override public void show() { showTextInput(mView); } @Override public void hide() { if (inputTarget.type == InputTarget.Type.PHYSICAL_DISPLAY_PLATFORM_VIEW) { notifyViewExited(); } else { hideTextInput(mView); } } @Override public void requestAutofill() { notifyViewEntered(); } @Override public void finishAutofillContext(boolean shouldSave) { if (Build.VERSION.SDK_INT < API_LEVELS.API_26 || afm == null) { return; } if (shouldSave) { afm.commit(); } else { afm.cancel(); } } @Override public void setClient( int textInputClientId, TextInputChannel.Configuration configuration) { setTextInputClient(textInputClientId, configuration); } @Override public void setPlatformViewClient(int platformViewId, boolean usesVirtualDisplay) { setPlatformViewTextInputClient(platformViewId, usesVirtualDisplay); } @Override public void setEditingState(TextInputChannel.TextEditState editingState) { setTextInputEditingState(mView, editingState); } @Override public void setEditableSizeAndTransform(double width, double height, double[] transform) { saveEditableSizeAndTransform(width, height, transform); } @Override public void clearClient() { clearTextInputClient(); } @Override public void sendAppPrivateCommand(String action, Bundle data) { sendTextInputAppPrivateCommand(action, data); } }); textInputChannel.requestExistingInputState(); this.platformViewsController = platformViewsController; this.platformViewsController.attachTextInputPlugin(this); } @NonNull public InputMethodManager getInputMethodManager() { return mImm; } @VisibleForTesting Editable getEditable() { return mEditable; } @VisibleForTesting ImeSyncDeferringInsetsCallback getImeSyncCallback() { return imeSyncCallback; } /** * Use the current platform view input connection until unlockPlatformViewInputConnection is * called. * * <p>The current input connection instance is cached and any following call to @{link * createInputConnection} returns the cached connection until unlockPlatformViewInputConnection is * called. * * <p>This is a no-op if the current input target isn't a platform view. * * <p>This is used to preserve an input connection when moving a platform view from one virtual * display to another. */ public void lockPlatformViewInputConnection() { if (inputTarget.type == InputTarget.Type.VIRTUAL_DISPLAY_PLATFORM_VIEW) { isInputConnectionLocked = true; } } /** * Unlocks the input connection. * * <p>See also: @{link lockPlatformViewInputConnection}. */ public void unlockPlatformViewInputConnection() { if (inputTarget.type == InputTarget.Type.VIRTUAL_DISPLAY_PLATFORM_VIEW) { isInputConnectionLocked = false; } } /** * Detaches the text input plugin from the platform views controller. * * <p>The TextInputPlugin instance should not be used after calling this. 
*/ @SuppressLint("NewApi") public void destroy() { platformViewsController.detachTextInputPlugin(); textInputChannel.setTextInputMethodHandler(null); notifyViewExited(); mEditable.removeEditingStateListener(this); if (imeSyncCallback != null) { imeSyncCallback.remove(); } } private static int inputTypeFromTextInputType( TextInputChannel.InputType type, boolean obscureText, boolean autocorrect, boolean enableSuggestions, boolean enableIMEPersonalizedLearning, TextInputChannel.TextCapitalization textCapitalization) { if (type.type == TextInputChannel.TextInputType.DATETIME) { return InputType.TYPE_CLASS_DATETIME; } else if (type.type == TextInputChannel.TextInputType.NUMBER) { int textType = InputType.TYPE_CLASS_NUMBER; if (type.isSigned) { textType |= InputType.TYPE_NUMBER_FLAG_SIGNED; } if (type.isDecimal) { textType |= InputType.TYPE_NUMBER_FLAG_DECIMAL; } return textType; } else if (type.type == TextInputChannel.TextInputType.PHONE) { return InputType.TYPE_CLASS_PHONE; } else if (type.type == TextInputChannel.TextInputType.NONE) { return InputType.TYPE_NULL; } int textType = InputType.TYPE_CLASS_TEXT; if (type.type == TextInputChannel.TextInputType.MULTILINE) { textType |= InputType.TYPE_TEXT_FLAG_MULTI_LINE; } else if (type.type == TextInputChannel.TextInputType.EMAIL_ADDRESS) { textType |= InputType.TYPE_TEXT_VARIATION_EMAIL_ADDRESS; } else if (type.type == TextInputChannel.TextInputType.URL) { textType |= InputType.TYPE_TEXT_VARIATION_URI; } else if (type.type == TextInputChannel.TextInputType.VISIBLE_PASSWORD) { textType |= InputType.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD; } else if (type.type == TextInputChannel.TextInputType.NAME) { textType |= InputType.TYPE_TEXT_VARIATION_PERSON_NAME; } else if (type.type == TextInputChannel.TextInputType.POSTAL_ADDRESS) { textType |= InputType.TYPE_TEXT_VARIATION_POSTAL_ADDRESS; } if (obscureText) { // Note: both required. Some devices ignore TYPE_TEXT_FLAG_NO_SUGGESTIONS. textType |= InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS; textType |= InputType.TYPE_TEXT_VARIATION_PASSWORD; } else { if (autocorrect) textType |= InputType.TYPE_TEXT_FLAG_AUTO_CORRECT; if (!enableSuggestions) { // Note: both required. Some devices ignore TYPE_TEXT_FLAG_NO_SUGGESTIONS. 
textType |= InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS; textType |= InputType.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD; } } if (textCapitalization == TextInputChannel.TextCapitalization.CHARACTERS) { textType |= InputType.TYPE_TEXT_FLAG_CAP_CHARACTERS; } else if (textCapitalization == TextInputChannel.TextCapitalization.WORDS) { textType |= InputType.TYPE_TEXT_FLAG_CAP_WORDS; } else if (textCapitalization == TextInputChannel.TextCapitalization.SENTENCES) { textType |= InputType.TYPE_TEXT_FLAG_CAP_SENTENCES; } return textType; } @Nullable public InputConnection createInputConnection( @NonNull View view, @NonNull KeyboardManager keyboardManager, @NonNull EditorInfo outAttrs) { if (inputTarget.type == InputTarget.Type.NO_TARGET) { lastInputConnection = null; return null; } if (inputTarget.type == InputTarget.Type.PHYSICAL_DISPLAY_PLATFORM_VIEW) { return null; } if (inputTarget.type == InputTarget.Type.VIRTUAL_DISPLAY_PLATFORM_VIEW) { if (isInputConnectionLocked) { return lastInputConnection; } lastInputConnection = platformViewsController .getPlatformViewById(inputTarget.id) .onCreateInputConnection(outAttrs); return lastInputConnection; } outAttrs.inputType = inputTypeFromTextInputType( configuration.inputType, configuration.obscureText, configuration.autocorrect, configuration.enableSuggestions, configuration.enableIMEPersonalizedLearning, configuration.textCapitalization); outAttrs.imeOptions = EditorInfo.IME_FLAG_NO_FULLSCREEN; if (Build.VERSION.SDK_INT >= API_LEVELS.API_26 && !configuration.enableIMEPersonalizedLearning) { outAttrs.imeOptions |= EditorInfo.IME_FLAG_NO_PERSONALIZED_LEARNING; } int enterAction; if (configuration.inputAction == null) { // If an explicit input action isn't set, then default to none for multi-line fields // and done for single line fields. enterAction = (InputType.TYPE_TEXT_FLAG_MULTI_LINE & outAttrs.inputType) != 0 ? EditorInfo.IME_ACTION_NONE : EditorInfo.IME_ACTION_DONE; } else { enterAction = configuration.inputAction; } if (configuration.actionLabel != null) { outAttrs.actionLabel = configuration.actionLabel; outAttrs.actionId = enterAction; } outAttrs.imeOptions |= enterAction; if (configuration.contentCommitMimeTypes != null) { String[] imgTypeString = configuration.contentCommitMimeTypes; EditorInfoCompat.setContentMimeTypes(outAttrs, imgTypeString); } InputConnectionAdaptor connection = new InputConnectionAdaptor( view, inputTarget.id, textInputChannel, keyboardManager, mEditable, outAttrs); outAttrs.initialSelStart = mEditable.getSelectionStart(); outAttrs.initialSelEnd = mEditable.getSelectionEnd(); lastInputConnection = connection; return lastInputConnection; } @Nullable public InputConnection getLastInputConnection() { return lastInputConnection; } /** * Clears a platform view text input client if it is the current input target. * * <p>This is called when a platform view is disposed to make sure we're not hanging to a stale * input connection. 
*/ public void clearPlatformViewClient(int platformViewId) { if ((inputTarget.type == InputTarget.Type.VIRTUAL_DISPLAY_PLATFORM_VIEW || inputTarget.type == InputTarget.Type.PHYSICAL_DISPLAY_PLATFORM_VIEW) && inputTarget.id == platformViewId) { inputTarget = new InputTarget(InputTarget.Type.NO_TARGET, 0); notifyViewExited(); mImm.hideSoftInputFromWindow(mView.getApplicationWindowToken(), 0); mImm.restartInput(mView); mRestartInputPending = false; } } public void sendTextInputAppPrivateCommand(@NonNull String action, @NonNull Bundle data) { mImm.sendAppPrivateCommand(mView, action, data); } @VisibleForTesting void showTextInput(View view) { if (configuration == null || configuration.inputType == null || configuration.inputType.type != TextInputChannel.TextInputType.NONE) { view.requestFocus(); mImm.showSoftInput(view, 0); } else { hideTextInput(view); } } private void hideTextInput(View view) { notifyViewExited(); // Note: when a virtual display is used, a race condition may lead to us hiding the keyboard // here just after a platform view has shown it. // This can only potentially happen when switching focus from a Flutter text field to a platform // view's text // field(by text field here I mean anything that keeps the keyboard open). // See: https://github.com/flutter/flutter/issues/34169 mImm.hideSoftInputFromWindow(view.getApplicationWindowToken(), 0); } @VisibleForTesting void setTextInputClient(int client, TextInputChannel.Configuration configuration) { // Call notifyViewExited on the previous field. notifyViewExited(); this.configuration = configuration; inputTarget = new InputTarget(InputTarget.Type.FRAMEWORK_CLIENT, client); mEditable.removeEditingStateListener(this); mEditable = new ListenableEditingState( configuration.autofill != null ? configuration.autofill.editState : null, mView); updateAutofillConfigurationIfNeeded(configuration); // setTextInputClient will be followed by a call to setTextInputEditingState. // Do a restartInput at that time. mRestartInputPending = true; unlockPlatformViewInputConnection(); lastClientRect = null; mEditable.addEditingStateListener(this); } private void setPlatformViewTextInputClient(int platformViewId, boolean usesVirtualDisplay) { if (usesVirtualDisplay) { // We need to make sure that the Flutter view is focused so that no imm operations get short // circuited. // Not asking for focus here specifically manifested in a bug on API 28 devices where the // platform view's request to show a keyboard was ignored. mView.requestFocus(); inputTarget = new InputTarget(InputTarget.Type.VIRTUAL_DISPLAY_PLATFORM_VIEW, platformViewId); mImm.restartInput(mView); mRestartInputPending = false; } else { inputTarget = new InputTarget(InputTarget.Type.PHYSICAL_DISPLAY_PLATFORM_VIEW, platformViewId); lastInputConnection = null; } } private static boolean composingChanged( TextInputChannel.TextEditState before, TextInputChannel.TextEditState after) { final int composingRegionLength = before.composingEnd - before.composingStart; if (composingRegionLength != after.composingEnd - after.composingStart) { return true; } for (int index = 0; index < composingRegionLength; index++) { if (before.text.charAt(index + before.composingStart) != after.text.charAt(index + after.composingStart)) { return true; } } return false; } // Called by the text input channel to update the text input plugin with the // latest TextEditState from the framework. 
@VisibleForTesting void setTextInputEditingState(View view, TextInputChannel.TextEditState state) { if (!mRestartInputPending && mLastKnownFrameworkTextEditingState != null && mLastKnownFrameworkTextEditingState.hasComposing()) { // Also restart input if the framework (or the developer) decides to // change the composing region by itself (which is discouraged). Many IMEs // don't expect editors to commit composing text, so a restart is needed // to reset their internal states. mRestartInputPending = composingChanged(mLastKnownFrameworkTextEditingState, state); if (mRestartInputPending) { Log.i(TAG, "Composing region changed by the framework. Restarting the input method."); } } mLastKnownFrameworkTextEditingState = state; mEditable.setEditingState(state); // Restart if needed. Restarting will also update the selection. if (mRestartInputPending) { mImm.restartInput(view); mRestartInputPending = false; } } private interface MinMax { void inspect(double x, double y); } private void saveEditableSizeAndTransform(double width, double height, double[] matrix) { final double[] minMax = new double[4]; // minX, maxX, minY, maxY. final boolean isAffine = matrix[3] == 0 && matrix[7] == 0 && matrix[15] == 1; minMax[0] = minMax[1] = matrix[12] / matrix[15]; // minX and maxX. minMax[2] = minMax[3] = matrix[13] / matrix[15]; // minY and maxY. final MinMax finder = new MinMax() { @Override public void inspect(double x, double y) { final double w = isAffine ? 1 : 1 / (matrix[3] * x + matrix[7] * y + matrix[15]); final double tx = (matrix[0] * x + matrix[4] * y + matrix[12]) * w; final double ty = (matrix[1] * x + matrix[5] * y + matrix[13]) * w; if (tx < minMax[0]) { minMax[0] = tx; } else if (tx > minMax[1]) { minMax[1] = tx; } if (ty < minMax[2]) { minMax[2] = ty; } else if (ty > minMax[3]) { minMax[3] = ty; } } }; finder.inspect(width, 0); finder.inspect(width, height); finder.inspect(0, height); final Float density = mView.getContext().getResources().getDisplayMetrics().density; lastClientRect = new Rect( (int) (minMax[0] * density), (int) (minMax[2] * density), (int) Math.ceil(minMax[1] * density), (int) Math.ceil(minMax[3] * density)); } @VisibleForTesting void clearTextInputClient() { if (inputTarget.type == InputTarget.Type.VIRTUAL_DISPLAY_PLATFORM_VIEW) { // This only applies to platform views that use a virtual display. // Focus changes in the framework tree have no guarantees on the order focus nodes are // notified. A node that lost focus may be notified before or after a node that gained focus. // When moving the focus from a Flutter text field to an AndroidView, it is possible that the // Flutter text field's focus node will be notified that it lost focus after the AndroidView // was notified that it gained focus. When this happens the text field will send a // clearTextInput command which we ignore. // By doing this we prevent the framework from clearing a platform view input client (the only // way to do so is to set a new framework text client). I don't see an obvious use case for // "clearing" a platform view's text input client, and it may be error prone as we don't know // how the platform view manages the input connection and we probably shouldn't interfere. // If we ever want to allow the framework to clear a platform view text client we should // probably consider changing the focus manager such that focus nodes that lost focus are // notified before focus nodes that gained focus as part of the same focus event. 
return; } mEditable.removeEditingStateListener(this); notifyViewExited(); configuration = null; updateAutofillConfigurationIfNeeded(null); inputTarget = new InputTarget(InputTarget.Type.NO_TARGET, 0); unlockPlatformViewInputConnection(); lastClientRect = null; // Call restartInput to reset IME internal states. Otherwise some IMEs (Gboard for instance) // keep reacting based on the previous input configuration until a new configuration is set. mImm.restartInput(mView); } private static class InputTarget { enum Type { NO_TARGET, // InputConnection is managed by the TextInputPlugin, and events are forwarded to the Flutter // framework. FRAMEWORK_CLIENT, // InputConnection is managed by a platform view that is presented on a virtual display. VIRTUAL_DISPLAY_PLATFORM_VIEW, // InputConnection is managed by a platform view that is embedded in the activity's view // hierarchy. This view hierarchy is displayed in a physical display within the aplication // display area. PHYSICAL_DISPLAY_PLATFORM_VIEW, } public InputTarget(@NonNull Type type, int id) { this.type = type; this.id = id; } @NonNull Type type; // The ID of the input target. // // For framework clients this is the framework input connection client ID. // For platform views this is the platform view's ID. int id; } // -------- Start: KeyboardManager Synchronous Responder ------- public boolean handleKeyEvent(@NonNull KeyEvent keyEvent) { if (!getInputMethodManager().isAcceptingText() || lastInputConnection == null) { return false; } // Send the KeyEvent as an IME KeyEvent. If the input connection is an // InputConnectionAdaptor then call its handleKeyEvent method (because // this method will be called by the keyboard manager, and // InputConnectionAdaptor#sendKeyEvent forwards the key event back to the // keyboard manager). return (lastInputConnection instanceof InputConnectionAdaptor) ? ((InputConnectionAdaptor) lastInputConnection).handleKeyEvent(keyEvent) : lastInputConnection.sendKeyEvent(keyEvent); } // -------- End: KeyboardManager Synchronous Responder ------- // -------- Start: ListenableEditingState watcher implementation ------- @Override public void didChangeEditingState( boolean textChanged, boolean selectionChanged, boolean composingRegionChanged) { if (textChanged) { // Notify the autofill manager of the value change. notifyValueChanged(mEditable.toString()); } final int selectionStart = mEditable.getSelectionStart(); final int selectionEnd = mEditable.getSelectionEnd(); final int composingStart = mEditable.getComposingStart(); final int composingEnd = mEditable.getComposingEnd(); final ArrayList<TextEditingDelta> batchTextEditingDeltas = mEditable.extractBatchTextEditingDeltas(); final boolean skipFrameworkUpdate = // The framework needs to send its editing state first. 
mLastKnownFrameworkTextEditingState == null || (mEditable.toString().equals(mLastKnownFrameworkTextEditingState.text) && selectionStart == mLastKnownFrameworkTextEditingState.selectionStart && selectionEnd == mLastKnownFrameworkTextEditingState.selectionEnd && composingStart == mLastKnownFrameworkTextEditingState.composingStart && composingEnd == mLastKnownFrameworkTextEditingState.composingEnd); if (!skipFrameworkUpdate) { Log.v(TAG, "send EditingState to flutter: " + mEditable.toString()); if (configuration.enableDeltaModel) { textInputChannel.updateEditingStateWithDeltas(inputTarget.id, batchTextEditingDeltas); mEditable.clearBatchDeltas(); } else { textInputChannel.updateEditingState( inputTarget.id, mEditable.toString(), selectionStart, selectionEnd, composingStart, composingEnd); } mLastKnownFrameworkTextEditingState = new TextEditState( mEditable.toString(), selectionStart, selectionEnd, composingStart, composingEnd); } else { // Don't accumulate deltas if they are not sent to the framework. mEditable.clearBatchDeltas(); } } // -------- End: ListenableEditingState watcher implementation ------- // -------- Start: Autofill ------- // ### Setup and provide the initial text values and hints. // // The TextInputConfiguration used to set up the current client is also used for populating // "AutofillVirtualStructure" when requested by the autofill manager (AFM), See // #onProvideAutofillVirtualStructure. // // ### Keep the AFM updated // // The autofill session connected to the AFM keeps a copy of the current state for each reported // field in "AutofillVirtualStructure" (instead of holding a reference to those fields), so the // AFM needs to be notified when text changes if the client was part of the // "AutofillVirtualStructure" previously reported to the AFM. This step is essential for // triggering autofill save. This is done in #didChangeEditingState by calling // #notifyValueChanged. // // Additionally when the text input plugin receives a new TextInputConfiguration, // AutofillManager#notifyValueChanged will be called on all the autofillable fields contained in // the TextInputConfiguration, in case some of them are tracked by the session and their values // have changed. However if the value of an unfocused EditableText is changed in the framework, // such change will not be sent to the text input plugin until the next TextInput.attach call. 
private boolean needsAutofill() { return autofillConfiguration != null; } private void notifyViewEntered() { if (Build.VERSION.SDK_INT < API_LEVELS.API_26 || afm == null || !needsAutofill()) { return; } final String triggerIdentifier = configuration.autofill.uniqueIdentifier; final int[] offset = new int[2]; mView.getLocationOnScreen(offset); Rect rect = new Rect(lastClientRect); rect.offset(offset[0], offset[1]); afm.notifyViewEntered(mView, triggerIdentifier.hashCode(), rect); } private void notifyViewExited() { if (Build.VERSION.SDK_INT < API_LEVELS.API_26 || afm == null || configuration == null || configuration.autofill == null || !needsAutofill()) { return; } final String triggerIdentifier = configuration.autofill.uniqueIdentifier; afm.notifyViewExited(mView, triggerIdentifier.hashCode()); } private void notifyValueChanged(String newValue) { if (Build.VERSION.SDK_INT < API_LEVELS.API_26 || afm == null || !needsAutofill()) { return; } final String triggerIdentifier = configuration.autofill.uniqueIdentifier; afm.notifyValueChanged(mView, triggerIdentifier.hashCode(), AutofillValue.forText(newValue)); } private void updateAutofillConfigurationIfNeeded(TextInputChannel.Configuration configuration) { if (Build.VERSION.SDK_INT < API_LEVELS.API_26) { return; } if (configuration == null || configuration.autofill == null) { // Disables autofill if the configuration doesn't have an autofill field. autofillConfiguration = null; return; } final TextInputChannel.Configuration[] configurations = configuration.fields; autofillConfiguration = new SparseArray<>(); if (configurations == null) { autofillConfiguration.put(configuration.autofill.uniqueIdentifier.hashCode(), configuration); } else { for (TextInputChannel.Configuration config : configurations) { TextInputChannel.Configuration.Autofill autofill = config.autofill; if (autofill != null) { autofillConfiguration.put(autofill.uniqueIdentifier.hashCode(), config); afm.notifyValueChanged( mView, autofill.uniqueIdentifier.hashCode(), AutofillValue.forText(autofill.editState.text)); } } } } public void onProvideAutofillVirtualStructure(@NonNull ViewStructure structure, int flags) { if (Build.VERSION.SDK_INT < API_LEVELS.API_26 || !needsAutofill()) { return; } final String triggerIdentifier = configuration.autofill.uniqueIdentifier; final AutofillId parentId = structure.getAutofillId(); for (int i = 0; i < autofillConfiguration.size(); i++) { final int autofillId = autofillConfiguration.keyAt(i); final TextInputChannel.Configuration config = autofillConfiguration.valueAt(i); final TextInputChannel.Configuration.Autofill autofill = config.autofill; if (autofill == null) { continue; } structure.addChildCount(1); final ViewStructure child = structure.newChild(i); child.setAutofillId(parentId, autofillId); // Don't set hints when there's none. // See https://github.com/flutter/flutter/issues/98505. if (autofill.hints.length > 0) { child.setAutofillHints(autofill.hints); } child.setAutofillType(View.AUTOFILL_TYPE_TEXT); child.setVisibility(View.VISIBLE); if (autofill.hintText != null) { child.setHint(autofill.hintText); } // For some autofill services, only visible input fields are eligible for autofill. // Reports the real size of the child if it's the current client, or 1x1 if we don't // know the real dimensions of the child. 
if (triggerIdentifier.hashCode() == autofillId && lastClientRect != null) { child.setDimens( lastClientRect.left, lastClientRect.top, 0, 0, lastClientRect.width(), lastClientRect.height()); child.setAutofillValue(AutofillValue.forText(mEditable)); } else { child.setDimens(0, 0, 0, 0, 1, 1); child.setAutofillValue(AutofillValue.forText(autofill.editState.text)); } } } public void autofill(@NonNull SparseArray<AutofillValue> values) { if (Build.VERSION.SDK_INT < API_LEVELS.API_26) { return; } if (configuration == null || autofillConfiguration == null || configuration.autofill == null) { return; } final TextInputChannel.Configuration.Autofill currentAutofill = configuration.autofill; final HashMap<String, TextInputChannel.TextEditState> editingValues = new HashMap<>(); for (int i = 0; i < values.size(); i++) { int virtualId = values.keyAt(i); final TextInputChannel.Configuration config = autofillConfiguration.get(virtualId); if (config == null || config.autofill == null) { continue; } final TextInputChannel.Configuration.Autofill autofill = config.autofill; final String value = values.valueAt(i).getTextValue().toString(); final TextInputChannel.TextEditState newState = new TextInputChannel.TextEditState(value, value.length(), value.length(), -1, -1); if (autofill.uniqueIdentifier.equals(currentAutofill.uniqueIdentifier)) { // Autofilling the current client is the same as handling user input // from the virtual keyboard. Setting the editable to newState and an // update will be sent to the framework. mEditable.setEditingState(newState); } else { editingValues.put(autofill.uniqueIdentifier, newState); } } textInputChannel.updateEditingStateWithTag(inputTarget.id, editingValues); } // -------- End: Autofill ------- }
engine/shell/platform/android/io/flutter/plugin/editing/TextInputPlugin.java/0
{ "file_path": "engine/shell/platform/android/io/flutter/plugin/editing/TextInputPlugin.java", "repo_id": "engine", "token_count": 11655 }
320
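A detail worth calling out in TextInputPlugin above: setTextInputEditingState restarts the input method whenever the framework changes the composing region on its own, because many IMEs cannot cope with their composing text being edited underneath them. The private composingChanged helper drives that decision by comparing the region's length and contents. The sketch below mirrors that comparison in isolation; EditState is a hypothetical stand-in for TextInputChannel.TextEditState.

// Sketch of the composing-region comparison behind the restart-input decision.
public final class ComposingCheck {
  static final class EditState {
    final String text;
    final int composingStart;
    final int composingEnd;
    EditState(String text, int composingStart, int composingEnd) {
      this.text = text;
      this.composingStart = composingStart;
      this.composingEnd = composingEnd;
    }
  }

  static boolean composingChanged(EditState before, EditState after) {
    final int length = before.composingEnd - before.composingStart;
    if (length != after.composingEnd - after.composingStart) {
      return true; // The composing region grew or shrank.
    }
    for (int i = 0; i < length; i++) {
      if (before.text.charAt(before.composingStart + i)
          != after.text.charAt(after.composingStart + i)) {
        return true; // Same length, different composing text.
      }
    }
    return false;
  }

  public static void main(String[] args) {
    EditState before = new EditState("hello wor", 6, 9);   // composing "wor"
    EditState moved = new EditState("xhello wor", 7, 10);  // still "wor", shifted
    EditState edited = new EditState("hello war", 6, 9);   // now "war"
    System.out.println(composingChanged(before, moved));   // false: no restart needed
    System.out.println(composingChanged(before, edited));  // true: restart the IME
  }
}

Note that a pure shift of the region (same text at a new offset) does not count as a change, which keeps restarts to the cases where the IME's internal composing state is actually stale.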
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package io.flutter.plugin.platform; import static android.content.Context.WINDOW_SERVICE; import static android.view.View.OnFocusChangeListener; import android.app.AlertDialog; import android.app.Presentation; import android.content.Context; import android.content.ContextWrapper; import android.content.MutableContextWrapper; import android.graphics.drawable.ColorDrawable; import android.os.Bundle; import android.view.Display; import android.view.View; import android.view.WindowManager; import android.view.accessibility.AccessibilityEvent; import android.view.inputmethod.InputMethodManager; import android.widget.FrameLayout; import androidx.annotation.Keep; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import io.flutter.Log; /* * A presentation used for hosting a single Android view in a virtual display. * * This presentation overrides the WindowManager's addView/removeView/updateViewLayout methods, such that views added * directly to the WindowManager are added as part of the presentation's view hierarchy (to fakeWindowViewGroup). * * The view hierarchy for the presentation is as following: * * rootView * / \ * / \ * / \ * container state.fakeWindowViewGroup * | * EmbeddedView */ @Keep class SingleViewPresentation extends Presentation { private static final String TAG = "PlatformViewsController"; /* * When an embedded view is resized in Flutterverse we move the Android view to a new virtual display * that has the new size. This class keeps the presentation state that moves with the view to the presentation of * the new virtual display. */ static class PresentationState { // The Android view we are embedding in the Flutter app. private PlatformView platformView; // The InvocationHandler for a WindowManager proxy. This is essentially the custom window // manager for the // presentation. private WindowManagerHandler windowManagerHandler; // Contains views that were added directly to the window manager (e.g // android.widget.PopupWindow). private SingleViewFakeWindowViewGroup fakeWindowViewGroup; } // A reference to the current accessibility bridge to which accessibility events will be // delegated. private final AccessibilityEventsDelegate accessibilityEventsDelegate; private final OnFocusChangeListener focusChangeListener; // This is the view id assigned by the Flutter framework to the embedded view, we keep it here // so when we create the platform view we can tell it its view id. private int viewId; // The root view for the presentation, it has 2 childs: container which contains the embedded // view, and // fakeWindowViewGroup which contains views that were added directly to the presentation's window // manager. private AccessibilityDelegatingFrameLayout rootView; // Contains the embedded platform view (platformView.getView()) when it is attached to the // presentation. private FrameLayout container; private final PresentationState state; private boolean startFocused = false; // The context for the application window that hosts FlutterView. private final Context outerContext; /** * Creates a presentation that will use the view factory to create a new platform view in the * presentation's onCreate, and attach it. 
*/ public SingleViewPresentation( Context outerContext, Display display, PlatformView view, AccessibilityEventsDelegate accessibilityEventsDelegate, int viewId, OnFocusChangeListener focusChangeListener) { super(new ImmContext(outerContext), display); this.accessibilityEventsDelegate = accessibilityEventsDelegate; this.viewId = viewId; this.focusChangeListener = focusChangeListener; this.outerContext = outerContext; state = new PresentationState(); state.platformView = view; getWindow() .setFlags( WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE); getWindow().setType(WindowManager.LayoutParams.TYPE_PRIVATE_PRESENTATION); } /** * Creates a presentation that will attach an already existing view as its root view. * * <p>The display's density must match the density of the context used when the view was created. */ public SingleViewPresentation( Context outerContext, Display display, AccessibilityEventsDelegate accessibilityEventsDelegate, PresentationState state, OnFocusChangeListener focusChangeListener, boolean startFocused) { super(new ImmContext(outerContext), display); this.accessibilityEventsDelegate = accessibilityEventsDelegate; this.state = state; this.focusChangeListener = focusChangeListener; this.outerContext = outerContext; getWindow() .setFlags( WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE, WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE); this.startFocused = startFocused; } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // This makes sure we preserve alpha for the VD's content. getWindow().setBackgroundDrawable(new ColorDrawable(android.graphics.Color.TRANSPARENT)); if (state.fakeWindowViewGroup == null) { state.fakeWindowViewGroup = new SingleViewFakeWindowViewGroup(getContext()); } if (state.windowManagerHandler == null) { WindowManager windowManagerDelegate = (WindowManager) getContext().getSystemService(WINDOW_SERVICE); state.windowManagerHandler = new WindowManagerHandler(windowManagerDelegate, state.fakeWindowViewGroup); } container = new FrameLayout(getContext()); // Our base mContext has already been wrapped with an IMM cache at instantiation time, but // we want to wrap it again here to also return state.windowManagerHandler. Context baseContext = new PresentationContext(getContext(), state.windowManagerHandler, outerContext); View embeddedView = state.platformView.getView(); if (embeddedView.getContext() instanceof MutableContextWrapper) { MutableContextWrapper currentContext = (MutableContextWrapper) embeddedView.getContext(); currentContext.setBaseContext(baseContext); } else { // In some cases, such as when using LayoutInflator, the original context // may not be preserved. For backward compatibility with previous // implementations of Virtual Display, which didn't validate the context, // continue, but log a warning indicating that some functionality may not // work as expected. // See https://github.com/flutter/flutter/issues/110146 for context. Log.w( TAG, "Unexpected platform view context for view ID " + viewId + "; some functionality may not work correctly. When constructing a platform view " + "in the factory, ensure that the view returned from PlatformViewFactory#create " + "returns the provided context from getContext(). 
If you are unable to associate " + "the view with that context, consider using Hybrid Composition instead."); } container.addView(embeddedView); rootView = new AccessibilityDelegatingFrameLayout( getContext(), accessibilityEventsDelegate, embeddedView); rootView.addView(container); rootView.addView(state.fakeWindowViewGroup); embeddedView.setOnFocusChangeListener(focusChangeListener); rootView.setFocusableInTouchMode(true); if (startFocused) { embeddedView.requestFocus(); } else { rootView.requestFocus(); } setContentView(rootView); } public PresentationState detachState() { // These views can be null before onCreate() is called if (container != null) { container.removeAllViews(); } if (rootView != null) { rootView.removeAllViews(); } return state; } @Nullable public PlatformView getView() { return state.platformView; } /** Answers calls for {@link InputMethodManager} with an instance cached at creation time. */ // TODO(mklim): This caches the IMM at construction time and won't pick up any changes. In rare // cases where the FlutterView changes windows this will return an outdated instance. This // should be fixed to instead defer returning the IMM to something that know's FlutterView's // true Context. private static class ImmContext extends ContextWrapper { private @NonNull final InputMethodManager inputMethodManager; ImmContext(Context base) { this(base, /*inputMethodManager=*/ null); } private ImmContext(Context base, @Nullable InputMethodManager inputMethodManager) { super(base); this.inputMethodManager = inputMethodManager != null ? inputMethodManager : (InputMethodManager) base.getSystemService(INPUT_METHOD_SERVICE); } @Override public Object getSystemService(String name) { if (INPUT_METHOD_SERVICE.equals(name)) { return inputMethodManager; } return super.getSystemService(name); } @Override public Context createDisplayContext(Display display) { Context displayContext = super.createDisplayContext(display); return new ImmContext(displayContext, inputMethodManager); } } /** Proxies a Context replacing the WindowManager with our custom instance. */ // TODO(mklim): This caches the IMM at construction time and won't pick up any changes. In rare // cases where the FlutterView changes windows this will return an outdated instance. This // should be fixed to instead defer returning the IMM to something that know's FlutterView's // true Context. private static class PresentationContext extends ContextWrapper { private @NonNull final WindowManagerHandler windowManagerHandler; private @Nullable WindowManager windowManager; private final Context flutterAppWindowContext; PresentationContext( Context base, @NonNull WindowManagerHandler windowManagerHandler, Context flutterAppWindowContext) { super(base); this.windowManagerHandler = windowManagerHandler; this.flutterAppWindowContext = flutterAppWindowContext; } @Override public Object getSystemService(String name) { if (WINDOW_SERVICE.equals(name)) { if (isCalledFromAlertDialog()) { // Alert dialogs are showing on top of the entire application and should not be limited to // the virtual // display. If we detect that an android.app.AlertDialog constructor is what's fetching // the window manager // we return the one for the application's window. // // Note that if we don't do this AlertDialog will throw a ClassCastException as down the // line it tries // to case this instance to a WindowManagerImpl which the object returned by // getWindowManager is not // a subclass of. 
return flutterAppWindowContext.getSystemService(name); } return getWindowManager(); } return super.getSystemService(name); } private WindowManager getWindowManager() { if (windowManager == null) { windowManager = windowManagerHandler; } return windowManager; } private boolean isCalledFromAlertDialog() { StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace(); for (int i = 0; i < stackTraceElements.length && i < 11; i++) { if (stackTraceElements[i].getClassName().equals(AlertDialog.class.getCanonicalName()) && stackTraceElements[i].getMethodName().equals("<init>")) { return true; } } return false; } } private static class AccessibilityDelegatingFrameLayout extends FrameLayout { private final AccessibilityEventsDelegate accessibilityEventsDelegate; private final View embeddedView; public AccessibilityDelegatingFrameLayout( Context context, AccessibilityEventsDelegate accessibilityEventsDelegate, View embeddedView) { super(context); this.accessibilityEventsDelegate = accessibilityEventsDelegate; this.embeddedView = embeddedView; } @Override public boolean requestSendAccessibilityEvent(View child, AccessibilityEvent event) { return accessibilityEventsDelegate.requestSendAccessibilityEvent(embeddedView, child, event); } } }
engine/shell/platform/android/io/flutter/plugin/platform/SingleViewPresentation.java/0
{ "file_path": "engine/shell/platform/android/io/flutter/plugin/platform/SingleViewPresentation.java", "repo_id": "engine", "token_count": 4017 }
321
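The SingleViewPresentation entry above hinges on wrapping a Context so that getSystemService hands back a substituted object (ImmContext caches the InputMethodManager; PresentationContext swaps in the custom WindowManagerHandler). The following is a minimal, hypothetical Java sketch of that wrapping pattern only — it is not the engine's classes, and the service name and replacement object are placeholders supplied by the caller.

import android.content.Context;
import android.content.ContextWrapper;

// Minimal sketch of the Context-wrapping pattern used by ImmContext and
// PresentationContext above: intercept one system-service lookup and return a
// cached or substituted instance, delegating everything else to the base
// Context. The service name and replacement object here are placeholders.
final class ServiceOverridingContext extends ContextWrapper {
  private final String overriddenServiceName; // e.g. Context.WINDOW_SERVICE
  private final Object replacementService;    // the instance to hand back instead

  ServiceOverridingContext(Context base, String serviceName, Object replacement) {
    super(base);
    this.overriddenServiceName = serviceName;
    this.replacementService = replacement;
  }

  @Override
  public Object getSystemService(String name) {
    if (overriddenServiceName.equals(name)) {
      // Callers resolving this service through the wrapped Context receive the
      // substituted instance instead of the platform default.
      return replacementService;
    }
    return super.getSystemService(name);
  }
}

The real PresentationContext additionally inspects the call stack so that AlertDialog keeps using the application window's WindowManager; that special case is omitted here.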
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package io.flutter.view; import android.content.Context; import android.os.Handler; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import io.flutter.FlutterInjector; import io.flutter.embedding.engine.loader.FlutterLoader; /** * A legacy class to initialize the Flutter engine. * * @deprecated Replaced by {@link io.flutter.embedding.engine.loader.FlutterLoader}. */ @Deprecated public class FlutterMain { public static class Settings { private String logTag; @Nullable public String getLogTag() { return logTag; } /** * Set the tag associated with Flutter app log messages. * * @param tag Log tag. */ public void setLogTag(String tag) { logTag = tag; } } /** * Starts initialization of the native system. * * @param applicationContext The Android application context. */ public static void startInitialization(@NonNull Context applicationContext) { FlutterInjector.instance().flutterLoader().startInitialization(applicationContext); } /** * Starts initialization of the native system. * * <p>This loads the Flutter engine's native library to enable subsequent JNI calls. This also * starts locating and unpacking Dart resources packaged in the app's APK. * * <p>Calling this method multiple times has no effect. * * @param applicationContext The Android application context. * @param settings Configuration settings. */ public static void startInitialization( @NonNull Context applicationContext, @NonNull Settings settings) { FlutterLoader.Settings newSettings = new FlutterLoader.Settings(); newSettings.setLogTag(settings.getLogTag()); FlutterInjector.instance().flutterLoader().startInitialization(applicationContext, newSettings); } /** * Blocks until initialization of the native system has completed. * * <p>Calling this method multiple times has no effect. * * @param applicationContext The Android application context. * @param args Flags sent to the Flutter runtime. */ public static void ensureInitializationComplete( @NonNull Context applicationContext, @Nullable String[] args) { FlutterInjector.instance() .flutterLoader() .ensureInitializationComplete(applicationContext, args); } /** * Same as {@link #ensureInitializationComplete(Context, String[])} but waiting on a background * thread, then invoking {@code callback} on the {@code callbackHandler}. */ public static void ensureInitializationCompleteAsync( @NonNull Context applicationContext, @Nullable String[] args, @NonNull Handler callbackHandler, @NonNull Runnable callback) { FlutterInjector.instance() .flutterLoader() .ensureInitializationCompleteAsync(applicationContext, args, callbackHandler, callback); } @NonNull public static String findAppBundlePath() { return FlutterInjector.instance().flutterLoader().findAppBundlePath(); } @Deprecated @Nullable public static String findAppBundlePath(@NonNull Context applicationContext) { return FlutterInjector.instance().flutterLoader().findAppBundlePath(); } /** * Returns the file name for the given asset. The returned file name can be used to access the * asset in the APK through the {@link android.content.res.AssetManager} API. * * @param asset the name of the asset. 
The name can be hierarchical * @return the filename to be used with {@link android.content.res.AssetManager} */ @NonNull public static String getLookupKeyForAsset(@NonNull String asset) { return FlutterInjector.instance().flutterLoader().getLookupKeyForAsset(asset); } /** * Returns the file name for the given asset which originates from the specified packageName. The * returned file name can be used to access the asset in the APK through the {@link * android.content.res.AssetManager} API. * * @param asset the name of the asset. The name can be hierarchical * @param packageName the name of the package from which the asset originates * @return the file name to be used with {@link android.content.res.AssetManager} */ @NonNull public static String getLookupKeyForAsset(@NonNull String asset, @NonNull String packageName) { return FlutterInjector.instance().flutterLoader().getLookupKeyForAsset(asset, packageName); } }
engine/shell/platform/android/io/flutter/view/FlutterMain.java/0
{ "file_path": "engine/shell/platform/android/io/flutter/view/FlutterMain.java", "repo_id": "engine", "token_count": 1369 }
322
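FlutterMain above is a deprecated facade that forwards everything to FlutterLoader via FlutterInjector. As a rough usage sketch only — the host Application subclass and asset name are assumptions, and new code should use FlutterLoader directly per the deprecation note — the legacy flow looked like this:

import android.app.Application;
import io.flutter.view.FlutterMain;

// Sketch of the legacy initialization flow that FlutterMain forwards to
// FlutterLoader. The Application subclass and asset name are placeholders.
@SuppressWarnings("deprecation")
public class LegacyHostApplication extends Application {
  @Override
  public void onCreate() {
    super.onCreate();
    // Loads the native library and starts unpacking Dart resources.
    FlutterMain.startInitialization(this);
    // Blocks until native initialization finishes; null means no extra engine flags.
    FlutterMain.ensureInitializationComplete(this, null);
    // Maps a Flutter asset name to the key used with android.content.res.AssetManager.
    String assetKey = FlutterMain.getLookupKeyForAsset("assets/config.json");
  }
}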
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/shell/platform/android/platform_view_android.h" #include <android/api-level.h> #include <memory> #include <utility> #include "flutter/common/graphics/texture.h" #include "flutter/fml/synchronization/waitable_event.h" #include "flutter/shell/common/shell_io_manager.h" #include "flutter/shell/gpu/gpu_surface_gl_delegate.h" #include "flutter/shell/platform/android/android_context_gl_impeller.h" #include "flutter/shell/platform/android/android_context_gl_skia.h" #include "flutter/shell/platform/android/android_context_vulkan_impeller.h" #include "flutter/shell/platform/android/android_surface_gl_impeller.h" #include "flutter/shell/platform/android/android_surface_gl_skia.h" #include "flutter/shell/platform/android/android_surface_software.h" #include "flutter/shell/platform/android/image_external_texture_gl.h" #include "flutter/shell/platform/android/surface_texture_external_texture_gl.h" #include "fml/logging.h" #if IMPELLER_ENABLE_VULKAN // b/258506856 for why this is behind an if #include "flutter/shell/platform/android/android_surface_vulkan_impeller.h" #include "flutter/shell/platform/android/image_external_texture_vk.h" #endif #include "flutter/shell/platform/android/context/android_context.h" #include "flutter/shell/platform/android/external_view_embedder/external_view_embedder.h" #include "flutter/shell/platform/android/jni/platform_view_android_jni.h" #include "flutter/shell/platform/android/platform_message_response_android.h" #include "flutter/shell/platform/android/surface/android_surface.h" #include "flutter/shell/platform/android/surface/snapshot_surface_producer.h" #include "flutter/shell/platform/android/vsync_waiter_android.h" namespace flutter { AndroidSurfaceFactoryImpl::AndroidSurfaceFactoryImpl( const std::shared_ptr<AndroidContext>& context, bool enable_impeller) : android_context_(context), enable_impeller_(enable_impeller) {} AndroidSurfaceFactoryImpl::~AndroidSurfaceFactoryImpl() = default; std::unique_ptr<AndroidSurface> AndroidSurfaceFactoryImpl::CreateSurface() { switch (android_context_->RenderingApi()) { case AndroidRenderingAPI::kSoftware: return std::make_unique<AndroidSurfaceSoftware>(); case AndroidRenderingAPI::kImpellerOpenGLES: return std::make_unique<AndroidSurfaceGLImpeller>( std::static_pointer_cast<AndroidContextGLImpeller>(android_context_)); case AndroidRenderingAPI::kSkiaOpenGLES: return std::make_unique<AndroidSurfaceGLSkia>( std::static_pointer_cast<AndroidContextGLSkia>(android_context_)); case AndroidRenderingAPI::kImpellerVulkan: return std::make_unique<AndroidSurfaceVulkanImpeller>( std::static_pointer_cast<AndroidContextVulkanImpeller>( android_context_)); } FML_UNREACHABLE(); } static std::shared_ptr<flutter::AndroidContext> CreateAndroidContext( bool use_software_rendering, const flutter::TaskRunners& task_runners, uint8_t msaa_samples, AndroidRenderingAPI android_rendering_api, bool enable_vulkan_validation, bool enable_opengl_gpu_tracing, bool enable_vulkan_gpu_tracing) { switch (android_rendering_api) { case AndroidRenderingAPI::kSoftware: return std::make_shared<AndroidContext>(AndroidRenderingAPI::kSoftware); case AndroidRenderingAPI::kImpellerOpenGLES: return std::make_unique<AndroidContextGLImpeller>( std::make_unique<impeller::egl::Display>(), enable_opengl_gpu_tracing); case AndroidRenderingAPI::kImpellerVulkan: return 
std::make_unique<AndroidContextVulkanImpeller>( enable_vulkan_validation, enable_vulkan_gpu_tracing); case AndroidRenderingAPI::kSkiaOpenGLES: return std::make_unique<AndroidContextGLSkia>( fml::MakeRefCounted<AndroidEnvironmentGL>(), // task_runners, // msaa_samples // ); } FML_UNREACHABLE(); } PlatformViewAndroid::PlatformViewAndroid( PlatformView::Delegate& delegate, const flutter::TaskRunners& task_runners, const std::shared_ptr<PlatformViewAndroidJNI>& jni_facade, bool use_software_rendering, uint8_t msaa_samples) : PlatformViewAndroid( delegate, task_runners, jni_facade, CreateAndroidContext( use_software_rendering, task_runners, msaa_samples, delegate.OnPlatformViewGetSettings().android_rendering_api, delegate.OnPlatformViewGetSettings().enable_vulkan_validation, delegate.OnPlatformViewGetSettings().enable_opengl_gpu_tracing, delegate.OnPlatformViewGetSettings().enable_vulkan_gpu_tracing)) { } PlatformViewAndroid::PlatformViewAndroid( PlatformView::Delegate& delegate, const flutter::TaskRunners& task_runners, const std::shared_ptr<PlatformViewAndroidJNI>& jni_facade, const std::shared_ptr<flutter::AndroidContext>& android_context) : PlatformView(delegate, task_runners), jni_facade_(jni_facade), android_context_(android_context), platform_view_android_delegate_(jni_facade), platform_message_handler_(new PlatformMessageHandlerAndroid(jni_facade)) { if (android_context_) { FML_CHECK(android_context_->IsValid()) << "Could not create surface from invalid Android context."; surface_factory_ = std::make_shared<AndroidSurfaceFactoryImpl>( android_context_, // delegate.OnPlatformViewGetSettings().enable_impeller // ); android_surface_ = surface_factory_->CreateSurface(); FML_CHECK(android_surface_ && android_surface_->IsValid()) << "Could not create an OpenGL, Vulkan or Software surface to set up " "rendering."; } } PlatformViewAndroid::~PlatformViewAndroid() = default; void PlatformViewAndroid::NotifyCreated( fml::RefPtr<AndroidNativeWindow> native_window) { if (android_surface_) { InstallFirstFrameCallback(); fml::AutoResetWaitableEvent latch; fml::TaskRunner::RunNowOrPostTask( task_runners_.GetRasterTaskRunner(), [&latch, surface = android_surface_.get(), native_window = std::move(native_window)]() { surface->SetNativeWindow(native_window); latch.Signal(); }); latch.Wait(); } PlatformView::NotifyCreated(); } void PlatformViewAndroid::NotifySurfaceWindowChanged( fml::RefPtr<AndroidNativeWindow> native_window) { if (android_surface_) { fml::AutoResetWaitableEvent latch; fml::TaskRunner::RunNowOrPostTask( task_runners_.GetRasterTaskRunner(), [&latch, surface = android_surface_.get(), native_window = std::move(native_window)]() { surface->TeardownOnScreenContext(); surface->SetNativeWindow(native_window); latch.Signal(); }); latch.Wait(); } PlatformView::ScheduleFrame(); } void PlatformViewAndroid::NotifyDestroyed() { PlatformView::NotifyDestroyed(); if (android_surface_) { fml::AutoResetWaitableEvent latch; fml::TaskRunner::RunNowOrPostTask( task_runners_.GetRasterTaskRunner(), [&latch, surface = android_surface_.get()]() { surface->TeardownOnScreenContext(); latch.Signal(); }); latch.Wait(); } } void PlatformViewAndroid::NotifyChanged(const SkISize& size) { if (!android_surface_) { return; } fml::AutoResetWaitableEvent latch; fml::TaskRunner::RunNowOrPostTask( task_runners_.GetRasterTaskRunner(), // [&latch, surface = android_surface_.get(), size]() { surface->OnScreenSurfaceResize(size); latch.Signal(); }); latch.Wait(); } void PlatformViewAndroid::DispatchPlatformMessage(JNIEnv* env, 
std::string name, jobject java_message_data, jint java_message_position, jint response_id) { uint8_t* message_data = static_cast<uint8_t*>(env->GetDirectBufferAddress(java_message_data)); fml::MallocMapping message = fml::MallocMapping::Copy(message_data, java_message_position); fml::RefPtr<flutter::PlatformMessageResponse> response; if (response_id) { response = fml::MakeRefCounted<PlatformMessageResponseAndroid>( response_id, jni_facade_, task_runners_.GetPlatformTaskRunner()); } PlatformView::DispatchPlatformMessage( std::make_unique<flutter::PlatformMessage>( std::move(name), std::move(message), std::move(response))); } void PlatformViewAndroid::DispatchEmptyPlatformMessage(JNIEnv* env, std::string name, jint response_id) { fml::RefPtr<flutter::PlatformMessageResponse> response; if (response_id) { response = fml::MakeRefCounted<PlatformMessageResponseAndroid>( response_id, jni_facade_, task_runners_.GetPlatformTaskRunner()); } PlatformView::DispatchPlatformMessage( std::make_unique<flutter::PlatformMessage>(std::move(name), std::move(response))); } // |PlatformView| void PlatformViewAndroid::HandlePlatformMessage( std::unique_ptr<flutter::PlatformMessage> message) { // Called from the ui thread. platform_message_handler_->HandlePlatformMessage(std::move(message)); } // |PlatformView| void PlatformViewAndroid::OnPreEngineRestart() const { jni_facade_->FlutterViewOnPreEngineRestart(); } void PlatformViewAndroid::DispatchSemanticsAction(JNIEnv* env, jint id, jint action, jobject args, jint args_position) { if (env->IsSameObject(args, NULL)) { PlatformView::DispatchSemanticsAction( id, static_cast<flutter::SemanticsAction>(action), fml::MallocMapping()); return; } uint8_t* args_data = static_cast<uint8_t*>(env->GetDirectBufferAddress(args)); auto args_vector = fml::MallocMapping::Copy(args_data, args_position); PlatformView::DispatchSemanticsAction( id, static_cast<flutter::SemanticsAction>(action), std::move(args_vector)); } // |PlatformView| void PlatformViewAndroid::UpdateSemantics( flutter::SemanticsNodeUpdates update, flutter::CustomAccessibilityActionUpdates actions) { platform_view_android_delegate_.UpdateSemantics(update, actions); } void PlatformViewAndroid::RegisterExternalTexture( int64_t texture_id, const fml::jni::ScopedJavaGlobalRef<jobject>& surface_texture) { switch (android_context_->RenderingApi()) { case AndroidRenderingAPI::kImpellerOpenGLES: // Impeller GLES. RegisterTexture(std::make_shared<SurfaceTextureExternalTextureImpellerGL>( std::static_pointer_cast<impeller::ContextGLES>( android_context_->GetImpellerContext()), texture_id, surface_texture, jni_facade_)); break; case AndroidRenderingAPI::kSkiaOpenGLES: // Legacy GL. RegisterTexture(std::make_shared<SurfaceTextureExternalTextureGL>( texture_id, surface_texture, jni_facade_)); break; case AndroidRenderingAPI::kSoftware: case AndroidRenderingAPI::kImpellerVulkan: FML_LOG(INFO) << "Attempted to use a SurfaceTextureExternalTexture with an " "unsupported rendering API."; break; } } void PlatformViewAndroid::RegisterImageTexture( int64_t texture_id, const fml::jni::ScopedJavaGlobalRef<jobject>& image_texture_entry) { switch (android_context_->RenderingApi()) { case AndroidRenderingAPI::kImpellerOpenGLES: // Impeller GLES. RegisterTexture(std::make_shared<ImageExternalTextureGLImpeller>( std::static_pointer_cast<impeller::ContextGLES>( android_context_->GetImpellerContext()), texture_id, image_texture_entry, jni_facade_)); break; case AndroidRenderingAPI::kSkiaOpenGLES: // Legacy GL. 
RegisterTexture(std::make_shared<ImageExternalTextureGLSkia>( std::static_pointer_cast<AndroidContextGLSkia>(android_context_), texture_id, image_texture_entry, jni_facade_)); break; case AndroidRenderingAPI::kImpellerVulkan: RegisterTexture(std::make_shared<ImageExternalTextureVK>( std::static_pointer_cast<impeller::ContextVK>( android_context_->GetImpellerContext()), texture_id, image_texture_entry, jni_facade_)); break; case AndroidRenderingAPI::kSoftware: FML_LOG(INFO) << "Attempted to use a SurfaceTextureExternalTexture with an " "unsupported rendering API."; break; } } // |PlatformView| std::unique_ptr<VsyncWaiter> PlatformViewAndroid::CreateVSyncWaiter() { return std::make_unique<VsyncWaiterAndroid>(task_runners_); } // |PlatformView| std::unique_ptr<Surface> PlatformViewAndroid::CreateRenderingSurface() { if (!android_surface_) { return nullptr; } return android_surface_->CreateGPUSurface( android_context_->GetMainSkiaContext().get()); } // |PlatformView| std::shared_ptr<ExternalViewEmbedder> PlatformViewAndroid::CreateExternalViewEmbedder() { return std::make_shared<AndroidExternalViewEmbedder>( *android_context_, jni_facade_, surface_factory_, task_runners_); } // |PlatformView| std::unique_ptr<SnapshotSurfaceProducer> PlatformViewAndroid::CreateSnapshotSurfaceProducer() { if (!android_surface_) { return nullptr; } return std::make_unique<AndroidSnapshotSurfaceProducer>(*android_surface_); } // |PlatformView| sk_sp<GrDirectContext> PlatformViewAndroid::CreateResourceContext() const { if (!android_surface_) { return nullptr; } sk_sp<GrDirectContext> resource_context; if (android_surface_->ResourceContextMakeCurrent()) { // TODO(chinmaygarde): Currently, this code depends on the fact that only // the OpenGL surface will be able to make a resource context current. If // this changes, this assumption breaks. Handle the same. 
resource_context = ShellIOManager::CreateCompatibleResourceLoadingContext( GrBackendApi::kOpenGL, GPUSurfaceGLDelegate::GetDefaultPlatformGLInterface()); } else { FML_DLOG(ERROR) << "Could not make the resource context current."; } return resource_context; } // |PlatformView| void PlatformViewAndroid::ReleaseResourceContext() const { if (android_surface_) { android_surface_->ResourceContextClearCurrent(); } } // |PlatformView| std::shared_ptr<impeller::Context> PlatformViewAndroid::GetImpellerContext() const { if (android_surface_) { return android_surface_->GetImpellerContext(); } return nullptr; } // |PlatformView| std::unique_ptr<std::vector<std::string>> PlatformViewAndroid::ComputePlatformResolvedLocales( const std::vector<std::string>& supported_locale_data) { return jni_facade_->FlutterViewComputePlatformResolvedLocale( supported_locale_data); } // |PlatformView| void PlatformViewAndroid::RequestDartDeferredLibrary(intptr_t loading_unit_id) { if (jni_facade_->RequestDartDeferredLibrary(loading_unit_id)) { return; } return; // TODO(garyq): Call LoadDartDeferredLibraryFailure() } // |PlatformView| void PlatformViewAndroid::LoadDartDeferredLibrary( intptr_t loading_unit_id, std::unique_ptr<const fml::Mapping> snapshot_data, std::unique_ptr<const fml::Mapping> snapshot_instructions) { delegate_.LoadDartDeferredLibrary(loading_unit_id, std::move(snapshot_data), std::move(snapshot_instructions)); } // |PlatformView| void PlatformViewAndroid::LoadDartDeferredLibraryError( intptr_t loading_unit_id, const std::string error_message, bool transient) { delegate_.LoadDartDeferredLibraryError(loading_unit_id, error_message, transient); } // |PlatformView| void PlatformViewAndroid::UpdateAssetResolverByType( std::unique_ptr<AssetResolver> updated_asset_resolver, AssetResolver::AssetResolverType type) { delegate_.UpdateAssetResolverByType(std::move(updated_asset_resolver), type); } void PlatformViewAndroid::InstallFirstFrameCallback() { // On Platform Task Runner. SetNextFrameCallback( [platform_view = GetWeakPtr(), platform_task_runner = task_runners_.GetPlatformTaskRunner()]() { // On GPU Task Runner. platform_task_runner->PostTask([platform_view]() { // Back on Platform Task Runner. if (platform_view) { reinterpret_cast<PlatformViewAndroid*>(platform_view.get()) ->FireFirstFrameCallback(); } }); }); } void PlatformViewAndroid::FireFirstFrameCallback() { jni_facade_->FlutterViewOnFirstFrame(); } double PlatformViewAndroid::GetScaledFontSize(double unscaled_font_size, int configuration_id) const { return jni_facade_->FlutterViewGetScaledFontSize(unscaled_font_size, configuration_id); } } // namespace flutter
engine/shell/platform/android/platform_view_android.cc/0
{ "file_path": "engine/shell/platform/android/platform_view_android.cc", "repo_id": "engine", "token_count": 6916 }
323
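platform_view_android.cc above picks an AndroidSurface implementation by switching on the context's rendering API inside AndroidSurfaceFactoryImpl. The Java sketch below only mirrors the shape of that factory-per-rendering-API selection with hypothetical types; none of these names exist in the engine.

// Hypothetical illustration of the "switch on rendering API inside a factory"
// pattern used by AndroidSurfaceFactoryImpl above. The types are placeholders
// that only mirror the shape of the C++ code.
enum RenderingApi { SOFTWARE, SKIA_OPENGLES, IMPELLER_OPENGLES, IMPELLER_VULKAN }

interface RenderSurface {
  String describe();
}

final class SurfaceFactory {
  static RenderSurface createSurface(RenderingApi api) {
    switch (api) {
      case SOFTWARE:
        return () -> "software surface";
      case SKIA_OPENGLES:
        return () -> "Skia GLES surface";
      case IMPELLER_OPENGLES:
        return () -> "Impeller GLES surface";
      case IMPELLER_VULKAN:
        return () -> "Impeller Vulkan surface";
    }
    throw new AssertionError("unreachable");
  }
}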
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "flutter/shell/platform/android/surface/snapshot_surface_producer.h"

namespace flutter {

AndroidSnapshotSurfaceProducer::AndroidSnapshotSurfaceProducer(
    AndroidSurface& android_surface)
    : android_surface_(android_surface) {}

std::unique_ptr<Surface>
AndroidSnapshotSurfaceProducer::CreateSnapshotSurface() {
  return android_surface_.CreateSnapshotSurface();
}

}  // namespace flutter
engine/shell/platform/android/surface/snapshot_surface_producer.cc/0
{ "file_path": "engine/shell/platform/android/surface/snapshot_surface_producer.cc", "repo_id": "engine", "token_count": 168 }
324
package io.flutter.embedding.android; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import android.content.Context; import androidx.activity.OnBackPressedCallback; import androidx.fragment.app.FragmentActivity; import androidx.test.core.app.ApplicationProvider; import androidx.test.ext.junit.runners.AndroidJUnit4; import io.flutter.embedding.engine.FlutterEngine; import io.flutter.embedding.engine.FlutterEngineCache; import io.flutter.embedding.engine.FlutterJNI; import io.flutter.embedding.engine.loader.FlutterLoader; import java.util.ArrayList; import java.util.Arrays; import java.util.concurrent.atomic.AtomicBoolean; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.Robolectric; import org.robolectric.annotation.Config; @Config(manifest = Config.NONE) @RunWith(AndroidJUnit4.class) public class FlutterFragmentTest { private final Context ctx = ApplicationProvider.getApplicationContext(); boolean isDelegateAttached; class TestDelegateFactory implements FlutterActivityAndFragmentDelegate.DelegateFactory { FlutterActivityAndFragmentDelegate delegate; TestDelegateFactory(FlutterActivityAndFragmentDelegate delegate) { this.delegate = delegate; } public FlutterActivityAndFragmentDelegate createDelegate( FlutterActivityAndFragmentDelegate.Host host) { return delegate; } } @Test public void itCreatesDefaultFragmentWithExpectedDefaults() { FlutterFragment fragment = FlutterFragment.createDefault(); TestDelegateFactory delegateFactory = new TestDelegateFactory(new FlutterActivityAndFragmentDelegate(fragment)); fragment.setDelegateFactory(delegateFactory); assertEquals("main", fragment.getDartEntrypointFunctionName()); assertNull(fragment.getDartEntrypointLibraryUri()); assertNull(fragment.getDartEntrypointArgs()); assertEquals("/", fragment.getInitialRoute()); assertArrayEquals(new String[] {}, fragment.getFlutterShellArgs().toArray()); assertTrue(fragment.shouldAttachEngineToActivity()); assertFalse(fragment.shouldHandleDeeplinking()); assertNull(fragment.getCachedEngineId()); assertTrue(fragment.shouldDestroyEngineWithHost()); assertEquals(RenderMode.surface, fragment.getRenderMode()); assertEquals(TransparencyMode.transparent, fragment.getTransparencyMode()); assertFalse(fragment.shouldDelayFirstAndroidViewDraw()); } @Test public void itCreatesNewEngineFragmentWithRequestedSettings() { FlutterFragment fragment = FlutterFragment.withNewEngine() .dartEntrypoint("custom_entrypoint") .dartLibraryUri("package:foo/bar.dart") .dartEntrypointArgs(new ArrayList<String>(Arrays.asList("foo", "bar"))) .initialRoute("/custom/route") .shouldAttachEngineToActivity(false) .handleDeeplinking(true) .renderMode(RenderMode.texture) .transparencyMode(TransparencyMode.opaque) .build(); TestDelegateFactory delegateFactory = new TestDelegateFactory(new FlutterActivityAndFragmentDelegate(fragment)); fragment.setDelegateFactory(delegateFactory); assertEquals("custom_entrypoint", fragment.getDartEntrypointFunctionName()); 
assertEquals("package:foo/bar.dart", fragment.getDartEntrypointLibraryUri()); assertEquals("/custom/route", fragment.getInitialRoute()); assertArrayEquals(new String[] {"foo", "bar"}, fragment.getDartEntrypointArgs().toArray()); assertArrayEquals(new String[] {}, fragment.getFlutterShellArgs().toArray()); assertFalse(fragment.shouldAttachEngineToActivity()); assertTrue(fragment.shouldHandleDeeplinking()); assertNull(fragment.getCachedEngineId()); assertTrue(fragment.shouldDestroyEngineWithHost()); assertEquals(RenderMode.texture, fragment.getRenderMode()); assertEquals(TransparencyMode.opaque, fragment.getTransparencyMode()); } @Test public void itCreatesNewEngineInGroupFragmentWithRequestedSettings() { FlutterFragment fragment = FlutterFragment.withNewEngineInGroup("my_cached_engine_group") .dartEntrypoint("custom_entrypoint") .initialRoute("/custom/route") .shouldAttachEngineToActivity(false) .handleDeeplinking(true) .renderMode(RenderMode.texture) .transparencyMode(TransparencyMode.opaque) .build(); TestDelegateFactory delegateFactory = new TestDelegateFactory(new FlutterActivityAndFragmentDelegate(fragment)); fragment.setDelegateFactory(delegateFactory); assertEquals("my_cached_engine_group", fragment.getCachedEngineGroupId()); assertEquals("custom_entrypoint", fragment.getDartEntrypointFunctionName()); assertEquals("/custom/route", fragment.getInitialRoute()); assertArrayEquals(new String[] {}, fragment.getFlutterShellArgs().toArray()); assertFalse(fragment.shouldAttachEngineToActivity()); assertTrue(fragment.shouldHandleDeeplinking()); assertNull(fragment.getCachedEngineId()); assertTrue(fragment.shouldDestroyEngineWithHost()); assertEquals(RenderMode.texture, fragment.getRenderMode()); assertEquals(TransparencyMode.opaque, fragment.getTransparencyMode()); } @Test public void itCreatesNewEngineFragmentThatDelaysFirstDrawWhenRequested() { FlutterFragment fragment = FlutterFragment.withNewEngine().shouldDelayFirstAndroidViewDraw(true).build(); assertNotNull(fragment.shouldDelayFirstAndroidViewDraw()); } @Test public void itCreatesCachedEngineFragmentWithExpectedDefaults() { FlutterFragment fragment = FlutterFragment.withCachedEngine("my_cached_engine").build(); assertTrue(fragment.shouldAttachEngineToActivity()); assertEquals("my_cached_engine", fragment.getCachedEngineId()); assertFalse(fragment.shouldDestroyEngineWithHost()); assertFalse(fragment.shouldDelayFirstAndroidViewDraw()); } @Test public void itCreatesCachedEngineFragmentThatDestroysTheEngine() { FlutterFragment fragment = FlutterFragment.withCachedEngine("my_cached_engine") .destroyEngineWithFragment(true) .build(); assertTrue(fragment.shouldAttachEngineToActivity()); assertEquals("my_cached_engine", fragment.getCachedEngineId()); assertTrue(fragment.shouldDestroyEngineWithHost()); } @Test public void itCreatesCachedEngineFragmentThatDelaysFirstDrawWhenRequested() { FlutterFragment fragment = FlutterFragment.withCachedEngine("my_cached_engine") .shouldDelayFirstAndroidViewDraw(true) .build(); assertNotNull(fragment.shouldDelayFirstAndroidViewDraw()); } @Test public void itCanBeDetachedFromTheEngineAndStopSendingFurtherEvents() { FlutterActivityAndFragmentDelegate mockDelegate = mock(FlutterActivityAndFragmentDelegate.class); TestDelegateFactory delegateFactory = new TestDelegateFactory(mockDelegate); FlutterFragment fragment = FlutterFragment.withCachedEngine("my_cached_engine") .destroyEngineWithFragment(true) .build(); isDelegateAttached = true; when(mockDelegate.isAttached()).thenAnswer(invocation -> isDelegateAttached); 
doAnswer(invocation -> isDelegateAttached = false).when(mockDelegate).onDetach(); fragment.setDelegateFactory(delegateFactory); fragment.onStart(); fragment.onResume(); fragment.onPostResume(); verify(mockDelegate, times(1)).onStart(); verify(mockDelegate, times(1)).onResume(); verify(mockDelegate, times(1)).onPostResume(); fragment.onPause(); fragment.detachFromFlutterEngine(); verify(mockDelegate, times(1)).onPause(); verify(mockDelegate, times(1)).onDestroyView(); verify(mockDelegate, times(1)).onDetach(); fragment.onStop(); verify(mockDelegate, never()).onStop(); fragment.onStart(); fragment.onResume(); fragment.onPostResume(); // No more events through to the delegate. verify(mockDelegate, times(1)).onStart(); verify(mockDelegate, times(1)).onResume(); verify(mockDelegate, times(1)).onPostResume(); fragment.onDestroy(); // 1 time same as before. verify(mockDelegate, times(1)).onDestroyView(); verify(mockDelegate, times(1)).onDetach(); } @Test public void itDoesNotReleaseEnginewhenDetachFromFlutterEngine() { FlutterActivityAndFragmentDelegate mockDelegate = mock(FlutterActivityAndFragmentDelegate.class); isDelegateAttached = true; when(mockDelegate.isAttached()).thenAnswer(invocation -> isDelegateAttached); doAnswer(invocation -> isDelegateAttached = false).when(mockDelegate).onDetach(); TestDelegateFactory delegateFactory = new TestDelegateFactory(mockDelegate); FlutterFragment fragment = FlutterFragment.withCachedEngine("my_cached_engine") .destroyEngineWithFragment(true) .build(); fragment.setDelegateFactory(delegateFactory); fragment.onStart(); fragment.onResume(); fragment.onPostResume(); fragment.onPause(); assertTrue(mockDelegate.isAttached()); fragment.detachFromFlutterEngine(); verify(mockDelegate, times(1)).onDetach(); verify(mockDelegate, never()).release(); assertFalse(mockDelegate.isAttached()); } @Test public void itReleaseEngineWhenOnDetach() { FlutterActivityAndFragmentDelegate mockDelegate = mock(FlutterActivityAndFragmentDelegate.class); isDelegateAttached = true; when(mockDelegate.isAttached()).thenAnswer(invocation -> isDelegateAttached); doAnswer(invocation -> isDelegateAttached = false).when(mockDelegate).onDetach(); TestDelegateFactory delegateFactory = new TestDelegateFactory(mockDelegate); FlutterFragment fragment = spy( FlutterFragment.withCachedEngine("my_cached_engine") .destroyEngineWithFragment(true) .build()); when(fragment.getContext()).thenReturn(mock(Context.class)); fragment.setDelegateFactory(delegateFactory); fragment.onStart(); fragment.onResume(); fragment.onPostResume(); fragment.onPause(); assertTrue(mockDelegate.isAttached()); fragment.onDetach(); verify(mockDelegate, times(1)).onDetach(); verify(mockDelegate, times(1)).release(); assertFalse(mockDelegate.isAttached()); } @Test public void itReturnsExclusiveAppComponent() { FlutterFragment fragment = FlutterFragment.createDefault(); FlutterActivityAndFragmentDelegate delegate = new FlutterActivityAndFragmentDelegate(fragment); TestDelegateFactory delegateFactory = new TestDelegateFactory(delegate); fragment.setDelegateFactory(delegateFactory); assertEquals(fragment.getExclusiveAppComponent(), delegate); } @SuppressWarnings("deprecation") private FragmentActivity getMockFragmentActivity() { // TODO(reidbaker): https://github.com/flutter/flutter/issues/133151 return Robolectric.setupActivity(FragmentActivity.class); } @Test public void itDelegatesOnBackPressedAutomaticallyWhenEnabled() { // We need to mock FlutterJNI to avoid triggering native code. 
FlutterJNI flutterJNI = mock(FlutterJNI.class); when(flutterJNI.isAttached()).thenReturn(true); FlutterEngine flutterEngine = new FlutterEngine(ctx, new FlutterLoader(), flutterJNI, null, false); FlutterEngineCache.getInstance().put("my_cached_engine", flutterEngine); FlutterFragment fragment = FlutterFragment.withCachedEngine("my_cached_engine") .shouldAutomaticallyHandleOnBackPressed(true) .build(); FragmentActivity activity = getMockFragmentActivity(); activity .getSupportFragmentManager() .beginTransaction() .add(android.R.id.content, fragment) .commitNow(); FlutterActivityAndFragmentDelegate mockDelegate = mock(FlutterActivityAndFragmentDelegate.class); isDelegateAttached = true; when(mockDelegate.isAttached()).thenAnswer(invocation -> isDelegateAttached); doAnswer(invocation -> isDelegateAttached = false).when(mockDelegate).onDetach(); TestDelegateFactory delegateFactory = new TestDelegateFactory(mockDelegate); fragment.setDelegateFactory(delegateFactory); activity.onBackPressed(); verify(mockDelegate, times(1)).onBackPressed(); } @SuppressWarnings("deprecation") // Robolectric.setupActivity // TODO(reidbaker): https://github.com/flutter/flutter/issues/133151 @Test public void itHandlesPopSystemNavigationAutomaticallyWhenEnabled() { // We need to mock FlutterJNI to avoid triggering native code. FlutterJNI flutterJNI = mock(FlutterJNI.class); when(flutterJNI.isAttached()).thenReturn(true); FlutterEngine flutterEngine = new FlutterEngine(ctx, new FlutterLoader(), flutterJNI, null, false); FlutterEngineCache.getInstance().put("my_cached_engine", flutterEngine); FlutterFragment fragment = FlutterFragment.withCachedEngine("my_cached_engine") .shouldAutomaticallyHandleOnBackPressed(true) .build(); FragmentActivity activity = getMockFragmentActivity(); activity .getSupportFragmentManager() .beginTransaction() .add(android.R.id.content, fragment) .commitNow(); final AtomicBoolean onBackPressedCalled = new AtomicBoolean(false); OnBackPressedCallback callback = new OnBackPressedCallback(true) { @Override public void handleOnBackPressed() { onBackPressedCalled.set(true); } }; activity.getOnBackPressedDispatcher().addCallback(callback); FlutterActivityAndFragmentDelegate mockDelegate = mock(FlutterActivityAndFragmentDelegate.class); TestDelegateFactory delegateFactory = new TestDelegateFactory(mockDelegate); fragment.setDelegateFactory(delegateFactory); assertTrue(fragment.popSystemNavigator()); verify(mockDelegate, never()).onBackPressed(); assertTrue(onBackPressedCalled.get()); } @Test public void itRegistersComponentCallbacks() { FlutterActivityAndFragmentDelegate mockDelegate = mock(FlutterActivityAndFragmentDelegate.class); isDelegateAttached = true; when(mockDelegate.isAttached()).thenAnswer(invocation -> isDelegateAttached); doAnswer(invocation -> isDelegateAttached = false).when(mockDelegate).onDetach(); TestDelegateFactory delegateFactory = new TestDelegateFactory(mockDelegate); Context spyCtx = spy(ctx); // We need to mock FlutterJNI to avoid triggering native code. 
FlutterJNI flutterJNI = mock(FlutterJNI.class); when(flutterJNI.isAttached()).thenReturn(true); FlutterEngine flutterEngine = new FlutterEngine(spyCtx, new FlutterLoader(), flutterJNI, null, false); FlutterEngineCache.getInstance().put("my_cached_engine", flutterEngine); FlutterFragment fragment = spy(FlutterFragment.withCachedEngine("my_cached_engine").build()); when(fragment.getContext()).thenReturn(spyCtx); fragment.setDelegateFactory(delegateFactory); fragment.onAttach(spyCtx); verify(spyCtx, times(1)).registerComponentCallbacks(any()); verify(spyCtx, never()).unregisterComponentCallbacks(any()); fragment.onDetach(); verify(spyCtx, times(1)).registerComponentCallbacks(any()); verify(spyCtx, times(1)).unregisterComponentCallbacks(any()); } }
engine/shell/platform/android/test/io/flutter/embedding/android/FlutterFragmentTest.java/0
{ "file_path": "engine/shell/platform/android/test/io/flutter/embedding/android/FlutterFragmentTest.java", "repo_id": "engine", "token_count": 5592 }
325
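The FlutterFragment tests above revolve around the cached-engine builder. Below is a hedged sketch of the flow they assume: pre-warm an engine, register it in FlutterEngineCache, then build the fragment against the same id. The engine id and call site (for example Application.onCreate) are assumptions, and the Dart entrypoint call comes from the broader embedding API rather than this test file.

import android.content.Context;
import io.flutter.embedding.android.FlutterFragment;
import io.flutter.embedding.engine.FlutterEngine;
import io.flutter.embedding.engine.FlutterEngineCache;
import io.flutter.embedding.engine.dart.DartExecutor;

// Sketch of the cached-engine flow exercised by the tests above. The engine id
// and the place this runs are assumptions.
final class CachedEngineSetup {
  static final String ENGINE_ID = "my_cached_engine";

  static void preWarm(Context appContext) {
    FlutterEngine engine = new FlutterEngine(appContext);
    // Start executing Dart so the engine is warm before the fragment attaches.
    engine.getDartExecutor()
        .executeDartEntrypoint(DartExecutor.DartEntrypoint.createDefault());
    FlutterEngineCache.getInstance().put(ENGINE_ID, engine);
  }

  static FlutterFragment createFragment() {
    // Mirrors FlutterFragment.withCachedEngine(...) usage from the tests above.
    return FlutterFragment.withCachedEngine(ENGINE_ID)
        .destroyEngineWithFragment(false)
        .build();
  }
}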
package io.flutter.embedding.engine.dart; import static android.os.Looper.getMainLooper; import static junit.framework.TestCase.assertEquals; import static junit.framework.TestCase.assertNotNull; import static junit.framework.TestCase.assertTrue; import static org.junit.Assert.assertArrayEquals; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.robolectric.Shadows.shadowOf; import androidx.test.ext.junit.runners.AndroidJUnit4; import io.flutter.embedding.engine.FlutterJNI; import io.flutter.embedding.engine.dart.DartMessenger.DartMessengerTaskQueue; import io.flutter.plugin.common.BinaryMessenger; import io.flutter.plugin.common.BinaryMessenger.BinaryMessageHandler; import java.nio.ByteBuffer; import java.util.LinkedList; import java.util.Random; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.Mockito; import org.robolectric.annotation.Config; @Config(manifest = Config.NONE) @RunWith(AndroidJUnit4.class) public class DartMessengerTest { SynchronousTaskQueue synchronousTaskQueue = new SynchronousTaskQueue(); private static class ReportingUncaughtExceptionHandler implements Thread.UncaughtExceptionHandler { public Throwable latestException; @Override public void uncaughtException(Thread t, Throwable e) { latestException = e; } } private static class SynchronousTaskQueue implements DartMessengerTaskQueue { public void dispatch(Runnable runnable) { runnable.run(); } } @Test public void itHandlesErrors() { // Setup test. final FlutterJNI fakeFlutterJni = mock(FlutterJNI.class); final Thread currentThread = Thread.currentThread(); final Thread.UncaughtExceptionHandler savedHandler = currentThread.getUncaughtExceptionHandler(); final ReportingUncaughtExceptionHandler reportingHandler = new ReportingUncaughtExceptionHandler(); currentThread.setUncaughtExceptionHandler(reportingHandler); // Create object under test. final DartMessenger messenger = new DartMessenger(fakeFlutterJni, (options) -> synchronousTaskQueue); final BinaryMessageHandler throwingHandler = mock(BinaryMessageHandler.class); Mockito.doThrow(AssertionError.class) .when(throwingHandler) .onMessage(any(ByteBuffer.class), any(DartMessenger.Reply.class)); BinaryMessenger.TaskQueue taskQueue = messenger.makeBackgroundTaskQueue(); messenger.setMessageHandler("test", throwingHandler, taskQueue); messenger.handleMessageFromDart("test", ByteBuffer.allocate(0), 0, 0); assertNotNull(reportingHandler.latestException); assertTrue(reportingHandler.latestException instanceof AssertionError); currentThread.setUncaughtExceptionHandler(savedHandler); } @Test public void givesDirectByteBuffer() { // Setup test. 
final FlutterJNI fakeFlutterJni = mock(FlutterJNI.class); final DartMessenger messenger = new DartMessenger(fakeFlutterJni, (options) -> synchronousTaskQueue); final String channel = "foobar"; final boolean[] wasDirect = {false}; final BinaryMessenger.BinaryMessageHandler handler = (message, reply) -> { wasDirect[0] = message.isDirect(); }; BinaryMessenger.TaskQueue taskQueue = messenger.makeBackgroundTaskQueue(); messenger.setMessageHandler(channel, handler, taskQueue); final ByteBuffer message = ByteBuffer.allocateDirect(4 * 2); message.rewind(); message.putChar('a'); message.putChar('b'); message.putChar('c'); message.putChar('d'); messenger.handleMessageFromDart(channel, message, /*replyId=*/ 123, 0); assertTrue(wasDirect[0]); } @Test public void directByteBufferLimitZeroAfterUsage() { // Setup test. final FlutterJNI fakeFlutterJni = mock(FlutterJNI.class); final DartMessenger messenger = new DartMessenger(fakeFlutterJni, (options) -> synchronousTaskQueue); final String channel = "foobar"; final ByteBuffer[] byteBuffers = {null}; final int bufferSize = 4 * 2; final BinaryMessenger.BinaryMessageHandler handler = (message, reply) -> { byteBuffers[0] = message; assertEquals(bufferSize, byteBuffers[0].limit()); }; BinaryMessenger.TaskQueue taskQueue = messenger.makeBackgroundTaskQueue(); messenger.setMessageHandler(channel, handler, taskQueue); final ByteBuffer message = ByteBuffer.allocateDirect(bufferSize); message.rewind(); message.putChar('a'); message.putChar('b'); message.putChar('c'); message.putChar('d'); messenger.handleMessageFromDart(channel, message, /*replyId=*/ 123, 0); assertNotNull(byteBuffers[0]); assertTrue(byteBuffers[0].isDirect()); assertEquals(0, byteBuffers[0].limit()); } @Test public void directByteBufferLimitZeroAfterReply() { // Setup test. final FlutterJNI fakeFlutterJni = mock(FlutterJNI.class); final DartMessenger messenger = new DartMessenger(fakeFlutterJni); final ByteBuffer message = ByteBuffer.allocateDirect(4 * 2); final String channel = "foobar"; message.rewind(); message.putChar('a'); message.putChar('b'); message.putChar('c'); message.putChar('d'); final ByteBuffer[] byteBuffers = {null}; BinaryMessenger.BinaryReply callback = (reply) -> { assertTrue(reply.isDirect()); byteBuffers[0] = reply; }; messenger.send(channel, null, callback); messenger.handlePlatformMessageResponse(1, message); assertEquals(0, byteBuffers[0].limit()); } @Test public void replyIdIncrementsOnNullReply() { /// Setup test. 
final FlutterJNI fakeFlutterJni = mock(FlutterJNI.class); final DartMessenger messenger = new DartMessenger(fakeFlutterJni); final String channel = "foobar"; messenger.send(channel, null, null); verify(fakeFlutterJni, times(1)).dispatchEmptyPlatformMessage(eq("foobar"), eq(1)); messenger.send(channel, null, null); verify(fakeFlutterJni, times(1)).dispatchEmptyPlatformMessage(eq("foobar"), eq(2)); } @Test public void cleansUpMessageData() throws InterruptedException { final FlutterJNI fakeFlutterJni = mock(FlutterJNI.class); final DartMessenger messenger = new DartMessenger(fakeFlutterJni, (options) -> synchronousTaskQueue); BinaryMessenger.TaskQueue taskQueue = messenger.makeBackgroundTaskQueue(); String channel = "foobar"; BinaryMessenger.BinaryMessageHandler handler = (ByteBuffer message, BinaryMessenger.BinaryReply reply) -> { reply.reply(null); }; messenger.setMessageHandler(channel, handler, taskQueue); final ByteBuffer message = ByteBuffer.allocateDirect(4 * 2); final int replyId = 1; final long messageData = 1234; messenger.handleMessageFromDart(channel, message, replyId, messageData); verify(fakeFlutterJni).cleanupMessageData(eq(messageData)); } @Test public void cleansUpMessageDataOnError() throws InterruptedException { final FlutterJNI fakeFlutterJni = mock(FlutterJNI.class); final DartMessenger messenger = new DartMessenger(fakeFlutterJni, (options) -> synchronousTaskQueue); BinaryMessenger.TaskQueue taskQueue = messenger.makeBackgroundTaskQueue(); String channel = "foobar"; BinaryMessenger.BinaryMessageHandler handler = (ByteBuffer message, BinaryMessenger.BinaryReply reply) -> { throw new RuntimeException("hello"); }; messenger.setMessageHandler(channel, handler, taskQueue); final ByteBuffer message = ByteBuffer.allocateDirect(4 * 2); final int replyId = 1; final long messageData = 1234; messenger.handleMessageFromDart(channel, message, replyId, messageData); verify(fakeFlutterJni).cleanupMessageData(eq(messageData)); } @Test public void emptyResponseWhenHandlerIsNotSet() throws InterruptedException { final FlutterJNI fakeFlutterJni = mock(FlutterJNI.class); final DartMessenger messenger = new DartMessenger(fakeFlutterJni, (options) -> synchronousTaskQueue); final String channel = "foobar"; final ByteBuffer message = ByteBuffer.allocateDirect(4 * 2); final int replyId = 1; final long messageData = 1234; messenger.handleMessageFromDart(channel, message, replyId, messageData); shadowOf(getMainLooper()).idle(); verify(fakeFlutterJni).invokePlatformMessageEmptyResponseCallback(replyId); } @Test public void buffersResponseWhenHandlerIsNotSet() throws InterruptedException { final FlutterJNI fakeFlutterJni = mock(FlutterJNI.class); final DartMessenger messenger = new DartMessenger(fakeFlutterJni, (options) -> synchronousTaskQueue); final BinaryMessenger.TaskQueue taskQueue = messenger.makeBackgroundTaskQueue(); final String channel = "foobar"; final ByteBuffer message = ByteBuffer.allocateDirect(4 * 2); final int replyId = 1; final long messageData = 1234; messenger.enableBufferingIncomingMessages(); messenger.handleMessageFromDart(channel, message, replyId, messageData); shadowOf(getMainLooper()).idle(); verify(fakeFlutterJni, never()).invokePlatformMessageEmptyResponseCallback(eq(replyId)); final BinaryMessenger.BinaryMessageHandler handler = (ByteBuffer msg, BinaryMessenger.BinaryReply reply) -> { reply.reply(ByteBuffer.wrap("done".getBytes())); }; messenger.setMessageHandler(channel, handler, taskQueue); shadowOf(getMainLooper()).idle(); verify(fakeFlutterJni, 
never()).invokePlatformMessageEmptyResponseCallback(eq(replyId)); final ArgumentCaptor<ByteBuffer> response = ArgumentCaptor.forClass(ByteBuffer.class); verify(fakeFlutterJni) .invokePlatformMessageResponseCallback(anyInt(), response.capture(), anyInt()); assertArrayEquals("done".getBytes(), response.getValue().array()); } @Test public void disableBufferingTriggersEmptyResponseForPendingMessages() throws InterruptedException { final FlutterJNI fakeFlutterJni = mock(FlutterJNI.class); final DartMessenger messenger = new DartMessenger(fakeFlutterJni, (options) -> synchronousTaskQueue); final String channel = "foobar"; final ByteBuffer message = ByteBuffer.allocateDirect(4 * 2); final int replyId = 1; final long messageData = 1234; messenger.enableBufferingIncomingMessages(); messenger.handleMessageFromDart(channel, message, replyId, messageData); shadowOf(getMainLooper()).idle(); verify(fakeFlutterJni, never()).invokePlatformMessageEmptyResponseCallback(replyId); messenger.disableBufferingIncomingMessages(); shadowOf(getMainLooper()).idle(); verify(fakeFlutterJni).invokePlatformMessageEmptyResponseCallback(replyId); } @Test public void emptyResponseWhenHandlerIsUnregistered() throws InterruptedException { final FlutterJNI fakeFlutterJni = mock(FlutterJNI.class); final DartMessenger messenger = new DartMessenger(fakeFlutterJni, (options) -> synchronousTaskQueue); final BinaryMessenger.TaskQueue taskQueue = messenger.makeBackgroundTaskQueue(); final String channel = "foobar"; final ByteBuffer message = ByteBuffer.allocateDirect(4 * 2); final int replyId = 1; final long messageData = 1234; messenger.enableBufferingIncomingMessages(); messenger.handleMessageFromDart(channel, message, replyId, messageData); shadowOf(getMainLooper()).idle(); verify(fakeFlutterJni, never()).invokePlatformMessageEmptyResponseCallback(eq(replyId)); final BinaryMessenger.BinaryMessageHandler handler = (ByteBuffer msg, BinaryMessenger.BinaryReply reply) -> { reply.reply(ByteBuffer.wrap("done".getBytes())); }; messenger.setMessageHandler(channel, handler, taskQueue); shadowOf(getMainLooper()).idle(); verify(fakeFlutterJni, never()).invokePlatformMessageEmptyResponseCallback(eq(replyId)); final ArgumentCaptor<ByteBuffer> response = ArgumentCaptor.forClass(ByteBuffer.class); verify(fakeFlutterJni) .invokePlatformMessageResponseCallback(anyInt(), response.capture(), anyInt()); assertArrayEquals("done".getBytes(), response.getValue().array()); messenger.disableBufferingIncomingMessages(); messenger.setMessageHandler(channel, null, null); // Unregister handler. messenger.handleMessageFromDart(channel, message, replyId, messageData); shadowOf(getMainLooper()).idle(); verify(fakeFlutterJni).invokePlatformMessageEmptyResponseCallback(replyId); } @Test public void testSerialTaskQueue() throws InterruptedException { final FlutterJNI fakeFlutterJni = mock(FlutterJNI.class); final DartMessenger messenger = new DartMessenger(fakeFlutterJni); final ExecutorService taskQueuePool = Executors.newFixedThreadPool(4); final DartMessengerTaskQueue taskQueue = new DartMessenger.SerialTaskQueue(taskQueuePool); final int count = 5000; final LinkedList<Integer> ints = new LinkedList<>(); Random rand = new Random(); for (int i = 0; i < count; ++i) { final int value = i; taskQueue.dispatch( () -> { try { Thread.sleep(rand.nextInt(10)); } catch (InterruptedException ex) { System.out.println(ex.toString()); } ints.add(value); }); taskQueuePool.execute( () -> { // Add some extra noise to make sure we aren't always handling on the same thread. 
try { Thread.sleep(rand.nextInt(10)); } catch (InterruptedException ex) { System.out.println(ex.toString()); } }); } CountDownLatch latch = new CountDownLatch(1); taskQueue.dispatch( () -> { latch.countDown(); }); latch.await(); assertEquals(count, ints.size()); for (int i = 0; i < count - 1; ++i) { assertEquals((int) ints.get(i), (int) (ints.get(i + 1)) - 1); } } }
engine/shell/platform/android/test/io/flutter/embedding/engine/dart/DartMessengerTest.java/0
{ "file_path": "engine/shell/platform/android/test/io/flutter/embedding/engine/dart/DartMessengerTest.java", "repo_id": "engine", "token_count": 4920 }
326
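testSerialTaskQueue above verifies that DartMessenger.SerialTaskQueue runs dispatched tasks in order, one at a time, even though they execute on a shared thread pool. The class below is a simplified, self-contained illustration of that idea — not the engine's implementation: each task is chained after the previous one, and a final latch task marks completion the same way the test does.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Simplified illustration of a serial task queue layered on a shared thread
// pool: tasks run in dispatch order, one at a time, though not necessarily on
// the same thread. This is not DartMessenger.SerialTaskQueue itself.
final class SimpleSerialTaskQueue {
  private final ExecutorService pool;
  private CompletableFuture<Void> tail = CompletableFuture.completedFuture(null);

  SimpleSerialTaskQueue(ExecutorService pool) {
    this.pool = pool;
  }

  synchronized void dispatch(Runnable task) {
    // Chain each task after the previous one so only one runs at a time.
    tail = tail.thenRunAsync(task, pool);
  }

  public static void main(String[] args) throws Exception {
    ExecutorService pool = Executors.newFixedThreadPool(4);
    SimpleSerialTaskQueue queue = new SimpleSerialTaskQueue(pool);
    for (int i = 0; i < 10; i++) {
      final int value = i;
      queue.dispatch(() -> System.out.println("task " + value));
    }
    // Same trick as the test above: a final task releases a latch once
    // everything dispatched before it has run.
    CountDownLatch done = new CountDownLatch(1);
    queue.dispatch(done::countDown);
    done.await();
    pool.shutdown();
  }
}

Chaining futures preserves ordering without pinning work to a dedicated thread, which is the property the test asserts by checking that the collected integers come out consecutive.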
package io.flutter.embedding.engine.systemchannels; import static io.flutter.Build.API_LEVELS; import static junit.framework.TestCase.assertEquals; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.isNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; import android.annotation.TargetApi; import android.util.DisplayMetrics; import androidx.test.ext.junit.runners.AndroidJUnit4; import io.flutter.embedding.engine.dart.DartExecutor; import io.flutter.plugin.common.BasicMessageChannel; import java.nio.ByteBuffer; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.robolectric.annotation.Config; @Config(manifest = Config.NONE) @RunWith(AndroidJUnit4.class) public class SettingsChannelTest { @Test @TargetApi(API_LEVELS.API_33) @Config(sdk = API_LEVELS.API_33) @SuppressWarnings("deprecation") // DartExecutor.send is deprecated. public void setDisplayMetricsDoesNothingOnAPILevel33() { final DartExecutor executor = mock(DartExecutor.class); executor.onAttachedToJNI(); final SettingsChannel settingsChannel = new SettingsChannel(executor); final ArgumentCaptor<ByteBuffer> messageCaptor = ArgumentCaptor.forClass(ByteBuffer.class); settingsChannel.startMessage().setDisplayMetrics(mock(DisplayMetrics.class)).send(); verify(executor).send(eq("flutter/settings"), messageCaptor.capture(), isNull()); } @Test public void configurationQueueWorks() { final SettingsChannel.ConfigurationQueue queue = new SettingsChannel.ConfigurationQueue(); final int baseId = Integer.MIN_VALUE; queue.enqueueConfiguration( new SettingsChannel.ConfigurationQueue.SentConfiguration(mock(DisplayMetrics.class))); queue.enqueueConfiguration( new SettingsChannel.ConfigurationQueue.SentConfiguration(mock(DisplayMetrics.class))); assertEquals(baseId + 0, queue.getConfiguration(baseId + 0).generationNumber); assertEquals(baseId + 1, queue.getConfiguration(baseId + 1).generationNumber); assertEquals(baseId + 1, queue.getConfiguration(baseId + 1).generationNumber); queue.enqueueConfiguration( new SettingsChannel.ConfigurationQueue.SentConfiguration(mock(DisplayMetrics.class))); queue.enqueueConfiguration( new SettingsChannel.ConfigurationQueue.SentConfiguration(mock(DisplayMetrics.class))); assertEquals(baseId + 3, queue.getConfiguration(baseId + 3).generationNumber); // Can get the same configuration more than once. assertEquals(baseId + 3, queue.getConfiguration(baseId + 3).generationNumber); final BasicMessageChannel.Reply replyFor4 = queue.enqueueConfiguration( new SettingsChannel.ConfigurationQueue.SentConfiguration(mock(DisplayMetrics.class))); final BasicMessageChannel.Reply replyFor5 = queue.enqueueConfiguration( new SettingsChannel.ConfigurationQueue.SentConfiguration(mock(DisplayMetrics.class))); replyFor4.reply(null); replyFor5.reply(null); assertEquals(baseId + 5, queue.getConfiguration(baseId + 5).generationNumber); assertEquals(baseId + 5, queue.getConfiguration(baseId + 5).generationNumber); } // TODO(LongCatIsLooong): add tests for API 34 code path. // https://github.com/flutter/flutter/issues/128825 }
engine/shell/platform/android/test/io/flutter/embedding/engine/systemchannels/SettingsChannelTest.java/0
{ "file_path": "engine/shell/platform/android/test/io/flutter/embedding/engine/systemchannels/SettingsChannelTest.java", "repo_id": "engine", "token_count": 1077 }
327
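The configurationQueueWorks test above fixes the observable contract of SettingsChannel.ConfigurationQueue: generation numbers start at Integer.MIN_VALUE and increase by one per enqueued configuration, older generations are discarded once a newer one is requested, and the most recently fetched generation can be fetched again. The engine class itself is not reproduced here; the following is only a minimal stand-in sketch of that contract (the GenerationalQueueSketch and Entry names, the ArrayDeque representation, and the return type of enqueueConfiguration are invented for illustration):

import java.util.ArrayDeque;

// Illustrative stand-in for the queue contract exercised by configurationQueueWorks;
// not the engine's SettingsChannel.ConfigurationQueue implementation.
final class GenerationalQueueSketch {
  static final class Entry {
    final int generationNumber;

    Entry(int generationNumber) {
      this.generationNumber = generationNumber;
    }
  }

  private final ArrayDeque<Entry> sentQueue = new ArrayDeque<>();
  private int nextGeneration = Integer.MIN_VALUE;
  private Entry current;

  /** Assigns the next generation number to a newly sent configuration. */
  Entry enqueueConfiguration() {
    Entry entry = new Entry(nextGeneration++);
    sentQueue.addLast(entry);
    return entry;
  }

  /** Returns the entry for {@code generation}, discarding anything older. */
  Entry getConfiguration(int generation) {
    if (current != null && current.generationNumber == generation) {
      // The same generation may be fetched more than once.
      return current;
    }
    while (!sentQueue.isEmpty()) {
      Entry candidate = sentQueue.pollFirst();
      if (candidate.generationNumber == generation) {
        current = candidate;
        return candidate;
      }
      // Older generations are dropped once a newer one is requested.
    }
    return null;
  }

  public static void main(String[] args) {
    GenerationalQueueSketch queue = new GenerationalQueueSketch();
    int baseId = Integer.MIN_VALUE;
    queue.enqueueConfiguration();
    queue.enqueueConfiguration();
    // Both lookups of the newest generation succeed; the older one is discarded.
    System.out.println(queue.getConfiguration(baseId + 1).generationNumber == baseId + 1);
    System.out.println(queue.getConfiguration(baseId + 1).generationNumber == baseId + 1);
  }
}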
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package io.flutter.plugin.platform;

import static android.os.Looper.getMainLooper;
import static io.flutter.Build.API_LEVELS;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
import static org.robolectric.Shadows.shadowOf;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.PorterDuff;
import android.media.Image;
import android.view.Surface;
import android.view.View;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import io.flutter.view.TextureRegistry.ImageTextureEntry;
import org.junit.Test;
import org.junit.runner.RunWith;

@TargetApi(API_LEVELS.API_29)
@RunWith(AndroidJUnit4.class)
public class ImageReaderPlatformViewRenderTargetTest {
  private final Context ctx = ApplicationProvider.getApplicationContext();

  class TestImageTextureEntry implements ImageTextureEntry {
    private Image lastPushedImage;

    public long id() {
      return 1;
    }

    public void release() {
      if (this.lastPushedImage != null) {
        this.lastPushedImage.close();
      }
    }

    public void pushImage(Image image) {
      if (this.lastPushedImage != null) {
        this.lastPushedImage.close();
      }
      this.lastPushedImage = image;
    }

    public Image acquireLatestImage() {
      Image r = this.lastPushedImage;
      this.lastPushedImage = null;
      return r;
    }
  }

  @Test
  public void viewDraw_writesToBuffer() {
    final TestImageTextureEntry textureEntry = new TestImageTextureEntry();
    final ImageReaderPlatformViewRenderTarget renderTarget =
        new ImageReaderPlatformViewRenderTarget(textureEntry);
    // Custom view.
    final View platformView =
        new View(ctx) {
          @Override
          public void draw(Canvas canvas) {
            super.draw(canvas);
            canvas.drawColor(Color.RED);
          }
        };
    final int size = 100;
    platformView.measure(size, size);
    platformView.layout(0, 0, size, size);
    renderTarget.resize(size, size);
    // We don't have an image in the texture entry.
    assertNull(textureEntry.acquireLatestImage());
    // Start rendering a frame.
    final Surface s = renderTarget.getSurface();
    assertNotNull(s);
    final Canvas targetCanvas = s.lockHardwareCanvas();
    assertNotNull(targetCanvas);
    try {
      // Fill the render target with transparent pixels. This is needed for platform views that
      // expect a transparent background.
      targetCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
      // Override the canvas that this subtree of views will use to draw.
      platformView.draw(targetCanvas);
    } finally {
      // Finish rendering a frame.
      s.unlockCanvasAndPost(targetCanvas);
    }
    // Pump the UI thread task loop. This is needed so that the OnImageAvailable callback
    // gets invoked (resulting in textureEntry.pushImage being invoked).
    shadowOf(getMainLooper()).idle();
    // An image was pushed into the texture entry and it has the correct dimensions.
    Image pushedImage = textureEntry.acquireLatestImage();
    assertNotNull(pushedImage);
    assertEquals(pushedImage.getWidth(), size);
    assertEquals(pushedImage.getHeight(), size);
  }
}
engine/shell/platform/android/test/io/flutter/plugin/platform/ImageReaderPlatformViewRenderTargetTest.java/0
{ "file_path": "engine/shell/platform/android/test/io/flutter/plugin/platform/ImageReaderPlatformViewRenderTargetTest.java", "repo_id": "engine", "token_count": 1215 }
328
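The viewDraw_writesToBuffer test above exercises the Surface-backed drawing loop that an ImageReader-based render target depends on: lock a hardware canvas, clear it to transparent, let the platform view draw, post the canvas, and collect the produced Image from the reader's callback on the main looper. The engine's ImageReaderPlatformViewRenderTarget is not reproduced here; the sketch below shows only that plain-Android flow, and the class name, maxImages value, and buffer usage flags are assumptions (a hardware canvas generally requires GPU-usage flags on the reader's buffers):

import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.PixelFormat;
import android.graphics.PorterDuff;
import android.hardware.HardwareBuffer;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.Looper;
import android.view.Surface;
import android.view.View;

/** Sketch: render a View into an ImageReader-backed Surface and observe the produced Image. */
final class ImageReaderRenderSketch {
  static void renderOnce(View platformView, int width, int height) {
    // GPU usage flags are assumed so the Surface can back a hardware canvas; maxImages of 2
    // lets one image be in flight while the next frame is produced. A real render target
    // would keep the reader alive across frames and close() it on dispose.
    ImageReader reader =
        ImageReader.newInstance(
            width,
            height,
            PixelFormat.RGBA_8888,
            /*maxImages=*/ 2,
            HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE | HardwareBuffer.USAGE_GPU_COLOR_OUTPUT);
    reader.setOnImageAvailableListener(
        r -> {
          Image image = r.acquireLatestImage();
          if (image != null) {
            // A real consumer would hand the image to the texture registry here.
            image.close();
          }
        },
        new Handler(Looper.getMainLooper()));

    Surface surface = reader.getSurface();
    Canvas canvas = surface.lockHardwareCanvas();
    try {
      // Clear to transparent so platform views that expect a transparent
      // background composite correctly.
      canvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR);
      platformView.draw(canvas);
    } finally {
      surface.unlockCanvasAndPost(canvas);
    }
  }
}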
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package io.flutter.view; import static io.flutter.Build.API_LEVELS; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyInt; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import android.annotation.TargetApi; import android.app.Activity; import android.content.ContentResolver; import android.content.Context; import android.content.res.Configuration; import android.content.res.Resources; import android.graphics.Rect; import android.os.Bundle; import android.text.SpannableString; import android.text.SpannedString; import android.text.style.LocaleSpan; import android.text.style.TtsSpan; import android.view.MotionEvent; import android.view.View; import android.view.ViewParent; import android.view.Window; import android.view.WindowInsets; import android.view.WindowManager; import android.view.accessibility.AccessibilityEvent; import android.view.accessibility.AccessibilityManager; import android.view.accessibility.AccessibilityNodeInfo; import androidx.test.core.app.ApplicationProvider; import androidx.test.ext.junit.runners.AndroidJUnit4; import io.flutter.embedding.engine.FlutterJNI; import io.flutter.embedding.engine.dart.DartExecutor; import io.flutter.embedding.engine.systemchannels.AccessibilityChannel; import io.flutter.plugin.common.BasicMessageChannel; import io.flutter.plugin.platform.PlatformViewsAccessibilityDelegate; import io.flutter.view.AccessibilityBridge.Action; import io.flutter.view.AccessibilityBridge.Flag; import java.nio.ByteBuffer; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.mockito.invocation.InvocationOnMock; import org.robolectric.annotation.Config; @Config(manifest = Config.NONE) @RunWith(AndroidJUnit4.class) public class AccessibilityBridgeTest { @Test public void itDescribesNonTextFieldsWithAContentDescription() { AccessibilityBridge accessibilityBridge = setUpBridge(); TestSemanticsNode testSemanticsNode = new TestSemanticsNode(); testSemanticsNode.label = "Hello, World"; TestSemanticsUpdate testSemanticsUpdate = testSemanticsNode.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); assertEquals(nodeInfo.getContentDescription().toString(), "Hello, World"); assertEquals(nodeInfo.getText(), null); } @Config(sdk = API_LEVELS.API_28) @TargetApi(API_LEVELS.API_28) @Test public void itDescribesTextFieldsWithTextAndHint() { AccessibilityBridge accessibilityBridge = setUpBridge(); TestSemanticsNode testSemanticsNode = new TestSemanticsNode(); testSemanticsNode.value = "Hello, World"; testSemanticsNode.label = "some label"; testSemanticsNode.hint = "some hint"; 
testSemanticsNode.addFlag(AccessibilityBridge.Flag.IS_TEXT_FIELD); TestSemanticsUpdate testSemanticsUpdate = testSemanticsNode.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); assertEquals(nodeInfo.getContentDescription(), null); assertEquals(nodeInfo.getText().toString(), "Hello, World"); assertEquals(nodeInfo.getHintText().toString(), "some label, some hint"); } @Test public void itTakesGlobalCoordinatesOfFlutterViewIntoAccount() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); final int position = 88; // The getBoundsInScreen() in createAccessibilityNodeInfo() needs View.getLocationOnScreen() doAnswer( invocation -> { int[] outLocation = (int[]) invocation.getArguments()[0]; outLocation[0] = position; outLocation[1] = position; return null; }) .when(mockRootView) .getLocationOnScreen(any(int[].class)); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); TestSemanticsNode testSemanticsNode = new TestSemanticsNode(); TestSemanticsUpdate testSemanticsUpdate = testSemanticsNode.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); Rect outBoundsInScreen = new Rect(); nodeInfo.getBoundsInScreen(outBoundsInScreen); assertEquals(position, outBoundsInScreen.left); assertEquals(position, outBoundsInScreen.top); } @Test public void itSetsAccessibleNavigation() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); when(mockManager.isTouchExplorationEnabled()).thenReturn(false); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ArgumentCaptor<AccessibilityManager.TouchExplorationStateChangeListener> listenerCaptor = ArgumentCaptor.forClass(AccessibilityManager.TouchExplorationStateChangeListener.class); verify(mockManager).addTouchExplorationStateChangeListener(listenerCaptor.capture()); assertEquals(accessibilityBridge.getAccessibleNavigation(), false); verify(mockChannel).setAccessibilityFeatures(0); reset(mockChannel); // Simulate assistive technology accessing accessibility tree. accessibilityBridge.createAccessibilityNodeInfo(0); verify(mockChannel).setAccessibilityFeatures(1); assertEquals(accessibilityBridge.getAccessibleNavigation(), true); // Simulate turning off TalkBack. 
reset(mockChannel); listenerCaptor.getValue().onTouchExplorationStateChanged(false); verify(mockChannel).setAccessibilityFeatures(0); assertEquals(accessibilityBridge.getAccessibleNavigation(), false); } @Test public void itDoesNotContainADescriptionIfScopesRoute() { AccessibilityBridge accessibilityBridge = setUpBridge(); TestSemanticsNode testSemanticsNode = new TestSemanticsNode(); testSemanticsNode.label = "Hello, World"; testSemanticsNode.addFlag(AccessibilityBridge.Flag.SCOPES_ROUTE); TestSemanticsUpdate testSemanticsUpdate = testSemanticsNode.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); assertEquals(nodeInfo.getContentDescription(), null); assertEquals(nodeInfo.getText(), null); } @Test public void itUnfocusesPlatformViewWhenPlatformViewGoesAway() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); // Sent a11y tree with platform view. TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode platformView = new TestSemanticsNode(); platformView.id = 1; platformView.platformViewId = 42; root.children.add(platformView); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); // Set a11y focus to platform view. View mockView = mock(View.class); AccessibilityEvent focusEvent = mock(AccessibilityEvent.class); when(mockViewEmbedder.requestSendAccessibilityEvent(mockView, mockView, focusEvent)) .thenReturn(true); when(mockViewEmbedder.getRecordFlutterId(mockView, focusEvent)).thenReturn(42); when(focusEvent.getEventType()).thenReturn(AccessibilityEvent.TYPE_VIEW_ACCESSIBILITY_FOCUSED); accessibilityBridge.externalViewRequestSendAccessibilityEvent(mockView, mockView, focusEvent); // Replace the platform view. TestSemanticsNode node = new TestSemanticsNode(); node.id = 2; root.children.clear(); root.children.add(node); testSemanticsUpdate = root.toUpdate(); when(mockManager.isEnabled()).thenReturn(true); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); // Check that unfocus event was sent. 
ArgumentCaptor<AccessibilityEvent> eventCaptor = ArgumentCaptor.forClass(AccessibilityEvent.class); verify(mockParent, times(2)) .requestSendAccessibilityEvent(eq(mockRootView), eventCaptor.capture()); AccessibilityEvent event = eventCaptor.getAllValues().get(0); assertEquals(event.getEventType(), AccessibilityEvent.TYPE_VIEW_ACCESSIBILITY_FOCUS_CLEARED); } @Test public void itAnnouncesRouteNameWhenAddingNewRoute() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.addFlag(AccessibilityBridge.Flag.SCOPES_ROUTE); node1.addFlag(AccessibilityBridge.Flag.NAMES_ROUTE); node1.label = "node1"; root.children.add(node1); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); verify(mockRootView, times(1)).setAccessibilityPaneTitle(eq("node1")); TestSemanticsNode new_root = new TestSemanticsNode(); new_root.id = 0; TestSemanticsNode new_node1 = new TestSemanticsNode(); new_node1.id = 1; new_node1.addFlag(AccessibilityBridge.Flag.SCOPES_ROUTE); new_node1.addFlag(AccessibilityBridge.Flag.NAMES_ROUTE); new_node1.label = "new_node1"; new_root.children.add(new_node1); TestSemanticsNode new_node2 = new TestSemanticsNode(); new_node2.id = 2; new_node2.addFlag(AccessibilityBridge.Flag.SCOPES_ROUTE); new_node2.addFlag(AccessibilityBridge.Flag.NAMES_ROUTE); new_node2.label = "new_node2"; new_node1.children.add(new_node2); testSemanticsUpdate = new_root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); verify(mockRootView, times(1)).setAccessibilityPaneTitle(eq("new_node2")); } @Test public void itSetsTraversalAfter() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.label = "node1"; root.children.add(node1); TestSemanticsNode node2 = new TestSemanticsNode(); node2.id = 2; node2.label = "node2"; root.children.add(node2); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityBridge spyAccessibilityBridge = spy(accessibilityBridge); AccessibilityNodeInfo mockNodeInfo2 = mock(AccessibilityNodeInfo.class); when(spyAccessibilityBridge.obtainAccessibilityNodeInfo(mockRootView, 2)) .thenReturn(mockNodeInfo2); 
spyAccessibilityBridge.createAccessibilityNodeInfo(2); verify(mockNodeInfo2, times(1)).setTraversalAfter(eq(mockRootView), eq(1)); } @Config(sdk = API_LEVELS.API_24) @TargetApi(API_LEVELS.API_24) @Test public void itSetsRootViewNotImportantForAccessibility() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityBridge spyAccessibilityBridge = spy(accessibilityBridge); AccessibilityNodeInfo mockNodeInfo = mock(AccessibilityNodeInfo.class); when(spyAccessibilityBridge.obtainAccessibilityNodeInfo(mockRootView)).thenReturn(mockNodeInfo); spyAccessibilityBridge.createAccessibilityNodeInfo(View.NO_ID); verify(mockNodeInfo, times(1)).setImportantForAccessibility(eq(false)); } @Config(sdk = API_LEVELS.API_24) @TargetApi(API_LEVELS.API_24) @Test public void itSetsNodeImportantForAccessibilityIfItHasContent() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.label = "some label"; TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityBridge spyAccessibilityBridge = spy(accessibilityBridge); AccessibilityNodeInfo mockNodeInfo = mock(AccessibilityNodeInfo.class); when(spyAccessibilityBridge.obtainAccessibilityNodeInfo(mockRootView, 0)) .thenReturn(mockNodeInfo); spyAccessibilityBridge.createAccessibilityNodeInfo(0); verify(mockNodeInfo, times(1)).setImportantForAccessibility(eq(true)); } @Config(sdk = API_LEVELS.API_24) @TargetApi(API_LEVELS.API_24) @Test public void itSetsNodeImportantForAccessibilityIfItHasActions() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.addAction(Action.TAP); TestSemanticsUpdate 
testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityBridge spyAccessibilityBridge = spy(accessibilityBridge); AccessibilityNodeInfo mockNodeInfo = mock(AccessibilityNodeInfo.class); when(spyAccessibilityBridge.obtainAccessibilityNodeInfo(mockRootView, 0)) .thenReturn(mockNodeInfo); spyAccessibilityBridge.createAccessibilityNodeInfo(0); verify(mockNodeInfo, times(1)).setImportantForAccessibility(eq(true)); } @Config(sdk = API_LEVELS.API_24) @TargetApi(API_LEVELS.API_24) @Test public void itSetsNodeUnImportantForAccessibilityIfItIsEmpty() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node = new TestSemanticsNode(); node.id = 1; root.children.add(node); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityBridge spyAccessibilityBridge = spy(accessibilityBridge); AccessibilityNodeInfo mockNodeInfo = mock(AccessibilityNodeInfo.class); when(spyAccessibilityBridge.obtainAccessibilityNodeInfo(mockRootView, 0)) .thenReturn(mockNodeInfo); spyAccessibilityBridge.createAccessibilityNodeInfo(0); verify(mockNodeInfo, times(1)).setImportantForAccessibility(eq(false)); AccessibilityNodeInfo mockNodeInfo1 = mock(AccessibilityNodeInfo.class); when(spyAccessibilityBridge.obtainAccessibilityNodeInfo(mockRootView, 1)) .thenReturn(mockNodeInfo1); spyAccessibilityBridge.createAccessibilityNodeInfo(1); verify(mockNodeInfo1, times(1)).setImportantForAccessibility(eq(false)); } @SuppressWarnings("deprecation") // getSystemWindowInset* methods deprecated. 
@Config(sdk = API_LEVELS.API_28) @TargetApi(API_LEVELS.API_28) @Test public void itSetCutoutInsetBasedonLayoutModeNever() { int expectedInsetLeft = 5; int top = 0; int left = 0; int right = 100; int bottom = 200; AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Activity context = mock(Activity.class); Window window = mock(Window.class); WindowInsets insets = mock(WindowInsets.class); WindowManager.LayoutParams layoutParams = new WindowManager.LayoutParams(); layoutParams.layoutInDisplayCutoutMode = WindowManager.LayoutParams.LAYOUT_IN_DISPLAY_CUTOUT_MODE_NEVER; when(mockRootView.getContext()).thenReturn(context); when(context.getWindow()).thenReturn(window); when(window.getAttributes()).thenReturn(layoutParams); when(mockRootView.getRootWindowInsets()).thenReturn(insets); when(insets.getSystemWindowInsetLeft()).thenReturn(expectedInsetLeft); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.left = left; root.top = top; root.right = right; root.bottom = bottom; TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityBridge spyAccessibilityBridge = spy(accessibilityBridge); AccessibilityNodeInfo mockNodeInfo = mock(AccessibilityNodeInfo.class); when(spyAccessibilityBridge.obtainAccessibilityNodeInfo(mockRootView, 0)) .thenReturn(mockNodeInfo); spyAccessibilityBridge.createAccessibilityNodeInfo(0); verify(mockNodeInfo, times(1)) .setBoundsInScreen( new Rect(left + expectedInsetLeft, top, right + expectedInsetLeft, bottom)); } @SuppressWarnings("deprecation") // getSystemWindowInset* methods deprecated. 
@Config(sdk = API_LEVELS.API_28) @TargetApi(API_LEVELS.API_28) @Test public void itSetCutoutInsetBasedonLayoutModeDefault() { int expectedInsetLeft = 5; int top = 0; int left = 0; int right = 100; int bottom = 200; AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Activity context = mock(Activity.class); Window window = mock(Window.class); WindowInsets insets = mock(WindowInsets.class); WindowManager.LayoutParams layoutParams = new WindowManager.LayoutParams(); layoutParams.layoutInDisplayCutoutMode = WindowManager.LayoutParams.LAYOUT_IN_DISPLAY_CUTOUT_MODE_DEFAULT; when(mockRootView.getContext()).thenReturn(context); when(context.getWindow()).thenReturn(window); when(window.getAttributes()).thenReturn(layoutParams); when(mockRootView.getRootWindowInsets()).thenReturn(insets); when(insets.getSystemWindowInsetLeft()).thenReturn(expectedInsetLeft); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.left = left; root.top = top; root.right = right; root.bottom = bottom; TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityBridge spyAccessibilityBridge = spy(accessibilityBridge); AccessibilityNodeInfo mockNodeInfo = mock(AccessibilityNodeInfo.class); when(spyAccessibilityBridge.obtainAccessibilityNodeInfo(mockRootView, 0)) .thenReturn(mockNodeInfo); spyAccessibilityBridge.createAccessibilityNodeInfo(0); verify(mockNodeInfo, times(1)) .setBoundsInScreen( new Rect(left + expectedInsetLeft, top, right + expectedInsetLeft, bottom)); } @SuppressWarnings("deprecation") // getSystemWindowInset* methods deprecated. 
@Config(sdk = API_LEVELS.API_28) @TargetApi(API_LEVELS.API_28) @Test public void itSetCutoutInsetBasedonLayoutModeShortEdges() { int expectedInsetLeft = 5; int top = 0; int left = 0; int right = 100; int bottom = 200; AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Activity context = mock(Activity.class); Window window = mock(Window.class); WindowInsets insets = mock(WindowInsets.class); WindowManager.LayoutParams layoutParams = new WindowManager.LayoutParams(); layoutParams.layoutInDisplayCutoutMode = WindowManager.LayoutParams.LAYOUT_IN_DISPLAY_CUTOUT_MODE_SHORT_EDGES; when(mockRootView.getContext()).thenReturn(context); when(context.getWindow()).thenReturn(window); when(window.getAttributes()).thenReturn(layoutParams); when(mockRootView.getRootWindowInsets()).thenReturn(insets); when(insets.getSystemWindowInsetLeft()).thenReturn(expectedInsetLeft); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.left = left; root.top = top; root.right = right; root.bottom = bottom; TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityBridge spyAccessibilityBridge = spy(accessibilityBridge); AccessibilityNodeInfo mockNodeInfo = mock(AccessibilityNodeInfo.class); when(spyAccessibilityBridge.obtainAccessibilityNodeInfo(mockRootView, 0)) .thenReturn(mockNodeInfo); spyAccessibilityBridge.createAccessibilityNodeInfo(0); // Does not apply left inset if the layout mode is `short edges`. verify(mockNodeInfo, times(1)).setBoundsInScreen(new Rect(left, top, right, bottom)); } @SuppressWarnings("deprecation") // getSystemWindowInset* methods deprecated. // flutter#133074 tracks post deprecation work. 
@Config(sdk = API_LEVELS.API_30) @TargetApi(API_LEVELS.API_30) @Test public void itSetCutoutInsetBasedonLayoutModeAlways() { int expectedInsetLeft = 5; int top = 0; int left = 0; int right = 100; int bottom = 200; AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Activity context = mock(Activity.class); Window window = mock(Window.class); WindowInsets insets = mock(WindowInsets.class); WindowManager.LayoutParams layoutParams = new WindowManager.LayoutParams(); layoutParams.layoutInDisplayCutoutMode = WindowManager.LayoutParams.LAYOUT_IN_DISPLAY_CUTOUT_MODE_ALWAYS; when(mockRootView.getContext()).thenReturn(context); when(context.getWindow()).thenReturn(window); when(window.getAttributes()).thenReturn(layoutParams); when(mockRootView.getRootWindowInsets()).thenReturn(insets); when(insets.getSystemWindowInsetLeft()).thenReturn(expectedInsetLeft); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.left = left; root.top = top; root.right = right; root.bottom = bottom; TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityBridge spyAccessibilityBridge = spy(accessibilityBridge); AccessibilityNodeInfo mockNodeInfo = mock(AccessibilityNodeInfo.class); when(spyAccessibilityBridge.obtainAccessibilityNodeInfo(mockRootView, 0)) .thenReturn(mockNodeInfo); spyAccessibilityBridge.createAccessibilityNodeInfo(0); // Does not apply left inset if the layout mode is `always`. verify(mockNodeInfo, times(1)).setBoundsInScreen(new Rect(left, top, right, bottom)); } @Test public void itIgnoresUnfocusableNodeDuringHitTest() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); when(mockManager.isTouchExplorationEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.left = 0; root.top = 0; root.bottom = 20; root.right = 20; TestSemanticsNode ignored = new TestSemanticsNode(); ignored.id = 1; ignored.addFlag(AccessibilityBridge.Flag.SCOPES_ROUTE); ignored.left = 0; ignored.top = 0; ignored.bottom = 20; ignored.right = 20; root.children.add(ignored); TestSemanticsNode child = new TestSemanticsNode(); child.id = 2; child.label = "label"; child.left = 0; child.top = 0; child.bottom = 20; child.right = 20; root.children.add(child); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); verify(mockRootView, times(1)).setAccessibilityPaneTitle(eq(" ")); // Synthesize an accessibility hit test event. 
MotionEvent mockEvent = mock(MotionEvent.class); when(mockEvent.getX()).thenReturn(10.0f); when(mockEvent.getY()).thenReturn(10.0f); when(mockEvent.getAction()).thenReturn(MotionEvent.ACTION_HOVER_ENTER); boolean hit = accessibilityBridge.onAccessibilityHoverEvent(mockEvent); assertEquals(hit, true); ArgumentCaptor<AccessibilityEvent> eventCaptor = ArgumentCaptor.forClass(AccessibilityEvent.class); verify(mockParent, times(2)) .requestSendAccessibilityEvent(eq(mockRootView), eventCaptor.capture()); AccessibilityEvent event = eventCaptor.getAllValues().get(1); assertEquals(event.getEventType(), AccessibilityEvent.TYPE_VIEW_HOVER_ENTER); assertEquals(accessibilityBridge.getHoveredObjectId(), 2); } @Test public void itFindsPlatformViewsDuringHoverByDefault() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); when(mockManager.isTouchExplorationEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.left = 0; root.top = 0; root.bottom = 20; root.right = 20; TestSemanticsNode platformView = new TestSemanticsNode(); platformView.id = 1; platformView.platformViewId = 1; platformView.left = 0; platformView.top = 0; platformView.bottom = 20; platformView.right = 20; root.addChild(platformView); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); // Synthesize an accessibility hit test event. MotionEvent mockEvent = mock(MotionEvent.class); when(mockEvent.getX()).thenReturn(10.0f); when(mockEvent.getY()).thenReturn(10.0f); when(mockEvent.getAction()).thenReturn(MotionEvent.ACTION_HOVER_ENTER); final boolean handled = accessibilityBridge.onAccessibilityHoverEvent(mockEvent); assertTrue(handled); } @Test public void itIgnoresPlatformViewsDuringHoverIfRequested() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); when(mockManager.isTouchExplorationEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.left = 0; root.top = 0; root.bottom = 20; root.right = 20; TestSemanticsNode platformView = new TestSemanticsNode(); platformView.id = 1; platformView.platformViewId = 1; platformView.left = 0; platformView.top = 0; platformView.bottom = 20; platformView.right = 20; root.addChild(platformView); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); // Synthesize an accessibility hit test event. 
MotionEvent mockEvent = mock(MotionEvent.class); when(mockEvent.getX()).thenReturn(10.0f); when(mockEvent.getY()).thenReturn(10.0f); when(mockEvent.getAction()).thenReturn(MotionEvent.ACTION_HOVER_ENTER); final boolean handled = accessibilityBridge.onAccessibilityHoverEvent(mockEvent, true); assertFalse(handled); } @Test public void itAnnouncesRouteNameWhenRemoveARoute() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.addFlag(AccessibilityBridge.Flag.SCOPES_ROUTE); node1.addFlag(AccessibilityBridge.Flag.NAMES_ROUTE); node1.label = "node1"; root.children.add(node1); TestSemanticsNode node2 = new TestSemanticsNode(); node2.id = 2; node2.addFlag(AccessibilityBridge.Flag.SCOPES_ROUTE); node2.addFlag(AccessibilityBridge.Flag.NAMES_ROUTE); node2.label = "node2"; node1.children.add(node2); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); verify(mockRootView, times(1)).setAccessibilityPaneTitle(eq("node2")); TestSemanticsNode new_root = new TestSemanticsNode(); new_root.id = 0; TestSemanticsNode new_node1 = new TestSemanticsNode(); new_node1.id = 1; new_node1.label = "new_node1"; new_root.children.add(new_node1); TestSemanticsNode new_node2 = new TestSemanticsNode(); new_node2.id = 2; new_node2.addFlag(AccessibilityBridge.Flag.SCOPES_ROUTE); new_node2.addFlag(AccessibilityBridge.Flag.NAMES_ROUTE); new_node2.label = "new_node2"; new_node1.children.add(new_node2); testSemanticsUpdate = new_root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); verify(mockRootView, times(1)).setAccessibilityPaneTitle(eq("new_node2")); } @Config(sdk = API_LEVELS.API_21) @Test public void itCanPerformSetText() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.addFlag(AccessibilityBridge.Flag.IS_TEXT_FIELD); root.children.add(node1); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); 
testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); Bundle bundle = new Bundle(); String expectedText = "some string"; bundle.putString(AccessibilityNodeInfo.ACTION_ARGUMENT_SET_TEXT_CHARSEQUENCE, expectedText); accessibilityBridge.performAction(1, AccessibilityNodeInfo.ACTION_SET_TEXT, bundle); verify(mockChannel) .dispatchSemanticsAction(1, AccessibilityBridge.Action.SET_TEXT, expectedText); } @Config(sdk = API_LEVELS.API_21) @Test public void itCanPredictSetText() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.addFlag(AccessibilityBridge.Flag.IS_TEXT_FIELD); root.children.add(node1); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); Bundle bundle = new Bundle(); String expectedText = "some string"; bundle.putString(AccessibilityNodeInfo.ACTION_ARGUMENT_SET_TEXT_CHARSEQUENCE, expectedText); accessibilityBridge.performAction(1, AccessibilityNodeInfo.ACTION_SET_TEXT, bundle); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(1); assertEquals(nodeInfo.getText().toString(), expectedText); } @Config(sdk = API_LEVELS.API_21) @Test public void itBuildsAttributedString() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.label = "label"; TestStringAttribute attribute = new TestStringAttributeSpellOut(); attribute.start = 1; attribute.end = 2; attribute.type = TestStringAttributeType.SPELLOUT; root.labelAttributes = new ArrayList<TestStringAttribute>() { { add(attribute); } }; root.value = "value"; TestStringAttributeLocale localeAttribute = new TestStringAttributeLocale(); localeAttribute.start = 1; localeAttribute.end = 2; localeAttribute.type = TestStringAttributeType.LOCALE; localeAttribute.locale = "es-MX"; 
root.valueAttributes = new ArrayList<TestStringAttribute>() { { add(localeAttribute); } }; TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); SpannedString actual = (SpannedString) nodeInfo.getContentDescription(); assertEquals(actual.toString(), "value, label"); Object[] objectSpans = actual.getSpans(0, actual.length(), Object.class); assertEquals(objectSpans.length, 2); LocaleSpan localeSpan = (LocaleSpan) objectSpans[0]; assertEquals(localeSpan.getLocale().toLanguageTag(), "es-MX"); assertEquals(actual.getSpanStart(localeSpan), 1); assertEquals(actual.getSpanEnd(localeSpan), 2); TtsSpan spellOutSpan = (TtsSpan) objectSpans[1]; assertEquals(spellOutSpan.getType(), TtsSpan.TYPE_VERBATIM); assertEquals(actual.getSpanStart(spellOutSpan), 8); assertEquals(actual.getSpanEnd(spellOutSpan), 9); } @Config(sdk = API_LEVELS.API_21) @Test public void itSetsTextCorrectly() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.value = "value"; TestStringAttribute attribute = new TestStringAttributeSpellOut(); attribute.start = 1; attribute.end = 2; attribute.type = TestStringAttributeType.SPELLOUT; root.valueAttributes = new ArrayList<>(); root.valueAttributes.add(attribute); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); SpannableString actual = (SpannableString) nodeInfo.getContentDescription(); assertEquals(actual.toString(), "value"); Object[] objectSpans = actual.getSpans(0, actual.length(), Object.class); assertEquals(objectSpans.length, 1); TtsSpan spellOutSpan = (TtsSpan) objectSpans[0]; assertEquals(spellOutSpan.getType(), TtsSpan.TYPE_VERBATIM); assertEquals(actual.getSpanStart(spellOutSpan), 1); assertEquals(actual.getSpanEnd(spellOutSpan), 2); // Perform a set text action. Bundle bundle = new Bundle(); String expectedText = "a"; bundle.putString(AccessibilityNodeInfo.ACTION_ARGUMENT_SET_TEXT_CHARSEQUENCE, expectedText); accessibilityBridge.performAction(0, AccessibilityNodeInfo.ACTION_SET_TEXT, bundle); // The action should remove the string attributes. 
nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); actual = (SpannableString) nodeInfo.getContentDescription(); assertEquals(actual.toString(), expectedText); objectSpans = actual.getSpans(0, actual.length(), Object.class); assertEquals(objectSpans.length, 0); } @Config(sdk = API_LEVELS.API_28) @TargetApi(API_LEVELS.API_28) @Test public void itSetsTooltipCorrectly() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); // Create a node with tooltip. TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.tooltip = "tooltip"; TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); // Test the generated AccessibilityNodeInfo for the node we created // and verify it has correct tooltip text. AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); CharSequence actual = nodeInfo.getTooltipText(); assertEquals(actual.toString(), root.tooltip); } @TargetApi(API_LEVELS.API_28) @Test public void itSetsIdentifierCorrectly() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); // Create a node with identifier. TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.identifier = "identifier"; TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); // Test the generated AccessibilityNodeInfo for the node we created and // verify it has correct identifier (i.e. resource-id per Android // terminology). 
AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); CharSequence actual = nodeInfo.getViewIdResourceName(); assertEquals(actual.toString(), root.identifier); } @Config(sdk = API_LEVELS.API_21) @Test public void itCanCreateAccessibilityNodeInfoWithSetText() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.addFlag(AccessibilityBridge.Flag.IS_TEXT_FIELD); node1.addAction(AccessibilityBridge.Action.SET_TEXT); root.children.add(node1); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(1); List<AccessibilityNodeInfo.AccessibilityAction> actions = nodeInfo.getActionList(); assertTrue(actions.contains(AccessibilityNodeInfo.AccessibilityAction.ACTION_SET_TEXT)); } @Test public void itCanPredictSetSelection() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.value = "some text"; node1.textSelectionBase = -1; node1.textSelectionExtent = -1; node1.addFlag(AccessibilityBridge.Flag.IS_TEXT_FIELD); root.children.add(node1); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); Bundle bundle = new Bundle(); int expectedStart = 1; int expectedEnd = 3; bundle.putInt(AccessibilityNodeInfo.ACTION_ARGUMENT_SELECTION_START_INT, expectedStart); bundle.putInt(AccessibilityNodeInfo.ACTION_ARGUMENT_SELECTION_END_INT, expectedEnd); accessibilityBridge.performAction(1, AccessibilityNodeInfo.ACTION_SET_SELECTION, bundle); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(1); 
assertEquals(nodeInfo.getTextSelectionStart(), expectedStart); assertEquals(nodeInfo.getTextSelectionEnd(), expectedEnd); } @Test public void itPerformsClearAccessibilityFocusCorrectly() { BasicMessageChannel mockChannel = mock(BasicMessageChannel.class); AccessibilityChannel accessibilityChannel = new AccessibilityChannel(mockChannel, mock(FlutterJNI.class)); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ accessibilityChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.label = "root"; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.value = "some text"; root.children.add(node1); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); accessibilityBridge.performAction(0, AccessibilityNodeInfo.ACTION_ACCESSIBILITY_FOCUS, null); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); assertTrue(nodeInfo.isAccessibilityFocused()); HashMap<String, Object> message = new HashMap<>(); message.put("type", "didGainFocus"); message.put("nodeId", 0); verify(mockChannel).send(message); // Clear focus on non-focused node shouldn't do anything accessibilityBridge.performAction( 1, AccessibilityNodeInfo.ACTION_CLEAR_ACCESSIBILITY_FOCUS, null); nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); assertTrue(nodeInfo.isAccessibilityFocused()); // Now, clear the focus for real. 
accessibilityBridge.performAction( 0, AccessibilityNodeInfo.ACTION_CLEAR_ACCESSIBILITY_FOCUS, null); nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); assertFalse(nodeInfo.isAccessibilityFocused()); } @Test public void itSetsFocusabilityBasedOnFlagsCorrectly() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.addFlag(Flag.HAS_IMPLICIT_SCROLLING); TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.addFlag(Flag.IS_READ_ONLY); root.children.add(node1); TestSemanticsNode node2 = new TestSemanticsNode(); node2.id = 2; node2.addFlag(Flag.HAS_CHECKED_STATE); root.children.add(node2); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); // Only node 2 is focusable because it has a flag that is not in // AccessibilityBridge.TRIVIAL_FLAGS. AccessibilityNodeInfo rootInfo = accessibilityBridge.createAccessibilityNodeInfo(0); assertFalse(rootInfo.isFocusable()); AccessibilityNodeInfo node1Info = accessibilityBridge.createAccessibilityNodeInfo(1); assertFalse(node1Info.isFocusable()); AccessibilityNodeInfo node2Info = accessibilityBridge.createAccessibilityNodeInfo(2); assertTrue(node2Info.isFocusable()); } @Config(sdk = API_LEVELS.API_31) @TargetApi(API_LEVELS.API_31) @Test public void itSetsBoldTextFlagCorrectly() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); Resources resource = mock(Resources.class); Configuration config = new Configuration(); config.fontWeightAdjustment = 300; when(mockRootView.getContext()).thenReturn(context); when(mockRootView.getResources()).thenReturn(resource); when(resource.getConfiguration()).thenReturn(config); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); verify(mockChannel).setAccessibilityFeatures(1 << 3); } @Test public void itSetsFocusedNodeBeforeSendingEvent() { BasicMessageChannel mockChannel = mock(BasicMessageChannel.class); AccessibilityChannel accessibilityChannel = new AccessibilityChannel(mockChannel, mock(FlutterJNI.class)); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = 
mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ accessibilityChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.label = "root"; TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); class Verifier { public Verifier(AccessibilityBridge accessibilityBridge) { this.accessibilityBridge = accessibilityBridge; } public AccessibilityBridge accessibilityBridge; public boolean verified = false; public boolean verify(InvocationOnMock invocation) { AccessibilityEvent event = (AccessibilityEvent) invocation.getArguments()[1]; assertEquals(event.getEventType(), AccessibilityEvent.TYPE_VIEW_ACCESSIBILITY_FOCUSED); // The accessibility focus must be set before sending out // the TYPE_VIEW_ACCESSIBILITY_FOCUSED event. AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); assertTrue(nodeInfo.isAccessibilityFocused()); verified = true; return true; } }; Verifier verifier = new Verifier(accessibilityBridge); when(mockParent.requestSendAccessibilityEvent(eq(mockRootView), any(AccessibilityEvent.class))) .thenAnswer(invocation -> verifier.verify(invocation)); accessibilityBridge.performAction(0, AccessibilityNodeInfo.ACTION_ACCESSIBILITY_FOCUS, null); assertTrue(verifier.verified); HashMap<String, Object> message = new HashMap<>(); message.put("type", "didGainFocus"); message.put("nodeId", 0); verify(mockChannel).send(message); } @Test public void itClearsFocusedNodeBeforeSendingEvent() { BasicMessageChannel mockChannel = mock(BasicMessageChannel.class); AccessibilityChannel accessibilityChannel = new AccessibilityChannel(mockChannel, mock(FlutterJNI.class)); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ accessibilityChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; root.label = "root"; TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); // Set the focus on root. 
accessibilityBridge.performAction(0, AccessibilityNodeInfo.ACTION_ACCESSIBILITY_FOCUS, null); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); assertTrue(nodeInfo.isAccessibilityFocused()); HashMap<String, Object> message = new HashMap<>(); message.put("type", "didGainFocus"); message.put("nodeId", 0); verify(mockChannel).send(message); class Verifier { public Verifier(AccessibilityBridge accessibilityBridge) { this.accessibilityBridge = accessibilityBridge; } public AccessibilityBridge accessibilityBridge; public boolean verified = false; public boolean verify(InvocationOnMock invocation) { AccessibilityEvent event = (AccessibilityEvent) invocation.getArguments()[1]; assertEquals( event.getEventType(), AccessibilityEvent.TYPE_VIEW_ACCESSIBILITY_FOCUS_CLEARED); // The accessibility focus must be cleared before sending out // the TYPE_VIEW_ACCESSIBILITY_FOCUS_CLEARED event. AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(0); assertFalse(nodeInfo.isAccessibilityFocused()); verified = true; return true; } }; Verifier verifier = new Verifier(accessibilityBridge); when(mockParent.requestSendAccessibilityEvent(eq(mockRootView), any(AccessibilityEvent.class))) .thenAnswer(invocation -> verifier.verify(invocation)); accessibilityBridge.performAction( 0, AccessibilityNodeInfo.ACTION_CLEAR_ACCESSIBILITY_FOCUS, null); assertTrue(verifier.verified); } @Test public void itCanPredictCursorMovementsWithGranularityWord() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.value = "some text"; node1.textSelectionBase = 0; node1.textSelectionExtent = 0; node1.addFlag(AccessibilityBridge.Flag.IS_TEXT_FIELD); root.children.add(node1); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); Bundle bundle = new Bundle(); bundle.putInt( AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT, AccessibilityNodeInfo.MOVEMENT_GRANULARITY_WORD); bundle.putBoolean(AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN, false); accessibilityBridge.performAction( 1, AccessibilityNodeInfo.ACTION_NEXT_AT_MOVEMENT_GRANULARITY, bundle); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(1); // The selection should be at the end of 'text'. assertEquals(nodeInfo.getTextSelectionStart(), 9); assertEquals(nodeInfo.getTextSelectionEnd(), 9); bundle = new Bundle(); bundle.putInt( AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT, AccessibilityNodeInfo.MOVEMENT_GRANULARITY_WORD); 
bundle.putBoolean(AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN, false); accessibilityBridge.performAction( 1, AccessibilityNodeInfo.ACTION_PREVIOUS_AT_MOVEMENT_GRANULARITY, bundle); nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(1); // The selection should go to the beginning of 'text'. assertEquals(nodeInfo.getTextSelectionStart(), 5); assertEquals(nodeInfo.getTextSelectionEnd(), 5); } @Test public void itAlsoFireSelectionEventWhenPredictCursorMovements() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.value = "some text"; node1.textSelectionBase = 0; node1.textSelectionExtent = 0; node1.addFlag(AccessibilityBridge.Flag.IS_TEXT_FIELD); root.children.add(node1); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); Bundle bundle = new Bundle(); bundle.putInt( AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT, AccessibilityNodeInfo.MOVEMENT_GRANULARITY_CHARACTER); bundle.putBoolean(AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN, false); accessibilityBridge.performAction( 1, AccessibilityNodeInfo.ACTION_NEXT_AT_MOVEMENT_GRANULARITY, bundle); ArgumentCaptor<AccessibilityEvent> eventCaptor = ArgumentCaptor.forClass(AccessibilityEvent.class); verify(mockParent, times(2)) .requestSendAccessibilityEvent(eq(mockRootView), eventCaptor.capture()); AccessibilityEvent event = eventCaptor.getAllValues().get(1); assertEquals(event.getEventType(), AccessibilityEvent.TYPE_VIEW_TEXT_SELECTION_CHANGED); assertEquals(event.getText().toString(), "[" + node1.value + "]"); assertEquals(event.getFromIndex(), 1); assertEquals(event.getToIndex(), 1); assertEquals(event.getItemCount(), node1.value.length()); } @Test public void itDoesNotFireSelectionEventWhenPredictCursorMovementsDoesNotChangeSelection() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); 
when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.value = "some text"; node1.textSelectionBase = 0; node1.textSelectionExtent = 0; node1.addFlag(AccessibilityBridge.Flag.IS_TEXT_FIELD); root.children.add(node1); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); Bundle bundle = new Bundle(); bundle.putInt( AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT, AccessibilityNodeInfo.MOVEMENT_GRANULARITY_CHARACTER); bundle.putBoolean(AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN, false); accessibilityBridge.performAction( 1, AccessibilityNodeInfo.ACTION_PREVIOUS_AT_MOVEMENT_GRANULARITY, bundle); ArgumentCaptor<AccessibilityEvent> eventCaptor = ArgumentCaptor.forClass(AccessibilityEvent.class); verify(mockParent, times(1)) .requestSendAccessibilityEvent(eq(mockRootView), eventCaptor.capture()); assertEquals(eventCaptor.getAllValues().size(), 1); AccessibilityEvent event = eventCaptor.getAllValues().get(0); assertNotEquals(event.getEventType(), AccessibilityEvent.TYPE_VIEW_TEXT_SELECTION_CHANGED); } @Test public void itCanPredictCursorMovementsWithGranularityWordUnicode() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.value = "你 好 嗎"; node1.textSelectionBase = 0; node1.textSelectionExtent = 0; node1.addFlag(AccessibilityBridge.Flag.IS_TEXT_FIELD); root.children.add(node1); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); Bundle bundle = new Bundle(); bundle.putInt( AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT, AccessibilityNodeInfo.MOVEMENT_GRANULARITY_WORD); bundle.putBoolean(AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN, false); accessibilityBridge.performAction( 1, AccessibilityNodeInfo.ACTION_NEXT_AT_MOVEMENT_GRANULARITY, bundle); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(1); // The selection should be at the end of '好'. assertEquals(nodeInfo.getTextSelectionStart(), 3); assertEquals(nodeInfo.getTextSelectionEnd(), 3); bundle = new Bundle(); bundle.putInt( AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT, AccessibilityNodeInfo.MOVEMENT_GRANULARITY_WORD); bundle.putBoolean(AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN, false); accessibilityBridge.performAction( 1, 
AccessibilityNodeInfo.ACTION_PREVIOUS_AT_MOVEMENT_GRANULARITY, bundle); nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(1); // The selection should go to the beginning of '好'. assertEquals(nodeInfo.getTextSelectionStart(), 2); assertEquals(nodeInfo.getTextSelectionEnd(), 2); } @Test public void itCanPredictCursorMovementsWithGranularityLine() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.value = "How are you\nI am fine\nThank you"; // Selection is at the second line. node1.textSelectionBase = 14; node1.textSelectionExtent = 14; node1.addFlag(AccessibilityBridge.Flag.IS_TEXT_FIELD); root.children.add(node1); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); Bundle bundle = new Bundle(); bundle.putInt( AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT, AccessibilityNodeInfo.MOVEMENT_GRANULARITY_LINE); bundle.putBoolean(AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN, false); accessibilityBridge.performAction( 1, AccessibilityNodeInfo.ACTION_NEXT_AT_MOVEMENT_GRANULARITY, bundle); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(1); // The selection should be at the beginning of the third line. assertEquals(nodeInfo.getTextSelectionStart(), 21); assertEquals(nodeInfo.getTextSelectionEnd(), 21); bundle = new Bundle(); bundle.putInt( AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT, AccessibilityNodeInfo.MOVEMENT_GRANULARITY_LINE); bundle.putBoolean(AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN, false); accessibilityBridge.performAction( 1, AccessibilityNodeInfo.ACTION_PREVIOUS_AT_MOVEMENT_GRANULARITY, bundle); nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(1); // The selection should be at the beginning of the second line. 
assertEquals(nodeInfo.getTextSelectionStart(), 11); assertEquals(nodeInfo.getTextSelectionEnd(), 11); } @Test public void itCanPredictCursorMovementsWithGranularityCharacter() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ mockRootView, /*accessibilityChannel=*/ mockChannel, /*accessibilityManager=*/ mockManager, /*contentResolver=*/ null, /*accessibilityViewEmbedder=*/ mockViewEmbedder, /*platformViewsAccessibilityDelegate=*/ null); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode node1 = new TestSemanticsNode(); node1.id = 1; node1.value = "some text"; node1.textSelectionBase = 0; node1.textSelectionExtent = 0; node1.addFlag(AccessibilityBridge.Flag.IS_TEXT_FIELD); root.children.add(node1); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); Bundle bundle = new Bundle(); bundle.putInt( AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT, AccessibilityNodeInfo.MOVEMENT_GRANULARITY_CHARACTER); bundle.putBoolean(AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN, false); accessibilityBridge.performAction( 1, AccessibilityNodeInfo.ACTION_NEXT_AT_MOVEMENT_GRANULARITY, bundle); AccessibilityNodeInfo nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(1); assertEquals(nodeInfo.getTextSelectionStart(), 1); assertEquals(nodeInfo.getTextSelectionEnd(), 1); bundle = new Bundle(); bundle.putInt( AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT, AccessibilityNodeInfo.MOVEMENT_GRANULARITY_CHARACTER); bundle.putBoolean(AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN, false); accessibilityBridge.performAction( 1, AccessibilityNodeInfo.ACTION_PREVIOUS_AT_MOVEMENT_GRANULARITY, bundle); nodeInfo = accessibilityBridge.createAccessibilityNodeInfo(1); assertEquals(nodeInfo.getTextSelectionStart(), 0); assertEquals(nodeInfo.getTextSelectionEnd(), 0); } @Test public void itAnnouncesWhiteSpaceWhenNoNamesRoute() { AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); // Send an a11y tree with scopeRoute but without namesRoute. 
TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode scopeRoute = new TestSemanticsNode(); scopeRoute.id = 1; scopeRoute.addFlag(AccessibilityBridge.Flag.SCOPES_ROUTE); root.children.add(scopeRoute); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); verify(mockRootView, times(1)).setAccessibilityPaneTitle(eq(" ")); } @Test public void itHoverOverOutOfBoundsDoesNotCrash() { // SemanticsNode.hitTest() returns null when out of bounds. AccessibilityViewEmbedder mockViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityManager mockManager = mock(AccessibilityManager.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, mockManager, mockViewEmbedder); // Send an a11y tree with a platform view. TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode platformView = new TestSemanticsNode(); platformView.id = 1; platformView.platformViewId = 42; root.children.add(platformView); TestSemanticsUpdate testSemanticsUpdate = root.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); // Pass an out of bounds MotionEvent. accessibilityBridge.onAccessibilityHoverEvent(MotionEvent.obtain(1, 1, 1, -10, -10, 0)); } @Test public void itProducesPlatformViewNodeForHybridComposition() { PlatformViewsAccessibilityDelegate accessibilityDelegate = mock(PlatformViewsAccessibilityDelegate.class); Context context = ApplicationProvider.getApplicationContext(); View rootAccessibilityView = new View(context); AccessibilityViewEmbedder accessibilityViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityBridge accessibilityBridge = setUpBridge( rootAccessibilityView, /*accessibilityChannel=*/ null, /*accessibilityManager=*/ null, /*contentResolver=*/ null, accessibilityViewEmbedder, accessibilityDelegate); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode platformView = new TestSemanticsNode(); platformView.id = 1; platformView.platformViewId = 1; root.addChild(platformView); TestSemanticsUpdate testSemanticsRootUpdate = root.toUpdate(); testSemanticsRootUpdate.sendUpdateToBridge(accessibilityBridge); TestSemanticsUpdate testSemanticsPlatformViewUpdate = platformView.toUpdate(); testSemanticsPlatformViewUpdate.sendUpdateToBridge(accessibilityBridge); View embeddedView = mock(View.class); when(accessibilityDelegate.getPlatformViewById(1)).thenReturn(embeddedView); when(accessibilityDelegate.usesVirtualDisplay(1)).thenReturn(false); AccessibilityNodeInfo nodeInfo = mock(AccessibilityNodeInfo.class); when(embeddedView.createAccessibilityNodeInfo()).thenReturn(nodeInfo); AccessibilityNodeInfo result = accessibilityBridge.createAccessibilityNodeInfo(0); assertNotNull(result); assertEquals(result.getChildCount(), 1); assertEquals(result.getClassName(), "android.view.View"); } @Test public void itMakesPlatformViewImportantForAccessibility() { PlatformViewsAccessibilityDelegate accessibilityDelegate = mock(PlatformViewsAccessibilityDelegate.class); Context context = ApplicationProvider.getApplicationContext(); View rootAccessibilityView = new View(context); AccessibilityViewEmbedder accessibilityViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityBridge accessibilityBridge = setUpBridge( rootAccessibilityView, 
/*accessibilityChannel=*/ null, /*accessibilityManager=*/ null, /*contentResolver=*/ null, accessibilityViewEmbedder, accessibilityDelegate); TestSemanticsNode root = new TestSemanticsNode(); root.id = 0; TestSemanticsNode platformView = new TestSemanticsNode(); platformView.id = 1; platformView.platformViewId = 1; root.addChild(platformView); View embeddedView = mock(View.class); when(accessibilityDelegate.getPlatformViewById(1)).thenReturn(embeddedView); when(accessibilityDelegate.usesVirtualDisplay(1)).thenReturn(false); TestSemanticsUpdate testSemanticsRootUpdate = root.toUpdate(); testSemanticsRootUpdate.sendUpdateToBridge(accessibilityBridge); verify(embeddedView).setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_AUTO); } @Test public void itMakesPlatformViewNoImportantForAccessibility() { PlatformViewsAccessibilityDelegate accessibilityDelegate = mock(PlatformViewsAccessibilityDelegate.class); Context context = ApplicationProvider.getApplicationContext(); View rootAccessibilityView = new View(context); AccessibilityViewEmbedder accessibilityViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityBridge accessibilityBridge = setUpBridge( rootAccessibilityView, /*accessibilityChannel=*/ null, /*accessibilityManager=*/ null, /*contentResolver=*/ null, accessibilityViewEmbedder, accessibilityDelegate); TestSemanticsNode rootWithPlatformView = new TestSemanticsNode(); rootWithPlatformView.id = 0; TestSemanticsNode platformView = new TestSemanticsNode(); platformView.id = 1; platformView.platformViewId = 1; rootWithPlatformView.addChild(platformView); View embeddedView = mock(View.class); when(accessibilityDelegate.getPlatformViewById(1)).thenReturn(embeddedView); when(accessibilityDelegate.usesVirtualDisplay(1)).thenReturn(false); TestSemanticsUpdate testSemanticsRootWithPlatformViewUpdate = rootWithPlatformView.toUpdate(); testSemanticsRootWithPlatformViewUpdate.sendUpdateToBridge(accessibilityBridge); TestSemanticsNode rootWithoutPlatformView = new TestSemanticsNode(); rootWithoutPlatformView.id = 0; TestSemanticsUpdate testSemanticsRootWithoutPlatformViewUpdate = rootWithoutPlatformView.toUpdate(); testSemanticsRootWithoutPlatformViewUpdate.sendUpdateToBridge(accessibilityBridge); verify(embeddedView) .setImportantForAccessibility(View.IMPORTANT_FOR_ACCESSIBILITY_NO_HIDE_DESCENDANTS); } @Test public void itProducesPlatformViewNodeForVirtualDisplay() { PlatformViewsAccessibilityDelegate accessibilityDelegate = mock(PlatformViewsAccessibilityDelegate.class); AccessibilityViewEmbedder accessibilityViewEmbedder = mock(AccessibilityViewEmbedder.class); AccessibilityBridge accessibilityBridge = setUpBridge( /*rootAccessibilityView=*/ null, /*accessibilityChannel=*/ null, /*accessibilityManager=*/ null, /*contentResolver=*/ null, accessibilityViewEmbedder, accessibilityDelegate); TestSemanticsNode platformView = new TestSemanticsNode(); platformView.platformViewId = 1; TestSemanticsUpdate testSemanticsUpdate = platformView.toUpdate(); testSemanticsUpdate.sendUpdateToBridge(accessibilityBridge); View embeddedView = mock(View.class); when(accessibilityDelegate.getPlatformViewById(1)).thenReturn(embeddedView); when(accessibilityDelegate.usesVirtualDisplay(1)).thenReturn(true); accessibilityBridge.createAccessibilityNodeInfo(0); verify(accessibilityViewEmbedder).getRootNode(eq(embeddedView), eq(0), any(Rect.class)); } @Test public void releaseDropsChannelMessageHandler() { AccessibilityChannel mockChannel = mock(AccessibilityChannel.class); AccessibilityManager mockManager 
= mock(AccessibilityManager.class); ContentResolver mockContentResolver = mock(ContentResolver.class); when(mockManager.isEnabled()).thenReturn(true); AccessibilityBridge accessibilityBridge = setUpBridge(null, mockChannel, mockManager, mockContentResolver, null, null); verify(mockChannel) .setAccessibilityMessageHandler( any(AccessibilityChannel.AccessibilityMessageHandler.class)); ArgumentCaptor<AccessibilityManager.AccessibilityStateChangeListener> stateListenerCaptor = ArgumentCaptor.forClass(AccessibilityManager.AccessibilityStateChangeListener.class); ArgumentCaptor<AccessibilityManager.TouchExplorationStateChangeListener> touchListenerCaptor = ArgumentCaptor.forClass(AccessibilityManager.TouchExplorationStateChangeListener.class); verify(mockManager).addAccessibilityStateChangeListener(stateListenerCaptor.capture()); verify(mockManager).addTouchExplorationStateChangeListener(touchListenerCaptor.capture()); accessibilityBridge.release(); verify(mockChannel).setAccessibilityMessageHandler(null); reset(mockChannel); stateListenerCaptor.getValue().onAccessibilityStateChanged(true); verify(mockChannel, never()).onAndroidAccessibilityEnabled(); touchListenerCaptor.getValue().onTouchExplorationStateChanged(true); verify(mockChannel, never()).setAccessibilityFeatures(anyInt()); } @Test public void sendFocusAccessibilityEvent() { AccessibilityManager mockManager = mock(AccessibilityManager.class); AccessibilityChannel accessibilityChannel = new AccessibilityChannel(mock(DartExecutor.class), mock(FlutterJNI.class)); ContentResolver mockContentResolver = mock(ContentResolver.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); when(mockManager.isEnabled()).thenReturn(true); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, accessibilityChannel, mockManager, null, null, null); HashMap<String, Object> arguments = new HashMap<>(); arguments.put("type", "focus"); arguments.put("nodeId", 123); BasicMessageChannel.Reply reply = mock(BasicMessageChannel.Reply.class); accessibilityChannel.parsingMessageHandler.onMessage(arguments, reply); // Check that focus event was sent. 
ArgumentCaptor<AccessibilityEvent> eventCaptor = ArgumentCaptor.forClass(AccessibilityEvent.class); verify(mockParent).requestSendAccessibilityEvent(eq(mockRootView), eventCaptor.capture()); AccessibilityEvent event = eventCaptor.getAllValues().get(0); assertEquals(event.getEventType(), AccessibilityEvent.TYPE_VIEW_FOCUSED); assertEquals(event.getSource(), null); } @Test public void SetSourceAndPackageNameForAccessibilityEvent() { AccessibilityManager mockManager = mock(AccessibilityManager.class); ContentResolver mockContentResolver = mock(ContentResolver.class); View mockRootView = mock(View.class); Context context = mock(Context.class); when(mockRootView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); when(mockManager.isEnabled()).thenReturn(true); ViewParent mockParent = mock(ViewParent.class); when(mockRootView.getParent()).thenReturn(mockParent); AccessibilityEvent mockEvent = mock(AccessibilityEvent.class); AccessibilityBridge accessibilityBridge = setUpBridge(mockRootView, null, mockManager, null, null, null); AccessibilityBridge spyAccessibilityBridge = spy(accessibilityBridge); when(spyAccessibilityBridge.obtainAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_FOCUSED)) .thenReturn(mockEvent); spyAccessibilityBridge.sendAccessibilityEvent(123, AccessibilityEvent.TYPE_VIEW_FOCUSED); verify(mockEvent).setPackageName("test"); verify(mockEvent).setSource(eq(mockRootView), eq(123)); } AccessibilityBridge setUpBridge() { return setUpBridge(null, null, null, null, null, null); } AccessibilityBridge setUpBridge( View rootAccessibilityView, AccessibilityManager accessibilityManager, AccessibilityViewEmbedder accessibilityViewEmbedder) { return setUpBridge( rootAccessibilityView, null, accessibilityManager, null, accessibilityViewEmbedder, null); } AccessibilityBridge setUpBridge( View rootAccessibilityView, AccessibilityChannel accessibilityChannel, AccessibilityManager accessibilityManager, ContentResolver contentResolver, AccessibilityViewEmbedder accessibilityViewEmbedder, PlatformViewsAccessibilityDelegate platformViewsAccessibilityDelegate) { if (rootAccessibilityView == null) { rootAccessibilityView = mock(View.class); Context context = mock(Context.class); when(rootAccessibilityView.getContext()).thenReturn(context); when(context.getPackageName()).thenReturn("test"); } if (accessibilityChannel == null) { accessibilityChannel = mock(AccessibilityChannel.class); } if (accessibilityManager == null) { accessibilityManager = mock(AccessibilityManager.class); } if (contentResolver == null) { contentResolver = mock(ContentResolver.class); } if (accessibilityViewEmbedder == null) { accessibilityViewEmbedder = mock(AccessibilityViewEmbedder.class); } if (platformViewsAccessibilityDelegate == null) { platformViewsAccessibilityDelegate = mock(PlatformViewsAccessibilityDelegate.class); } return new AccessibilityBridge( rootAccessibilityView, accessibilityChannel, accessibilityManager, contentResolver, accessibilityViewEmbedder, platformViewsAccessibilityDelegate); } /// The encoding for semantics is described in platform_view_android.cc class TestSemanticsUpdate { TestSemanticsUpdate(ByteBuffer buffer, String[] strings, ByteBuffer[] stringAttributeArgs) { this.buffer = buffer; this.strings = strings; this.stringAttributeArgs = stringAttributeArgs; } void sendUpdateToBridge(AccessibilityBridge bridge) { bridge.updateSemantics(buffer, strings, stringAttributeArgs); } final ByteBuffer buffer; final String[] strings; final ByteBuffer[] stringAttributeArgs; } 
enum TestStringAttributeType { SPELLOUT(0), LOCALE(1); private final int value; private TestStringAttributeType(int value) { this.value = value; } public int getValue() { return value; } } class TestStringAttribute { int start; int end; TestStringAttributeType type; } class TestStringAttributeSpellOut extends TestStringAttribute {} class TestStringAttributeLocale extends TestStringAttribute { String locale; } class TestSemanticsNode { TestSemanticsNode() {} void addFlag(AccessibilityBridge.Flag flag) { flags |= flag.value; } void addAction(AccessibilityBridge.Action action) { actions |= action.value; } // These fields are declared in the order they should be // encoded. int id = 0; int flags = 0; int actions = 0; int maxValueLength = 0; int currentValueLength = 0; int textSelectionBase = 0; int textSelectionExtent = 0; int platformViewId = -1; int scrollChildren = 0; int scrollIndex = 0; float scrollPosition = 0.0f; float scrollExtentMax = 0.0f; float scrollExtentMin = 0.0f; String identifier = null; String label = null; List<TestStringAttribute> labelAttributes; String value = null; List<TestStringAttribute> valueAttributes; String increasedValue = null; List<TestStringAttribute> increasedValueAttributes; String decreasedValue = null; List<TestStringAttribute> decreasedValueAttributes; String hint = null; List<TestStringAttribute> hintAttributes; String tooltip = null; int textDirection = 0; float left = 0.0f; float top = 0.0f; float right = 0.0f; float bottom = 0.0f; float[] transform = new float[] { 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f }; final List<TestSemanticsNode> children = new ArrayList<TestSemanticsNode>(); public void addChild(TestSemanticsNode child) { children.add(child); } // custom actions not supported. TestSemanticsUpdate toUpdate() { ArrayList<String> strings = new ArrayList<String>(); ByteBuffer bytes = ByteBuffer.allocate(1000); ArrayList<ByteBuffer> stringAttributeArgs = new ArrayList<ByteBuffer>(); addToBuffer(bytes, strings, stringAttributeArgs); bytes.flip(); return new TestSemanticsUpdate( bytes, strings.toArray(new String[strings.size()]), stringAttributeArgs.toArray(new ByteBuffer[stringAttributeArgs.size()])); } protected void addToBuffer( ByteBuffer bytes, ArrayList<String> strings, ArrayList<ByteBuffer> stringAttributeArgs) { bytes.putInt(id); bytes.putInt(flags); bytes.putInt(actions); bytes.putInt(maxValueLength); bytes.putInt(currentValueLength); bytes.putInt(textSelectionBase); bytes.putInt(textSelectionExtent); bytes.putInt(platformViewId); bytes.putInt(scrollChildren); bytes.putInt(scrollIndex); bytes.putFloat(scrollPosition); bytes.putFloat(scrollExtentMax); bytes.putFloat(scrollExtentMin); if (identifier == null) { bytes.putInt(-1); } else { strings.add(identifier); bytes.putInt(strings.size() - 1); } updateString(label, labelAttributes, bytes, strings, stringAttributeArgs); updateString(value, valueAttributes, bytes, strings, stringAttributeArgs); updateString(increasedValue, increasedValueAttributes, bytes, strings, stringAttributeArgs); updateString(decreasedValue, decreasedValueAttributes, bytes, strings, stringAttributeArgs); updateString(hint, hintAttributes, bytes, strings, stringAttributeArgs); if (tooltip == null) { bytes.putInt(-1); } else { strings.add(tooltip); bytes.putInt(strings.size() - 1); } bytes.putInt(textDirection); bytes.putFloat(left); bytes.putFloat(top); bytes.putFloat(right); bytes.putFloat(bottom); // transform. 
for (int i = 0; i < 16; i++) { bytes.putFloat(transform[i]); } // children in traversal order. bytes.putInt(children.size()); for (TestSemanticsNode node : children) { bytes.putInt(node.id); } // children in hit test order. for (TestSemanticsNode node : children) { bytes.putInt(node.id); } // custom actions bytes.putInt(0); // child nodes for (TestSemanticsNode node : children) { node.addToBuffer(bytes, strings, stringAttributeArgs); } } } static void updateString( String value, List<TestStringAttribute> attributes, ByteBuffer bytes, ArrayList<String> strings, ArrayList<ByteBuffer> stringAttributeArgs) { if (value == null) { bytes.putInt(-1); } else { strings.add(value); bytes.putInt(strings.size() - 1); } // attributes if (attributes == null || attributes.isEmpty()) { bytes.putInt(-1); return; } bytes.putInt(attributes.size()); for (TestStringAttribute attribute : attributes) { bytes.putInt(attribute.start); bytes.putInt(attribute.end); bytes.putInt(attribute.type.getValue()); switch (attribute.type) { case SPELLOUT: bytes.putInt(-1); break; case LOCALE: bytes.putInt(stringAttributeArgs.size()); TestStringAttributeLocale localeAttribute = (TestStringAttributeLocale) attribute; stringAttributeArgs.add(Charset.forName("UTF-8").encode(localeAttribute.locale)); break; } } } }
engine/shell/platform/android/test/io/flutter/view/AccessibilityBridgeTest.java/0
{ "file_path": "engine/shell/platform/android/test/io/flutter/view/AccessibilityBridgeTest.java", "repo_id": "engine", "token_count": 35120 }
329
# Copyright 2013 The Flutter Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import("//flutter/testing/testing.gni") import("core_wrapper_files.gni") # Client library build for internal use by the shell implementation. source_set("client_wrapper") { sources = core_cpp_client_wrapper_sources public = core_cpp_client_wrapper_includes + core_cpp_client_wrapper_internal_headers deps = [ "//flutter/shell/platform/common:common_cpp_library_headers" ] configs += [ "//flutter/shell/platform/common:desktop_library_implementation" ] public_configs = [ "//flutter/shell/platform/common:relative_flutter_library_headers" ] } source_set("client_wrapper_library_stubs") { sources = [ "testing/stub_flutter_api.cc", "testing/stub_flutter_api.h", ] defines = [ "FLUTTER_DESKTOP_LIBRARY" ] public_deps = [ "//flutter/shell/platform/common:common_cpp_library_headers" ] } test_fixtures("client_wrapper_fixtures") { fixtures = [] } executable("client_wrapper_unittests") { testonly = true sources = [ "basic_message_channel_unittests.cc", "encodable_value_unittests.cc", "event_channel_unittests.cc", "method_call_unittests.cc", "method_channel_unittests.cc", "method_result_functions_unittests.cc", "plugin_registrar_unittests.cc", "standard_message_codec_unittests.cc", "standard_method_codec_unittests.cc", "testing/test_codec_extensions.cc", "testing/test_codec_extensions.h", "texture_registrar_unittests.cc", ] deps = [ ":client_wrapper", ":client_wrapper_fixtures", ":client_wrapper_library_stubs", "//flutter/testing", # TODO(chunhtai): Consider refactoring flutter_root/testing so that there's a testing # target that doesn't require a Dart runtime to be linked in. # https://github.com/flutter/flutter/issues/41414. "$dart_src/runtime:libdart_jit", ] defines = [ "FLUTTER_DESKTOP_LIBRARY" ] }
engine/shell/platform/common/client_wrapper/BUILD.gn/0
{ "file_path": "engine/shell/platform/common/client_wrapper/BUILD.gn", "repo_id": "engine", "token_count": 760 }
330
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_SINK_H_ #define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_SINK_H_ #include <string> namespace flutter { class EncodableValue; // Event sink used to send events to the Flutter application. Producers of // events act as clients of this interface and push events through it. template <typename T = EncodableValue> class EventSink { public: EventSink() = default; virtual ~EventSink() = default; // Prevent copying. EventSink(EventSink const&) = delete; EventSink& operator=(EventSink const&) = delete; // Consumes a successful event. void Success(const T& event) { SuccessInternal(&event); } // Consumes a successful event. void Success() { SuccessInternal(nullptr); } // Consumes an error event. void Error(const std::string& error_code, const std::string& error_message, const T& error_details) { ErrorInternal(error_code, error_message, &error_details); } // Consumes an error event. void Error(const std::string& error_code, const std::string& error_message = "") { ErrorInternal(error_code, error_message, nullptr); } // Consumes end of stream. Ensuing calls to Success() or // Error(), if any, are ignored. void EndOfStream() { EndOfStreamInternal(); } protected: // Implementation of the public interface, to be provided by subclasses. virtual void SuccessInternal(const T* event = nullptr) = 0; // Implementation of the public interface, to be provided by subclasses. virtual void ErrorInternal(const std::string& error_code, const std::string& error_message, const T* error_details) = 0; // Implementation of the public interface, to be provided by subclasses. virtual void EndOfStreamInternal() = 0; }; } // namespace flutter #endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_EVENT_SINK_H_
engine/shell/platform/common/client_wrapper/include/flutter/event_sink.h/0
{ "file_path": "engine/shell/platform/common/client_wrapper/include/flutter/event_sink.h", "repo_id": "engine", "token_count": 722 }
331
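As a reading aid for the EventSink interface above: subclasses only implement the three *Internal hooks, and the public Success/Error/EndOfStream methods forward to them. The sketch below is a hypothetical test double (RecordingEventSink is not part of the client wrapper); it assumes only the event_sink.h declarations shown above plus the wrapper's encodable_value.h.

// A minimal sketch, assuming the EventSink<T> interface above; RecordingEventSink
// is an illustrative helper, not part of the Flutter client wrapper.
#include <optional>
#include <string>
#include <vector>

#include "flutter/shell/platform/common/client_wrapper/include/flutter/encodable_value.h"
#include "flutter/shell/platform/common/client_wrapper/include/flutter/event_sink.h"

class RecordingEventSink : public flutter::EventSink<flutter::EncodableValue> {
 public:
  const std::vector<flutter::EncodableValue>& events() const { return events_; }
  const std::optional<std::string>& last_error_code() const { return last_error_code_; }
  bool ended() const { return ended_; }

 protected:
  // Called for both Success() overloads; a null |event| means "no payload".
  void SuccessInternal(const flutter::EncodableValue* event) override {
    events_.push_back(event ? *event : flutter::EncodableValue());
  }

  // Called for both Error() overloads; |error_details| may be null.
  void ErrorInternal(const std::string& error_code,
                     const std::string& error_message,
                     const flutter::EncodableValue* error_details) override {
    last_error_code_ = error_code;
  }

  void EndOfStreamInternal() override { ended_ = true; }

 private:
  std::vector<flutter::EncodableValue> events_;
  std::optional<std::string> last_error_code_;
  bool ended_ = false;
};

In a unit test, one would hand such a sink to the stream handler under test and assert on events() and ended() afterwards.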
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/shell/platform/common/client_wrapper/include/flutter/method_channel.h" #include <memory> #include <string> #include "flutter/shell/platform/common/client_wrapper/include/flutter/binary_messenger.h" #include "flutter/shell/platform/common/client_wrapper/include/flutter/method_result_functions.h" #include "flutter/shell/platform/common/client_wrapper/include/flutter/standard_method_codec.h" #include "gtest/gtest.h" namespace flutter { namespace { class TestBinaryMessenger : public BinaryMessenger { public: void Send(const std::string& channel, const uint8_t* message, size_t message_size, BinaryReply reply) const override { send_called_ = true; last_reply_handler_ = reply; int length = static_cast<int>(message_size); last_message_ = std::vector<uint8_t>(message, message + length * sizeof(uint8_t)); } void SetMessageHandler(const std::string& channel, BinaryMessageHandler handler) override { last_message_handler_channel_ = channel; last_message_handler_ = handler; } bool send_called() { return send_called_; } BinaryReply last_reply_handler() { return last_reply_handler_; } std::string last_message_handler_channel() { return last_message_handler_channel_; } BinaryMessageHandler last_message_handler() { return last_message_handler_; } std::vector<uint8_t> last_message() { return last_message_; } private: mutable bool send_called_ = false; mutable BinaryReply last_reply_handler_; std::string last_message_handler_channel_; BinaryMessageHandler last_message_handler_; mutable std::vector<uint8_t> last_message_; }; } // namespace // Tests that SetMethodCallHandler sets a handler that correctly interacts with // the binary messenger. TEST(MethodChannelTest, Registration) { TestBinaryMessenger messenger; const std::string channel_name("some_channel"); const StandardMethodCodec& codec = StandardMethodCodec::GetInstance(); MethodChannel channel(&messenger, channel_name, &codec); bool callback_called = false; const std::string method_name("hello"); channel.SetMethodCallHandler( [&callback_called, method_name](const auto& call, auto result) { callback_called = true; // Ensure that the wrapper received a correctly decoded call and a // result. EXPECT_EQ(call.method_name(), method_name); EXPECT_NE(result, nullptr); result->Success(); }); EXPECT_EQ(messenger.last_message_handler_channel(), channel_name); EXPECT_NE(messenger.last_message_handler(), nullptr); // Send a test message to trigger the handler test assertions. MethodCall<> call(method_name, nullptr); auto message = codec.EncodeMethodCall(call); messenger.last_message_handler()( message->data(), message->size(), [](const uint8_t* reply, size_t reply_size) {}); EXPECT_TRUE(callback_called); } // Tests that SetMethodCallHandler with a null handler unregisters the handler. 
TEST(MethodChannelTest, Unregistration) { TestBinaryMessenger messenger; const std::string channel_name("some_channel"); MethodChannel channel(&messenger, channel_name, &StandardMethodCodec::GetInstance()); channel.SetMethodCallHandler([](const auto& call, auto result) {}); EXPECT_EQ(messenger.last_message_handler_channel(), channel_name); EXPECT_NE(messenger.last_message_handler(), nullptr); channel.SetMethodCallHandler(nullptr); EXPECT_EQ(messenger.last_message_handler_channel(), channel_name); EXPECT_EQ(messenger.last_message_handler(), nullptr); } TEST(MethodChannelTest, InvokeWithoutResponse) { TestBinaryMessenger messenger; const std::string channel_name("some_channel"); MethodChannel channel(&messenger, channel_name, &StandardMethodCodec::GetInstance()); channel.InvokeMethod("foo", nullptr); EXPECT_TRUE(messenger.send_called()); EXPECT_EQ(messenger.last_reply_handler(), nullptr); } TEST(MethodChannelTest, InvokeWithResponse) { TestBinaryMessenger messenger; const std::string channel_name("some_channel"); MethodChannel channel(&messenger, channel_name, &StandardMethodCodec::GetInstance()); bool received_reply = false; const std::string reply = "bar"; auto result_handler = std::make_unique<MethodResultFunctions<>>( [&received_reply, reply](const EncodableValue* success_value) { received_reply = true; EXPECT_EQ(std::get<std::string>(*success_value), reply); }, nullptr, nullptr); channel.InvokeMethod("foo", nullptr, std::move(result_handler)); EXPECT_TRUE(messenger.send_called()); ASSERT_NE(messenger.last_reply_handler(), nullptr); // Call the underlying reply handler to ensure it's processed correctly. EncodableValue reply_value(reply); std::unique_ptr<std::vector<uint8_t>> encoded_reply = StandardMethodCodec::GetInstance().EncodeSuccessEnvelope(&reply_value); messenger.last_reply_handler()(encoded_reply->data(), encoded_reply->size()); EXPECT_TRUE(received_reply); } TEST(MethodChannelTest, InvokeNotImplemented) { TestBinaryMessenger messenger; const std::string channel_name("some_channel"); MethodChannel channel(&messenger, channel_name, &StandardMethodCodec::GetInstance()); bool received_not_implemented = false; auto result_handler = std::make_unique<MethodResultFunctions<>>( nullptr, nullptr, [&received_not_implemented]() { received_not_implemented = true; }); channel.InvokeMethod("foo", nullptr, std::move(result_handler)); EXPECT_EQ(messenger.send_called(), true); ASSERT_NE(messenger.last_reply_handler(), nullptr); // Call the underlying reply handler to ensure it's reported as unimplemented. messenger.last_reply_handler()(nullptr, 0); EXPECT_TRUE(received_not_implemented); } // Tests that calling Resize generates the binary message expected by the Dart // implementation. TEST(MethodChannelTest, Resize) { TestBinaryMessenger messenger; const std::string channel_name("flutter/test"); MethodChannel channel(&messenger, channel_name, &StandardMethodCodec::GetInstance()); channel.Resize(3); // Because the Dart implementation for the control channel implements its own // custom deserialization logic, this test compares the generated bytes array // to the expected one (for instance, the deserialization logic expects the // size parameter of the resize method call to be an uint32). 
// // The expected content was created from the following Dart code: // MethodCall call = MethodCall('resize', ['flutter/test',3]); // StandardMethodCodec().encodeMethodCall(call).buffer.asUint8List(); const int expected_message_size = 29; EXPECT_EQ(messenger.send_called(), true); EXPECT_EQ(static_cast<int>(messenger.last_message().size()), expected_message_size); int expected[expected_message_size] = { 7, 6, 114, 101, 115, 105, 122, 101, 12, 2, 7, 12, 102, 108, 117, 116, 116, 101, 114, 47, 116, 101, 115, 116, 3, 3, 0, 0, 0}; for (int i = 0; i < expected_message_size; i++) { EXPECT_EQ(messenger.last_message()[i], expected[i]); } } // Tests that calling SetWarnsOnOverflow generates the binary message expected // by the Dart implementation. TEST(MethodChannelTest, SetWarnsOnOverflow) { TestBinaryMessenger messenger; const std::string channel_name("flutter/test"); MethodChannel channel(&messenger, channel_name, &StandardMethodCodec::GetInstance()); channel.SetWarnsOnOverflow(false); // The expected content was created from the following Dart code: // MethodCall call = MethodCall('overflow',['flutter/test', true]); // StandardMethodCodec().encodeMethodCall(call).buffer.asUint8List(); const int expected_message_size = 27; EXPECT_EQ(messenger.send_called(), true); EXPECT_EQ(static_cast<int>(messenger.last_message().size()), expected_message_size); int expected[expected_message_size] = { 7, 8, 111, 118, 101, 114, 102, 108, 111, 119, 12, 2, 7, 12, 102, 108, 117, 116, 116, 101, 114, 47, 116, 101, 115, 116, 1}; for (int i = 0; i < expected_message_size; i++) { EXPECT_EQ(messenger.last_message()[i], expected[i]); } } } // namespace flutter
engine/shell/platform/common/client_wrapper/method_channel_unittests.cc/0
{ "file_path": "engine/shell/platform/common/client_wrapper/method_channel_unittests.cc", "repo_id": "engine", "token_count": 2890 }
332
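The Resize and SetWarnsOnOverflow tests above compare against hand-written byte arrays. As an informal aid (my reading of the standard codec as exercised by these tests, not a normative reference), the annotated constant below spells out how the 29-byte resize payload breaks down; the kExpectedResizeMessage name is only for illustration.

#include <cstdint>

// Annotated copy of the expected payload from MethodChannelTest.Resize.
constexpr uint8_t kExpectedResizeMessage[29] = {
    7,                             // Type tag: string (the method name).
    6,                             // String length: 6 bytes.
    114, 101, 115, 105, 122, 101,  // "resize".
    12,                            // Type tag: list (the arguments).
    2,                             // List length: 2 entries.
    7,                             // Type tag: string (the channel name).
    12,                            // String length: 12 bytes.
    102, 108, 117, 116, 116, 101, 114, 47, 116, 101, 115, 116,  // "flutter/test".
    3,                             // Type tag: 32-bit integer (the new buffer size).
    3, 0, 0, 0,                    // Value 3 in host byte order (little-endian here).
};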
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/shell/platform/common/engine_switches.h" #include "gtest/gtest.h" namespace flutter { namespace { // Sets |key=value| in the environment of this process. void SetEnvironmentVariable(const char* key, const char* value) { #ifdef _WIN32 _putenv_s(key, value); #else setenv(key, value, 1); #endif } // Removes |key| from the environment of this process, if present. void ClearEnvironmentVariable(const char* key) { #ifdef _WIN32 _putenv_s(key, ""); #else unsetenv(key); #endif } } // namespace TEST(FlutterProjectBundle, SwitchesEmpty) { // Clear the main environment variable, since test order is not guaranteed. ClearEnvironmentVariable("FLUTTER_ENGINE_SWITCHES"); EXPECT_EQ(GetSwitchesFromEnvironment().size(), 0U); } #ifdef FLUTTER_RELEASE TEST(FlutterProjectBundle, SwitchesIgnoredInRelease) { SetEnvironmentVariable("FLUTTER_ENGINE_SWITCHES", "2"); SetEnvironmentVariable("FLUTTER_ENGINE_SWITCH_1", "abc"); SetEnvironmentVariable("FLUTTER_ENGINE_SWITCH_2", "foo=\"bar, baz\""); std::vector<std::string> switches = GetSwitchesFromEnvironment(); EXPECT_EQ(switches.size(), 0U); } #endif // FLUTTER_RELEASE #ifndef FLUTTER_RELEASE TEST(FlutterProjectBundle, Switches) { SetEnvironmentVariable("FLUTTER_ENGINE_SWITCHES", "2"); SetEnvironmentVariable("FLUTTER_ENGINE_SWITCH_1", "abc"); SetEnvironmentVariable("FLUTTER_ENGINE_SWITCH_2", "foo=\"bar, baz\""); std::vector<std::string> switches = GetSwitchesFromEnvironment(); EXPECT_EQ(switches.size(), 2U); EXPECT_EQ(switches[0], "--abc"); EXPECT_EQ(switches[1], "--foo=\"bar, baz\""); } TEST(FlutterProjectBundle, SwitchesExtraValues) { SetEnvironmentVariable("FLUTTER_ENGINE_SWITCHES", "1"); SetEnvironmentVariable("FLUTTER_ENGINE_SWITCH_1", "abc"); SetEnvironmentVariable("FLUTTER_ENGINE_SWITCH_2", "foo=\"bar, baz\""); std::vector<std::string> switches = GetSwitchesFromEnvironment(); EXPECT_EQ(switches.size(), 1U); EXPECT_EQ(switches[0], "--abc"); } TEST(FlutterProjectBundle, SwitchesMissingValues) { SetEnvironmentVariable("FLUTTER_ENGINE_SWITCHES", "4"); SetEnvironmentVariable("FLUTTER_ENGINE_SWITCH_1", "abc"); SetEnvironmentVariable("FLUTTER_ENGINE_SWITCH_2", "foo=\"bar, baz\""); ClearEnvironmentVariable("FLUTTER_ENGINE_SWITCH_3"); SetEnvironmentVariable("FLUTTER_ENGINE_SWITCH_4", "oops"); std::vector<std::string> switches = GetSwitchesFromEnvironment(); EXPECT_EQ(switches.size(), 3U); EXPECT_EQ(switches[0], "--abc"); EXPECT_EQ(switches[1], "--foo=\"bar, baz\""); // The missing switch should be skipped, leaving SWITCH_4 as the third // switch in the array. EXPECT_EQ(switches[2], "--oops"); } #endif // !FLUTTER_RELEASE } // namespace flutter
engine/shell/platform/common/engine_switches_unittests.cc/0
{ "file_path": "engine/shell/platform/common/engine_switches_unittests.cc", "repo_id": "engine", "token_count": 1012 }
333
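For context on what these tests exercise: a host application or wrapper shell reads the FLUTTER_ENGINE_SWITCHES / FLUTTER_ENGINE_SWITCH_N variables at startup and forwards the result to the engine. The snippet below is a hypothetical sketch of that consumer side; only GetSwitchesFromEnvironment() from the header included by the tests is assumed, and the switch names in the comment are illustrative.

// Hypothetical consumer of GetSwitchesFromEnvironment(). With, for example,
//   FLUTTER_ENGINE_SWITCHES=2
//   FLUTTER_ENGINE_SWITCH_1=trace-startup
//   FLUTTER_ENGINE_SWITCH_2=verbose-logging
// a non-release build would print "--trace-startup" and "--verbose-logging";
// release builds ignore the variables, as the tests above verify.
#include <iostream>
#include <string>

#include "flutter/shell/platform/common/engine_switches.h"

int main() {
  for (const std::string& engine_switch : flutter::GetSwitchesFromEnvironment()) {
    std::cout << engine_switch << std::endl;
  }
  return 0;
}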
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_SHELL_PLATFORM_COMMON_PATH_UTILS_H_ #define FLUTTER_SHELL_PLATFORM_COMMON_PATH_UTILS_H_ #include <filesystem> namespace flutter { // Returns the path of the directory containing this executable, or an empty // path if the directory cannot be found. std::filesystem::path GetExecutableDirectory(); } // namespace flutter #endif // FLUTTER_SHELL_PLATFORM_COMMON_PATH_UTILS_H_
engine/shell/platform/common/path_utils.h/0
{ "file_path": "engine/shell/platform/common/path_utils.h", "repo_id": "engine", "token_count": 180 }
334
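A small usage sketch for GetExecutableDirectory(): resolving a file relative to the running binary while handling the documented empty-path failure case. The "data/flutter_assets" path component is only an example, not something path_utils.h defines.

#include <filesystem>
#include <iostream>

#include "flutter/shell/platform/common/path_utils.h"

int main() {
  const std::filesystem::path executable_dir = flutter::GetExecutableDirectory();
  if (executable_dir.empty()) {
    // Documented failure mode: the directory could not be determined.
    std::cerr << "Unable to locate the executable directory." << std::endl;
    return 1;
  }
  const std::filesystem::path assets_dir = executable_dir / "data" / "flutter_assets";
  std::cout << "Expecting assets at: " << assets_dir.string() << std::endl;
  return 0;
}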
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_SHELL_PLATFORM_COMMON_TEXT_RANGE_H_ #define FLUTTER_SHELL_PLATFORM_COMMON_TEXT_RANGE_H_ #include <algorithm> #include "flutter/fml/logging.h" namespace flutter { // A directional range of text. // // A |TextRange| describes a range of text with |base| and |extent| positions. // In the case where |base| == |extent|, the range is said to be collapsed, and // when |base| > |extent|, the range is said to be reversed. class TextRange { public: explicit TextRange(size_t position) : base_(position), extent_(position) {} explicit TextRange(size_t base, size_t extent) : base_(base), extent_(extent) {} TextRange(const TextRange&) = default; TextRange& operator=(const TextRange&) = default; virtual ~TextRange() = default; // The base position of the range. size_t base() const { return base_; } // Sets the base position of the range. void set_base(size_t pos) { base_ = pos; } // The extent position of the range. size_t extent() const { return extent_; } // Sets the extent position of the range. void set_extent(size_t pos) { extent_ = pos; } // The lesser of the base and extent positions. size_t start() const { return std::min(base_, extent_); } // Sets the start position of the range. void set_start(size_t pos) { if (base_ <= extent_) { base_ = pos; } else { extent_ = pos; } } // The greater of the base and extent positions. size_t end() const { return std::max(base_, extent_); } // Sets the end position of the range. void set_end(size_t pos) { if (base_ <= extent_) { extent_ = pos; } else { base_ = pos; } } // The position of a collapsed range. // // Asserts that the range is of length 0. size_t position() const { FML_DCHECK(base_ == extent_); return extent_; } // The length of the range. size_t length() const { return end() - start(); } // Returns true if the range is of length 0. bool collapsed() const { return base_ == extent_; } // Returns true if the base is greater than the extent. bool reversed() const { return base_ > extent_; } // Returns true if |position| is contained within the range. bool Contains(size_t position) const { return position >= start() && position <= end(); } // Returns true if |range| is contained within the range. bool Contains(const TextRange& range) const { return range.start() >= start() && range.end() <= end(); } bool operator==(const TextRange& other) const { return base_ == other.base_ && extent_ == other.extent_; } private: size_t base_; size_t extent_; }; } // namespace flutter #endif // FLUTTER_SHELL_PLATFORM_COMMON_TEXT_RANGE_H_
engine/shell/platform/common/text_range.h/0
{ "file_path": "engine/shell/platform/common/text_range.h", "repo_id": "engine", "token_count": 957 }
335
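To make the base/extent terminology above concrete, here is a minimal sketch exercising TextRange; it assumes only the header shown above (and that FML_DCHECK is satisfied in the collapsed case).

#include <cassert>

#include "flutter/shell/platform/common/text_range.h"

int main() {
  // A reversed selection: the extent (caret) precedes the base (anchor).
  flutter::TextRange selection(/*base=*/8, /*extent=*/3);
  assert(selection.reversed());
  assert(selection.start() == 3);  // start() is the lesser of base/extent.
  assert(selection.end() == 8);    // end() is the greater of base/extent.
  assert(selection.length() == 5);
  assert(selection.Contains(flutter::TextRange(4, 6)));

  // A collapsed range models a plain caret position.
  flutter::TextRange caret(5);
  assert(caret.collapsed());
  assert(caret.position() == 5);
  return 0;
}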
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_HEADERS_FLUTTERDARTPROJECT_H_ #define FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_HEADERS_FLUTTERDARTPROJECT_H_ #import <Foundation/Foundation.h> #import <TargetConditionals.h> #import "FlutterMacros.h" NS_ASSUME_NONNULL_BEGIN /** * A set of Flutter and Dart assets used by a `FlutterEngine` to initialize execution. * */ FLUTTER_DARWIN_EXPORT @interface FlutterDartProject : NSObject /** * Initializes a Flutter Dart project from a bundle. * * The bundle must either contain a flutter_assets resource directory, or set the Info.plist key * FLTAssetsPath to override that name (if you are doing a custom build using a different name). * * @param bundle The bundle containing the Flutter assets directory. If nil, the App framework * created by Flutter will be used. */ - (instancetype)initWithPrecompiledDartBundle:(nullable NSBundle*)bundle NS_DESIGNATED_INITIALIZER; /** * Unavailable - use `init` instead. */ - (instancetype)initFromDefaultSourceForConfiguration API_UNAVAILABLE(macos) FLUTTER_UNAVAILABLE("Use -init instead."); /** * Returns the default identifier for the bundle where we expect to find the Flutter Dart * application. */ + (NSString*)defaultBundleIdentifier; /** * An NSArray of NSStrings to be passed as command line arguments to the Dart entrypoint. * * If this is not explicitly set, this will default to the contents of * [NSProcessInfo arguments], without the binary name. * * Set this to nil to pass no arguments to the Dart entrypoint. */ @property(nonatomic, nullable, copy) NSArray<NSString*>* dartEntrypointArguments API_UNAVAILABLE(ios); /** * Returns the file name for the given asset. If the bundle with the identifier * "io.flutter.flutter.app" exists, it will try to use that bundle; otherwise, it * will use the main bundle. To specify a different bundle, use * `+lookupKeyForAsset:fromBundle`. * * @param asset The name of the asset. The name can be hierarchical. * @return the file name to be used for lookup in the main bundle. */ + (NSString*)lookupKeyForAsset:(NSString*)asset; /** * Returns the file name for the given asset. * The returned file name can be used to access the asset in the supplied bundle. * * @param asset The name of the asset. The name can be hierarchical. * @param bundle The `NSBundle` to use for looking up the asset. * @return the file name to be used for lookup in the main bundle. */ + (NSString*)lookupKeyForAsset:(NSString*)asset fromBundle:(nullable NSBundle*)bundle; /** * Returns the file name for the given asset which originates from the specified package. * The returned file name can be used to access the asset in the application's main bundle. * * @param asset The name of the asset. The name can be hierarchical. * @param package The name of the package from which the asset originates. * @return the file name to be used for lookup in the main bundle. */ + (NSString*)lookupKeyForAsset:(NSString*)asset fromPackage:(NSString*)package; /** * Returns the file name for the given asset which originates from the specified package. * The returned file name can be used to access the asset in the specified bundle. * * @param asset The name of the asset. The name can be hierarchical. * @param package The name of the package from which the asset originates. * @param bundle The bundle to use when doing the lookup. 
* @return the file name to be used for lookup in the main bundle. */ + (NSString*)lookupKeyForAsset:(NSString*)asset fromPackage:(NSString*)package fromBundle:(nullable NSBundle*)bundle; @end NS_ASSUME_NONNULL_END #endif // FLUTTER_SHELL_PLATFORM_DARWIN_COMMON_FRAMEWORK_HEADERS_FLUTTERDARTPROJECT_H_
engine/shell/platform/darwin/common/framework/Headers/FlutterDartProject.h/0
{ "file_path": "engine/shell/platform/darwin/common/framework/Headers/FlutterDartProject.h", "repo_id": "engine", "token_count": 1199 }
336
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #import "flutter/shell/platform/darwin/common/framework/Source/FlutterTestUtils.h" #import "flutter/shell/platform/darwin/common/framework/Headers/FlutterMacros.h" FLUTTER_ASSERT_ARC BOOL FLTThrowsObjcException(dispatch_block_t block) { @try { block(); } @catch (...) { return YES; } return NO; }
engine/shell/platform/darwin/common/framework/Source/FlutterTestUtils.mm/0
{ "file_path": "engine/shell/platform/darwin/common/framework/Source/FlutterTestUtils.mm", "repo_id": "engine", "token_count": 166 }
337
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_HEADERS_FLUTTERCALLBACKCACHE_H_ #define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_HEADERS_FLUTTERCALLBACKCACHE_H_ #import <Foundation/Foundation.h> #import "FlutterMacros.h" /** * An object containing the result of `FlutterCallbackCache`'s `lookupCallbackInformation` * method. */ FLUTTER_DARWIN_EXPORT @interface FlutterCallbackInformation : NSObject /** * The name of the callback. */ @property(copy) NSString* callbackName; /** * The class name of the callback. */ @property(copy) NSString* callbackClassName; /** * The library path of the callback. */ @property(copy) NSString* callbackLibraryPath; @end /** * The cache containing callback information for spawning a * `FlutterHeadlessDartRunner`. */ FLUTTER_DARWIN_EXPORT @interface FlutterCallbackCache : NSObject /** * Returns the callback information for the given callback handle. * This callback information can be used when spawning a * `FlutterHeadlessDartRunner`. * * @param handle The handle for a callback, provided by the * Dart method `PluginUtilities.getCallbackHandle`. * @return A `FlutterCallbackInformation` object which contains the name of the * callback, the name of the class in which the callback is defined, and the * path of the library which contains the callback. If the provided handle is * invalid, nil is returned. */ + (FlutterCallbackInformation*)lookupCallbackInformation:(int64_t)handle; @end #endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_HEADERS_FLUTTERCALLBACKCACHE_H_
engine/shell/platform/darwin/ios/framework/Headers/FlutterCallbackCache.h/0
{ "file_path": "engine/shell/platform/darwin/ios/framework/Headers/FlutterCallbackCache.h", "repo_id": "engine", "token_count": 532 }
338
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #import "flutter/shell/platform/darwin/ios/framework/Source/FlutterChannelKeyResponder.h" #import <objc/message.h> #include <sys/types.h> #include "fml/memory/weak_ptr.h" #import "flutter/shell/platform/darwin/common/framework/Headers/FlutterCodecs.h" #import "flutter/shell/platform/darwin/ios/framework/Headers/FlutterEngine.h" #import "flutter/shell/platform/darwin/ios/framework/Source/FlutterUIPressProxy.h" #import "flutter/shell/platform/darwin/ios/framework/Source/FlutterViewController_Internal.h" #import "flutter/shell/platform/darwin/ios/framework/Source/KeyCodeMap_Internal.h" namespace { // An enumeration of the modifier values that the framework expects. These are // largely the same values as the OS (UIKeyModifierShift, etc.), but because the // framework code expects certain values, and has additional values (like the // sided modifier values below), we translate the iOS values to the framework // values, and add a mask for all the possible values. typedef NS_OPTIONS(NSInteger, kKeyboardModifier) { kKeyboardModifierAlphaShift = 0x10000, kKeyboardModifierShift = 0x20000, kKeyboardModifierLeftShift = 0x02, kKeyboardModifierRightShift = 0x04, kKeyboardModifierControl = 0x40000, kKeyboardModifierLeftControl = 0x01, kKeyboardModifierRightControl = 0x2000, kKeyboardModifierOption = 0x80000, kKeyboardModifierLeftOption = 0x20, kKeyboardModifierRightOption = 0x40, kKeyboardModifierCommand = 0x100000, kKeyboardModifierLeftCommand = 0x08, kKeyboardModifierRightCommand = 0x10, kKeyboardModifierNumericPad = 0x200000, kKeyboardModifierMask = kKeyboardModifierAlphaShift | // kKeyboardModifierShift | // kKeyboardModifierLeftShift | // kKeyboardModifierRightShift | // kKeyboardModifierControl | // kKeyboardModifierLeftControl | // kKeyboardModifierRightControl | // kKeyboardModifierOption | // kKeyboardModifierLeftOption | // kKeyboardModifierRightOption | // kKeyboardModifierCommand | // kKeyboardModifierLeftCommand | // kKeyboardModifierRightCommand | // kKeyboardModifierNumericPad, }; /** * Filters out some special cases in the characters field on UIKey. */ static NSString* getEventCharacters(NSString* characters, UIKeyboardHIDUsage keyCode) API_AVAILABLE(ios(13.4)) { if (characters == nil) { return nil; } if ([characters length] == 0) { return nil; } if (@available(iOS 13.4, *)) { // On iOS, function keys return the UTF8 string "\^P" (with a literal '/', // '^' and a 'P', not escaped ctrl-P) as their "characters" field. This // isn't a valid (single) UTF8 character. Looking at the only UTF16 // character for a function key yields a value of "16", which is a Unicode // "SHIFT IN" character, which is just odd. UTF8 conversion of that string // is what generates the three characters "\^P". // // Anyhow, we strip them all out and replace them with empty strings, since // function keys shouldn't be printable. if (functionKeyCodes.find(keyCode) != functionKeyCodes.end()) { return nil; } } return characters; } } // namespace @interface FlutterChannelKeyResponder () /** * The channel used to communicate with Flutter. 
*/ @property(nonatomic) FlutterBasicMessageChannel* channel; - (NSInteger)adjustModifiers:(nonnull FlutterUIPressProxy*)press API_AVAILABLE(ios(13.4)); - (void)updatePressedModifiers:(nonnull FlutterUIPressProxy*)press API_AVAILABLE(ios(13.4)); @property(nonatomic) kKeyboardModifier pressedModifiers; @end @implementation FlutterChannelKeyResponder - (nonnull instancetype)initWithChannel:(nonnull FlutterBasicMessageChannel*)channel { self = [super init]; if (self != nil) { _channel = channel; _pressedModifiers = 0; } return self; } - (void)handlePress:(nonnull FlutterUIPressProxy*)press callback:(nonnull FlutterAsyncKeyCallback)callback API_AVAILABLE(ios(13.4)) { if (@available(iOS 13.4, *)) { // no-op } else { return; } NSString* type; switch (press.phase) { case UIPressPhaseBegan: type = @"keydown"; break; case UIPressPhaseCancelled: // This event doesn't appear to happen on iOS, at least when switching // apps. Maybe on tvOS? In any case, it's probably best to send a keyup if // we do receive one, since if the event was canceled, it's likely that a // keyup will never be received otherwise. case UIPressPhaseEnded: type = @"keyup"; break; case UIPressPhaseChanged: // This only happens for analog devices like joysticks. return; case UIPressPhaseStationary: // The entire volume of documentation of this phase on the Apple site, and // indeed the Internet, is: // "A button was pressed but hasn’t moved since the previous event." // It's unclear what this is actually used for, and we've yet to see it in // the wild. return; } NSString* characters = getEventCharacters(press.key.characters, press.key.keyCode); NSString* charactersIgnoringModifiers = getEventCharacters(press.key.charactersIgnoringModifiers, press.key.keyCode); NSMutableDictionary* keyMessage = [[@{ @"keymap" : @"ios", @"type" : type, @"keyCode" : @(press.key.keyCode), @"modifiers" : @([self adjustModifiers:press]), @"characters" : characters == nil ? @"" : characters, @"charactersIgnoringModifiers" : charactersIgnoringModifiers == nil ? @"" : charactersIgnoringModifiers, } mutableCopy] autorelease]; [self.channel sendMessage:keyMessage reply:^(id reply) { bool handled = reply ? [[reply valueForKey:@"handled"] boolValue] : true; callback(handled); }]; } #pragma mark - Private - (void)updatePressedModifiers:(nonnull FlutterUIPressProxy*)press API_AVAILABLE(ios(13.4)) { if (@available(iOS 13.4, *)) { // no-op } else { return; } bool isKeyDown = false; switch (press.phase) { case UIPressPhaseStationary: case UIPressPhaseChanged: // These kinds of events shouldn't get this far. 
NSAssert(false, @"Unexpected key event type received in updatePressedModifiers."); return; case UIPressPhaseBegan: isKeyDown = true; break; case UIPressPhaseCancelled: case UIPressPhaseEnded: isKeyDown = false; break; } void (^update)(kKeyboardModifier, bool) = ^(kKeyboardModifier mod, bool isOn) { if (isOn) { _pressedModifiers |= mod; } else { _pressedModifiers &= ~mod; } }; switch (press.key.keyCode) { case UIKeyboardHIDUsageKeyboardCapsLock: update(kKeyboardModifierAlphaShift, isKeyDown); break; case UIKeyboardHIDUsageKeypadNumLock: update(kKeyboardModifierNumericPad, isKeyDown); break; case UIKeyboardHIDUsageKeyboardLeftShift: update(kKeyboardModifierLeftShift, isKeyDown); break; case UIKeyboardHIDUsageKeyboardRightShift: update(kKeyboardModifierRightShift, isKeyDown); break; case UIKeyboardHIDUsageKeyboardLeftControl: update(kKeyboardModifierLeftControl, isKeyDown); break; case UIKeyboardHIDUsageKeyboardRightControl: update(kKeyboardModifierRightControl, isKeyDown); break; case UIKeyboardHIDUsageKeyboardLeftAlt: update(kKeyboardModifierLeftOption, isKeyDown); break; case UIKeyboardHIDUsageKeyboardRightAlt: update(kKeyboardModifierRightOption, isKeyDown); break; case UIKeyboardHIDUsageKeyboardLeftGUI: update(kKeyboardModifierLeftCommand, isKeyDown); break; case UIKeyboardHIDUsageKeyboardRightGUI: update(kKeyboardModifierRightCommand, isKeyDown); break; default: // If we didn't update any of the modifiers above, we're done. return; } // Update the non-sided modifier flags to match the content of the sided ones. update(kKeyboardModifierShift, _pressedModifiers & (kKeyboardModifierRightShift | kKeyboardModifierLeftShift)); update(kKeyboardModifierControl, _pressedModifiers & (kKeyboardModifierRightControl | kKeyboardModifierLeftControl)); update(kKeyboardModifierOption, _pressedModifiers & (kKeyboardModifierRightOption | kKeyboardModifierLeftOption)); update(kKeyboardModifierCommand, _pressedModifiers & (kKeyboardModifierRightCommand | kKeyboardModifierLeftCommand)); } // Because iOS differs from macOS in that the modifier flags still contain the // flag for the key that is being released on the keyup event, we adjust the // modifiers when the key being released is the matching modifier key itself. - (NSInteger)adjustModifiers:(nonnull FlutterUIPressProxy*)press API_AVAILABLE(ios(13.4)) { if (@available(iOS 13.4, *)) { // no-op } else { return press.key.modifierFlags; } [self updatePressedModifiers:press]; // Replace the supplied modifier flags with our computed ones. return _pressedModifiers | (press.key.modifierFlags & ~kKeyboardModifierMask); } @end
engine/shell/platform/darwin/ios/framework/Source/FlutterChannelKeyResponder.mm/0
{ "file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterChannelKeyResponder.mm", "repo_id": "engine", "token_count": 3620 }
339
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERENGINE_TEST_H_ #define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERENGINE_TEST_H_ #import "flutter/shell/common/shell.h" #import "flutter/shell/platform/darwin/ios/framework/Headers/FlutterEngine.h" #import "flutter/shell/platform/darwin/ios/framework/Source/FlutterTextInputDelegate.h" #import "flutter/shell/platform/darwin/ios/rendering_api_selection.h" #include "flutter/shell/platform/embedder/embedder.h" extern NSString* const kFlutterEngineWillDealloc; @class FlutterBinaryMessengerRelay; namespace flutter { class ThreadHost; } // Category to add test-only visibility. @interface FlutterEngine (Test) <FlutterBinaryMessenger> @property(readonly, nonatomic) FlutterEngineProcTable& embedderAPI; @property(readonly, nonatomic) BOOL enableEmbedderAPI; - (flutter::Shell&)shell; - (void)setBinaryMessenger:(FlutterBinaryMessengerRelay*)binaryMessenger; - (flutter::IOSRenderingAPI)platformViewsRenderingAPI; - (void)waitForFirstFrame:(NSTimeInterval)timeout callback:(void (^)(BOOL didTimeout))callback; - (FlutterEngine*)spawnWithEntrypoint:(/*nullable*/ NSString*)entrypoint libraryURI:(/*nullable*/ NSString*)libraryURI initialRoute:(/*nullable*/ NSString*)initialRoute entrypointArgs:(/*nullable*/ NSArray<NSString*>*)entrypointArgs; - (const flutter::ThreadHost&)threadHost; - (void)updateDisplays; - (void)flutterTextInputView:(FlutterTextInputView*)textInputView performAction:(FlutterTextInputAction)action withClient:(int)client; - (void)sceneWillEnterForeground:(NSNotification*)notification API_AVAILABLE(ios(13.0)); - (void)sceneDidEnterBackground:(NSNotification*)notification API_AVAILABLE(ios(13.0)); - (void)applicationWillEnterForeground:(NSNotification*)notification; - (void)applicationDidEnterBackground:(NSNotification*)notification; @end #endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERENGINE_TEST_H_
engine/shell/platform/darwin/ios/framework/Source/FlutterEngine_Test.h/0
{ "file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterEngine_Test.h", "repo_id": "engine", "token_count": 809 }
340
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #import "flutter/shell/platform/darwin/ios/framework/Source/FlutterPlatformPlugin.h" #import <AudioToolbox/AudioToolbox.h> #import <Foundation/Foundation.h> #import <UIKit/UIApplication.h> #import <UIKit/UIKit.h> #include "flutter/fml/logging.h" #import "flutter/shell/platform/darwin/ios/framework/Source/FlutterEngine_Internal.h" #import "flutter/shell/platform/darwin/ios/framework/Source/FlutterTextInputPlugin.h" #import "flutter/shell/platform/darwin/ios/framework/Source/FlutterViewController_Internal.h" #import "flutter/shell/platform/darwin/ios/framework/Source/UIViewController+FlutterScreenAndSceneIfLoaded.h" namespace { constexpr char kTextPlainFormat[] = "text/plain"; const UInt32 kKeyPressClickSoundId = 1306; #if not APPLICATION_EXTENSION_API_ONLY const NSString* searchURLPrefix = @"x-web-search://?"; #endif } // namespace namespace flutter { // TODO(abarth): Move these definitions from system_chrome_impl.cc to here. const char* const kOrientationUpdateNotificationName = "io.flutter.plugin.platform.SystemChromeOrientationNotificationName"; const char* const kOrientationUpdateNotificationKey = "io.flutter.plugin.platform.SystemChromeOrientationNotificationKey"; const char* const kOverlayStyleUpdateNotificationName = "io.flutter.plugin.platform.SystemChromeOverlayNotificationName"; const char* const kOverlayStyleUpdateNotificationKey = "io.flutter.plugin.platform.SystemChromeOverlayNotificationKey"; } // namespace flutter using namespace flutter; static void SetStatusBarHiddenForSharedApplication(BOOL hidden) { #if not APPLICATION_EXTENSION_API_ONLY [UIApplication sharedApplication].statusBarHidden = hidden; #else FML_LOG(WARNING) << "Application based status bar styling is not available in app extension."; #endif } static void SetStatusBarStyleForSharedApplication(UIStatusBarStyle style) { #if not APPLICATION_EXTENSION_API_ONLY // Note: -[UIApplication setStatusBarStyle] is deprecated in iOS9 // in favor of delegating to the view controller. [[UIApplication sharedApplication] setStatusBarStyle:style]; #else FML_LOG(WARNING) << "Application based status bar styling is not available in app extension."; #endif } @interface FlutterPlatformPlugin () /** * @brief Whether the status bar appearance is based on the style preferred for this ViewController. * * The default value is YES. * Explicitly add `UIViewControllerBasedStatusBarAppearance` as `false` in * info.plist makes this value to be false. */ @property(nonatomic, assign) BOOL enableViewControllerBasedStatusBarAppearance; @end @implementation FlutterPlatformPlugin { fml::WeakNSObject<FlutterEngine> _engine; // Used to detect whether this device has live text input ability or not. 
UITextField* _textField; } - (instancetype)initWithEngine:(fml::WeakNSObject<FlutterEngine>)engine { FML_DCHECK(engine) << "engine must be set"; self = [super init]; if (self) { _engine = engine; NSObject* infoValue = [[NSBundle mainBundle] objectForInfoDictionaryKey:@"UIViewControllerBasedStatusBarAppearance"]; #if FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_DEBUG if (infoValue != nil && ![infoValue isKindOfClass:[NSNumber class]]) { FML_LOG(ERROR) << "The value of UIViewControllerBasedStatusBarAppearance in info.plist must " "be a Boolean type."; } #endif _enableViewControllerBasedStatusBarAppearance = (infoValue == nil || [(NSNumber*)infoValue boolValue]); } return self; } - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result { NSString* method = call.method; id args = call.arguments; if ([method isEqualToString:@"SystemSound.play"]) { [self playSystemSound:args]; result(nil); } else if ([method isEqualToString:@"HapticFeedback.vibrate"]) { [self vibrateHapticFeedback:args]; result(nil); } else if ([method isEqualToString:@"SystemChrome.setPreferredOrientations"]) { [self setSystemChromePreferredOrientations:args]; result(nil); } else if ([method isEqualToString:@"SystemChrome.setApplicationSwitcherDescription"]) { [self setSystemChromeApplicationSwitcherDescription:args]; result(nil); } else if ([method isEqualToString:@"SystemChrome.setEnabledSystemUIOverlays"]) { [self setSystemChromeEnabledSystemUIOverlays:args]; result(nil); } else if ([method isEqualToString:@"SystemChrome.setEnabledSystemUIMode"]) { [self setSystemChromeEnabledSystemUIMode:args]; result(nil); } else if ([method isEqualToString:@"SystemChrome.restoreSystemUIOverlays"]) { [self restoreSystemChromeSystemUIOverlays]; result(nil); } else if ([method isEqualToString:@"SystemChrome.setSystemUIOverlayStyle"]) { [self setSystemChromeSystemUIOverlayStyle:args]; result(nil); } else if ([method isEqualToString:@"SystemNavigator.pop"]) { NSNumber* isAnimated = args; [self popSystemNavigator:isAnimated.boolValue]; result(nil); } else if ([method isEqualToString:@"Clipboard.getData"]) { result([self getClipboardData:args]); } else if ([method isEqualToString:@"Clipboard.setData"]) { [self setClipboardData:args]; result(nil); } else if ([method isEqualToString:@"Clipboard.hasStrings"]) { result([self clipboardHasStrings]); } else if ([method isEqualToString:@"LiveText.isLiveTextInputAvailable"]) { result(@([self isLiveTextInputAvailable])); } else if ([method isEqualToString:@"SearchWeb.invoke"]) { [self searchWeb:args]; result(nil); } else if ([method isEqualToString:@"LookUp.invoke"]) { [self showLookUpViewController:args]; result(nil); } else if ([method isEqualToString:@"Share.invoke"]) { [self showShareViewController:args]; result(nil); } else { result(FlutterMethodNotImplemented); } } - (void)showShareViewController:(NSString*)content { UIViewController* engineViewController = [_engine.get() viewController]; NSArray* itemsToShare = @[ content ?: [NSNull null] ]; UIActivityViewController* activityViewController = [[[UIActivityViewController alloc] initWithActivityItems:itemsToShare applicationActivities:nil] autorelease]; if (UI_USER_INTERFACE_IDIOM() == UIUserInterfaceIdiomPad) { // On iPad, the share screen is presented in a popover view, and requires a // sourceView and sourceRect FlutterTextInputPlugin* _textInputPlugin = [_engine.get() textInputPlugin]; UITextRange* range = _textInputPlugin.textInputView.selectedTextRange; // firstRectForRange cannot be used here as it's current implementation 
does // not always return the full rect of the range. CGRect firstRect = [(FlutterTextInputView*)_textInputPlugin.textInputView caretRectForPosition:(FlutterTextPosition*)range.start]; CGRect transformedFirstRect = [(FlutterTextInputView*)_textInputPlugin.textInputView localRectFromFrameworkTransform:firstRect]; CGRect lastRect = [(FlutterTextInputView*)_textInputPlugin.textInputView caretRectForPosition:(FlutterTextPosition*)range.end]; CGRect transformedLastRect = [(FlutterTextInputView*)_textInputPlugin.textInputView localRectFromFrameworkTransform:lastRect]; activityViewController.popoverPresentationController.sourceView = engineViewController.view; // In case of RTL Language, get the minimum x coordinate activityViewController.popoverPresentationController.sourceRect = CGRectMake(fmin(transformedFirstRect.origin.x, transformedLastRect.origin.x), transformedFirstRect.origin.y, abs(transformedLastRect.origin.x - transformedFirstRect.origin.x), transformedFirstRect.size.height); } [engineViewController presentViewController:activityViewController animated:YES completion:nil]; } - (void)searchWeb:(NSString*)searchTerm { #if APPLICATION_EXTENSION_API_ONLY FML_LOG(WARNING) << "SearchWeb.invoke is not availabe in app extension."; #else NSString* escapedText = [searchTerm stringByAddingPercentEncodingWithAllowedCharacters:[NSCharacterSet URLHostAllowedCharacterSet]]; NSString* searchURL = [NSString stringWithFormat:@"%@%@", searchURLPrefix, escapedText]; [[UIApplication sharedApplication] openURL:[NSURL URLWithString:searchURL] options:@{} completionHandler:nil]; #endif } - (void)playSystemSound:(NSString*)soundType { if ([soundType isEqualToString:@"SystemSoundType.click"]) { // All feedback types are specific to Android and are treated as equal on // iOS. 
AudioServicesPlaySystemSound(kKeyPressClickSoundId); } } - (void)vibrateHapticFeedback:(NSString*)feedbackType { if (!feedbackType) { AudioServicesPlaySystemSound(kSystemSoundID_Vibrate); return; } if ([@"HapticFeedbackType.lightImpact" isEqualToString:feedbackType]) { [[[[UIImpactFeedbackGenerator alloc] initWithStyle:UIImpactFeedbackStyleLight] autorelease] impactOccurred]; } else if ([@"HapticFeedbackType.mediumImpact" isEqualToString:feedbackType]) { [[[[UIImpactFeedbackGenerator alloc] initWithStyle:UIImpactFeedbackStyleMedium] autorelease] impactOccurred]; } else if ([@"HapticFeedbackType.heavyImpact" isEqualToString:feedbackType]) { [[[[UIImpactFeedbackGenerator alloc] initWithStyle:UIImpactFeedbackStyleHeavy] autorelease] impactOccurred]; } else if ([@"HapticFeedbackType.selectionClick" isEqualToString:feedbackType]) { [[[[UISelectionFeedbackGenerator alloc] init] autorelease] selectionChanged]; } } - (void)setSystemChromePreferredOrientations:(NSArray*)orientations { UIInterfaceOrientationMask mask = 0; if (orientations.count == 0) { mask |= UIInterfaceOrientationMaskAll; } else { for (NSString* orientation in orientations) { if ([orientation isEqualToString:@"DeviceOrientation.portraitUp"]) { mask |= UIInterfaceOrientationMaskPortrait; } else if ([orientation isEqualToString:@"DeviceOrientation.portraitDown"]) { mask |= UIInterfaceOrientationMaskPortraitUpsideDown; } else if ([orientation isEqualToString:@"DeviceOrientation.landscapeLeft"]) { mask |= UIInterfaceOrientationMaskLandscapeLeft; } else if ([orientation isEqualToString:@"DeviceOrientation.landscapeRight"]) { mask |= UIInterfaceOrientationMaskLandscapeRight; } } } if (!mask) { return; } [[NSNotificationCenter defaultCenter] postNotificationName:@(kOrientationUpdateNotificationName) object:nil userInfo:@{@(kOrientationUpdateNotificationKey) : @(mask)}]; } - (void)setSystemChromeApplicationSwitcherDescription:(NSDictionary*)object { // No counterpart on iOS but is a benign operation. So no asserts. } - (void)setSystemChromeEnabledSystemUIOverlays:(NSArray*)overlays { BOOL statusBarShouldBeHidden = ![overlays containsObject:@"SystemUiOverlay.top"]; if ([overlays containsObject:@"SystemUiOverlay.bottom"]) { [[NSNotificationCenter defaultCenter] postNotificationName:FlutterViewControllerShowHomeIndicator object:nil]; } else { [[NSNotificationCenter defaultCenter] postNotificationName:FlutterViewControllerHideHomeIndicator object:nil]; } if (self.enableViewControllerBasedStatusBarAppearance) { [_engine.get() viewController].prefersStatusBarHidden = statusBarShouldBeHidden; } else { // Checks if the top status bar should be visible. This platform ignores all // other overlays // We opt out of view controller based status bar visibility since we want // to be able to modify this on the fly. The key used is // UIViewControllerBasedStatusBarAppearance. SetStatusBarHiddenForSharedApplication(statusBarShouldBeHidden); } } - (void)setSystemChromeEnabledSystemUIMode:(NSString*)mode { BOOL edgeToEdge = [mode isEqualToString:@"SystemUiMode.edgeToEdge"]; if (self.enableViewControllerBasedStatusBarAppearance) { [_engine.get() viewController].prefersStatusBarHidden = !edgeToEdge; } else { // Checks if the top status bar should be visible, reflected by edge to edge setting. This // platform ignores all other system ui modes. // We opt out of view controller based status bar visibility since we want // to be able to modify this on the fly. The key used is // UIViewControllerBasedStatusBarAppearance. 
SetStatusBarHiddenForSharedApplication(!edgeToEdge); } [[NSNotificationCenter defaultCenter] postNotificationName:edgeToEdge ? FlutterViewControllerShowHomeIndicator : FlutterViewControllerHideHomeIndicator object:nil]; } - (void)restoreSystemChromeSystemUIOverlays { // Nothing to do on iOS. } - (void)setSystemChromeSystemUIOverlayStyle:(NSDictionary*)message { NSString* brightness = message[@"statusBarBrightness"]; if (brightness == (id)[NSNull null]) { return; } UIStatusBarStyle statusBarStyle; if ([brightness isEqualToString:@"Brightness.dark"]) { statusBarStyle = UIStatusBarStyleLightContent; } else if ([brightness isEqualToString:@"Brightness.light"]) { if (@available(iOS 13, *)) { statusBarStyle = UIStatusBarStyleDarkContent; } else { statusBarStyle = UIStatusBarStyleDefault; } } else { return; } if (self.enableViewControllerBasedStatusBarAppearance) { // This notification is respected by the iOS embedder. [[NSNotificationCenter defaultCenter] postNotificationName:@(kOverlayStyleUpdateNotificationName) object:nil userInfo:@{@(kOverlayStyleUpdateNotificationKey) : @(statusBarStyle)}]; } else { SetStatusBarStyleForSharedApplication(statusBarStyle); } } - (void)popSystemNavigator:(BOOL)isAnimated { // Apple's human user guidelines say not to terminate iOS applications. However, if the // root view of the app is a navigation controller, it is instructed to back up a level // in the navigation hierarchy. // It's also possible in an Add2App scenario that the FlutterViewController was presented // outside the context of a UINavigationController, and still wants to be popped. FlutterViewController* engineViewController = [_engine.get() viewController]; UINavigationController* navigationController = [engineViewController navigationController]; if (navigationController) { [navigationController popViewControllerAnimated:isAnimated]; } else { UIViewController* rootViewController = nil; #if APPLICATION_EXTENSION_API_ONLY if (@available(iOS 15.0, *)) { rootViewController = [engineViewController flutterWindowSceneIfViewLoaded].keyWindow.rootViewController; } else { FML_LOG(WARNING) << "rootViewController is not available in application extension prior to iOS 15.0."; } #else rootViewController = [UIApplication sharedApplication].keyWindow.rootViewController; #endif if (engineViewController != rootViewController) { [engineViewController dismissViewControllerAnimated:isAnimated completion:nil]; } } } - (NSDictionary*)getClipboardData:(NSString*)format { UIPasteboard* pasteboard = [UIPasteboard generalPasteboard]; if (!format || [format isEqualToString:@(kTextPlainFormat)]) { NSString* stringInPasteboard = pasteboard.string; // The pasteboard may contain an item but it may not be a string (an image for instance). return stringInPasteboard == nil ? 
nil : @{@"text" : stringInPasteboard}; } return nil; } - (void)setClipboardData:(NSDictionary*)data { UIPasteboard* pasteboard = [UIPasteboard generalPasteboard]; id copyText = data[@"text"]; if ([copyText isKindOfClass:[NSString class]]) { pasteboard.string = copyText; } else { pasteboard.string = @"null"; } } - (NSDictionary*)clipboardHasStrings { return @{@"value" : @([UIPasteboard generalPasteboard].hasStrings)}; } - (BOOL)isLiveTextInputAvailable { return [[self textField] canPerformAction:@selector(captureTextFromCamera:) withSender:nil]; } - (void)showLookUpViewController:(NSString*)term { UIViewController* engineViewController = [_engine.get() viewController]; UIReferenceLibraryViewController* referenceLibraryViewController = [[[UIReferenceLibraryViewController alloc] initWithTerm:term] autorelease]; [engineViewController presentViewController:referenceLibraryViewController animated:YES completion:nil]; } - (UITextField*)textField { if (_textField == nil) { _textField = [[UITextField alloc] init]; } return _textField; } - (void)dealloc { [_textField release]; [super dealloc]; } @end
engine/shell/platform/darwin/ios/framework/Source/FlutterPlatformPlugin.mm/0
{ "file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterPlatformPlugin.mm", "repo_id": "engine", "token_count": 5905 }
341
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #import "flutter/shell/platform/darwin/ios/framework/Source/FlutterSpellCheckPlugin.h" #import <OCMock/OCMock.h> #import <XCTest/XCTest.h> #pragma mark - // A mock class representing the UITextChecker used in the tests. // // Because OCMock doesn't support mocking NSRange as method arguments, // this is necessary. @interface MockTextChecker : UITextChecker // The range of misspelled word based on the startingIndex. // // Key is the starting index, value is the range @property(strong, nonatomic) NSMutableDictionary<NSNumber*, NSValue*>* startingIndexToRange; // The suggestions of misspelled word based on the starting index of the misspelled word. // // Key is a string representing the range of the misspelled word, value is the suggestions. @property(strong, nonatomic) NSMutableDictionary<NSString*, NSArray<NSString*>*>* rangeToSuggestions; // Mock the spell checking results. // // When no misspelled word should be detected, pass (NSNotFound, 0) for the `range` parameter, and // an empty array for `suggestions`. // // Call `reset` to remove all the mocks. - (void)mockResultRange:(NSRange)range suggestions:(nonnull NSArray<NSString*>*)suggestions withStartingIndex:(NSInteger)startingIndex; // Remove all mocks. - (void)reset; @end @implementation MockTextChecker - (instancetype)init { self = [super init]; if (self) { _startingIndexToRange = [[NSMutableDictionary alloc] init]; _rangeToSuggestions = [[NSMutableDictionary alloc] init]; } return self; } - (void)mockResultRange:(NSRange)range suggestions:(NSArray<NSString*>*)suggestions withStartingIndex:(NSInteger)startingIndex { NSValue* valueForRange = [NSValue valueWithRange:range]; self.startingIndexToRange[@(startingIndex)] = valueForRange; NSString* rangeString = NSStringFromRange(valueForRange.rangeValue); self.rangeToSuggestions[rangeString] = suggestions; } - (void)reset { [self.startingIndexToRange removeAllObjects]; [self.rangeToSuggestions removeAllObjects]; } #pragma mark UITextChecker Overrides - (NSRange)rangeOfMisspelledWordInString:(NSString*)stringToCheck range:(NSRange)range startingAt:(NSInteger)startingOffset wrap:(BOOL)wrapFlag language:(NSString*)language { return self.startingIndexToRange[@(startingOffset)].rangeValue; } - (NSArray<NSString*>*)guessesForWordRange:(NSRange)range inString:(NSString*)string language:(NSString*)language { return self.rangeToSuggestions[NSStringFromRange(range)]; } @end @interface FlutterSpellCheckPlugin () - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result; - (UITextChecker*)textChecker; @end @interface FlutterSpellCheckPluginTest : XCTestCase @property(strong, nonatomic) id mockMethodChannel; @property(strong, nonatomic) FlutterSpellCheckPlugin* plugin; @property(strong, nonatomic) id mockTextChecker; @property(strong, nonatomic) id partialMockPlugin; @end #pragma mark - @implementation FlutterSpellCheckPluginTest - (void)setUp { [super setUp]; self.mockMethodChannel = OCMClassMock([FlutterMethodChannel class]); self.plugin = [[FlutterSpellCheckPlugin alloc] init]; __weak FlutterSpellCheckPlugin* weakPlugin = self.plugin; OCMStub([self.mockMethodChannel invokeMethod:[OCMArg any] arguments:[OCMArg any] result:[OCMArg any]]) .andDo(^(NSInvocation* invocation) { NSString* name; id args; FlutterResult result; [invocation getArgument:&name atIndex:2]; [invocation getArgument:&args atIndex:3]; [invocation 
getArgument:&result atIndex:4]; FlutterMethodCall* methodCall = [FlutterMethodCall methodCallWithMethodName:name arguments:args]; [weakPlugin handleMethodCall:methodCall result:result]; }); self.mockTextChecker = [[MockTextChecker alloc] init]; } - (void)tearDown { self.plugin = nil; [super tearDown]; } #pragma mark - Tests // Test to make sure the while loop that checks all the misspelled word stops when the a // `NSNotFound` is found. - (void)testFindAllSpellCheckSuggestionsForText { self.partialMockPlugin = OCMPartialMock(self.plugin); OCMStub([self.partialMockPlugin textChecker]).andReturn(self.mockTextChecker); id textCheckerClassMock = OCMClassMock([UITextChecker class]); [[[textCheckerClassMock stub] andReturn:@[ @"en" ]] availableLanguages]; NSArray* suggestions1 = @[ @"suggestion 1", @"suggestion 2" ]; NSArray* suggestions2 = @[ @"suggestion 3", @"suggestion 4" ]; // 0-4 is a misspelled word. [self mockUITextCheckerWithExpectedMisspelledWordRange:NSMakeRange(0, 5) startingIndex:0 suggestions:suggestions1]; // 5-9 is a misspelled word. [self mockUITextCheckerWithExpectedMisspelledWordRange:NSMakeRange(5, 5) startingIndex:5 suggestions:suggestions2]; // No misspelled word after index 10. [self mockUITextCheckerWithExpectedMisspelledWordRange:NSMakeRange(NSNotFound, 0) startingIndex:10 suggestions:@[]]; __block NSArray* capturedResult; [self.mockMethodChannel invokeMethod:@"SpellCheck.initiateSpellCheck" arguments:@[ @"en", @"ksajlkdf aslkdfl kasdf asdfjk" ] result:^(id _Nullable result) { capturedResult = result; }]; XCTAssertTrue(capturedResult.count == 2); NSDictionary* suggestionsJSON1 = capturedResult.firstObject; XCTAssertEqualObjects(suggestionsJSON1[@"startIndex"], @0); XCTAssertEqualObjects(suggestionsJSON1[@"endIndex"], @5); XCTAssertEqualObjects(suggestionsJSON1[@"suggestions"], suggestions1); NSDictionary* suggestionsJSON2 = capturedResult[1]; XCTAssertEqualObjects(suggestionsJSON2[@"startIndex"], @5); XCTAssertEqualObjects(suggestionsJSON2[@"endIndex"], @10); XCTAssertEqualObjects(suggestionsJSON2[@"suggestions"], suggestions2); [self.mockTextChecker reset]; [textCheckerClassMock stopMocking]; } // Test to make sure while loop that checks all the misspelled word stops when the last word is // misspelled (aka nextIndex is out of bounds) - (void)testStopFindingMoreWhenTheLastWordIsMisspelled { self.partialMockPlugin = OCMPartialMock(self.plugin); OCMStub([self.partialMockPlugin textChecker]).andReturn(self.mockTextChecker); id textCheckerClassMock = OCMClassMock([UITextChecker class]); [[[textCheckerClassMock stub] andReturn:@[ @"en" ]] availableLanguages]; NSArray* suggestions1 = @[ @"suggestion 1", @"suggestion 2" ]; NSArray* suggestions2 = @[ @"suggestion 3", @"suggestion 4" ]; // 0-4 is a misspelled word. [self mockUITextCheckerWithExpectedMisspelledWordRange:NSMakeRange(0, 5) startingIndex:0 suggestions:suggestions1]; // 5-9 is a misspelled word. 
[self mockUITextCheckerWithExpectedMisspelledWordRange:NSMakeRange(6, 4) startingIndex:5 suggestions:suggestions2]; __block NSArray* capturedResult; [self.mockMethodChannel invokeMethod:@"SpellCheck.initiateSpellCheck" arguments:@[ @"en", @"hejjo abcd" ] result:^(id _Nullable result) { capturedResult = result; }]; XCTAssertTrue(capturedResult.count == 2); NSDictionary* suggestionsJSON1 = capturedResult.firstObject; XCTAssertEqualObjects(suggestionsJSON1[@"startIndex"], @0); XCTAssertEqualObjects(suggestionsJSON1[@"endIndex"], @5); XCTAssertEqualObjects(suggestionsJSON1[@"suggestions"], suggestions1); NSDictionary* suggestionsJSON2 = capturedResult[1]; XCTAssertEqualObjects(suggestionsJSON2[@"startIndex"], @6); XCTAssertEqualObjects(suggestionsJSON2[@"endIndex"], @10); XCTAssertEqualObjects(suggestionsJSON2[@"suggestions"], suggestions2); [self.mockTextChecker reset]; [textCheckerClassMock stopMocking]; } - (void)testStopFindingMoreWhenTheWholeStringIsAMisspelledWord { self.partialMockPlugin = OCMPartialMock(self.plugin); OCMStub([self.partialMockPlugin textChecker]).andReturn(self.mockTextChecker); id textCheckerClassMock = OCMClassMock([UITextChecker class]); [[[textCheckerClassMock stub] andReturn:@[ @"en" ]] availableLanguages]; NSArray* suggestions1 = @[ @"suggestion 1", @"suggestion 2" ]; // 0-4 is a misspelled word. [self mockUITextCheckerWithExpectedMisspelledWordRange:NSMakeRange(0, 5) startingIndex:0 suggestions:suggestions1]; __block NSArray* capturedResult; [self.mockMethodChannel invokeMethod:@"SpellCheck.initiateSpellCheck" arguments:@[ @"en", @"hejjo" ] result:^(id _Nullable result) { capturedResult = result; }]; XCTAssertTrue(capturedResult.count == 1); NSDictionary* suggestionsJSON1 = capturedResult.firstObject; XCTAssertEqualObjects(suggestionsJSON1[@"startIndex"], @0); XCTAssertEqualObjects(suggestionsJSON1[@"endIndex"], @5); XCTAssertEqualObjects(suggestionsJSON1[@"suggestions"], suggestions1); [self.mockTextChecker reset]; [textCheckerClassMock stopMocking]; } - (void)testInitiateSpellCheckWithNoMisspelledWord { self.partialMockPlugin = OCMPartialMock(self.plugin); OCMStub([self.partialMockPlugin textChecker]).andReturn(self.mockTextChecker); id textCheckerClassMock = OCMClassMock([UITextChecker class]); [[[textCheckerClassMock stub] andReturn:@[ @"en" ]] availableLanguages]; [self mockUITextCheckerWithExpectedMisspelledWordRange:NSMakeRange(NSNotFound, 0) startingIndex:0 suggestions:@[]]; __block id capturedResult; [self.mockMethodChannel invokeMethod:@"SpellCheck.initiateSpellCheck" arguments:@[ @"en", @"helloo" ] result:^(id _Nullable result) { capturedResult = result; }]; XCTAssertEqualObjects(capturedResult, @[]); [textCheckerClassMock stopMocking]; } - (void)testUnsupportedLanguageShouldReturnNil { self.partialMockPlugin = OCMPartialMock(self.plugin); OCMStub([self.partialMockPlugin textChecker]).andReturn(self.mockTextChecker); id textCheckerClassMock = OCMClassMock([UITextChecker class]); [[[textCheckerClassMock stub] andReturn:@[ @"en" ]] availableLanguages]; [self mockUITextCheckerWithExpectedMisspelledWordRange:NSMakeRange(0, 5) startingIndex:0 suggestions:@[]]; __block id capturedResult; [self.mockMethodChannel invokeMethod:@"SpellCheck.initiateSpellCheck" arguments:@[ @"xx", @"helloo" ] result:^(id _Nullable result) { capturedResult = result; }]; XCTAssertNil(capturedResult); [textCheckerClassMock stopMocking]; } - (void)testSupportSubLanguage { self.partialMockPlugin = OCMPartialMock(self.plugin); OCMStub([self.partialMockPlugin 
textChecker]).andReturn(self.mockTextChecker); id textCheckerClassMock = OCMClassMock([UITextChecker class]); [[[textCheckerClassMock stub] andReturn:@[ @"en_US" ]] availableLanguages]; NSArray* suggestions1 = @[ @"suggestion 1", @"suggestion 2" ]; [self mockUITextCheckerWithExpectedMisspelledWordRange:NSMakeRange(0, 5) startingIndex:0 suggestions:suggestions1]; __block NSArray* capturedResult; [self.mockMethodChannel invokeMethod:@"SpellCheck.initiateSpellCheck" arguments:@[ @"en-us", @"hejjo" ] result:^(id _Nullable result) { capturedResult = result; }]; NSDictionary* suggestionsJSON1 = capturedResult.firstObject; XCTAssertEqualObjects(suggestionsJSON1[@"startIndex"], @0); XCTAssertEqualObjects(suggestionsJSON1[@"endIndex"], @5); XCTAssertEqualObjects(suggestionsJSON1[@"suggestions"], suggestions1); [textCheckerClassMock stopMocking]; } - (void)testEmptyStringShouldReturnEmptyResults { self.partialMockPlugin = OCMPartialMock(self.plugin); OCMStub([self.partialMockPlugin textChecker]).andReturn(self.mockTextChecker); // Use real UITextChecker for this as we want to rely on the actual behavior of UITextChecker // to ensure that spell checks on an empty result always return empty. [self.partialMockPlugin stopMocking]; id textCheckerClassMock = OCMClassMock([UITextChecker class]); [[[textCheckerClassMock stub] andReturn:@[ @"en" ]] availableLanguages]; __block id capturedResult; [self.mockMethodChannel invokeMethod:@"SpellCheck.initiateSpellCheck" arguments:@[ @"en", @"" ] result:^(id _Nullable result) { capturedResult = result; }]; XCTAssertEqualObjects(capturedResult, @[]); [textCheckerClassMock stopMocking]; } - (void)testNullStringArgumentShouldReturnNilResults { self.partialMockPlugin = OCMPartialMock(self.plugin); OCMStub([self.partialMockPlugin textChecker]).andReturn(self.mockTextChecker); id textCheckerClassMock = OCMClassMock([UITextChecker class]); [[[textCheckerClassMock stub] andReturn:@[ @"en" ]] availableLanguages]; __block id capturedResult; [self.mockMethodChannel invokeMethod:@"SpellCheck.initiateSpellCheck" arguments:@[ @"en", [NSNull null] ] result:^(id _Nullable result) { capturedResult = result; }]; XCTAssertNil(capturedResult); [textCheckerClassMock stopMocking]; } - (void)testNullLanguageArgumentShouldReturnNilResults { self.partialMockPlugin = OCMPartialMock(self.plugin); OCMStub([self.partialMockPlugin textChecker]).andReturn(self.mockTextChecker); id textCheckerClassMock = OCMClassMock([UITextChecker class]); [[[textCheckerClassMock stub] andReturn:@[ @"en" ]] availableLanguages]; __block id capturedResult; [self.mockMethodChannel invokeMethod:@"SpellCheck.initiateSpellCheck" arguments:@[ [NSNull null], @"some string" ] result:^(id _Nullable result) { capturedResult = result; }]; XCTAssertNil(capturedResult); [textCheckerClassMock stopMocking]; } - (void)testUITextCheckerIsInitializedAfterMethodChannelCall { XCTAssertNil([self.plugin textChecker]); __block id capturedResult; [self.mockMethodChannel invokeMethod:@"SpellCheck.initiateSpellCheck" arguments:@[ [NSNull null], @"some string" ] result:^(id _Nullable result) { capturedResult = result; }]; XCTAssertNotNil([self.plugin textChecker]); } - (void)mockUITextCheckerWithExpectedMisspelledWordRange:(NSRange)expectedRange startingIndex:(NSInteger)startingIndex suggestions:(NSArray*)suggestions { [self.mockTextChecker mockResultRange:expectedRange suggestions:suggestions withStartingIndex:startingIndex]; } @end
engine/shell/platform/darwin/ios/framework/Source/FlutterSpellCheckPluginTest.mm/0
{ "file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterSpellCheckPluginTest.mm", "repo_id": "engine", "token_count": 7171 }
342
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERVIEW_H_ #define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERVIEW_H_ #include <Metal/Metal.h> #import <UIKit/UIKit.h> #include <memory> #include "flutter/fml/memory/weak_ptr.h" #include "flutter/shell/common/shell.h" #import "flutter/shell/platform/darwin/ios/framework/Source/FlutterPlatformViews_Internal.h" #import "flutter/shell/platform/darwin/ios/ios_context.h" #import "flutter/shell/platform/darwin/ios/ios_surface.h" @protocol FlutterViewEngineDelegate <NSObject> @property(nonatomic, readonly) BOOL isUsingImpeller; - (flutter::Rasterizer::Screenshot)takeScreenshot:(flutter::Rasterizer::ScreenshotType)type asBase64Encoded:(BOOL)base64Encode; - (std::shared_ptr<flutter::FlutterPlatformViewsController>&)platformViewsController; /** * A callback that is called when iOS queries accessibility information of the Flutter view. * * This is useful to predict the current iOS accessibility status. For example, there is * no API to listen whether voice control is turned on or off. The Flutter engine uses * this callback to enable semantics in order to catch the case that voice control is * on. */ - (void)flutterViewAccessibilityDidCall; @end @interface FlutterView : UIView - (instancetype)init NS_UNAVAILABLE; + (instancetype)new NS_UNAVAILABLE; - (instancetype)initWithFrame:(CGRect)frame NS_UNAVAILABLE; - (instancetype)initWithCoder:(NSCoder*)aDecoder NS_UNAVAILABLE; - (instancetype)initWithDelegate:(id<FlutterViewEngineDelegate>)delegate opaque:(BOOL)opaque enableWideGamut:(BOOL)isWideGamutEnabled NS_DESIGNATED_INITIALIZER; - (UIScreen*)screen; - (MTLPixelFormat)pixelFormat; // Set by FlutterEngine or FlutterViewController to override software rendering. @property(class, nonatomic) BOOL forceSoftwareRendering; @end #endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_FLUTTERVIEW_H_
engine/shell/platform/darwin/ios/framework/Source/FlutterView.h/0
{ "file_path": "engine/shell/platform/darwin/ios/framework/Source/FlutterView.h", "repo_id": "engine", "token_count": 783 }
343
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #import <XCTest/XCTest.h> #import "flutter/shell/platform/darwin/common/framework/Headers/FlutterMacros.h" #import "flutter/shell/platform/darwin/ios/framework/Source/UIViewController+FlutterScreenAndSceneIfLoaded.h" FLUTTER_ASSERT_ARC @interface UIViewController_FlutterViewAndSceneIfLoadedTest : XCTestCase @end @implementation UIViewController_FlutterViewAndSceneIfLoadedTest - (void)testWindowSceneIfViewLoadedReturnsWindowSceneIfViewLoaded { if (@available(iOS 13.0, *)) { UIViewController* viewController = [[UIViewController alloc] initWithNibName:nil bundle:nil]; NSSet<UIScene*>* scenes = UIApplication.sharedApplication.connectedScenes; XCTAssertEqual(scenes.count, 1UL, @"There must only be 1 scene for test"); UIScene* scene = scenes.anyObject; XCTAssert([scene isKindOfClass:[UIWindowScene class]], @"Must be a window scene for test"); UIWindowScene* windowScene = (UIWindowScene*)scene; XCTAssert(windowScene.windows.count > 0, @"There must be at least 1 window for test"); UIWindow* window = windowScene.windows[0]; [window addSubview:viewController.view]; [viewController loadView]; XCTAssertEqual(viewController.flutterWindowSceneIfViewLoaded, windowScene, @"Must return the correct window scene when view loaded"); } } - (void)testWindowSceneIfViewLoadedReturnsNilIfViewNotLoaded { if (@available(iOS 13.0, *)) { UIViewController* viewController = [[UIViewController alloc] initWithNibName:nil bundle:nil]; XCTAssertNil(viewController.flutterWindowSceneIfViewLoaded, @"Must return nil window scene when view not loaded"); } } - (void)testScreenIfViewLoadedReturnsMainScreenBeforeIOS13 { if (@available(iOS 13.0, *)) { return; } UIViewController* viewController = [[UIViewController alloc] initWithNibName:nil bundle:nil]; XCTAssertEqual(viewController.flutterScreenIfViewLoaded, UIScreen.mainScreen, @"Must return UIScreen.mainScreen before iOS 13"); } - (void)testScreenIfViewLoadedReturnsScreenIfViewLoadedAfterIOS13 { if (@available(iOS 13.0, *)) { UIViewController* viewController = [[UIViewController alloc] initWithNibName:nil bundle:nil]; NSSet<UIScene*>* scenes = UIApplication.sharedApplication.connectedScenes; XCTAssertEqual(scenes.count, 1UL, @"There must only be 1 scene for test"); UIScene* scene = scenes.anyObject; XCTAssert([scene isKindOfClass:[UIWindowScene class]], @"Must be a window scene for test"); UIWindowScene* windowScene = (UIWindowScene*)scene; XCTAssert(windowScene.windows.count > 0, @"There must be at least 1 window for test"); UIWindow* window = windowScene.windows[0]; [window addSubview:viewController.view]; [viewController loadView]; XCTAssertEqual(viewController.flutterScreenIfViewLoaded, windowScene.screen, @"Must return the correct screen when view loaded"); } } - (void)testScreenIfViewLoadedReturnsNilIfViewNotLoadedAfterIOS13 { if (@available(iOS 13.0, *)) { UIViewController* viewController = [[UIViewController alloc] initWithNibName:nil bundle:nil]; XCTAssertNil(viewController.flutterScreenIfViewLoaded, @"Must return nil screen when view not loaded"); } } @end
engine/shell/platform/darwin/ios/framework/Source/UIViewController_FlutterScreenAndSceneIfLoadedTest.mm/0
{ "file_path": "engine/shell/platform/darwin/ios/framework/Source/UIViewController_FlutterScreenAndSceneIfLoadedTest.mm", "repo_id": "engine", "token_count": 1202 }
344
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_VSYNC_WAITER_IOS_H_ #define FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_VSYNC_WAITER_IOS_H_ #include <QuartzCore/CADisplayLink.h> #include "flutter/fml/macros.h" #include "flutter/fml/memory/weak_ptr.h" #include "flutter/fml/platform/darwin/scoped_nsobject.h" #include "flutter/shell/common/variable_refresh_rate_reporter.h" #include "flutter/shell/common/vsync_waiter.h" @interface DisplayLinkManager : NSObject // Whether the max refresh rate on iPhone Pro-motion devices is enabled. // This reflects the value of `CADisableMinimumFrameDurationOnPhone` in the // info.plist file. // // Note on iPads that support Pro-motion, the max refresh rate is always enabled. @property(class, nonatomic, readonly) BOOL maxRefreshRateEnabledOnIPhone; //------------------------------------------------------------------------------ /// @brief The display refresh rate used for reporting purposes. The engine does not care /// about this for frame scheduling. It is only used by tools for instrumentation. The /// engine uses the duration field of the link per frame for frame scheduling. /// /// @attention Do not use this call in frame scheduling. It is only meant for reporting. /// /// @return The refresh rate in frames per second. /// + (double)displayRefreshRate; @end @interface VSyncClient : NSObject //------------------------------------------------------------------------------ /// @brief Default value is YES. Vsync client will pause vsync callback after receiving /// a vsync signal. Setting this property to NO can avoid this and vsync client /// will trigger vsync callback continuously. /// /// /// @param allowPauseAfterVsync Allow vsync client to pause after receiving a vsync signal. /// @property(nonatomic, assign) BOOL allowPauseAfterVsync; - (instancetype)initWithTaskRunner:(fml::RefPtr<fml::TaskRunner>)task_runner callback:(flutter::VsyncWaiter::Callback)callback; - (void)await; - (void)pause; - (void)invalidate; - (double)getRefreshRate; - (void)setMaxRefreshRate:(double)refreshRate; @end namespace flutter { class VsyncWaiterIOS final : public VsyncWaiter, public VariableRefreshRateReporter { public: explicit VsyncWaiterIOS(const flutter::TaskRunners& task_runners); ~VsyncWaiterIOS() override; // |VariableRefreshRateReporter| double GetRefreshRate() const override; // Made public for testing. fml::scoped_nsobject<VSyncClient> GetVsyncClient() const; // |VsyncWaiter| // Made public for testing. void AwaitVSync() override; private: fml::scoped_nsobject<VSyncClient> client_; double max_refresh_rate_; FML_DISALLOW_COPY_AND_ASSIGN(VsyncWaiterIOS); }; } // namespace flutter #endif // FLUTTER_SHELL_PLATFORM_DARWIN_IOS_FRAMEWORK_SOURCE_VSYNC_WAITER_IOS_H_
engine/shell/platform/darwin/ios/framework/Source/vsync_waiter_ios.h/0
{ "file_path": "engine/shell/platform/darwin/ios/framework/Source/vsync_waiter_ios.h", "repo_id": "engine", "token_count": 1012 }
345
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #import "flutter/shell/platform/darwin/ios/ios_surface.h" #import "flutter/shell/platform/darwin/ios/ios_surface_software.h" #include "flutter/shell/platform/darwin/ios/rendering_api_selection.h" #if SHELL_ENABLE_METAL #import "flutter/shell/platform/darwin/ios/ios_surface_metal_impeller.h" #import "flutter/shell/platform/darwin/ios/ios_surface_metal_skia.h" #endif // SHELL_ENABLE_METAL namespace flutter { std::unique_ptr<IOSSurface> IOSSurface::Create(std::shared_ptr<IOSContext> context, const fml::scoped_nsobject<CALayer>& layer) { FML_DCHECK(layer); FML_DCHECK(context); #if SHELL_ENABLE_METAL if (@available(iOS METAL_IOS_VERSION_BASELINE, *)) { if ([layer.get() isKindOfClass:[CAMetalLayer class]]) { switch (context->GetBackend()) { case IOSRenderingBackend::kSkia: return std::make_unique<IOSSurfaceMetalSkia>( fml::scoped_nsobject<CAMetalLayer>( reinterpret_cast<CAMetalLayer*>([layer.get() retain])), // Metal layer std::move(context) // context ); break; case IOSRenderingBackend::kImpeller: return std::make_unique<IOSSurfaceMetalImpeller>( fml::scoped_nsobject<CAMetalLayer>( reinterpret_cast<CAMetalLayer*>([layer.get() retain])), // Metal layer std::move(context) // context ); } } } #endif // SHELL_ENABLE_METAL return std::make_unique<IOSSurfaceSoftware>(layer, // layer std::move(context) // context ); } IOSSurface::IOSSurface(std::shared_ptr<IOSContext> ios_context) : ios_context_(std::move(ios_context)) { FML_DCHECK(ios_context_); } IOSSurface::~IOSSurface() = default; std::shared_ptr<IOSContext> IOSSurface::GetContext() const { return ios_context_; } } // namespace flutter
engine/shell/platform/darwin/ios/ios_surface.mm/0
{ "file_path": "engine/shell/platform/darwin/ios/ios_surface.mm", "repo_id": "engine", "token_count": 1032 }
346
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_HEADERS_FLUTTERAPPDELEGATE_H_ #define FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_HEADERS_FLUTTERAPPDELEGATE_H_ #import <Cocoa/Cocoa.h> #import "FlutterAppLifecycleDelegate.h" #import "FlutterMacros.h" /** * A protocol to be implemented by the `NSApplicationDelegate` of an application to enable the * Flutter framework and any Flutter plugins to register to receive application life cycle events. * * Implementers should forward all of the `NSApplicationDelegate` methods corresponding to the * handlers in FlutterAppLifecycleDelegate to any registered delegates. */ FLUTTER_DARWIN_EXPORT @protocol FlutterAppLifecycleProvider <NSObject> /** * Adds an object implementing |FlutterAppLifecycleDelegate| to the list of * delegates to be informed of application lifecycle events. */ - (void)addApplicationLifecycleDelegate:(nonnull NSObject<FlutterAppLifecycleDelegate>*)delegate; /** * Removes an object implementing |FlutterAppLifecycleDelegate| from the list of * delegates to be informed of application lifecycle events. */ - (void)removeApplicationLifecycleDelegate:(nonnull NSObject<FlutterAppLifecycleDelegate>*)delegate; @end /** * |NSApplicationDelegate| subclass for simple apps that want default behavior. * * This class implements the following behaviors: * * Updates the application name of items in the application menu to match the name in * the app's Info.plist, assuming it is set to APP_NAME initially. |applicationMenu| must be * set before the application finishes launching for this to take effect. * * Updates the main Flutter window's title to match the name in the app's Info.plist. * |mainFlutterWindow| must be set before the application finishes launching for this to take * effect. * * Forwards `NSApplicationDelegate` callbacks to plugins that register for them. * * App delegates for Flutter applications are *not* required to inherit from * this class. Developers of custom app delegate classes should copy and paste * code as necessary from FlutterAppDelegate.mm. */ FLUTTER_DARWIN_EXPORT @interface FlutterAppDelegate : NSObject <NSApplicationDelegate, FlutterAppLifecycleProvider> /** * The application menu in the menu bar. */ @property(weak, nonatomic, nullable) IBOutlet NSMenu* applicationMenu; /** * The primary application window containing a FlutterViewController. This is * primarily intended for use in single-window applications. */ @property(weak, nonatomic, nullable) IBOutlet NSWindow* mainFlutterWindow; @end #endif // FLUTTER_SHELL_PLATFORM_DARWIN_MACOS_FRAMEWORK_HEADERS_FLUTTERAPPDELEGATE_H_
engine/shell/platform/darwin/macos/framework/Headers/FlutterAppDelegate.h/0
{ "file_path": "engine/shell/platform/darwin/macos/framework/Headers/FlutterAppDelegate.h", "repo_id": "engine", "token_count": 826 }
347
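FlutterAppDelegate.h above asks implementers of FlutterAppLifecycleProvider to forward the matching NSApplicationDelegate callbacks to every registered delegate. The sketch below is one way a custom app delegate might do that for a single callback, assuming the FlutterMacOS umbrella header is available; the MyAppDelegate name, the weak-reference storage, and the respondsToSelector: guard are assumptions of this sketch, not the FlutterAppDelegate implementation.

#import <Cocoa/Cocoa.h>
#import <FlutterMacOS/FlutterMacOS.h>  // Assumed umbrella header for the macOS Flutter framework.

@interface MyAppDelegate : NSObject <NSApplicationDelegate, FlutterAppLifecycleProvider>
@end

@implementation MyAppDelegate {
  NSPointerArray* _lifecycleDelegates;  // Weak references, so registered delegates can deallocate freely.
}

- (instancetype)init {
  self = [super init];
  if (self) {
    _lifecycleDelegates = [NSPointerArray weakObjectsPointerArray];
  }
  return self;
}

- (void)addApplicationLifecycleDelegate:(NSObject<FlutterAppLifecycleDelegate>*)delegate {
  [_lifecycleDelegates addPointer:(__bridge void*)delegate];
}

- (void)removeApplicationLifecycleDelegate:(NSObject<FlutterAppLifecycleDelegate>*)delegate {
  for (NSUInteger i = 0; i < _lifecycleDelegates.count; i++) {
    if ([_lifecycleDelegates pointerAtIndex:i] == (__bridge void*)delegate) {
      [_lifecycleDelegates removePointerAtIndex:i];
      return;
    }
  }
}

// Only one NSApplicationDelegate callback is forwarded in this sketch; a real
// provider forwards every callback that has a FlutterAppLifecycleDelegate handler.
- (void)applicationDidFinishLaunching:(NSNotification*)notification {
  for (NSObject<FlutterAppLifecycleDelegate>* delegate in _lifecycleDelegates) {
    if ([delegate respondsToSelector:@selector(handleDidFinishLaunching:)]) {
      [delegate handleDidFinishLaunching:notification];
    }
  }
}

@end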
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #import "flutter/shell/platform/darwin/macos/framework/Headers/FlutterAppLifecycleDelegate.h" #import "flutter/testing/testing.h" #include "third_party/googletest/googletest/include/gtest/gtest.h" @interface TestFlutterAppLifecycleDelegate : NSObject <FlutterAppLifecycleDelegate> @property(nonatomic, readwrite, nullable) NSNotification* lastNotification; @end @implementation TestFlutterAppLifecycleDelegate - (void)setNotification:(NSNotification*)notification { self.lastNotification = notification; } - (void)handleWillFinishLaunching:(NSNotification*)notification { [self setNotification:notification]; } - (void)handleDidFinishLaunching:(NSNotification*)notification { [self setNotification:notification]; } - (void)handleWillBecomeActive:(NSNotification*)notification { [self setNotification:notification]; } - (void)handleDidBecomeActive:(NSNotification*)notification { [self setNotification:notification]; } - (void)handleWillResignActive:(NSNotification*)notification { [self setNotification:notification]; } - (void)handleDidResignActive:(NSNotification*)notification { [self setNotification:notification]; } - (void)handleWillHide:(NSNotification*)notification { [self setNotification:notification]; } - (void)handleDidHide:(NSNotification*)notification { [self setNotification:notification]; } - (void)handleWillUnhide:(NSNotification*)notification { [self setNotification:notification]; } - (void)handleDidUnhide:(NSNotification*)notification { [self setNotification:notification]; } - (void)handleDidChangeScreenParameters:(NSNotification*)notification { [self setNotification:notification]; } - (void)handleDidChangeOcclusionState:(NSNotification*)notification API_AVAILABLE(macos(10.9)) { [self setNotification:notification]; } - (void)handleWillTerminate:(NSNotification*)notification { [self setNotification:notification]; } @end namespace flutter::testing { TEST(FlutterAppLifecycleDelegateTest, RespondsToWillFinishLaunching) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* willFinishLaunching = [NSNotification notificationWithName:NSApplicationWillFinishLaunchingNotification object:nil]; [registrar handleWillFinishLaunching:willFinishLaunching]; EXPECT_EQ([delegate lastNotification], willFinishLaunching); } TEST(FlutterAppLifecycleDelegateTest, RespondsToDidFinishLaunching) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* didFinishLaunching = [NSNotification notificationWithName:NSApplicationDidFinishLaunchingNotification object:nil]; [registrar handleDidFinishLaunching:didFinishLaunching]; EXPECT_EQ([delegate lastNotification], didFinishLaunching); } TEST(FlutterAppLifecycleDelegateTest, RespondsToWillBecomeActive) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* willBecomeActive = [NSNotification notificationWithName:NSApplicationWillBecomeActiveNotification object:nil]; [registrar 
handleWillBecomeActive:willBecomeActive]; EXPECT_EQ([delegate lastNotification], willBecomeActive); } TEST(FlutterAppLifecycleDelegateTest, RespondsToDidBecomeActive) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* didBecomeActive = [NSNotification notificationWithName:NSApplicationDidBecomeActiveNotification object:nil]; [registrar handleDidBecomeActive:didBecomeActive]; EXPECT_EQ([delegate lastNotification], didBecomeActive); } TEST(FlutterAppLifecycleDelegateTest, RespondsToWillResignActive) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* willResignActive = [NSNotification notificationWithName:NSApplicationWillResignActiveNotification object:nil]; [registrar handleWillResignActive:willResignActive]; EXPECT_EQ([delegate lastNotification], willResignActive); } TEST(FlutterAppLifecycleDelegateTest, RespondsToDidResignActive) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* didResignActive = [NSNotification notificationWithName:NSApplicationDidResignActiveNotification object:nil]; [registrar handleDidResignActive:didResignActive]; EXPECT_EQ([delegate lastNotification], didResignActive); } TEST(FlutterAppLifecycleDelegateTest, RespondsToWillTerminate) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* applicationWillTerminate = [NSNotification notificationWithName:NSApplicationWillTerminateNotification object:nil]; [registrar handleWillTerminate:applicationWillTerminate]; EXPECT_EQ([delegate lastNotification], applicationWillTerminate); } TEST(FlutterAppLifecycleDelegateTest, RespondsToWillHide) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* willHide = [NSNotification notificationWithName:NSApplicationWillHideNotification object:nil]; [registrar handleWillHide:willHide]; EXPECT_EQ([delegate lastNotification], willHide); } TEST(FlutterAppLifecycleDelegateTest, RespondsToWillUnhide) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* willUnhide = [NSNotification notificationWithName:NSApplicationWillUnhideNotification object:nil]; [registrar handleWillUnhide:willUnhide]; EXPECT_EQ([delegate lastNotification], willUnhide); } TEST(FlutterAppLifecycleDelegateTest, RespondsToDidHide) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* didHide = [NSNotification notificationWithName:NSApplicationDidHideNotification object:nil]; [registrar 
handleDidHide:didHide]; EXPECT_EQ([delegate lastNotification], didHide); } TEST(FlutterAppLifecycleDelegateTest, RespondsToDidUnhide) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* didUnhide = [NSNotification notificationWithName:NSApplicationDidUnhideNotification object:nil]; [registrar handleDidUnhide:didUnhide]; EXPECT_EQ([delegate lastNotification], didUnhide); } TEST(FlutterAppLifecycleDelegateTest, RespondsToDidChangeScreenParameters) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* didChangeScreenParameters = [NSNotification notificationWithName:NSApplicationDidChangeScreenParametersNotification object:nil]; [registrar handleDidChangeScreenParameters:didChangeScreenParameters]; EXPECT_EQ([delegate lastNotification], didChangeScreenParameters); } TEST(FlutterAppLifecycleDelegateTest, RespondsToDidChangeOcclusionState) { FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init]; TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init]; [registrar addDelegate:delegate]; NSNotification* didChangeOcclusionState = [NSNotification notificationWithName:NSApplicationDidChangeOcclusionStateNotification object:nil]; if ([registrar respondsToSelector:@selector(handleDidChangeOcclusionState:)]) { [registrar handleDidChangeOcclusionState:didChangeOcclusionState]; EXPECT_EQ([delegate lastNotification], didChangeOcclusionState); } } } // namespace flutter::testing
engine/shell/platform/darwin/macos/framework/Source/FlutterAppLifecycleDelegateTest.mm/0
{ "file_path": "engine/shell/platform/darwin/macos/framework/Source/FlutterAppLifecycleDelegateTest.mm", "repo_id": "engine", "token_count": 3052 }
348
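Each test above repeats the same arrange/act/assert shape: build a registrar, register a TestFlutterAppLifecycleDelegate, post a notification through one handler, and check lastNotification. A helper along the lines below could express that shape once; the helper name, the objc_msgSend dispatch, and the extra test case are assumptions of this sketch, and it reuses the types declared in the test file above.

#import <objc/message.h>

static void CheckForwardsNotification(NSNotificationName name, SEL registrarHandler) {
  FlutterAppLifecycleRegistrar* registrar = [[FlutterAppLifecycleRegistrar alloc] init];
  TestFlutterAppLifecycleDelegate* delegate = [[TestFlutterAppLifecycleDelegate alloc] init];
  [registrar addDelegate:delegate];
  NSNotification* notification = [NSNotification notificationWithName:name object:nil];
  // Invoke the given registrar handler (e.g. -handleWillHide:) with the notification.
  ((void (*)(id, SEL, NSNotification*))objc_msgSend)(registrar, registrarHandler, notification);
  EXPECT_EQ([delegate lastNotification], notification);
}

TEST(FlutterAppLifecycleDelegateTest, RespondsToWillHideViaHelper) {
  CheckForwardsNotification(NSApplicationWillHideNotification, @selector(handleWillHide:));
}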
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #import <objc/message.h> #include <memory> #import "FlutterEmbedderKeyResponder.h" #import "KeyCodeMap_Internal.h" #import "flutter/shell/platform/darwin/common/framework/Headers/FlutterCodecs.h" #import "flutter/shell/platform/darwin/macos/framework/Source/FlutterViewController_Internal.h" #import "flutter/shell/platform/embedder/embedder.h" namespace { /** * Isolate the least significant 1-bit. * * For example, * * * lowestSetBit(0x1010) returns 0x10. * * lowestSetBit(0) returns 0. */ static NSUInteger lowestSetBit(NSUInteger bitmask) { // This utilizes property of two's complement (negation), which propagates a // carry bit from LSB to the lowest set bit. return bitmask & -bitmask; } /** * Whether a string represents a control character. */ static bool IsControlCharacter(uint64_t character) { return (character <= 0x1f && character >= 0x00) || (character >= 0x7f && character <= 0x9f); } /** * Whether a string represents an unprintable key. */ static bool IsUnprintableKey(uint64_t character) { return character >= 0xF700 && character <= 0xF8FF; } /** * Returns a key code composed with a base key and a plane. * * Examples of unprintable keys are "NSUpArrowFunctionKey = 0xF700" or * "NSHomeFunctionKey = 0xF729". * * See * https://developer.apple.com/documentation/appkit/1535851-function-key_unicodes?language=objc * for more information. */ static uint64_t KeyOfPlane(uint64_t baseKey, uint64_t plane) { return plane | (baseKey & flutter::kValueMask); } /** * Returns the physical key for a key code. */ static uint64_t GetPhysicalKeyForKeyCode(unsigned short keyCode) { NSNumber* physicalKey = [flutter::keyCodeToPhysicalKey objectForKey:@(keyCode)]; if (physicalKey == nil) { return KeyOfPlane(keyCode, flutter::kMacosPlane); } return physicalKey.unsignedLongLongValue; } /** * Returns the logical key for a modifier physical key. */ static uint64_t GetLogicalKeyForModifier(unsigned short keyCode, uint64_t hidCode) { NSNumber* fromKeyCode = [flutter::keyCodeToLogicalKey objectForKey:@(keyCode)]; if (fromKeyCode != nil) { return fromKeyCode.unsignedLongLongValue; } return KeyOfPlane(hidCode, flutter::kMacosPlane); } /** * Converts upper letters to lower letters in ASCII, and returns as-is * otherwise. * * Independent of locale. */ static uint64_t toLower(uint64_t n) { constexpr uint64_t lowerA = 0x61; constexpr uint64_t upperA = 0x41; constexpr uint64_t upperZ = 0x5a; constexpr uint64_t lowerAGrave = 0xe0; constexpr uint64_t upperAGrave = 0xc0; constexpr uint64_t upperThorn = 0xde; constexpr uint64_t division = 0xf7; // ASCII range. if (n >= upperA && n <= upperZ) { return n - upperA + lowerA; } // EASCII range. if (n >= upperAGrave && n <= upperThorn && n != division) { return n - upperAGrave + lowerAGrave; } return n; } // Decode a UTF-16 sequence to an array of char32 (UTF-32). // // See https://en.wikipedia.org/wiki/UTF-16#Description for the algorithm. // // The returned character array must be deallocated with delete[]. The length of // the result is stored in `out_length`. // // Although NSString has a dataUsingEncoding method, we implement our own // because dataUsingEncoding outputs redundant characters for unknown reasons. static uint32_t* DecodeUtf16(NSString* target, size_t* out_length) { // The result always has a length less or equal to target. 
size_t result_pos = 0; uint32_t* result = new uint32_t[target.length]; uint16_t high_surrogate = 0; for (NSUInteger target_pos = 0; target_pos < target.length; target_pos += 1) { uint16_t codeUnit = [target characterAtIndex:target_pos]; // BMP if (codeUnit <= 0xD7FF || codeUnit >= 0xE000) { result[result_pos] = codeUnit; result_pos += 1; // High surrogates } else if (codeUnit <= 0xDBFF) { high_surrogate = codeUnit - 0xD800; // Low surrogates } else { uint16_t low_surrogate = codeUnit - 0xDC00; result[result_pos] = (high_surrogate << 10) + low_surrogate + 0x10000; result_pos += 1; } } *out_length = result_pos; return result; } /** * Returns the logical key of a KeyUp or KeyDown event. * * For FlagsChanged event, use GetLogicalKeyForModifier. */ static uint64_t GetLogicalKeyForEvent(NSEvent* event, uint64_t physicalKey) { // Look to see if the keyCode can be mapped from keycode. NSNumber* fromKeyCode = [flutter::keyCodeToLogicalKey objectForKey:@(event.keyCode)]; if (fromKeyCode != nil) { return fromKeyCode.unsignedLongLongValue; } // Convert `charactersIgnoringModifiers` to UTF32. NSString* keyLabelUtf16 = event.charactersIgnoringModifiers; // Check if this key is a single character, which will be used to generate the // logical key from its Unicode value. // // Multi-char keys will be minted onto the macOS plane because there are no // meaningful values for them. Control keys and unprintable keys have been // converted by `keyCodeToLogicalKey` earlier. uint32_t character = 0; if (keyLabelUtf16.length != 0) { size_t keyLabelLength; uint32_t* keyLabel = DecodeUtf16(keyLabelUtf16, &keyLabelLength); if (keyLabelLength == 1) { uint32_t keyLabelChar = *keyLabel; NSCAssert(!IsControlCharacter(keyLabelChar) && !IsUnprintableKey(keyLabelChar), @"Unexpected control or unprintable keylabel 0x%x", keyLabelChar); NSCAssert(keyLabelChar <= 0x10FFFF, @"Out of range keylabel 0x%x", keyLabelChar); character = keyLabelChar; } delete[] keyLabel; } if (character != 0) { return KeyOfPlane(toLower(character), flutter::kUnicodePlane); } // We can't represent this key with a single printable unicode, so a new code // is minted to the macOS plane. return KeyOfPlane(event.keyCode, flutter::kMacosPlane); } /** * Converts NSEvent.timestamp to the timestamp for Flutter. */ static double GetFlutterTimestampFrom(NSTimeInterval timestamp) { // Timestamp in microseconds. The event.timestamp is in seconds with sub-ms precision. return timestamp * 1000000.0; } /** * Compute |modifierFlagOfInterestMask| out of |keyCodeToModifierFlag|. * * This is equal to the bitwise-or of all values of |keyCodeToModifierFlag| as * well as NSEventModifierFlagCapsLock. */ static NSUInteger computeModifierFlagOfInterestMask() { __block NSUInteger modifierFlagOfInterestMask = NSEventModifierFlagCapsLock; [flutter::keyCodeToModifierFlag enumerateKeysAndObjectsUsingBlock:^(NSNumber* keyCode, NSNumber* flag, BOOL* stop) { modifierFlagOfInterestMask = modifierFlagOfInterestMask | [flag unsignedLongValue]; }]; return modifierFlagOfInterestMask; } /** * The C-function sent to the embedder's |SendKeyEvent|, wrapping * |FlutterEmbedderKeyResponder.handleResponse|. * * For the reason of this wrap, see |FlutterKeyPendingResponse|. */ void HandleResponse(bool handled, void* user_data); /** * Converts NSEvent.characters to a C-string for FlutterKeyEvent. 
*/ const char* getEventString(NSString* characters) { if ([characters length] == 0) { return nullptr; } unichar utf16Code = [characters characterAtIndex:0]; if (utf16Code >= 0xf700 && utf16Code <= 0xf7ff) { // Some function keys are assigned characters with codepoints from the // private use area. These characters are filtered out since they're // unprintable. // // The official documentation reserves 0xF700-0xF8FF as private use area // (https://developer.apple.com/documentation/appkit/1535851-function-key_unicodes?language=objc). // But macOS seems to only use a reduced range of it. The official doc // defines a few constants, all of which are within 0xF700-0xF747. // (https://developer.apple.com/documentation/appkit/1535851-function-key_unicodes?language=objc). // This mostly aligns with the experimentation result, except for 0xF8FF, // which is used for the "Apple logo" character (Option-Shift-K on a US // keyboard.) // // Assume that non-printable function keys are defined from // 0xF700 upwards, and printable private keys are defined from 0xF8FF // downwards. This function filters out 0xF700-0xF7FF in order to keep // the printable private keys. return nullptr; } return [characters UTF8String]; } } // namespace /** * The invocation context for |HandleResponse|, wrapping * |FlutterEmbedderKeyResponder.handleResponse|. */ struct FlutterKeyPendingResponse { FlutterEmbedderKeyResponder* responder; uint64_t responseId; }; /** * Guards a |FlutterAsyncKeyCallback| to make sure it's handled exactly once * throughout |FlutterEmbedderKeyResponder.handleEvent|. * * A callback can either be handled with |pendTo:withId:|, or with |resolveTo:|. * Either way, the callback cannot be handled again, or an assertion will be * thrown. */ @interface FlutterKeyCallbackGuard : NSObject - (nonnull instancetype)initWithCallback:(FlutterAsyncKeyCallback)callback; /** * Handle the callback by storing it to pending responses. */ - (void)pendTo:(nonnull NSMutableDictionary<NSNumber*, FlutterAsyncKeyCallback>*)pendingResponses withId:(uint64_t)responseId; /** * Handle the callback by calling it with a result. */ - (void)resolveTo:(BOOL)handled; @property(nonatomic) BOOL handled; @property(nonatomic) BOOL sentAnyEvents; /** * A string indicating how the callback is handled. * * Only set in debug mode. Nil in release mode, or if the callback has not been * handled. */ @property(nonatomic, copy) NSString* debugHandleSource; @end @implementation FlutterKeyCallbackGuard { // The callback is declared in the implementation block to avoid being // accessed directly.
FlutterAsyncKeyCallback _callback; } - (nonnull instancetype)initWithCallback:(FlutterAsyncKeyCallback)callback { self = [super init]; if (self != nil) { _callback = callback; _handled = FALSE; _sentAnyEvents = FALSE; } return self; } - (void)pendTo:(nonnull NSMutableDictionary<NSNumber*, FlutterAsyncKeyCallback>*)pendingResponses withId:(uint64_t)responseId { NSAssert(!_handled, @"This callback has been handled by %@.", _debugHandleSource); if (_handled) { return; } pendingResponses[@(responseId)] = _callback; _handled = TRUE; NSAssert( ((_debugHandleSource = [NSString stringWithFormat:@"pending event %llu", responseId]), TRUE), @""); } - (void)resolveTo:(BOOL)handled { NSAssert(!_handled, @"This callback has been handled by %@.", _debugHandleSource); if (_handled) { return; } _callback(handled); _handled = TRUE; NSAssert(((_debugHandleSource = [NSString stringWithFormat:@"resolved with %d", _handled]), TRUE), @""); } @end @interface FlutterEmbedderKeyResponder () /** * The function to send converted events to. * * Set by the initializer. */ @property(nonatomic, copy) FlutterSendEmbedderKeyEvent sendEvent; /** * A map of pressed keys. * * The keys of the dictionary are physical keys, while the values are the logical keys * of the key down event. */ @property(nonatomic) NSMutableDictionary<NSNumber*, NSNumber*>* pressingRecords; /** * A constant mask for NSEvent.modifierFlags that Flutter synchronizes with. * * Flutter keeps track of the last |modifierFlags| and compares it with the * incoming one. Any bit within |modifierFlagOfInterestMask| that is different * (except for the one that corresponds to the event key) indicates that an * event for this modifier was missed, and Flutter synthesizes an event to make * up for the state difference. * * It is computed by computeModifierFlagOfInterestMask. */ @property(nonatomic) NSUInteger modifierFlagOfInterestMask; /** * The modifier flags of the last received key event, excluding uninterested * bits. * * This should be kept synchronized with the last |NSEvent.modifierFlags| * after masking with |modifierFlagOfInterestMask|. This should also be kept * synchronized with the corresponding keys of |pressingRecords|. * * This is used by |synchronizeModifiers| to quickly find * out modifier keys that are desynchronized. */ @property(nonatomic) NSUInteger lastModifierFlagsOfInterest; /** * A self-incrementing ID used to label key events sent to the framework. */ @property(nonatomic) uint64_t responseId; /** * A map of unresponded key events sent to the framework. * * Its keys are |responseId|s, and values are the callbacks that were received * along with the events. */ @property(nonatomic) NSMutableDictionary<NSNumber*, FlutterAsyncKeyCallback>* pendingResponses; /** * Compare the last modifier flags and the current, and dispatch synthesized * key events for each different modifier flag bit. * * The flags compared are all flags after masking with * |modifierFlagOfInterestMask| and excluding |ignoringFlags|. * * The |guard| is basically a regular guarded callback, but instead of being * called, it is only used to record whether an event is sent. */ - (void)synchronizeModifiers:(NSUInteger)currentFlags ignoringFlags:(NSUInteger)ignoringFlags timestamp:(NSTimeInterval)timestamp guard:(nonnull FlutterKeyCallbackGuard*)guard; /** * Update the pressing state. * * If `logicalKey` is not 0, `physicalKey` is pressed as `logicalKey`. * Otherwise, `physicalKey` is released.
*/ - (void)updateKey:(uint64_t)physicalKey asPressed:(uint64_t)logicalKey; /** * Send an event to the framework, expecting its response. */ - (void)sendPrimaryFlutterEvent:(const FlutterKeyEvent&)event callback:(nonnull FlutterKeyCallbackGuard*)callback; /** * Send a synthesized key event, never expecting its event result. * * The |guard| is basically a regular guarded callback, but instead of being * called, it is only used to record whether an event is sent. */ - (void)sendSynthesizedFlutterEvent:(const FlutterKeyEvent&)event guard:(FlutterKeyCallbackGuard*)guard; /** * Send a CapsLock down event, then a CapsLock up event. * * If synthesizeDown is TRUE, then both events will be synthesized. Otherwise, * the callback will be used as the callback for the down event, which is not * synthesized, while the up event will always be synthesized. */ - (void)sendCapsLockTapWithTimestamp:(NSTimeInterval)timestamp synthesizeDown:(bool)synthesizeDown callback:(nonnull FlutterKeyCallbackGuard*)callback; /** * Send a key event for a modifier key. */ - (void)sendModifierEventOfType:(BOOL)isDownEvent timestamp:(NSTimeInterval)timestamp keyCode:(unsigned short)keyCode synthesized:(bool)synthesized callback:(nonnull FlutterKeyCallbackGuard*)callback; /** * Processes a down event from the system. */ - (void)handleDownEvent:(nonnull NSEvent*)event callback:(nonnull FlutterKeyCallbackGuard*)callback; /** * Processes an up event from the system. */ - (void)handleUpEvent:(nonnull NSEvent*)event callback:(nonnull FlutterKeyCallbackGuard*)callback; /** * Processes an event from the system for the CapsLock key. */ - (void)handleCapsLockEvent:(nonnull NSEvent*)event callback:(nonnull FlutterKeyCallbackGuard*)callback; /** * Processes a flags changed event from the system, where modifier keys are pressed or released. */ - (void)handleFlagEvent:(nonnull NSEvent*)event callback:(nonnull FlutterKeyCallbackGuard*)callback; /** * Processes the response from the framework. */ - (void)handleResponse:(BOOL)handled forId:(uint64_t)responseId; @end @implementation FlutterEmbedderKeyResponder @synthesize layoutMap; - (nonnull instancetype)initWithSendEvent:(FlutterSendEmbedderKeyEvent)sendEvent { self = [super init]; if (self != nil) { _sendEvent = sendEvent; _pressingRecords = [NSMutableDictionary dictionary]; _pendingResponses = [NSMutableDictionary dictionary]; _responseId = 1; _lastModifierFlagsOfInterest = 0; _modifierFlagOfInterestMask = computeModifierFlagOfInterestMask(); } return self; } - (void)handleEvent:(NSEvent*)event callback:(FlutterAsyncKeyCallback)callback { // The conversion algorithm relies on a non-nil callback to properly compute // `synthesized`. 
NSAssert(callback != nil, @"The callback must not be nil."); FlutterKeyCallbackGuard* guardedCallback = [[FlutterKeyCallbackGuard alloc] initWithCallback:callback]; switch (event.type) { case NSEventTypeKeyDown: [self handleDownEvent:event callback:guardedCallback]; break; case NSEventTypeKeyUp: [self handleUpEvent:event callback:guardedCallback]; break; case NSEventTypeFlagsChanged: [self handleFlagEvent:event callback:guardedCallback]; break; default: NSAssert(false, @"Unexpected key event type: |%@|.", @(event.type)); } NSAssert(guardedCallback.handled, @"The callback is returned without being handled."); if (!guardedCallback.sentAnyEvents) { FlutterKeyEvent flutterEvent = { .struct_size = sizeof(FlutterKeyEvent), .timestamp = 0, .type = kFlutterKeyEventTypeDown, .physical = 0, .logical = 0, .character = nil, .synthesized = false, }; _sendEvent(flutterEvent, nullptr, nullptr); } NSAssert(_lastModifierFlagsOfInterest == (event.modifierFlags & _modifierFlagOfInterestMask), @"The modifier flags are not properly updated: recorded 0x%lx, event with mask 0x%lx", _lastModifierFlagsOfInterest, event.modifierFlags & _modifierFlagOfInterestMask); } #pragma mark - Private - (void)synchronizeModifiers:(NSUInteger)currentFlags ignoringFlags:(NSUInteger)ignoringFlags timestamp:(NSTimeInterval)timestamp guard:(FlutterKeyCallbackGuard*)guard { const NSUInteger updatingMask = _modifierFlagOfInterestMask & ~ignoringFlags; const NSUInteger currentFlagsOfInterest = currentFlags & updatingMask; const NSUInteger lastFlagsOfInterest = _lastModifierFlagsOfInterest & updatingMask; NSUInteger flagDifference = currentFlagsOfInterest ^ lastFlagsOfInterest; if (flagDifference & NSEventModifierFlagCapsLock) { [self sendCapsLockTapWithTimestamp:timestamp synthesizeDown:true callback:guard]; flagDifference = flagDifference & ~NSEventModifierFlagCapsLock; } while (true) { const NSUInteger currentFlag = lowestSetBit(flagDifference); if (currentFlag == 0) { break; } flagDifference = flagDifference & ~currentFlag; NSNumber* keyCode = [flutter::modifierFlagToKeyCode objectForKey:@(currentFlag)]; NSAssert(keyCode != nil, @"Invalid modifier flag 0x%lx", currentFlag); if (keyCode == nil) { continue; } BOOL isDownEvent = (currentFlagsOfInterest & currentFlag) != 0; [self sendModifierEventOfType:isDownEvent timestamp:timestamp keyCode:[keyCode unsignedShortValue] synthesized:true callback:guard]; } _lastModifierFlagsOfInterest = (_lastModifierFlagsOfInterest & ~updatingMask) | currentFlagsOfInterest; } - (void)updateKey:(uint64_t)physicalKey asPressed:(uint64_t)logicalKey { if (logicalKey == 0) { [_pressingRecords removeObjectForKey:@(physicalKey)]; } else { _pressingRecords[@(physicalKey)] = @(logicalKey); } } - (void)sendPrimaryFlutterEvent:(const FlutterKeyEvent&)event callback:(FlutterKeyCallbackGuard*)callback { _responseId += 1; uint64_t responseId = _responseId; // The `pending` is released in `HandleResponse`. 
FlutterKeyPendingResponse* pending = new FlutterKeyPendingResponse{self, responseId}; [callback pendTo:_pendingResponses withId:responseId]; _sendEvent(event, HandleResponse, pending); callback.sentAnyEvents = TRUE; } - (void)sendSynthesizedFlutterEvent:(const FlutterKeyEvent&)event guard:(FlutterKeyCallbackGuard*)guard { _sendEvent(event, nullptr, nullptr); guard.sentAnyEvents = TRUE; } - (void)sendCapsLockTapWithTimestamp:(NSTimeInterval)timestamp synthesizeDown:(bool)synthesizeDown callback:(FlutterKeyCallbackGuard*)callback { // MacOS sends a down *or* an up when CapsLock is tapped, alternatively on // even taps and odd taps. A CapsLock down or CapsLock up should always be // converted to a down *and* an up, and the up should always be a synthesized // event, since the FlutterEmbedderKeyResponder will never know when the // button is released. FlutterKeyEvent flutterEvent = { .struct_size = sizeof(FlutterKeyEvent), .timestamp = GetFlutterTimestampFrom(timestamp), .type = kFlutterKeyEventTypeDown, .physical = flutter::kCapsLockPhysicalKey, .logical = flutter::kCapsLockLogicalKey, .character = nil, .synthesized = synthesizeDown, }; if (!synthesizeDown) { [self sendPrimaryFlutterEvent:flutterEvent callback:callback]; } else { [self sendSynthesizedFlutterEvent:flutterEvent guard:callback]; } flutterEvent.type = kFlutterKeyEventTypeUp; flutterEvent.synthesized = true; [self sendSynthesizedFlutterEvent:flutterEvent guard:callback]; } - (void)sendModifierEventOfType:(BOOL)isDownEvent timestamp:(NSTimeInterval)timestamp keyCode:(unsigned short)keyCode synthesized:(bool)synthesized callback:(FlutterKeyCallbackGuard*)callback { uint64_t physicalKey = GetPhysicalKeyForKeyCode(keyCode); uint64_t logicalKey = GetLogicalKeyForModifier(keyCode, physicalKey); if (physicalKey == 0 || logicalKey == 0) { NSLog(@"Unrecognized modifier key: keyCode 0x%hx, physical key 0x%llx", keyCode, physicalKey); [callback resolveTo:TRUE]; return; } FlutterKeyEvent flutterEvent = { .struct_size = sizeof(FlutterKeyEvent), .timestamp = GetFlutterTimestampFrom(timestamp), .type = isDownEvent ? kFlutterKeyEventTypeDown : kFlutterKeyEventTypeUp, .physical = physicalKey, .logical = logicalKey, .character = nil, .synthesized = synthesized, }; [self updateKey:physicalKey asPressed:isDownEvent ? logicalKey : 0]; if (!synthesized) { [self sendPrimaryFlutterEvent:flutterEvent callback:callback]; } else { [self sendSynthesizedFlutterEvent:flutterEvent guard:callback]; } } - (void)handleDownEvent:(NSEvent*)event callback:(FlutterKeyCallbackGuard*)callback { uint64_t physicalKey = GetPhysicalKeyForKeyCode(event.keyCode); NSNumber* logicalKeyFromMap = self.layoutMap[@(event.keyCode)]; uint64_t logicalKey = logicalKeyFromMap != nil ? [logicalKeyFromMap unsignedLongLongValue] : GetLogicalKeyForEvent(event, physicalKey); [self synchronizeModifiers:event.modifierFlags ignoringFlags:0 timestamp:event.timestamp guard:callback]; bool isARepeat = event.isARepeat; NSNumber* pressedLogicalKey = _pressingRecords[@(physicalKey)]; if (pressedLogicalKey != nil && !isARepeat) { // This might happen in add-to-app scenarios if the focus is changed // from the native view to the Flutter view amid the key tap. // // This might also happen when a key event is forged (such as by an // IME) using the same keyCode as an unreleased key. 
See // https://github.com/flutter/flutter/issues/82673#issuecomment-988661079 FlutterKeyEvent flutterEvent = { .struct_size = sizeof(FlutterKeyEvent), .timestamp = GetFlutterTimestampFrom(event.timestamp), .type = kFlutterKeyEventTypeUp, .physical = physicalKey, .logical = [pressedLogicalKey unsignedLongLongValue], .character = nil, .synthesized = true, }; [self sendSynthesizedFlutterEvent:flutterEvent guard:callback]; pressedLogicalKey = nil; } if (pressedLogicalKey == nil) { [self updateKey:physicalKey asPressed:logicalKey]; } FlutterKeyEvent flutterEvent = { .struct_size = sizeof(FlutterKeyEvent), .timestamp = GetFlutterTimestampFrom(event.timestamp), .type = pressedLogicalKey == nil ? kFlutterKeyEventTypeDown : kFlutterKeyEventTypeRepeat, .physical = physicalKey, .logical = pressedLogicalKey == nil ? logicalKey : [pressedLogicalKey unsignedLongLongValue], .character = getEventString(event.characters), .synthesized = false, }; [self sendPrimaryFlutterEvent:flutterEvent callback:callback]; } - (void)handleUpEvent:(NSEvent*)event callback:(FlutterKeyCallbackGuard*)callback { NSAssert(!event.isARepeat, @"Unexpected repeated Up event: keyCode %d, char %@, charIM %@", event.keyCode, event.characters, event.charactersIgnoringModifiers); [self synchronizeModifiers:event.modifierFlags ignoringFlags:0 timestamp:event.timestamp guard:callback]; uint64_t physicalKey = GetPhysicalKeyForKeyCode(event.keyCode); NSNumber* pressedLogicalKey = _pressingRecords[@(physicalKey)]; if (pressedLogicalKey == nil) { // Normally the key up events won't be missed since macOS always sends the // key up event to the window where the corresponding key down occurred. // However this might happen in add-to-app scenarios if the focus is changed // from the native view to the Flutter view amid the key tap. [callback resolveTo:TRUE]; return; } [self updateKey:physicalKey asPressed:0]; FlutterKeyEvent flutterEvent = { .struct_size = sizeof(FlutterKeyEvent), .timestamp = GetFlutterTimestampFrom(event.timestamp), .type = kFlutterKeyEventTypeUp, .physical = physicalKey, .logical = [pressedLogicalKey unsignedLongLongValue], .character = nil, .synthesized = false, }; [self sendPrimaryFlutterEvent:flutterEvent callback:callback]; } - (void)handleCapsLockEvent:(NSEvent*)event callback:(FlutterKeyCallbackGuard*)callback { [self synchronizeModifiers:event.modifierFlags ignoringFlags:NSEventModifierFlagCapsLock timestamp:event.timestamp guard:callback]; if ((_lastModifierFlagsOfInterest & NSEventModifierFlagCapsLock) != (event.modifierFlags & NSEventModifierFlagCapsLock)) { [self sendCapsLockTapWithTimestamp:event.timestamp synthesizeDown:false callback:callback]; _lastModifierFlagsOfInterest = _lastModifierFlagsOfInterest ^ NSEventModifierFlagCapsLock; } else { [callback resolveTo:TRUE]; } } - (void)handleFlagEvent:(NSEvent*)event callback:(FlutterKeyCallbackGuard*)callback { NSNumber* targetModifierFlagObj = flutter::keyCodeToModifierFlag[@(event.keyCode)]; NSUInteger targetModifierFlag = targetModifierFlagObj == nil ? 
0 : [targetModifierFlagObj unsignedLongValue]; uint64_t targetKey = GetPhysicalKeyForKeyCode(event.keyCode); if (targetKey == flutter::kCapsLockPhysicalKey) { return [self handleCapsLockEvent:event callback:callback]; } [self synchronizeModifiers:event.modifierFlags ignoringFlags:targetModifierFlag timestamp:event.timestamp guard:callback]; NSNumber* pressedLogicalKey = [_pressingRecords objectForKey:@(targetKey)]; BOOL lastTargetPressed = pressedLogicalKey != nil; NSAssert(targetModifierFlagObj == nil || (_lastModifierFlagsOfInterest & targetModifierFlag) != 0 == lastTargetPressed, @"Desynchronized state between lastModifierFlagsOfInterest (0x%lx) on bit 0x%lx " @"for keyCode 0x%hx, whose pressing state is %@.", _lastModifierFlagsOfInterest, targetModifierFlag, event.keyCode, lastTargetPressed ? [NSString stringWithFormat:@"0x%llx", [pressedLogicalKey unsignedLongLongValue]] : @"empty"); BOOL shouldBePressed = (event.modifierFlags & targetModifierFlag) != 0; if (lastTargetPressed == shouldBePressed) { [callback resolveTo:TRUE]; return; } _lastModifierFlagsOfInterest = _lastModifierFlagsOfInterest ^ targetModifierFlag; [self sendModifierEventOfType:shouldBePressed timestamp:event.timestamp keyCode:event.keyCode synthesized:false callback:callback]; } - (void)handleResponse:(BOOL)handled forId:(uint64_t)responseId { FlutterAsyncKeyCallback callback = _pendingResponses[@(responseId)]; callback(handled); [_pendingResponses removeObjectForKey:@(responseId)]; } - (void)syncModifiersIfNeeded:(NSEventModifierFlags)modifierFlags timestamp:(NSTimeInterval)timestamp { FlutterAsyncKeyCallback replyCallback = ^(BOOL handled) { // Do nothing. }; FlutterKeyCallbackGuard* guardedCallback = [[FlutterKeyCallbackGuard alloc] initWithCallback:replyCallback]; [self synchronizeModifiers:modifierFlags ignoringFlags:0 timestamp:timestamp guard:guardedCallback]; } - (nonnull NSDictionary*)getPressedState { return [NSDictionary dictionaryWithDictionary:_pressingRecords]; } @end namespace { void HandleResponse(bool handled, void* user_data) { // Use unique_ptr to release on leaving. auto pending = std::unique_ptr<FlutterKeyPendingResponse>( reinterpret_cast<FlutterKeyPendingResponse*>(user_data)); [pending->responder handleResponse:handled forId:pending->responseId]; } } // namespace
engine/shell/platform/darwin/macos/framework/Source/FlutterEmbedderKeyResponder.mm/0
{ "file_path": "engine/shell/platform/darwin/macos/framework/Source/FlutterEmbedderKeyResponder.mm", "repo_id": "engine", "token_count": 10471 }
349
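FlutterEmbedderKeyResponder.mm above documents two small mechanisms worth checking in isolation: the two's-complement trick in lowestSetBit() and the surrogate-pair recombination in DecodeUtf16(). The stand-alone harness below reproduces both calculations with the same constants; the harness itself, including its function names, is hypothetical and not part of the Flutter sources.

#include <cassert>
#include <cstdint>

static uint64_t LowestSetBit(uint64_t bitmask) {
  // In two's complement, bitmask and its negation share exactly one set bit:
  // the lowest one. (~bitmask + 1) is the negation written without signed types.
  return bitmask & (~bitmask + 1);
}

static uint32_t DecodeSurrogatePair(uint16_t high, uint16_t low) {
  // Same recombination as DecodeUtf16: strip the 0xD800/0xDC00 surrogate
  // bases, shift the high half by 10 bits, and re-add the 0x10000 offset.
  return ((uint32_t)(high - 0xD800) << 10) + (uint32_t)(low - 0xDC00) + 0x10000;
}

int main() {
  assert(LowestSetBit(0x1010) == 0x10);  // Matches the example given in the comment.
  assert(LowestSetBit(0) == 0);
  // U+1F600 is encoded in UTF-16 as the surrogate pair 0xD83D 0xDE00.
  assert(DecodeSurrogatePair(0xD83D, 0xDE00) == 0x1F600);
  return 0;
}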