text
stringlengths
6
13.6M
id
stringlengths
13
176
metadata
dict
__index_level_0__
int64
0
1.69k
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef BRANCHING_GLSL_
#define BRANCHING_GLSL_

#include <impeller/constants.glsl>
#include <impeller/types.glsl>

/// Branchless per-component equality check against a scalar.
///
/// Each component of the result is 1.0 when the corresponding component of
/// `x` is within `kEhCloseEnough` of `y`, and 0.0 otherwise.
BoolV3 IPVec3IsEqual(vec3 x, float y) {
  vec3 delta = abs(x - y);
  return vec3(delta.r < kEhCloseEnough,  //
              delta.g < kEhCloseEnough,  //
              delta.b < kEhCloseEnough);
}

/// Branchless scalar greater-than check.
///
/// Returns 1.0 when x > y, otherwise 0.0.
BoolF IPFloatIsGreaterThan(float x, float y) {
  return max(sign(x - y), 0);
}

/// Branchless per-component greater-than check.
///
/// Each component of the result is 1.0 when x > y, otherwise 0.0.
BoolV3 IPVec3IsGreaterThan(vec3 x, vec3 y) {
  return max(sign(x - y), 0);
}

/// Branchless scalar less-than check.
///
/// Returns 1.0 when x < y, otherwise 0.0.
BoolF IPFloatIsLessThan(float x, float y) {
  return max(sign(y - x), 0);
}

/// Per-component select: yields b where value > cutoff, otherwise a.
vec3 IPVec3ChooseCutoff(vec3 a, vec3 b, vec3 value, float cutoff) {
  return mix(a, b, IPVec3IsGreaterThan(value, vec3(cutoff)));
}

/// Per-component select with an implicit 0.5 cutoff: yields b where
/// value > 0.5, otherwise a.
vec3 IPVec3Choose(vec3 a, vec3 b, vec3 value) {
  return IPVec3ChooseCutoff(a, b, value, 0.5);
}

/// Branchless per-component greater-than check (half precision).
///
/// Each component of the result is 1.0 when x > y, otherwise 0.0.
f16vec3 IPHalfVec3IsGreaterThan(f16vec3 x, f16vec3 y) {
  return max(sign(x - y), float16_t(0.0hf));
}

/// Per-component select (half precision): yields b where value > cutoff,
/// otherwise a.
f16vec3 IPHalfVec3ChooseCutoff(f16vec3 a,
                               f16vec3 b,
                               f16vec3 value,
                               float16_t cutoff) {
  return mix(a, b, IPHalfVec3IsGreaterThan(value, f16vec3(cutoff)));
}

/// Per-component select with an implicit 0.5 cutoff (half precision): yields
/// b where value > 0.5, otherwise a.
f16vec3 IPHalfVec3Choose(f16vec3 a, f16vec3 b, f16vec3 value) {
  return IPHalfVec3ChooseCutoff(a, b, value, float16_t(0.5hf));
}

#endif
engine/impeller/compiler/shader_lib/impeller/branching.glsl/0
{ "file_path": "engine/impeller/compiler/shader_lib/impeller/branching.glsl", "repo_id": "engine", "token_count": 933 }
220
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "impeller/compiler/spirv_compiler.h" #include <array> #include "impeller/compiler/logger.h" #include "impeller/compiler/types.h" namespace impeller { namespace compiler { SPIRVCompiler::SPIRVCompiler(const SourceOptions& options, std::shared_ptr<const fml::Mapping> sources) : options_(options), sources_(std::move(sources)) {} SPIRVCompiler::~SPIRVCompiler() = default; std::shared_ptr<fml::Mapping> SPIRVCompiler::CompileToSPV( std::stringstream& stream, const shaderc::CompileOptions& spirv_options) const { if (!sources_ || sources_->GetMapping() == nullptr) { COMPILER_ERROR(stream) << "Invalid sources for SPIRV Compiler."; return nullptr; } shaderc::Compiler spv_compiler; if (!spv_compiler.IsValid()) { COMPILER_ERROR(stream) << "Could not initialize the " << SourceLanguageToString(options_.source_language) << " to SPIRV compiler."; return nullptr; } const auto shader_kind = ToShaderCShaderKind(options_.type); if (shader_kind == shaderc_shader_kind::shaderc_glsl_infer_from_source) { COMPILER_ERROR(stream) << "Could not figure out shader stage."; return nullptr; } auto result = std::make_shared<shaderc::SpvCompilationResult>( spv_compiler.CompileGlslToSpv( reinterpret_cast<const char*>(sources_->GetMapping()), // source_text sources_->GetSize(), // source_text_size shader_kind, // shader_kind options_.file_name.c_str(), // input_file_name options_.entry_point_name.c_str(), // entry_point_name spirv_options // options )); if (result->GetCompilationStatus() != shaderc_compilation_status::shaderc_compilation_status_success) { COMPILER_ERROR(stream) << SourceLanguageToString(options_.source_language) << " to SPIRV failed; " << ShaderCErrorToString( result->GetCompilationStatus()) << ". 
" << result->GetNumErrors() << " error(s) and " << result->GetNumWarnings() << " warning(s)."; // It should normally be enough to check that there are errors or warnings, // but some cases result in no errors or warnings and still have an error // message. If there's a message we should print it. if (result->GetNumErrors() > 0 || result->GetNumWarnings() > 0 || !result->GetErrorMessage().empty()) { COMPILER_ERROR_NO_PREFIX(stream) << result->GetErrorMessage(); } return nullptr; } if (!result) { COMPILER_ERROR(stream) << "Could not fetch SPIRV from compile job."; return nullptr; } const auto data_length = (result->cend() - result->cbegin()) * sizeof(decltype(result)::element_type::element_type); return std::make_unique<fml::NonOwnedMapping>( reinterpret_cast<const uint8_t*>(result->cbegin()), // data_length, // [result](auto, auto) {} // ); } std::string SPIRVCompiler::GetSourcePrefix() const { std::stringstream stream; stream << options_.file_name << ": "; return stream.str(); } static void SetDefaultLimitations(shaderc::CompileOptions& compiler_opts) { using Limit = std::pair<shaderc_limit, int>; static constexpr std::array<Limit, 83> limits = { Limit{shaderc_limit::shaderc_limit_max_lights, 8}, Limit{shaderc_limit::shaderc_limit_max_clip_planes, 6}, Limit{shaderc_limit::shaderc_limit_max_texture_units, 2}, Limit{shaderc_limit::shaderc_limit_max_texture_coords, 8}, Limit{shaderc_limit::shaderc_limit_max_vertex_attribs, 16}, Limit{shaderc_limit::shaderc_limit_max_vertex_uniform_components, 4096}, Limit{shaderc_limit::shaderc_limit_max_varying_floats, 60}, Limit{shaderc_limit::shaderc_limit_max_vertex_texture_image_units, 16}, Limit{shaderc_limit::shaderc_limit_max_combined_texture_image_units, 80}, Limit{shaderc_limit::shaderc_limit_max_texture_image_units, 16}, Limit{shaderc_limit::shaderc_limit_max_fragment_uniform_components, 1024}, Limit{shaderc_limit::shaderc_limit_max_draw_buffers, 8}, Limit{shaderc_limit::shaderc_limit_max_vertex_uniform_vectors, 256}, 
Limit{shaderc_limit::shaderc_limit_max_varying_vectors, 15}, Limit{shaderc_limit::shaderc_limit_max_fragment_uniform_vectors, 256}, Limit{shaderc_limit::shaderc_limit_max_vertex_output_vectors, 16}, Limit{shaderc_limit::shaderc_limit_max_fragment_input_vectors, 15}, Limit{shaderc_limit::shaderc_limit_min_program_texel_offset, -8}, Limit{shaderc_limit::shaderc_limit_max_program_texel_offset, 7}, Limit{shaderc_limit::shaderc_limit_max_clip_distances, 8}, Limit{shaderc_limit::shaderc_limit_max_compute_work_group_count_x, 65535}, Limit{shaderc_limit::shaderc_limit_max_compute_work_group_count_y, 65535}, Limit{shaderc_limit::shaderc_limit_max_compute_work_group_count_z, 65535}, Limit{shaderc_limit::shaderc_limit_max_compute_work_group_size_x, 1024}, Limit{shaderc_limit::shaderc_limit_max_compute_work_group_size_y, 1024}, Limit{shaderc_limit::shaderc_limit_max_compute_work_group_size_z, 64}, Limit{shaderc_limit::shaderc_limit_max_compute_uniform_components, 512}, Limit{shaderc_limit::shaderc_limit_max_compute_texture_image_units, 16}, Limit{shaderc_limit::shaderc_limit_max_compute_image_uniforms, 8}, Limit{shaderc_limit::shaderc_limit_max_compute_atomic_counters, 8}, Limit{shaderc_limit::shaderc_limit_max_compute_atomic_counter_buffers, 1}, Limit{shaderc_limit::shaderc_limit_max_varying_components, 60}, Limit{shaderc_limit::shaderc_limit_max_vertex_output_components, 64}, Limit{shaderc_limit::shaderc_limit_max_geometry_input_components, 64}, Limit{shaderc_limit::shaderc_limit_max_geometry_output_components, 128}, Limit{shaderc_limit::shaderc_limit_max_fragment_input_components, 128}, Limit{shaderc_limit::shaderc_limit_max_image_units, 8}, Limit{shaderc_limit:: shaderc_limit_max_combined_image_units_and_fragment_outputs, 8}, Limit{shaderc_limit::shaderc_limit_max_combined_shader_output_resources, 8}, Limit{shaderc_limit::shaderc_limit_max_image_samples, 0}, Limit{shaderc_limit::shaderc_limit_max_vertex_image_uniforms, 0}, 
Limit{shaderc_limit::shaderc_limit_max_tess_control_image_uniforms, 0}, Limit{shaderc_limit::shaderc_limit_max_tess_evaluation_image_uniforms, 0}, Limit{shaderc_limit::shaderc_limit_max_geometry_image_uniforms, 0}, Limit{shaderc_limit::shaderc_limit_max_fragment_image_uniforms, 8}, Limit{shaderc_limit::shaderc_limit_max_combined_image_uniforms, 8}, Limit{shaderc_limit::shaderc_limit_max_geometry_texture_image_units, 16}, Limit{shaderc_limit::shaderc_limit_max_geometry_output_vertices, 256}, Limit{shaderc_limit::shaderc_limit_max_geometry_total_output_components, 1024}, Limit{shaderc_limit::shaderc_limit_max_geometry_uniform_components, 512}, Limit{shaderc_limit::shaderc_limit_max_geometry_varying_components, 60}, Limit{shaderc_limit::shaderc_limit_max_tess_control_input_components, 128}, Limit{shaderc_limit::shaderc_limit_max_tess_control_output_components, 128}, Limit{shaderc_limit::shaderc_limit_max_tess_control_texture_image_units, 16}, Limit{shaderc_limit::shaderc_limit_max_tess_control_uniform_components, 1024}, Limit{ shaderc_limit::shaderc_limit_max_tess_control_total_output_components, 4096}, Limit{shaderc_limit::shaderc_limit_max_tess_evaluation_input_components, 128}, Limit{shaderc_limit::shaderc_limit_max_tess_evaluation_output_components, 128}, Limit{ shaderc_limit::shaderc_limit_max_tess_evaluation_texture_image_units, 16}, Limit{shaderc_limit::shaderc_limit_max_tess_evaluation_uniform_components, 1024}, Limit{shaderc_limit::shaderc_limit_max_tess_patch_components, 120}, Limit{shaderc_limit::shaderc_limit_max_patch_vertices, 32}, Limit{shaderc_limit::shaderc_limit_max_tess_gen_level, 64}, Limit{shaderc_limit::shaderc_limit_max_viewports, 16}, Limit{shaderc_limit::shaderc_limit_max_vertex_atomic_counters, 0}, Limit{shaderc_limit::shaderc_limit_max_tess_control_atomic_counters, 0}, Limit{shaderc_limit::shaderc_limit_max_tess_evaluation_atomic_counters, 0}, Limit{shaderc_limit::shaderc_limit_max_geometry_atomic_counters, 0}, 
Limit{shaderc_limit::shaderc_limit_max_fragment_atomic_counters, 8}, Limit{shaderc_limit::shaderc_limit_max_combined_atomic_counters, 8}, Limit{shaderc_limit::shaderc_limit_max_atomic_counter_bindings, 1}, Limit{shaderc_limit::shaderc_limit_max_vertex_atomic_counter_buffers, 0}, Limit{ shaderc_limit::shaderc_limit_max_tess_control_atomic_counter_buffers, 0}, Limit{shaderc_limit:: shaderc_limit_max_tess_evaluation_atomic_counter_buffers, 0}, Limit{shaderc_limit::shaderc_limit_max_geometry_atomic_counter_buffers, 0}, Limit{shaderc_limit::shaderc_limit_max_fragment_atomic_counter_buffers, 0}, Limit{shaderc_limit::shaderc_limit_max_combined_atomic_counter_buffers, 1}, Limit{shaderc_limit::shaderc_limit_max_atomic_counter_buffer_size, 32}, Limit{shaderc_limit::shaderc_limit_max_transform_feedback_buffers, 4}, Limit{shaderc_limit:: shaderc_limit_max_transform_feedback_interleaved_components, 64}, Limit{shaderc_limit::shaderc_limit_max_cull_distances, 8}, Limit{shaderc_limit::shaderc_limit_max_combined_clip_and_cull_distances, 8}, Limit{shaderc_limit::shaderc_limit_max_samples, 4}, }; for (auto& [limit, value] : limits) { compiler_opts.SetLimit(limit, value); } } static void SetBindingBaseOffset(shaderc::CompileOptions& options) { constexpr uint32_t kBindingBaseOffset = 64; static const shaderc_uniform_kind kUniformKinds[] = { shaderc_uniform_kind::shaderc_uniform_kind_sampler, shaderc_uniform_kind::shaderc_uniform_kind_texture, shaderc_uniform_kind::shaderc_uniform_kind_image, shaderc_uniform_kind::shaderc_uniform_kind_buffer, // UBOs shaderc_uniform_kind::shaderc_uniform_kind_storage_buffer, // SSBOs }; for (size_t i = 0u; i < sizeof(kUniformKinds) / sizeof(shaderc_uniform_kind); i++) { options.SetBindingBaseForStage( shaderc_shader_kind::shaderc_fragment_shader, // kUniformKinds[i], // kBindingBaseOffset // ); } } //------------------------------------------------------------------------------ /// @brief Wraps a shared includer so unique includers may be created to /// 
satisfy the shaderc API. This is a simple proxy object and does /// nothing. /// class UniqueIncluder final : public shaderc::CompileOptions::IncluderInterface { public: static std::unique_ptr<UniqueIncluder> Make( std::shared_ptr<Includer> includer) { return std::unique_ptr<UniqueIncluder>( new UniqueIncluder(std::move(includer))); } // |shaderc::CompileOptions::IncluderInterface| ~UniqueIncluder() = default; // |shaderc::CompileOptions::IncluderInterface| shaderc_include_result* GetInclude(const char* requested_source, shaderc_include_type type, const char* requesting_source, size_t include_depth) override { return includer_->GetInclude(requested_source, // type, // requesting_source, // include_depth // ); } // |shaderc::CompileOptions::IncluderInterface| void ReleaseInclude(shaderc_include_result* data) override { return includer_->ReleaseInclude(data); } private: std::shared_ptr<Includer> includer_; explicit UniqueIncluder(std::shared_ptr<Includer> includer) : includer_(std::move(includer)) { FML_CHECK(includer_); } UniqueIncluder(const UniqueIncluder&) = delete; UniqueIncluder& operator=(const UniqueIncluder&) = delete; }; shaderc::CompileOptions SPIRVCompilerOptions::BuildShadercOptions() const { shaderc::CompileOptions options; SetDefaultLimitations(options); SetBindingBaseOffset(options); options.SetAutoBindUniforms(true); options.SetAutoMapLocations(true); options.SetOptimizationLevel(optimization_level); if (generate_debug_info) { options.SetGenerateDebugInfo(); } if (source_langauge.has_value()) { options.SetSourceLanguage(source_langauge.value()); } if (source_profile.has_value()) { options.SetForcedVersionProfile(source_profile->version, source_profile->profile); } if (target.has_value()) { options.SetTargetEnvironment(target->env, target->version); options.SetTargetSpirv(target->spirv_version); } for (const auto& macro : macro_definitions) { options.AddMacroDefinition(macro); } if (includer) { options.SetIncluder(UniqueIncluder::Make(includer)); } 
options.SetVulkanRulesRelaxed(relaxed_vulkan_rules); return options; } } // namespace compiler } // namespace impeller
engine/impeller/compiler/spirv_compiler.cc/0
{ "file_path": "engine/impeller/compiler/spirv_compiler.cc", "repo_id": "engine", "token_count": 6145 }
221
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include <memory> #include "flutter/testing/testing.h" #include "impeller/core/allocator.h" #include "impeller/core/formats.h" #include "impeller/core/texture_descriptor.h" #include "impeller/geometry/size.h" #include "impeller/renderer/testing/mocks.h" namespace impeller { namespace testing { TEST(AllocatorTest, TextureDescriptorCompatibility) { // Size. { TextureDescriptor desc_a = {.size = ISize(100, 100)}; TextureDescriptor desc_b = {.size = ISize(100, 100)}; TextureDescriptor desc_c = {.size = ISize(101, 100)}; ASSERT_EQ(desc_a, desc_b); ASSERT_NE(desc_a, desc_c); } // Storage Mode. { TextureDescriptor desc_a = {.storage_mode = StorageMode::kDevicePrivate}; TextureDescriptor desc_b = {.storage_mode = StorageMode::kDevicePrivate}; TextureDescriptor desc_c = {.storage_mode = StorageMode::kHostVisible}; ASSERT_EQ(desc_a, desc_b); ASSERT_NE(desc_a, desc_c); } // Format. { TextureDescriptor desc_a = {.format = PixelFormat::kR8G8B8A8UNormInt}; TextureDescriptor desc_b = {.format = PixelFormat::kR8G8B8A8UNormInt}; TextureDescriptor desc_c = {.format = PixelFormat::kB10G10R10A10XR}; ASSERT_EQ(desc_a, desc_b); ASSERT_NE(desc_a, desc_c); } // Sample Count. { TextureDescriptor desc_a = {.sample_count = SampleCount::kCount4}; TextureDescriptor desc_b = {.sample_count = SampleCount::kCount4}; TextureDescriptor desc_c = {.sample_count = SampleCount::kCount1}; ASSERT_EQ(desc_a, desc_b); ASSERT_NE(desc_a, desc_c); } // Sample Count. { TextureDescriptor desc_a = {.type = TextureType::kTexture2DMultisample}; TextureDescriptor desc_b = {.type = TextureType::kTexture2DMultisample}; TextureDescriptor desc_c = {.type = TextureType::kTexture2D}; ASSERT_EQ(desc_a, desc_b); ASSERT_NE(desc_a, desc_c); } // Compression. 
{ TextureDescriptor desc_a = {.compression_type = CompressionType::kLossless}; TextureDescriptor desc_b = {.compression_type = CompressionType::kLossless}; TextureDescriptor desc_c = {.compression_type = CompressionType::kLossy}; ASSERT_EQ(desc_a, desc_b); ASSERT_NE(desc_a, desc_c); } // Mip Count. { TextureDescriptor desc_a = {.mip_count = 1}; TextureDescriptor desc_b = {.mip_count = 1}; TextureDescriptor desc_c = {.mip_count = 4}; ASSERT_EQ(desc_a, desc_b); ASSERT_NE(desc_a, desc_c); } } } // namespace testing } // namespace impeller
engine/impeller/core/allocator_unittests.cc/0
{ "file_path": "engine/impeller/core/allocator_unittests.cc", "repo_id": "engine", "token_count": 1058 }
222
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_CORE_RANGE_H_ #define FLUTTER_IMPELLER_CORE_RANGE_H_ #include <cstddef> #include "flutter/fml/macros.h" namespace impeller { struct Range { size_t offset = 0; size_t length = 0; constexpr Range() {} constexpr Range(size_t p_offset, size_t p_length) : offset(p_offset), length(p_length) {} constexpr bool operator==(const Range& o) const { return offset == o.offset && length == o.length; } }; } // namespace impeller #endif // FLUTTER_IMPELLER_CORE_RANGE_H_
engine/impeller/core/range.h/0
{ "file_path": "engine/impeller/core/range.h", "repo_id": "engine", "token_count": 254 }
223
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_CORE_VERTEX_BUFFER_H_ #define FLUTTER_IMPELLER_CORE_VERTEX_BUFFER_H_ #include "impeller/core/buffer_view.h" #include "impeller/core/formats.h" namespace impeller { struct VertexBuffer { BufferView vertex_buffer; //---------------------------------------------------------------------------- /// The index buffer binding used by the vertex shader stage. BufferView index_buffer; //---------------------------------------------------------------------------- /// The total count of vertices, either in the vertex_buffer if the /// index_type is IndexType::kNone or in the index_buffer otherwise. size_t vertex_count = 0u; //---------------------------------------------------------------------------- /// The type of indices in the index buffer. The indices must be tightly /// packed in the index buffer. /// IndexType index_type = IndexType::kUnknown; constexpr explicit operator bool() const { return static_cast<bool>(vertex_buffer) && (index_type == IndexType::kNone || static_cast<bool>(index_buffer)); } }; } // namespace impeller #endif // FLUTTER_IMPELLER_CORE_VERTEX_BUFFER_H_
engine/impeller/core/vertex_buffer.h/0
{ "file_path": "engine/impeller/core/vertex_buffer.h", "repo_id": "engine", "token_count": 372 }
224
# Android ## Rendering Backend Selection Impeller supports both Vulkan and Open GL ES rendering on Android. Impeller will prefer rendering with Vulkan and fallback to OpenGL ES 2.0 for compatibility. With both the Vulkan and OpenGL ES, Impeller supports rendering on [all existing Android versions supported by Flutter](https://docs.flutter.dev/reference/supported-platforms). The rough logic for backend selection on Android is depicted below. ```mermaid flowchart TD start[Start] android_api{Check Android API} device_support{Device Supports Vulkan} vulkan_version{Vulkan Version Check} vulkan_exts{Vulkan Supports Extensions} vulkan[Use Vulkan Backend] opengl[Use OpenGL Backend] start-->device_support device_support-->|Yes|android_api device_support-->|No|opengl android_api-->|>= Android 29|vulkan_version android_api-->|< Android 29|opengl vulkan_version-->|>= Vulkan 1.1|vulkan_exts vulkan_version-->|< Vulkan 1.1|opengl vulkan_exts-->|Supports Extensions|vulkan vulkan_exts-->|Doesn't Support Extensions|opengl ``` ### Vulkan Version For Vulkan, Impeller needs at least Vulkan version 1.1. As of January 06, 2023, this includes 77% of all Android devices per the [Android Distribution dashboard](https://developer.android.com/about/dashboards#Vulkan). ```mermaid pie title Vulkan version "No Vulkan Support" : 15 "Vulkan 1.0.3" : 8 "Vulkan 1.1" : 77 ``` Impeller could theoretically support older version of Vulkan but at a significant cost of implementation and for ultimately diminishing returns on investment. The team would rather spend that time improving the OpenGL ES backend. Besides older devices still in use, devices you can buy new that don't support Vulkan are configurations with 64-bit kernels and 32-bit user-spaces (`armv7l`) or other RAM constrained devices. ### Android Version For Vulkan support, Impeller needs at least Android 10, API level 29 (Q, Quince Tart). Cumulative usage of Android 10 as of August 21, 2023 is 78.5% per [apilevels.com](https://apilevels.com/). 
```mermaid
pie title Android version
    "Android 10 or higher" : 78.5
    "Android 9 and lower" : 21.5
```

Android 9 and older will unconditionally use OpenGL. Android 10, API level 29 provides necessary support to work with [`HardwareBuffer`](https://developer.android.com/reference/android/hardware/HardwareBuffer)s efficiently. This is critical to support platform views.

### Vulkan Extensions

Besides the Vulkan and Android versions, Impeller needs some extensions for interoperability with the underlying platform to support features like platform views and external texture composition. The expectation is that very few Android devices will be filtered out in this check because support for extensions like `VK_ANDROID_external_memory_android_hardware_buffer` [is almost universal on devices with Android 10](https://vulkan.gpuinfo.org/listextensions.php?platform=android) and above.

## Platform Views

Android Platform Views (i.e. an android.view.View embedded inside a Flutter app) are supported in both GLES and Vulkan backends and the Engine manages this automatically.

## SurfaceTexture

Flutter's Java API allows for developers to register custom SurfaceTexture backed textures that can be rendered inside of a Flutter application. See `TextureRegistry.registerSurfaceTexture` and `TextureRegistry.createSurfaceTexture`.

### GLES

There are no issues with SurfaceTextures when using the GLES backend.

### Vulkan

We do not currently support rendering these textures when using the Vulkan backend. Supporting this will require adding support for importing GL textures into Vulkan textures which will have performance implications.
engine/impeller/docs/android.md/0
{ "file_path": "engine/impeller/docs/android.md", "repo_id": "engine", "token_count": 1008 }
225
# Color blending Impeller currently supports the same set of blending operations that [Skia](https://api.skia.org/SkBlendMode_8h.html#ad96d76accb8ff5f3eafa29b91f7a25f0) supports. Internally, Impeller distinguishes between two different kinds of blend modes: Those which can be performed using the raster pipeline blend configuration (called "Pipeline Blends"), and those which cannot (called "Advanced Blends"). All blend modes conform to the [W3C Compositing and Blending recommendation](https://www.w3.org/TR/compositing-1/). Blend operations are driven by the `BlendMode` enum. In the Aiks layer, all drawing operations conform to the given `Paint::blend_mode`. In the Entities layer, all Entities have an associated blend mode, which can be set via `Entity::SetBlendMode(BlendMode)`. ## Glossary of blending terms | Term | Definition | | --- | --- | | Source color | Any color that is output by a fragment shader. | | Destination color | The backdrop color in a blend operation. | | Premultiplied color | A color that has its alpha multiplied into it. Used for additive blending operations as well as colors presented to a surface. | | Porter-Duff alpha composite | One of several operations that add together a source color and destination color, with both the source and destination colors being multiplied by respective alpha factors. | | Pipeline blend | A blend mode that Impeller can always implement by using the raster pipeline blend configuration provided by the underlying graphics backend. Most of these are simple _Porter-Duff alpha composites_. | | Advanced blend | A blend mode that Impeller computes using a fragment program. | ## Premultiplied colors In Impeller, all blending _source colors_ are assumed to be _premultiplied_ for the purpose of blending. This means that all Entity shaders must output colors with premultiplied alpha. In general, these shaders also assume that sampled textures and uniform color inputs are premultiplied. 
The reason for this is that it enables us to implement all of the _Porter-Duff alpha composites_ using the built-in raster pipeline blend configuration offered by all major graphics backends. ## Pipeline blends Most of the pipeline blends are actually _Porter-Duff alpha composites_, which add together the source color and destination color -- both the source and destination colors are multiplied by an alpha factor which determines the behavior of the blend. Pipeline blends are always cheap and don't require additional draw calls to render. | Pipeline blend | | --- | | Clear | | Source | | Destination | | SourceOver | | DestinationOver | | SourceIn | | DestinationIn | | SourceOut | | DestinationOut | | SourceATop | | DestinationATop | | Xor | | Plus | | Modulate | ## Advanced blends Advanced blends are blends that Impeller can't always implement using the built-in raster pipeline blend configuration offered by graphics backends. Instead, they're implemented using special blend shaders that bind the backdrop texture in a separate render pass. Note that all of the advanced blends are _color blends_ rather than _alpha composites_, and they can technically be combined with any _pipeline blend_ with predictable compositing behavior. However, in order to keep in line with Flutter's (and Skia's) current behavior, Impeller uses _Source Over_ compositing when rendering all advanced blends. Advanced blends are expensive when compared to pipeline blends (which are essentially free) for the following reasons: * For each advanced blend, the current render pass ends because the backdrop texture needs to be sampled. * A potentially large texture (the render pass backdrop) is sampled. Although in practice, just the coverage rectangle of the source being blended is actually used. * An intermediary texture is allocated for the blend output before being blitted back to the render pass texture. 
| Advanced blend |
| --- |
| Screen |
| Overlay |
| Darken |
| Lighten |
| ColorDodge |
| ColorBurn |
| HardLight |
| SoftLight |
| Difference |
| Exclusion |
| Multiply |
| Hue |
| Saturation |
| Color |
| Luminosity |
engine/impeller/docs/blending.md/0
{ "file_path": "engine/impeller/docs/blending.md", "repo_id": "engine", "token_count": 1038 }
226
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_ENTITY_CONTENTS_ATLAS_CONTENTS_H_ #define FLUTTER_IMPELLER_ENTITY_CONTENTS_ATLAS_CONTENTS_H_ #include <functional> #include <memory> #include <vector> #include "flutter/fml/macros.h" #include "impeller/core/sampler_descriptor.h" #include "impeller/entity/contents/contents.h" #include "impeller/entity/entity.h" namespace impeller { struct SubAtlasResult { // Sub atlas values. std::vector<Rect> sub_texture_coords; std::vector<Color> sub_colors; std::vector<Matrix> sub_transforms; // Result atlas values. std::vector<Rect> result_texture_coords; std::vector<Matrix> result_transforms; // Size of the sub-atlass. ISize size; }; class AtlasContents final : public Contents { public: explicit AtlasContents(); ~AtlasContents() override; void SetTexture(std::shared_ptr<Texture> texture); std::shared_ptr<Texture> GetTexture() const; void SetTransforms(std::vector<Matrix> transforms); void SetBlendMode(BlendMode blend_mode); void SetTextureCoordinates(std::vector<Rect> texture_coords); void SetColors(std::vector<Color> colors); void SetCullRect(std::optional<Rect> cull_rect); void SetSamplerDescriptor(SamplerDescriptor desc); void SetAlpha(Scalar alpha); const SamplerDescriptor& GetSamplerDescriptor() const; const std::vector<Matrix>& GetTransforms() const; const std::vector<Rect>& GetTextureCoordinates() const; const std::vector<Color>& GetColors() const; /// @brief Compress a drawAtlas call with blending into a smaller sized atlas. /// This atlas has no overlapping to ensure /// blending behaves as if it were done in the fragment shader. 
std::shared_ptr<SubAtlasResult> GenerateSubAtlas() const; // |Contents| std::optional<Rect> GetCoverage(const Entity& entity) const override; // |Contents| bool Render(const ContentContext& renderer, const Entity& entity, RenderPass& pass) const override; private: Rect ComputeBoundingBox() const; std::shared_ptr<Texture> texture_; std::vector<Rect> texture_coords_; std::vector<Color> colors_; std::vector<Matrix> transforms_; BlendMode blend_mode_; std::optional<Rect> cull_rect_; Scalar alpha_ = 1.0; SamplerDescriptor sampler_descriptor_ = {}; mutable std::optional<Rect> bounding_box_cache_; AtlasContents(const AtlasContents&) = delete; AtlasContents& operator=(const AtlasContents&) = delete; }; class AtlasTextureContents final : public Contents { public: explicit AtlasTextureContents(const AtlasContents& parent); ~AtlasTextureContents() override; // |Contents| std::optional<Rect> GetCoverage(const Entity& entity) const override; // |Contents| bool Render(const ContentContext& renderer, const Entity& entity, RenderPass& pass) const override; void SetAlpha(Scalar alpha); void SetCoverage(Rect coverage); void SetTexture(std::shared_ptr<Texture> texture); void SetUseDestination(bool value); void SetSubAtlas(const std::shared_ptr<SubAtlasResult>& subatlas); private: const AtlasContents& parent_; Scalar alpha_ = 1.0; Rect coverage_; std::shared_ptr<Texture> texture_; bool use_destination_ = false; std::shared_ptr<SubAtlasResult> subatlas_; AtlasTextureContents(const AtlasTextureContents&) = delete; AtlasTextureContents& operator=(const AtlasTextureContents&) = delete; }; class AtlasColorContents final : public Contents { public: explicit AtlasColorContents(const AtlasContents& parent); ~AtlasColorContents() override; // |Contents| std::optional<Rect> GetCoverage(const Entity& entity) const override; // |Contents| bool Render(const ContentContext& renderer, const Entity& entity, RenderPass& pass) const override; void SetAlpha(Scalar alpha); void SetCoverage(Rect 
coverage); void SetSubAtlas(const std::shared_ptr<SubAtlasResult>& subatlas); private: const AtlasContents& parent_; Scalar alpha_ = 1.0; Rect coverage_; std::shared_ptr<SubAtlasResult> subatlas_; AtlasColorContents(const AtlasColorContents&) = delete; AtlasColorContents& operator=(const AtlasColorContents&) = delete; }; } // namespace impeller #endif // FLUTTER_IMPELLER_ENTITY_CONTENTS_ATLAS_CONTENTS_H_
engine/impeller/entity/contents/atlas_contents.h/0
{ "file_path": "engine/impeller/entity/contents/atlas_contents.h", "repo_id": "engine", "token_count": 1456 }
227
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_ENTITY_CONTENTS_FILTERS_BLEND_FILTER_CONTENTS_H_ #define FLUTTER_IMPELLER_ENTITY_CONTENTS_FILTERS_BLEND_FILTER_CONTENTS_H_ #include <optional> #include "impeller/entity/contents/filters/color_filter_contents.h" #include "impeller/entity/contents/filters/inputs/filter_input.h" #include "impeller/geometry/color.h" namespace impeller { constexpr std::array<std::array<Scalar, 5>, 15> kPorterDuffCoefficients = {{ {0, 0, 0, 0, 0}, // Clear {1, 0, 0, 0, 0}, // Source {0, 0, 1, 0, 0}, // Destination {1, 0, 1, -1, 0}, // SourceOver {1, -1, 1, 0, 0}, // DestinationOver {0, 1, 0, 0, 0}, // SourceIn {0, 0, 0, 1, 0}, // DestinationIn {1, -1, 0, 0, 0}, // SourceOut {0, 0, 1, -1, 0}, // DestinationOut {0, 1, 1, -1, 0}, // SourceATop {1, -1, 0, 1, 0}, // DestinationATop {1, -1, 1, -1, 0}, // Xor {1, 0, 1, 0, 0}, // Plus {0, 0, 0, 0, 1}, // Modulate {0, 0, 1, 0, -1}, // Screen }}; std::optional<BlendMode> InvertPorterDuffBlend(BlendMode blend_mode); class BlendFilterContents : public ColorFilterContents { public: using AdvancedBlendProc = std::function<std::optional<Entity>( const FilterInput::Vector& inputs, const ContentContext& renderer, const Entity& entity, const Rect& coverage, BlendMode blend_mode, std::optional<Color> foreground_color, ColorFilterContents::AbsorbOpacity absorb_opacity, std::optional<Scalar> alpha)>; BlendFilterContents(); ~BlendFilterContents() override; void SetBlendMode(BlendMode blend_mode); /// @brief Sets a source color which is blended after all of the inputs have /// been blended. 
void SetForegroundColor(std::optional<Color> color); private: // |FilterContents| std::optional<Entity> RenderFilter( const FilterInput::Vector& inputs, const ContentContext& renderer, const Entity& entity, const Matrix& effect_transform, const Rect& coverage, const std::optional<Rect>& coverage_hint) const override; /// @brief Optimized advanced blend that avoids a second subpass when there is /// only a single input and a foreground color. /// /// These contents cannot absorb opacity. std::optional<Entity> CreateForegroundAdvancedBlend( const std::shared_ptr<FilterInput>& input, const ContentContext& renderer, const Entity& entity, const Rect& coverage, Color foreground_color, BlendMode blend_mode, std::optional<Scalar> alpha, ColorFilterContents::AbsorbOpacity absorb_opacity) const; /// @brief Optimized porter-duff blend that avoids a second subpass when there /// is only a single input and a foreground color. /// /// These contents cannot absorb opacity. std::optional<Entity> CreateForegroundPorterDuffBlend( const std::shared_ptr<FilterInput>& input, const ContentContext& renderer, const Entity& entity, const Rect& coverage, Color foreground_color, BlendMode blend_mode, std::optional<Scalar> alpha, ColorFilterContents::AbsorbOpacity absorb_opacity) const; BlendMode blend_mode_ = BlendMode::kSourceOver; AdvancedBlendProc advanced_blend_proc_; std::optional<Color> foreground_color_; BlendFilterContents(const BlendFilterContents&) = delete; BlendFilterContents& operator=(const BlendFilterContents&) = delete; }; } // namespace impeller #endif // FLUTTER_IMPELLER_ENTITY_CONTENTS_FILTERS_BLEND_FILTER_CONTENTS_H_
engine/impeller/entity/contents/filters/blend_filter_contents.h/0
{ "file_path": "engine/impeller/entity/contents/filters/blend_filter_contents.h", "repo_id": "engine", "token_count": 1382 }
228
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "impeller/entity/contents/filters/inputs/filter_input.h"

#include <memory>
#include <utility>

#include "flutter/fml/logging.h"
#include "impeller/entity/contents/filters/filter_contents.h"
#include "impeller/entity/contents/filters/inputs/contents_filter_input.h"
#include "impeller/entity/contents/filters/inputs/filter_contents_filter_input.h"
#include "impeller/entity/contents/filters/inputs/placeholder_filter_input.h"
#include "impeller/entity/contents/filters/inputs/texture_filter_input.h"

namespace impeller {

// Wraps `input` in the concrete FilterInput subclass matching whichever
// variant alternative it holds: FilterContents, generic Contents, Texture,
// or a bare Rect (placeholder). `msaa_enabled` is only consulted for the
// Contents case.
FilterInput::Ref FilterInput::Make(Variant input, bool msaa_enabled) {
  if (auto filter = std::get_if<std::shared_ptr<FilterContents>>(&input)) {
    return std::static_pointer_cast<FilterInput>(
        std::shared_ptr<FilterContentsFilterInput>(
            new FilterContentsFilterInput(*filter)));
  }

  if (auto contents = std::get_if<std::shared_ptr<Contents>>(&input)) {
    return std::static_pointer_cast<FilterInput>(
        std::shared_ptr<ContentsFilterInput>(
            new ContentsFilterInput(*contents, msaa_enabled)));
  }

  if (auto texture = std::get_if<std::shared_ptr<Texture>>(&input)) {
    // Textures get an identity local transform by default.
    return Make(*texture, Matrix());
  }

  if (auto rect = std::get_if<Rect>(&input)) {
    return std::shared_ptr<PlaceholderFilterInput>(
        new PlaceholderFilterInput(*rect));
  }

  // The variant has no other alternatives; reaching here is a logic error.
  FML_UNREACHABLE();
}

// Wraps a texture with an explicit local transform.
FilterInput::Ref FilterInput::Make(std::shared_ptr<Texture> texture,
                                   Matrix local_transform) {
  return std::shared_ptr<TextureFilterInput>(
      new TextureFilterInput(std::move(texture), local_transform));
}

// Convenience: wrap each variant in the list via the single-input Make.
FilterInput::Vector FilterInput::Make(std::initializer_list<Variant> inputs) {
  FilterInput::Vector result;
  result.reserve(inputs.size());
  for (const auto& input : inputs) {
    result.push_back(Make(input));
  }
  return result;
}

// Base implementation: no additional local transform. Subclasses override.
Matrix FilterInput::GetLocalTransform(const Entity& entity) const {
  return Matrix();
}

// Coverage of this input as seen under its local transform only (the
// entity's own transform is replaced, not composed).
std::optional<Rect> FilterInput::GetLocalCoverage(const Entity& entity) const {
  Entity local_entity = entity.Clone();
  local_entity.SetTransform(GetLocalTransform(entity));
  return GetCoverage(local_entity);
}

// Base implementation: the whole output limit is required as source
// coverage. Subclasses may shrink this.
std::optional<Rect> FilterInput::GetSourceCoverage(
    const Matrix& effect_transform,
    const Rect& output_limit) const {
  return output_limit;
}

// Full transform: entity transform composed with this input's local one.
Matrix FilterInput::GetTransform(const Entity& entity) const {
  return entity.GetTransform() * GetLocalTransform(entity);
}

// Base implementation is a no-op; text-bearing subclasses override.
void FilterInput::PopulateGlyphAtlas(
    const std::shared_ptr<LazyGlyphAtlas>& lazy_glyph_atlas,
    Scalar scale) {}

FilterInput::~FilterInput() = default;

// Defaults for the optimization/traversal hooks below; subclasses with
// richer structure override them.
bool FilterInput::IsTranslationOnly() const {
  return true;
}

bool FilterInput::IsLeaf() const {
  return true;
}

void FilterInput::SetLeafInputs(const FilterInput::Vector& inputs) {}

void FilterInput::SetEffectTransform(const Matrix& matrix) {}

void FilterInput::SetRenderingMode(Entity::RenderingMode rendering_mode) {}

}  // namespace impeller
engine/impeller/entity/contents/filters/inputs/filter_input.cc/0
{ "file_path": "engine/impeller/entity/contents/filters/inputs/filter_input.cc", "repo_id": "engine", "token_count": 1054 }
229
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_ENTITY_CONTENTS_FILTERS_SRGB_TO_LINEAR_FILTER_CONTENTS_H_ #define FLUTTER_IMPELLER_ENTITY_CONTENTS_FILTERS_SRGB_TO_LINEAR_FILTER_CONTENTS_H_ #include "impeller/entity/contents/filters/color_filter_contents.h" #include "impeller/entity/contents/filters/inputs/filter_input.h" namespace impeller { class SrgbToLinearFilterContents final : public ColorFilterContents { public: SrgbToLinearFilterContents(); ~SrgbToLinearFilterContents() override; private: // |FilterContents| std::optional<Entity> RenderFilter( const FilterInput::Vector& input_textures, const ContentContext& renderer, const Entity& entity, const Matrix& effect_transform, const Rect& coverage, const std::optional<Rect>& coverage_hint) const override; SrgbToLinearFilterContents(const SrgbToLinearFilterContents&) = delete; SrgbToLinearFilterContents& operator=(const SrgbToLinearFilterContents&) = delete; }; } // namespace impeller #endif // FLUTTER_IMPELLER_ENTITY_CONTENTS_FILTERS_SRGB_TO_LINEAR_FILTER_CONTENTS_H_
engine/impeller/entity/contents/filters/srgb_to_linear_filter_contents.h/0
{ "file_path": "engine/impeller/entity/contents/filters/srgb_to_linear_filter_contents.h", "repo_id": "engine", "token_count": 435 }
230
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "solid_color_contents.h" #include "impeller/entity/contents/content_context.h" #include "impeller/entity/entity.h" #include "impeller/entity/geometry/geometry.h" #include "impeller/geometry/path.h" #include "impeller/renderer/render_pass.h" namespace impeller { SolidColorContents::SolidColorContents() = default; SolidColorContents::~SolidColorContents() = default; void SolidColorContents::SetColor(Color color) { color_ = color; } Color SolidColorContents::GetColor() const { return color_.WithAlpha(color_.alpha * GetOpacityFactor()); } bool SolidColorContents::IsSolidColor() const { return true; } bool SolidColorContents::IsOpaque() const { return GetColor().IsOpaque(); } std::optional<Rect> SolidColorContents::GetCoverage( const Entity& entity) const { if (GetColor().IsTransparent()) { return std::nullopt; } const std::shared_ptr<Geometry>& geometry = GetGeometry(); if (geometry == nullptr) { return std::nullopt; } return geometry->GetCoverage(entity.GetTransform()); }; bool SolidColorContents::Render(const ContentContext& renderer, const Entity& entity, RenderPass& pass) const { auto capture = entity.GetCapture().CreateChild("SolidColorContents"); using VS = SolidFillPipeline::VertexShader; VS::FrameInfo frame_info; frame_info.color = capture.AddColor("Color", GetColor()).Premultiply(); PipelineBuilderCallback pipeline_callback = [&renderer](ContentContextOptions options) { return renderer.GetSolidFillPipeline(options); }; return ColorSourceContents::DrawGeometry<VS>( renderer, entity, pass, pipeline_callback, frame_info, [](RenderPass& pass) { pass.SetCommandLabel("Solid Fill"); return true; }); } std::unique_ptr<SolidColorContents> SolidColorContents::Make(const Path& path, Color color) { auto contents = std::make_unique<SolidColorContents>(); 
contents->SetGeometry(Geometry::MakeFillPath(path)); contents->SetColor(color); return contents; } std::optional<Color> SolidColorContents::AsBackgroundColor( const Entity& entity, ISize target_size) const { Rect target_rect = Rect::MakeSize(target_size); return GetGeometry()->CoversArea(entity.GetTransform(), target_rect) ? GetColor() : std::optional<Color>(); } bool SolidColorContents::ApplyColorFilter( const ColorFilterProc& color_filter_proc) { color_ = color_filter_proc(color_); return true; } } // namespace impeller
engine/impeller/entity/contents/solid_color_contents.cc/0
{ "file_path": "engine/impeller/entity/contents/solid_color_contents.cc", "repo_id": "engine", "token_count": 990 }
231
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <memory>

#include "impeller/core/formats.h"
#include "impeller/core/texture_descriptor.h"
#include "impeller/entity/contents/test/recording_render_pass.h"
#include "impeller/entity/contents/tiled_texture_contents.h"
#include "impeller/entity/entity_playground.h"
#include "impeller/playground/playground_test.h"
#include "third_party/googletest/googletest/include/gtest/gtest.h"

namespace impeller {
namespace testing {

using EntityTest = EntityPlayground;

// Verifies that rendering a regular 2D texture with TiledTextureContents
// selects the "TextureFill Pipeline" variant.
TEST_P(EntityTest, TiledTextureContentsRendersWithCorrectPipeline) {
  TextureDescriptor texture_desc;
  texture_desc.size = {100, 100};
  texture_desc.type = TextureType::kTexture2D;
  texture_desc.format = PixelFormat::kR8G8B8A8UNormInt;
  texture_desc.storage_mode = StorageMode::kDevicePrivate;
  auto texture =
      GetContext()->GetResourceAllocator()->CreateTexture(texture_desc);

  TiledTextureContents contents;
  contents.SetTexture(texture);
  contents.SetGeometry(Geometry::MakeCover());

  auto content_context = GetContentContext();
  auto buffer = content_context->GetContext()->CreateCommandBuffer();
  auto render_target =
      GetContentContext()->GetRenderTargetCache()->CreateOffscreenMSAA(
          *content_context->GetContext(), {100, 100}, /*mip_count=*/1);
  auto render_pass = buffer->CreateRenderPass(render_target);
  // Record commands instead of executing them so the chosen pipeline can be
  // inspected below.
  auto recording_pass = std::make_shared<RecordingRenderPass>(
      render_pass, GetContext(), render_target);

  ASSERT_TRUE(contents.Render(*GetContentContext(), {}, *recording_pass));
  const std::vector<Command>& commands = recording_pass->GetCommands();
  ASSERT_EQ(commands.size(), 1u);
  EXPECT_TRUE(commands[0].pipeline->GetDescriptor().GetLabel().find(
                  "TextureFill Pipeline") != std::string::npos);

  // Only the Metal backend needs the recorded commands flushed here.
  if (GetParam() == PlaygroundBackend::kMetal) {
    recording_pass->EncodeCommands();
  }
}

// GL_OES_EGL_image_external isn't supported on MacOS hosts.
#if !defined(FML_OS_MACOSX)
// Verifies that an external OES texture selects the
// "TiledTextureFillExternal Pipeline" variant (OpenGLES only).
TEST_P(EntityTest, TiledTextureContentsRendersWithCorrectPipelineExternalOES) {
  if (GetParam() != PlaygroundBackend::kOpenGLES) {
    GTEST_SKIP_(
        "External OES textures are only valid for the OpenGLES backend.");
  }

  TextureDescriptor texture_desc;
  texture_desc.size = {100, 100};
  texture_desc.type = TextureType::kTextureExternalOES;
  texture_desc.format = PixelFormat::kR8G8B8A8UNormInt;
  texture_desc.storage_mode = StorageMode::kDevicePrivate;
  auto texture =
      GetContext()->GetResourceAllocator()->CreateTexture(texture_desc);

  TiledTextureContents contents;
  contents.SetTexture(texture);
  contents.SetGeometry(Geometry::MakeCover());

  auto content_context = GetContentContext();
  auto buffer = content_context->GetContext()->CreateCommandBuffer();
  auto render_target =
      GetContentContext()->GetRenderTargetCache()->CreateOffscreenMSAA(
          *content_context->GetContext(), {100, 100}, /*mip_count=*/1);
  auto render_pass = buffer->CreateRenderPass(render_target);

  ASSERT_TRUE(contents.Render(*GetContentContext(), {}, *render_pass));
  const std::vector<Command>& commands = render_pass->GetCommands();
  ASSERT_EQ(commands.size(), 1u);
  EXPECT_TRUE(commands[0].pipeline->GetDescriptor().GetLabel().find(
                  "TiledTextureFillExternal Pipeline") != std::string::npos);
}
#endif

}  // namespace testing
}  // namespace impeller
engine/impeller/entity/contents/tiled_texture_contents_unittests.cc/0
{ "file_path": "engine/impeller/entity/contents/tiled_texture_contents_unittests.cc", "repo_id": "engine", "token_count": 1197 }
232
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <algorithm>

#include "flutter/impeller/entity/geometry/circle_geometry.h"

#include "flutter/impeller/entity/geometry/line_geometry.h"

namespace impeller {

// Filled circle: stroke_width_ is kept at the sentinel -1.
CircleGeometry::CircleGeometry(const Point& center, Scalar radius)
    : center_(center), radius_(radius), stroke_width_(-1.0f) {
  FML_DCHECK(radius >= 0);
}

// Stroked circle: negative stroke widths are clamped to zero.
CircleGeometry::CircleGeometry(const Point& center,
                               Scalar radius,
                               Scalar stroke_width)
    : center_(center),
      radius_(radius),
      stroke_width_(std::max(stroke_width, 0.0f)) {
  FML_DCHECK(radius >= 0);
  FML_DCHECK(stroke_width >= 0);
}

GeometryResult CircleGeometry::GetPositionBuffer(const ContentContext& renderer,
                                                 const Entity& entity,
                                                 RenderPass& pass) const {
  auto& transform = entity.GetTransform();

  // stroke_width_ < 0 is the "filled" sentinel set by the 2-arg constructor;
  // a zero half width makes the tessellator produce a filled circle.
  Scalar half_width = stroke_width_ < 0 ? 0.0
                                        : LineGeometry::ComputePixelHalfWidth(
                                              transform, stroke_width_);

  std::shared_ptr<Tessellator> tessellator = renderer.GetTessellator();

  // We call the StrokedCircle method which will simplify to a
  // FilledCircleGenerator if the inner_radius is <= 0.
  auto generator =
      tessellator->StrokedCircle(transform, center_, radius_, half_width);

  return ComputePositionGeometry(renderer, generator, entity, pass);
}

// |Geometry|
GeometryResult CircleGeometry::GetPositionUVBuffer(
    Rect texture_coverage,
    Matrix effect_transform,
    const ContentContext& renderer,
    const Entity& entity,
    RenderPass& pass) const {
  auto& transform = entity.GetTransform();
  // Maps positions into normalized texture space.
  auto uv_transform =
      texture_coverage.GetNormalizingTransform() * effect_transform;

  // Same filled-vs-stroked handling as GetPositionBuffer above.
  Scalar half_width = stroke_width_ < 0 ? 0.0
                                        : LineGeometry::ComputePixelHalfWidth(
                                              transform, stroke_width_);
  std::shared_ptr<Tessellator> tessellator = renderer.GetTessellator();

  // We call the StrokedCircle method which will simplify to a
  // FilledCircleGenerator if the inner_radius is <= 0.
  auto generator =
      tessellator->StrokedCircle(transform, center_, radius_, half_width);

  return ComputePositionUVGeometry(renderer, generator, uv_transform, entity,
                                   pass);
}

GeometryVertexType CircleGeometry::GetVertexType() const {
  return GeometryVertexType::kPosition;
}

// Bounds of the transformed cardinal points of the circle.
// NOTE(review): for rotated/skewed transforms the bounds of these four
// points can be smaller than the bounds of the transformed circle itself —
// confirm whether callers only rely on this for axis-aligned transforms.
std::optional<Rect> CircleGeometry::GetCoverage(const Matrix& transform) const {
  Point corners[4]{
      {center_.x, center_.y - radius_},
      {center_.x + radius_, center_.y},
      {center_.x, center_.y + radius_},
      {center_.x - radius_, center_.y},
  };

  for (int i = 0; i < 4; i++) {
    corners[i] = transform * corners[i];
  }
  return Rect::MakePointBounds(std::begin(corners), std::end(corners));
}

// A circle never reports full coverage of a rectangle.
bool CircleGeometry::CoversArea(const Matrix& transform,
                                const Rect& rect) const {
  return false;
}

bool CircleGeometry::IsAxisAlignedRect() const {
  return false;
}

}  // namespace impeller
engine/impeller/entity/geometry/circle_geometry.cc/0
{ "file_path": "engine/impeller/entity/geometry/circle_geometry.cc", "repo_id": "engine", "token_count": 1373 }
233
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_ENTITY_GEOMETRY_RECT_GEOMETRY_H_ #define FLUTTER_IMPELLER_ENTITY_GEOMETRY_RECT_GEOMETRY_H_ #include "impeller/entity/geometry/geometry.h" namespace impeller { class RectGeometry final : public Geometry { public: explicit RectGeometry(Rect rect); ~RectGeometry() = default; // |Geometry| bool CoversArea(const Matrix& transform, const Rect& rect) const override; // |Geometry| bool IsAxisAlignedRect() const override; // |Geometry| GeometryResult GetPositionBuffer(const ContentContext& renderer, const Entity& entity, RenderPass& pass) const override; // |Geometry| GeometryVertexType GetVertexType() const override; // |Geometry| std::optional<Rect> GetCoverage(const Matrix& transform) const override; // |Geometry| GeometryResult GetPositionUVBuffer(Rect texture_coverage, Matrix effect_transform, const ContentContext& renderer, const Entity& entity, RenderPass& pass) const override; private: Rect rect_; RectGeometry(const RectGeometry&) = delete; RectGeometry& operator=(const RectGeometry&) = delete; }; static_assert(std::is_trivially_destructible<RectGeometry>::value); } // namespace impeller #endif // FLUTTER_IMPELLER_ENTITY_GEOMETRY_RECT_GEOMETRY_H_
engine/impeller/entity/geometry/rect_geometry.h/0
{ "file_path": "engine/impeller/entity/geometry/rect_geometry.h", "repo_id": "engine", "token_count": 668 }
234
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <impeller/blending.glsl>
#include <impeller/color.glsl>
#include <impeller/types.glsl>

// Index values accepted by AdvancedBlend, mirroring the advanced blend mode
// enum on the host side:
//   kScreen = 0,
//   kOverlay,
//   kDarken,
//   kLighten,
//   kColorDodge,
//   kColorBurn,
//   kHardLight,
//   kSoftLight,
//   kDifference,
//   kExclusion,
//   kMultiply,
//   kHue,
//   kSaturation,
//   kColor,
//   kLuminosity,

// Dispatches to the advanced blend function selected by `blend_type`,
// returning the blended RGB. Unknown indices yield black.
// Note, this isn't a switch as GLSL ES 1.0 does not support them.
f16vec3 AdvancedBlend(f16vec3 dst, f16vec3 src, int blend_type) {
  if (blend_type == 0) {
    return IPBlendScreen(dst, src);
  }

  if (blend_type == 1) {
    return IPBlendOverlay(dst, src);
  }

  if (blend_type == 2) {
    return IPBlendDarken(dst, src);
  }

  if (blend_type == 3) {
    return IPBlendLighten(dst, src);
  }

  if (blend_type == 4) {
    return IPBlendColorDodge(dst, src);
  }

  if (blend_type == 5) {
    return IPBlendColorBurn(dst, src);
  }

  if (blend_type == 6) {
    return IPBlendHardLight(dst, src);
  }

  if (blend_type == 7) {
    return IPBlendSoftLight(dst, src);
  }

  if (blend_type == 8) {
    return IPBlendDifference(dst, src);
  }

  if (blend_type == 9) {
    return IPBlendExclusion(dst, src);
  }

  if (blend_type == 10) {
    return IPBlendMultiply(dst, src);
  }

  if (blend_type == 11) {
    return IPBlendHue(dst, src);
  }

  if (blend_type == 12) {
    return IPBlendSaturation(dst, src);
  }

  if (blend_type == 13) {
    return IPBlendColor(dst, src);
  }

  if (blend_type == 14) {
    return IPBlendLuminosity(dst, src);
  }

  // Fallback for out-of-range blend types.
  return f16vec3(0.0hf);
}
engine/impeller/entity/shaders/blending/blend_select.glsl/0
{ "file_path": "engine/impeller/entity/shaders/blending/blend_select.glsl", "repo_id": "engine", "token_count": 707 }
235
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

precision mediump float;

// Specialization constant: 1.0 when the backend supports decal sampling
// natively; otherwise a shader-side decal fallback is used below.
layout(constant_id = 0) const float supports_decal = 1.0;

#include <impeller/constants.glsl>
#include <impeller/texture.glsl>
#include <impeller/types.glsl>

// These values must correspond to the order of the items in the
// 'FilterContents::MorphType' enum class.
const float16_t kMorphTypeDilate = 0.0hf;
const float16_t kMorphTypeErode = 1.0hf;

uniform f16sampler2D texture_sampler;

uniform FragInfo {
  f16vec2 uv_offset;       // Per-step UV offset along the morphology axis.
  float16_t radius;        // Number of sample steps on each side.
  float16_t morph_type;    // kMorphTypeDilate or kMorphTypeErode.
  float supports_decal_sampler_address_mode;
}
frag_info;

in highp vec2 v_texture_coords;

out f16vec4 frag_color;

// 1D morphology pass: dilate takes the max over the sampling window, erode
// takes the min. The accumulator starts at the identity for each operation
// (0 for max, 1 for min).
void main() {
  f16vec4 result =
      frag_info.morph_type == kMorphTypeDilate ? f16vec4(0.0) : f16vec4(1.0);
  for (float16_t i = -frag_info.radius; i <= frag_info.radius; i++) {
    vec2 texture_coords = v_texture_coords + frag_info.uv_offset * i;

    f16vec4 color;
    if (supports_decal == 1) {
      color = texture(texture_sampler, texture_coords);
    } else {
      // Shader-side decal: out-of-bounds samples read as transparent black.
      color = IPHalfSampleDecal(texture_sampler, texture_coords);
    }

    if (frag_info.morph_type == kMorphTypeDilate) {
      result = max(color, result);
    } else {
      result = min(color, result);
    }
  }

  frag_color = result;
}
engine/impeller/entity/shaders/morphology_filter.frag/0
{ "file_path": "engine/impeller/entity/shaders/morphology_filter.frag", "repo_id": "engine", "token_count": 549 }
236
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Samples an external OES texture and scales the texel by the interpolated
// per-vertex alpha.

uniform sampler2D SAMPLER_EXTERNAL_OES_texture_sampler;

in vec2 v_texture_coords;
in float v_alpha;

out vec4 frag_color;

void main() {
  vec4 texel =
      texture(SAMPLER_EXTERNAL_OES_texture_sampler, v_texture_coords);
  frag_color = v_alpha * texel;
}
engine/impeller/entity/shaders/texture_fill_external.frag/0
{ "file_path": "engine/impeller/entity/shaders/texture_fill_external.frag", "repo_id": "engine", "token_count": 150 }
237
// Test-fixture compute shader: element-wise multiplies two input buffers
// into the output buffer, then mixes in values read from the fixed-size
// arrays and nested struct of each input to exercise SSBO layout handling.

layout(local_size_x = 128) in;

layout(std430) buffer;

struct SomeStruct {
  vec2 vf;
  uint i;
};

layout(binding = 0) writeonly buffer Output {
  vec4 elements[];
}
output_data;

layout(binding = 1) readonly buffer Input0 {
  int some_int;
  ivec2 fixed_array[3];
  vec4 elements[];
}
input_data0;

layout(binding = 2) readonly buffer Input1 {
  SomeStruct some_struct;
  uvec2 fixed_array[4];
  vec4 elements[];
}
input_data1;

uniform Info {
  uint count;  // Number of valid entries in the runtime-sized arrays.
}
info;

void main() {
  uint ident = gl_GlobalInvocationID.x;
  // TODO(dnfield): https://github.com/flutter/flutter/issues/112683
  // We should be able to use length here instead of an extra argument.
  if (ident >= info.count) {
    return;
  }

  output_data.elements[ident] =
      input_data0.elements[ident] * input_data1.elements[ident];

  output_data.elements[ident].x +=
      input_data0.fixed_array[1].x + input_data1.some_struct.i;
  output_data.elements[ident].y +=
      input_data1.fixed_array[0].y + input_data1.some_struct.vf.x;
  output_data.elements[ident].z +=
      input_data0.some_int + input_data1.some_struct.vf.y;
}
engine/impeller/fixtures/sample.comp/0
{ "file_path": "engine/impeller/fixtures/sample.comp", "repo_id": "engine", "token_count": 436 }
238
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_GEOMETRY_CONSTANTS_H_ #define FLUTTER_IMPELLER_GEOMETRY_CONSTANTS_H_ namespace impeller { // e constexpr float kE = 2.7182818284590452354f; // log_2 e constexpr float kLog2E = 1.4426950408889634074f; // log_10 e constexpr float kLog10E = 0.43429448190325182765f; // log_e 2 constexpr float kLogE2 = 0.69314718055994530942f; // log_e 10 constexpr float kLogE10 = 2.30258509299404568402f; // pi constexpr float kPi = 3.14159265358979323846f; // pi*2 constexpr float k2Pi = 6.28318530717958647693f; // pi/2 constexpr float kPiOver2 = 1.57079632679489661923f; // pi/4 constexpr float kPiOver4 = 0.78539816339744830962f; // 1/pi constexpr float k1OverPi = 0.31830988618379067154f; // 2/pi constexpr float k2OverPi = 0.63661977236758134308f; // 2/sqrt(pi) constexpr float k2OverSqrtPi = 1.12837916709551257390f; // sqrt(2) constexpr float kSqrt2 = 1.41421356237309504880f; // 1/sqrt(2) constexpr float k1OverSqrt2 = 0.70710678118654752440f; // phi constexpr float kPhi = 1.61803398874989484820f; // 0.001 constexpr float kEhCloseEnough = 1e-3f; } // namespace impeller #endif // FLUTTER_IMPELLER_GEOMETRY_CONSTANTS_H_
engine/impeller/geometry/constants.h/0
{ "file_path": "engine/impeller/geometry/constants.h", "repo_id": "engine", "token_count": 574 }
239
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "path_component.h" #include <cmath> namespace impeller { /* * Based on: https://en.wikipedia.org/wiki/B%C3%A9zier_curve#Specific_cases */ static inline Scalar LinearSolve(Scalar t, Scalar p0, Scalar p1) { return p0 + t * (p1 - p0); } static inline Scalar QuadraticSolve(Scalar t, Scalar p0, Scalar p1, Scalar p2) { return (1 - t) * (1 - t) * p0 + // 2 * (1 - t) * t * p1 + // t * t * p2; } static inline Scalar QuadraticSolveDerivative(Scalar t, Scalar p0, Scalar p1, Scalar p2) { return 2 * (1 - t) * (p1 - p0) + // 2 * t * (p2 - p1); } static inline Scalar CubicSolve(Scalar t, Scalar p0, Scalar p1, Scalar p2, Scalar p3) { return (1 - t) * (1 - t) * (1 - t) * p0 + // 3 * (1 - t) * (1 - t) * t * p1 + // 3 * (1 - t) * t * t * p2 + // t * t * t * p3; } static inline Scalar CubicSolveDerivative(Scalar t, Scalar p0, Scalar p1, Scalar p2, Scalar p3) { return -3 * p0 * (1 - t) * (1 - t) + // p1 * (3 * (1 - t) * (1 - t) - 6 * (1 - t) * t) + p2 * (6 * (1 - t) * t - 3 * t * t) + // 3 * p3 * t * t; } Point LinearPathComponent::Solve(Scalar time) const { return { LinearSolve(time, p1.x, p2.x), // x LinearSolve(time, p1.y, p2.y), // y }; } void LinearPathComponent::AppendPolylinePoints( std::vector<Point>& points) const { if (points.size() == 0 || points.back() != p2) { points.push_back(p2); } } std::vector<Point> LinearPathComponent::Extrema() const { return {p1, p2}; } std::optional<Vector2> LinearPathComponent::GetStartDirection() const { if (p1 == p2) { return std::nullopt; } return (p1 - p2).Normalize(); } std::optional<Vector2> LinearPathComponent::GetEndDirection() const { if (p1 == p2) { return std::nullopt; } return (p2 - p1).Normalize(); } Point QuadraticPathComponent::Solve(Scalar time) const { return { QuadraticSolve(time, p1.x, cp.x, p2.x), // x QuadraticSolve(time, p1.y, cp.y, p2.y), // y }; } Point 
QuadraticPathComponent::SolveDerivative(Scalar time) const { return { QuadraticSolveDerivative(time, p1.x, cp.x, p2.x), // x QuadraticSolveDerivative(time, p1.y, cp.y, p2.y), // y }; } static Scalar ApproximateParabolaIntegral(Scalar x) { constexpr Scalar d = 0.67; return x / (1.0 - d + sqrt(sqrt(pow(d, 4) + 0.25 * x * x))); } void QuadraticPathComponent::AppendPolylinePoints( Scalar scale_factor, std::vector<Point>& points) const { ToLinearPathComponents(scale_factor, [&points](const Point& point) { points.emplace_back(point); }); } void QuadraticPathComponent::ToLinearPathComponents( Scalar scale_factor, const PointProc& proc) const { auto tolerance = kDefaultCurveTolerance / scale_factor; auto sqrt_tolerance = sqrt(tolerance); auto d01 = cp - p1; auto d12 = p2 - cp; auto dd = d01 - d12; auto cross = (p2 - p1).Cross(dd); auto x0 = d01.Dot(dd) * 1 / cross; auto x2 = d12.Dot(dd) * 1 / cross; auto scale = std::abs(cross / (hypot(dd.x, dd.y) * (x2 - x0))); auto a0 = ApproximateParabolaIntegral(x0); auto a2 = ApproximateParabolaIntegral(x2); Scalar val = 0.f; if (std::isfinite(scale)) { auto da = std::abs(a2 - a0); auto sqrt_scale = sqrt(scale); if ((x0 < 0 && x2 < 0) || (x0 >= 0 && x2 >= 0)) { val = da * sqrt_scale; } else { // cusp case auto xmin = sqrt_tolerance / sqrt_scale; val = sqrt_tolerance * da / ApproximateParabolaIntegral(xmin); } } auto u0 = ApproximateParabolaIntegral(a0); auto u2 = ApproximateParabolaIntegral(a2); auto uscale = 1 / (u2 - u0); auto line_count = std::max(1., ceil(0.5 * val / sqrt_tolerance)); auto step = 1 / line_count; for (size_t i = 1; i < line_count; i += 1) { auto u = i * step; auto a = a0 + (a2 - a0) * u; auto t = (ApproximateParabolaIntegral(a) - u0) * uscale; proc(Solve(t)); } proc(p2); } std::vector<Point> QuadraticPathComponent::Extrema() const { CubicPathComponent elevated(*this); return elevated.Extrema(); } std::optional<Vector2> QuadraticPathComponent::GetStartDirection() const { if (p1 != cp) { return (p1 - cp).Normalize(); 
} if (p1 != p2) { return (p1 - p2).Normalize(); } return std::nullopt; } std::optional<Vector2> QuadraticPathComponent::GetEndDirection() const { if (p2 != cp) { return (p2 - cp).Normalize(); } if (p2 != p1) { return (p2 - p1).Normalize(); } return std::nullopt; } Point CubicPathComponent::Solve(Scalar time) const { return { CubicSolve(time, p1.x, cp1.x, cp2.x, p2.x), // x CubicSolve(time, p1.y, cp1.y, cp2.y, p2.y), // y }; } Point CubicPathComponent::SolveDerivative(Scalar time) const { return { CubicSolveDerivative(time, p1.x, cp1.x, cp2.x, p2.x), // x CubicSolveDerivative(time, p1.y, cp1.y, cp2.y, p2.y), // y }; } void CubicPathComponent::AppendPolylinePoints( Scalar scale, std::vector<Point>& points) const { ToLinearPathComponents( scale, [&points](const Point& point) { points.emplace_back(point); }); } inline QuadraticPathComponent CubicPathComponent::Lower() const { return QuadraticPathComponent(3.0 * (cp1 - p1), 3.0 * (cp2 - cp1), 3.0 * (p2 - cp2)); } CubicPathComponent CubicPathComponent::Subsegment(Scalar t0, Scalar t1) const { auto p0 = Solve(t0); auto p3 = Solve(t1); auto d = Lower(); auto scale = (t1 - t0) * (1.0 / 3.0); auto p1 = p0 + scale * d.Solve(t0); auto p2 = p3 - scale * d.Solve(t1); return CubicPathComponent(p0, p1, p2, p3); } void CubicPathComponent::ToLinearPathComponents(Scalar scale, const PointProc& proc) const { constexpr Scalar accuracy = 0.1; // The maximum error, as a vector from the cubic to the best approximating // quadratic, is proportional to the third derivative, which is constant // across the segment. Thus, the error scales down as the third power of // the number of subdivisions. Our strategy then is to subdivide `t` evenly. // // This is an overestimate of the error because only the component // perpendicular to the first derivative is important. But the simplicity is // appealing. // This magic number is the square of 36 / sqrt(3). 
// See: http://caffeineowl.com/graphics/2d/vectorial/cubic2quad01.html auto max_hypot2 = 432.0 * accuracy * accuracy; auto p1x2 = 3.0 * cp1 - p1; auto p2x2 = 3.0 * cp2 - p2; auto p = p2x2 - p1x2; auto err = p.Dot(p); auto quad_count = std::max(1., ceil(pow(err / max_hypot2, 1. / 6.0))); for (size_t i = 0; i < quad_count; i++) { auto t0 = i / quad_count; auto t1 = (i + 1) / quad_count; auto seg = Subsegment(t0, t1); auto p1x2 = 3.0 * seg.cp1 - seg.p1; auto p2x2 = 3.0 * seg.cp2 - seg.p2; QuadraticPathComponent(seg.p1, ((p1x2 + p2x2) / 4.0), seg.p2) .ToLinearPathComponents(scale, proc); } } static inline bool NearEqual(Scalar a, Scalar b, Scalar epsilon) { return (a > (b - epsilon)) && (a < (b + epsilon)); } static inline bool NearZero(Scalar a) { return NearEqual(a, 0.0, 1e-12); } static void CubicPathBoundingPopulateValues(std::vector<Scalar>& values, Scalar p1, Scalar p2, Scalar p3, Scalar p4) { const Scalar a = 3.0 * (-p1 + 3.0 * p2 - 3.0 * p3 + p4); const Scalar b = 6.0 * (p1 - 2.0 * p2 + p3); const Scalar c = 3.0 * (p2 - p1); /* * Boundary conditions. */ if (NearZero(a)) { if (NearZero(b)) { return; } Scalar t = -c / b; if (t >= 0.0 && t <= 1.0) { values.emplace_back(t); } return; } Scalar b2Minus4AC = (b * b) - (4.0 * a * c); if (b2Minus4AC < 0.0) { return; } Scalar rootB2Minus4AC = ::sqrt(b2Minus4AC); /* From Numerical Recipes in C. * * q = -1/2 (b + sign(b) sqrt[b^2 - 4ac]) * x1 = q / a * x2 = c / q */ Scalar q = (b < 0) ? 
-(b - rootB2Minus4AC) / 2 : -(b + rootB2Minus4AC) / 2; { Scalar t = q / a; if (t >= 0.0 && t <= 1.0) { values.emplace_back(t); } } { Scalar t = c / q; if (t >= 0.0 && t <= 1.0) { values.emplace_back(t); } } } std::vector<Point> CubicPathComponent::Extrema() const { /* * As described in: https://pomax.github.io/bezierinfo/#extremities */ std::vector<Scalar> values; CubicPathBoundingPopulateValues(values, p1.x, cp1.x, cp2.x, p2.x); CubicPathBoundingPopulateValues(values, p1.y, cp1.y, cp2.y, p2.y); std::vector<Point> points = {p1, p2}; for (const auto& value : values) { points.emplace_back(Solve(value)); } return points; } std::optional<Vector2> CubicPathComponent::GetStartDirection() const { if (p1 != cp1) { return (p1 - cp1).Normalize(); } if (p1 != cp2) { return (p1 - cp2).Normalize(); } if (p1 != p2) { return (p1 - p2).Normalize(); } return std::nullopt; } std::optional<Vector2> CubicPathComponent::GetEndDirection() const { if (p2 != cp2) { return (p2 - cp2).Normalize(); } if (p2 != cp1) { return (p2 - cp1).Normalize(); } if (p2 != p1) { return (p2 - p1).Normalize(); } return std::nullopt; } std::optional<Vector2> PathComponentStartDirectionVisitor::operator()( const LinearPathComponent* component) { if (!component) { return std::nullopt; } return component->GetStartDirection(); } std::optional<Vector2> PathComponentStartDirectionVisitor::operator()( const QuadraticPathComponent* component) { if (!component) { return std::nullopt; } return component->GetStartDirection(); } std::optional<Vector2> PathComponentStartDirectionVisitor::operator()( const CubicPathComponent* component) { if (!component) { return std::nullopt; } return component->GetStartDirection(); } std::optional<Vector2> PathComponentEndDirectionVisitor::operator()( const LinearPathComponent* component) { if (!component) { return std::nullopt; } return component->GetEndDirection(); } std::optional<Vector2> PathComponentEndDirectionVisitor::operator()( const QuadraticPathComponent* component) { if 
(!component) { return std::nullopt; } return component->GetEndDirection(); } std::optional<Vector2> PathComponentEndDirectionVisitor::operator()( const CubicPathComponent* component) { if (!component) { return std::nullopt; } return component->GetEndDirection(); } } // namespace impeller
engine/impeller/geometry/path_component.cc/0
{ "file_path": "engine/impeller/geometry/path_component.cc", "repo_id": "engine", "token_count": 5170 }
240
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_GEOMETRY_SIGMA_H_ #define FLUTTER_IMPELLER_GEOMETRY_SIGMA_H_ #include "impeller/geometry/scalar.h" namespace impeller { /// For filters that use a Gaussian distribution, this is the `Radius` size to /// use per `Sigma` (standard deviation). /// /// This cutoff (sqrt(3)) is taken from Flutter and Skia (where the /// multiplicative inverse of this constant is used (1 / sqrt(3)): /// https://api.flutter.dev/flutter/dart-ui/Shadow/convertRadiusToSigma.html /// /// In practice, this value is somewhat arbitrary, and can be changed to a /// higher number to integrate more of the Gaussian function and render higher /// quality blurs (with exponentially diminishing returns for the same sigma /// input). Making this value any lower results in a noticable loss of /// quality in the blur. constexpr static float kKernelRadiusPerSigma = 1.73205080757; struct Radius; /// @brief In filters that use Gaussian distributions, "sigma" is a size of /// one standard deviation in terms of the local space pixel grid of /// the filter input. In other words, this determines how wide the /// distribution stretches. struct Sigma { Scalar sigma = 0.0; constexpr Sigma() = default; explicit constexpr Sigma(Scalar p_sigma) : sigma(p_sigma) {} operator Radius() const; // NOLINT(google-explicit-constructor) }; /// @brief For convolution filters, the "radius" is the size of the /// convolution kernel to use on the local space pixel grid of the /// filter input. /// For Gaussian blur kernels, this unit has a linear /// relationship with `Sigma`. See `kKernelRadiusPerSigma` for /// details on how this relationship works. 
struct Radius { Scalar radius = 0.0; constexpr Radius() = default; explicit constexpr Radius(Scalar p_radius) : radius(p_radius) {} operator Sigma() const; // NOLINT(google-explicit-constructor) }; } // namespace impeller #endif // FLUTTER_IMPELLER_GEOMETRY_SIGMA_H_
engine/impeller/geometry/sigma.h/0
{ "file_path": "engine/impeller/geometry/sigma.h", "repo_id": "engine", "token_count": 697 }
241
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include <dlfcn.h> #include <filesystem> #include <memory> #include "flutter/impeller/golden_tests/golden_playground_test.h" #include "flutter/impeller/aiks/picture.h" #include "flutter/impeller/golden_tests/golden_digest.h" #include "flutter/impeller/golden_tests/metal_screenshotter.h" #include "flutter/impeller/golden_tests/vulkan_screenshotter.h" #include "flutter/third_party/abseil-cpp/absl/base/no_destructor.h" #include "impeller/typographer/backends/skia/typographer_context_skia.h" #include "impeller/typographer/typographer_context.h" #define GLFW_INCLUDE_NONE #include "third_party/glfw/include/GLFW/glfw3.h" namespace impeller { namespace { std::unique_ptr<PlaygroundImpl> MakeVulkanPlayground(bool enable_validations) { FML_CHECK(::glfwInit() == GLFW_TRUE); PlaygroundSwitches playground_switches; playground_switches.enable_vulkan_validation = enable_validations; return PlaygroundImpl::Create(PlaygroundBackend::kVulkan, playground_switches); } // Returns a static instance to a playground that can be used across tests. const std::unique_ptr<PlaygroundImpl>& GetSharedVulkanPlayground( bool enable_validations) { if (enable_validations) { static absl::NoDestructor<std::unique_ptr<PlaygroundImpl>> vulkan_validation_playground( MakeVulkanPlayground(/*enable_validations=*/true)); // TODO(https://github.com/flutter/flutter/issues/142237): This can be // removed when the thread local storage is removed. 
static fml::ScopedCleanupClosure context_cleanup( [&] { (*vulkan_validation_playground)->GetContext()->Shutdown(); }); return *vulkan_validation_playground; } else { static absl::NoDestructor<std::unique_ptr<PlaygroundImpl>> vulkan_playground(MakeVulkanPlayground(/*enable_validations=*/false)); // TODO(https://github.com/flutter/flutter/issues/142237): This can be // removed when the thread local storage is removed. static fml::ScopedCleanupClosure context_cleanup( [&] { (*vulkan_playground)->GetContext()->Shutdown(); }); return *vulkan_playground; } } } // namespace #define IMP_AIKSTEST(name) \ "impeller_Play_AiksTest_" #name "_Metal", \ "impeller_Play_AiksTest_" #name "_OpenGLES", \ "impeller_Play_AiksTest_" #name "_Vulkan" // If you add a new playground test to the aiks unittests and you do not want it // to also be a golden test, then add the test name here. static const std::vector<std::string> kSkipTests = { // TextRotated is flakey and we can't seem to get it to stabilize on Skia // Gold. IMP_AIKSTEST(TextRotated), // Runtime stage based tests get confused with a Metal context. "impeller_Play_AiksTest_CanRenderClippedRuntimeEffects_Vulkan", }; namespace { std::string GetTestName() { std::string suite_name = ::testing::UnitTest::GetInstance()->current_test_suite()->name(); std::string test_name = ::testing::UnitTest::GetInstance()->current_test_info()->name(); std::stringstream ss; ss << "impeller_" << suite_name << "_" << test_name; std::string result = ss.str(); // Make sure there are no slashes in the test name. 
std::replace(result.begin(), result.end(), '/', '_'); return result; } std::string GetGoldenFilename() { return GetTestName() + ".png"; } bool SaveScreenshot(std::unique_ptr<testing::Screenshot> screenshot) { if (!screenshot || !screenshot->GetBytes()) { FML_LOG(ERROR) << "Failed to collect screenshot for test " << GetTestName(); return false; } std::string test_name = GetTestName(); std::string filename = GetGoldenFilename(); testing::GoldenDigest::Instance()->AddImage( test_name, filename, screenshot->GetWidth(), screenshot->GetHeight()); if (!screenshot->WriteToPNG( testing::WorkingDirectory::Instance()->GetFilenamePath(filename))) { FML_LOG(ERROR) << "Failed to write screenshot to " << filename; return false; } return true; } bool ShouldTestHaveVulkanValidations() { std::string test_name = GetTestName(); return std::find(kVulkanDenyValidationTests.begin(), kVulkanDenyValidationTests.end(), test_name) == kVulkanDenyValidationTests.end(); } } // namespace struct GoldenPlaygroundTest::GoldenPlaygroundTestImpl { std::unique_ptr<PlaygroundImpl> test_vulkan_playground; std::unique_ptr<PlaygroundImpl> test_opengl_playground; std::unique_ptr<testing::Screenshotter> screenshotter; ISize window_size = ISize{1024, 768}; }; GoldenPlaygroundTest::GoldenPlaygroundTest() : typographer_context_(TypographerContextSkia::Make()), pimpl_(new GoldenPlaygroundTest::GoldenPlaygroundTestImpl()) {} GoldenPlaygroundTest::~GoldenPlaygroundTest() = default; void GoldenPlaygroundTest::SetTypographerContext( std::shared_ptr<TypographerContext> typographer_context) { typographer_context_ = std::move(typographer_context); }; void GoldenPlaygroundTest::TearDown() { ASSERT_FALSE(dlopen("/usr/local/lib/libMoltenVK.dylib", RTLD_NOLOAD)); } void GoldenPlaygroundTest::SetUp() { std::filesystem::path testing_assets_path = flutter::testing::GetTestingAssetsPath(); std::filesystem::path target_path = testing_assets_path.parent_path() .parent_path() .parent_path() .parent_path(); std::filesystem::path 
icd_path = target_path / "vk_swiftshader_icd.json"; setenv("VK_ICD_FILENAMES", icd_path.c_str(), 1); bool enable_vulkan_validations = ShouldTestHaveVulkanValidations(); switch (GetParam()) { case PlaygroundBackend::kMetal: pimpl_->screenshotter = std::make_unique<testing::MetalScreenshotter>(); break; case PlaygroundBackend::kVulkan: { const std::unique_ptr<PlaygroundImpl>& playground = GetSharedVulkanPlayground(enable_vulkan_validations); pimpl_->screenshotter = std::make_unique<testing::VulkanScreenshotter>(playground); break; } case PlaygroundBackend::kOpenGLES: { FML_CHECK(::glfwInit() == GLFW_TRUE); PlaygroundSwitches playground_switches; playground_switches.use_angle = true; pimpl_->test_opengl_playground = PlaygroundImpl::Create( PlaygroundBackend::kOpenGLES, playground_switches); pimpl_->screenshotter = std::make_unique<testing::VulkanScreenshotter>( pimpl_->test_opengl_playground); break; } } if (GetParam() == PlaygroundBackend::kMetal) { pimpl_->screenshotter = std::make_unique<testing::MetalScreenshotter>(); } else if (GetParam() == PlaygroundBackend::kVulkan) { const std::unique_ptr<PlaygroundImpl>& playground = GetSharedVulkanPlayground(enable_vulkan_validations); pimpl_->screenshotter = std::make_unique<testing::VulkanScreenshotter>(playground); } std::string test_name = GetTestName(); if (std::find(kSkipTests.begin(), kSkipTests.end(), test_name) != kSkipTests.end()) { GTEST_SKIP_( "GoldenPlaygroundTest doesn't support interactive playground tests " "yet."); } testing::GoldenDigest::Instance()->AddDimension( "gpu_string", GetContext()->DescribeGpuModel()); } PlaygroundBackend GoldenPlaygroundTest::GetBackend() const { return GetParam(); } bool GoldenPlaygroundTest::OpenPlaygroundHere(Picture picture) { AiksContext renderer(GetContext(), typographer_context_); auto screenshot = pimpl_->screenshotter->MakeScreenshot(renderer, picture, pimpl_->window_size); return SaveScreenshot(std::move(screenshot)); } bool GoldenPlaygroundTest::OpenPlaygroundHere( 
AiksPlaygroundCallback callback) { // NOLINT(performance-unnecessary-value-param) AiksContext renderer(GetContext(), typographer_context_); std::optional<Picture> picture; std::unique_ptr<testing::Screenshot> screenshot; for (int i = 0; i < 2; ++i) { picture = callback(renderer); if (!picture.has_value()) { return false; } screenshot = pimpl_->screenshotter->MakeScreenshot( renderer, picture.value(), pimpl_->window_size); } return SaveScreenshot(std::move(screenshot)); } bool GoldenPlaygroundTest::ImGuiBegin(const char* name, bool* p_open, ImGuiWindowFlags flags) { return false; } std::shared_ptr<Texture> GoldenPlaygroundTest::CreateTextureForFixture( const char* fixture_name, bool enable_mipmapping) const { std::shared_ptr<fml::Mapping> mapping = flutter::testing::OpenFixtureAsMapping(fixture_name); auto result = Playground::CreateTextureForMapping(GetContext(), mapping, enable_mipmapping); if (result) { result->SetLabel(fixture_name); } return result; } RuntimeStage::Map GoldenPlaygroundTest::OpenAssetAsRuntimeStage( const char* asset_name) const { const std::shared_ptr<fml::Mapping> fixture = flutter::testing::OpenFixtureAsMapping(asset_name); if (!fixture || fixture->GetSize() == 0) { return {}; } return RuntimeStage::DecodeRuntimeStages(fixture); } std::shared_ptr<Context> GoldenPlaygroundTest::GetContext() const { return pimpl_->screenshotter->GetPlayground().GetContext(); } std::shared_ptr<Context> GoldenPlaygroundTest::MakeContext() const { if (GetParam() == PlaygroundBackend::kMetal) { /// On Metal we create a context for each test. 
return GetContext(); } else if (GetParam() == PlaygroundBackend::kVulkan) { bool enable_vulkan_validations = ShouldTestHaveVulkanValidations(); FML_CHECK(!pimpl_->test_vulkan_playground) << "We don't support creating multiple contexts for one test"; pimpl_->test_vulkan_playground = MakeVulkanPlayground(enable_vulkan_validations); pimpl_->screenshotter = std::make_unique<testing::VulkanScreenshotter>( pimpl_->test_vulkan_playground); return pimpl_->test_vulkan_playground->GetContext(); } else { /// On OpenGL we create a context for each test. return GetContext(); } } Point GoldenPlaygroundTest::GetContentScale() const { return pimpl_->screenshotter->GetPlayground().GetContentScale(); } Scalar GoldenPlaygroundTest::GetSecondsElapsed() const { return 0.0f; } ISize GoldenPlaygroundTest::GetWindowSize() const { return pimpl_->window_size; } void GoldenPlaygroundTest::GoldenPlaygroundTest::SetWindowSize(ISize size) { pimpl_->window_size = size; } fml::Status GoldenPlaygroundTest::SetCapabilities( const std::shared_ptr<Capabilities>& capabilities) { return pimpl_->screenshotter->GetPlayground().SetCapabilities(capabilities); } } // namespace impeller
engine/impeller/golden_tests/golden_playground_test_mac.cc/0
{ "file_path": "engine/impeller/golden_tests/golden_playground_test_mac.cc", "repo_id": "engine", "token_count": 4083 }
242
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "impeller/playground/backend/gles/playground_impl_gles.h" #define IMPELLER_PLAYGROUND_SUPPORTS_ANGLE FML_OS_MACOSX #if IMPELLER_PLAYGROUND_SUPPORTS_ANGLE #include <dlfcn.h> #endif #define GLFW_INCLUDE_NONE #include "third_party/glfw/include/GLFW/glfw3.h" #include "flutter/fml/build_config.h" #include "impeller/entity/gles/entity_shaders_gles.h" #include "impeller/entity/gles/framebuffer_blend_shaders_gles.h" #include "impeller/entity/gles/modern_shaders_gles.h" #include "impeller/fixtures/gles/fixtures_shaders_gles.h" #include "impeller/playground/imgui/gles/imgui_shaders_gles.h" #include "impeller/renderer/backend/gles/context_gles.h" #include "impeller/renderer/backend/gles/surface_gles.h" #include "impeller/scene/shaders/gles/scene_shaders_gles.h" namespace impeller { class PlaygroundImplGLES::ReactorWorker final : public ReactorGLES::Worker { public: ReactorWorker() = default; // |ReactorGLES::Worker| bool CanReactorReactOnCurrentThreadNow( const ReactorGLES& reactor) const override { ReaderLock lock(mutex_); auto found = reactions_allowed_.find(std::this_thread::get_id()); if (found == reactions_allowed_.end()) { return false; } return found->second; } void SetReactionsAllowedOnCurrentThread(bool allowed) { WriterLock lock(mutex_); reactions_allowed_[std::this_thread::get_id()] = allowed; } private: mutable RWMutex mutex_; std::map<std::thread::id, bool> reactions_allowed_ IPLR_GUARDED_BY(mutex_); ReactorWorker(const ReactorWorker&) = delete; ReactorWorker& operator=(const ReactorWorker&) = delete; }; void PlaygroundImplGLES::DestroyWindowHandle(WindowHandle handle) { if (!handle) { return; } ::glfwDestroyWindow(reinterpret_cast<GLFWwindow*>(handle)); } PlaygroundImplGLES::PlaygroundImplGLES(PlaygroundSwitches switches) : PlaygroundImpl(switches), handle_(nullptr, &DestroyWindowHandle), 
worker_(std::shared_ptr<ReactorWorker>(new ReactorWorker())), use_angle_(switches.use_angle) { if (use_angle_) { #if IMPELLER_PLAYGROUND_SUPPORTS_ANGLE angle_glesv2_ = dlopen("libGLESv2.dylib", RTLD_LAZY); #endif FML_CHECK(angle_glesv2_ != nullptr); } ::glfwDefaultWindowHints(); #if FML_OS_MACOSX FML_CHECK(use_angle_) << "Must use Angle on macOS for OpenGL ES."; ::glfwWindowHint(GLFW_CONTEXT_CREATION_API, GLFW_EGL_CONTEXT_API); #endif // FML_OS_MACOSX ::glfwWindowHint(GLFW_CLIENT_API, GLFW_OPENGL_ES_API); ::glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 2); ::glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 0); ::glfwWindowHint(GLFW_RED_BITS, 8); ::glfwWindowHint(GLFW_GREEN_BITS, 8); ::glfwWindowHint(GLFW_BLUE_BITS, 8); ::glfwWindowHint(GLFW_ALPHA_BITS, 8); ::glfwWindowHint(GLFW_DEPTH_BITS, 32); // 32 bit depth buffer ::glfwWindowHint(GLFW_STENCIL_BITS, 8); // 8 bit stencil buffer ::glfwWindowHint(GLFW_SAMPLES, 4); // 4xMSAA ::glfwWindowHint(GLFW_VISIBLE, GLFW_FALSE); auto window = ::glfwCreateWindow(1, 1, "Test", nullptr, nullptr); ::glfwMakeContextCurrent(window); worker_->SetReactionsAllowedOnCurrentThread(true); handle_.reset(window); } PlaygroundImplGLES::~PlaygroundImplGLES() = default; static std::vector<std::shared_ptr<fml::Mapping>> ShaderLibraryMappingsForPlayground() { return { std::make_shared<fml::NonOwnedMapping>( impeller_entity_shaders_gles_data, impeller_entity_shaders_gles_length), std::make_shared<fml::NonOwnedMapping>( impeller_modern_shaders_gles_data, impeller_modern_shaders_gles_length), std::make_shared<fml::NonOwnedMapping>( impeller_framebuffer_blend_shaders_gles_data, impeller_framebuffer_blend_shaders_gles_length), std::make_shared<fml::NonOwnedMapping>( impeller_fixtures_shaders_gles_data, impeller_fixtures_shaders_gles_length), std::make_shared<fml::NonOwnedMapping>( impeller_imgui_shaders_gles_data, impeller_imgui_shaders_gles_length), std::make_shared<fml::NonOwnedMapping>( impeller_scene_shaders_gles_data, impeller_scene_shaders_gles_length), 
}; } // |PlaygroundImpl| std::shared_ptr<Context> PlaygroundImplGLES::GetContext() const { auto resolver = use_angle_ ? [](const char* name) -> void* { void* symbol = nullptr; #if IMPELLER_PLAYGROUND_SUPPORTS_ANGLE void* angle_glesv2 = dlopen("libGLESv2.dylib", RTLD_LAZY); symbol = dlsym(angle_glesv2, name); #endif FML_CHECK(symbol); return symbol; } : [](const char* name) -> void* { return reinterpret_cast<void*>(::glfwGetProcAddress(name)); }; auto gl = std::make_unique<ProcTableGLES>(resolver); if (!gl->IsValid()) { FML_LOG(ERROR) << "Proc table when creating a playground was invalid."; return nullptr; } auto context = ContextGLES::Create( std::move(gl), ShaderLibraryMappingsForPlayground(), true); if (!context) { FML_LOG(ERROR) << "Could not create context."; return nullptr; } auto worker_id = context->AddReactorWorker(worker_); if (!worker_id.has_value()) { FML_LOG(ERROR) << "Could not add reactor worker."; return nullptr; } return context; } // |PlaygroundImpl| PlaygroundImpl::WindowHandle PlaygroundImplGLES::GetWindowHandle() const { return handle_.get(); } // |PlaygroundImpl| std::unique_ptr<Surface> PlaygroundImplGLES::AcquireSurfaceFrame( std::shared_ptr<Context> context) { auto window = reinterpret_cast<GLFWwindow*>(GetWindowHandle()); int width = 0; int height = 0; ::glfwGetFramebufferSize(window, &width, &height); if (width <= 0 || height <= 0) { return nullptr; } SurfaceGLES::SwapCallback swap_callback = [window]() -> bool { ::glfwSwapBuffers(window); return true; }; return SurfaceGLES::WrapFBO(context, // swap_callback, // 0u, // PixelFormat::kR8G8B8A8UNormInt, // ISize::MakeWH(width, height) // ); } fml::Status PlaygroundImplGLES::SetCapabilities( const std::shared_ptr<Capabilities>& capabilities) { return fml::Status( fml::StatusCode::kUnimplemented, "PlaygroundImplGLES doesn't support setting the capabilities."); } } // namespace impeller
engine/impeller/playground/backend/gles/playground_impl_gles.cc/0
{ "file_path": "engine/impeller/playground/backend/gles/playground_impl_gles.cc", "repo_id": "engine", "token_count": 2715 }
243
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "impeller/playground/image/decompressed_image.h" #include <limits> #include "flutter/fml/mapping.h" #include "impeller/base/allocation.h" namespace impeller { DecompressedImage::DecompressedImage() = default; DecompressedImage::DecompressedImage( ISize size, Format format, std::shared_ptr<const fml::Mapping> allocation) : size_(size), format_(format), allocation_(std::move(allocation)) { if (!allocation_ || size.IsEmpty() || format_ == Format::kInvalid) { return; } is_valid_ = true; } DecompressedImage::~DecompressedImage() = default; bool DecompressedImage::IsValid() const { return is_valid_; } const ISize& DecompressedImage::GetSize() const { return size_; } DecompressedImage::Format DecompressedImage::GetFormat() const { return format_; } const std::shared_ptr<const fml::Mapping>& DecompressedImage::GetAllocation() const { return allocation_; } static size_t GetBytesPerPixel(DecompressedImage::Format format) { switch (format) { case DecompressedImage::Format::kInvalid: return 0u; case DecompressedImage::Format::kGrey: return 1u; case DecompressedImage::Format::kGreyAlpha: return 1u; case DecompressedImage::Format::kRGB: return 3u; case DecompressedImage::Format::kRGBA: return 4; } return 0u; } DecompressedImage DecompressedImage::ConvertToRGBA() const { if (!is_valid_) { return {}; } if (format_ == Format::kRGBA) { return DecompressedImage{size_, format_, allocation_}; } const auto bpp = GetBytesPerPixel(format_); const auto source_byte_size = size_.Area() * bpp; if (allocation_->GetSize() < source_byte_size) { return {}; } auto rgba_allocation = std::make_shared<Allocation>(); if (!rgba_allocation->Truncate(size_.Area() * 4u, false)) { return {}; } const uint8_t* source = allocation_->GetMapping(); uint8_t* dest = rgba_allocation->GetBuffer(); for (size_t i = 0, j = 0; i < source_byte_size; 
i += bpp, j += 4u) { switch (format_) { case DecompressedImage::Format::kGrey: dest[j + 0] = source[i]; dest[j + 1] = source[i]; dest[j + 2] = source[i]; dest[j + 3] = std::numeric_limits<uint8_t>::max(); break; case DecompressedImage::Format::kGreyAlpha: dest[j + 0] = std::numeric_limits<uint8_t>::max(); dest[j + 1] = std::numeric_limits<uint8_t>::max(); dest[j + 2] = std::numeric_limits<uint8_t>::max(); dest[j + 3] = source[i]; break; case DecompressedImage::Format::kRGB: dest[j + 0] = source[i + 0]; dest[j + 1] = source[i + 1]; dest[j + 2] = source[i + 2]; dest[j + 3] = std::numeric_limits<uint8_t>::max(); break; case DecompressedImage::Format::kInvalid: case DecompressedImage::Format::kRGBA: // Should never happen. The necessary checks have already been // performed. FML_CHECK(false); break; } } return DecompressedImage{ size_, Format::kRGBA, std::make_shared<fml::NonOwnedMapping>( rgba_allocation->GetBuffer(), // rgba_allocation->GetLength(), // [rgba_allocation](auto, auto) {}) // }; } } // namespace impeller
engine/impeller/playground/image/decompressed_image.cc/0
{ "file_path": "engine/impeller/playground/image/decompressed_image.cc", "repo_id": "engine", "token_count": 1396 }
244
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_PLAYGROUND_WIDGETS_H_ #define FLUTTER_IMPELLER_PLAYGROUND_WIDGETS_H_ #include <optional> #include <tuple> #include "impeller/base/strings.h" #include "impeller/geometry/color.h" #include "impeller/geometry/point.h" #include "third_party/imgui/imgui.h" namespace impeller { struct PlaygroundPoint { PlaygroundPoint(Point default_position, Scalar p_radius, Color p_color) : position(default_position), reset_position(default_position), radius(p_radius), color(p_color) {} Point position; Point reset_position; bool dragging = false; std::optional<Point> prev_mouse_pos; Scalar radius; Color color; PlaygroundPoint(const PlaygroundPoint&) = delete; PlaygroundPoint(PlaygroundPoint&&) = delete; PlaygroundPoint& operator=(const PlaygroundPoint&) = delete; }; Point DrawPlaygroundPoint(PlaygroundPoint& point); std::tuple<Point, Point> DrawPlaygroundLine(PlaygroundPoint& point_a, PlaygroundPoint& point_b); } // namespace impeller #endif // FLUTTER_IMPELLER_PLAYGROUND_WIDGETS_H_
engine/impeller/playground/widgets.h/0
{ "file_path": "engine/impeller/playground/widgets.h", "repo_id": "engine", "token_count": 469 }
245
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "impeller/renderer/backend/gles/context_gles.h" #include <memory> #include "impeller/base/config.h" #include "impeller/base/validation.h" #include "impeller/renderer/backend/gles/command_buffer_gles.h" #include "impeller/renderer/backend/gles/gpu_tracer_gles.h" #include "impeller/renderer/command_queue.h" namespace impeller { std::shared_ptr<ContextGLES> ContextGLES::Create( std::unique_ptr<ProcTableGLES> gl, const std::vector<std::shared_ptr<fml::Mapping>>& shader_libraries, bool enable_gpu_tracing) { return std::shared_ptr<ContextGLES>( new ContextGLES(std::move(gl), shader_libraries, enable_gpu_tracing)); } ContextGLES::ContextGLES( std::unique_ptr<ProcTableGLES> gl, const std::vector<std::shared_ptr<fml::Mapping>>& shader_libraries_mappings, bool enable_gpu_tracing) { reactor_ = std::make_shared<ReactorGLES>(std::move(gl)); if (!reactor_->IsValid()) { VALIDATION_LOG << "Could not create valid reactor."; return; } // Create the shader library. { auto library = std::shared_ptr<ShaderLibraryGLES>( new ShaderLibraryGLES(shader_libraries_mappings)); if (!library->IsValid()) { VALIDATION_LOG << "Could not create valid shader library."; return; } shader_library_ = std::move(library); } // Create the pipeline library. { pipeline_library_ = std::shared_ptr<PipelineLibraryGLES>(new PipelineLibraryGLES(reactor_)); } // Create allocators. { resource_allocator_ = std::shared_ptr<AllocatorGLES>(new AllocatorGLES(reactor_)); if (!resource_allocator_->IsValid()) { VALIDATION_LOG << "Could not create a resource allocator."; return; } } device_capabilities_ = reactor_->GetProcTable().GetCapabilities(); // Create the sampler library. 
{ sampler_library_ = std::shared_ptr<SamplerLibraryGLES>(new SamplerLibraryGLES( device_capabilities_->SupportsDecalSamplerAddressMode())); } gpu_tracer_ = std::make_shared<GPUTracerGLES>(GetReactor()->GetProcTable(), enable_gpu_tracing); command_queue_ = std::make_shared<CommandQueue>(); is_valid_ = true; } ContextGLES::~ContextGLES() = default; Context::BackendType ContextGLES::GetBackendType() const { return Context::BackendType::kOpenGLES; } const ReactorGLES::Ref& ContextGLES::GetReactor() const { return reactor_; } std::optional<ReactorGLES::WorkerID> ContextGLES::AddReactorWorker( const std::shared_ptr<ReactorGLES::Worker>& worker) { if (!IsValid()) { return std::nullopt; } return reactor_->AddWorker(worker); } bool ContextGLES::RemoveReactorWorker(ReactorGLES::WorkerID id) { if (!IsValid()) { return false; } return reactor_->RemoveWorker(id); } bool ContextGLES::IsValid() const { return is_valid_; } void ContextGLES::Shutdown() {} // |Context| std::string ContextGLES::DescribeGpuModel() const { return reactor_->GetProcTable().GetDescription()->GetString(); } // |Context| std::shared_ptr<Allocator> ContextGLES::GetResourceAllocator() const { return resource_allocator_; } // |Context| std::shared_ptr<ShaderLibrary> ContextGLES::GetShaderLibrary() const { return shader_library_; } // |Context| std::shared_ptr<SamplerLibrary> ContextGLES::GetSamplerLibrary() const { return sampler_library_; } // |Context| std::shared_ptr<PipelineLibrary> ContextGLES::GetPipelineLibrary() const { return pipeline_library_; } // |Context| std::shared_ptr<CommandBuffer> ContextGLES::CreateCommandBuffer() const { return std::shared_ptr<CommandBufferGLES>( new CommandBufferGLES(weak_from_this(), reactor_)); } // |Context| const std::shared_ptr<const Capabilities>& ContextGLES::GetCapabilities() const { return device_capabilities_; } // |Context| std::shared_ptr<CommandQueue> ContextGLES::GetCommandQueue() const { return command_queue_; } } // namespace impeller
engine/impeller/renderer/backend/gles/context_gles.cc/0
{ "file_path": "engine/impeller/renderer/backend/gles/context_gles.cc", "repo_id": "engine", "token_count": 1518 }
246
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_RENDERER_BACKEND_GLES_PIPELINE_LIBRARY_GLES_H_ #define FLUTTER_IMPELLER_RENDERER_BACKEND_GLES_PIPELINE_LIBRARY_GLES_H_ #include "flutter/fml/macros.h" #include "impeller/renderer/backend/gles/reactor_gles.h" #include "impeller/renderer/pipeline_library.h" namespace impeller { class ContextGLES; class PipelineLibraryGLES final : public PipelineLibrary { public: // |PipelineLibrary| ~PipelineLibraryGLES() override; private: friend ContextGLES; ReactorGLES::Ref reactor_; PipelineMap pipelines_; explicit PipelineLibraryGLES(ReactorGLES::Ref reactor); // |PipelineLibrary| bool IsValid() const override; // |PipelineLibrary| PipelineFuture<PipelineDescriptor> GetPipeline( PipelineDescriptor descriptor) override; // |PipelineLibrary| PipelineFuture<ComputePipelineDescriptor> GetPipeline( ComputePipelineDescriptor descriptor) override; // |PipelineLibrary| void RemovePipelinesWithEntryPoint( std::shared_ptr<const ShaderFunction> function) override; PipelineLibraryGLES(const PipelineLibraryGLES&) = delete; PipelineLibraryGLES& operator=(const PipelineLibraryGLES&) = delete; }; } // namespace impeller #endif // FLUTTER_IMPELLER_RENDERER_BACKEND_GLES_PIPELINE_LIBRARY_GLES_H_
engine/impeller/renderer/backend/gles/pipeline_library_gles.h/0
{ "file_path": "engine/impeller/renderer/backend/gles/pipeline_library_gles.h", "repo_id": "engine", "token_count": 503 }
247
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_RENDERER_BACKEND_GLES_SURFACE_GLES_H_ #define FLUTTER_IMPELLER_RENDERER_BACKEND_GLES_SURFACE_GLES_H_ #include <functional> #include <memory> #include "flutter/fml/macros.h" #include "impeller/renderer/backend/gles/gles.h" #include "impeller/renderer/context.h" #include "impeller/renderer/surface.h" namespace impeller { class SurfaceGLES final : public Surface { public: using SwapCallback = std::function<bool(void)>; static std::unique_ptr<Surface> WrapFBO( const std::shared_ptr<Context>& context, SwapCallback swap_callback, GLuint fbo, PixelFormat color_format, ISize fbo_size); // |Surface| ~SurfaceGLES() override; private: SwapCallback swap_callback_; SurfaceGLES(SwapCallback swap_callback, const RenderTarget& target_desc); // |Surface| bool Present() const override; SurfaceGLES(const SurfaceGLES&) = delete; SurfaceGLES& operator=(const SurfaceGLES&) = delete; }; } // namespace impeller #endif // FLUTTER_IMPELLER_RENDERER_BACKEND_GLES_SURFACE_GLES_H_
engine/impeller/renderer/backend/gles/surface_gles.h/0
{ "file_path": "engine/impeller/renderer/backend/gles/surface_gles.h", "repo_id": "engine", "token_count": 449 }
248
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "impeller/renderer/backend/metal/blit_command_mtl.h" #include "impeller/renderer/backend/metal/device_buffer_mtl.h" #include "impeller/renderer/backend/metal/texture_mtl.h" namespace impeller { BlitEncodeMTL::~BlitEncodeMTL() = default; BlitCopyTextureToTextureCommandMTL::~BlitCopyTextureToTextureCommandMTL() = default; std::string BlitCopyTextureToTextureCommandMTL::GetLabel() const { return label; } bool BlitCopyTextureToTextureCommandMTL::Encode( id<MTLBlitCommandEncoder> encoder) const { auto source_mtl = TextureMTL::Cast(*source).GetMTLTexture(); if (!source_mtl) { return false; } auto destination_mtl = TextureMTL::Cast(*destination).GetMTLTexture(); if (!destination_mtl) { return false; } auto source_origin_mtl = MTLOriginMake(source_region.GetX(), source_region.GetY(), 0); auto source_size_mtl = MTLSizeMake(source_region.GetWidth(), source_region.GetHeight(), 1); auto destination_origin_mtl = MTLOriginMake(destination_origin.x, destination_origin.y, 0); [encoder copyFromTexture:source_mtl sourceSlice:0 sourceLevel:0 sourceOrigin:source_origin_mtl sourceSize:source_size_mtl toTexture:destination_mtl destinationSlice:0 destinationLevel:0 destinationOrigin:destination_origin_mtl]; return true; }; BlitCopyTextureToBufferCommandMTL::~BlitCopyTextureToBufferCommandMTL() = default; std::string BlitCopyTextureToBufferCommandMTL::GetLabel() const { return label; } bool BlitCopyTextureToBufferCommandMTL::Encode( id<MTLBlitCommandEncoder> encoder) const { auto source_mtl = TextureMTL::Cast(*source).GetMTLTexture(); if (!source_mtl) { return false; } auto destination_mtl = DeviceBufferMTL::Cast(*destination).GetMTLBuffer(); if (!destination_mtl) { return false; } auto source_origin_mtl = MTLOriginMake(source_region.GetX(), source_region.GetY(), 0); auto source_size_mtl = 
MTLSizeMake(source_region.GetWidth(), source_region.GetHeight(), 1); auto destination_bytes_per_pixel = BytesPerPixelForPixelFormat(source->GetTextureDescriptor().format); auto destination_bytes_per_row = source_size_mtl.width * destination_bytes_per_pixel; auto destination_bytes_per_image = source_size_mtl.height * destination_bytes_per_row; [encoder copyFromTexture:source_mtl sourceSlice:0 sourceLevel:0 sourceOrigin:source_origin_mtl sourceSize:source_size_mtl toBuffer:destination_mtl destinationOffset:destination_offset destinationBytesPerRow:destination_bytes_per_row destinationBytesPerImage:destination_bytes_per_image]; return true; }; BlitCopyBufferToTextureCommandMTL::~BlitCopyBufferToTextureCommandMTL() = default; std::string BlitCopyBufferToTextureCommandMTL::GetLabel() const { return label; } bool BlitCopyBufferToTextureCommandMTL::Encode( id<MTLBlitCommandEncoder> encoder) const { auto source_mtl = DeviceBufferMTL::Cast(*source.buffer).GetMTLBuffer(); if (!source_mtl) { return false; } auto destination_mtl = TextureMTL::Cast(*destination).GetMTLTexture(); if (!destination_mtl) { return false; } auto destination_origin_mtl = MTLOriginMake(destination_origin.x, destination_origin.y, 0); auto image_size = destination->GetTextureDescriptor().size; auto source_size_mtl = MTLSizeMake(image_size.width, image_size.height, 1); auto destination_bytes_per_pixel = BytesPerPixelForPixelFormat(destination->GetTextureDescriptor().format); auto destination_bytes_per_row = source_size_mtl.width * destination_bytes_per_pixel; auto destination_bytes_per_image = source_size_mtl.height * destination_bytes_per_row; [encoder copyFromBuffer:source_mtl sourceOffset:source.range.offset sourceBytesPerRow:destination_bytes_per_row sourceBytesPerImage:destination_bytes_per_image sourceSize:source_size_mtl toTexture:destination_mtl destinationSlice:0 destinationLevel:0 destinationOrigin:destination_origin_mtl]; return true; }; 
BlitGenerateMipmapCommandMTL::~BlitGenerateMipmapCommandMTL() = default; std::string BlitGenerateMipmapCommandMTL::GetLabel() const { return label; } bool BlitGenerateMipmapCommandMTL::Encode( id<MTLBlitCommandEncoder> encoder) const { return TextureMTL::Cast(*texture).GenerateMipmap(encoder); }; } // namespace impeller
engine/impeller/renderer/backend/metal/blit_command_mtl.mm/0
{ "file_path": "engine/impeller/renderer/backend/metal/blit_command_mtl.mm", "repo_id": "engine", "token_count": 1841 }
249
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "impeller/renderer/backend/metal/formats_mtl.h" #include <Metal/Metal.h> #include <memory> #include "impeller/renderer/render_pass.h" namespace impeller { MTLRenderPipelineColorAttachmentDescriptor* ToMTLRenderPipelineColorAttachmentDescriptor( ColorAttachmentDescriptor descriptor) { auto des = [[MTLRenderPipelineColorAttachmentDescriptor alloc] init]; des.pixelFormat = ToMTLPixelFormat(descriptor.format); des.blendingEnabled = descriptor.blending_enabled; des.sourceRGBBlendFactor = ToMTLBlendFactor(descriptor.src_color_blend_factor); des.rgbBlendOperation = ToMTLBlendOperation(descriptor.color_blend_op); des.destinationRGBBlendFactor = ToMTLBlendFactor(descriptor.dst_color_blend_factor); des.sourceAlphaBlendFactor = ToMTLBlendFactor(descriptor.src_alpha_blend_factor); des.alphaBlendOperation = ToMTLBlendOperation(descriptor.alpha_blend_op); des.destinationAlphaBlendFactor = ToMTLBlendFactor(descriptor.dst_alpha_blend_factor); des.writeMask = ToMTLColorWriteMask(descriptor.write_mask); return des; } MTLStencilDescriptor* ToMTLStencilDescriptor( const StencilAttachmentDescriptor& descriptor) { auto des = [[MTLStencilDescriptor alloc] init]; des.stencilCompareFunction = ToMTLCompareFunction(descriptor.stencil_compare); des.stencilFailureOperation = ToMTLStencilOperation(descriptor.stencil_failure); des.depthFailureOperation = ToMTLStencilOperation(descriptor.depth_failure); des.depthStencilPassOperation = ToMTLStencilOperation(descriptor.depth_stencil_pass); des.readMask = descriptor.read_mask; des.writeMask = descriptor.write_mask; return des; } MTLDepthStencilDescriptor* ToMTLDepthStencilDescriptor( std::optional<DepthAttachmentDescriptor> depth, std::optional<StencilAttachmentDescriptor> front, std::optional<StencilAttachmentDescriptor> back) { if (!depth) { depth = DepthAttachmentDescriptor{ 
// Always pass the depth test. .depth_compare = CompareFunction::kAlways, .depth_write_enabled = false, }; } auto des = [[MTLDepthStencilDescriptor alloc] init]; // These temporary variables are necessary for clang-tidy (Fuchsia LLVM // version 17.0.0git) to not crash. auto compare_function = ToMTLCompareFunction(depth->depth_compare); auto depth_write_enabled = depth->depth_write_enabled; des.depthCompareFunction = compare_function; des.depthWriteEnabled = depth_write_enabled; if (front.has_value()) { des.frontFaceStencil = ToMTLStencilDescriptor(front.value()); } if (back.has_value()) { des.backFaceStencil = ToMTLStencilDescriptor(back.value()); } return des; } MTLTextureDescriptor* ToMTLTextureDescriptor(const TextureDescriptor& desc) { if (!desc.IsValid()) { return nil; } auto mtl_desc = [[MTLTextureDescriptor alloc] init]; mtl_desc.textureType = ToMTLTextureType(desc.type); mtl_desc.pixelFormat = ToMTLPixelFormat(desc.format); mtl_desc.sampleCount = static_cast<NSUInteger>(desc.sample_count); mtl_desc.width = desc.size.width; mtl_desc.height = desc.size.height; mtl_desc.mipmapLevelCount = desc.mip_count; mtl_desc.usage = MTLTextureUsageUnknown; if (desc.usage & TextureUsage::kUnknown) { mtl_desc.usage |= MTLTextureUsageUnknown; } if (desc.usage & TextureUsage::kShaderRead) { mtl_desc.usage |= MTLTextureUsageShaderRead; } if (desc.usage & TextureUsage::kShaderWrite) { mtl_desc.usage |= MTLTextureUsageShaderWrite; } if (desc.usage & TextureUsage::kRenderTarget) { mtl_desc.usage |= MTLTextureUsageRenderTarget; } return mtl_desc; } MTLPixelFormat SafeMTLPixelFormatDepth24Unorm_Stencil8() { #if !FML_OS_IOS if (@available(macOS 10.11, *)) { return MTLPixelFormatDepth24Unorm_Stencil8; } #endif // FML_OS_IOS return MTLPixelFormatInvalid; } MTLPixelFormat SafeMTLPixelFormatBGR10_XR_sRGB() { if (@available(iOS 11, macOS 11.0, *)) { return MTLPixelFormatBGR10_XR_sRGB; } else { return MTLPixelFormatInvalid; } } MTLPixelFormat SafeMTLPixelFormatBGR10_XR() { if 
(@available(iOS 10, macOS 11.0, *)) { return MTLPixelFormatBGR10_XR; } else { return MTLPixelFormatInvalid; } } MTLPixelFormat SafeMTLPixelFormatBGRA10_XR() { if (@available(iOS 10, macOS 11.0, *)) { return MTLPixelFormatBGRA10_XR; } else { return MTLPixelFormatInvalid; } } } // namespace impeller
engine/impeller/renderer/backend/metal/formats_mtl.mm/0
{ "file_path": "engine/impeller/renderer/backend/metal/formats_mtl.mm", "repo_id": "engine", "token_count": 1697 }
250
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_ALLOCATOR_VK_H_ #define FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_ALLOCATOR_VK_H_ #include "impeller/core/allocator.h" #include "impeller/renderer/backend/vulkan/context_vk.h" #include "impeller/renderer/backend/vulkan/device_buffer_vk.h" #include "impeller/renderer/backend/vulkan/device_holder_vk.h" #include "impeller/renderer/backend/vulkan/vk.h" #include <cstdint> #include <memory> namespace impeller { class AllocatorVK final : public Allocator { public: // |Allocator| ~AllocatorVK() override; // Visible for testing size_t DebugGetHeapUsage() const; /// @brief Select a matching memory type for the given /// [memory_type_bits_requirement], or -1 if none is found. /// /// This only returns memory types with deviceLocal allocations. static int32_t FindMemoryTypeIndex( uint32_t memory_type_bits_requirement, vk::PhysicalDeviceMemoryProperties& memory_properties); // Visible for testing. static vk::ImageUsageFlags ToVKImageUsageFlags( PixelFormat format, TextureUsageMask usage, StorageMode mode, bool supports_memoryless_textures); private: friend class ContextVK; UniqueAllocatorVMA allocator_; UniquePoolVMA staging_buffer_pool_; std::weak_ptr<Context> context_; std::weak_ptr<DeviceHolderVK> device_holder_; ISize max_texture_size_; bool is_valid_ = false; bool supports_memoryless_textures_ = false; // TODO(jonahwilliams): figure out why CI can't create these buffer pools. 
bool created_buffer_pool_ = true; vk::PhysicalDeviceMemoryProperties memory_properties_; AllocatorVK(std::weak_ptr<Context> context, uint32_t vulkan_api_version, const vk::PhysicalDevice& physical_device, const std::shared_ptr<DeviceHolderVK>& device_holder, const vk::Instance& instance, const CapabilitiesVK& capabilities); // |Allocator| bool IsValid() const; // |Allocator| std::shared_ptr<DeviceBuffer> OnCreateBuffer( const DeviceBufferDescriptor& desc) override; // |Allocator| std::shared_ptr<Texture> OnCreateTexture( const TextureDescriptor& desc) override; // |Allocator| ISize GetMaxTextureSizeSupported() const override; // |Allocator| void DebugTraceMemoryStatistics() const override; AllocatorVK(const AllocatorVK&) = delete; AllocatorVK& operator=(const AllocatorVK&) = delete; }; } // namespace impeller #endif // FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_ALLOCATOR_VK_H_
engine/impeller/renderer/backend/vulkan/allocator_vk.h/0
{ "file_path": "engine/impeller/renderer/backend/vulkan/allocator_vk.h", "repo_id": "engine", "token_count": 994 }
251
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_COMMAND_ENCODER_VK_H_ #define FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_COMMAND_ENCODER_VK_H_ #include <cstdint> #include <functional> #include <optional> #include "impeller/renderer/backend/vulkan/command_pool_vk.h" #include "impeller/renderer/backend/vulkan/command_queue_vk.h" #include "impeller/renderer/backend/vulkan/context_vk.h" #include "impeller/renderer/backend/vulkan/descriptor_pool_vk.h" #include "impeller/renderer/backend/vulkan/device_holder_vk.h" #include "impeller/renderer/backend/vulkan/queue_vk.h" #include "impeller/renderer/backend/vulkan/shared_object_vk.h" #include "impeller/renderer/backend/vulkan/vk.h" namespace impeller { class ContextVK; class DeviceBuffer; class Buffer; class Texture; class TextureSourceVK; class TrackedObjectsVK; class FenceWaiterVK; class GPUProbe; class CommandEncoderFactoryVK { public: explicit CommandEncoderFactoryVK( const std::weak_ptr<const ContextVK>& context); std::shared_ptr<CommandEncoderVK> Create(); void SetLabel(const std::string& label); private: std::weak_ptr<const ContextVK> context_; std::optional<std::string> label_; CommandEncoderFactoryVK(const CommandEncoderFactoryVK&) = delete; CommandEncoderFactoryVK& operator=(const CommandEncoderFactoryVK&) = delete; }; class CommandEncoderVK { public: using SubmitCallback = std::function<void(bool)>; // Visible for testing. 
CommandEncoderVK(std::weak_ptr<const DeviceHolderVK> device_holder, std::shared_ptr<TrackedObjectsVK> tracked_objects, const std::shared_ptr<QueueVK>& queue, std::shared_ptr<FenceWaiterVK> fence_waiter); ~CommandEncoderVK(); bool IsValid() const; bool Track(std::shared_ptr<SharedObjectVK> object); bool Track(std::shared_ptr<const DeviceBuffer> buffer); bool IsTracking(const std::shared_ptr<const DeviceBuffer>& texture) const; bool Track(const std::shared_ptr<const Texture>& texture); bool IsTracking(const std::shared_ptr<const Texture>& texture) const; bool Track(std::shared_ptr<const TextureSourceVK> texture); vk::CommandBuffer GetCommandBuffer() const; void PushDebugGroup(std::string_view label) const; void PopDebugGroup() const; void InsertDebugMarker(std::string_view label) const; bool EndCommandBuffer() const; fml::StatusOr<vk::DescriptorSet> AllocateDescriptorSets( const vk::DescriptorSetLayout& layout, const ContextVK& context); private: friend class ContextVK; friend class CommandQueueVK; std::weak_ptr<const DeviceHolderVK> device_holder_; std::shared_ptr<TrackedObjectsVK> tracked_objects_; std::shared_ptr<QueueVK> queue_; const std::shared_ptr<FenceWaiterVK> fence_waiter_; std::shared_ptr<HostBuffer> host_buffer_; bool is_valid_ = true; void Reset(); CommandEncoderVK(const CommandEncoderVK&) = delete; CommandEncoderVK& operator=(const CommandEncoderVK&) = delete; }; } // namespace impeller #endif // FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_COMMAND_ENCODER_VK_H_
engine/impeller/renderer/backend/vulkan/command_encoder_vk.h/0
{ "file_path": "engine/impeller/renderer/backend/vulkan/command_encoder_vk.h", "repo_id": "engine", "token_count": 1157 }
252
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "impeller/renderer/backend/vulkan/descriptor_pool_vk.h" #include <optional> #include "impeller/base/validation.h" #include "impeller/renderer/backend/vulkan/resource_manager_vk.h" #include "vulkan/vulkan_enums.hpp" #include "vulkan/vulkan_handles.hpp" namespace impeller { struct DescriptorPoolSize { size_t buffer_bindings; size_t texture_bindings; size_t storage_bindings; size_t subpass_bindings; }; /// Descriptor pools are always allocated with the following sizes. static const constexpr DescriptorPoolSize kDefaultBindingSize = DescriptorPoolSize{ .buffer_bindings = 512u, // Buffer Bindings .texture_bindings = 256u, // Texture Bindings .storage_bindings = 32, .subpass_bindings = 4u // Subpass Bindings }; // Holds the command pool in a background thread, recyling it when not in use. class BackgroundDescriptorPoolVK final { public: BackgroundDescriptorPoolVK(BackgroundDescriptorPoolVK&&) = default; explicit BackgroundDescriptorPoolVK( vk::UniqueDescriptorPool&& pool, std::weak_ptr<DescriptorPoolRecyclerVK> recycler) : pool_(std::move(pool)), recycler_(std::move(recycler)) {} ~BackgroundDescriptorPoolVK() { auto const recycler = recycler_.lock(); // Not only does this prevent recycling when the context is being destroyed, // but it also prevents the destructor from effectively being called twice; // once for the original BackgroundCommandPoolVK() and once for the moved // BackgroundCommandPoolVK(). 
if (!recycler) { return; } recycler->Reclaim(std::move(pool_)); } private: BackgroundDescriptorPoolVK(const BackgroundDescriptorPoolVK&) = delete; BackgroundDescriptorPoolVK& operator=(const BackgroundDescriptorPoolVK&) = delete; vk::UniqueDescriptorPool pool_; uint32_t allocated_capacity_; std::weak_ptr<DescriptorPoolRecyclerVK> recycler_; }; DescriptorPoolVK::DescriptorPoolVK(std::weak_ptr<const ContextVK> context) : context_(std::move(context)) {} DescriptorPoolVK::~DescriptorPoolVK() { if (pools_.empty()) { return; } auto const context = context_.lock(); if (!context) { return; } auto const recycler = context->GetDescriptorPoolRecycler(); if (!recycler) { return; } for (auto i = 0u; i < pools_.size(); i++) { auto reset_pool_when_dropped = BackgroundDescriptorPoolVK(std::move(pools_[i]), recycler); UniqueResourceVKT<BackgroundDescriptorPoolVK> pool( context->GetResourceManager(), std::move(reset_pool_when_dropped)); } pools_.clear(); } fml::StatusOr<vk::DescriptorSet> DescriptorPoolVK::AllocateDescriptorSets( const vk::DescriptorSetLayout& layout, const ContextVK& context_vk) { if (pools_.empty()) { CreateNewPool(context_vk); } vk::DescriptorSetAllocateInfo set_info; set_info.setDescriptorPool(pools_.back().get()); set_info.setPSetLayouts(&layout); set_info.setDescriptorSetCount(1); vk::DescriptorSet set; auto result = context_vk.GetDevice().allocateDescriptorSets(&set_info, &set); if (result == vk::Result::eErrorOutOfPoolMemory) { // If the pool ran out of memory, we need to create a new pool. 
CreateNewPool(context_vk); set_info.setDescriptorPool(pools_.back().get()); result = context_vk.GetDevice().allocateDescriptorSets(&set_info, &set); } if (result != vk::Result::eSuccess) { VALIDATION_LOG << "Could not allocate descriptor sets: " << vk::to_string(result); return fml::Status(fml::StatusCode::kUnknown, ""); } return set; } fml::Status DescriptorPoolVK::CreateNewPool(const ContextVK& context_vk) { auto new_pool = context_vk.GetDescriptorPoolRecycler()->Get(); if (!new_pool) { return fml::Status(fml::StatusCode::kUnknown, "Failed to create descriptor pool"); } pools_.emplace_back(std::move(new_pool)); return fml::Status(); } void DescriptorPoolRecyclerVK::Reclaim(vk::UniqueDescriptorPool&& pool) { // Reset the pool on a background thread. auto strong_context = context_.lock(); if (!strong_context) { return; } auto device = strong_context->GetDevice(); device.resetDescriptorPool(pool.get()); // Move the pool to the recycled list. Lock recycled_lock(recycled_mutex_); if (recycled_.size() < kMaxRecycledPools) { recycled_.push_back(std::move(pool)); return; } } vk::UniqueDescriptorPool DescriptorPoolRecyclerVK::Get() { // Recycle a pool with a matching minumum capcity if it is available. 
auto recycled_pool = Reuse(); if (recycled_pool.has_value()) { return std::move(recycled_pool.value()); } return Create(); } vk::UniqueDescriptorPool DescriptorPoolRecyclerVK::Create() { auto strong_context = context_.lock(); if (!strong_context) { VALIDATION_LOG << "Unable to create a descriptor pool"; return {}; } std::vector<vk::DescriptorPoolSize> pools = { vk::DescriptorPoolSize{vk::DescriptorType::eCombinedImageSampler, kDefaultBindingSize.texture_bindings}, vk::DescriptorPoolSize{vk::DescriptorType::eUniformBuffer, kDefaultBindingSize.buffer_bindings}, vk::DescriptorPoolSize{vk::DescriptorType::eStorageBuffer, kDefaultBindingSize.storage_bindings}, vk::DescriptorPoolSize{vk::DescriptorType::eInputAttachment, kDefaultBindingSize.subpass_bindings}}; vk::DescriptorPoolCreateInfo pool_info; pool_info.setMaxSets(kDefaultBindingSize.texture_bindings + kDefaultBindingSize.buffer_bindings + kDefaultBindingSize.storage_bindings + kDefaultBindingSize.subpass_bindings); pool_info.setPoolSizes(pools); auto [result, pool] = strong_context->GetDevice().createDescriptorPoolUnique(pool_info); if (result != vk::Result::eSuccess) { VALIDATION_LOG << "Unable to create a descriptor pool"; } return std::move(pool); } std::optional<vk::UniqueDescriptorPool> DescriptorPoolRecyclerVK::Reuse() { Lock lock(recycled_mutex_); if (recycled_.empty()) { return std::nullopt; } auto recycled = std::move(recycled_[recycled_.size() - 1]); recycled_.pop_back(); return recycled; } } // namespace impeller
engine/impeller/renderer/backend/vulkan/descriptor_pool_vk.cc/0
{ "file_path": "engine/impeller/renderer/backend/vulkan/descriptor_pool_vk.cc", "repo_id": "engine", "token_count": 2453 }
253
#ifndef FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_LIMITS_VK_H_ #define FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_LIMITS_VK_H_ #include <stdint.h> namespace impeller { // Maximum size to use VMA image suballocation. Any allocation greater than or // equal to this value will use a dedicated VkDeviceMemory. // // This value was taken from ANGLE. constexpr size_t kImageSizeThresholdForDedicatedMemoryAllocation = 4 * 1024 * 1024; } // namespace impeller #endif // FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_LIMITS_VK_H_
engine/impeller/renderer/backend/vulkan/limits_vk.h/0
{ "file_path": "engine/impeller/renderer/backend/vulkan/limits_vk.h", "repo_id": "engine", "token_count": 204 }
254
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include <sys/types.h> #include <functional> #include <memory> #include <utility> #include "fml/synchronization/waitable_event.h" #include "gtest/gtest.h" #include "impeller/renderer/backend/vulkan/resource_manager_vk.h" namespace impeller { namespace testing { // While expected to be a singleton per context, the class does not enforce it. TEST(ResourceManagerVKTest, CreatesANewInstance) { auto const a = ResourceManagerVK::Create(); auto const b = ResourceManagerVK::Create(); EXPECT_NE(a, b); } namespace { // Invokes the provided callback when the destructor is called. // // Can be moved, but not copied. class DeathRattle final { public: explicit DeathRattle(std::function<void()> callback) : callback_(std::move(callback)) {} DeathRattle(DeathRattle&&) = default; DeathRattle& operator=(DeathRattle&&) = default; ~DeathRattle() { callback_(); } private: std::function<void()> callback_; }; } // namespace TEST(ResourceManagerVKTest, ReclaimMovesAResourceAndDestroysIt) { auto const manager = ResourceManagerVK::Create(); auto waiter = fml::AutoResetWaitableEvent(); auto dead = false; auto rattle = DeathRattle([&waiter]() { waiter.Signal(); }); // Not killed immediately. EXPECT_FALSE(waiter.IsSignaledForTest()); { auto resource = UniqueResourceVKT<DeathRattle>(manager, std::move(rattle)); } waiter.Wait(); } // Regression test for https://github.com/flutter/flutter/issues/134482. TEST(ResourceManagerVKTest, TerminatesWhenOutOfScope) { // Originally, this shared_ptr was never destroyed, and the thread never // terminated. This test ensures that the thread terminates when the // ResourceManagerVK is out of scope. std::weak_ptr<ResourceManagerVK> manager; { auto shared = ResourceManagerVK::Create(); manager = shared; } // The thread should have terminated. 
EXPECT_EQ(manager.lock(), nullptr); } TEST(ResourceManagerVKTest, IsThreadSafe) { // In a typical app, there is a single ResourceManagerVK per app, shared b/w // threads. // // This test ensures that the ResourceManagerVK is thread-safe. std::weak_ptr<ResourceManagerVK> manager; { auto const manager = ResourceManagerVK::Create(); // Spawn two threads, and have them both put resources into the manager. struct MockResource {}; std::thread thread1([&manager]() { UniqueResourceVKT<MockResource>(manager, MockResource{}); }); std::thread thread2([&manager]() { UniqueResourceVKT<MockResource>(manager, MockResource{}); }); thread1.join(); thread2.join(); } // The thread should have terminated. EXPECT_EQ(manager.lock(), nullptr); } } // namespace testing } // namespace impeller
engine/impeller/renderer/backend/vulkan/resource_manager_vk_unittests.cc/0
{ "file_path": "engine/impeller/renderer/backend/vulkan/resource_manager_vk_unittests.cc", "repo_id": "engine", "token_count": 908 }
255
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_SWAPCHAIN_KHR_KHR_SURFACE_VK_H_ #define FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_SWAPCHAIN_KHR_KHR_SURFACE_VK_H_ #include <memory> #include "impeller/renderer/backend/vulkan/context_vk.h" #include "impeller/renderer/backend/vulkan/swapchain/khr/khr_swapchain_image_vk.h" #include "impeller/renderer/surface.h" namespace impeller { class KHRSurfaceVK final : public Surface { public: using SwapCallback = std::function<bool(void)>; /// @brief Wrap the swapchain image in a Surface, which provides the /// additional configuration required for usage as on onscreen render /// target by Impeller. /// /// This creates the associated MSAA and depth+stencil texture. static std::unique_ptr<KHRSurfaceVK> WrapSwapchainImage( const std::shared_ptr<Context>& context, std::shared_ptr<KHRSwapchainImageVK>& swapchain_image, SwapCallback swap_callback, bool enable_msaa = true); // |Surface| ~KHRSurfaceVK() override; private: SwapCallback swap_callback_; KHRSurfaceVK(const RenderTarget& target, SwapCallback swap_callback); // |Surface| bool Present() const override; KHRSurfaceVK(const KHRSurfaceVK&) = delete; KHRSurfaceVK& operator=(const KHRSurfaceVK&) = delete; }; } // namespace impeller #endif // FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_SWAPCHAIN_KHR_KHR_SURFACE_VK_H_
engine/impeller/renderer/backend/vulkan/swapchain/khr/khr_surface_vk.h/0
{ "file_path": "engine/impeller/renderer/backend/vulkan/swapchain/khr/khr_surface_vk.h", "repo_id": "engine", "token_count": 592 }
256
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "impeller/renderer/backend/vulkan/tracked_objects_vk.h" #include "impeller/renderer/backend/vulkan/gpu_tracer_vk.h" namespace impeller { TrackedObjectsVK::TrackedObjectsVK( const std::weak_ptr<const ContextVK>& context, const std::shared_ptr<CommandPoolVK>& pool, std::unique_ptr<GPUProbe> probe) : desc_pool_(context), probe_(std::move(probe)) { if (!pool) { return; } auto buffer = pool->CreateCommandBuffer(); if (!buffer) { return; } pool_ = pool; buffer_ = std::move(buffer); is_valid_ = true; } TrackedObjectsVK::~TrackedObjectsVK() { if (!buffer_) { return; } pool_->CollectCommandBuffer(std::move(buffer_)); } bool TrackedObjectsVK::IsValid() const { return is_valid_; } void TrackedObjectsVK::Track(std::shared_ptr<SharedObjectVK> object) { if (!object) { return; } tracked_objects_.insert(std::move(object)); } void TrackedObjectsVK::Track(std::shared_ptr<const DeviceBuffer> buffer) { if (!buffer) { return; } tracked_buffers_.insert(std::move(buffer)); } bool TrackedObjectsVK::IsTracking( const std::shared_ptr<const DeviceBuffer>& buffer) const { if (!buffer) { return false; } return tracked_buffers_.find(buffer) != tracked_buffers_.end(); } void TrackedObjectsVK::Track(std::shared_ptr<const TextureSourceVK> texture) { if (!texture) { return; } tracked_textures_.insert(std::move(texture)); } bool TrackedObjectsVK::IsTracking( const std::shared_ptr<const TextureSourceVK>& texture) const { if (!texture) { return false; } return tracked_textures_.find(texture) != tracked_textures_.end(); } vk::CommandBuffer TrackedObjectsVK::GetCommandBuffer() const { return *buffer_; } DescriptorPoolVK& TrackedObjectsVK::GetDescriptorPool() { return desc_pool_; } GPUProbe& TrackedObjectsVK::GetGPUProbe() const { return *probe_.get(); } } // namespace impeller
engine/impeller/renderer/backend/vulkan/tracked_objects_vk.cc/0
{ "file_path": "engine/impeller/renderer/backend/vulkan/tracked_objects_vk.cc", "repo_id": "engine", "token_count": 752 }
257
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "impeller/renderer/capabilities.h"

#include "impeller/core/formats.h"

namespace impeller {

Capabilities::Capabilities() = default;

Capabilities::~Capabilities() = default;

// Immutable |Capabilities| implementation whose answers are all fixed at
// construction time. Instances can only be created through
// |CapabilitiesBuilder::Build| (the constructor is private).
class StandardCapabilities final : public Capabilities {
 public:
  // |Capabilities|
  ~StandardCapabilities() override = default;

  // |Capabilities|
  bool SupportsOffscreenMSAA() const override {
    return supports_offscreen_msaa_;
  }

  // |Capabilities|
  // Hard-coded to false for this implementation; there is intentionally no
  // builder switch for it.
  bool SupportsImplicitResolvingMSAA() const override { return false; }

  // |Capabilities|
  bool SupportsSSBO() const override { return supports_ssbo_; }

  // |Capabilities|
  bool SupportsBufferToTextureBlits() const override {
    return supports_buffer_to_texture_blits_;
  }

  // |Capabilities|
  bool SupportsTextureToTextureBlits() const override {
    return supports_texture_to_texture_blits_;
  }

  // |Capabilities|
  bool SupportsFramebufferFetch() const override {
    return supports_framebuffer_fetch_;
  }

  // |Capabilities|
  bool SupportsCompute() const override { return supports_compute_; }

  // |Capabilities|
  bool SupportsComputeSubgroups() const override {
    return supports_compute_subgroups_;
  }

  // |Capabilities|
  bool SupportsReadFromResolve() const override {
    return supports_read_from_resolve_;
  }

  // |Capabilities|
  bool SupportsDecalSamplerAddressMode() const override {
    return supports_decal_sampler_address_mode_;
  }

  // |Capabilities|
  PixelFormat GetDefaultColorFormat() const override {
    return default_color_format_;
  }

  // |Capabilities|
  PixelFormat GetDefaultStencilFormat() const override {
    return default_stencil_format_;
  }

  // |Capabilities|
  PixelFormat GetDefaultDepthStencilFormat() const override {
    return default_depth_stencil_format_;
  }

  // |Capabilities|
  bool SupportsDeviceTransientTextures() const override {
    return supports_device_transient_textures_;
  }

  // |Capabilities|
  PixelFormat GetDefaultGlyphAtlasFormat() const override {
    return default_glyph_atlas_format_;
  }

 private:
  // Private: only the builder (friend below) may construct instances.
  StandardCapabilities(bool supports_offscreen_msaa,
                       bool supports_ssbo,
                       bool supports_buffer_to_texture_blits,
                       bool supports_texture_to_texture_blits,
                       bool supports_framebuffer_fetch,
                       bool supports_compute,
                       bool supports_compute_subgroups,
                       bool supports_read_from_resolve,
                       bool supports_decal_sampler_address_mode,
                       bool supports_device_transient_textures,
                       PixelFormat default_color_format,
                       PixelFormat default_stencil_format,
                       PixelFormat default_depth_stencil_format,
                       PixelFormat default_glyph_atlas_format)
      : supports_offscreen_msaa_(supports_offscreen_msaa),
        supports_ssbo_(supports_ssbo),
        supports_buffer_to_texture_blits_(supports_buffer_to_texture_blits),
        supports_texture_to_texture_blits_(supports_texture_to_texture_blits),
        supports_framebuffer_fetch_(supports_framebuffer_fetch),
        supports_compute_(supports_compute),
        supports_compute_subgroups_(supports_compute_subgroups),
        supports_read_from_resolve_(supports_read_from_resolve),
        supports_decal_sampler_address_mode_(
            supports_decal_sampler_address_mode),
        supports_device_transient_textures_(supports_device_transient_textures),
        default_color_format_(default_color_format),
        default_stencil_format_(default_stencil_format),
        default_depth_stencil_format_(default_depth_stencil_format),
        default_glyph_atlas_format_(default_glyph_atlas_format) {}

  friend class CapabilitiesBuilder;

  bool supports_offscreen_msaa_ = false;
  bool supports_ssbo_ = false;
  bool supports_buffer_to_texture_blits_ = false;
  bool supports_texture_to_texture_blits_ = false;
  bool supports_framebuffer_fetch_ = false;
  bool supports_compute_ = false;
  bool supports_compute_subgroups_ = false;
  bool supports_read_from_resolve_ = false;
  bool supports_decal_sampler_address_mode_ = false;
  bool supports_device_transient_textures_ = false;
  PixelFormat default_color_format_ = PixelFormat::kUnknown;
  PixelFormat default_stencil_format_ = PixelFormat::kUnknown;
  PixelFormat default_depth_stencil_format_ = PixelFormat::kUnknown;
  PixelFormat default_glyph_atlas_format_ = PixelFormat::kUnknown;

  StandardCapabilities(const StandardCapabilities&) = delete;

  StandardCapabilities& operator=(const StandardCapabilities&) = delete;
};

CapabilitiesBuilder::CapabilitiesBuilder() = default;

CapabilitiesBuilder::~CapabilitiesBuilder() = default;

// All setters below are fluent: they record the value and return *this so
// calls can be chained before the final Build().

CapabilitiesBuilder& CapabilitiesBuilder::SetSupportsOffscreenMSAA(bool value) {
  supports_offscreen_msaa_ = value;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetSupportsSSBO(bool value) {
  supports_ssbo_ = value;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetSupportsBufferToTextureBlits(
    bool value) {
  supports_buffer_to_texture_blits_ = value;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetSupportsTextureToTextureBlits(
    bool value) {
  supports_texture_to_texture_blits_ = value;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetSupportsFramebufferFetch(
    bool value) {
  supports_framebuffer_fetch_ = value;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetSupportsCompute(bool value) {
  supports_compute_ = value;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetSupportsComputeSubgroups(
    bool value) {
  supports_compute_subgroups_ = value;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetDefaultColorFormat(
    PixelFormat value) {
  default_color_format_ = value;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetDefaultStencilFormat(
    PixelFormat value) {
  default_stencil_format_ = value;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetDefaultDepthStencilFormat(
    PixelFormat value) {
  default_depth_stencil_format_ = value;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetSupportsReadFromResolve(
    bool read_from_resolve) {
  supports_read_from_resolve_ = read_from_resolve;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetSupportsDecalSamplerAddressMode(
    bool value) {
  supports_decal_sampler_address_mode_ = value;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetSupportsDeviceTransientTextures(
    bool value) {
  supports_device_transient_textures_ = value;
  return *this;
}

CapabilitiesBuilder& CapabilitiesBuilder::SetDefaultGlyphAtlasFormat(
    PixelFormat value) {
  default_glyph_atlas_format_ = value;
  return *this;
}

// Builds the immutable capabilities object. Pixel formats that were never
// set via the builder default to PixelFormat::kUnknown.
std::unique_ptr<Capabilities> CapabilitiesBuilder::Build() {
  return std::unique_ptr<StandardCapabilities>(new StandardCapabilities(  //
      supports_offscreen_msaa_,                                           //
      supports_ssbo_,                                                     //
      supports_buffer_to_texture_blits_,                                  //
      supports_texture_to_texture_blits_,                                 //
      supports_framebuffer_fetch_,                                        //
      supports_compute_,                                                  //
      supports_compute_subgroups_,                                        //
      supports_read_from_resolve_,                                        //
      supports_decal_sampler_address_mode_,                               //
      supports_device_transient_textures_,                                //
      default_color_format_.value_or(PixelFormat::kUnknown),              //
      default_stencil_format_.value_or(PixelFormat::kUnknown),            //
      default_depth_stencil_format_.value_or(PixelFormat::kUnknown),      //
      default_glyph_atlas_format_.value_or(PixelFormat::kUnknown)         //
      ));
}

}  // namespace impeller
engine/impeller/renderer/capabilities.cc/0
{ "file_path": "engine/impeller/renderer/capabilities.cc", "repo_id": "engine", "token_count": 3186 }
258
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "impeller/renderer/compute_tessellator.h"

#include <cstdint>

#include "impeller/core/host_buffer.h"
#include "impeller/renderer/command_buffer.h"
#include "impeller/renderer/path_polyline.comp.h"
#include "impeller/renderer/pipeline_library.h"
#include "impeller/renderer/stroke.comp.h"

namespace impeller {

ComputeTessellator::ComputeTessellator() = default;

ComputeTessellator::~ComputeTessellator() = default;

// Allocates a device buffer sized to hold exactly one T and labels it for
// debugging. NOTE(review): the buffer returned by the allocator is not
// null-checked here; callers assume allocation succeeds — confirm this is
// acceptable on memory-constrained devices.
template <typename T>
static std::shared_ptr<DeviceBuffer> CreateDeviceBuffer(
    const std::shared_ptr<Context>& context,
    const std::string& label,
    StorageMode storage_mode = StorageMode::kDevicePrivate) {
  DeviceBufferDescriptor desc;
  desc.storage_mode = storage_mode;
  desc.size = sizeof(T);
  auto buffer = context->GetResourceAllocator()->CreateBuffer(desc);
  buffer->SetLabel(label);
  return buffer;
}

// The setters below are fluent: each records its value and returns *this so
// configuration calls can be chained before Tessellate().

ComputeTessellator& ComputeTessellator::SetStyle(Style value) {
  style_ = value;
  return *this;
}

ComputeTessellator& ComputeTessellator::SetStrokeWidth(Scalar value) {
  stroke_width_ = value;
  return *this;
}

ComputeTessellator& ComputeTessellator::SetStrokeJoin(Join value) {
  stroke_join_ = value;
  return *this;
}

ComputeTessellator& ComputeTessellator::SetStrokeCap(Cap value) {
  stroke_cap_ = value;
  return *this;
}

ComputeTessellator& ComputeTessellator::SetMiterLimit(Scalar value) {
  miter_limit_ = value;
  return *this;
}

ComputeTessellator& ComputeTessellator::SetCubicAccuracy(Scalar value) {
  cubic_accuracy_ = value;
  return *this;
}

ComputeTessellator& ComputeTessellator::SetQuadraticTolerance(Scalar value) {
  quad_tolerance_ = value;
  return *this;
}

// Tessellates |path| on the GPU in two compute stages:
//   1. "Generate Polyline": flattens cubics/quads/lines into a polyline.
//   2. "Compute Stroke": expands the polyline into stroke vertices, written
//      to |vertex_buffer| with the vertex count in |vertex_buffer_count|.
// Returns kTooManyComponents if the path exceeds the fixed shader limits,
// kCommandInvalid on any encoding/submission failure, and kOk otherwise.
// |callback| is forwarded to the command-buffer submission.
ComputeTessellator::Status ComputeTessellator::Tessellate(
    const Path& path,
    HostBuffer& host_buffer,
    const std::shared_ptr<Context>& context,
    BufferView vertex_buffer,
    BufferView vertex_buffer_count,
    const CommandBuffer::CompletionCallback& callback) const {
  // Only stroke tessellation is implemented (debug-checked, not enforced in
  // release builds).
  FML_DCHECK(style_ == Style::kStroke);
  using PS = PathPolylineComputeShader;
  using SS = StrokeComputeShader;

  // Size estimates: each cubic is budgeted as up to 6 quads and each quad as
  // up to 6 lines — presumably matching the expansion performed by
  // path_polyline.comp; TODO confirm against the shader source.
  auto cubic_count = path.GetComponentCount(Path::ComponentType::kCubic);
  auto quad_count = path.GetComponentCount(Path::ComponentType::kQuadratic) +
                    (cubic_count * 6);
  auto line_count =
      path.GetComponentCount(Path::ComponentType::kLinear) + (quad_count * 6);
  if (cubic_count > kMaxCubicCount || quad_count > kMaxQuadCount ||
      line_count > kMaxLineCount) {
    return Status::kTooManyComponents;
  }
  PS::Cubics<kMaxCubicCount> cubics{.count = 0};
  PS::Quads<kMaxQuadCount> quads{.count = 0};
  PS::Lines<kMaxLineCount> lines{.count = 0};
  PS::Components<kMaxComponentCount> components{.count = 0};
  PS::Config config{.cubic_accuracy = cubic_accuracy_,
                    .quad_tolerance = quad_tolerance_};

  // Bucket every path component into its per-kind array and record, in path
  // order, {index into that array, point count}. The 2/3/4 constants are
  // presumably the control-point counts of linear/quadratic/cubic segments —
  // confirm against the compute shader's component layout.
  path.EnumerateComponents(
      [&lines, &components](size_t index, const LinearPathComponent& linear) {
        ::memcpy(&lines.data[lines.count], &linear,
                 sizeof(LinearPathComponent));
        components.data[components.count++] = {lines.count++, 2};
      },
      [&quads, &components](size_t index, const QuadraticPathComponent& quad) {
        ::memcpy(&quads.data[quads.count], &quad,
                 sizeof(QuadraticPathComponent));
        components.data[components.count++] = {quads.count++, 3};
      },
      [&cubics, &components](size_t index, const CubicPathComponent& cubic) {
        ::memcpy(&cubics.data[cubics.count], &cubic,
                 sizeof(CubicPathComponent));
        components.data[components.count++] = {cubics.count++, 4};
      },
      // Contours are intentionally ignored by this tessellator.
      [](size_t index, const ContourComponent& contour) {});

  // Intermediate polyline shared by the two stages; capacity fixed at 2048
  // points.
  auto polyline_buffer =
      CreateDeviceBuffer<PS::Polyline<2048>>(context, "Polyline");

  auto cmd_buffer = context->CreateCommandBuffer();
  auto pass = cmd_buffer->CreateComputePass();
  FML_DCHECK(pass && pass->IsValid());

  // Stage 1: flatten the path into a polyline.
  {
    using PathPolylinePipelineBuilder = ComputePipelineBuilder<PS>;
    auto pipeline_desc =
        PathPolylinePipelineBuilder::MakeDefaultPipelineDescriptor(*context);
    FML_DCHECK(pipeline_desc.has_value());
    auto compute_pipeline =
        context->GetPipelineLibrary()->GetPipeline(pipeline_desc).Get();
    FML_DCHECK(compute_pipeline);

    pass->SetPipeline(compute_pipeline);
    pass->SetCommandLabel("Generate Polyline");

    PS::BindConfig(*pass, host_buffer.EmplaceUniform(config));
    PS::BindCubics(*pass, host_buffer.EmplaceStorageBuffer(cubics));
    PS::BindQuads(*pass, host_buffer.EmplaceStorageBuffer(quads));
    PS::BindLines(*pass, host_buffer.EmplaceStorageBuffer(lines));
    PS::BindComponents(*pass, host_buffer.EmplaceStorageBuffer(components));
    PS::BindPolyline(*pass, DeviceBuffer::AsBufferView(polyline_buffer));

    if (!pass->Compute(ISize(line_count, 1)).ok()) {
      return Status::kCommandInvalid;
    }
  }

  // Stage 2: expand the polyline into stroke vertices.
  {
    using StrokePipelineBuilder = ComputePipelineBuilder<SS>;
    auto pipeline_desc =
        StrokePipelineBuilder::MakeDefaultPipelineDescriptor(*context);
    FML_DCHECK(pipeline_desc.has_value());
    auto compute_pipeline =
        context->GetPipelineLibrary()->GetPipeline(pipeline_desc).Get();
    FML_DCHECK(compute_pipeline);

    // Barrier so stage 2 observes stage 1's writes to the polyline buffer.
    pass->AddBufferMemoryBarrier();
    pass->SetPipeline(compute_pipeline);
    pass->SetCommandLabel("Compute Stroke");

    // Intentionally shadows the outer PS::Config |config|; scoped to this
    // block only.
    SS::Config config{
        .width = stroke_width_,
        .cap = static_cast<uint32_t>(stroke_cap_),
        .join = static_cast<uint32_t>(stroke_join_),
        .miter_limit = miter_limit_,
    };
    SS::BindConfig(*pass, host_buffer.EmplaceUniform(config));

    SS::BindPolyline(*pass, DeviceBuffer::AsBufferView(polyline_buffer));
    SS::BindVertexBufferCount(*pass, std::move(vertex_buffer_count));
    SS::BindVertexBuffer(*pass, std::move(vertex_buffer));

    if (!pass->Compute(ISize(line_count, 1)).ok()) {
      return Status::kCommandInvalid;
    }
  }

  if (!pass->EncodeCommands()) {
    return Status::kCommandInvalid;
  }
  if (!context->GetCommandQueue()->Submit({cmd_buffer}, callback).ok()) {
    return Status::kCommandInvalid;
  }
  return Status::kOk;
}

}  // namespace impeller
engine/impeller/renderer/compute_tessellator.cc/0
{ "file_path": "engine/impeller/renderer/compute_tessellator.cc", "repo_id": "engine", "token_count": 2367 }
259
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef FLUTTER_IMPELLER_RENDERER_POOL_H_
#define FLUTTER_IMPELLER_RENDERER_POOL_H_

#include <cstdint>
#include <memory>
#include <mutex>
#include <vector>  // Was missing: std::vector is used below (IWYU).

namespace impeller {

/// @brief A thread-safe pool with a limited byte size.
/// @tparam T The type that the pool will contain. T must provide a static
///           `Create()` factory returning `std::shared_ptr<T>`, plus
///           `GetSize()` and `Reset()` member functions.
template <typename T>
class Pool {
 public:
  explicit Pool(uint32_t limit_bytes) : limit_bytes_(limit_bytes) {}

  /// Returns a pooled object if one is available, otherwise creates a new
  /// one via T::Create(). The returned object's size is removed from the
  /// pool's byte accounting.
  std::shared_ptr<T> Grab() {
    std::scoped_lock lock(mutex_);
    if (pool_.empty()) {
      return T::Create();
    }
    std::shared_ptr<T> result = std::move(pool_.back());
    pool_.pop_back();
    // NOTE: assumes Reset() (called in Recycle) does not change GetSize();
    // otherwise the byte accounting would drift — TODO confirm for each T.
    size_ -= result->GetSize();
    return result;
  }

  /// Returns |object| to the pool for reuse. The object is dropped (freed
  /// when the caller releases its reference) if admitting it would exceed
  /// the byte limit, or if it alone occupies at least half the limit.
  void Recycle(std::shared_ptr<T> object) {
    std::scoped_lock lock(mutex_);
    size_t object_size = object->GetSize();
    if (size_ + object_size <= limit_bytes_ &&
        object_size < (limit_bytes_ / 2)) {
      object->Reset();
      size_ += object_size;
      pool_.emplace_back(std::move(object));
    }
  }

  /// Returns the total byte size of the objects currently held by the pool.
  uint32_t GetSize() const {
    std::scoped_lock lock(mutex_);
    return size_;
  }

 private:
  std::vector<std::shared_ptr<T>> pool_;
  const uint32_t limit_bytes_;
  uint32_t size_ = 0;
  // Note: This would perform better as a lockless ring buffer.
  mutable std::mutex mutex_;
};

}  // namespace impeller

#endif  // FLUTTER_IMPELLER_RENDERER_POOL_H_
engine/impeller/renderer/pool.h/0
{ "file_path": "engine/impeller/renderer/pool.h", "repo_id": "engine", "token_count": 579 }
260
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_RENDERER_SHADER_KEY_H_ #define FLUTTER_IMPELLER_RENDERER_SHADER_KEY_H_ #include <memory> #include <string> #include <unordered_map> #include "flutter/fml/hash_combine.h" #include "flutter/fml/macros.h" #include "impeller/core/shader_types.h" namespace impeller { struct ShaderKey { std::string name; ShaderStage stage = ShaderStage::kUnknown; ShaderKey(std::string_view p_name, ShaderStage p_stage) : name({p_name.data(), p_name.size()}), stage(p_stage) {} struct Hash { size_t operator()(const ShaderKey& key) const { return fml::HashCombine(key.name, key.stage); } }; struct Equal { constexpr bool operator()(const ShaderKey& k1, const ShaderKey& k2) const { return k1.stage == k2.stage && k1.name == k2.name; } }; }; class ShaderFunction; using ShaderFunctionMap = std::unordered_map<ShaderKey, std::shared_ptr<const ShaderFunction>, ShaderKey::Hash, ShaderKey::Equal>; } // namespace impeller #endif // FLUTTER_IMPELLER_RENDERER_SHADER_KEY_H_
engine/impeller/renderer/shader_key.h/0
{ "file_path": "engine/impeller/renderer/shader_key.h", "repo_id": "engine", "token_count": 535 }
261
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("//flutter/third_party/flatbuffers/flatbuffers.gni")
import("../tools/impeller.gni")

# Shared include paths for targets consuming the generated flatbuffer headers.
config("runtime_stage_config") {
  configs = [ "//flutter/impeller:impeller_public_config" ]
  include_dirs = [
    "$root_gen_dir/flutter",
    "$root_gen_dir/flutter/impeller/runtime_stage",
  ]
}

# Type definitions shared by the runtime stage schema.
flatbuffers("runtime_stage_types_flatbuffers") {
  flatbuffers = [ "runtime_stage_types.fbs" ]
  public_configs = [ ":runtime_stage_config" ]
  public_deps = [ "//flutter/third_party/flatbuffers" ]
}

# The runtime stage payload schema; depends on the shared type schema above.
flatbuffers("runtime_stage_flatbuffers") {
  flatbuffers = [ "runtime_stage.fbs" ]
  public_configs = [ ":runtime_stage_config" ]
  public_deps = [
    ":runtime_stage_types_flatbuffers",
    "//flutter/third_party/flatbuffers",
  ]
}

# Runtime stage deserialization library.
impeller_component("runtime_stage") {
  sources = [
    "runtime_stage.cc",
    "runtime_stage.h",
  ]
  public_deps = [
    ":runtime_stage_flatbuffers",
    "../base",
    "../core",
    "//flutter/fml",
  ]
}

impeller_component("runtime_stage_unittests") {
  testonly = true
  sources = [
    "runtime_stage_playground.cc",
    "runtime_stage_playground.h",
    "runtime_stage_unittests.cc",
  ]
  deps = [
    ":runtime_stage",
    "../playground:playground_test",
    "//flutter/testing",
  ]
}
engine/impeller/runtime_stage/BUILD.gn/0
{ "file_path": "engine/impeller/runtime_stage/BUILD.gn", "repo_id": "engine", "token_count": 548 }
262
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_SCENE_ANIMATION_ANIMATION_TRANSFORMS_H_ #define FLUTTER_IMPELLER_SCENE_ANIMATION_ANIMATION_TRANSFORMS_H_ #include "impeller/geometry/matrix_decomposition.h" namespace impeller { namespace scene { struct AnimationTransforms { MatrixDecomposition bind_pose; MatrixDecomposition animated_pose; }; } // namespace scene } // namespace impeller #endif // FLUTTER_IMPELLER_SCENE_ANIMATION_ANIMATION_TRANSFORMS_H_
engine/impeller/scene/animation/animation_transforms.h/0
{ "file_path": "engine/impeller/scene/animation/animation_transforms.h", "repo_id": "engine", "token_count": 213 }
263
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_SCENE_IMPORTER_SWITCHES_H_ #define FLUTTER_IMPELLER_SCENE_IMPORTER_SWITCHES_H_ #include <iostream> #include <memory> #include "flutter/fml/command_line.h" #include "flutter/fml/macros.h" #include "flutter/fml/unique_fd.h" #include "impeller/scene/importer/types.h" namespace impeller { namespace scene { namespace importer { struct Switches { std::shared_ptr<fml::UniqueFD> working_directory; std::string source_file_name; SourceType input_type; std::string output_file_name; Switches(); ~Switches(); explicit Switches(const fml::CommandLine& command_line); bool AreValid(std::ostream& explain) const; static void PrintHelp(std::ostream& stream); }; } // namespace importer } // namespace scene } // namespace impeller #endif // FLUTTER_IMPELLER_SCENE_IMPORTER_SWITCHES_H_
engine/impeller/scene/importer/switches.h/0
{ "file_path": "engine/impeller/scene/importer/switches.h", "repo_id": "engine", "token_count": 350 }
264
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_SCENE_SCENE_ENCODER_H_ #define FLUTTER_IMPELLER_SCENE_SCENE_ENCODER_H_ #include <memory> #include <string> #include <vector> #include "flutter/fml/macros.h" #include "impeller/renderer/command_buffer.h" #include "impeller/scene/camera.h" #include "impeller/scene/geometry.h" #include "impeller/scene/material.h" namespace impeller { namespace scene { class Scene; struct SceneCommand { std::string label; Matrix transform; Geometry* geometry; Material* material; }; class SceneEncoder { public: void Add(const SceneCommand& command); private: SceneEncoder(); std::shared_ptr<CommandBuffer> BuildSceneCommandBuffer( const SceneContext& scene_context, const Matrix& camera_transform, RenderTarget render_target) const; std::vector<SceneCommand> commands_; friend Scene; SceneEncoder(const SceneEncoder&) = delete; SceneEncoder& operator=(const SceneEncoder&) = delete; }; } // namespace scene } // namespace impeller #endif // FLUTTER_IMPELLER_SCENE_SCENE_ENCODER_H_
engine/impeller/scene/scene_encoder.h/0
{ "file_path": "engine/impeller/scene/scene_encoder.h", "repo_id": "engine", "token_count": 415 }
265
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_SHADER_ARCHIVE_SHADER_ARCHIVE_H_ #define FLUTTER_IMPELLER_SHADER_ARCHIVE_SHADER_ARCHIVE_H_ #include <memory> #include <type_traits> #include <unordered_map> #include "flutter/fml/hash_combine.h" #include "flutter/fml/macros.h" #include "flutter/fml/mapping.h" #include "impeller/shader_archive/shader_archive_types.h" namespace impeller { class MultiArchShaderArchive; class ShaderArchive { public: ShaderArchive(ShaderArchive&&); ~ShaderArchive(); bool IsValid() const; size_t GetShaderCount() const; std::shared_ptr<fml::Mapping> GetMapping(ArchiveShaderType type, std::string name) const; size_t IterateAllShaders( const std::function<bool(ArchiveShaderType type, const std::string& name, const std::shared_ptr<fml::Mapping>& mapping)>&) const; private: friend MultiArchShaderArchive; struct ShaderKey { ArchiveShaderType type = ArchiveShaderType::kFragment; std::string name; struct Hash { size_t operator()(const ShaderKey& key) const { return fml::HashCombine( static_cast<std::underlying_type_t<decltype(key.type)>>(key.type), key.name); } }; struct Equal { bool operator()(const ShaderKey& lhs, const ShaderKey& rhs) const { return lhs.type == rhs.type && lhs.name == rhs.name; } }; }; using Shaders = std::unordered_map<ShaderKey, std::shared_ptr<fml::Mapping>, ShaderKey::Hash, ShaderKey::Equal>; std::shared_ptr<const fml::Mapping> payload_; Shaders shaders_; bool is_valid_ = false; explicit ShaderArchive(std::shared_ptr<const fml::Mapping> payload); ShaderArchive(const ShaderArchive&) = delete; ShaderArchive& operator=(const ShaderArchive&) = delete; }; } // namespace impeller #endif // FLUTTER_IMPELLER_SHADER_ARCHIVE_SHADER_ARCHIVE_H_
engine/impeller/shader_archive/shader_archive.h/0
{ "file_path": "engine/impeller/shader_archive/shader_archive.h", "repo_id": "engine", "token_count": 987 }
266
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("//flutter/testing/android/native_activity/native_activity.gni")
import("../../tools/impeller.gni")

config("public_android_config") {
  defines = [ "__ANDROID_UNAVAILABLE_SYMBOLS_ARE_WEAK__" ]
}

# Wrappers over NDK facilities (choreographer, hardware buffers, surfaces).
impeller_component("android") {
  sources = [
    "choreographer.cc",
    "choreographer.h",
    "hardware_buffer.cc",
    "hardware_buffer.h",
    "native_window.cc",
    "native_window.h",
    "proc_table.cc",
    "proc_table.h",
    "surface_control.cc",
    "surface_control.h",
    "surface_transaction.cc",
    "surface_transaction.h",
  ]
  public_deps = [
    "../../base",
    "../../geometry",
    "//flutter/fml",
  ]
  public_configs = [ ":public_android_config" ]
}

test_fixtures("unittests_fixtures") {
  fixtures = []
}

# Test sources shared by the host executable and the on-device APK below.
source_set("unittests_lib") {
  visibility = [ ":*" ]
  testonly = true
  sources = [ "toolkit_android_unittests.cc" ]
  deps = [
    ":android",
    ":unittests_fixtures",
    "//flutter/testing",
  ]
}

executable("unittests") {
  assert(is_android)
  testonly = true
  output_name = "impeller_toolkit_android_unittests"
  deps = [ ":unittests_lib" ]
}

# On-device test runner packaged as a native activity APK.
native_activity_apk("apk_unittests") {
  apk_name = "impeller_toolkit_android_unittests"
  testonly = true
  deps = [
    ":unittests_lib",
    "//flutter/testing/android/native_activity:gtest_activity",
  ]
}
engine/impeller/toolkit/android/BUILD.gn/0
{ "file_path": "engine/impeller/toolkit/android/BUILD.gn", "repo_id": "engine", "token_count": 597 }
267
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "impeller/toolkit/egl/config.h" #include <utility> namespace impeller { namespace egl { Config::Config(ConfigDescriptor descriptor, EGLConfig config) : desc_(descriptor), config_(config) {} Config::~Config() = default; const ConfigDescriptor& Config::GetDescriptor() const { return desc_; } const EGLConfig& Config::GetHandle() const { return config_; } bool Config::IsValid() const { return config_ != nullptr; } } // namespace egl } // namespace impeller
engine/impeller/toolkit/egl/config.cc/0
{ "file_path": "engine/impeller/toolkit/egl/config.cc", "repo_id": "engine", "token_count": 207 }
268
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("//build/compiled_action.gni")
import("//flutter/common/config.gni")
import("//flutter/impeller/tools/malioc.gni")
import("//flutter/testing/testing.gni")

declare_args() {
  # Maximum number of malioc processes to run in parallel.
  #
  # To avoid out-of-memory errors we explicitly reduce the number of jobs.
  impeller_concurrent_malioc_jobs = 1
}

# GN pool that throttles malioc actions to the configured job count.
pool("malioc_pool") {
  depth = impeller_concurrent_malioc_jobs
}
engine/impeller/tools/BUILD.gn/0
{ "file_path": "engine/impeller/tools/BUILD.gn", "repo_id": "engine", "token_count": 194 }
269
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_IMPELLER_TYPOGRAPHER_BACKENDS_SKIA_TYPEFACE_SKIA_H_ #define FLUTTER_IMPELLER_TYPOGRAPHER_BACKENDS_SKIA_TYPEFACE_SKIA_H_ #include "flutter/fml/macros.h" #include "impeller/base/backend_cast.h" #include "impeller/typographer/typeface.h" #include "third_party/skia/include/core/SkRefCnt.h" #include "third_party/skia/include/core/SkTypeface.h" namespace impeller { class TypefaceSkia final : public Typeface, public BackendCast<TypefaceSkia, Typeface> { public: explicit TypefaceSkia(sk_sp<SkTypeface> typeface); ~TypefaceSkia() override; // |Typeface| bool IsValid() const override; // |Comparable<Typeface>| std::size_t GetHash() const override; // |Comparable<Typeface>| bool IsEqual(const Typeface& other) const override; const sk_sp<SkTypeface>& GetSkiaTypeface() const; private: sk_sp<SkTypeface> typeface_; TypefaceSkia(const TypefaceSkia&) = delete; TypefaceSkia& operator=(const TypefaceSkia&) = delete; }; } // namespace impeller #endif // FLUTTER_IMPELLER_TYPOGRAPHER_BACKENDS_SKIA_TYPEFACE_SKIA_H_
engine/impeller/typographer/backends/skia/typeface_skia.h/0
{ "file_path": "engine/impeller/typographer/backends/skia/typeface_skia.h", "repo_id": "engine", "token_count": 480 }
270
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/display_list/testing/dl_test_snippets.h" #include "flutter/testing/testing.h" #include "impeller/playground/playground_test.h" #include "impeller/typographer/backends/skia/text_frame_skia.h" #include "impeller/typographer/backends/skia/typographer_context_skia.h" #include "impeller/typographer/lazy_glyph_atlas.h" #include "impeller/typographer/rectangle_packer.h" #include "third_party/skia/include/core/SkFont.h" #include "third_party/skia/include/core/SkFontMgr.h" #include "third_party/skia/include/core/SkRect.h" #include "third_party/skia/include/core/SkTextBlob.h" #include "third_party/skia/include/core/SkTypeface.h" #include "txt/platform.h" // TODO(zanderso): https://github.com/flutter/flutter/issues/127701 // NOLINTBEGIN(bugprone-unchecked-optional-access) namespace impeller { namespace testing { using TypographerTest = PlaygroundTest; INSTANTIATE_PLAYGROUND_SUITE(TypographerTest); static std::shared_ptr<GlyphAtlas> CreateGlyphAtlas( Context& context, const TypographerContext* typographer_context, GlyphAtlas::Type type, Scalar scale, const std::shared_ptr<GlyphAtlasContext>& atlas_context, const TextFrame& frame) { FontGlyphMap font_glyph_map; frame.CollectUniqueFontGlyphPairs(font_glyph_map, scale); return typographer_context->CreateGlyphAtlas(context, type, atlas_context, font_glyph_map); } TEST_P(TypographerTest, CanConvertTextBlob) { SkFont font = flutter::testing::CreateTestFontOfSize(12); auto blob = SkTextBlob::MakeFromString( "the quick brown fox jumped over the lazy dog.", font); ASSERT_TRUE(blob); auto frame = MakeTextFrameFromTextBlobSkia(blob); ASSERT_EQ(frame->GetRunCount(), 1u); for (const auto& run : frame->GetRuns()) { ASSERT_TRUE(run.IsValid()); ASSERT_EQ(run.GetGlyphCount(), 45u); } } TEST_P(TypographerTest, CanCreateRenderContext) { auto context = 
TypographerContextSkia::Make(); ASSERT_TRUE(context && context->IsValid()); } TEST_P(TypographerTest, CanCreateGlyphAtlas) { auto context = TypographerContextSkia::Make(); auto atlas_context = context->CreateGlyphAtlasContext(); ASSERT_TRUE(context && context->IsValid()); SkFont sk_font = flutter::testing::CreateTestFontOfSize(12); auto blob = SkTextBlob::MakeFromString("hello", sk_font); ASSERT_TRUE(blob); auto atlas = CreateGlyphAtlas( *GetContext(), context.get(), GlyphAtlas::Type::kAlphaBitmap, 1.0f, atlas_context, *MakeTextFrameFromTextBlobSkia(blob)); ASSERT_NE(atlas, nullptr); ASSERT_NE(atlas->GetTexture(), nullptr); ASSERT_EQ(atlas->GetType(), GlyphAtlas::Type::kAlphaBitmap); ASSERT_EQ(atlas->GetGlyphCount(), 4llu); std::optional<impeller::ScaledFont> first_scaled_font; std::optional<impeller::Glyph> first_glyph; Rect first_rect; atlas->IterateGlyphs([&](const ScaledFont& scaled_font, const Glyph& glyph, const Rect& rect) -> bool { first_scaled_font = scaled_font; first_glyph = glyph; first_rect = rect; return false; }); ASSERT_TRUE(first_scaled_font.has_value()); ASSERT_TRUE(atlas ->FindFontGlyphBounds( {first_scaled_font.value(), first_glyph.value()}) .has_value()); } TEST_P(TypographerTest, LazyAtlasTracksColor) { #if FML_OS_MACOSX auto mapping = flutter::testing::OpenFixtureAsSkData("Apple Color Emoji.ttc"); #else auto mapping = flutter::testing::OpenFixtureAsSkData("NotoColorEmoji.ttf"); #endif ASSERT_TRUE(mapping); sk_sp<SkFontMgr> font_mgr = txt::GetDefaultFontManager(); SkFont emoji_font(font_mgr->makeFromData(mapping), 50.0); SkFont sk_font = flutter::testing::CreateTestFontOfSize(12); auto blob = SkTextBlob::MakeFromString("hello", sk_font); ASSERT_TRUE(blob); auto frame = MakeTextFrameFromTextBlobSkia(blob); ASSERT_FALSE(frame->GetAtlasType() == GlyphAtlas::Type::kColorBitmap); LazyGlyphAtlas lazy_atlas(TypographerContextSkia::Make()); lazy_atlas.AddTextFrame(*frame, 1.0f); frame = MakeTextFrameFromTextBlobSkia( SkTextBlob::MakeFromString("😀 ", 
emoji_font)); ASSERT_TRUE(frame->GetAtlasType() == GlyphAtlas::Type::kColorBitmap); lazy_atlas.AddTextFrame(*frame, 1.0f); // Creates different atlases for color and red bitmap. auto color_atlas = lazy_atlas.CreateOrGetGlyphAtlas( *GetContext(), GlyphAtlas::Type::kColorBitmap); auto bitmap_atlas = lazy_atlas.CreateOrGetGlyphAtlas( *GetContext(), GlyphAtlas::Type::kAlphaBitmap); ASSERT_FALSE(color_atlas == bitmap_atlas); } TEST_P(TypographerTest, GlyphAtlasWithOddUniqueGlyphSize) { auto context = TypographerContextSkia::Make(); auto atlas_context = context->CreateGlyphAtlasContext(); ASSERT_TRUE(context && context->IsValid()); SkFont sk_font = flutter::testing::CreateTestFontOfSize(12); auto blob = SkTextBlob::MakeFromString("AGH", sk_font); ASSERT_TRUE(blob); auto atlas = CreateGlyphAtlas( *GetContext(), context.get(), GlyphAtlas::Type::kAlphaBitmap, 1.0f, atlas_context, *MakeTextFrameFromTextBlobSkia(blob)); ASSERT_NE(atlas, nullptr); ASSERT_NE(atlas->GetTexture(), nullptr); ASSERT_EQ(atlas->GetTexture()->GetSize().width, atlas->GetTexture()->GetSize().height); } TEST_P(TypographerTest, GlyphAtlasIsRecycledIfUnchanged) { auto context = TypographerContextSkia::Make(); auto atlas_context = context->CreateGlyphAtlasContext(); ASSERT_TRUE(context && context->IsValid()); SkFont sk_font = flutter::testing::CreateTestFontOfSize(12); auto blob = SkTextBlob::MakeFromString("spooky skellingtons", sk_font); ASSERT_TRUE(blob); auto atlas = CreateGlyphAtlas( *GetContext(), context.get(), GlyphAtlas::Type::kAlphaBitmap, 1.0f, atlas_context, *MakeTextFrameFromTextBlobSkia(blob)); ASSERT_NE(atlas, nullptr); ASSERT_NE(atlas->GetTexture(), nullptr); ASSERT_EQ(atlas, atlas_context->GetGlyphAtlas()); // now attempt to re-create an atlas with the same text blob. 
auto next_atlas = CreateGlyphAtlas( *GetContext(), context.get(), GlyphAtlas::Type::kAlphaBitmap, 1.0f, atlas_context, *MakeTextFrameFromTextBlobSkia(blob)); ASSERT_EQ(atlas, next_atlas); ASSERT_EQ(atlas_context->GetGlyphAtlas(), atlas); } TEST_P(TypographerTest, GlyphAtlasWithLotsOfdUniqueGlyphSize) { auto context = TypographerContextSkia::Make(); auto atlas_context = context->CreateGlyphAtlasContext(); ASSERT_TRUE(context && context->IsValid()); const char* test_string = "QWERTYUIOPASDFGHJKLZXCVBNMqewrtyuiopasdfghjklzxcvbnm,.<>[]{};':" "2134567890-=!@#$%^&*()_+" "œ∑´®†¥¨ˆøπ““‘‘åß∂ƒ©˙∆˚¬…æ≈ç√∫˜µ≤≥≥≥≥÷¡™£¢∞§¶•ªº–≠⁄€‹›fifl‡°·‚—±Œ„´‰Á¨Ø∏”’/" "* Í˝ */¸˛Ç◊ı˜Â¯˘¿"; SkFont sk_font = flutter::testing::CreateTestFontOfSize(12); auto blob = SkTextBlob::MakeFromString(test_string, sk_font); ASSERT_TRUE(blob); FontGlyphMap font_glyph_map; size_t size_count = 8; for (size_t index = 0; index < size_count; index += 1) { MakeTextFrameFromTextBlobSkia(blob)->CollectUniqueFontGlyphPairs( font_glyph_map, 0.6 * index); }; auto atlas = context->CreateGlyphAtlas(*GetContext(), GlyphAtlas::Type::kAlphaBitmap, atlas_context, font_glyph_map); ASSERT_NE(atlas, nullptr); ASSERT_NE(atlas->GetTexture(), nullptr); std::set<uint16_t> unique_glyphs; std::vector<uint16_t> total_glyphs; atlas->IterateGlyphs( [&](const ScaledFont& scaled_font, const Glyph& glyph, const Rect& rect) { unique_glyphs.insert(glyph.index); total_glyphs.push_back(glyph.index); return true; }); EXPECT_EQ(unique_glyphs.size() * size_count, atlas->GetGlyphCount()); EXPECT_EQ(total_glyphs.size(), atlas->GetGlyphCount()); EXPECT_TRUE(atlas->GetGlyphCount() > 0); EXPECT_TRUE(atlas->GetTexture()->GetSize().width > 0); EXPECT_TRUE(atlas->GetTexture()->GetSize().height > 0); } TEST_P(TypographerTest, GlyphAtlasTextureIsRecycledIfUnchanged) { auto context = TypographerContextSkia::Make(); auto atlas_context = context->CreateGlyphAtlasContext(); ASSERT_TRUE(context && context->IsValid()); SkFont sk_font = 
flutter::testing::CreateTestFontOfSize(12); auto blob = SkTextBlob::MakeFromString("spooky 1", sk_font); ASSERT_TRUE(blob); auto atlas = CreateGlyphAtlas( *GetContext(), context.get(), GlyphAtlas::Type::kAlphaBitmap, 1.0f, atlas_context, *MakeTextFrameFromTextBlobSkia(blob)); auto old_packer = atlas_context->GetRectPacker(); ASSERT_NE(atlas, nullptr); ASSERT_NE(atlas->GetTexture(), nullptr); ASSERT_EQ(atlas, atlas_context->GetGlyphAtlas()); auto* first_texture = atlas->GetTexture().get(); // Now create a new glyph atlas with a nearly identical blob. auto blob2 = SkTextBlob::MakeFromString("spooky 2", sk_font); auto next_atlas = CreateGlyphAtlas( *GetContext(), context.get(), GlyphAtlas::Type::kAlphaBitmap, 1.0f, atlas_context, *MakeTextFrameFromTextBlobSkia(blob2)); ASSERT_EQ(atlas, next_atlas); auto* second_texture = next_atlas->GetTexture().get(); auto new_packer = atlas_context->GetRectPacker(); ASSERT_EQ(second_texture, first_texture); ASSERT_EQ(old_packer, new_packer); } TEST_P(TypographerTest, GlyphAtlasTextureIsRecreatedIfTypeChanges) { auto context = TypographerContextSkia::Make(); auto atlas_context = context->CreateGlyphAtlasContext(); ASSERT_TRUE(context && context->IsValid()); SkFont sk_font = flutter::testing::CreateTestFontOfSize(12); auto blob = SkTextBlob::MakeFromString("spooky 1", sk_font); ASSERT_TRUE(blob); auto atlas = CreateGlyphAtlas( *GetContext(), context.get(), GlyphAtlas::Type::kAlphaBitmap, 1.0f, atlas_context, *MakeTextFrameFromTextBlobSkia(blob)); auto old_packer = atlas_context->GetRectPacker(); ASSERT_NE(atlas, nullptr); ASSERT_NE(atlas->GetTexture(), nullptr); ASSERT_EQ(atlas, atlas_context->GetGlyphAtlas()); auto* first_texture = atlas->GetTexture().get(); // now create a new glyph atlas with an identical blob, // but change the type. 
auto blob2 = SkTextBlob::MakeFromString("spooky 1", sk_font); auto next_atlas = CreateGlyphAtlas( *GetContext(), context.get(), GlyphAtlas::Type::kColorBitmap, 1.0f, atlas_context, *MakeTextFrameFromTextBlobSkia(blob2)); ASSERT_NE(atlas, next_atlas); auto* second_texture = next_atlas->GetTexture().get(); auto new_packer = atlas_context->GetRectPacker(); ASSERT_NE(second_texture, first_texture); ASSERT_NE(old_packer, new_packer); } TEST_P(TypographerTest, MaybeHasOverlapping) { sk_sp<SkFontMgr> font_mgr = txt::GetDefaultFontManager(); sk_sp<SkTypeface> typeface = font_mgr->matchFamilyStyle("Arial", SkFontStyle::Normal()); SkFont sk_font(typeface, 0.5f); auto frame = MakeTextFrameFromTextBlobSkia(SkTextBlob::MakeFromString("1", sk_font)); // Single character has no overlapping ASSERT_FALSE(frame->MaybeHasOverlapping()); auto frame_2 = MakeTextFrameFromTextBlobSkia( SkTextBlob::MakeFromString("123456789", sk_font)); ASSERT_FALSE(frame_2->MaybeHasOverlapping()); } TEST_P(TypographerTest, RectanglePackerAddsNonoverlapingRectangles) { auto packer = RectanglePacker::Factory(200, 100); ASSERT_NE(packer, nullptr); ASSERT_EQ(packer->percentFull(), 0); const SkIRect packer_area = SkIRect::MakeXYWH(0, 0, 200, 100); IPoint16 first_output = {-1, -1}; // Fill with sentinel values ASSERT_TRUE(packer->addRect(20, 20, &first_output)); // Make sure the rectangle is placed such that it is inside the bounds of // the packer's area. const SkIRect first_rect = SkIRect::MakeXYWH(first_output.x(), first_output.y(), 20, 20); ASSERT_TRUE(SkIRect::Intersects(packer_area, first_rect)); // Initial area was 200 x 100 = 20_000 // We added 20x20 = 400. 
400 / 20_000 == 0.02 == 2% ASSERT_TRUE(flutter::testing::NumberNear(packer->percentFull(), 0.02)); IPoint16 second_output = {-1, -1}; ASSERT_TRUE(packer->addRect(140, 90, &second_output)); const SkIRect second_rect = SkIRect::MakeXYWH(second_output.x(), second_output.y(), 140, 90); // Make sure the rectangle is placed such that it is inside the bounds of // the packer's area but not in the are of the first rectangle. ASSERT_TRUE(SkIRect::Intersects(packer_area, second_rect)); ASSERT_FALSE(SkIRect::Intersects(first_rect, second_rect)); // We added another 90 x 140 = 12_600 units, now taking us to 13_000 // 13_000 / 20_000 == 0.65 == 65% ASSERT_TRUE(flutter::testing::NumberNear(packer->percentFull(), 0.65)); // There's enough area to add this rectangle, but no space big enough for // the 50 units of width. IPoint16 output; ASSERT_FALSE(packer->addRect(50, 50, &output)); // Should be unchanged. ASSERT_TRUE(flutter::testing::NumberNear(packer->percentFull(), 0.65)); packer->reset(); // Should be empty now. ASSERT_EQ(packer->percentFull(), 0); } TEST_P(TypographerTest, GlyphAtlasTextureIsRecycledWhenContentsAreNotRecreated) { auto context = TypographerContextSkia::Make(); auto atlas_context = context->CreateGlyphAtlasContext(); ASSERT_TRUE(context && context->IsValid()); SkFont sk_font = flutter::testing::CreateTestFontOfSize(12); auto blob = SkTextBlob::MakeFromString("ABCDEFGHIJKLMNOPQRSTUVQXYZ123456789", sk_font); ASSERT_TRUE(blob); auto atlas = CreateGlyphAtlas( *GetContext(), context.get(), GlyphAtlas::Type::kColorBitmap, 32.0f, atlas_context, *MakeTextFrameFromTextBlobSkia(blob)); auto old_packer = atlas_context->GetRectPacker(); ASSERT_NE(atlas, nullptr); ASSERT_NE(atlas->GetTexture(), nullptr); ASSERT_EQ(atlas, atlas_context->GetGlyphAtlas()); auto* first_texture = atlas->GetTexture().get(); // Now create a new glyph atlas with a completely different textblob. // everything should be different except for the underlying atlas texture. 
auto blob2 = SkTextBlob::MakeFromString("abcdefghijklmnopqrstuvwxyz123456789", sk_font); auto next_atlas = CreateGlyphAtlas( *GetContext(), context.get(), GlyphAtlas::Type::kColorBitmap, 32.0f, atlas_context, *MakeTextFrameFromTextBlobSkia(blob2)); ASSERT_NE(atlas, next_atlas); auto* second_texture = next_atlas->GetTexture().get(); auto new_packer = atlas_context->GetRectPacker(); ASSERT_NE(second_texture, first_texture); ASSERT_NE(old_packer, new_packer); } } // namespace testing } // namespace impeller // NOLINTEND(bugprone-unchecked-optional-access)
engine/impeller/typographer/typographer_unittests.cc/0
{ "file_path": "engine/impeller/typographer/typographer_unittests.cc", "repo_id": "engine", "token_count": 5799 }
271
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_GPU_HOST_BUFFER_H_ #define FLUTTER_LIB_GPU_HOST_BUFFER_H_ #include "flutter/lib/gpu/export.h" #include "flutter/lib/ui/dart_wrapper.h" #include "impeller/core/buffer_view.h" #include "impeller/core/host_buffer.h" #include "lib/gpu/context.h" #include "third_party/tonic/typed_data/dart_byte_data.h" namespace flutter { namespace gpu { class HostBuffer : public RefCountedDartWrappable<HostBuffer> { DEFINE_WRAPPERTYPEINFO(); FML_FRIEND_MAKE_REF_COUNTED(HostBuffer); public: explicit HostBuffer(Context* context); ~HostBuffer() override; std::shared_ptr<impeller::HostBuffer> GetBuffer(); size_t EmplaceBytes(const tonic::DartByteData& byte_data); std::optional<impeller::BufferView> GetBufferViewForOffset(size_t offset); private: size_t current_offset_ = 0; std::shared_ptr<impeller::HostBuffer> host_buffer_; std::unordered_map<size_t, impeller::BufferView> emplacements_; FML_DISALLOW_COPY_AND_ASSIGN(HostBuffer); }; } // namespace gpu } // namespace flutter //---------------------------------------------------------------------------- /// Exports /// extern "C" { FLUTTER_GPU_EXPORT extern void InternalFlutterGpu_HostBuffer_Initialize( Dart_Handle wrapper, flutter::gpu::Context* context); FLUTTER_GPU_EXPORT extern size_t InternalFlutterGpu_HostBuffer_EmplaceBytes( flutter::gpu::HostBuffer* wrapper, Dart_Handle byte_data); } // extern "C" #endif // FLUTTER_LIB_GPU_HOST_BUFFER_H_
engine/lib/gpu/host_buffer.h/0
{ "file_path": "engine/lib/gpu/host_buffer.h", "repo_id": "engine", "token_count": 583 }
272
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_GPU_RENDER_PIPELINE_H_ #define FLUTTER_LIB_GPU_RENDER_PIPELINE_H_ #include "flutter/lib/gpu/context.h" #include "flutter/lib/gpu/export.h" #include "flutter/lib/gpu/shader.h" #include "flutter/lib/ui/dart_wrapper.h" #include "impeller/renderer/pipeline_descriptor.h" namespace flutter { namespace gpu { class RenderPipeline : public RefCountedDartWrappable<RenderPipeline> { DEFINE_WRAPPERTYPEINFO(); FML_FRIEND_MAKE_REF_COUNTED(RenderPipeline); public: RenderPipeline(fml::RefPtr<flutter::gpu::Shader> vertex_shader, fml::RefPtr<flutter::gpu::Shader> fragment_shader); ~RenderPipeline() override; void BindToPipelineDescriptor(impeller::ShaderLibrary& library, impeller::PipelineDescriptor& desc); private: fml::RefPtr<flutter::gpu::Shader> vertex_shader_; fml::RefPtr<flutter::gpu::Shader> fragment_shader_; FML_DISALLOW_COPY_AND_ASSIGN(RenderPipeline); }; } // namespace gpu } // namespace flutter //---------------------------------------------------------------------------- /// Exports /// extern "C" { FLUTTER_GPU_EXPORT extern Dart_Handle InternalFlutterGpu_RenderPipeline_Initialize( Dart_Handle wrapper, flutter::gpu::Context* gpu_context, flutter::gpu::Shader* vertex_shader, flutter::gpu::Shader* fragment_shader); } // extern "C" #endif // FLUTTER_LIB_GPU_RENDER_PIPELINE_H_
engine/lib/gpu/render_pipeline.h/0
{ "file_path": "engine/lib/gpu/render_pipeline.h", "repo_id": "engine", "token_count": 613 }
273
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_UI_DART_UI_H_ #define FLUTTER_LIB_UI_DART_UI_H_ #include "flutter/common/settings.h" #include "flutter/fml/macros.h" namespace flutter { class DartUI { public: static void InitForIsolate(const Settings& settings); private: FML_DISALLOW_IMPLICIT_CONSTRUCTORS(DartUI); }; } // namespace flutter #endif // FLUTTER_LIB_UI_DART_UI_H_
engine/lib/ui/dart_ui.h/0
{ "file_path": "engine/lib/ui/dart_ui.h", "repo_id": "engine", "token_count": 192 }
274
# ui_unittests Fixtures

The files in this directory are used by the `ui_unittests` binary. The
`ui_test.dart` file is either JIT or AOT compiled, depending on the runtime
mode of the test binary.

Other files in this folder are used by tests to verify functionality.

See `//lib/ui/BUILD.gn` and `//testing/testing.gni` for the build rules and GN
template definitions that determine which files get included and compiled here.
engine/lib/ui/fixtures/README.md/0
{ "file_path": "engine/lib/ui/fixtures/README.md", "repo_id": "engine", "token_count": 119 }
275
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import("//build/compiled_action.gni")
import("//flutter/impeller/tools/impeller.gni")
import("//flutter/testing/testing.gni")

if (enable_unittests) {
  # Fragment shaders compiled into IPLR bundles for shader-related unit tests.
  test_shaders = [
    "blue_green_sampler.frag",
    "children_and_uniforms.frag",
    "functions.frag",
    "no_builtin_redefinition.frag",
    "no_uniforms.frag",
    "simple.frag",
    "uniforms.frag",
    "uniforms_sorted.frag",
    "uniform_arrays.frag",
  ]

  # Umbrella target for the compiled shader fixtures.
  group("general_shaders") {
    testonly = true
    deps = [ ":fixtures" ]
  }

  # Compiles each shader with impellerc into an .iplr runtime-effect bundle
  # (with SkSL output enabled via --sksl).
  impellerc("compile_general_shaders") {
    shaders = test_shaders
    shader_target_flags = [ "--sksl" ]
    intermediates_subdir = "iplr"
    sl_file_extension = "iplr"
    iplr = true
  }

  # Stages the compiled shaders into the test fixtures output directory.
  test_fixtures("fixtures") {
    deps = [ ":compile_general_shaders" ]
    fixtures = get_target_outputs(":compile_general_shaders")
    dest = "$root_gen_dir/flutter/lib/ui"
  }
}
engine/lib/ui/fixtures/shaders/general_shaders/BUILD.gn/0
{ "file_path": "engine/lib/ui/fixtures/shaders/general_shaders/BUILD.gn", "repo_id": "engine", "token_count": 433 }
276
#version 320 es

// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

precision highp float;

layout(location = 0) out vec4 fragColor;

// The inline expectations below assume the test harness sets `a` to 1.0 —
// TODO confirm against the test that loads this shader.
layout(location = 0) uniform float a;

// smoothstep performs Hermite interpolation between the two edge values:
//
// smoothstep(edge0, edge1, x) {
//   t = clamp((x - edge0) / (edge1 - edge0), 0.0, 1.0);
//   return t * t * (3.0 - 2.0 * t);
// }
void main() {
  fragColor = vec4(
      // smoothstep(1.0, 5.0, 3.0) is 0.5, subtract to get 0.0
      smoothstep(a, 5.0, 3.0) - 0.5,
      // smoothstep(0.0, 2.0, 1.0) is 0.5, add 0.5 to get 1.0
      smoothstep(0.0, 2.0, a) + 0.5,
      0.0,
      1.0);
}
engine/lib/ui/fixtures/shaders/supported_glsl_op_shaders/49_smoothstep.frag/0
{ "file_path": "engine/lib/ui/fixtures/shaders/supported_glsl_op_shaders/49_smoothstep.frag", "repo_id": "engine", "token_count": 301 }
277
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/lib/ui/painting/color_filter.h" #include <cstring> #include "flutter/lib/ui/ui_dart_state.h" #include "third_party/tonic/converter/dart_converter.h" #include "third_party/tonic/dart_args.h" #include "third_party/tonic/dart_binding_macros.h" #include "third_party/tonic/dart_library_natives.h" namespace flutter { IMPLEMENT_WRAPPERTYPEINFO(ui, ColorFilter); void ColorFilter::Create(Dart_Handle wrapper) { UIDartState::ThrowIfUIOperationsProhibited(); auto res = fml::MakeRefCounted<ColorFilter>(); res->AssociateWithDartWrapper(wrapper); } void ColorFilter::initMode(int color, int blend_mode) { filter_ = DlBlendColorFilter::Make(static_cast<DlColor>(color), static_cast<DlBlendMode>(blend_mode)); } void ColorFilter::initMatrix(const tonic::Float32List& color_matrix) { FML_CHECK(color_matrix.num_elements() == 20); // Flutter still defines the matrix to be biased by 255 in the last column // (translate). skia is normalized, treating the last column as 0...1, so we // post-scale here before calling the skia factory. float matrix[20]; memcpy(matrix, color_matrix.data(), sizeof(matrix)); matrix[4] *= 1.0f / 255; matrix[9] *= 1.0f / 255; matrix[14] *= 1.0f / 255; matrix[19] *= 1.0f / 255; filter_ = DlMatrixColorFilter::Make(matrix); } void ColorFilter::initLinearToSrgbGamma() { filter_ = DlLinearToSrgbGammaColorFilter::kInstance; } void ColorFilter::initSrgbToLinearGamma() { filter_ = DlSrgbToLinearGammaColorFilter::kInstance; } ColorFilter::~ColorFilter() = default; } // namespace flutter
engine/lib/ui/painting/color_filter.cc/0
{ "file_path": "engine/lib/ui/painting/color_filter.cc", "repo_id": "engine", "token_count": 661 }
278
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/lib/ui/painting/image.h" #include <algorithm> #include <limits> #include "tonic/logging/dart_invoke.h" #if IMPELLER_SUPPORTS_RENDERING #include "flutter/lib/ui/painting/image_encoding_impeller.h" #endif #include "flutter/lib/ui/painting/image_encoding.h" #include "third_party/tonic/converter/dart_converter.h" #include "third_party/tonic/dart_args.h" #include "third_party/tonic/dart_binding_macros.h" #include "third_party/tonic/dart_library_natives.h" namespace flutter { typedef CanvasImage Image; // Since _Image is a private class, we can't use IMPLEMENT_WRAPPERTYPEINFO static const tonic::DartWrapperInfo kDartWrapperInfoUIImage("ui", "_Image"); const tonic::DartWrapperInfo& Image::dart_wrapper_info_ = kDartWrapperInfoUIImage; CanvasImage::CanvasImage() = default; CanvasImage::~CanvasImage() = default; Dart_Handle CanvasImage::CreateOuterWrapping() { Dart_Handle ui_lib = Dart_LookupLibrary(tonic::ToDart("dart:ui")); return tonic::DartInvokeField(ui_lib, "_wrapImage", {ToDart(this)}); } Dart_Handle CanvasImage::toByteData(int format, Dart_Handle callback) { return EncodeImage(this, format, callback); } void CanvasImage::dispose() { image_.reset(); ClearDartWrapper(); } int CanvasImage::colorSpace() { if (image_->skia_image()) { return ColorSpace::kSRGB; } else if (image_->impeller_texture()) { #if IMPELLER_SUPPORTS_RENDERING return ImageEncodingImpeller::GetColorSpace(image_->impeller_texture()); #endif // IMPELLER_SUPPORTS_RENDERING } return -1; } } // namespace flutter
engine/lib/ui/painting/image.cc/0
{ "file_path": "engine/lib/ui/painting/image.cc", "repo_id": "engine", "token_count": 636 }
279
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "flutter/lib/ui/painting/image_encoding_impeller.h"

#include "flutter/lib/ui/painting/image.h"
#include "impeller/core/device_buffer.h"
#include "impeller/core/formats.h"
#include "impeller/renderer/command_buffer.h"
#include "impeller/renderer/context.h"
#include "third_party/skia/include/core/SkBitmap.h"
#include "third_party/skia/include/core/SkImage.h"

namespace flutter {
namespace {

// Maps an Impeller pixel format onto its Skia color-type equivalent, or
// std::nullopt for formats this conversion path does not support.
std::optional<SkColorType> ToSkColorType(impeller::PixelFormat format) {
  switch (format) {
    case impeller::PixelFormat::kR8G8B8A8UNormInt:
      return SkColorType::kRGBA_8888_SkColorType;
    case impeller::PixelFormat::kR16G16B16A16Float:
      return SkColorType::kRGBA_F16_SkColorType;
    case impeller::PixelFormat::kB8G8R8A8UNormInt:
      return SkColorType::kBGRA_8888_SkColorType;
    case impeller::PixelFormat::kB10G10R10XR:
      return SkColorType::kBGR_101010x_XR_SkColorType;
    default:
      return std::nullopt;
  }
}

// Wraps the contents of `buffer` in an immutable raster SkImage without
// copying pixels. The bitmap holds a heap-allocated shared_ptr copy of
// `buffer` that the pixel-release callback deletes, keeping the device
// buffer alive for the lifetime of the bitmap's pixels.
sk_sp<SkImage> ConvertBufferToSkImage(
    const std::shared_ptr<impeller::DeviceBuffer>& buffer,
    SkColorType color_type,
    SkISize dimensions) {
  SkImageInfo image_info = SkImageInfo::Make(dimensions, color_type,
                                             SkAlphaType::kPremul_SkAlphaType);

  SkBitmap bitmap;
  // Pixel-release callback: drops the owning shared_ptr.
  auto func = [](void* addr, void* context) {
    auto buffer =
        static_cast<std::shared_ptr<impeller::DeviceBuffer>*>(context);
    buffer->reset();
    delete buffer;
  };
  auto bytes_per_pixel = image_info.bytesPerPixel();
  // Row stride is width * bytes-per-pixel; assumes the blitted buffer is
  // tightly packed — TODO confirm for all supported formats.
  bitmap.installPixels(image_info, buffer->OnGetContents(),
                       dimensions.width() * bytes_per_pixel, func,
                       new std::shared_ptr<impeller::DeviceBuffer>(buffer));
  bitmap.setImmutable();

  sk_sp<SkImage> raster_image = SkImages::RasterFromBitmap(bitmap);
  return raster_image;
}

// Runs the conversion if the GPU is currently available. When the GPU is
// disabled, returns kUnavailable WITHOUT invoking `encode_task`; otherwise
// delegates to ConvertDlImageToSkImage (which always invokes it) and returns
// an OK status.
[[nodiscard]] fml::Status DoConvertImageToRasterImpeller(
    const sk_sp<DlImage>& dl_image,
    const std::function<void(fml::StatusOr<sk_sp<SkImage>>)>& encode_task,
    const std::shared_ptr<const fml::SyncSwitch>& is_gpu_disabled_sync_switch,
    const std::shared_ptr<impeller::Context>& impeller_context) {
  fml::Status result;
  is_gpu_disabled_sync_switch->Execute(
      fml::SyncSwitch::Handlers()
          .SetIfTrue([&result] {
            result =
                fml::Status(fml::StatusCode::kUnavailable, "GPU unavailable.");
          })
          .SetIfFalse([&dl_image, &encode_task, &impeller_context] {
            ImageEncodingImpeller::ConvertDlImageToSkImage(
                dl_image, encode_task, impeller_context);
          }));
  return result;
}

/// Same as `DoConvertImageToRasterImpeller` but it will attempt to retry the
/// operation if `DoConvertImageToRasterImpeller` returns kUnavailable when the
/// GPU becomes available again. Exactly one retry is attempted; a second
/// failure is surfaced to `encode_task`.
void DoConvertImageToRasterImpellerWithRetry(
    const sk_sp<DlImage>& dl_image,
    std::function<void(fml::StatusOr<sk_sp<SkImage>>)>&& encode_task,
    const std::shared_ptr<const fml::SyncSwitch>& is_gpu_disabled_sync_switch,
    const std::shared_ptr<impeller::Context>& impeller_context,
    const fml::RefPtr<fml::TaskRunner>& retry_runner) {
  fml::Status status = DoConvertImageToRasterImpeller(
      dl_image, encode_task, is_gpu_disabled_sync_switch, impeller_context);
  if (!status.ok()) {
    // If the conversion failed because of the GPU is unavailable, store the
    // task on the Context so it can be executed when the GPU becomes
    // available.
    if (status.code() == fml::StatusCode::kUnavailable) {
      impeller_context->StoreTaskForGPU(
          [dl_image, encode_task = std::move(encode_task),
           is_gpu_disabled_sync_switch, impeller_context,
           retry_runner]() mutable {
            auto retry_task = [dl_image, encode_task = std::move(encode_task),
                               is_gpu_disabled_sync_switch, impeller_context] {
              fml::Status retry_status = DoConvertImageToRasterImpeller(
                  dl_image, encode_task, is_gpu_disabled_sync_switch,
                  impeller_context);
              if (!retry_status.ok()) {
                // The retry failed for some reason, maybe the GPU became
                // unavailable again. Don't retry again, just fail in this
                // case.
                encode_task(retry_status);
              }
            };
            // If a `retry_runner` is specified, post the retry to it,
            // otherwise execute it directly.
            if (retry_runner) {
              retry_runner->PostTask(retry_task);
            } else {
              retry_task();
            }
          });
    } else {
      // Pass on errors that are not `kUnavailable`.
      encode_task(status);
    }
  }
}

}  // namespace

// Blits the image's Impeller texture into a host-visible buffer and, on
// completion of the GPU work, wraps the result in a raster SkImage handed to
// `encode_task`. Precondition failures (null context/texture, empty
// dimensions, unsupported format) are reported through `encode_task` as
// error statuses.
void ImageEncodingImpeller::ConvertDlImageToSkImage(
    const sk_sp<DlImage>& dl_image,
    std::function<void(fml::StatusOr<sk_sp<SkImage>>)> encode_task,
    const std::shared_ptr<impeller::Context>& impeller_context) {
  auto texture = dl_image->impeller_texture();

  if (impeller_context == nullptr) {
    encode_task(fml::Status(fml::StatusCode::kFailedPrecondition,
                            "Impeller context was null."));
    return;
  }

  if (texture == nullptr) {
    encode_task(
        fml::Status(fml::StatusCode::kFailedPrecondition, "Image was null."));
    return;
  }

  auto dimensions = dl_image->dimensions();
  auto color_type = ToSkColorType(texture->GetTextureDescriptor().format);

  if (dimensions.isEmpty()) {
    encode_task(fml::Status(fml::StatusCode::kFailedPrecondition,
                            "Image dimensions were empty."));
    return;
  }

  if (!color_type.has_value()) {
    encode_task(fml::Status(fml::StatusCode::kUnimplemented,
                            "Failed to get color type from pixel format."));
    return;
  }

  impeller::DeviceBufferDescriptor buffer_desc;
  buffer_desc.storage_mode = impeller::StorageMode::kHostVisible;
  buffer_desc.readback = true;  // set to false for testing.
  buffer_desc.size =
      texture->GetTextureDescriptor().GetByteSizeOfBaseMipLevel();
  auto buffer =
      impeller_context->GetResourceAllocator()->CreateBuffer(buffer_desc);
  auto command_buffer = impeller_context->CreateCommandBuffer();
  command_buffer->SetLabel("BlitTextureToBuffer Command Buffer");
  auto pass = command_buffer->CreateBlitPass();
  pass->SetLabel("BlitTextureToBuffer Blit Pass");
  pass->AddCopy(texture, buffer);
  pass->EncodeCommands(impeller_context->GetResourceAllocator());
  // Runs when the GPU signals completion of the blit; converts the readback
  // buffer into an SkImage (after invalidating the CPU cache view).
  auto completion = [buffer, color_type = color_type.value(), dimensions,
                     encode_task = std::move(encode_task)](
                        impeller::CommandBuffer::Status status) {
    if (status != impeller::CommandBuffer::Status::kCompleted) {
      encode_task(fml::Status(fml::StatusCode::kUnknown, ""));
      return;
    }
    buffer->Invalidate();
    auto sk_image = ConvertBufferToSkImage(buffer, color_type, dimensions);
    encode_task(sk_image);
  };

  if (!impeller_context->GetCommandQueue()
           ->Submit({command_buffer}, completion)
           .ok()) {
    FML_LOG(ERROR) << "Failed to submit commands.";
  }
}

// Entry point for converting a DlImage to a raster SkImage. The caller's
// `encode_task` is always re-dispatched onto the IO task runner; the
// conversion itself runs on the raster task runner when the image is owned
// by the raster context, and inline otherwise.
void ImageEncodingImpeller::ConvertImageToRaster(
    const sk_sp<DlImage>& dl_image,
    std::function<void(fml::StatusOr<sk_sp<SkImage>>)> encode_task,
    const fml::RefPtr<fml::TaskRunner>& raster_task_runner,
    const fml::RefPtr<fml::TaskRunner>& io_task_runner,
    const std::shared_ptr<const fml::SyncSwitch>& is_gpu_disabled_sync_switch,
    const std::shared_ptr<impeller::Context>& impeller_context) {
  // Wrap the caller's task so results are delivered on the IO runner.
  auto original_encode_task = std::move(encode_task);
  encode_task = [original_encode_task = std::move(original_encode_task),
                 io_task_runner](fml::StatusOr<sk_sp<SkImage>> image) mutable {
    fml::TaskRunner::RunNowOrPostTask(
        io_task_runner,
        [original_encode_task = std::move(original_encode_task),
         image = std::move(image)]() { original_encode_task(image); });
  };

  if (dl_image->owning_context() != DlImage::OwningContext::kRaster) {
    DoConvertImageToRasterImpellerWithRetry(dl_image, std::move(encode_task),
                                            is_gpu_disabled_sync_switch,
                                            impeller_context,
                                            /*retry_runner=*/nullptr);
    return;
  }

  raster_task_runner->PostTask([dl_image, encode_task = std::move(encode_task),
                                io_task_runner, is_gpu_disabled_sync_switch,
                                impeller_context,
                                raster_task_runner]() mutable {
    DoConvertImageToRasterImpellerWithRetry(
        dl_image, std::move(encode_task), is_gpu_disabled_sync_switch,
        impeller_context, raster_task_runner);
  });
}

// Maps the texture's pixel format to a ColorSpace value: wide-gamut formats
// report extended sRGB, everything else plain sRGB.
int ImageEncodingImpeller::GetColorSpace(
    const std::shared_ptr<impeller::Texture>& texture) {
  const impeller::TextureDescriptor& desc = texture->GetTextureDescriptor();
  switch (desc.format) {
    case impeller::PixelFormat::kB10G10R10XR:  // intentional_fallthrough
    case impeller::PixelFormat::kR16G16B16A16Float:
      return ColorSpace::kExtendedSRGB;
    default:
      return ColorSpace::kSRGB;
  }
}

}  // namespace flutter
engine/lib/ui/painting/image_encoding_impeller.cc/0
{ "file_path": "engine/lib/ui/painting/image_encoding_impeller.cc", "repo_id": "engine", "token_count": 3957 }
280
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_UI_PAINTING_IMAGE_SHADER_H_ #define FLUTTER_LIB_UI_PAINTING_IMAGE_SHADER_H_ #include "flutter/lib/ui/dart_wrapper.h" #include "flutter/lib/ui/painting/gradient.h" #include "flutter/lib/ui/painting/image.h" #include "flutter/lib/ui/painting/matrix.h" #include "flutter/lib/ui/painting/shader.h" #include "third_party/skia/include/core/SkMatrix.h" #include "third_party/skia/include/core/SkShader.h" #include "third_party/tonic/typed_data/typed_list.h" namespace flutter { class ImageShader : public Shader { DEFINE_WRAPPERTYPEINFO(); FML_FRIEND_MAKE_REF_COUNTED(ImageShader); public: ~ImageShader() override; static void Create(Dart_Handle wrapper); Dart_Handle initWithImage(CanvasImage* image, DlTileMode tmx, DlTileMode tmy, int filter_quality_index, Dart_Handle matrix_handle); std::shared_ptr<DlColorSource> shader(DlImageSampling) override; int width(); int height(); void dispose(); private: ImageShader(); sk_sp<const DlImage> image_; bool sampling_is_locked_; std::shared_ptr<DlImageColorSource> cached_shader_; }; } // namespace flutter #endif // FLUTTER_LIB_UI_PAINTING_IMAGE_SHADER_H_
engine/lib/ui/painting/image_shader.h/0
{ "file_path": "engine/lib/ui/painting/image_shader.h", "repo_id": "engine", "token_count": 606 }
281
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_UI_PAINTING_PICTURE_H_ #define FLUTTER_LIB_UI_PAINTING_PICTURE_H_ #include "flutter/display_list/display_list.h" #include "flutter/flow/layers/layer_tree.h" #include "flutter/lib/ui/dart_wrapper.h" #include "flutter/lib/ui/painting/image.h" #include "flutter/lib/ui/ui_dart_state.h" namespace flutter { class Canvas; class Picture : public RefCountedDartWrappable<Picture> { DEFINE_WRAPPERTYPEINFO(); FML_FRIEND_MAKE_REF_COUNTED(Picture); public: ~Picture() override; static void CreateAndAssociateWithDartWrapper( Dart_Handle dart_handle, sk_sp<DisplayList> display_list); sk_sp<DisplayList> display_list() const { return display_list_; } Dart_Handle toImage(uint32_t width, uint32_t height, Dart_Handle raw_image_callback); void toImageSync(uint32_t width, uint32_t height, Dart_Handle raw_image_handle); void dispose(); size_t GetAllocationSize() const; static void RasterizeToImageSync(sk_sp<DisplayList> display_list, uint32_t width, uint32_t height, Dart_Handle raw_image_handle); static Dart_Handle RasterizeToImage(const sk_sp<DisplayList>& display_list, uint32_t width, uint32_t height, Dart_Handle raw_image_callback); static Dart_Handle RasterizeLayerTreeToImage( std::unique_ptr<LayerTree> layer_tree, Dart_Handle raw_image_callback); // Callers may provide either a display list or a layer tree, but not both. // // If a layer tree is provided, it will be flattened on the raster thread, and // picture_bounds should be the layer tree's frame_size(). static Dart_Handle DoRasterizeToImage(const sk_sp<DisplayList>& display_list, std::unique_ptr<LayerTree> layer_tree, uint32_t width, uint32_t height, Dart_Handle raw_image_callback); private: explicit Picture(sk_sp<DisplayList> display_list); sk_sp<DisplayList> display_list_; }; } // namespace flutter #endif // FLUTTER_LIB_UI_PAINTING_PICTURE_H_
engine/lib/ui/painting/picture.h/0
{ "file_path": "engine/lib/ui/painting/picture.h", "repo_id": "engine", "token_count": 1174 }
282
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. part of dart.ui; /// Signature of callbacks that have no arguments and return no data. typedef VoidCallback = void Function(); /// Signature for [PlatformDispatcher.onBeginFrame]. typedef FrameCallback = void Function(Duration duration); /// Signature for [PlatformDispatcher.onReportTimings]. /// /// {@template dart.ui.TimingsCallback.list} /// The callback takes a list of [FrameTiming] because it may not be /// immediately triggered after each frame. Instead, Flutter tries to batch /// frames together and send all their timings at once to decrease the /// overhead (as this is available in the release mode). The list is sorted in /// ascending order of time (earliest frame first). The timing of any frame /// will be sent within about 1 second (100ms if in the profile/debug mode) /// even if there are no later frames to batch. The timing of the first frame /// will be sent immediately without batching. /// {@endtemplate} typedef TimingsCallback = void Function(List<FrameTiming> timings); /// Signature for [PlatformDispatcher.onPointerDataPacket]. typedef PointerDataPacketCallback = void Function(PointerDataPacket packet); /// Signature for [PlatformDispatcher.onKeyData]. /// /// The callback should return true if the key event has been handled by the /// framework and should not be propagated further. typedef KeyDataCallback = bool Function(KeyData data); /// Signature for [PlatformDispatcher.onSemanticsActionEvent]. typedef SemanticsActionEventCallback = void Function(SemanticsActionEvent action); /// Signature for responses to platform messages. /// /// Used as a parameter to [PlatformDispatcher.sendPlatformMessage] and /// [PlatformDispatcher.onPlatformMessage]. typedef PlatformMessageResponseCallback = void Function(ByteData? data); /// Deprecated. Migrate to [ChannelBuffers.setListener] instead. 
/// /// Signature for [PlatformDispatcher.onPlatformMessage]. @Deprecated( 'Migrate to ChannelBuffers.setListener instead. ' 'This feature was deprecated after v3.11.0-20.0.pre.', ) typedef PlatformMessageCallback = void Function(String name, ByteData? data, PlatformMessageResponseCallback? callback); // Signature for _setNeedsReportTimings. typedef _SetNeedsReportTimingsFunc = void Function(bool value); /// Signature for [PlatformDispatcher.onError]. /// /// If this method returns false, the engine may use some fallback method to /// provide information about the error. /// /// After calling this method, the process or the VM may terminate. Some severe /// unhandled errors may not be able to call this method either, such as Dart /// compilation errors or process terminating errors. typedef ErrorCallback = bool Function(Object exception, StackTrace stackTrace); // A gesture setting value that indicates it has not been set by the engine. const double _kUnsetGestureSetting = -1.0; // A message channel to receive KeyData from the platform. // // See embedder.cc::kFlutterKeyDataChannel for more information. const String _kFlutterKeyDataChannel = 'flutter/keydata'; @pragma('vm:entry-point') ByteData? _wrapUnmodifiableByteData(ByteData? byteData) => byteData?.asUnmodifiableView(); /// A token that represents a root isolate. class RootIsolateToken { RootIsolateToken._(this._token); /// An enumeration representing the root isolate (0 if not a root isolate). final int _token; /// The token for the root isolate that is executing this Dart code. If this /// Dart code is not executing on a root isolate [instance] will be null. static final RootIsolateToken? instance = () { final int token = __getRootIsolateToken(); return token == 0 ? null : RootIsolateToken._(token); }(); @Native<Int64 Function()>(symbol: 'PlatformConfigurationNativeApi::GetRootIsolateToken') external static int __getRootIsolateToken(); } /// Platform event dispatcher singleton. 
///
/// The most basic interface to the host operating system's interface.
///
/// This is the central entry point for platform messages and configuration
/// events from the platform.
///
/// It exposes the core scheduler API, the input event callback, the graphics
/// drawing API, and other such core services.
///
/// It manages the list of the application's [views] as well as the
/// [configuration] of various platform attributes.
///
/// Consider avoiding static references to this singleton through
/// [PlatformDispatcher.instance] and instead prefer using a binding for
/// dependency resolution such as `WidgetsBinding.instance.platformDispatcher`.
/// See [PlatformDispatcher.instance] for more information about why this is
/// preferred.
class PlatformDispatcher {
  /// Private constructor, since only dart:ui is supposed to create one of
  /// these. Use [instance] to access the singleton.
  PlatformDispatcher._() {
    _setNeedsReportTimings = _nativeSetNeedsReportTimings;
  }

  /// The [PlatformDispatcher] singleton.
  ///
  /// Consider avoiding static references to this singleton through
  /// [PlatformDispatcher.instance] and instead prefer using a binding for
  /// dependency resolution such as `WidgetsBinding.instance.platformDispatcher`.
  ///
  /// Static access of this object means that Flutter has few, if any options to
  /// fake or mock the given object in tests. Even in cases where Dart offers
  /// special language constructs to forcefully shadow such properties, those
  /// mechanisms would only be reasonable for tests and they would not be
  /// reasonable for a future of Flutter where we legitimately want to select an
  /// appropriate implementation at runtime.
  ///
  /// The only place that `WidgetsBinding.instance.platformDispatcher` is
  /// inappropriate is if access to these APIs is required before the binding is
  /// initialized by invoking `runApp()` or
  /// `WidgetsFlutterBinding.instance.ensureInitialized()`. In that case, it is
  /// necessary (though unfortunate) to use the [PlatformDispatcher.instance]
  /// object statically.
  static PlatformDispatcher get instance => _instance;
  static final PlatformDispatcher _instance = PlatformDispatcher._();

  // The current platform-wide configuration (locales, accessibility features,
  // text scaling, etc.), replaced wholesale via copyWith when it changes.
  _PlatformConfiguration _configuration = const _PlatformConfiguration();

  /// Called when the platform configuration changes.
  ///
  /// The engine invokes this callback in the same zone in which the callback
  /// was set.
  VoidCallback? get onPlatformConfigurationChanged => _onPlatformConfigurationChanged;
  VoidCallback? _onPlatformConfigurationChanged;
  Zone _onPlatformConfigurationChangedZone = Zone.root;
  set onPlatformConfigurationChanged(VoidCallback? callback) {
    _onPlatformConfigurationChanged = callback;
    // Remember the zone at registration time so the callback can later be
    // invoked in it.
    _onPlatformConfigurationChangedZone = Zone.current;
  }

  /// The current list of displays.
  ///
  /// If any of their configurations change, [onMetricsChanged] will be called.
  ///
  /// To get the display for a [FlutterView], use [FlutterView.display].
  ///
  /// Platforms may limit what information is available to the application with
  /// regard to secondary displays and/or displays that do not have an active
  /// application window.
  ///
  /// Presently, on Android and Web this collection will only contain the
  /// display that the current window is on. On iOS, it will only contain the
  /// main display on the phone or tablet. On Desktop, it will contain only
  /// a main display with a valid refresh rate but invalid size and device
  /// pixel ratio values.
  // TODO(dnfield): Update these docs when https://github.com/flutter/flutter/issues/125939
  // and https://github.com/flutter/flutter/issues/125938 are resolved.
  Iterable<Display> get displays => _displays.values;
  final Map<int, Display> _displays = <int, Display>{};

  /// The current list of views, including top level platform windows used by
  /// the application.
  ///
  /// If any of their configurations change, [onMetricsChanged] will be called.
  Iterable<FlutterView> get views => _views.values;
  final Map<int, FlutterView> _views = <int, FlutterView>{};

  /// Returns the [FlutterView] with the provided ID if one exists, or null
  /// otherwise.
  FlutterView? view({required int id}) => _views[id];

  /// The [FlutterView] provided by the engine if the platform is unable to
  /// create windows, or, for backwards compatibility.
  ///
  /// If the platform provides an implicit view, it can be used to bootstrap
  /// the framework. This is common for platforms designed for single-view
  /// applications like mobile devices with a single display.
  ///
  /// Applications and libraries must not rely on this property being set
  /// as it may be null depending on the engine's configuration. Instead,
  /// consider using [View.of] to lookup the [FlutterView] the current
  /// [BuildContext] is drawing into.
  ///
  /// While the properties on the referenced [FlutterView] may change,
  /// the reference itself is guaranteed to never change over the lifetime
  /// of the application: if this property is null at startup, it will remain
  /// so throughout the entire lifetime of the application. If it points to a
  /// specific [FlutterView], it will continue to point to the same view until
  /// the application is shut down (although the engine may replace or remove
  /// the underlying backing surface of the view at its discretion).
  ///
  /// See also:
  ///
  /// * [View.of], for accessing the current view.
  /// * [PlatformDispatcher.views] for a list of all [FlutterView]s provided
  ///   by the platform.
  FlutterView? get implicitView {
    final FlutterView? result = _views[_implicitViewId];
    // Make sure [implicitView] agrees with `_implicitViewId`.
    assert((result != null) == (_implicitViewId != null),
        (_implicitViewId != null) ?
            'The implicit view ID is $_implicitViewId, but the implicit view does not exist.' :
            'The implicit view ID is null, but the implicit view exists.');
    // Make sure [implicitView] never changes.
    assert(() {
      if (_debugRecordedLastImplicitView) {
        assert(identical(_debugLastImplicitView, result),
            'The implicitView has changed:\n'
            'Last: $_debugLastImplicitView\nCurrent: $result');
      } else {
        // First access: record the value so later accesses can verify it is
        // stable for the lifetime of the application.
        _debugLastImplicitView = result;
        _debugRecordedLastImplicitView = true;
      }
      return true;
    }());
    return result;
  }
  // Debug-only bookkeeping backing the stability assertion above.
  FlutterView? _debugLastImplicitView;
  bool _debugRecordedLastImplicitView = false;

  /// A callback that is invoked whenever the [ViewConfiguration] of any of the
  /// [views] changes.
  ///
  /// For example when the device is rotated or when the application is resized
  /// (e.g. when showing applications side-by-side on Android),
  /// `onMetricsChanged` is called.
  ///
  /// The engine invokes this callback in the same zone in which the callback
  /// was set.
  ///
  /// The framework registers with this callback and updates the layout
  /// appropriately.
  ///
  /// See also:
  ///
  /// * [WidgetsBindingObserver], for a mechanism at the widgets layer to
  ///   register for notifications when this is called.
  /// * [MediaQuery.of], a simpler mechanism for the same.
  VoidCallback? get onMetricsChanged => _onMetricsChanged;
  VoidCallback? _onMetricsChanged;
  Zone _onMetricsChangedZone = Zone.root;
  set onMetricsChanged(VoidCallback? callback) {
    _onMetricsChanged = callback;
    _onMetricsChangedZone = Zone.current;
  }

  // Called from the engine, via hooks.dart
  //
  // Adds a new view with the specific view configuration.
  //
  // The implicit view must be added before [implicitView] is first called,
  // which is typically the main function.
  void _addView(int id, _ViewConfiguration viewConfiguration) {
    assert(!_views.containsKey(id), 'View ID $id already exists.');
    _views[id] = FlutterView._(id, this, viewConfiguration);
    _invoke(onMetricsChanged, _onMetricsChangedZone);
  }

  // Called from the engine, via hooks.dart
  //
  // Removes the specific view.
  //
  // The target view must exist. The implicit view must not be removed,
  // or an assertion will be triggered.
  void _removeView(int id) {
    // Asserts in debug builds; in release builds the removal of the implicit
    // view is silently ignored rather than corrupting state.
    assert(id != _implicitViewId, 'The implicit view #$id can not be removed.');
    if (id == _implicitViewId) {
      return;
    }
    assert(_views.containsKey(id), 'View ID $id does not exist.');
    _views.remove(id);
    _invoke(onMetricsChanged, _onMetricsChangedZone);
  }

  // Called from the engine, via hooks.dart.
  //
  // Updates the available displays.
  void _updateDisplays(List<Display> displays) {
    // Replace the full set of displays; the engine always sends the complete
    // list.
    _displays.clear();
    for (final Display display in displays) {
      _displays[display.id] = display;
    }
    _invoke(onMetricsChanged, _onMetricsChangedZone);
  }

  // Called from the engine, via hooks.dart
  //
  // Updates the metrics of the window with the given id.
  void _updateWindowMetrics(int viewId, _ViewConfiguration viewConfiguration) {
    assert(_views.containsKey(viewId), 'View $viewId does not exist.');
    _views[viewId]!._viewConfiguration = viewConfiguration;
    _invoke(onMetricsChanged, _onMetricsChangedZone);
  }

  /// A callback invoked immediately after the focus is transitioned across [FlutterView]s.
  ///
  /// When the platform moves the focus from one [FlutterView] to another, this
  /// callback is invoked indicating the new view that has focus and the direction
  /// in which focus was received. For example, if focus is moved to the [FlutterView]
  /// with ID 2 in the forward direction (could be the result of pressing tab)
  /// the callback receives a [ViewFocusEvent] with [ViewFocusState.focused] and
  /// [ViewFocusDirection.forward].
  ///
  /// Typically, receivers of this event respond by moving the focus to the first
  /// focusable widget inside the [FlutterView] with ID 2. If a view receives
  /// focus in the backward direction (could be the result of pressing shift + tab),
  /// typically the last focusable widget inside that view is focused.
  ///
  /// The platform may remove focus from a [FlutterView]. For example, on the web,
  /// the browser can move focus to another element, or to the browser's built-in UI.
  /// On desktop, the operating system can switch to another window (e.g. using Alt + Tab on Windows).
  /// In scenarios like these, [onViewFocusChange] will be invoked with [ViewFocusState.unfocused] and
  /// [ViewFocusDirection.undefined].
  ///
  /// Receivers typically respond to this event by removing all focus indications
  /// from the app.
  ///
  /// Apps can also programmatically request to move the focus to a desired
  /// [FlutterView] by calling [requestViewFocusChange].
  ///
  /// The callback is invoked in the same zone in which the callback was set.
  ///
  /// See also:
  ///
  /// * [requestViewFocusChange] to programmatically instruct the platform to move focus to a different [FlutterView].
  /// * [ViewFocusState] for a list of allowed focus transitions.
  /// * [ViewFocusDirection] for a list of allowed focus directions.
  /// * [ViewFocusEvent], which is the event object provided to the callback.
  ViewFocusChangeCallback? get onViewFocusChange => _onViewFocusChange;
  ViewFocusChangeCallback? _onViewFocusChange;
  // ignore: unused_field, field will be used when platforms other than web use these focus APIs.
  Zone _onViewFocusChangeZone = Zone.root;
  set onViewFocusChange(ViewFocusChangeCallback? callback) {
    _onViewFocusChange = callback;
    _onViewFocusChangeZone = Zone.current;
  }

  /// Requests a focus change of the [FlutterView] with ID [viewId].
  ///
  /// If an app would like to request the engine to move focus, in forward direction,
  /// to the [FlutterView] with ID 1 it should call this method with [ViewFocusState.focused]
  /// and [ViewFocusDirection.forward].
  ///
  /// There is no need to call this method if the view in question already has
  /// focus as it won't have any effect.
  ///
  /// A call to this method will lead to the engine calling [onViewFocusChange]
  /// if the request is successfully fulfilled.
  ///
  /// See also:
  ///
  /// * [onViewFocusChange], a callback to subscribe to view focus change events.
  void requestViewFocusChange({
    required int viewId,
    required ViewFocusState state,
    required ViewFocusDirection direction,
  }) {
    // TODO(tugorez): implement this method. At the moment will be a no op call.
  }

  /// A callback invoked when any view begins a frame.
  ///
  /// A callback that is invoked to notify the application that it is an
  /// appropriate time to provide a scene using the [SceneBuilder] API and the
  /// [FlutterView.render] method.
  ///
  /// When possible, this is driven by the hardware VSync signal of the attached
  /// screen with the highest VSync rate. This is only called if
  /// [PlatformDispatcher.scheduleFrame] has been called since the last time
  /// this callback was invoked.
  FrameCallback? get onBeginFrame => _onBeginFrame;
  FrameCallback? _onBeginFrame;
  Zone _onBeginFrameZone = Zone.root;
  set onBeginFrame(FrameCallback? callback) {
    _onBeginFrame = callback;
    _onBeginFrameZone = Zone.current;
  }

  // Called from the engine, via hooks.dart
  void _beginFrame(int microseconds) {
    _invoke1<Duration>(
      onBeginFrame,
      _onBeginFrameZone,
      Duration(microseconds: microseconds),
    );
  }

  /// A callback that is invoked for each frame after [onBeginFrame] has
  /// completed and after the microtask queue has been drained.
  ///
  /// This can be used to implement a second phase of frame rendering that
  /// happens after any deferred work queued by the [onBeginFrame] phase.
  VoidCallback? get onDrawFrame => _onDrawFrame;
  VoidCallback? _onDrawFrame;
  Zone _onDrawFrameZone = Zone.root;
  set onDrawFrame(VoidCallback? callback) {
    _onDrawFrame = callback;
    _onDrawFrameZone = Zone.current;
  }

  // Called from the engine, via hooks.dart
  void _drawFrame() {
    _invoke(onDrawFrame, _onDrawFrameZone);
  }

  /// A callback that is invoked when pointer data is available.
  ///
  /// The framework invokes this callback in the same zone in which the callback
  /// was set.
  ///
  /// See also:
  ///
  /// * [GestureBinding], the Flutter framework class which manages pointer
  ///   events.
  PointerDataPacketCallback?
      get onPointerDataPacket => _onPointerDataPacket;
  PointerDataPacketCallback? _onPointerDataPacket;
  Zone _onPointerDataPacketZone = Zone.root;
  set onPointerDataPacket(PointerDataPacketCallback? callback) {
    _onPointerDataPacket = callback;
    _onPointerDataPacketZone = Zone.current;
  }

  // Called from the engine, via hooks.dart
  void _dispatchPointerDataPacket(ByteData packet) {
    if (onPointerDataPacket != null) {
      _invoke1<PointerDataPacket>(
        onPointerDataPacket,
        _onPointerDataPacketZone,
        _unpackPointerDataPacket(packet),
      );
    }
  }

  // This value must match kPointerDataFieldCount in pointer_data.cc. (The
  // pointer_data.cc also lists other locations that must be kept consistent.)
  static const int _kPointerDataFieldCount = 36;

  // Decodes a binary pointer packet (an array of 64-bit fields laid out by
  // pointer_data.cc) into a list of [PointerData].
  //
  // The `offset++` field reads below must stay in this exact order: each read
  // consumes the next 8-byte field of the struct.
  static PointerDataPacket _unpackPointerDataPacket(ByteData packet) {
    const int kStride = Int64List.bytesPerElement;
    const int kBytesPerPointerData = _kPointerDataFieldCount * kStride;
    final int length = packet.lengthInBytes ~/ kBytesPerPointerData;
    // The packet must contain a whole number of pointer-data records.
    assert(length * kBytesPerPointerData == packet.lengthInBytes);
    final List<PointerData> data = <PointerData>[];
    for (int i = 0; i < length; ++i) {
      int offset = i * _kPointerDataFieldCount;
      data.add(PointerData(
        // The unpacking code must match the struct in pointer_data.h.
        embedderId: packet.getInt64(kStride * offset++, _kFakeHostEndian),
        timeStamp: Duration(microseconds: packet.getInt64(kStride * offset++, _kFakeHostEndian)),
        change: PointerChange.values[packet.getInt64(kStride * offset++, _kFakeHostEndian)],
        kind: PointerDeviceKind.values[packet.getInt64(kStride * offset++, _kFakeHostEndian)],
        signalKind: PointerSignalKind.values[packet.getInt64(kStride * offset++, _kFakeHostEndian)],
        device: packet.getInt64(kStride * offset++, _kFakeHostEndian),
        pointerIdentifier: packet.getInt64(kStride * offset++, _kFakeHostEndian),
        physicalX: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        physicalY: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        physicalDeltaX: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        physicalDeltaY: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        buttons: packet.getInt64(kStride * offset++, _kFakeHostEndian),
        obscured: packet.getInt64(kStride * offset++, _kFakeHostEndian) != 0,
        synthesized: packet.getInt64(kStride * offset++, _kFakeHostEndian) != 0,
        pressure: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        pressureMin: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        pressureMax: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        distance: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        distanceMax: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        size: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        radiusMajor: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        radiusMinor: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        radiusMin: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        radiusMax: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        orientation: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        tilt: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        platformData: packet.getInt64(kStride * offset++, _kFakeHostEndian),
        scrollDeltaX: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        scrollDeltaY: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        panX: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        panY: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        panDeltaX: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        panDeltaY: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        scale: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        rotation: packet.getFloat64(kStride * offset++, _kFakeHostEndian),
        viewId: packet.getInt64(kStride * offset++, _kFakeHostEndian),
      ));
      // Verify we consumed exactly one record's worth of fields.
      assert(offset == (i + 1) * _kPointerDataFieldCount);
    }
    return PointerDataPacket(data: data);
  }

  // Builds a channel listener that decodes key-data packets, forwards them to
  // `onKeyData` in `zone`, and replies with a single byte: 1 if the framework
  // handled the event, 0 otherwise.
  static ChannelCallback _keyDataListener(KeyDataCallback onKeyData, Zone zone) =>
      (ByteData? packet, PlatformMessageResponseCallback callback) {
        _invoke1<KeyData>(
          (KeyData keyData) {
            final bool handled = onKeyData(keyData);
            final Uint8List response = Uint8List(1);
            response[0] = handled ? 1 : 0;
            callback(response.buffer.asByteData());
          },
          zone,
          _unpackKeyData(packet!),
        );
      };

  /// A callback that is invoked when key data is available.
  ///
  /// The framework invokes this callback in the same zone in which the callback
  /// was set.
  ///
  /// The callback should return true if the key event has been handled by the
  /// framework and should not be propagated further.
  KeyDataCallback? get onKeyData => _onKeyData;
  KeyDataCallback? _onKeyData;
  set onKeyData(KeyDataCallback? callback) {
    _onKeyData = callback;
    if (callback != null) {
      channelBuffers.setListener(_kFlutterKeyDataChannel, _keyDataListener(callback, Zone.current));
    } else {
      channelBuffers.clearListener(_kFlutterKeyDataChannel);
    }
  }

  // If this value changes, update the encoding code in the following files:
  //
  // * key_data.h (kKeyDataFieldCount)
  // * KeyData.java (KeyData.FIELD_COUNT)
  static const int _kKeyDataFieldCount = 6;

  // The packet structure is described in `key_data_packet.h`.
  // Decodes a binary key-data packet: a 64-bit character-data length, followed
  // by `_kKeyDataFieldCount` 64-bit fields, followed by the UTF-8 character
  // bytes (if any). Field reads below must stay in this exact order.
  static KeyData _unpackKeyData(ByteData packet) {
    const int kStride = Int64List.bytesPerElement;
    int offset = 0;
    final int charDataSize = packet.getUint64(kStride * offset++, _kFakeHostEndian);
    // The character bytes live after the fixed-size fields.
    final String? character = charDataSize == 0
        ? null
        : utf8.decoder.convert(
            packet.buffer.asUint8List(kStride * (offset + _kKeyDataFieldCount), charDataSize));
    final KeyData keyData = KeyData(
      timeStamp: Duration(microseconds: packet.getUint64(kStride * offset++, _kFakeHostEndian)),
      type: KeyEventType.values[packet.getInt64(kStride * offset++, _kFakeHostEndian)],
      physical: packet.getUint64(kStride * offset++, _kFakeHostEndian),
      logical: packet.getUint64(kStride * offset++, _kFakeHostEndian),
      character: character,
      synthesized: packet.getUint64(kStride * offset++, _kFakeHostEndian) != 0,
    );
    return keyData;
  }

  /// A callback that is invoked to report the [FrameTiming] of recently
  /// rasterized frames.
  ///
  /// It's preferred to use [SchedulerBinding.addTimingsCallback] than to use
  /// [onReportTimings] directly because [SchedulerBinding.addTimingsCallback]
  /// allows multiple callbacks.
  ///
  /// This can be used to see if the application has missed frames (through
  /// [FrameTiming.buildDuration] and [FrameTiming.rasterDuration]), or high
  /// latencies (through [FrameTiming.totalSpan]).
  ///
  /// Unlike [Timeline], the timing information here is available in the release
  /// mode (additional to the profile and the debug mode). Hence this can be
  /// used to monitor the application's performance in the wild.
  ///
  /// {@macro dart.ui.TimingsCallback.list}
  ///
  /// If this is null, no additional work will be done. If this is not null,
  /// Flutter spends less than 0.1ms every 1 second to report the timings
  /// (measured on iPhone6S). The 0.1ms is about 0.6% of 16ms (frame budget for
  /// 60fps), or 0.01% CPU usage per second.
  TimingsCallback? get onReportTimings => _onReportTimings;
  TimingsCallback? _onReportTimings;
  Zone _onReportTimingsZone = Zone.root;
  set onReportTimings(TimingsCallback? callback) {
    // Only tell the engine to start/stop collecting timings when the callback
    // transitions between null and non-null.
    if ((callback == null) != (_onReportTimings == null)) {
      _setNeedsReportTimings(callback != null);
    }
    _onReportTimings = callback;
    _onReportTimingsZone = Zone.current;
  }

  late _SetNeedsReportTimingsFunc _setNeedsReportTimings;

  void _nativeSetNeedsReportTimings(bool value) => __nativeSetNeedsReportTimings(value);

  @Native<Void Function(Bool)>(symbol: 'PlatformConfigurationNativeApi::SetNeedsReportTimings')
  external static void __nativeSetNeedsReportTimings(bool value);

  // Called from the engine, via hooks.dart
  void _reportTimings(List<int> timings) {
    // `timings` is a flat list of records, each `FrameTiming._dataLength` long.
    assert(timings.length % FrameTiming._dataLength == 0);
    final List<FrameTiming> frameTimings = <FrameTiming>[];
    for (int i = 0; i < timings.length; i += FrameTiming._dataLength) {
      frameTimings.add(FrameTiming._(timings.sublist(i, i + FrameTiming._dataLength)));
    }
    _invoke1(onReportTimings, _onReportTimingsZone, frameTimings);
  }

  /// Sends a message to a platform-specific plugin.
  ///
  /// The `name` parameter determines which plugin receives the message. The
  /// `data` parameter contains the message payload and is typically UTF-8
  /// encoded JSON but can be arbitrary data. If the plugin replies to the
  /// message, `callback` will be called with the response.
  ///
  /// The framework invokes [callback] in the same zone in which this method was
  /// called.
  void sendPlatformMessage(String name, ByteData? data, PlatformMessageResponseCallback? callback) {
    final String? error =
        _sendPlatformMessage(name, _zonedPlatformMessageResponseCallback(callback), data);
    if (error != null) {
      throw Exception(error);
    }
  }

  String? _sendPlatformMessage(String name, PlatformMessageResponseCallback? callback, ByteData? data) =>
      __sendPlatformMessage(name, callback, data);

  @Native<Handle Function(Handle, Handle, Handle)>(symbol: 'PlatformConfigurationNativeApi::SendPlatformMessage')
  external static String? __sendPlatformMessage(String name, PlatformMessageResponseCallback? callback, ByteData? data);

  /// Sends a message to a platform-specific plugin via a [SendPort].
  ///
  /// This operates similarly to [sendPlatformMessage] but is used when sending
  /// messages from background isolates. The [port] parameter allows Flutter to
  /// know which isolate to send the result to. The [name] parameter is the name
  /// of the channel communication will happen on. The [data] parameter is the
  /// payload of the message. The [identifier] parameter is a unique integer
  /// assigned to the message.
  void sendPortPlatformMessage(
      String name,
      ByteData? data,
      int identifier,
      SendPort port) {
    final String? error =
        _sendPortPlatformMessage(name, identifier, port.nativePort, data);
    if (error != null) {
      throw Exception(error);
    }
  }

  String? _sendPortPlatformMessage(String name, int identifier, int port, ByteData? data) =>
      __sendPortPlatformMessage(name, identifier, port, data);

  @Native<Handle Function(Handle, Handle, Handle, Handle)>(symbol: 'PlatformConfigurationNativeApi::SendPortPlatformMessage')
  external static String? __sendPortPlatformMessage(String name, int identifier, int port, ByteData? data);

  /// Registers the current isolate with the isolate identified with by the
  /// [token]. This is required if platform channels are to be used on a
  /// background isolate.
  void registerBackgroundIsolate(RootIsolateToken token) {
    // Ensure plugins are registered before the isolate talks to the engine.
    DartPluginRegistrant.ensureInitialized();
    __registerBackgroundIsolate(token._token);
  }
  @Native<Void Function(Int64)>(symbol: 'PlatformConfigurationNativeApi::RegisterBackgroundIsolate')
  external static void __registerBackgroundIsolate(int rootIsolateId);

  /// Deprecated. Migrate to [ChannelBuffers.setListener] instead.
  ///
  /// Called whenever this platform dispatcher receives a message from a
  /// platform-specific plugin.
  ///
  /// The `name` parameter determines which plugin sent the message. The `data`
  /// parameter is the payload and is typically UTF-8 encoded JSON but can be
  /// arbitrary data.
  ///
  /// Message handlers must call the function given in the `callback` parameter.
  /// If the handler does not need to respond, the handler should pass null to
  /// the callback.
  ///
  /// The framework invokes this callback in the same zone in which the callback
  /// was set.
  @Deprecated(
    'Migrate to ChannelBuffers.setListener instead. '
    'This feature was deprecated after v3.11.0-20.0.pre.',
  )
  PlatformMessageCallback? get onPlatformMessage => _onPlatformMessage;
  PlatformMessageCallback? _onPlatformMessage;
  Zone _onPlatformMessageZone = Zone.root;
  @Deprecated(
    'Migrate to ChannelBuffers.setListener instead. '
    'This feature was deprecated after v3.11.0-20.0.pre.',
  )
  set onPlatformMessage(PlatformMessageCallback? callback) {
    _onPlatformMessage = callback;
    _onPlatformMessageZone = Zone.current;
  }

  /// Called by [_dispatchPlatformMessage].
  void _respondToPlatformMessage(int responseId, ByteData? data) =>
      __respondToPlatformMessage(responseId, data);

  @Native<Void Function(IntPtr, Handle)>(symbol: 'PlatformConfigurationNativeApi::RespondToPlatformMessage')
  external static void __respondToPlatformMessage(int responseId, ByteData? data);

  /// Wraps the given [callback] in another callback that ensures that the
  /// original callback is called in the zone it was registered in.
  static PlatformMessageResponseCallback? _zonedPlatformMessageResponseCallback(
    PlatformMessageResponseCallback? callback,
  ) {
    if (callback == null) {
      return null;
    }
    // Store the zone in which the callback is being registered.
    final Zone registrationZone = Zone.current;
    return (ByteData? data) {
      registrationZone.runUnaryGuarded(callback, data);
    };
  }

  /// Send a message to the framework using the [ChannelBuffers].
  ///
  /// This method constructs the appropriate callback to respond
  /// with the given `responseId`. It should only be called for messages
  /// from the platform.
  void _dispatchPlatformMessage(String name, ByteData? data, int responseId) {
    if (name == ChannelBuffers.kControlChannelName) {
      // Control-channel messages are handled synchronously and always get an
      // (empty) response, even if handling throws.
      try {
        channelBuffers.handleMessage(data!);
      } finally {
        _respondToPlatformMessage(responseId, null);
      }
    } else if (onPlatformMessage != null) {
      // Legacy path: a deprecated onPlatformMessage handler is installed.
      _invoke3<String, ByteData?, PlatformMessageResponseCallback>(
        onPlatformMessage,
        _onPlatformMessageZone,
        name,
        data,
        (ByteData? responseData) {
          _respondToPlatformMessage(responseId, responseData);
        },
      );
    } else {
      // Default path: buffer the message until a channel listener drains it.
      channelBuffers.push(name, data, (ByteData? responseData) {
        _respondToPlatformMessage(responseId, responseData);
      });
    }
  }

  /// Set the debug name associated with this platform dispatcher's root
  /// isolate.
  ///
  /// Normally debug names are automatically generated from the Dart port, entry
  /// point, and source file. For example: `main.dart$main-1234`.
  ///
  /// This can be combined with flutter tools `--isolate-filter` flag to debug
  /// specific root isolates. For example: `flutter attach --isolate-filter=[name]`.
  /// Note that this does not rename any child isolates of the root.
  void setIsolateDebugName(String name) => _setIsolateDebugName(name);

  @Native<Void Function(Handle)>(symbol: 'PlatformConfigurationNativeApi::SetIsolateDebugName')
  external static void _setIsolateDebugName(String name);

  /// Requests the Dart VM to adjust the GC heuristics based on the requested `performance_mode`.
  ///
  /// This operation is a no-op on web. The request to change a performance mode may be
  /// ignored by the engine or may not resolve in a predictable way.
  ///
  /// See [DartPerformanceMode] for more information on individual performance modes.
  void requestDartPerformanceMode(DartPerformanceMode mode) {
    _requestDartPerformanceMode(mode.index);
  }

  @Native<Int Function(Int)>(symbol: 'PlatformConfigurationNativeApi::RequestDartPerformanceMode')
  external static int _requestDartPerformanceMode(int mode);

  /// The embedder can specify data that the isolate can request synchronously
  /// on launch. This accessor fetches that data.
  ///
  /// This data is persistent for the duration of the Flutter application and is
  /// available even after isolate restarts. Because of this lifecycle, the size
  /// of this data must be kept to a minimum.
  ///
  /// For asynchronous communication between the embedder and isolate, a
  /// platform channel may be used.
  ByteData? getPersistentIsolateData() => _getPersistentIsolateData();

  @Native<Handle Function()>(symbol: 'PlatformConfigurationNativeApi::GetPersistentIsolateData')
  external static ByteData? _getPersistentIsolateData();

  /// Requests that, at the next appropriate opportunity, the [onBeginFrame] and
  /// [onDrawFrame] callbacks be invoked.
  ///
  /// See also:
  ///
  /// * [SchedulerBinding], the Flutter framework class which manages the
  ///   scheduling of frames.
  /// * [scheduleWarmUpFrame], which should only be used to schedule warm up
  ///   frames.
  void scheduleFrame() => _scheduleFrame();

  @Native<Void Function()>(symbol: 'PlatformConfigurationNativeApi::ScheduleFrame')
  external static void _scheduleFrame();

  /// Schedule a frame to run as soon as possible, rather than waiting for the
  /// engine to request a frame in response to a system "Vsync" signal.
  ///
  /// The application can call this method as soon as it starts up so that the
  /// first frame (which is likely to be quite expensive) can start a few extra
  /// milliseconds earlier. Using it in other situations might lead to
  /// unintended results, such as screen tearing. Depending on platforms and
  /// situations, the warm up frame might or might not be actually rendered onto
  /// the screen.
  ///
  /// For more introduction to the warm up frame, see
  /// [SchedulerBinding.scheduleWarmUpFrame].
  ///
  /// This method uses the provided callbacks as the begin frame callback and
  /// the draw frame callback instead of [onBeginFrame] and [onDrawFrame].
  ///
  /// See also:
  ///
  /// * [SchedulerBinding.scheduleWarmUpFrame], which uses this method, and
  ///   introduces the warm up frame in more details.
  /// * [scheduleFrame], which schedules the frame at the next appropriate
  ///   opportunity and should be used to render regular frames.
  void scheduleWarmUpFrame({required VoidCallback beginFrame, required VoidCallback drawFrame}) {
    // We use timers here to ensure that microtasks flush in between.
    Timer.run(beginFrame);
    Timer.run(() {
      drawFrame();
      _endWarmUpFrame();
    });
  }

  @Native<Void Function()>(symbol: 'PlatformConfigurationNativeApi::EndWarmUpFrame')
  external static void _endWarmUpFrame();

  /// Additional accessibility features that may be enabled by the platform.
  AccessibilityFeatures get accessibilityFeatures => _configuration.accessibilityFeatures;

  /// A callback that is invoked when the value of [accessibilityFeatures]
  /// changes.
  ///
  /// The framework invokes this callback in the same zone in which the callback
  /// was set.
  VoidCallback? get onAccessibilityFeaturesChanged => _onAccessibilityFeaturesChanged;
  VoidCallback? _onAccessibilityFeaturesChanged;
  Zone _onAccessibilityFeaturesChangedZone = Zone.root;
  set onAccessibilityFeaturesChanged(VoidCallback? callback) {
    _onAccessibilityFeaturesChanged = callback;
    _onAccessibilityFeaturesChangedZone = Zone.current;
  }

  // Called from the engine, via hooks.dart
  void _updateAccessibilityFeatures(int values) {
    final AccessibilityFeatures newFeatures = AccessibilityFeatures._(values);
    final _PlatformConfiguration previousConfiguration = _configuration;
    // No-op when the bitmask is unchanged, so listeners are not re-notified.
    if (newFeatures == previousConfiguration.accessibilityFeatures) {
      return;
    }
    _configuration = previousConfiguration.copyWith(
      accessibilityFeatures: newFeatures,
    );
    _invoke(onPlatformConfigurationChanged, _onPlatformConfigurationChangedZone,);
    _invoke(onAccessibilityFeaturesChanged, _onAccessibilityFeaturesChangedZone,);
  }

  /// Change the retained semantics data about this platform dispatcher.
  ///
  /// If [semanticsEnabled] is true, the user has requested that this function
  /// be called whenever the semantic content of this platform dispatcher
  /// changes.
  ///
  /// In either case, this function disposes the given update, which means the
  /// semantics update cannot be used further.
  @Deprecated('''
    In a multi-view world, the platform dispatcher can no longer provide apis
    to update semantics since each view will host its own semantics tree.

    Semantics updates must be passed to an individual [FlutterView]. To update
    semantics, use PlatformDispatcher.instance.views to get a [FlutterView] and
    call `updateSemantics`.
  ''')
  void updateSemantics(SemanticsUpdate update) =>
      _updateSemantics(update as _NativeSemanticsUpdate);

  @Native<Void Function(Pointer<Void>)>(symbol: 'PlatformConfigurationNativeApi::UpdateSemantics')
  external static void _updateSemantics(_NativeSemanticsUpdate update);

  /// The system-reported default locale of the device.
  ///
  /// This establishes the language and formatting conventions that applications
  /// should, if possible, use to render their user interface.
  ///
  /// This is the first locale selected by the user and is the user's primary
  /// locale (the locale the device UI is displayed in).
  ///
  /// This is equivalent to `locales.first`, except that it will provide an
  /// undefined (using the language tag "und") non-null locale if the [locales]
  /// list has not been set or is empty.
  Locale get locale => locales.isEmpty ? const Locale.fromSubtags() : locales.first;

  /// The full system-reported supported locales of the device.
  ///
  /// This establishes the language and formatting conventions that applications
  /// should, if possible, use to render their user interface.
  ///
  /// The list is ordered in order of priority, with lower-indexed locales being
  /// preferred over higher-indexed ones. The first element is the primary
  /// [locale].
  ///
  /// The [onLocaleChanged] callback is called whenever this value changes.
  ///
  /// See also:
  ///
  ///  * [WidgetsBindingObserver], for a mechanism at the widgets layer to
  ///    observe when this value changes.
  List<Locale> get locales => _configuration.locales;

  /// Performs the platform-native locale resolution.
  ///
  /// Each platform may return different results.
  ///
  /// If the platform fails to resolve a locale, then this will return null.
  ///
  /// This method returns synchronously and is a direct call to
  /// platform specific APIs without invoking method channels.
  Locale? computePlatformResolvedLocale(List<Locale> supportedLocales) {
    // Flatten each supported locale into (language, country, script) strings
    // for the native call.
    final List<String?> supportedLocalesData = <String?>[];
    for (final Locale locale in supportedLocales) {
      supportedLocalesData.add(locale.languageCode);
      supportedLocalesData.add(locale.countryCode);
      supportedLocalesData.add(locale.scriptCode);
    }

    final List<String> result = _computePlatformResolvedLocale(supportedLocalesData);

    // An empty result means the platform could not resolve a locale.
    if (result.isNotEmpty) {
      return Locale.fromSubtags(
          languageCode: result[0],
          countryCode: result[1] == '' ? null : result[1],
          scriptCode: result[2] == '' ? null : result[2]);
    }
    return null;
  }

  List<String> _computePlatformResolvedLocale(List<String?> supportedLocalesData) =>
      __computePlatformResolvedLocale(supportedLocalesData);

  @Native<Handle Function(Handle)>(symbol: 'PlatformConfigurationNativeApi::ComputePlatformResolvedLocale')
  external static List<String> __computePlatformResolvedLocale(List<String?> supportedLocalesData);

  /// A callback that is invoked whenever [locale] changes value.
  ///
  /// The framework invokes this callback in the same zone in which the callback
  /// was set.
  ///
  /// See also:
  ///
  ///  * [WidgetsBindingObserver], for a mechanism at the widgets layer to
  ///    observe when this callback is invoked.
  VoidCallback? get onLocaleChanged => _onLocaleChanged;
  VoidCallback? _onLocaleChanged;
  Zone _onLocaleChangedZone = Zone.root;
  set onLocaleChanged(VoidCallback? callback) {
    _onLocaleChanged = callback;
    // Capture the zone the callback was set in, so it is invoked there later.
    _onLocaleChangedZone = Zone.current;
  }

  // Called from the engine, via hooks.dart
  void _updateLocales(List<String> locales) {
    // Each locale arrives as four strings; only the first three (language,
    // country, script) are read here — the fourth is presumably a variant
    // code, unused in this method. TODO confirm against hooks.dart.
    const int stringsPerLocale = 4;
    final int numLocales = locales.length ~/ stringsPerLocale;
    final _PlatformConfiguration previousConfiguration = _configuration;
    final List<Locale> newLocales = <Locale>[];
    // Track whether the new list actually differs, to avoid spurious
    // callback invocations below.
    bool localesDiffer = numLocales != previousConfiguration.locales.length;
    for (int localeIndex = 0; localeIndex < numLocales; localeIndex++) {
      final String countryCode = locales[localeIndex * stringsPerLocale + 1];
      final String scriptCode = locales[localeIndex * stringsPerLocale + 2];

      newLocales.add(Locale.fromSubtags(
        languageCode: locales[localeIndex * stringsPerLocale],
        countryCode: countryCode.isEmpty ? null : countryCode,
        scriptCode: scriptCode.isEmpty ? null : scriptCode,
      ));

      if (!localesDiffer && newLocales[localeIndex] != previousConfiguration.locales[localeIndex]) {
        localesDiffer = true;
      }
    }

    if (!localesDiffer) {
      return;
    }

    _configuration = previousConfiguration.copyWith(locales: newLocales);
    _invoke(onPlatformConfigurationChanged, _onPlatformConfigurationChangedZone);
    _invoke(onLocaleChanged, _onLocaleChangedZone);
  }

  // Called from the engine, via hooks.dart
  String _localeClosure() => locale.toString();

  /// The lifecycle state immediately after dart isolate initialization.
  ///
  /// This property will not be updated as the lifecycle changes.
  ///
  /// It is used to initialize [SchedulerBinding.lifecycleState] at startup with
  /// any buffered lifecycle state events.
  String get initialLifecycleState {
    // Record the access so _updateInitialLifecycleState stops overwriting
    // the value (see _initialLifecycleStateAccessed).
    _initialLifecycleStateAccessed = true;
    return _initialLifecycleState;
  }

  late String _initialLifecycleState;

  /// Tracks if the initial state has been accessed. Once accessed, we will stop
  /// updating the [initialLifecycleState], as it is not the preferred way to
  /// access the state.
  bool _initialLifecycleStateAccessed = false;

  // Called from the engine, via hooks.dart
  void _updateInitialLifecycleState(String state) {
    // We do not update the state if the state has already been used to initialize
    // the lifecycleState.
    if (!_initialLifecycleStateAccessed) {
      _initialLifecycleState = state;
    }
  }

  /// The setting indicating whether time should always be shown in the 24-hour
  /// format.
  ///
  /// This option is used by [showTimePicker].
  bool get alwaysUse24HourFormat => _configuration.alwaysUse24HourFormat;

  /// The system-reported text scale.
  ///
  /// This establishes the text scaling factor to use when rendering text,
  /// according to the user's platform preferences.
  ///
  /// The [onTextScaleFactorChanged] callback is called whenever this value
  /// changes.
  ///
  /// See also:
  ///
  ///  * [WidgetsBindingObserver], for a mechanism at the widgets layer to
  ///    observe when this value changes.
  double get textScaleFactor => _configuration.textScaleFactor;

  /// A callback that is invoked whenever [textScaleFactor] changes value.
  ///
  /// The framework invokes this callback in the same zone in which the callback
  /// was set.
  ///
  /// See also:
  ///
  ///  * [WidgetsBindingObserver], for a mechanism at the widgets layer to
  ///    observe when this callback is invoked.
  VoidCallback? get onTextScaleFactorChanged => _onTextScaleFactorChanged;
  VoidCallback? _onTextScaleFactorChanged;
  Zone _onTextScaleFactorChangedZone = Zone.root;
  set onTextScaleFactorChanged(VoidCallback? callback) {
    _onTextScaleFactorChanged = callback;
    // Capture the zone the callback was set in, so it is invoked there later.
    _onTextScaleFactorChangedZone = Zone.current;
  }

  /// Whether the spell check service is supported on the current platform.
  ///
  /// This option is used by [EditableTextState] to define its
  /// [SpellCheckConfiguration] when a default spell check service
  /// is requested.
  bool get nativeSpellCheckServiceDefined => _nativeSpellCheckServiceDefined;
  bool _nativeSpellCheckServiceDefined = false;

  /// Whether briefly displaying the characters as you type in obscured text
  /// fields is enabled in system settings.
  ///
  /// See also:
  ///
  ///  * [EditableText.obscureText], which when set to true hides the text in
  ///    the text field.
  bool get brieflyShowPassword => _brieflyShowPassword;
  bool _brieflyShowPassword = true;

  /// The setting indicating the current brightness mode of the host platform.
  /// If the platform has no preference, [platformBrightness] defaults to
  /// [Brightness.light].
  Brightness get platformBrightness => _configuration.platformBrightness;

  /// A callback that is invoked whenever [platformBrightness] changes value.
  ///
  /// The framework invokes this callback in the same zone in which the callback
  /// was set.
  ///
  /// See also:
  ///
  ///  * [WidgetsBindingObserver], for a mechanism at the widgets layer to
  ///    observe when this callback is invoked.
  VoidCallback? get onPlatformBrightnessChanged => _onPlatformBrightnessChanged;
  VoidCallback? _onPlatformBrightnessChanged;
  Zone _onPlatformBrightnessChangedZone = Zone.root;
  set onPlatformBrightnessChanged(VoidCallback? callback) {
    _onPlatformBrightnessChanged = callback;
    // Capture the zone the callback was set in, so it is invoked there later.
    _onPlatformBrightnessChangedZone = Zone.current;
  }

  /// The setting indicating the current system font of the host platform.
  String? get systemFontFamily => _configuration.systemFontFamily;

  /// A callback that is invoked whenever [systemFontFamily] changes value.
  ///
  /// The framework invokes this callback in the same zone in which the callback
  /// was set.
  ///
  /// See also:
  ///
  ///  * [WidgetsBindingObserver], for a mechanism at the widgets layer to
  ///    observe when this callback is invoked.
  VoidCallback? get onSystemFontFamilyChanged => _onSystemFontFamilyChanged;
  VoidCallback? _onSystemFontFamilyChanged;
  Zone _onSystemFontFamilyChangedZone = Zone.root;
  set onSystemFontFamilyChanged(VoidCallback? callback) {
    _onSystemFontFamilyChanged = callback;
    // Capture the zone the callback was set in, so it is invoked there later.
    _onSystemFontFamilyChangedZone = Zone.current;
  }

  // Called from the engine, via hooks.dart
  //
  // Decodes the JSON settings payload from the platform, updates the cached
  // configuration, and fires the relevant change callbacks for any values
  // that actually changed.
  void _updateUserSettingsData(String jsonData) {
    final Map<String, Object?> data = json.decode(jsonData) as Map<String, Object?>;
    if (data.isEmpty) {
      return;
    }

    // textScaleFactor and alwaysUse24HourFormat are required keys; the
    // non-null asserts (!) will throw if the engine omits them.
    final double textScaleFactor = (data['textScaleFactor']! as num).toDouble();
    final bool alwaysUse24HourFormat = data['alwaysUse24HourFormat']! as bool;
    final bool? nativeSpellCheckServiceDefined = data['nativeSpellCheckServiceDefined'] as bool?;
    if (nativeSpellCheckServiceDefined != null) {
      _nativeSpellCheckServiceDefined = nativeSpellCheckServiceDefined;
    } else {
      _nativeSpellCheckServiceDefined = false;
    }
    // This field is optional.
    final bool? brieflyShowPassword = data['brieflyShowPassword'] as bool?;
    if (brieflyShowPassword != null) {
      _brieflyShowPassword = brieflyShowPassword;
    }
    final Brightness platformBrightness = switch (data['platformBrightness']) {
      'dark' => Brightness.dark,
      'light' => Brightness.light,
      final Object? value => throw StateError('$value is not a valid platformBrightness.'),
    };
    final String? systemFontFamily = data['systemFontFamily'] as String?;
    final int? configurationId = data['configurationId'] as int?;
    final _PlatformConfiguration previousConfiguration = _configuration;
    final bool platformBrightnessChanged =
        previousConfiguration.platformBrightness != platformBrightness;
    final bool textScaleFactorChanged = previousConfiguration.textScaleFactor != textScaleFactor;
    final bool alwaysUse24HourFormatChanged =
        previousConfiguration.alwaysUse24HourFormat != alwaysUse24HourFormat;
    final bool systemFontFamilyChanged =
        previousConfiguration.systemFontFamily != systemFontFamily;
    // A non-null configurationId forces an update even when all the visible
    // settings are unchanged.
    if (!platformBrightnessChanged &&
        !textScaleFactorChanged &&
        !alwaysUse24HourFormatChanged &&
        !systemFontFamilyChanged &&
        configurationId == null) {
      return;
    }

    _configuration = previousConfiguration.copyWith(
      textScaleFactor: textScaleFactor,
      alwaysUse24HourFormat: alwaysUse24HourFormat,
      platformBrightness: platformBrightness,
      systemFontFamily: systemFontFamily,
      configurationId: configurationId,
    );

    _invoke(onPlatformConfigurationChanged, _onPlatformConfigurationChangedZone);
    if (textScaleFactorChanged) {
      // The memoized nonlinear font sizes depend on the scale factor;
      // invalidate them (see _cachedFontSizes / scaleFontSize).
      _cachedFontSizes = null;
      _invoke(onTextScaleFactorChanged, _onTextScaleFactorChangedZone);
    }
    if (platformBrightnessChanged) {
      _invoke(onPlatformBrightnessChanged, _onPlatformBrightnessChangedZone);
    }
    if (systemFontFamilyChanged) {
      _invoke(onSystemFontFamilyChanged, _onSystemFontFamilyChangedZone);
    }
  }

  /// Whether the user has requested that updateSemantics be called when the
  /// semantic contents of a view changes.
  ///
  /// The [onSemanticsEnabledChanged] callback is called whenever this value
  /// changes.
  bool get semanticsEnabled => _configuration.semanticsEnabled;

  /// A callback that is invoked when the value of [semanticsEnabled] changes.
  ///
  /// The framework invokes this callback in the same zone in which the
  /// callback was set.
  VoidCallback? get onSemanticsEnabledChanged => _onSemanticsEnabledChanged;
  VoidCallback? _onSemanticsEnabledChanged;
  Zone _onSemanticsEnabledChangedZone = Zone.root;
  set onSemanticsEnabledChanged(VoidCallback? callback) {
    _onSemanticsEnabledChanged = callback;
    // Capture the zone the callback was set in, so it is invoked there later.
    _onSemanticsEnabledChangedZone = Zone.current;
  }

  // Called from the engine, via hooks.dart
  void _updateSemanticsEnabled(bool enabled) {
    final _PlatformConfiguration previousConfiguration = _configuration;
    // Skip callback dispatch when the value is unchanged.
    if (previousConfiguration.semanticsEnabled == enabled) {
      return;
    }
    _configuration = previousConfiguration.copyWith(
      semanticsEnabled: enabled,
    );
    _invoke(onPlatformConfigurationChanged, _onPlatformConfigurationChangedZone);
    _invoke(onSemanticsEnabledChanged, _onSemanticsEnabledChangedZone);
  }

  /// A callback that is invoked whenever the user requests an action to be
  /// performed on a semantics node.
  ///
  /// This callback is used when the user expresses the action they wish to
  /// perform based on the semantics node supplied by updateSemantics.
  ///
  /// The framework invokes this callback in the same zone in which the
  /// callback was set.
  SemanticsActionEventCallback? get onSemanticsActionEvent => _onSemanticsActionEvent;
  SemanticsActionEventCallback? _onSemanticsActionEvent;
  Zone _onSemanticsActionEventZone = Zone.root;
  set onSemanticsActionEvent(SemanticsActionEventCallback? callback) {
    _onSemanticsActionEvent = callback;
    // Capture the zone the callback was set in, so it is invoked there later.
    _onSemanticsActionEventZone = Zone.current;
  }

  // Called from the engine via hooks.dart.
  void _updateFrameData(int frameNumber) {
    final FrameData previous = _frameData;
    // Skip callback dispatch when the frame number is unchanged.
    if (previous.frameNumber == frameNumber) {
      return;
    }
    _frameData = FrameData._(frameNumber: frameNumber);
    _invoke(onFrameDataChanged, _onFrameDataChangedZone);
  }

  /// The [FrameData] object for the current frame.
  FrameData get frameData => _frameData;
  FrameData _frameData = const FrameData._();

  /// A callback that is invoked when the window updates the [FrameData].
  VoidCallback? get onFrameDataChanged => _onFrameDataChanged;
  VoidCallback? _onFrameDataChanged;
  Zone _onFrameDataChangedZone = Zone.root;
  set onFrameDataChanged(VoidCallback? callback) {
    _onFrameDataChanged = callback;
    // Capture the zone the callback was set in, so it is invoked there later.
    _onFrameDataChangedZone = Zone.current;
  }

  // Called from the engine, via hooks.dart
  void _dispatchSemanticsAction(int nodeId, int action, ByteData? args) {
    _invoke1<SemanticsActionEvent>(
      onSemanticsActionEvent,
      _onSemanticsActionEventZone,
      SemanticsActionEvent(
        type: SemanticsAction.fromIndex(action)!,
        nodeId: nodeId,
        viewId: 0, // TODO(goderbauer): Wire up the real view ID.
        arguments: args,
      ),
    );
  }

  ErrorCallback? _onError;
  Zone? _onErrorZone;

  /// A callback that is invoked when an unhandled error occurs in the root
  /// isolate.
  ///
  /// This callback must return `true` if it has handled the error. Otherwise,
  /// it must return `false` and a fallback mechanism such as printing to stderr
  /// will be used, as configured by the specific platform embedding via
  /// `Settings::unhandled_exception_callback`.
  ///
  /// The VM or the process may exit or become unresponsive after calling this
  /// callback. The callback will not be called for exceptions that cause the VM
  /// or process to terminate or become unresponsive before the callback can be
  /// invoked.
  ///
  /// This callback is not directly invoked by errors in child isolates of the
  /// root isolate. Programs that create new isolates must listen for errors on
  /// those isolates and forward the errors to the root isolate.
  ErrorCallback? get onError => _onError;
  set onError(ErrorCallback? callback) {
    _onError = callback;
    // Capture the zone the callback was set in, so it is invoked there later.
    _onErrorZone = Zone.current;
  }

  // Routes an unhandled error to the registered onError callback, running it
  // in the zone it was registered in. Returns false when no callback is set
  // or when the callback itself throws.
  bool _dispatchError(Object error, StackTrace stackTrace) {
    if (_onError == null) {
      return false;
    }
    assert(_onErrorZone != null);

    if (identical(_onErrorZone, Zone.current)) {
      // Already in the registration zone; call directly.
      return _onError!(error, stackTrace);
    } else {
      try {
        return _onErrorZone!.runBinary<bool, Object, StackTrace>(_onError!, error, stackTrace);
      } catch (e, s) {
        // An error thrown by the error handler itself becomes an uncaught
        // error of the handler's zone, and the original error is reported
        // as unhandled.
        _onErrorZone!.handleUncaughtError(e, s);
        return false;
      }
    }
  }

  /// The route or path that the embedder requested when the application was
  /// launched.
  ///
  /// This will be the string "`/`" if no particular route was requested.
  ///
  /// ## Android
  ///
  /// On Android, calling
  /// [`FlutterView.setInitialRoute`](/javadoc/io/flutter/view/FlutterView.html#setInitialRoute-java.lang.String-)
  /// will set this value. The value must be set sufficiently early, i.e. before
  /// the [runApp] call is executed in Dart, for this to have any effect on the
  /// framework. The `createFlutterView` method in your `FlutterActivity`
  /// subclass is a suitable time to set the value. The application's
  /// `AndroidManifest.xml` file must also be updated to have a suitable
  /// [`<intent-filter>`](https://developer.android.com/guide/topics/manifest/intent-filter-element.html).
  ///
  /// ## iOS
  ///
  /// On iOS, calling
  /// [`FlutterViewController.setInitialRoute`](/ios-embedder/interface_flutter_view_controller.html#a7f269c2da73312f856d42611cc12a33f)
  /// will set this value. The value must be set sufficiently early, i.e. before
  /// the [runApp] call is executed in Dart, for this to have any effect on the
  /// framework. The `application:didFinishLaunchingWithOptions:` method is a
  /// suitable time to set this value.
  ///
  /// See also:
  ///
  ///  * [Navigator], a widget that handles routing.
  ///  * [SystemChannels.navigation], which handles subsequent navigation
  ///    requests from the embedder.
  String get defaultRouteName => _defaultRouteName();

  @Native<Handle Function()>(symbol: 'PlatformConfigurationNativeApi::DefaultRouteName')
  external static String _defaultRouteName();

  /// Computes the scaled font size from the given `unscaledFontSize`, according
  /// to the user's platform preferences.
  ///
  /// Many platforms allow users to scale text globally for better readability.
  /// Given the font size the app developer specified in logical pixels, this
  /// method converts it to the preferred font size (also in logical pixels) that
  /// accounts for platform-wide text scaling. The return value is always
  /// non-negative.
  ///
  /// The scaled value of the same font size input may change if the user changes
  /// the text scaling preference (in system settings for example). The
  /// [onTextScaleFactorChanged] callback can be used to monitor such changes.
  ///
  /// Instead of directly calling this method, applications should typically use
  /// [MediaQuery.textScalerOf] to retrieve the scaled font size in a widget tree,
  /// so text in the app resizes properly when the text scaling preference
  /// changes.
  double scaleFontSize(double unscaledFontSize) {
    assert(unscaledFontSize >= 0);
    assert(unscaledFontSize.isFinite);
    // Fast path: a scale factor of 1.0 never changes the size.
    if (textScaleFactor == 1.0) {
      return unscaledFontSize;
    }
    // The platform scaling table is only queried at integer font sizes;
    // fractional inputs are linearly interpolated between the two
    // neighboring integers.
    final int unscaledFloor = unscaledFontSize.floor();
    final int unscaledCeil = unscaledFontSize.ceil();
    if (unscaledFloor == unscaledCeil) {
      // No need to interpolate if the input value is an integer.
      return _scaleAndMemoize(unscaledFloor) ?? unscaledFontSize * textScaleFactor;
    }
    assert(unscaledCeil - unscaledFloor == 1, 'Unexpected interpolation range: $unscaledFloor - $unscaledCeil.');
    // If either lookup fails, fall back to linear scaling.
    return switch ((_scaleAndMemoize(unscaledFloor), _scaleAndMemoize(unscaledCeil))) {
      (null, _) || (_, null) => unscaledFontSize * textScaleFactor,
      (final double lower, final double upper) =>
        lower + (upper - lower) * (unscaledFontSize - unscaledFloor),
    };
  }

  // The cache is cleared when the text scale factor changes.
  Map<int, double>? _cachedFontSizes;

  // This method returns null if an error is encountered.
  double? _scaleAndMemoize(int unscaledFontSize) {
    final int? configurationId = _configuration.configurationId;
    if (configurationId == null) {
      // The platform uses linear scaling, or the platform hasn't sent us a
      // configuration yet.
      return null;
    }
    final double? cachedValue = _cachedFontSizes?[unscaledFontSize];
    if (cachedValue != null) {
      assert(cachedValue >= 0);
      return cachedValue;
    }

    final double unscaledFontSizeDouble = unscaledFontSize.toDouble();
    final double fontSize =
        PlatformDispatcher._getScaledFontSize(unscaledFontSizeDouble, configurationId);
    // Non-negative results are valid font sizes; negative values are error
    // codes from the embedder.
    if (fontSize >= 0) {
      return (_cachedFontSizes ??= <int, double>{})[unscaledFontSize] = fontSize;
    }
    switch (fontSize) {
      case -1:
        // Invalid configuration id. This error can be unrecoverable as the
        // _getScaledFontSize function can be destructive.
        assert(false, 'Flutter Error: incorrect configuration id: $configurationId.');
      case final double errorCode:
        assert(false, 'Unknown error: GetScaledFontSize failed with $errorCode.');
    }
    return null;
  }

  // Calls the platform's text scaling implementation to scale the given
  // `unscaledFontSize`.
  //
  // The `configurationId` parameter tells the embedder which platform
  // configuration to use for computing the scaled font size. When the user
  // changes the platform configuration, the configuration data will first be
  // made available on the platform thread before being dispatched asynchronously
  // to the Flutter UI thread. Since this call is synchronous, without this
  // identifier, it could call into the embedder who's using a newer configuration
  // that Flutter has not received yet. The `configurationId` parameter must be
  // the latest configuration id received from the platform
  // (`_configuration.configurationId`). Using an incorrect id could result in
  // an unrecoverable error.
  //
  // Currently this is only implemented on newer versions of Android (SDK level
  // 34, using the `TypedValue#applyDimension` API). Platforms that do not have
  // the capability will never send a `configurationId` to [PlatformDispatcher],
  // and should not call this method. This method returns -1 when the specified
  // configurationId does not match any configuration.
  @Native<Double Function(Double, Int)>(symbol: 'PlatformConfigurationNativeApi::GetScaledFontSize')
  external static double _getScaledFontSize(double unscaledFontSize, int configurationId);
}

/// Configuration of the platform.
///
/// Immutable class (but can't use @immutable in dart:ui)
class _PlatformConfiguration {
  const _PlatformConfiguration({
    this.accessibilityFeatures = const AccessibilityFeatures._(0),
    this.alwaysUse24HourFormat = false,
    this.semanticsEnabled = false,
    this.platformBrightness = Brightness.light,
    this.textScaleFactor = 1.0,
    this.locales = const <Locale>[],
    this.defaultRouteName,
    this.systemFontFamily,
    this.configurationId,
  });

  /// Returns a copy of this configuration with the given fields replaced and
  /// all other fields retained.
  _PlatformConfiguration copyWith({
    AccessibilityFeatures? accessibilityFeatures,
    bool? alwaysUse24HourFormat,
    bool? semanticsEnabled,
    Brightness? platformBrightness,
    double? textScaleFactor,
    List<Locale>? locales,
    String? defaultRouteName,
    String? systemFontFamily,
    int? configurationId,
  }) {
    return _PlatformConfiguration(
      accessibilityFeatures: accessibilityFeatures ?? this.accessibilityFeatures,
      alwaysUse24HourFormat: alwaysUse24HourFormat ?? this.alwaysUse24HourFormat,
      semanticsEnabled: semanticsEnabled ?? this.semanticsEnabled,
      platformBrightness: platformBrightness ?? this.platformBrightness,
      textScaleFactor: textScaleFactor ?? this.textScaleFactor,
      locales: locales ?? this.locales,
      defaultRouteName: defaultRouteName ?? this.defaultRouteName,
      systemFontFamily: systemFontFamily ?? this.systemFontFamily,
      configurationId: configurationId ?? this.configurationId,
    );
  }

  /// Additional accessibility features that may be enabled by the platform.
  final AccessibilityFeatures accessibilityFeatures;

  /// The setting indicating whether time should always be shown in the 24-hour
  /// format.
  final bool alwaysUse24HourFormat;

  /// Whether the user has requested that updateSemantics be called when the
  /// semantic contents of a view changes.
  final bool semanticsEnabled;

  /// The setting indicating the current brightness mode of the host platform.
  /// If the platform has no preference, [platformBrightness] defaults to
  /// [Brightness.light].
  final Brightness platformBrightness;

  /// The system-reported text scale.
  final double textScaleFactor;

  /// The full system-reported supported locales of the device.
  final List<Locale> locales;

  /// The route or path that the embedder requested when the application was
  /// launched.
  final String? defaultRouteName;

  /// The system-reported default font family.
  final String? systemFontFamily;

  /// A unique identifier for this [_PlatformConfiguration].
  ///
  /// This unique identifier is optionally assigned by the platform embedder.
  /// Dart code that runs on the Flutter UI thread and synchronously invokes
  /// platform APIs can use this identifier to tell the embedder to use the
  /// configuration that matches the current [_PlatformConfiguration] in
  /// dart:ui. See the [_getScaledFontSize] function for an example.
  ///
  /// This field's nullability also indicates whether the platform supports
  /// nonlinear text scaling (as it's the only feature that requires synchronous
  /// invocation of platform APIs). This field is always null if the platform
  /// does not use nonlinear text scaling, or when dart:ui has not received any
  /// configuration updates from the embedder yet. The _getScaledFontSize
  /// function should not be called in either case.
  final int? configurationId;
}

/// An immutable view configuration.
class _ViewConfiguration {
  const _ViewConfiguration({
    this.devicePixelRatio = 1.0,
    this.size = Size.zero,
    this.viewInsets = ViewPadding.zero,
    this.viewPadding = ViewPadding.zero,
    this.systemGestureInsets = ViewPadding.zero,
    this.padding = ViewPadding.zero,
    this.gestureSettings = const GestureSettings(),
    this.displayFeatures = const <DisplayFeature>[],
    this.displayId = 0,
  });

  /// The identifier for a display for this view, in
  /// [PlatformDispatcher._displays].
  final int displayId;

  /// The pixel density of the output surface.
  final double devicePixelRatio;

  /// The size requested for the view in physical pixels.
  final Size size;

  /// The number of physical pixels on each side of the display rectangle into
  /// which the view can render, but over which the operating system will likely
  /// place system UI, such as the keyboard, that fully obscures any content.
  ///
  /// The relationship between this [viewInsets], [viewPadding], and [padding]
  /// are described in more detail in the documentation for [FlutterView].
  final ViewPadding viewInsets;

  /// The number of physical pixels on each side of the display rectangle into
  /// which the view can render, but which may be partially obscured by system
  /// UI (such as the system notification area), or physical intrusions in
  /// the display (e.g. overscan regions on television screens or phone sensor
  /// housings).
  ///
  /// Unlike [padding], this value does not change relative to [viewInsets].
  /// For example, on an iPhone X, it will not change in response to the soft
  /// keyboard being visible or hidden, whereas [padding] will.
  ///
  /// The relationship between this [viewInsets], [viewPadding], and [padding]
  /// are described in more detail in the documentation for [FlutterView].
  final ViewPadding viewPadding;

  /// The number of physical pixels on each side of the display rectangle into
  /// which the view can render, but where the operating system will consume
  /// input gestures for the sake of system navigation.
  ///
  /// For example, an operating system might use the vertical edges of the
  /// screen, where swiping inwards from the edges takes users backward
  /// through the history of screens they previously visited.
  final ViewPadding systemGestureInsets;

  /// The number of physical pixels on each side of the display rectangle into
  /// which the view can render, but which may be partially obscured by system
  /// UI (such as the system notification area), or physical intrusions in
  /// the display (e.g. overscan regions on television screens or phone sensor
  /// housings).
  ///
  /// The relationship between this [viewInsets], [viewPadding], and [padding]
  /// are described in more detail in the documentation for [FlutterView].
  final ViewPadding padding;

  /// Additional configuration for touch gestures performed on this view.
  ///
  /// For example, the touch slop defined in physical pixels may be provided
  /// by the gesture settings and should be preferred over the framework
  /// touch slop constant.
  final GestureSettings gestureSettings;

  /// Areas of the display that are obstructed by hardware features.
  ///
  /// This list is populated only on Android. If the device has no display
  /// features, this list is empty.
  ///
  /// The coordinate space in which the [DisplayFeature.bounds] are defined spans
  /// across the screens currently in use. This means that the space between the screens
  /// is virtually part of the Flutter view space, with the [DisplayFeature.bounds]
  /// of the display feature as an obstructed area. The [DisplayFeature.type] can
  /// be used to determine if this display feature obstructs the screen or not.
  /// For example, [DisplayFeatureType.hinge] and [DisplayFeatureType.cutout] both
  /// obstruct the display, while [DisplayFeatureType.fold] is a crease in the display.
  ///
  /// Folding [DisplayFeature]s like the [DisplayFeatureType.hinge] and
  /// [DisplayFeatureType.fold] also have a [DisplayFeature.state] which can be
  /// used to determine the posture the device is in.
  final List<DisplayFeature> displayFeatures;

  @override
  String toString() {
    return '$runtimeType[size: $size]';
  }
}

/// Various important time points in the lifetime of a frame.
///
/// [FrameTiming] records a timestamp of each phase for performance analysis.
enum FramePhase {
  /// The timestamp of the vsync signal given by the operating system.
  ///
  /// See also [FrameTiming.vsyncOverhead].
  vsyncStart,

  /// When the UI thread starts building a frame.
  ///
  /// See also [FrameTiming.buildDuration].
  buildStart,

  /// When the UI thread finishes building a frame.
  ///
  /// See also [FrameTiming.buildDuration].
  buildFinish,

  /// When the raster thread starts rasterizing a frame.
  ///
  /// See also [FrameTiming.rasterDuration].
  rasterStart,

  /// When the raster thread finishes rasterizing a frame.
  ///
  /// See also [FrameTiming.rasterDuration].
  rasterFinish,

  /// When the raster thread finished rasterizing a frame in wall-time.
  ///
  /// This is useful for correlating the raster finish time with the system
  /// clock to integrate with other profiling tools.
  rasterFinishWallTime,
}

// Extra per-frame statistics carried in the same raw data list as the
// [FramePhase] timestamps (see FrameTiming._data).
enum _FrameTimingInfo {
  /// The number of engine layers cached in the raster cache during the frame.
  layerCacheCount,

  /// The number of bytes used to cache engine layers during the frame.
  layerCacheBytes,

  /// The number of picture layers cached in the raster cache during the frame.
  pictureCacheCount,

  /// The number of bytes used to cache pictures during the frame.
  pictureCacheBytes,

  /// The frame number of the frame.
  frameNumber,
}

/// Time-related performance metrics of a frame.
///
/// If you're using the whole Flutter framework, please use
/// [SchedulerBinding.addTimingsCallback] to get this. It's preferred over using
/// [PlatformDispatcher.onReportTimings] directly because
/// [SchedulerBinding.addTimingsCallback] allows multiple callbacks. If
/// [SchedulerBinding] is unavailable, then see [PlatformDispatcher.onReportTimings]
/// for how to get this.
///
/// The metrics in debug mode (`flutter run` without any flags) may be very
/// different from those in profile and release modes due to the debug overhead.
/// Therefore it's recommended to only monitor and analyze performance metrics
/// in profile and release modes.
class FrameTiming {
  /// Construct [FrameTiming] with raw timestamps in microseconds.
  ///
  /// This constructor is used for unit test only. Real [FrameTiming]s should
  /// be retrieved from [PlatformDispatcher.onReportTimings].
  ///
  /// If the [frameNumber] is not provided, it defaults to `-1`.
  factory FrameTiming({
    required int vsyncStart,
    required int buildStart,
    required int buildFinish,
    required int rasterStart,
    required int rasterFinish,
    required int rasterFinishWallTime,
    int layerCacheCount = 0,
    int layerCacheBytes = 0,
    int pictureCacheCount = 0,
    int pictureCacheBytes = 0,
    int frameNumber = -1,
  }) {
    // The backing list layout is: every [FramePhase] timestamp first (in
    // declaration order), then every [_FrameTimingInfo] value (in declaration
    // order). [_rawDuration] and [_rawInfo] both rely on this layout.
    return FrameTiming._(<int>[
      vsyncStart,
      buildStart,
      buildFinish,
      rasterStart,
      rasterFinish,
      rasterFinishWallTime,
      layerCacheCount,
      layerCacheBytes,
      pictureCacheCount,
      pictureCacheBytes,
      frameNumber,
    ]);
  }

  /// Construct [FrameTiming] with raw timestamps in microseconds.
  ///
  /// List [timestamps] must have the same number of elements as
  /// [FramePhase.values].
  ///
  /// This constructor is usually only called by the Flutter engine, or a test.
  /// To get the [FrameTiming] of your app, see [PlatformDispatcher.onReportTimings].
  FrameTiming._(this._data) : assert(_data.length == _dataLength);

  // Total length of [_data]: one slot per phase timestamp plus one per info
  // value.
  static final int _dataLength = FramePhase.values.length + _FrameTimingInfo.values.length;

  /// This is a raw timestamp in microseconds from some epoch. The epoch in all
  /// [FrameTiming] is the same, but it may not match [DateTime]'s epoch.
  int timestampInMicroseconds(FramePhase phase) => _data[phase.index];

  // Reinterprets a raw phase timestamp as a [Duration] so that phase deltas
  // can be computed with `-`.
  Duration _rawDuration(FramePhase phase) => Duration(microseconds: _data[phase.index]);

  // The [_FrameTimingInfo] values are stored after the phase timestamps in
  // [_data]; see the factory constructor above.
  int _rawInfo(_FrameTimingInfo info) => _data[FramePhase.values.length + info.index];

  /// The duration to build the frame on the UI thread.
  ///
  /// The build starts approximately when [PlatformDispatcher.onBeginFrame] is
  /// called. The [Duration] in the [PlatformDispatcher.onBeginFrame] callback
  /// is exactly the `Duration(microseconds:
  /// timestampInMicroseconds(FramePhase.buildStart))`.
  ///
  /// The build finishes when [FlutterView.render] is called.
  ///
  /// {@template dart.ui.FrameTiming.fps_smoothness_milliseconds}
  /// To ensure smooth animations of X fps, this should not exceed 1000/X
  /// milliseconds.
  /// {@endtemplate}
  /// {@template dart.ui.FrameTiming.fps_milliseconds}
  /// That's about 16ms for 60fps, and 8ms for 120fps.
  /// {@endtemplate}
  Duration get buildDuration => _rawDuration(FramePhase.buildFinish) - _rawDuration(FramePhase.buildStart);

  /// The duration to rasterize the frame on the raster thread.
  ///
  /// {@macro dart.ui.FrameTiming.fps_smoothness_milliseconds}
  /// {@macro dart.ui.FrameTiming.fps_milliseconds}
  Duration get rasterDuration => _rawDuration(FramePhase.rasterFinish) - _rawDuration(FramePhase.rasterStart);

  /// The duration between receiving the vsync signal and starting building the
  /// frame.
  Duration get vsyncOverhead => _rawDuration(FramePhase.buildStart) - _rawDuration(FramePhase.vsyncStart);

  /// The timespan between vsync start and raster finish.
  ///
  /// To achieve the lowest latency on an X fps display, this should not exceed
  /// 1000/X milliseconds.
  /// {@macro dart.ui.FrameTiming.fps_milliseconds}
  ///
  /// See also [vsyncOverhead], [buildDuration] and [rasterDuration].
  Duration get totalSpan => _rawDuration(FramePhase.rasterFinish) - _rawDuration(FramePhase.vsyncStart);

  /// The number of layers stored in the raster cache during the frame.
  ///
  /// See also [layerCacheBytes], [pictureCacheCount] and [pictureCacheBytes].
  int get layerCacheCount => _rawInfo(_FrameTimingInfo.layerCacheCount);

  /// The number of bytes of image data used to cache layers during the frame.
  ///
  /// See also [layerCacheCount], [layerCacheMegabytes], [pictureCacheCount] and [pictureCacheBytes].
  int get layerCacheBytes => _rawInfo(_FrameTimingInfo.layerCacheBytes);

  /// The number of megabytes of image data used to cache layers during the frame.
  ///
  /// See also [layerCacheCount], [layerCacheBytes], [pictureCacheCount] and [pictureCacheBytes].
  double get layerCacheMegabytes => layerCacheBytes / 1024.0 / 1024.0;

  /// The number of pictures stored in the raster cache during the frame.
  ///
  /// See also [layerCacheCount], [layerCacheBytes] and [pictureCacheBytes].
  int get pictureCacheCount => _rawInfo(_FrameTimingInfo.pictureCacheCount);

  /// The number of bytes of image data used to cache pictures during the frame.
  ///
  /// See also [layerCacheCount], [layerCacheBytes], [pictureCacheCount] and [pictureCacheMegabytes].
  int get pictureCacheBytes => _rawInfo(_FrameTimingInfo.pictureCacheBytes);

  /// The number of megabytes of image data used to cache pictures during the frame.
  ///
  /// See also [layerCacheCount], [layerCacheBytes], [pictureCacheCount] and [pictureCacheBytes].
  double get pictureCacheMegabytes => pictureCacheBytes / 1024.0 / 1024.0;

  /// The frame number associated with this frame measurement.
  ///
  /// Defaults to `-1` when not supplied to the [FrameTiming] constructor.
  // Consistency: read through _rawInfo like the other info getters rather
  // than via `_data.last` (frameNumber is the last _FrameTimingInfo value,
  // so the value is identical).
  int get frameNumber => _rawInfo(_FrameTimingInfo.frameNumber);

  final List<int> _data; // some elements in microseconds, some in bytes, some are counts

  // Formats a [Duration] as fractional milliseconds, e.g. `16.683ms`.
  String _formatMS(Duration duration) => '${duration.inMicroseconds * 0.001}ms';

  @override
  String toString() {
    return '$runtimeType(buildDuration: ${_formatMS(buildDuration)}, '
        'rasterDuration: ${_formatMS(rasterDuration)}, '
        'vsyncOverhead: ${_formatMS(vsyncOverhead)}, '
        'totalSpan: ${_formatMS(totalSpan)}, '
        'layerCacheCount: $layerCacheCount, '
        'layerCacheBytes: $layerCacheBytes, '
        'pictureCacheCount: $pictureCacheCount, '
        'pictureCacheBytes: $pictureCacheBytes, '
        'frameNumber: $frameNumber)';
  }
}

/// States that an application can be in once it is running.
///
/// States not supported on a platform will be synthesized by the framework when
/// transitioning between states which are supported, so that all
/// implementations share the same state machine.
///
/// The initial value for the state is the [detached] state, updated to the
/// current state (usually [resumed]) as soon as the first lifecycle update is
/// received from the platform.
///
/// For historical and name collision reasons, Flutter's application state names
/// do not correspond one to one with the state names on all platforms. On
/// Android, for instance, when the OS calls
/// [`Activity.onPause`](https://developer.android.com/reference/android/app/Activity#onPause()),
/// Flutter will enter the [inactive] state, but when Android calls
/// [`Activity.onStop`](https://developer.android.com/reference/android/app/Activity#onStop()),
/// Flutter enters the [paused] state. See the individual state's documentation
/// for descriptions of what they mean on each platform.
///
/// The current application state can be obtained from
/// [SchedulerBinding.instance.lifecycleState], and changes to the state can be
/// observed by creating an [AppLifecycleListener], or by using a
/// [WidgetsBindingObserver] by overriding the
/// [WidgetsBindingObserver.didChangeAppLifecycleState] method.
///
/// Applications should not rely on always receiving all possible notifications.
///
/// For example, if the application is killed with a task manager, a kill
/// signal, the user pulls the power from the device, or there is a rapid
/// unscheduled disassembly of the device, no notification will be sent before
/// the application is suddenly terminated, and some states may be skipped.
///
/// See also:
///
/// * [AppLifecycleListener], an object used to observe the lifecycle state that
///   provides state transition callbacks.
/// * [WidgetsBindingObserver], for a mechanism to observe the lifecycle state
///   from the widgets layer.
/// * iOS's [UIKit app
///   lifecycle](https://developer.apple.com/documentation/uikit/app_and_environment/managing_your_app_s_life_cycle?language=objc)
///   documentation.
/// * Android's [activity
///   lifecycle](https://developer.android.com/guide/components/activities/activity-lifecycle)
///   documentation.
/// * macOS's [AppKit activity
///   lifecycle](https://developer.apple.com/documentation/appkit/nsapplicationdelegate?language=objc)
///   documentation.
enum AppLifecycleState {
  /// The application is still hosted by a Flutter engine but is detached from
  /// any host views.
  ///
  /// The application defaults to this state before it initializes, and can be
  /// in this state (applicable on Android, iOS, and web) after all views have been
  /// detached.
  ///
  /// When the application is in this state, the engine is running without a
  /// view.
  ///
  /// This state is only entered on iOS, Android, and web, although on all platforms
  /// it is the default state before the application begins running.
  detached,

  /// On all platforms, this state indicates that the application is in the
  /// default running mode for a running application that has input focus and is
  /// visible.
  ///
  /// On Android, this state corresponds to the Flutter host view having focus
  /// ([`Activity.onWindowFocusChanged`](https://developer.android.com/reference/android/app/Activity#onWindowFocusChanged(boolean))
  /// was called with true) while in Android's "resumed" state. It is possible
  /// for the Flutter app to be in the [inactive] state while still being in
  /// Android's
  /// ["onResume"](https://developer.android.com/guide/components/activities/activity-lifecycle)
  /// state if the app has lost focus
  /// ([`Activity.onWindowFocusChanged`](https://developer.android.com/reference/android/app/Activity#onWindowFocusChanged(boolean))
  /// was called with false), but hasn't had
  /// [`Activity.onPause`](https://developer.android.com/reference/android/app/Activity#onPause())
  /// called on it.
  ///
  /// On iOS and macOS, this corresponds to the app running in the foreground
  /// active state.
  resumed,

  /// At least one view of the application is visible, but none have input
  /// focus. The application is otherwise running normally.
  ///
  /// On non-web desktop platforms, this corresponds to an application that is
  /// not in the foreground, but still has visible windows.
  ///
  /// On the web, this corresponds to an application that is running in a
  /// window or tab that does not have input focus.
  ///
  /// On iOS and macOS, this state corresponds to the Flutter host view running in the
  /// foreground inactive state. Apps transition to this state when in a phone
  /// call, when responding to a TouchID request, when entering the app switcher
  /// or the control center, or when the UIViewController hosting the Flutter
  /// app is transitioning.
  ///
  /// On Android, this corresponds to the Flutter host view running in Android's
  /// paused state (i.e.
  /// [`Activity.onPause`](https://developer.android.com/reference/android/app/Activity#onPause())
  /// has been called), or in Android's "resumed" state (i.e.
  /// [`Activity.onResume`](https://developer.android.com/reference/android/app/Activity#onResume())
  /// has been called) but does not have window focus. Examples of when apps
  /// transition to this state include when the app is partially obscured or
  /// another activity is focused, an app running in split screen that isn't
  /// the current app, an app interrupted by a phone call, a picture-in-picture
  /// app, a system dialog, another view. It will also be inactive when the
  /// notification window shade is down, or the application switcher is visible.
  ///
  /// On Android and iOS, apps in this state should assume that they may be
  /// [hidden] and [paused] at any time.
  inactive,

  /// All views of an application are hidden, either because the application is
  /// about to be paused (on iOS and Android), or because it has been minimized
  /// or placed on a desktop that is no longer visible (on non-web desktop), or
  /// is running in a window or tab that is no longer visible (on the web).
  ///
  /// On iOS and Android, in order to keep the state machine the same on all
  /// platforms, a transition to this state is synthesized before the [paused]
  /// state is entered when coming from [inactive], and before the [inactive]
  /// state is entered when coming from [paused]. This allows cross-platform
  /// implementations that want to know when an app is conceptually "hidden" to
  /// only write one handler.
  hidden,

  /// The application is not currently visible to the user, and not responding
  /// to user input.
  ///
  /// When the application is in this state, the engine will not call the
  /// [PlatformDispatcher.onBeginFrame] and [PlatformDispatcher.onDrawFrame]
  /// callbacks.
  ///
  /// This state is only entered on iOS and Android.
  paused,
}

/// The possible responses to a request to exit the application.
///
/// The request is typically responded to by creating an [AppLifecycleListener]
/// and supplying an [AppLifecycleListener.onExitRequested] callback, or by
/// overriding [WidgetsBindingObserver.didRequestAppExit].
enum AppExitResponse {
  /// Exiting the application can proceed.
  exit,

  /// Cancel the exit: do not exit the application.
  cancel,
}

/// The type of application exit to perform when calling
/// [ServicesBinding.exitApplication].
enum AppExitType {
  /// Requests that the application start an orderly exit, sending a request
  /// back to the framework through the [WidgetsBinding]. If that responds
  /// with [AppExitResponse.exit], then proceed with the same steps as a
  /// [required] exit. If that responds with [AppExitResponse.cancel], then the
  /// exit request is canceled and the application continues executing normally.
  cancelable,

  /// A non-cancelable orderly exit request. The engine will shut itself down
  /// and call the native UI toolkit's exit API.
  ///
  /// If you need an even faster and more dangerous exit, then call `dart:io`'s
  /// `exit()` directly, and even the native toolkit's exit API won't be called.
  /// This is quite dangerous, though, since it's possible that the engine will
  /// crash because it hasn't been properly shut down, causing the app to crash
  /// on exit.
  required,
}

/// A representation of distances for each of the four edges of a rectangle,
/// used to encode the view insets and padding that applications should place
/// around their user interface, as exposed by [FlutterView.viewInsets] and
/// [FlutterView.padding]. View insets and padding are preferably read via
/// [MediaQuery.of].
///
/// For a generic class that represents distances around a rectangle, see the
/// [EdgeInsets] class.
///
/// See also:
///
/// * [WidgetsBindingObserver], for a widgets layer mechanism to receive
///   notifications when the padding changes.
/// * [MediaQuery.of], for the preferred mechanism for accessing these values.
/// * [Scaffold], which automatically applies the padding in material design
///   applications.
class ViewPadding {
  // The constructor is private: outside this library only the [zero] constant
  // is available; other instances come from code within this library.
  const ViewPadding._({ required this.left, required this.top, required this.right, required this.bottom });

  /// The distance from the left edge to the first unpadded pixel, in physical pixels.
  final double left;

  /// The distance from the top edge to the first unpadded pixel, in physical pixels.
  final double top;

  /// The distance from the right edge to the first unpadded pixel, in physical pixels.
  final double right;

  /// The distance from the bottom edge to the first unpadded pixel, in physical pixels.
  final double bottom;

  /// A view padding that has zeros for each edge.
  static const ViewPadding zero = ViewPadding._(left: 0.0, top: 0.0, right: 0.0, bottom: 0.0);

  @override
  String toString() {
    return 'ViewPadding(left: $left, top: $top, right: $right, bottom: $bottom)';
  }
}

/// Deprecated. Will be removed in a future version of Flutter.
///
/// Use [ViewPadding] instead.
@Deprecated(
  'Use ViewPadding instead. '
  'This feature was deprecated after v3.8.0-14.0.pre.',
)
typedef WindowPadding = ViewPadding;

/// Immutable layout constraints for [FlutterView]s.
///
/// Similar to [BoxConstraints], a [Size] respects a [ViewConstraints] if, and
/// only if, all of the following relations hold:
///
/// * [minWidth] <= [Size.width] <= [maxWidth]
/// * [minHeight] <= [Size.height] <= [maxHeight]
///
/// The constraints themselves must satisfy these relations:
///
/// * 0.0 <= [minWidth] <= [maxWidth] <= [double.infinity]
/// * 0.0 <= [minHeight] <= [maxHeight] <= [double.infinity]
///
/// For each constraint, [double.infinity] is a legal value.
///
/// For a generic class that represents these kind of constraints, see the
/// [BoxConstraints] class.
class ViewConstraints {
  /// Creates view constraints with the given constraints.
  const ViewConstraints({
    this.minWidth = 0.0,
    this.maxWidth = double.infinity,
    this.minHeight = 0.0,
    this.maxHeight = double.infinity,
  });

  /// Creates view constraints that are respected only by the given size.
  ViewConstraints.tight(Size size)
    : minWidth = size.width,
      maxWidth = size.width,
      minHeight = size.height,
      maxHeight = size.height;

  /// The minimum width that satisfies the constraints.
  final double minWidth;

  /// The maximum width that satisfies the constraints.
  ///
  /// Might be [double.infinity].
  final double maxWidth;

  /// The minimum height that satisfies the constraints.
  final double minHeight;

  /// The maximum height that satisfies the constraints.
  ///
  /// Might be [double.infinity].
  final double maxHeight;

  /// Whether the given size satisfies the constraints.
  bool isSatisfiedBy(Size size) {
    return (minWidth <= size.width) && (size.width <= maxWidth) &&
           (minHeight <= size.height) && (size.height <= maxHeight);
  }

  /// Whether there is exactly one size that satisfies the constraints.
  bool get isTight => minWidth >= maxWidth && minHeight >= maxHeight;

  /// Scales each constraint parameter by the inverse of the given factor.
  ViewConstraints operator/(double factor) {
    return ViewConstraints(
      minWidth: minWidth / factor,
      maxWidth: maxWidth / factor,
      minHeight: minHeight / factor,
      maxHeight: maxHeight / factor,
    );
  }

  @override
  bool operator ==(Object other) {
    if (identical(this, other)) {
      return true;
    }
    if (other.runtimeType != runtimeType) {
      return false;
    }
    return other is ViewConstraints
        && other.minWidth == minWidth
        && other.maxWidth == maxWidth
        && other.minHeight == minHeight
        && other.maxHeight == maxHeight;
  }

  @override
  int get hashCode => Object.hash(minWidth, maxWidth, minHeight, maxHeight);

  @override
  String toString() {
    if (minWidth == double.infinity && minHeight == double.infinity) {
      return 'ViewConstraints(biggest)';
    }
    if (minWidth == 0 && maxWidth == double.infinity && minHeight == 0 && maxHeight == double.infinity) {
      return 'ViewConstraints(unconstrained)';
    }
    // Renders one dimension, collapsing `min<=x<=max` to `x=value` when the
    // bounds are equal (tight in that dimension).
    String describe(double min, double max, String dim) {
      if (min == max) {
        return '$dim=${min.toStringAsFixed(1)}';
      }
      return '${min.toStringAsFixed(1)}<=$dim<=${max.toStringAsFixed(1)}';
    }
    final String width = describe(minWidth, maxWidth, 'w');
    final String height = describe(minHeight, maxHeight, 'h');
    return 'ViewConstraints($width, $height)';
  }
}

/// Area of the display that may be obstructed by a hardware feature.
///
/// This is populated only on Android.
///
/// The [bounds] are measured in logical pixels. On devices with two screens the
/// coordinate system starts with (0,0) in the top-left corner of the left or top screen
/// and expands to include both screens and the visual space between them.
///
/// The [type] describes the behaviour and if [DisplayFeature] obstructs the display.
/// For example, [DisplayFeatureType.hinge] and [DisplayFeatureType.cutout] both obstruct the display,
/// while [DisplayFeatureType.fold] does not.
///
/// ![Device with a hinge display feature](https://flutter.github.io/assets-for-api-docs/assets/hardware/display_feature_hinge.png)
///
/// ![Device with a fold display feature](https://flutter.github.io/assets-for-api-docs/assets/hardware/display_feature_fold.png)
///
/// ![Device with a cutout display feature](https://flutter.github.io/assets-for-api-docs/assets/hardware/display_feature_cutout.png)
///
/// The [state] contains information about the posture for foldable features
/// ([DisplayFeatureType.hinge] and [DisplayFeatureType.fold]). The posture is
/// the shape of the display, for example [DisplayFeatureState.postureFlat] or
/// [DisplayFeatureState.postureHalfOpened]. For [DisplayFeatureType.cutout],
/// the state is not used and has the [DisplayFeatureState.unknown] value.
class DisplayFeature {
  /// Creates a [DisplayFeature] with the given [bounds], [type] and [state].
  ///
  /// Cutouts have no posture, so a [DisplayFeatureType.cutout] must be paired
  /// with [DisplayFeatureState.unknown]; the assert enforces this pairing.
  const DisplayFeature({
    required this.bounds,
    required this.type,
    required this.state,
  }) : assert(!identical(type, DisplayFeatureType.cutout) || identical(state, DisplayFeatureState.unknown));

  /// The area of the flutter view occupied by this display feature, measured in logical pixels.
  ///
  /// On devices with two screens, the Flutter view spans from the top-left corner
  /// of the left or top screen to the bottom-right corner of the right or bottom screen,
  /// including the visual area occupied by any display feature. Bounds of display
  /// features are reported in this coordinate system.
  ///
  /// For example, on a dual screen device in portrait mode:
  ///
  /// * [Rect.left] gives you the size of left screen, in logical pixels.
  /// * [Rect.right] gives you the size of the left screen + the hinge width.
  final Rect bounds;

  /// Type of display feature, e.g. hinge, fold, cutout.
  final DisplayFeatureType type;

  /// Posture of display feature, which is populated only for folds and hinges.
  ///
  /// For cutouts, this is [DisplayFeatureState.unknown].
  final DisplayFeatureState state;

  @override
  bool operator ==(Object other) {
    if (identical(this, other)) {
      return true;
    }
    if (other.runtimeType != runtimeType) {
      return false;
    }
    return other is DisplayFeature
        && bounds == other.bounds
        && type == other.type
        && state == other.state;
  }

  @override
  int get hashCode => Object.hash(bounds, type, state);

  @override
  String toString() {
    return 'DisplayFeature(rect: $bounds, type: $type, state: $state)';
  }
}

/// Type of [DisplayFeature], describing the [DisplayFeature] behaviour and if
/// it obstructs the display.
///
/// Some types of [DisplayFeature], like [DisplayFeatureType.fold], can be
/// reported without actually impeding drawing on the screen. They are useful
/// for knowing where the display is bent or has a crease. The
/// [DisplayFeature.bounds] can be 0-width in such cases.
///
/// The shape formed by the screens for types [DisplayFeatureType.fold] and
/// [DisplayFeatureType.hinge] is called the posture and is exposed in
/// [DisplayFeature.state]. For example, the [DisplayFeatureState.postureFlat] posture
/// means the screens form a flat surface.
///
/// ![Device with a hinge display feature](https://flutter.github.io/assets-for-api-docs/assets/hardware/display_feature_hinge.png)
///
/// ![Device with a fold display feature](https://flutter.github.io/assets-for-api-docs/assets/hardware/display_feature_fold.png)
///
/// ![Device with a cutout display feature](https://flutter.github.io/assets-for-api-docs/assets/hardware/display_feature_cutout.png)
enum DisplayFeatureType {
  /// [DisplayFeature] type is new and not yet known to Flutter.
  unknown,

  /// A fold in the flexible screen without a physical gap.
  ///
  /// The bounds for this display feature type indicate where the display makes a crease.
  fold,

  /// A physical separation with a hinge that allows two display panels to fold.
  hinge,

  /// A non-displaying area of the screen, usually housing cameras or sensors.
  cutout,
}

/// State of the display feature, which contains information about the posture
/// for foldable features.
///
/// The posture is the shape made by the parts of the flexible screen or
/// physical screen panels. They are inspired by and similar to
/// [Android Postures](https://developer.android.com/guide/topics/ui/foldables#postures).
///
/// * For [DisplayFeatureType.fold]s & [DisplayFeatureType.hinge]s, the state is
///   the posture.
/// * For [DisplayFeatureType.cutout]s, the state is not used and has the
///   [DisplayFeatureState.unknown] value.
enum DisplayFeatureState {
  /// The display feature is a [DisplayFeatureType.cutout] or this state is new
  /// and not yet known to Flutter.
  unknown,

  /// The foldable device is completely open.
  ///
  /// The screen space that is presented to the user is flat.
  postureFlat,

  /// Fold angle is in an intermediate position between opened and closed state.
  ///
  /// There is a non-flat angle between parts of the flexible screen or between
  /// physical screen panels such that the screens start to face each other.
  postureHalfOpened,
}

/// An identifier used to select a user's language and formatting preferences.
///
/// This represents a [Unicode Language
/// Identifier](https://www.unicode.org/reports/tr35/#Unicode_language_identifier)
/// (i.e. without Locale extensions), except variants are not supported.
///
/// Locales are canonicalized according to the "preferred value" entries in the
/// [IANA Language Subtag
/// Registry](https://www.iana.org/assignments/language-subtag-registry/language-subtag-registry).
/// For example, `const Locale('he')` and `const Locale('iw')` are equal and
/// both have the [languageCode] `he`, because `iw` is a deprecated language
/// subtag that was replaced by the subtag `he`.
/// /// See also: /// /// * [PlatformDispatcher.locale], which specifies the system's currently selected /// [Locale]. class Locale { /// Creates a new Locale object. The first argument is the /// primary language subtag, the second is the region (also /// referred to as 'country') subtag. /// /// For example: /// /// ```dart /// const Locale swissFrench = Locale('fr', 'CH'); /// const Locale canadianFrench = Locale('fr', 'CA'); /// ``` /// /// The primary language subtag must not be null. The region subtag is /// optional. When there is no region/country subtag, the parameter should /// be omitted or passed `null` instead of an empty-string. /// /// The subtag values are _case sensitive_ and must be one of the valid /// subtags according to CLDR supplemental data: /// [language](https://github.com/unicode-org/cldr/blob/master/common/validity/language.xml), /// [region](https://github.com/unicode-org/cldr/blob/master/common/validity/region.xml). The /// primary language subtag must be at least two and at most eight lowercase /// letters, but not four letters. The region subtag must be two /// uppercase letters or three digits. See the [Unicode Language /// Identifier](https://www.unicode.org/reports/tr35/#Unicode_language_identifier) /// specification. /// /// Validity is not checked by default, but some methods may throw away /// invalid data. /// /// See also: /// /// * [Locale.fromSubtags], which also allows a [scriptCode] to be /// specified. const Locale( this._languageCode, [ this._countryCode, ]) : assert(_languageCode != ''), scriptCode = null; /// Creates a new Locale object. /// /// The keyword arguments specify the subtags of the Locale. 
/// /// The subtag values are _case sensitive_ and must be valid subtags according /// to CLDR supplemental data: /// [language](https://github.com/unicode-org/cldr/blob/master/common/validity/language.xml), /// [script](https://github.com/unicode-org/cldr/blob/master/common/validity/script.xml) and /// [region](https://github.com/unicode-org/cldr/blob/master/common/validity/region.xml) for /// each of languageCode, scriptCode and countryCode respectively. /// /// The [languageCode] subtag is optional. When there is no language subtag, /// the parameter should be omitted or set to "und". When not supplied, the /// [languageCode] defaults to "und", an undefined language code. /// /// The [countryCode] subtag is optional. When there is no country subtag, /// the parameter should be omitted or passed `null` instead of an empty-string. /// /// Validity is not checked by default, but some methods may throw away /// invalid data. const Locale.fromSubtags({ String languageCode = 'und', this.scriptCode, String? countryCode, }) : assert(languageCode != ''), _languageCode = languageCode, assert(scriptCode != ''), assert(countryCode != ''), _countryCode = countryCode; /// The primary language subtag for the locale. /// /// This must not be null. It may be 'und', representing 'undefined'. /// /// This is expected to be string registered in the [IANA Language Subtag /// Registry](https://www.iana.org/assignments/language-subtag-registry/language-subtag-registry) /// with the type "language". The string specified must match the case of the /// string in the registry. /// /// Language subtags that are deprecated in the registry and have a preferred /// code are changed to their preferred code. For example, `const /// Locale('he')` and `const Locale('iw')` are equal, and both have the /// [languageCode] `he`, because `iw` is a deprecated language subtag that was /// replaced by the subtag `he`. 
/// /// This must be a valid Unicode Language subtag as listed in [Unicode CLDR /// supplemental /// data](https://github.com/unicode-org/cldr/blob/master/common/validity/language.xml). /// /// See also: /// /// * [Locale.fromSubtags], which describes the conventions for creating /// [Locale] objects. String get languageCode => _deprecatedLanguageSubtagMap[_languageCode] ?? _languageCode; final String _languageCode; // This map is generated by //flutter/tools/gen_locale.dart // Mappings generated for language subtag registry as of 2019-02-27. static const Map<String, String> _deprecatedLanguageSubtagMap = <String, String>{ 'in': 'id', // Indonesian; deprecated 1989-01-01 'iw': 'he', // Hebrew; deprecated 1989-01-01 'ji': 'yi', // Yiddish; deprecated 1989-01-01 'jw': 'jv', // Javanese; deprecated 2001-08-13 'mo': 'ro', // Moldavian, Moldovan; deprecated 2008-11-22 'aam': 'aas', // Aramanik; deprecated 2015-02-12 'adp': 'dz', // Adap; deprecated 2015-02-12 'aue': 'ktz', // ǂKxʼauǁʼein; deprecated 2015-02-12 'ayx': 'nun', // Ayi (China); deprecated 2011-08-16 'bgm': 'bcg', // Baga Mboteni; deprecated 2016-05-30 'bjd': 'drl', // Bandjigali; deprecated 2012-08-12 'ccq': 'rki', // Chaungtha; deprecated 2012-08-12 'cjr': 'mom', // Chorotega; deprecated 2010-03-11 'cka': 'cmr', // Khumi Awa Chin; deprecated 2012-08-12 'cmk': 'xch', // Chimakum; deprecated 2010-03-11 'coy': 'pij', // Coyaima; deprecated 2016-05-30 'cqu': 'quh', // Chilean Quechua; deprecated 2016-05-30 'drh': 'khk', // Darkhat; deprecated 2010-03-11 'drw': 'prs', // Darwazi; deprecated 2010-03-11 'gav': 'dev', // Gabutamon; deprecated 2010-03-11 'gfx': 'vaj', // Mangetti Dune ǃXung; deprecated 2015-02-12 'ggn': 'gvr', // Eastern Gurung; deprecated 2016-05-30 'gti': 'nyc', // Gbati-ri; deprecated 2015-02-12 'guv': 'duz', // Gey; deprecated 2016-05-30 'hrr': 'jal', // Horuru; deprecated 2012-08-12 'ibi': 'opa', // Ibilo; deprecated 2012-08-12 'ilw': 'gal', // Talur; deprecated 2013-09-10 'jeg': 'oyb', // 
Jeng; deprecated 2017-02-23 'kgc': 'tdf', // Kasseng; deprecated 2016-05-30 'kgh': 'kml', // Upper Tanudan Kalinga; deprecated 2012-08-12 'koj': 'kwv', // Sara Dunjo; deprecated 2015-02-12 'krm': 'bmf', // Krim; deprecated 2017-02-23 'ktr': 'dtp', // Kota Marudu Tinagas; deprecated 2016-05-30 'kvs': 'gdj', // Kunggara; deprecated 2016-05-30 'kwq': 'yam', // Kwak; deprecated 2015-02-12 'kxe': 'tvd', // Kakihum; deprecated 2015-02-12 'kzj': 'dtp', // Coastal Kadazan; deprecated 2016-05-30 'kzt': 'dtp', // Tambunan Dusun; deprecated 2016-05-30 'lii': 'raq', // Lingkhim; deprecated 2015-02-12 'lmm': 'rmx', // Lamam; deprecated 2014-02-28 'meg': 'cir', // Mea; deprecated 2013-09-10 'mst': 'mry', // Cataelano Mandaya; deprecated 2010-03-11 'mwj': 'vaj', // Maligo; deprecated 2015-02-12 'myt': 'mry', // Sangab Mandaya; deprecated 2010-03-11 'nad': 'xny', // Nijadali; deprecated 2016-05-30 'ncp': 'kdz', // Ndaktup; deprecated 2018-03-08 'nnx': 'ngv', // Ngong; deprecated 2015-02-12 'nts': 'pij', // Natagaimas; deprecated 2016-05-30 'oun': 'vaj', // ǃOǃung; deprecated 2015-02-12 'pcr': 'adx', // Panang; deprecated 2013-09-10 'pmc': 'huw', // Palumata; deprecated 2016-05-30 'pmu': 'phr', // Mirpur Panjabi; deprecated 2015-02-12 'ppa': 'bfy', // Pao; deprecated 2016-05-30 'ppr': 'lcq', // Piru; deprecated 2013-09-10 'pry': 'prt', // Pray 3; deprecated 2016-05-30 'puz': 'pub', // Purum Naga; deprecated 2014-02-28 'sca': 'hle', // Sansu; deprecated 2012-08-12 'skk': 'oyb', // Sok; deprecated 2017-02-23 'tdu': 'dtp', // Tempasuk Dusun; deprecated 2016-05-30 'thc': 'tpo', // Tai Hang Tong; deprecated 2016-05-30 'thx': 'oyb', // The; deprecated 2015-02-12 'tie': 'ras', // Tingal; deprecated 2011-08-16 'tkk': 'twm', // Takpa; deprecated 2011-08-16 'tlw': 'weo', // South Wemale; deprecated 2012-08-12 'tmp': 'tyj', // Tai Mène; deprecated 2016-05-30 'tne': 'kak', // Tinoc Kallahan; deprecated 2016-05-30 'tnf': 'prs', // Tangshewi; deprecated 2010-03-11 'tsf': 'taj', // Southwestern 
Tamang; deprecated 2015-02-12 'uok': 'ema', // Uokha; deprecated 2015-02-12 'xba': 'cax', // Kamba (Brazil); deprecated 2016-05-30 'xia': 'acn', // Xiandao; deprecated 2013-09-10 'xkh': 'waw', // Karahawyana; deprecated 2016-05-30 'xsj': 'suj', // Subi; deprecated 2015-02-12 'ybd': 'rki', // Yangbye; deprecated 2012-08-12 'yma': 'lrr', // Yamphe; deprecated 2012-08-12 'ymt': 'mtm', // Mator-Taygi-Karagas; deprecated 2015-02-12 'yos': 'zom', // Yos; deprecated 2013-09-10 'yuu': 'yug', // Yugh; deprecated 2014-02-28 }; /// The script subtag for the locale. /// /// This may be null, indicating that there is no specified script subtag. /// /// This must be a valid Unicode Language Identifier script subtag as listed /// in [Unicode CLDR supplemental /// data](https://github.com/unicode-org/cldr/blob/master/common/validity/script.xml). /// /// See also: /// /// * [Locale.fromSubtags], which describes the conventions for creating /// [Locale] objects. final String? scriptCode; /// The region subtag for the locale. /// /// This may be null, indicating that there is no specified region subtag. /// /// This is expected to be string registered in the [IANA Language Subtag /// Registry](https://www.iana.org/assignments/language-subtag-registry/language-subtag-registry) /// with the type "region". The string specified must match the case of the /// string in the registry. /// /// Region subtags that are deprecated in the registry and have a preferred /// code are changed to their preferred code. For example, `const Locale('de', /// 'DE')` and `const Locale('de', 'DD')` are equal, and both have the /// [countryCode] `DE`, because `DD` is a deprecated language subtag that was /// replaced by the subtag `DE`. /// /// See also: /// /// * [Locale.fromSubtags], which describes the conventions for creating /// [Locale] objects. String? get countryCode => _deprecatedRegionSubtagMap[_countryCode] ?? _countryCode; final String? 
_countryCode; // This map is generated by //flutter/tools/gen_locale.dart // Mappings generated for language subtag registry as of 2019-02-27. static const Map<String, String> _deprecatedRegionSubtagMap = <String, String>{ 'BU': 'MM', // Burma; deprecated 1989-12-05 'DD': 'DE', // German Democratic Republic; deprecated 1990-10-30 'FX': 'FR', // Metropolitan France; deprecated 1997-07-14 'TP': 'TL', // East Timor; deprecated 2002-05-20 'YD': 'YE', // Democratic Yemen; deprecated 1990-08-14 'ZR': 'CD', // Zaire; deprecated 1997-07-14 }; @override bool operator ==(Object other) { if (identical(this, other)) { return true; } if (other is! Locale) { return false; } final String? thisCountryCode = countryCode; final String? otherCountryCode = other.countryCode; return other.languageCode == languageCode && other.scriptCode == scriptCode // scriptCode cannot be '' && (other.countryCode == thisCountryCode // Treat '' as equal to null. || otherCountryCode != null && otherCountryCode.isEmpty && thisCountryCode == null || thisCountryCode != null && thisCountryCode.isEmpty && other.countryCode == null); } @override int get hashCode => Object.hash(languageCode, scriptCode, countryCode == '' ? null : countryCode); static Locale? _cachedLocale; static String? _cachedLocaleString; /// Returns a string representing the locale. /// /// This identifier happens to be a valid Unicode Locale Identifier using /// underscores as separator, however it is intended to be used for debugging /// purposes only. For parsable results, use [toLanguageTag] instead. @keepToString @override String toString() { if (!identical(_cachedLocale, this)) { _cachedLocale = this; _cachedLocaleString = _rawToString('_'); } return _cachedLocaleString!; } /// Returns a syntactically valid Unicode BCP47 Locale Identifier. /// /// Some examples of such identifiers: "en", "es-419", "hi-Deva-IN" and /// "zh-Hans-CN". See http://www.unicode.org/reports/tr35/ for technical /// details. 
String toLanguageTag() => _rawToString('-'); String _rawToString(String separator) { final StringBuffer out = StringBuffer(languageCode); if (scriptCode != null && scriptCode!.isNotEmpty) { out.write('$separator$scriptCode'); } final String? countryCode = _countryCode; if (countryCode != null && countryCode.isNotEmpty) { out.write('$separator${this.countryCode}'); } return out.toString(); } } /// Various performance modes for tuning the Dart VM's GC performance. /// /// For the editor of this enum, please keep the order in sync with `Dart_PerformanceMode` /// in [dart_api.h](https://github.com/dart-lang/sdk/blob/main/runtime/include/dart_api.h#L1302). enum DartPerformanceMode { /// This is the default mode that the Dart VM is in. balanced, /// Optimize for low latency, at the expense of throughput and memory overhead /// by performing work in smaller batches (requiring more overhead) or by /// delaying work (requiring more memory). An embedder should not remain in /// this mode indefinitely. latency, /// Optimize for high throughput, at the expense of latency and memory overhead /// by performing work in larger batches with more intervening growth. throughput, /// Optimize for low memory, at the expensive of throughput and latency by more /// frequently performing work. memory, } /// An event to request a [SemanticsAction] of [type] to be performed on the /// [SemanticsNode] identified by [nodeId] owned by the [FlutterView] identified /// by [viewId]. /// /// Used by [SemanticsBinding.performSemanticsAction]. class SemanticsActionEvent { /// Creates a [SemanticsActionEvent]. const SemanticsActionEvent({ required this.type, required this.viewId, required this.nodeId, this.arguments, }); /// The type of action to be performed. final SemanticsAction type; /// The id of the [FlutterView] the [SemanticsNode] identified by [nodeId] is /// associated with. final int viewId; /// The id of the [SemanticsNode] on which the action is to be performed. 
final int nodeId; /// Optional arguments for the action. final Object? arguments; static const Object _noArgumentPlaceholder = Object(); /// Create a clone of the [SemanticsActionEvent] but with provided parameters /// replaced. SemanticsActionEvent copyWith({ SemanticsAction? type, int? viewId, int? nodeId, Object? arguments = _noArgumentPlaceholder, }) { return SemanticsActionEvent( type: type ?? this.type, viewId: viewId ?? this.viewId, nodeId: nodeId ?? this.nodeId, arguments: arguments == _noArgumentPlaceholder ? this.arguments : arguments, ); } } /// Signature for [PlatformDispatcher.onViewFocusChange]. typedef ViewFocusChangeCallback = void Function(ViewFocusEvent viewFocusEvent); /// An event for the engine to communicate view focus changes to the app. /// /// This value will be typically passed to the [PlatformDispatcher.onViewFocusChange] /// callback. final class ViewFocusEvent { /// Creates a [ViewFocusChange]. const ViewFocusEvent({ required this.viewId, required this.state, required this.direction, }); /// The ID of the [FlutterView] that experienced a focus change. final int viewId; /// The state focus changed to. final ViewFocusState state; /// The direction focus changed to. final ViewFocusDirection direction; @override String toString() { return 'ViewFocusEvent(viewId: $viewId, state: $state, direction: $direction)'; } } /// Represents the focus state of a given [FlutterView]. /// /// When focus is lost, the view's focus state changes to [ViewFocusState.unfocused]. /// /// When focus is gained, the view's focus state changes to [ViewFocusState.focused]. /// /// Valid transitions within a view are: /// /// - [ViewFocusState.focused] to [ViewFocusState.unfocused]. /// - [ViewFocusState.unfocused] to [ViewFocusState.focused]. /// /// See also: /// /// * [ViewFocusDirection], that specifies the focus direction. /// * [ViewFocusEvent], that conveys information about a [FlutterView] focus change. 
enum ViewFocusState { /// Specifies that a view does not have platform focus. unfocused, /// Specifies that a view has platform focus. focused, } /// Represents the direction in which the focus transitioned across [FlutterView]s. /// /// See also: /// /// * [ViewFocusState], that specifies the current focus state of a [FlutterView]. /// * [ViewFocusEvent], that conveys information about a [FlutterView] focus change. enum ViewFocusDirection { /// Indicates the focus transition did not have a direction. /// /// This is typically associated with focus being programmatically requested or /// when focus is lost. undefined, /// Indicates the focus transition was performed in a forward direction. /// /// This is typically result of the user pressing tab. forward, /// Indicates the focus transition was performed in a backward direction. /// /// This is typically result of the user pressing shift + tab. backward, }
engine/lib/ui/platform_dispatcher.dart/0
{ "file_path": "engine/lib/ui/platform_dispatcher.dart", "repo_id": "engine", "token_count": 34187 }
283
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "flutter/lib/ui/semantics/string_attribute.h" #include "flutter/fml/logging.h" #include "flutter/lib/ui/ui_dart_state.h" #include "third_party/tonic/dart_args.h" #include "third_party/tonic/dart_binding_macros.h" #include <memory> #include <utility> namespace flutter { IMPLEMENT_WRAPPERTYPEINFO(ui, NativeStringAttribute); NativeStringAttribute::NativeStringAttribute() {} NativeStringAttribute::~NativeStringAttribute() {} void NativeStringAttribute::initSpellOutStringAttribute( Dart_Handle string_attribute_handle, int32_t start, int32_t end) { UIDartState::ThrowIfUIOperationsProhibited(); auto native_string_attribute = fml::MakeRefCounted<NativeStringAttribute>(); native_string_attribute->AssociateWithDartWrapper(string_attribute_handle); native_string_attribute->attribute_ = std::make_shared<SpellOutStringAttribute>(); native_string_attribute->attribute_->start = start; native_string_attribute->attribute_->end = end; native_string_attribute->attribute_->type = StringAttributeType::kSpellOut; } void NativeStringAttribute::initLocaleStringAttribute( Dart_Handle string_attribute_handle, int32_t start, int32_t end, std::string locale) { UIDartState::ThrowIfUIOperationsProhibited(); auto native_string_attribute = fml::MakeRefCounted<NativeStringAttribute>(); native_string_attribute->AssociateWithDartWrapper(string_attribute_handle); auto locale_attribute = std::make_shared<LocaleStringAttribute>(); locale_attribute->start = start; locale_attribute->end = end; locale_attribute->type = StringAttributeType::kLocale; locale_attribute->locale = std::move(locale); native_string_attribute->attribute_ = locale_attribute; } const StringAttributePtr NativeStringAttribute::GetAttribute() const { return attribute_; } } // namespace flutter
engine/lib/ui/semantics/string_attribute.cc/0
{ "file_path": "engine/lib/ui/semantics/string_attribute.cc", "repo_id": "engine", "token_count": 620 }
284
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef FLUTTER_LIB_UI_UI_DART_STATE_H_
#define FLUTTER_LIB_UI_UI_DART_STATE_H_

#include <memory>
#include <string>
#include <utility>

#include "flutter/common/settings.h"
#include "flutter/common/task_runners.h"
#include "flutter/fml/build_config.h"
#include "flutter/fml/memory/weak_ptr.h"
#include "flutter/fml/synchronization/waitable_event.h"
#include "flutter/lib/ui/io_manager.h"
#include "flutter/lib/ui/isolate_name_server/isolate_name_server.h"
#include "flutter/lib/ui/painting/image_decoder.h"
#include "flutter/lib/ui/snapshot_delegate.h"
#include "flutter/lib/ui/volatile_path_tracker.h"
#include "flutter/shell/common/platform_message_handler.h"
#include "impeller/runtime_stage/runtime_stage.h"
#include "third_party/dart/runtime/include/dart_api.h"
#include "third_party/skia/include/gpu/GrDirectContext.h"
#include "third_party/tonic/dart_microtask_queue.h"
#include "third_party/tonic/dart_persistent_value.h"
#include "third_party/tonic/dart_state.h"

namespace flutter {
class FontSelector;
class ImageGeneratorRegistry;
class PlatformConfiguration;
class PlatformMessage;

// Per-isolate engine state attached to a Dart isolate via tonic::DartState.
// Exposes shell/engine-owned resources (task runners, IO manager, decoders,
// etc.) to native bindings running inside the isolate.
class UIDartState : public tonic::DartState {
 public:
  // Returns the UIDartState associated with the Dart isolate bound to the
  // calling thread.
  static UIDartState* Current();

  /// @brief The subset of state which is owned by the shell or engine
  ///        and passed through the RuntimeController into DartIsolates.
  ///        If a shell-owned resource needs to be exposed to the framework via
  ///        UIDartState, a pointer to the resource can be added to this
  ///        struct with appropriate default construction.
  struct Context {
    explicit Context(const TaskRunners& task_runners);

    Context(const TaskRunners& task_runners,
            fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate,
            fml::WeakPtr<IOManager> io_manager,
            fml::RefPtr<SkiaUnrefQueue> unref_queue,
            fml::WeakPtr<ImageDecoder> image_decoder,
            fml::WeakPtr<ImageGeneratorRegistry> image_generator_registry,
            std::string advisory_script_uri,
            std::string advisory_script_entrypoint,
            std::shared_ptr<VolatilePathTracker> volatile_path_tracker,
            std::shared_ptr<fml::ConcurrentTaskRunner> concurrent_task_runner,
            bool enable_impeller,
            impeller::RuntimeStageBackend runtime_stage_backend);

    /// The task runners used by the shell hosting this runtime controller.
    /// This may be used by the isolate to scheduled asynchronous texture
    /// uploads or post tasks to the platform task runner.
    const TaskRunners task_runners;

    /// The snapshot delegate used by the isolate to gather raster snapshots
    /// of Flutter view hierarchies.
    fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate;

    /// The IO manager used by the isolate for asynchronous texture uploads.
    fml::WeakPtr<IOManager> io_manager;

    /// The unref queue used by the isolate to collect resources that may
    /// reference resources on the GPU.
    fml::RefPtr<SkiaUnrefQueue> unref_queue;

    /// The image decoder.
    fml::WeakPtr<ImageDecoder> image_decoder;

    /// Cascading registry of image generator builders. Given compressed image
    /// bytes as input, this is used to find and create image generators, which
    /// can then be used for image decoding.
    fml::WeakPtr<ImageGeneratorRegistry> image_generator_registry;

    /// The advisory script URI (only used for debugging). This does not affect
    /// the code being run in the isolate in any way.
    std::string advisory_script_uri;

    /// The advisory script entrypoint (only used for debugging). This does not
    /// affect the code being run in the isolate in any way. The isolate must
    /// be transitioned to the running state explicitly by the caller.
    std::string advisory_script_entrypoint;

    /// Cache for tracking path volatility.
    std::shared_ptr<VolatilePathTracker> volatile_path_tracker;

    /// The task runner whose tasks may be executed concurrently on a pool
    /// of shared worker threads.
    std::shared_ptr<fml::ConcurrentTaskRunner> concurrent_task_runner;

    /// Whether Impeller is enabled or not.
    bool enable_impeller = false;

    /// The expected backend for runtime stage shaders.
    impeller::RuntimeStageBackend runtime_stage_backend;
  };

  // The Dart port of this isolate's main port (ILLEGAL_PORT until the isolate
  // has been set, see DidSetIsolate).
  Dart_Port main_port() const { return main_port_; }

  // Whether this is the root isolate of the VM application.
  bool IsRootIsolate() const { return is_root_isolate_; }

  // Throws a Dart exception if UI operations are not allowed on the calling
  // isolate (used as a guard at the top of native UI bindings).
  static void ThrowIfUIOperationsProhibited();

  void SetDebugName(const std::string& name);

  const std::string& debug_name() const { return debug_name_; }

  const std::string& logger_prefix() const { return logger_prefix_; }

  // May be null until SetPlatformConfiguration has been called.
  PlatformConfiguration* platform_configuration() const {
    return platform_configuration_.get();
  }

  void SetPlatformMessageHandler(std::weak_ptr<PlatformMessageHandler> handler);

  // Forwards a platform message from the isolate to the registered
  // PlatformMessageHandler.
  Dart_Handle HandlePlatformMessage(std::unique_ptr<PlatformMessage> message);

  const TaskRunners& GetTaskRunners() const;

  // Enqueues a Dart closure on this isolate's microtask queue.
  void ScheduleMicrotask(Dart_Handle handle);

  // Drains the microtask queue synchronously.
  void FlushMicrotasksNow();

  fml::WeakPtr<IOManager> GetIOManager() const;

  fml::RefPtr<flutter::SkiaUnrefQueue> GetSkiaUnrefQueue() const;

  std::shared_ptr<VolatilePathTracker> GetVolatilePathTracker() const;

  std::shared_ptr<fml::ConcurrentTaskRunner> GetConcurrentTaskRunner() const;

  fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> GetSnapshotDelegate() const;

  fml::WeakPtr<ImageDecoder> GetImageDecoder() const;

  fml::WeakPtr<ImageGeneratorRegistry> GetImageGeneratorRegistry() const;

  std::shared_ptr<IsolateNameServer> GetIsolateNameServer() const;

  tonic::DartErrorHandleType GetLastError();

  // Logs `print` messages from the application via an embedder-specified
  // logging mechanism.
  //
  // @param[in] tag     A component name or tag that identifies the logging
  //                    application.
  // @param[in] message The message to be logged.
  void LogMessage(const std::string& tag, const std::string& message) const;

  UnhandledExceptionCallback unhandled_exception_callback() const {
    return unhandled_exception_callback_;
  }

  /// Returns an enumeration that uniquely represents this root isolate.
  /// Returns `0` if called from a non-root isolate.
  int64_t GetRootIsolateToken() const;

  /// Whether Impeller is enabled for this application.
  bool IsImpellerEnabled() const;

  /// The expected type for runtime stage shaders.
  impeller::RuntimeStageBackend GetRuntimeStageBackend() const;

  // Creates a new platform isolate sharing this state's configuration. On
  // failure, returns a null isolate and populates `error`.
  virtual Dart_Isolate CreatePlatformIsolate(Dart_Handle entry_point,
                                             char** error);

 protected:
  UIDartState(TaskObserverAdd add_callback,
              TaskObserverRemove remove_callback,
              std::string logger_prefix,
              UnhandledExceptionCallback unhandled_exception_callback,
              LogMessageCallback log_message_callback,
              std::shared_ptr<IsolateNameServer> isolate_name_server,
              bool is_root_isolate_,
              const UIDartState::Context& context);

  ~UIDartState() override;

  void SetPlatformConfiguration(
      std::unique_ptr<PlatformConfiguration> platform_configuration);

  const std::string& GetAdvisoryScriptURI() const;

 private:
  // tonic::DartState override; invoked once the Dart isolate is bound to this
  // state object.
  void DidSetIsolate() override;

  const TaskObserverAdd add_callback_;
  const TaskObserverRemove remove_callback_;
  const std::string logger_prefix_;
  Dart_Port main_port_ = ILLEGAL_PORT;
  const bool is_root_isolate_;
  std::string debug_name_;
  std::unique_ptr<PlatformConfiguration> platform_configuration_;
  std::weak_ptr<PlatformMessageHandler> platform_message_handler_;
  tonic::DartMicrotaskQueue microtask_queue_;
  UnhandledExceptionCallback unhandled_exception_callback_;
  LogMessageCallback log_message_callback_;
  const std::shared_ptr<IsolateNameServer> isolate_name_server_;
  UIDartState::Context context_;

  void AddOrRemoveTaskObserver(bool add);
};

}  // namespace flutter

#endif  // FLUTTER_LIB_UI_UI_DART_STATE_H_
engine/lib/ui/ui_dart_state.h/0
{ "file_path": "engine/lib/ui/ui_dart_state.h", "repo_id": "engine", "token_count": 2706 }
285
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef FLUTTER_LIB_UI_WINDOW_PLATFORM_MESSAGE_RESPONSE_H_ #define FLUTTER_LIB_UI_WINDOW_PLATFORM_MESSAGE_RESPONSE_H_ #include <vector> #include "flutter/fml/mapping.h" #include "flutter/fml/memory/ref_counted.h" #include "flutter/fml/memory/ref_ptr.h" namespace flutter { class PlatformMessageResponse : public fml::RefCountedThreadSafe<PlatformMessageResponse> { FML_FRIEND_REF_COUNTED_THREAD_SAFE(PlatformMessageResponse); public: // Callable on any thread. virtual void Complete(std::unique_ptr<fml::Mapping> data) = 0; virtual void CompleteEmpty() = 0; bool is_complete() const { return is_complete_; } protected: PlatformMessageResponse(); virtual ~PlatformMessageResponse(); bool is_complete_ = false; }; } // namespace flutter #endif // FLUTTER_LIB_UI_WINDOW_PLATFORM_MESSAGE_RESPONSE_H_
engine/lib/ui/window/platform_message_response.h/0
{ "file_path": "engine/lib/ui/window/platform_message_response.h", "repo_id": "engine", "token_count": 348 }
286
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef FLUTTER_LIB_UI_WINDOW_VIEWPORT_METRICS_H_
#define FLUTTER_LIB_UI_WINDOW_VIEWPORT_METRICS_H_

#include <ostream>
#include <vector>

namespace flutter {

// Plain-data description of a view's geometry: size, paddings, insets,
// gesture insets, touch slop, and display features, all in physical pixels.
struct ViewportMetrics {
  ViewportMetrics();

  // Abbreviated constructor covering only size, pixel ratio, touch slop and
  // display id; all paddings/insets keep their zero defaults.
  // NOTE(review): the last parameter lacks the `p_` prefix used everywhere
  // else — confirm against the .cc before renaming for consistency.
  ViewportMetrics(double p_device_pixel_ratio,
                  double p_physical_width,
                  double p_physical_height,
                  double p_physical_touch_slop,
                  size_t display_id);

  // Full constructor setting every field explicitly. The three
  // display-feature vectors are parallel arrays (bounds are grouped per
  // feature; type/state carry one entry per feature).
  ViewportMetrics(double p_device_pixel_ratio,
                  double p_physical_width,
                  double p_physical_height,
                  double p_physical_padding_top,
                  double p_physical_padding_right,
                  double p_physical_padding_bottom,
                  double p_physical_padding_left,
                  double p_physical_view_inset_top,
                  double p_physical_view_inset_right,
                  double p_physical_view_inset_bottom,
                  double p_physical_view_inset_left,
                  double p_physical_system_gesture_inset_top,
                  double p_physical_system_gesture_inset_right,
                  double p_physical_system_gesture_inset_bottom,
                  double p_physical_system_gesture_inset_left,
                  double p_physical_touch_slop,
                  const std::vector<double>& p_physical_display_features_bounds,
                  const std::vector<int>& p_physical_display_features_type,
                  const std::vector<int>& p_physical_display_features_state,
                  size_t p_display_id);

  // Ratio of physical pixels to logical pixels.
  double device_pixel_ratio = 1.0;
  // View size in physical pixels.
  double physical_width = 0;
  double physical_height = 0;
  // Padding (area obscured by system UI) per edge, physical pixels.
  double physical_padding_top = 0;
  double physical_padding_right = 0;
  double physical_padding_bottom = 0;
  double physical_padding_left = 0;
  // View insets (e.g. area covered by an on-screen keyboard) per edge.
  double physical_view_inset_top = 0;
  double physical_view_inset_right = 0;
  double physical_view_inset_bottom = 0;
  double physical_view_inset_left = 0;
  // Edges reserved for system gestures.
  double physical_system_gesture_inset_top = 0;
  double physical_system_gesture_inset_right = 0;
  double physical_system_gesture_inset_bottom = 0;
  double physical_system_gesture_inset_left = 0;
  // Touch slop in physical pixels; -1.0 presumably means "not provided by the
  // platform" — TODO confirm against consumers of this field.
  double physical_touch_slop = -1.0;
  // Parallel arrays describing display features (hinges, cutouts, ...).
  std::vector<double> physical_display_features_bounds;
  std::vector<int> physical_display_features_type;
  std::vector<int> physical_display_features_state;
  // Identifier of the display this view is shown on.
  size_t display_id = 0;
};

bool operator==(const ViewportMetrics& a, const ViewportMetrics& b);
std::ostream& operator<<(std::ostream& os, const ViewportMetrics& a);

}  // namespace flutter

#endif  // FLUTTER_LIB_UI_WINDOW_VIEWPORT_METRICS_H_
engine/lib/ui/window/viewport_metrics.h/0
{ "file_path": "engine/lib/ui/window/viewport_metrics.h", "repo_id": "engine", "token_count": 1194 }
287
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:async';
import 'dart:io' as io;

import 'package:args/command_runner.dart';
import 'package:path/path.dart' as path;

import 'environment.dart';
import 'utils.dart';

/// A felt command that deletes build caches and artifacts produced by the
/// web_ui build, and optionally the ninja output and engine `.dart_tool`
/// directories.
class CleanCommand extends Command<bool> with ArgUtils<bool> {
  CleanCommand() {
    argParser
      ..addFlag(
        'flutter',
        defaultsTo: true,
        help: 'Cleans up the .dart_tool directory under engine/src/flutter. Enabled by default.',
      )
      ..addFlag(
        'ninja',
        help: 'Also clean up the engine out directory with ninja output. Disabled by default.',
      );
  }

  @override
  String get name => 'clean';

  /// Whether `--ninja` was passed: also delete the engine out directory.
  bool get _alsoCleanNinja => boolArg('ninja');

  /// Whether `--flutter` was passed (on by default): also delete the engine
  /// repo's `.dart_tool` directory.
  bool get _alsoCleanFlutterRepo => boolArg('flutter');

  @override
  String get description => 'Deletes build caches and artifacts.';

  /// Deletes all clean targets that exist, concurrently, and returns true.
  @override
  FutureOr<bool> run() async {
    // This is the old path that tests used to be built into. Clean this path too.
    final String legacyBuildPath = path.join(environment.webUiRootDir.path, 'build');
    final List<io.FileSystemEntity> thingsToBeCleaned = <io.FileSystemEntity>[
      environment.webUiDartToolDir,
      environment.webUiBuildDir,
      io.Directory(legacyBuildPath),
      io.File(path.join(environment.webUiRootDir.path, '.dart_tool', 'package_config.json')),
      io.File(path.join(environment.webUiRootDir.path, 'pubspec.lock')),
      if (_alsoCleanNinja) environment.outDir,
      if (_alsoCleanFlutterRepo) environment.engineDartToolDir,
    ];

    // Only delete entities that actually exist; deletions run concurrently.
    await Future.wait(
      thingsToBeCleaned
          .where((io.FileSystemEntity entity) => entity.existsSync())
          .map((io.FileSystemEntity entity) => entity.delete(recursive: true)),
    );
    return true;
  }
}
engine/lib/web_ui/dev/clean.dart/0
{ "file_path": "engine/lib/web_ui/dev/clean.dart", "repo_id": "engine", "token_count": 700 }
288
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:io' as io; import 'package:args/args.dart'; import 'package:http/http.dart'; import 'package:path/path.dart' as path; import 'cipd.dart'; import 'common.dart'; import 'package_lock.dart'; import 'utils.dart'; final ArgParser _argParser = ArgParser(allowTrailingOptions: false) ..addFlag( 'dry-run', help: 'Whether or not to push changes to CIPD. When --dry-run is set, the ' 'script will download everything and attempt to prepare the bundle ' 'but will stop before publishing. When not set, the bundle will be ' 'published.', negatable: false, )..addFlag( 'verbose', abbr: 'v', help: 'Enable verbose output.', negatable: false, ); late final bool dryRun; late final bool verbose; final Client _client = Client(); /// Rolls browser CIPD packages to the version specified in `package_lock.yaml`. /// /// Currently only rolls Chrome. /// /// Chrome rolls are consumed by the "chrome_and_driver" and "chrome" LUCI recipes, here: /// * https://cs.opensource.google/flutter/recipes/+/main:recipe_modules/flutter_deps/api.py;l=146 /// * https://cs.opensource.google/flutter/recipes/+/master:recipe_modules/web_util/api.py;l=22 /// /// Chromedriver is consumed by the same "chrome_and_driver" LUCI recipe, but also "chrome_driver": /// * https://cs.opensource.google/flutter/recipes/+/master:recipe_modules/web_util/api.py;l=48 /// /// There's a small difference in the layout of the zip file coming from CIPD for the /// Mac platform. In `Linux` and `Windows`, the chrome(.exe) executable is expected /// to be placed directly in the root of the zip file. /// /// However in `Mac`, the `Chromium.app` is expected to be placed inside of a /// `chrome-mac` directory in the resulting zip file. /// /// This script respects that historical quirk when building the CIPD packages. 
/// In order for all the packages to be the same, the recipes listed above should
/// be made slightly smarter, so they can find the CHROME_EXECUTABLE in the right
/// place.
///
/// All platforms expect the "chromedriver" executable to be placed in the root
/// of the CIPD zip.
Future<void> main(List<String> args) async {
  try {
    processArgs(_argParser.parse(args));
    await _PackageRoller().roll();
    io.exitCode = 0;
  } on FormatException catch (e) {
    print('''
Error! ${e.message}

Available options:

${_argParser.usage}
''');
    io.exitCode = 1;
  } finally {
    // Always release the shared HTTP client, even when the roll fails.
    _client.close();
  }
}

// Initialize globals from the parsed command-line arguments.
void processArgs(ArgResults args) {
  dryRun = args['dry-run'] as bool;
  verbose = args['verbose'] as bool;
}

/// An (OS, architecture) pair plus the [PlatformBinding] that knows the
/// download URLs for that platform.
class _Platform {
  _Platform(this.os, this.arch, this.binding);

  final String os;
  final String arch;
  final PlatformBinding binding;

  /// CIPD-style platform name, e.g. "linux-amd64".
  String get name => '$os-$arch';
}

/// Downloads browser/tool archives, repackages them in the directory layout
/// the LUCI recipes expect, and uploads the results to CIPD.
class _PackageRoller {
  _PackageRoller();

  // Scratch space for downloads/extraction. Deleted at the end of a
  // non-dry-run roll; kept for inspection in dry-run mode.
  final io.Directory _rollDir = io.Directory.systemTemp.createTempSync('browser-roll-');

  final List<_Platform> _platforms = <_Platform>[
    _Platform('linux', 'amd64', LinuxPlatformBinding()),
    _Platform('mac', 'amd64', Macx64PlatformBinding()),
    _Platform('mac', 'arm64', MacArmPlatformBinding()),
    _Platform('windows', 'amd64', WindowsPlatformBinding()),
  ];

  // Pinned artifact versions read from package_lock.yaml.
  final PackageLock _lock = PackageLock();

  // Prints output when --verbose is set.
  void vprint(String out) {
    if (verbose) {
      print(out);
    }
  }

  // Roll Chromium, ChromeDriver, Firefox (Linux only), and esbuild for each
  // of the Platforms.
  Future<void> roll() async {
    for (final _Platform platform in _platforms) {
      await _rollChromium(platform);
      await _rollChromeDriver(platform);
      // For now, we only test Firefox on Linux.
      if (platform.os == 'linux') {
        await _rollFirefox(platform);
      }
      await _rollEsbuild(platform);
    }
    if (dryRun) {
      print('\nDry Run Done!\nNon-published roll artifacts kept here: ${_rollDir.path}\n');
    } else {
      // Clean-up
      vprint('\nDeleting temporary directory: ${_rollDir.path}');
      await _rollDir.delete(recursive: true);
      print('\nDone.\n');
    }
  }

  // Download a file from the internet, and put it in a temporary location.
  Future<io.File> _downloadTemporaryFile(String url) async {
    // Use the hash of the Url to temporarily store a file under tmp
    final io.File downloadedFile = io.File(path.join(
      io.Directory.systemTemp.path,
      'download_${url.hashCode.toRadixString(16)}',
    ));
    vprint(' Downloading [$url] into [${downloadedFile.path}]');
    final StreamedResponse download = await _client.send(
      Request('GET', Uri.parse(url)),
    );
    await download.stream.pipe(downloadedFile.openWrite());
    return downloadedFile;
  }

  // Unzips a `file` into a `destination` Directory (must exist).
  Future<void> _unzipAndDeleteFile(io.File zipFile, io.Directory destination) async {
    vprint(' Unzipping [${zipFile.path}] into [$destination]');
    await runProcess('unzip', <String>[
      if (!verbose) ...<String>[
        '-q',
      ],
      zipFile.path,
      '-d',
      destination.path,
    ]);
    vprint(' Deleting [${zipFile.path}]');
    await zipFile.delete();
  }

  // Uncompresses a `file` into a `destination` Directory (must exist).
  Future<void> _uncompressAndDeleteFile(io.File tarFile, io.Directory destination) async {
    vprint(' Uncompressing [${tarFile.path}] into [$destination]');
    final io.ProcessResult unzipResult = await io.Process.run('tar', <String>[
      '-x',
      '-f',
      tarFile.path,
      '-C',
      destination.path,
    ]);
    if (unzipResult.exitCode != 0) {
      throw StateError(
          'Failed to unzip the downloaded archive ${tarFile.path}.\n'
          'The unzip process exited with code ${unzipResult.exitCode}.');
    }
    vprint(' Deleting [${tarFile.path}]');
    await tarFile.delete();
  }

  // Locate the first subdirectory that contains more than one file under `root`.
  // (or one ".app" bundle for mac)
  //
  // When uncompressing files, unzip might create some extra directories, but it
  // seems that our scripts want our CIPD packages to contain everything in the root.
  Future<io.Directory?> _locateContentRoot(io.Directory root) async {
    final List<io.FileSystemEntity> children = root.listSync(followLinks: false);
    assert(children.isNotEmpty);
    if (root.path.toLowerCase().endsWith('.app')) {
      // We've gone inside the .app bundle of the mac version!
      return root.parent;
    }
    if (children.length == 1) {
      if (children.first is io.Directory) {
        return _locateContentRoot(children.first as io.Directory);
      } else {
        return root;
      }
    }
    return root;
  }

  // Downloads Chromium from the internet, packs it in the directory structure
  // that the LUCI script wants. The result of this will be then uploaded to CIPD.
  Future<void> _rollChromium(_Platform platform) async {
    final String version = _lock.chromeLock.version;
    final String url = platform.binding.getChromeDownloadUrl(version);
    final String cipdPackageName = 'flutter_internal/browsers/chrome/${platform.name}';
    final io.Directory platformDir = io.Directory(path.join(_rollDir.path, platform.name));
    print('\nRolling Chromium for ${platform.name} (version:$version)');

    // Bail out if CIPD already has version:$majorVersion for this package!
    if (!dryRun && await cipdKnowsPackageVersion(
      package: cipdPackageName,
      versionTag: version,
      isVerbose: verbose
    )) {
      print(' Skipping $cipdPackageName version:$version. Already uploaded to CIPD!');
      vprint(' Update package_lock.yaml and use a different version value.');
      return;
    }

    await platformDir.create(recursive: true);
    vprint(' Created target directory [${platformDir.path}]');

    final io.File chromeDownload = await _downloadTemporaryFile(url);
    await _unzipAndDeleteFile(chromeDownload, platformDir);

    final io.Directory? actualContentRoot = await _locateContentRoot(platformDir);
    assert(actualContentRoot != null);
    final String relativePlatformDirPath = path.relative(actualContentRoot!.path, from: _rollDir.path);

    vprint(' Uploading Chromium (${platform.name}) to CIPD...');
    await uploadDirectoryToCipd(
      directory: _rollDir,
      packageName: cipdPackageName,
      configFileName: 'cipd.chromium.${platform.name}.yaml',
      description: 'Chromium $version used for testing',
      version: version,
      root: relativePlatformDirPath,
      isDryRun: dryRun,
      isVerbose: verbose,
    );
  }

  // Downloads Chromedriver from the internet, packs it in the directory structure
  // that the LUCI script wants. The result of this will be then uploaded to CIPD.
  Future<void> _rollChromeDriver(_Platform platform) async {
    final String version = _lock.chromeLock.version;
    final String url = platform.binding.getChromeDriverDownloadUrl(version);
    final String cipdPackageName = 'flutter_internal/browser-drivers/chrome/${platform.name}';
    final io.Directory platformDir = io.Directory(path.join(_rollDir.path, '${platform.name}_driver'));
    print('\nRolling Chromedriver for ${platform.os}-${platform.arch} (version:$version)');

    // Bail out if CIPD already has version:$majorVersion for this package!
    if (!dryRun && await cipdKnowsPackageVersion(
      package: cipdPackageName,
      versionTag: version,
      isVerbose: verbose
    )) {
      print(' Skipping $cipdPackageName version:$version. Already uploaded to CIPD!');
      vprint(' Update package_lock.yaml and use a different version value.');
      return;
    }

    await platformDir.create(recursive: true);
    vprint(' Created target directory [${platformDir.path}]');

    final io.File chromedriverDownload = await _downloadTemporaryFile(url);
    await _unzipAndDeleteFile(chromedriverDownload, platformDir);

    // Ensure the chromedriver executable is placed in the root of the bundle.
    final io.Directory? actualContentRoot = await _locateContentRoot(platformDir);
    assert(actualContentRoot != null);
    final String relativePlatformDirPath = path.relative(actualContentRoot!.path, from: _rollDir.path);

    vprint(' Uploading Chromedriver (${platform.name}) to CIPD...');
    await uploadDirectoryToCipd(
      directory: _rollDir,
      packageName: cipdPackageName,
      configFileName: 'cipd.chromedriver.${platform.name}.yaml',
      description: 'Chromedriver for Chromium $version used for testing',
      version: version,
      root: relativePlatformDirPath,
      isDryRun: dryRun,
      isVerbose: verbose,
    );
  }

  // Downloads Firefox from the internet, packs it in the directory structure
  // that the LUCI script wants. The result of this will be then uploaded to CIPD.
  Future<void> _rollFirefox(_Platform platform) async {
    final String version = _lock.firefoxLock.version;
    final String url = platform.binding.getFirefoxDownloadUrl(version);
    final String cipdPackageName = 'flutter_internal/browsers/firefox/${platform.name}';
    // BUG FIX: this used to extract into `platform.name`, the exact directory
    // `_rollChromium` already populated with the Chromium archive earlier in
    // the same roll (the directory is not cleared between rolls). The Firefox
    // tarball would land on top of the Chromium files, and the mixed contents
    // would be uploaded to CIPD. A dedicated subdirectory isolates the two.
    final io.Directory platformDir = io.Directory(path.join(_rollDir.path, '${platform.name}_firefox'));
    print('\nRolling Firefox for ${platform.name} (version:$version)');

    // Bail out if CIPD already has version:$majorVersion for this package!
    if (!dryRun && await cipdKnowsPackageVersion(
      package: cipdPackageName,
      versionTag: version,
      isVerbose: verbose
    )) {
      print(' Skipping $cipdPackageName version:$version. Already uploaded to CIPD!');
      vprint(' Update package_lock.yaml and use a different version value.');
      return;
    }

    await platformDir.create(recursive: true);
    vprint(' Created target directory [${platformDir.path}]');

    final io.File firefoxDownload = await _downloadTemporaryFile(url);
    await _uncompressAndDeleteFile(firefoxDownload, platformDir);

    final io.Directory? actualContentRoot = await _locateContentRoot(platformDir);
    assert(actualContentRoot != null);
    final String relativePlatformDirPath = path.relative(actualContentRoot!.path, from: _rollDir.path);

    vprint(' Uploading Firefox (${platform.name}) to CIPD...');
    await uploadDirectoryToCipd(
      directory: _rollDir,
      packageName: cipdPackageName,
      configFileName: 'cipd.firefox.${platform.name}.yaml',
      description: 'Firefox $version used for testing',
      version: version,
      root: relativePlatformDirPath,
      isDryRun: dryRun,
      isVerbose: verbose,
    );
  }

  // Downloads esbuild from the internet, writes its license file, and uploads
  // the result to CIPD.
  Future<void> _rollEsbuild(_Platform platform) async {
    final String version = _lock.esbuildLock.version;
    final String url = platform.binding.getEsbuildDownloadUrl(version);
    final String cipdPackageName = 'flutter/tools/esbuild/${platform.name}';
    // BUG FIX: this used to extract into `platform.name`, colliding with the
    // directory `_rollChromium` (and, on Linux, `_rollFirefox`) already used.
    // A dedicated subdirectory keeps the esbuild payload isolated.
    final io.Directory platformDir = io.Directory(path.join(_rollDir.path, '${platform.name}_esbuild'));
    print('\nRolling esbuild for ${platform.name} (version:$version)');

    // Bail out if CIPD already has version:$majorVersion for this package!
    if (!dryRun && await cipdKnowsPackageVersion(
      package: cipdPackageName,
      versionTag: version,
      isVerbose: verbose
    )) {
      print(' Skipping $cipdPackageName version:$version. Already uploaded to CIPD!');
      vprint(' Update package_lock.yaml and use a different version value.');
      return;
    }

    await platformDir.create(recursive: true);
    vprint(' Created target directory [${platformDir.path}]');

    final io.File esbuildDownload = await _downloadTemporaryFile(url);
    await _uncompressAndDeleteFile(esbuildDownload, platformDir);

    // The esbuild tarball always unpacks into a "package" subdirectory.
    final String packageDir = path.join(platformDir.path, 'package');

    // Write out the license file from the github repo.
    // Copied from https://github.com/evanw/esbuild/blob/main/LICENSE.md
    final io.File licenseFile = io.File(path.join(
      packageDir,
      'LICENSE.md',
    ));
    licenseFile..createSync()..writeAsStringSync('''
MIT License

Copyright (c) 2020 Evan Wallace

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''');

    vprint(' Uploading esbuild (${platform.name}) to CIPD...');
    // NOTE(review): unlike the other rolls, `root` here is an absolute path
    // (`packageDir`) rather than a path relative to `_rollDir` — confirm that
    // `uploadDirectoryToCipd` accepts both forms before changing it.
    await uploadDirectoryToCipd(
      directory: _rollDir,
      packageName: cipdPackageName,
      configFileName: 'cipd.esbuild.${platform.name}.yaml',
      description: 'esbuild used by the flutter engine for bundling JavaScript',
      version: version,
      root: packageDir,
      isDryRun: dryRun,
      isVerbose: verbose,
    );
  }
}
engine/lib/web_ui/dev/package_roller.dart/0
{ "file_path": "engine/lib/web_ui/dev/package_roller.dart", "repo_id": "engine", "token_count": 5281 }
289
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Returns `true` when running on a Blink-based browser (Chrome, Edge,
// Opera, ...).
const isBlink = () => {
  // Chromium-based browsers report "Google Inc." as the vendor; Edge is
  // additionally identified by the "Edg/" token in its user agent string.
  //
  // BUG FIX: this previously compared the nonexistent `navigator.agent`
  // property (always `undefined`) to 'Edg/', so the Edge clause could never
  // match. The "Edg/" token lives in `navigator.userAgent`.
  return (navigator.vendor === 'Google Inc.') ||
      navigator.userAgent.includes('Edg/');
}

// Returns `true` when the browser exposes an `ImageDecoder` implementation
// that we are willing to use.
const hasImageCodecs = () => {
  if (typeof ImageDecoder === 'undefined') {
    return false;
  }
  // TODO(yjbanov): https://github.com/flutter/flutter/issues/122761
  // Frequently, when a browser launches an API that other browsers already
  // support, there are subtle incompatibilities that may cause apps to crash
  // if we blindly adopt the new implementation. This check prevents us from
  // picking up potentially incompatible implementations of the ImageDecoder
  // API. Instead, when a new browser engine launches the API, we'll evaluate
  // it and enable it explicitly.
  return isBlink();
}

// Returns `true` when both the Chromium-only `Intl.v8BreakIterator` and the
// standard `Intl.Segmenter` APIs are available.
const hasChromiumBreakIterators = () => {
  return (typeof Intl.v8BreakIterator !== "undefined") &&
      (typeof Intl.Segmenter !== "undefined");
}

// Returns `true` when the browser implements the final WasmGC spec.
const supportsWasmGC = () => {
  // This attempts to instantiate a wasm module that only will validate if the
  // final WasmGC spec is implemented in the browser.
  //
  // Copied from https://github.com/GoogleChromeLabs/wasm-feature-detect/blob/main/src/detectors/gc/index.js
  const bytes = [0, 97, 115, 109, 1, 0, 0, 0, 1, 5, 1, 95, 1, 120, 0];
  return WebAssembly.validate(new Uint8Array(bytes));
}

/**
 * Snapshot of browser capabilities, computed once at module load.
 *
 * @returns {import("./types").BrowserEnvironment}
 */
export const browserEnvironment = {
  hasImageCodecs: hasImageCodecs(),
  hasChromiumBreakIterators: hasChromiumBreakIterators(),
  supportsWasmGC: supportsWasmGC(),
  crossOriginIsolated: window.crossOriginIsolated,
};
engine/lib/web_ui/flutter_js/src/browser_environment.js/0
{ "file_path": "engine/lib/web_ui/flutter_js/src/browser_environment.js", "repo_id": "engine", "token_count": 534 }
290
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// TODO(yjbanov): rename this file to web_only_api.dart.
// https://github.com/flutter/flutter/issues/100394
// Rather than extending this file with new APIs, we
// should instead use js interop.

// This file contains extra web-only API that non-web engines do not have.
//
// Why have web-only API?
//
// Because all Dart code is compiled (and tree shaken) as a single compilation
// unit it only has one entry-point - the `main()` function of the Flutter
// app. The top-level `main()` is generated by Flutter tool and it needs to
// ask the engine to initialize itself before calling the handwritten `main()`
// function of the app itself. To do that, it needs something to call. The
// mobile engine doesn't provide a function like that because the application
// starts from the Java (Android) or Objective-C (iOS). Any initial
// configuration can be done in Java and the engine's C++ code prior to
// calling Dart's `main()`.

// Every member below is a deprecated shim that forwards to the equivalent
// `dart:ui_web` API. Each prints its deprecation warning from inside an
// `assert(...)`, so the warning only appears in builds with asserts enabled
// (e.g. debug); release builds skip the warning entirely.

part of ui;

// TODO(mdebbar): Deprecate this and remove it.
// https://github.com/flutter/flutter/issues/127395
/// Deprecated: forwards to `ui_web.bootstrapEngine`.
Future<void> webOnlyWarmupEngine({
  VoidCallback? registerPlugins,
  VoidCallback? runApp,
}) {
  assert(() {
    engine.printWarning(
      'The webOnlyWarmupEngine API is deprecated and will be removed in a '
      'future release. Please use `bootstrapEngine` from `dart:ui_web` instead.',
    );
    return true;
  }());
  return ui_web.bootstrapEngine(
    registerPlugins: registerPlugins,
    runApp: runApp,
  );
}

// TODO(mdebbar): Deprecate this and remove it.
// https://github.com/flutter/flutter/issues/127395
/// Deprecated: forwards to `ui_web.debugEmulateFlutterTesterEnvironment`.
bool get debugEmulateFlutterTesterEnvironment {
  assert(() {
    engine.printWarning(
      'The debugEmulateFlutterTesterEnvironment getter is deprecated and will '
      'be removed in a future release. Please use '
      '`debugEmulateFlutterTesterEnvironment` from `dart:ui_web` instead.',
    );
    return true;
  }());
  return ui_web.debugEmulateFlutterTesterEnvironment;
}

// TODO(mdebbar): Deprecate this and remove it.
// https://github.com/flutter/flutter/issues/127395
/// Deprecated: forwards to the `ui_web.debugEmulateFlutterTesterEnvironment`
/// setter.
set debugEmulateFlutterTesterEnvironment(bool value) {
  assert(() {
    engine.printWarning(
      'The debugEmulateFlutterTesterEnvironment setter is deprecated and will '
      'be removed in a future release. Please use '
      '`debugEmulateFlutterTesterEnvironment` from `dart:ui_web` instead.',
    );
    return true;
  }());
  ui_web.debugEmulateFlutterTesterEnvironment = value;
}

// TODO(mdebbar): Deprecate this and remove it.
// https://github.com/flutter/flutter/issues/127395
/// Deprecated: forwards to `ui_web.assetManager`.
ui_web.AssetManager get webOnlyAssetManager {
  assert(() {
    engine.printWarning(
      'The webOnlyAssetManager getter is deprecated and will be removed in a '
      'future release. Please use `assetManager` from `dart:ui_web` instead.',
    );
    return true;
  }());
  return ui_web.assetManager;
}

// TODO(mdebbar): Deprecate this and remove it.
// https://github.com/flutter/flutter/issues/127395
/// Deprecated: forwards to `ui_web.setPluginHandler`.
void webOnlySetPluginHandler(PlatformMessageCallback handler) {
  assert(() {
    engine.printWarning(
      'The webOnlySetPluginHandler API is deprecated and will be removed in a '
      'future release. Please use `setPluginHandler` from `dart:ui_web` instead.',
    );
    return true;
  }());
  ui_web.setPluginHandler(handler);
}

// TODO(mdebbar): Deprecate this and remove it.
// https://github.com/flutter/flutter/issues/127395
/// Deprecated: forwards to `ui_web.platformViewRegistry`.
ui_web.PlatformViewRegistry get platformViewRegistry {
  assert(() {
    engine.printWarning(
      'The platformViewRegistry getter is deprecated and will be removed in a '
      'future release. Please import it from `dart:ui_web` instead.',
    );
    return true;
  }());
  return ui_web.platformViewRegistry;
}
engine/lib/web_ui/lib/initialization.dart/0
{ "file_path": "engine/lib/web_ui/lib/initialization.dart", "repo_id": "engine", "token_count": 1310 }
291
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:math' as math; import 'dart:typed_data'; import 'package:meta/meta.dart'; import 'package:ui/ui.dart' as ui; import 'browser_detection.dart'; import 'display.dart'; import 'dom.dart'; import 'engine_canvas.dart'; import 'html/bitmap_canvas.dart'; import 'html/painting.dart'; import 'html/path/conic.dart'; import 'html/path/path.dart'; import 'html/path/path_ref.dart'; import 'html/path/path_utils.dart'; import 'html/picture.dart'; import 'html/shaders/image_shader.dart'; import 'html/shaders/shader.dart'; import 'rrect_renderer.dart'; import 'safe_browser_api.dart'; import 'shadow.dart'; import 'util.dart'; import 'vector_math.dart'; /// Renders picture to a CanvasElement by allocating and caching 0 or more /// canvas(s) for [BitmapCanvas]. /// /// [BitmapCanvas] signals allocation of first canvas using allocateCanvas. /// When a painting command such as drawImage or drawParagraph requires /// multiple canvases for correct compositing, it calls [closeCurrentCanvas] /// and adds the canvas(s) to [_activeCanvasList]. /// /// To make sure transformations and clips are preserved correctly when a new /// canvas is allocated, [CanvasPool] replays the current stack on the newly /// allocated canvas. It also maintains a [_saveContextCount] so that /// the context stack can be reinitialized to default when reused in the future. /// /// On a subsequent repaint, when a Picture determines that a [BitmapCanvas] /// can be reused, [CanvasPool] will move canvas(s) from pool to reusablePool /// to prevent reallocation. class CanvasPool extends _SaveStackTracking { /// Initializes canvas pool for target size and dpi. CanvasPool(this._widthInBitmapPixels, this._heightInBitmapPixels, this._density); DomCanvasRenderingContext2D? _context; ContextStateHandle? 
_contextHandle; final int _widthInBitmapPixels, _heightInBitmapPixels; // List of canvases that have been allocated and used in this paint cycle. List<DomCanvasElement>? _activeCanvasList; // List of canvases available to reuse from prior paint cycle. List<DomCanvasElement>? _reusablePool; // Current canvas element or null if marked for lazy allocation. DomCanvasElement? _canvas; DomHTMLElement? _rootElement; int _saveContextCount = 0; final double _density; /// Initializes canvas pool to be hosted on a surface. void mount(DomHTMLElement rootElement) { _rootElement = rootElement; } /// Sets the translate transform to be applied to canvas to compensate for /// pixel padding applied to hosting [BitmapCanvas]. /// /// Should be called during initialization after [CanvasPool] is mounted. set initialTransform(ui.Offset transform) { translate(transform.dx, transform.dy); } /// Returns [CanvasRenderingContext2D] api to draw into this canvas. DomCanvasRenderingContext2D get context { DomCanvasRenderingContext2D? ctx = _context; if (ctx == null) { _createCanvas(); ctx = _context; assert(_context != null); assert(_canvas != null); } return ctx!; } /// Returns [ContextStateHandle] API to efficiently update state of /// drawing context. ContextStateHandle get contextHandle { if (_canvas == null) { _createCanvas(); assert(_context != null); assert(_canvas != null); } return _contextHandle!; } /// Returns true if a canvas is currently available for drawing. /// /// Calling [contextHandle] or, transitively, any of the `draw*` methods while /// this returns true will reuse the existing canvas. Otherwise, a new canvas /// will be allocated. /// /// Previously allocated and closed canvases (see [closeCanvas]) are not /// considered by this getter. bool get hasCanvas => _canvas != null; /// Stops the currently available canvas from receiving any further drawing /// commands. 
/// /// After calling this method, a subsequent call to [contextHandle] or, /// transitively, any of the `draw*` methods will cause a new canvas to be /// allocated. /// /// The closed canvas becomes an "active" canvas, that is a canvas that's used /// to render picture content in the current frame. Active canvases may be /// reused in other pictures if their contents are no longer needed for this /// picture. void closeCanvas() { assert(_rootElement != null); // Place clean copy of current canvas with context stack restored and paint // reset into pool. if (_canvas != null) { _restoreContextSave(); _contextHandle!.reset(); _activeCanvasList ??= <DomCanvasElement>[]; _activeCanvasList!.add(_canvas!); _canvas = null; _context = null; _contextHandle = null; } } void _createCanvas() { bool requiresClearRect = false; bool reused = false; DomCanvasElement? canvas; if (_canvas != null) { _canvas!.width = 0; _canvas!.height = 0; _canvas = null; } if (_reusablePool != null && _reusablePool!.isNotEmpty) { canvas = _canvas = _reusablePool!.removeAt(0); requiresClearRect = true; reused = true; } else { // Compute the final CSS canvas size given the actual pixel count we // allocated. This is done for the following reasons: // // * To satisfy the invariant: pixel size = css size * device pixel ratio. // * To make sure that when we scale the canvas by devicePixelRatio (see // _initializeViewport below) the pixels line up. final double cssWidth = _widthInBitmapPixels / EngineFlutterDisplay.instance.browserDevicePixelRatio; final double cssHeight = _heightInBitmapPixels / EngineFlutterDisplay.instance.browserDevicePixelRatio; canvas = _allocCanvas(_widthInBitmapPixels, _heightInBitmapPixels); _canvas = canvas; // Why is this null check here, even though we just allocated a canvas element above? // // On iOS Safari, if you alloate too many canvases, the browser will stop allocating them // and return null instead. 
If that happens, we evict canvases from the cache, giving the // browser more memory to allocate a new canvas. if (_canvas == null) { // Evict BitmapCanvas(s) and retry. reduceCanvasMemoryUsage(); canvas = _allocCanvas(_widthInBitmapPixels, _heightInBitmapPixels); } canvas!.style ..position = 'absolute' ..width = '${cssWidth}px' ..height = '${cssHeight}px'; } // Before appending canvas, check if canvas is already on rootElement. This // optimization prevents DOM .append call when a PersistentSurface is // reused. Reading lastChild is faster than append call. if (_rootElement!.lastChild != canvas) { _rootElement!.append(canvas); } try { if (reused) { // If a canvas is the first element we set z-index = -1 in [BitmapCanvas] // endOfPaint to workaround blink compositing bug. To make sure this // does not leak when reused reset z-index. canvas.style.removeProperty('z-index'); } _context = canvas.context2D; } catch (e) { // Handle OOM. } if (_context == null) { reduceCanvasMemoryUsage(); _context = canvas.context2D; } if (_context == null) { /// Browser ran out of memory, try to recover current allocation /// and bail. _canvas?.width = 0; _canvas?.height = 0; _canvas = null; return; } _contextHandle = ContextStateHandle(this, _context!, _density); _initializeViewport(requiresClearRect); _replayClipStack(); } DomCanvasElement? _allocCanvas(int width, int height) { // The dartdocs for `tryCreateCanvasElement` on why we don't use the // `DomCanvasElement` constructor. return tryCreateCanvasElement( (width * _density).ceil(), (height * _density).ceil(), ); } @override void clear() { super.clear(); if (_canvas != null) { // Restore to the state where we have only applied the scaling. final DomCanvasRenderingContext2D? 
ctx = _context; if (ctx != null) { try { ctx.font = ''; } catch (e) { // Firefox may explode here: // https://bugzilla.mozilla.org/show_bug.cgi?id=941146 if (!isNsErrorFailureException(e)) { rethrow; } } } } reuse(); } int _replaySingleSaveEntry(int clipDepth, Matrix4 prevTransform, Matrix4 transform, List<SaveClipEntry>? clipStack) { final DomCanvasRenderingContext2D ctx = context; if (clipStack != null) { for (final int clipCount = clipStack.length; clipDepth < clipCount; clipDepth++) { final SaveClipEntry clipEntry = clipStack[clipDepth]; final Matrix4 clipTimeTransform = clipEntry.currentTransform; // If transform for entry recording change since last element, update. // Comparing only matrix3 elements since Canvas API restricted. if (clipTimeTransform[0] != prevTransform[0] || clipTimeTransform[1] != prevTransform[1] || clipTimeTransform[4] != prevTransform[4] || clipTimeTransform[5] != prevTransform[5] || clipTimeTransform[12] != prevTransform[12] || clipTimeTransform[13] != prevTransform[13]) { final double ratio = dpi; ctx.setTransform(ratio, 0, 0, ratio, 0, 0); ctx.transform( clipTimeTransform[0], clipTimeTransform[1], clipTimeTransform[4], clipTimeTransform[5], clipTimeTransform[12], clipTimeTransform[13]); prevTransform = clipTimeTransform; } if (clipEntry.rect != null) { _clipRect(ctx, clipEntry.rect!); } else if (clipEntry.rrect != null) { _clipRRect(ctx, clipEntry.rrect!); } else if (clipEntry.path != null) { final SurfacePath path = clipEntry.path! as SurfacePath; _runPath(ctx, path); if (path.fillType == ui.PathFillType.nonZero) { ctx.clip(); } else { ctx.clip('evenodd'); } } } } // If transform was changed between last clip operation and save call, // update. 
if (transform[0] != prevTransform[0] || transform[1] != prevTransform[1] || transform[4] != prevTransform[4] || transform[5] != prevTransform[5] || transform[12] != prevTransform[12] || transform[13] != prevTransform[13]) { final double ratio = dpi; ctx.setTransform(ratio, 0, 0, ratio, 0, 0); ctx.transform(transform[0], transform[1], transform[4], transform[5], transform[12], transform[13]); } return clipDepth; } void _replayClipStack() { // Replay save/clip stack on this canvas now. final DomCanvasRenderingContext2D ctx = context; int clipDepth = 0; Matrix4 prevTransform = Matrix4.identity(); final int len = _saveStack.length; for (int saveStackIndex = 0; saveStackIndex < len; saveStackIndex++) { final SaveStackEntry saveEntry = _saveStack[saveStackIndex]; clipDepth = _replaySingleSaveEntry( clipDepth, prevTransform, saveEntry.transform, saveEntry.clipStack); prevTransform = saveEntry.transform; ctx.save(); ++_saveContextCount; } _replaySingleSaveEntry( clipDepth, prevTransform, _currentTransform, clipStack); } /// Marks this pool for reuse. void reuse() { if (_canvas != null) { _restoreContextSave(); _contextHandle!.reset(); _activeCanvasList ??= <DomCanvasElement>[]; _activeCanvasList!.add(_canvas!); _context = null; _contextHandle = null; } _reusablePool = _activeCanvasList; _activeCanvasList = null; _canvas = null; _context = null; _contextHandle = null; _resetTransform(); } /// Signals to canvas pool the end of drawing commands so cached resources /// that are reused from last instance can be cleanup. void endOfPaint() { if (_reusablePool != null) { for (final DomCanvasElement e in _reusablePool!) 
{ if (browserEngine == BrowserEngine.webkit) { e.width = e.height = 0; } e.remove(); } _reusablePool = null; } _restoreContextSave(); } void _restoreContextSave() { while (_saveContextCount != 0) { _context!.restore(); --_saveContextCount; } } /// Configures the canvas such that its coordinate system follows the scene's /// coordinate system, and the pixel ratio is applied such that CSS pixels are /// translated to bitmap pixels. void _initializeViewport(bool clearCanvas) { final DomCanvasRenderingContext2D ctx = context; // Save the canvas state with top-level transforms so we can undo // any clips later when we reuse the canvas. ctx.save(); ++_saveContextCount; // We always start with identity transform because the surrounding transform // is applied on the DOM elements. ctx.setTransform(1, 0, 0, 1, 0, 0); if (clearCanvas) { ctx.clearRect(0, 0, _widthInBitmapPixels * _density, _heightInBitmapPixels * _density); } // This scale makes sure that 1 CSS pixel is translated to the correct // number of bitmap pixels. ctx.scale(dpi, dpi); } /// Returns effective dpi (browser DPI and pixel density due to transform). double get dpi => EngineFlutterDisplay.instance.browserDevicePixelRatio * _density; void _resetTransform() { final DomCanvasElement? canvas = _canvas; if (canvas != null) { canvas.style.transformOrigin = ''; canvas.style.transform = ''; } } /// Returns a "data://" URI containing a representation of the image in this /// canvas in PNG format. 
String toDataUrl() { if (_canvas == null) { _createCanvas(); } return _canvas!.toDataURL(); } @override void save() { super.save(); if (_canvas != null) { context.save(); ++_saveContextCount; } } @override void restore() { super.restore(); if (_canvas != null) { context.restore(); contextHandle.reset(); --_saveContextCount; } } @override void translate(double dx, double dy) { super.translate(dx, dy); if (_canvas != null) { context.translate(dx, dy); } } @override void scale(double sx, double sy) { super.scale(sx, sy); if (_canvas != null) { context.scale(sx, sy); } } @override void rotate(double radians) { super.rotate(radians); if (_canvas != null) { context.rotate(radians); } } @override void skew(double sx, double sy) { super.skew(sx, sy); if (_canvas != null) { context.transform(1, sy, sx, 1, 0, 0); // | | | | | | // | | | | | f - vertical translation // | | | | e - horizontal translation // | | | d - vertical scaling // | | c - horizontal skewing // | b - vertical skewing // a - horizontal scaling // // Source: https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/transform } } @override void transform(Float32List matrix4) { super.transform(matrix4); // Canvas2D transform API: // // ctx.transform(a, b, c, d, e, f); // // In 3x3 matrix form assuming vector representation of (x, y, 1): // // a c e // b d f // 0 0 1 // // This translates to 4x4 matrix with vector representation of (x, y, z, 1) // as: // // a c 0 e // b d 0 f // 0 0 1 0 // 0 0 0 1 // // This matrix is sufficient to represent 2D rotates, translates, scales, // and skews. 
if (_canvas != null) { context.transform(matrix4[0], matrix4[1], matrix4[4], matrix4[5], matrix4[12], matrix4[13]); } } @override void clipRect(ui.Rect rect) { super.clipRect(rect); if (_canvas != null) { _clipRect(context, rect); } } void _clipRect(DomCanvasRenderingContext2D ctx, ui.Rect rect) { ctx.beginPath(); ctx.rect(rect.left, rect.top, rect.width, rect.height); ctx.clip(); } @override void clipRRect(ui.RRect rrect) { super.clipRRect(rrect); if (_canvas != null) { _clipRRect(context, rrect); } } void _clipRRect(DomCanvasRenderingContext2D ctx, ui.RRect rrect) { final ui.Path path = ui.Path()..addRRect(rrect); _runPath(ctx, path as SurfacePath); ctx.clip(); } @override void clipPath(ui.Path path) { super.clipPath(path); if (_canvas != null) { final DomCanvasRenderingContext2D ctx = context; _runPath(ctx, path as SurfacePath); if (path.fillType == ui.PathFillType.nonZero) { ctx.clip(); } else { ctx.clip('evenodd'); } } } /// Fill a virtually infinite rect with a color and optional blendMode. void drawColor(ui.Color color, ui.BlendMode blendMode) { final DomCanvasRenderingContext2D ctx = context; contextHandle.blendMode = blendMode; contextHandle.fillStyle = color.toCssString(); contextHandle.strokeStyle = ''; ctx.beginPath(); // Fill a virtually infinite rect with the color. // // We can't use (0, 0, width, height) because the current transform can // cause it to not fill the entire clip. ctx.fillRect(-10000, -10000, 20000, 20000); } /// Fill a virtually infinite rect with the color. void fill() { final DomCanvasRenderingContext2D ctx = context; ctx.beginPath(); // We can't use (0, 0, width, height) because the current transform can // cause it to not fill the entire clip. ctx.fillRect(-10000, -10000, 20000, 20000); } /// Draws a line from [p1] to [p2]. void strokeLine(ui.Offset p1, ui.Offset p2) { final DomCanvasRenderingContext2D ctx = context; ctx.beginPath(); final ui.Rect? 
shaderBounds = contextHandle._shaderBounds; if (shaderBounds == null) { ctx.moveTo(p1.dx, p1.dy); ctx.lineTo(p2.dx, p2.dy); } else { ctx.moveTo(p1.dx - shaderBounds.left, p1.dy - shaderBounds.top); ctx.lineTo(p2.dx - shaderBounds.left, p2.dy - shaderBounds.top); } ctx.stroke(); } /// Draws a set of points with given radius, lines between points or /// a polygon. void drawPoints(ui.PointMode pointMode, Float32List points, double radius) { final DomCanvasRenderingContext2D ctx = context; final int len = points.length; final ui.Rect? shaderBounds = contextHandle._shaderBounds; final double offsetX = shaderBounds == null ? 0 : -shaderBounds.left; final double offsetY = shaderBounds == null ? 0 : -shaderBounds.top; switch (pointMode) { case ui.PointMode.points: for (int i = 0; i < len; i += 2) { final double x = points[i] + offsetX; final double y = points[i + 1] + offsetY; ctx.beginPath(); ctx.arc(x, y, radius, 0, 2.0 * math.pi); ctx.fill(); } case ui.PointMode.lines: ctx.beginPath(); for (int i = 0; i < (len - 2); i += 4) { ctx.moveTo(points[i] + offsetX, points[i + 1] + offsetY); ctx.lineTo(points[i + 2] + offsetX, points[i + 3] + offsetY); ctx.stroke(); } case ui.PointMode.polygon: ctx.beginPath(); ctx.moveTo(points[0] + offsetX, points[1] + offsetY); for (int i = 2; i < len; i += 2) { ctx.lineTo(points[i] + offsetX, points[i + 1] + offsetY); } ctx.stroke(); } } // Float buffer used for path iteration. static final Float32List _runBuffer = Float32List(PathRefIterator.kMaxBufferSize); /// 'Runs' the given [path] by applying all of its commands to the canvas. 
void _runPath(DomCanvasRenderingContext2D ctx, SurfacePath path) { ctx.beginPath(); final Float32List p = _runBuffer; final PathRefIterator iter = PathRefIterator(path.pathRef); int verb = 0; while ((verb = iter.next(p)) != SPath.kDoneVerb) { switch (verb) { case SPath.kMoveVerb: ctx.moveTo(p[0], p[1]); case SPath.kLineVerb: ctx.lineTo(p[2], p[3]); case SPath.kCubicVerb: ctx.bezierCurveTo(p[2], p[3], p[4], p[5], p[6], p[7]); case SPath.kQuadVerb: ctx.quadraticCurveTo(p[2], p[3], p[4], p[5]); case SPath.kConicVerb: final double w = iter.conicWeight; final Conic conic = Conic(p[0], p[1], p[2], p[3], p[4], p[5], w); final List<ui.Offset> points = conic.toQuads(); final int len = points.length; for (int i = 1; i < len; i += 2) { final double p1x = points[i].dx; final double p1y = points[i].dy; final double p2x = points[i + 1].dx; final double p2y = points[i + 1].dy; ctx.quadraticCurveTo(p1x, p1y, p2x, p2y); } case SPath.kCloseVerb: ctx.closePath(); default: throw UnimplementedError('Unknown path verb $verb'); } } } /// Draws a rectangle filled or stroked based on [style]. void drawRect(ui.Rect rect, ui.PaintingStyle? style) { context.beginPath(); final ui.Rect? shaderBounds = contextHandle._shaderBounds; if (shaderBounds == null) { context.rect(rect.left, rect.top, rect.width, rect.height); } else { context.rect(rect.left - shaderBounds.left, rect.top - shaderBounds.top, rect.width, rect.height); } contextHandle.paint(style); } /// Applies path to drawing context, preparing for fill and other operations. /// /// WARNING: Don't refactor _runPath/_runPathWithOffset. 
  /// Latency sensitive.
  void _runPathWithOffset(DomCanvasRenderingContext2D ctx, SurfacePath path,
      double offsetX, double offsetY) {
    ctx.beginPath();
    final Float32List p = _runBuffer;
    final PathRefIterator iter = PathRefIterator(path.pathRef);
    int verb = 0;
    while ((verb = iter.next(p)) != SPath.kDoneVerb) {
      switch (verb) {
        case SPath.kMoveVerb:
          ctx.moveTo(p[0] + offsetX, p[1] + offsetY);
        case SPath.kLineVerb:
          ctx.lineTo(p[2] + offsetX, p[3] + offsetY);
        case SPath.kCubicVerb:
          ctx.bezierCurveTo(p[2] + offsetX, p[3] + offsetY, p[4] + offsetX,
              p[5] + offsetY, p[6] + offsetX, p[7] + offsetY);
        case SPath.kQuadVerb:
          ctx.quadraticCurveTo(
              p[2] + offsetX, p[3] + offsetY, p[4] + offsetX, p[5] + offsetY);
        case SPath.kConicVerb:
          // Canvas 2D has no conic primitive; approximate with quadratics.
          final double w = iter.conicWeight;
          final Conic conic = Conic(p[0], p[1], p[2], p[3], p[4], p[5], w);
          final List<ui.Offset> points = conic.toQuads();
          final int len = points.length;
          for (int i = 1; i < len; i += 2) {
            final double p1x = points[i].dx;
            final double p1y = points[i].dy;
            final double p2x = points[i + 1].dx;
            final double p2y = points[i + 1].dy;
            ctx.quadraticCurveTo(
                p1x + offsetX, p1y + offsetY, p2x + offsetX, p2y + offsetY);
          }
        case SPath.kCloseVerb:
          ctx.closePath();
        default:
          throw UnimplementedError('Unknown path verb $verb');
      }
    }
  }

  /// Draws a rounded rectangle filled or stroked based on [style].
  void drawRRect(ui.RRect roundRect, ui.PaintingStyle? style) {
    final ui.Rect? shaderBounds = contextHandle._shaderBounds;
    RRectToCanvasRenderer(context).render(shaderBounds == null
        ? roundRect
        : roundRect.shift(ui.Offset(-shaderBounds.left, -shaderBounds.top)));
    contextHandle.paint(style);
  }

  /// Fills or strokes the area between [outer] and [inner] rounded rectangles.
  ///
  /// Typically used to draw a thick round border.
  void drawDRRect(ui.RRect outer, ui.RRect inner, ui.PaintingStyle? style) {
    final RRectRenderer renderer = RRectToCanvasRenderer(context);
    final ui.Rect? shaderBounds = contextHandle._shaderBounds;
    if (shaderBounds == null) {
      renderer.render(outer);
      // The inner rrect is rendered in reverse winding order into the same
      // path so that the enclosed area is excluded from the fill.
      renderer.render(inner, startNewPath: false, reverse: true);
    } else {
      final ui.Offset shift = ui.Offset(-shaderBounds.left, -shaderBounds.top);
      renderer.render(outer.shift(shift));
      renderer.render(inner.shift(shift), startNewPath: false, reverse: true);
    }
    contextHandle.paint(style);
  }

  /// Draws an axis-aligned oval that fills the given axis-aligned rectangle.
  void drawOval(ui.Rect rect, ui.PaintingStyle? style) {
    context.beginPath();
    final ui.Rect? shaderBounds = contextHandle._shaderBounds;
    final double cx = shaderBounds == null
        ? rect.center.dx
        : rect.center.dx - shaderBounds.left;
    final double cy = shaderBounds == null
        ? rect.center.dy
        : rect.center.dy - shaderBounds.top;
    drawEllipse(context, cx, cy, rect.width / 2, rect.height / 2, 0, 0,
        2.0 * math.pi, false);
    contextHandle.paint(style);
  }

  /// Draws a circle centered at [c] with [radius].
  void drawCircle(ui.Offset c, double radius, ui.PaintingStyle? style) {
    context.beginPath();
    final ui.Rect? shaderBounds = contextHandle._shaderBounds;
    final double cx = shaderBounds == null ? c.dx : c.dx - shaderBounds.left;
    final double cy = shaderBounds == null ? c.dy : c.dy - shaderBounds.top;
    drawEllipse(context, cx, cy, radius, radius, 0, 0, 2.0 * math.pi, false);
    contextHandle.paint(style);
  }

  /// Draws or strokes a path based on [style] and current context state.
  void drawPath(ui.Path path, ui.PaintingStyle? style) {
    final ui.Rect? shaderBounds = contextHandle._shaderBounds;
    if (shaderBounds == null) {
      _runPath(context, path as SurfacePath);
    } else {
      _runPathWithOffset(context, path as SurfacePath, -shaderBounds.left,
          -shaderBounds.top);
    }
    contextHandle.paintPath(style, path.fillType);
  }

  /// Draws [element] with its top-left corner at offset [p].
  void drawImage(DomHTMLImageElement element, ui.Offset p) {
    context.drawImage(element, p.dx, p.dy);
  }

  /// Draws a shadow for a Path representing the given material elevation.
  void drawShadow(ui.Path path, ui.Color color, double elevation,
      bool transparentOccluder) {
    final SurfaceShadowData? shadow = computeShadow(path.getBounds(), elevation);
    if (shadow != null) {
      // On April 2020 Web canvas 2D did not support shadow color alpha. So
      // instead we apply alpha separately using globalAlpha, then paint a
      // solid shadow.
      final ui.Color shadowColor = toShadowColor(color);
      final double opacity = shadowColor.alpha / 255;
      final String solidColor = colorComponentsToCssString(
        shadowColor.red,
        shadowColor.green,
        shadowColor.blue,
        255,
      );
      context.save();
      context.globalAlpha = opacity;

      // TODO(hterkelsen): Shadows with transparent occluders are not supported
      // on webkit since filter is unsupported.
      if (transparentOccluder && browserEngine != BrowserEngine.webkit) {
        // We paint shadows using a path and a mask filter instead of the
        // built-in shadow* properties. This is because the color alpha of the
        // paint is added to the shadow. The effect we're looking for is to just
        // paint the shadow without the path itself, but if we use a non-zero
        // alpha for the paint the path is painted in addition to the shadow,
        // which is undesirable.
        context.translate(shadow.offset.dx, shadow.offset.dy);
        context.filter = maskFilterToCanvasFilter(
            ui.MaskFilter.blur(ui.BlurStyle.normal, shadow.blurWidth));
        context.strokeStyle = '';
        context.fillStyle = solidColor;
      } else {
        // TODO(yjbanov): the following comment by hterkelsen makes sense, but
        //                somehow we lost the implementation described in it.
        //                Perhaps we should revisit this and actually do what
        //                the comment says.
        // TODO(hterkelsen): We fill the path with this paint, then later we clip
        // by the same path and fill it with a fully opaque color (we know
        // the color is fully opaque because `transparentOccluder` is false.
        // However, due to anti-aliasing of the clip, a few pixels of the
        // path we are about to paint may still be visible after we fill with
        // the opaque occluder. For that reason, we fill with the shadow color,
        // and set the shadow color to fully opaque. This way, the visible
        // pixels are less opaque and less noticeable.
        context.filter = 'none';
        context.strokeStyle = '';
        context.fillStyle = solidColor;
        context.shadowBlur = shadow.blurWidth;
        context.shadowColor = solidColor;
        context.shadowOffsetX = shadow.offset.dx;
        context.shadowOffsetY = shadow.offset.dy;
      }
      _runPath(context, path as SurfacePath);
      context.fill();

      // This also resets globalAlpha and shadow attributes. See:
      // https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/save#Drawing_state
      context.restore();
    }
  }

  /// Disposes html canvas element(s) used by this pool when persistent surface
  /// is disposed.
  ///
  /// When this pool is reused, [clear] is called instead to be able to
  /// draw using existing canvas elements.
  void dispose() {
    // Webkit has a threshold for the amount of canvas pixels an app can
    // allocate. Even though our canvases are being garbage-collected as
    // expected when we don't need them, Webkit keeps track of their sizes
    // towards the threshold. Setting width and height to zero tricks Webkit
    // into thinking that this canvas has a zero size so it doesn't count it
    // towards the threshold.
    if (browserEngine == BrowserEngine.webkit && _canvas != null) {
      _canvas!.width = _canvas!.height = 0;
    }
    _clearActiveCanvasList();
  }

  /// Shrinks (on WebKit) and removes all canvases in the active list, then
  /// drops the list itself.
  void _clearActiveCanvasList() {
    if (_activeCanvasList != null) {
      for (final DomCanvasElement c in _activeCanvasList!) {
        if (browserEngine == BrowserEngine.webkit) {
          c.width = c.height = 0;
        }
        c.remove();
      }
    }
    _activeCanvasList = null;
  }
}

/// Optimizes applying paint parameters to html canvas.
///
/// See https://www.w3.org/TR/2dcontext/ for defaults used in this class
/// to initialize current values.
class ContextStateHandle {
  /// Initializes context state for a [CanvasPool].
  ContextStateHandle(this._canvasPool, this.context, this.density);

  /// Associated canvas element context tracked by this context state.
final DomCanvasRenderingContext2D context; final CanvasPool _canvasPool; /// Dpi of context. final double density; ui.BlendMode? _currentBlendMode = ui.BlendMode.srcOver; ui.StrokeCap? _currentStrokeCap = ui.StrokeCap.butt; ui.StrokeJoin? _currentStrokeJoin = ui.StrokeJoin.miter; // Fill style and stroke style are Object since they can have a String or // shader object such as a gradient. Object? _currentFillStyle; Object? _currentStrokeStyle; double _currentLineWidth = 1.0; /// See [DomCanvasRenderingContext2D]. set blendMode(ui.BlendMode? blendMode) { if (blendMode != _currentBlendMode) { _currentBlendMode = blendMode; context.globalCompositeOperation = blendModeToCssMixBlendMode(blendMode) ?? 'source-over'; } } /// See [DomCanvasRenderingContext2D]. set strokeCap(ui.StrokeCap? strokeCap) { strokeCap ??= ui.StrokeCap.butt; if (strokeCap != _currentStrokeCap) { _currentStrokeCap = strokeCap; context.lineCap = stringForStrokeCap(strokeCap)!; } } /// See [DomCanvasRenderingContext2D]. set lineWidth(double lineWidth) { if (lineWidth != _currentLineWidth) { _currentLineWidth = lineWidth; context.lineWidth = lineWidth; } } /// See [DomCanvasRenderingContext2D]. set strokeJoin(ui.StrokeJoin? strokeJoin) { strokeJoin ??= ui.StrokeJoin.miter; if (strokeJoin != _currentStrokeJoin) { _currentStrokeJoin = strokeJoin; context.lineJoin = stringForStrokeJoin(strokeJoin); } } /// See [DomCanvasRenderingContext2D]. set fillStyle(Object? colorOrGradient) { if (!identical(colorOrGradient, _currentFillStyle)) { _currentFillStyle = colorOrGradient; context.fillStyle = colorOrGradient; } } /// See [DomCanvasRenderingContext2D]. set strokeStyle(Object? colorOrGradient) { if (!identical(colorOrGradient, _currentStrokeStyle)) { _currentStrokeStyle = colorOrGradient; context.strokeStyle = colorOrGradient; } } ui.MaskFilter? _currentFilter; SurfacePaintData? _lastUsedPaint; /// Currently active shader bounds. 
  ///
  /// When a paint style uses a shader that produces a pattern, the pattern
  /// origin is relative to current transform. Therefore any painting operations
  /// will have to reverse the transform to correctly align pattern with
  /// drawing bounds.
  ui.Rect? _shaderBounds;

  /// The painting state.
  ///
  /// Used to validate that the [setUpPaint] and [tearDownPaint] are called in
  /// a correct sequence.
  bool _debugIsPaintSetUp = false;

  /// Whether to use WebKit's method of rendering [MaskFilter].
  ///
  /// This is used in screenshot tests to test Safari codepaths.
  static bool debugEmulateWebKitMaskFilter = false;

  bool get _renderMaskFilterForWebkit =>
      browserEngine == BrowserEngine.webkit || debugEmulateWebKitMaskFilter;

  /// Sets paint properties on the current canvas.
  ///
  /// [tearDownPaint] must be called after calling this method.
  void setUpPaint(SurfacePaintData paint, ui.Rect? shaderBounds) {
    assert(() {
      final bool wasPaintSetUp = _debugIsPaintSetUp;
      _debugIsPaintSetUp = true;
      // When setting up paint, the previous paint must be torn down.
      return !wasPaintSetUp;
    }());

    _lastUsedPaint = paint;
    lineWidth = paint.strokeWidth ?? 1.0;
    blendMode = paint.blendMode;
    strokeCap = paint.strokeCap;
    strokeJoin = paint.strokeJoin;

    if (paint.shader != null) {
      if (paint.shader is EngineGradient) {
        final EngineGradient engineShader = paint.shader! as EngineGradient;
        final Object paintStyle = engineShader.createPaintStyle(
            _canvasPool.context, shaderBounds, density);
        fillStyle = paintStyle;
        strokeStyle = paintStyle;
        _shaderBounds = shaderBounds;
        // Align pattern origin to destination.
        context.translate(shaderBounds!.left, shaderBounds.top);
      } else if (paint.shader is EngineImageShader) {
        final EngineImageShader imageShader = paint.shader! as EngineImageShader;
        final Object paintStyle = imageShader.createPaintStyle(
            _canvasPool.context, shaderBounds, density);
        fillStyle = paintStyle;
        strokeStyle = paintStyle;
        if (imageShader.requiresTileOffset) {
          _shaderBounds = shaderBounds;
          // Align pattern origin to destination.
          context.translate(shaderBounds!.left, shaderBounds.top);
        }
      }
    } else {
      final String colorString = colorValueToCssString(paint.color);
      fillStyle = colorString;
      strokeStyle = colorString;
    }

    final ui.MaskFilter? maskFilter = paint.maskFilter;
    if (!_renderMaskFilterForWebkit) {
      if (_currentFilter != maskFilter) {
        _currentFilter = maskFilter;
        context.filter = maskFilterToCanvasFilter(maskFilter);
      }
    } else {
      // WebKit does not support the `filter` property. Instead we apply a
      // shadow to the shape of the same color as the paint and the same blur
      // as the mask filter.
      //
      // Note that on WebKit the cached value of _currentFilter is not useful.
      // Instead we destructure it into the shadow properties and cache those.
      if (maskFilter != null) {
        context.save();
        context.shadowBlur = convertSigmaToRadius(maskFilter.webOnlySigma);
        // Shadow color must be fully opaque.
        context.shadowColor =
            ui.Color(paint.color).withAlpha(255).toCssString();

        // On the web a shadow must always be painted together with the shape
        // that casts it. In order to paint just the shadow, we offset the shape
        // by a large enough value that it moved outside the canvas bounds, then
        // offset the shadow in the opposite direction such that it lands exactly
        // where the shape is.
        const double kOutsideTheBoundsOffset = 50000;

        context.translate(-kOutsideTheBoundsOffset, 0);

        // Shadow offset is not affected by the current canvas context transform.
        // We have to apply the transform ourselves. To do that we transform the
        // tip of the vector from the shape to the shadow, then we transform the
        // origin (0, 0). The desired shadow offset is the difference between the
        // two. In vector notation, this is:
        //
        // transformedShadowDelta = M*shadowDelta - M*origin.
        final Float32List tempVector = Float32List(2);
        tempVector[0] = kOutsideTheBoundsOffset *
            EngineFlutterDisplay.instance.devicePixelRatio;
        _canvasPool.currentTransform.transform2(tempVector);
        final double shadowOffsetX = tempVector[0];
        final double shadowOffsetY = tempVector[1];

        tempVector[0] = tempVector[1] = 0;
        _canvasPool.currentTransform.transform2(tempVector);
        context.shadowOffsetX = shadowOffsetX - tempVector[0];
        context.shadowOffsetY = shadowOffsetY - tempVector[1];
      }
    }
  }

  /// Removes paint properties on the current canvas used by the last draw
  /// command.
  ///
  /// Not all properties are cleared. Properties that are set by all paint
  /// commands prior to painting do not need to be cleared.
  ///
  /// Must be called after calling [setUpPaint].
  void tearDownPaint() {
    assert(() {
      final bool wasPaintSetUp = _debugIsPaintSetUp;
      _debugIsPaintSetUp = false;
      // When tearing down paint, we expect that it was set up before.
      return wasPaintSetUp;
    }());

    final ui.MaskFilter? maskFilter = _lastUsedPaint?.maskFilter;
    if (maskFilter != null && _renderMaskFilterForWebkit) {
      // On Safari (WebKit) we use a translated shadow to emulate
      // MaskFilter.blur. We use restore to undo the translation and
      // shadow attributes.
      context.restore();
    }
    if (_shaderBounds != null) {
      // Undo the pattern-origin translation applied in [setUpPaint].
      context.translate(-_shaderBounds!.left, -_shaderBounds!.top);
      _shaderBounds = null;
    }
  }

  /// Fills or strokes the currently active path.
  void paint(ui.PaintingStyle? style) {
    if (style == ui.PaintingStyle.stroke) {
      context.stroke();
    } else {
      context.fill();
    }
  }

  /// Fills or strokes the currently active path based on fill type.
  void paintPath(ui.PaintingStyle? style, ui.PathFillType pathFillType) {
    if (style == ui.PaintingStyle.stroke) {
      context.stroke();
    } else {
      if (pathFillType == ui.PathFillType.nonZero) {
        context.fill();
      } else {
        context.fill('evenodd');
      }
    }
  }

  /// Resets drawing context state to defaults for
  /// [DomCanvasRenderingContext2D].
  void reset() {
    context.fillStyle = '';
    // Read back fillStyle/strokeStyle values from context so that input such
    // as rgba(0, 0, 0, 0) is correctly compared and doesn't cause diff on
    // setter.
    _currentFillStyle = context.fillStyle;
    context.strokeStyle = '';
    _currentStrokeStyle = context.strokeStyle;
    context.shadowBlur = 0;
    // NOTE(review): 'none' is not a parseable CSS color, so this assignment is
    // presumably ignored by the canvas and shadowColor keeps its previous
    // value; shadows stay invisible only because shadowBlur/offsets are zeroed.
    // Consider 'rgba(0, 0, 0, 0)' (the spec default) — TODO confirm intent.
    context.shadowColor = 'none';
    context.shadowOffsetX = 0;
    context.shadowOffsetY = 0;
    context.globalCompositeOperation = 'source-over';
    _currentBlendMode = ui.BlendMode.srcOver;
    context.lineWidth = 1.0;
    _currentLineWidth = 1.0;
    context.lineCap = 'butt';
    _currentStrokeCap = ui.StrokeCap.butt;
    context.lineJoin = 'miter';
    _currentStrokeJoin = ui.StrokeJoin.miter;
    _shaderBounds = null;
  }
}

/// Provides save stack tracking functionality to implementations of
/// [EngineCanvas].
class _SaveStackTracking {
  final List<SaveStackEntry> _saveStack = <SaveStackEntry>[];

  /// The stack that maintains clipping operations used when text is painted
  /// onto bitmap canvas but is composited as separate element.
  List<SaveClipEntry>? clipStack;

  /// Returns whether there are active clipping regions on the canvas.
  bool get isClipped => clipStack != null;

  /// Empties the save stack and the element stack, and resets the transform
  /// and clip parameters.
  @mustCallSuper
  void clear() {
    _saveStack.clear();
    clipStack = null;
    _currentTransform = Matrix4.identity();
  }

  /// The current transformation matrix.
  Matrix4 get currentTransform => _currentTransform;
  Matrix4 _currentTransform = Matrix4.identity();

  /// Saves current clip and transform on the save stack.
  @mustCallSuper
  void save() {
    _saveStack.add(SaveStackEntry(
      transform: _currentTransform.clone(),
      clipStack: clipStack == null ? null : List<SaveClipEntry>.from(clipStack!),
    ));
  }

  /// Restores current clip and transform from the save stack.
  @mustCallSuper
  void restore() {
    if (_saveStack.isEmpty) {
      return;
    }
    final SaveStackEntry entry = _saveStack.removeLast();
    _currentTransform = entry.transform;
    clipStack = entry.clipStack;
  }

  /// Multiplies the [currentTransform] matrix by a translation.
  @mustCallSuper
  void translate(double dx, double dy) {
    _currentTransform.translate(dx, dy);
  }

  /// Scales the [currentTransform] matrix.
  @mustCallSuper
  void scale(double sx, double sy) {
    _currentTransform.scale(sx, sy);
  }

  /// Rotates the [currentTransform] matrix.
  @mustCallSuper
  void rotate(double radians) {
    _currentTransform.rotate(kUnitZ, radians);
  }

  /// Skews the [currentTransform] matrix.
  @mustCallSuper
  void skew(double sx, double sy) {
    final Matrix4 skewMatrix = Matrix4.identity();
    final Float32List storage = skewMatrix.storage;
    // Column-major storage: [1] is the (row 1, col 0) entry, [4] is
    // (row 0, col 1).
    storage[1] = sy;
    storage[4] = sx;
    _currentTransform.multiply(skewMatrix);
  }

  /// Multiplies the [currentTransform] matrix by another matrix.
  @mustCallSuper
  void transform(Float32List matrix4) {
    _currentTransform.multiply(Matrix4.fromFloat32List(matrix4));
  }

  /// Adds a rectangle to clipping stack.
  @mustCallSuper
  void clipRect(ui.Rect rect) {
    clipStack ??= <SaveClipEntry>[];
    clipStack!.add(SaveClipEntry.rect(rect, _currentTransform.clone()));
  }

  /// Adds a round rectangle to clipping stack.
  @mustCallSuper
  void clipRRect(ui.RRect rrect) {
    clipStack ??= <SaveClipEntry>[];
    clipStack!.add(SaveClipEntry.rrect(rrect, _currentTransform.clone()));
  }

  /// Adds a path to clipping stack.
  @mustCallSuper
  void clipPath(ui.Path path) {
    clipStack ??= <SaveClipEntry>[];
    clipStack!.add(SaveClipEntry.path(path, _currentTransform.clone()));
  }
}
engine/lib/web_ui/lib/src/engine/canvas_pool.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/canvas_pool.dart", "repo_id": "engine", "token_count": 16479 }
292
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;

/// A [Rasterizer] that renders the scene directly into one or more on-screen
/// WebGL-backed [Surface]s.
///
/// This approach is fragile: browsers cap the number of simultaneously live
/// WebGL contexts, and sharing GL resources across contexts has known bugs.
/// It is still the fastest option on Firefox and Safari, where
/// `createImageBitmap` is currently very slow, so rendering straight to
/// several [Surface]s is how 60 fps is achieved on those browsers.
class MultiSurfaceRasterizer extends Rasterizer {
  /// One lazily-created view rasterizer per [EngineFlutterView].
  final Map<EngineFlutterView, MultiSurfaceViewRasterizer> _viewRasterizers =
      <EngineFlutterView, MultiSurfaceViewRasterizer>{};

  @override
  MultiSurfaceViewRasterizer createViewRasterizer(EngineFlutterView view) =>
      _viewRasterizers.putIfAbsent(
          view, () => MultiSurfaceViewRasterizer(view, this));

  @override
  void dispose() {
    for (final MultiSurfaceViewRasterizer rasterizer
        in _viewRasterizers.values) {
      rasterizer.dispose();
    }
    _viewRasterizers.clear();
  }

  @override
  void setResourceCacheMaxBytes(int bytes) {
    // Propagate the new limit to every surface of every view.
    for (final MultiSurfaceViewRasterizer rasterizer
        in _viewRasterizers.values) {
      rasterizer.displayFactory.forEachCanvas(
          (Surface surface) => surface.setSkiaResourceCacheMaxBytes(bytes));
    }
  }
}

/// The per-view rasterizer used by [MultiSurfaceRasterizer].
class MultiSurfaceViewRasterizer extends ViewRasterizer {
  MultiSurfaceViewRasterizer(super.view, this.rasterizer);

  /// The [MultiSurfaceRasterizer] that created this view rasterizer.
  final MultiSurfaceRasterizer rasterizer;

  @override
  final DisplayCanvasFactory<Surface> displayFactory =
      DisplayCanvasFactory<Surface>(
          createCanvas: () => Surface(isDisplayCanvas: true));

  @override
  void prepareToDraw() {
    displayFactory.baseCanvas.createOrUpdateSurface(currentFrameSize);
  }

  @override
  Future<void> rasterizeToCanvas(
      DisplayCanvas canvas, List<CkPicture> pictures) {
    final Surface surface = canvas as Surface;
    surface
      ..createOrUpdateSurface(currentFrameSize)
      ..positionToShowFrame(currentFrameSize);
    // Clear to fully transparent, then replay every picture.
    final CkCanvas target = surface.getCanvas();
    target.clear(const ui.Color(0x00000000));
    for (final CkPicture picture in pictures) {
      target.drawPicture(picture);
    }
    surface.flush();
    // Rendering is synchronous here; return an already-completed future.
    return Future<void>.value();
  }
}
engine/lib/web_ui/lib/src/engine/canvaskit/multi_surface_rasterizer.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/canvaskit/multi_surface_rasterizer.dart", "repo_id": "engine", "token_count": 811 }
293
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:js_interop';

import 'package:ui/ui.dart' as ui;

import '../browser_detection.dart';
import '../configuration.dart';
import '../display.dart';
import '../dom.dart';
import '../platform_dispatcher.dart';
import '../util.dart';
import 'canvas.dart';
import 'canvaskit_api.dart';
import 'picture.dart';
import 'rasterizer.dart';
import 'render_canvas.dart';
import 'util.dart';

// Only supported in profile/release mode. Allows Flutter to use MSAA but
// removes the ability for disabling AA on Paint objects.
const bool _kUsingMSAA = bool.fromEnvironment('flutter.canvaskit.msaa');

typedef SubmitCallback = bool Function(SurfaceFrame, CkCanvas);

/// A frame which contains a canvas to be drawn into.
class SurfaceFrame {
  SurfaceFrame(this.skiaSurface, this.submitCallback) : _submitted = false;

  final CkSurface skiaSurface;
  final SubmitCallback submitCallback;
  // NOTE(review): `_submitted` is final and initialized to false, so the
  // early-return in [submit] can never trigger — the callback is always
  // invoked. Verify whether double-submit protection was intended.
  final bool _submitted;

  /// Submit this frame to be drawn.
  bool submit() {
    if (_submitted) {
      return false;
    }
    return submitCallback(this, skiaCanvas);
  }

  CkCanvas get skiaCanvas => skiaSurface.getCanvas();
}

/// A surface which can be drawn into by the compositor.
///
/// The underlying representation is a [CkSurface], which can be reused by
/// successive frames if they are the same size. Otherwise, a new [CkSurface] is
/// created.
class Surface extends DisplayCanvas {
  Surface({this.isDisplayCanvas = false})
      : useOffscreenCanvas =
            Surface.offscreenCanvasSupported && !isDisplayCanvas;

  CkSurface? _surface;

  /// Whether or not to use an `OffscreenCanvas` to back this [Surface].
  final bool useOffscreenCanvas;

  /// If `true`, this [Surface] is used as a [DisplayCanvas].
  final bool isDisplayCanvas;

  /// If true, forces a new WebGL context to be created, even if the window
  /// size is the same. This is used to restore the UI after the browser tab
  /// goes dormant and loses the GL context.
  bool _forceNewContext = true;
  bool get debugForceNewContext => _forceNewContext;

  bool _contextLost = false;
  bool get debugContextLost => _contextLost;

  /// A cached copy of the most recently created `webglcontextlost` listener.
  ///
  /// We must cache this function because each time we access the tear-off it
  /// creates a new object, meaning we won't be able to remove this listener
  /// later.
  DomEventListener? _cachedContextLostListener;

  /// A cached copy of the most recently created `webglcontextrestored`
  /// listener.
  ///
  /// We must cache this function because each time we access the tear-off it
  /// creates a new object, meaning we won't be able to remove this listener
  /// later.
  DomEventListener? _cachedContextRestoredListener;

  SkGrContext? _grContext;
  int? _glContext;
  int? _skiaCacheBytes;

  /// The underlying OffscreenCanvas element used for this surface.
  DomOffscreenCanvas? _offscreenCanvas;

  /// Returns the underlying OffscreenCanvas. Should only be used in tests.
  DomOffscreenCanvas? get debugOffscreenCanvas => _offscreenCanvas;

  /// The <canvas> backing this Surface in the case that OffscreenCanvas isn't
  /// supported.
  DomCanvasElement? _canvasElement;

  /// Note, if this getter is called, then this Surface is being used as an
  /// overlay and must be backed by an onscreen <canvas> element.
  @override
  final DomElement hostElement = createDomElement('flt-canvas-container');

  // Physical (device-pixel) dimensions of the backing canvas; -1 until the
  // first surface is created.
  int _pixelWidth = -1;
  int _pixelHeight = -1;

  double _currentDevicePixelRatio = -1;
  int _sampleCount = -1;
  int _stencilBits = -1;

  /// Specify the GPU resource cache limits.
  void setSkiaResourceCacheMaxBytes(int bytes) {
    _skiaCacheBytes = bytes;
    _syncCacheBytes();
  }

  // Pushes the cached byte limit to the GrContext, if both exist.
  void _syncCacheBytes() {
    if (_skiaCacheBytes != null) {
      _grContext?.setResourceCacheLimitBytes(_skiaCacheBytes!.toDouble());
    }
  }

  /// The CanvasKit canvas associated with this surface.
  CkCanvas getCanvas() {
    return _surface!.getCanvas();
  }

  void flush() {
    _surface!.flush();
  }

  /// Replays [pictures] into this surface, then transfers the result to
  /// [canvas].
  Future<void> rasterizeToCanvas(
      ui.Size frameSize, RenderCanvas canvas, List<CkPicture> pictures) async {
    final CkCanvas skCanvas = getCanvas();
    skCanvas.clear(const ui.Color(0x00000000));
    pictures.forEach(skCanvas.drawPicture);
    flush();

    if (browserSupportsCreateImageBitmap) {
      JSObject bitmapSource;
      if (useOffscreenCanvas) {
        bitmapSource = _offscreenCanvas! as JSObject;
      } else {
        bitmapSource = _canvasElement! as JSObject;
      }
      // The frame is drawn in the bottom-left corner of the (possibly larger)
      // backing canvas, hence the y offset into the bitmap.
      final DomImageBitmap bitmap = await createImageBitmap(bitmapSource, (
        x: 0,
        y: _pixelHeight - frameSize.height.toInt(),
        width: frameSize.width.toInt(),
        height: frameSize.height.toInt(),
      ));
      canvas.render(bitmap);
    } else {
      // If the browser doesn't support `createImageBitmap` (e.g. Safari 14)
      // then render using `drawImage` instead.
      DomCanvasImageSource imageSource;
      if (useOffscreenCanvas) {
        imageSource = _offscreenCanvas! as DomCanvasImageSource;
      } else {
        imageSource = _canvasElement! as DomCanvasImageSource;
      }
      canvas.renderWithNoBitmapSupport(imageSource, _pixelHeight, frameSize);
    }
  }

  /// Acquire a frame of the given [size] containing a drawable canvas.
  ///
  /// The given [size] is in physical pixels.
  SurfaceFrame acquireFrame(ui.Size size) {
    final CkSurface surface = createOrUpdateSurface(size);

    // ignore: prefer_function_declarations_over_variables
    final SubmitCallback submitCallback =
        (SurfaceFrame surfaceFrame, CkCanvas canvas) {
      return _presentSurface();
    };

    return SurfaceFrame(surface, submitCallback);
  }

  ui.Size? _currentCanvasPhysicalSize;
  ui.Size? _currentSurfaceSize;

  /// Sets the CSS size of the canvas so that canvas pixels are 1:1 with device
  /// pixels.
  ///
  /// The logical size of the canvas is not based on the size of the window
  /// but on the size of the canvas, which, due to `ceil()` above, may not be
  /// the same as the window. We do not round/floor/ceil the logical size as
  /// CSS pixels can contain more than one physical pixel and therefore to
  /// match the size of the window precisely we use the most precise floating
  /// point value we can get.
  void _updateLogicalHtmlCanvasSize() {
    final double devicePixelRatio =
        EngineFlutterDisplay.instance.devicePixelRatio;
    final double logicalWidth = _pixelWidth / devicePixelRatio;
    final double logicalHeight = _pixelHeight / devicePixelRatio;
    final DomCSSStyleDeclaration style = _canvasElement!.style;
    style.width = '${logicalWidth}px';
    style.height = '${logicalHeight}px';
    _currentDevicePixelRatio = devicePixelRatio;
  }

  /// The <canvas> element backing this surface may be larger than the screen.
  /// The Surface will draw the frame to the bottom left of the <canvas>, but
  /// the <canvas> is, by default, positioned so that the top left corner is in
  /// the top left of the window. We need to shift the canvas down so that the
  /// bottom left of the <canvas> is the bottom left corner of the window.
  void positionToShowFrame(ui.Size frameSize) {
    assert(isDisplayCanvas,
        'Should not position Surface if not used as a render canvas');
    final double devicePixelRatio =
        EngineFlutterDisplay.instance.devicePixelRatio;
    final double logicalHeight = _pixelHeight / devicePixelRatio;
    final double logicalFrameHeight = frameSize.height / devicePixelRatio;

    // Shift the canvas up so the bottom left is in the window.
    _canvasElement!.style.transform =
        'translate(0px, ${logicalFrameHeight - logicalHeight}px)';
  }

  /// This is only valid after the first frame or if [ensureSurface] has been
  /// called.
  bool get usingSoftwareBackend =>
      _glContext == null ||
      _grContext == null ||
      webGLVersion == -1 ||
      configuration.canvasKitForceCpuOnly;

  /// Ensure that the initial surface exists and has a size of at least [size].
  ///
  /// If not provided, [size] defaults to 1x1.
  ///
  /// This also ensures that the gl/grcontext have been populated so
  /// that software rendering can be detected.
  void ensureSurface([ui.Size size = const ui.Size(1, 1)]) {
    // If the GrContext hasn't been setup yet then we need to force initialization
    // of the canvas and initial surface.
    if (_surface != null) {
      return;
    }
    // TODO(jonahwilliams): this is somewhat wasteful. We should probably
    // eagerly setup this surface instead of delaying until the first frame?
    // Or at least cache the estimated window size.
    createOrUpdateSurface(size);
  }

  /// Creates a <canvas> and SkSurface for the given [size].
  CkSurface createOrUpdateSurface(ui.Size size) {
    if (size.isEmpty) {
      throw CanvasKitError('Cannot create surfaces of empty size.');
    }

    if (!_forceNewContext) {
      // Check if the window is the same size as before, and if so, don't allocate
      // a new canvas as the previous canvas is big enough to fit everything.
      final ui.Size? previousSurfaceSize = _currentSurfaceSize;
      if (previousSurfaceSize != null &&
          size.width == previousSurfaceSize.width &&
          size.height == previousSurfaceSize.height) {
        // The existing surface is still reusable; only the CSS size may need
        // refreshing if the device pixel ratio changed.
        final double devicePixelRatio =
            EngineFlutterDisplay.instance.devicePixelRatio;
        if (isDisplayCanvas && devicePixelRatio != _currentDevicePixelRatio) {
          _updateLogicalHtmlCanvasSize();
        }
        return _surface!;
      }

      final ui.Size? previousCanvasSize = _currentCanvasPhysicalSize;
      // Initialize a new, larger, canvas. If the size is growing, then make the
      // new canvas larger than required to avoid many canvas creations.
      if (previousCanvasSize != null &&
          (size.width > previousCanvasSize.width ||
              size.height > previousCanvasSize.height)) {
        // Grow by 40% to amortize future resizes.
        final ui.Size newSize = size * 1.4;
        _surface?.dispose();
        _surface = null;
        _pixelWidth = newSize.width.ceil();
        _pixelHeight = newSize.height.ceil();
        if (useOffscreenCanvas) {
          _offscreenCanvas!.width = _pixelWidth.toDouble();
          _offscreenCanvas!.height = _pixelHeight.toDouble();
        } else {
          _canvasElement!.width = _pixelWidth.toDouble();
          _canvasElement!.height = _pixelHeight.toDouble();
        }
        _currentCanvasPhysicalSize =
            ui.Size(_pixelWidth.toDouble(), _pixelHeight.toDouble());
        if (isDisplayCanvas) {
          _updateLogicalHtmlCanvasSize();
        }
      }
    }

    // Either a new context is being forced or we've never had one.
    if (_forceNewContext || _currentCanvasPhysicalSize == null) {
      _surface?.dispose();
      _surface = null;
      _grContext?.releaseResourcesAndAbandonContext();
      _grContext?.delete();
      _grContext = null;

      _createNewCanvas(size);
      _currentCanvasPhysicalSize = size;
    }

    _currentSurfaceSize = size;
    _surface?.dispose();
    _surface = _createNewSurface(size);
    return _surface!;
  }

  JSVoid _contextRestoredListener(DomEvent event) {
    assert(
        _contextLost,
        'Received "webglcontextrestored" event but never received '
        'a "webglcontextlost" event.');
    _contextLost = false;
    // Force the framework to rerender the frame.
    EnginePlatformDispatcher.instance.invokeOnMetricsChanged();
    event.stopPropagation();
    event.preventDefault();
  }

  JSVoid _contextLostListener(DomEvent event) {
    assert(event.target == _offscreenCanvas || event.target == _canvasElement,
        'Received a context lost event for a disposed canvas');
    _contextLost = true;
    _forceNewContext = true;
    event.preventDefault();
  }

  /// This function is expensive.
  ///
  /// It's better to reuse canvas if possible.
  void _createNewCanvas(ui.Size physicalSize) {
    // Clear the container, if it's not empty. We're going to create a new <canvas>.
if (_offscreenCanvas != null) { _offscreenCanvas!.removeEventListener( 'webglcontextrestored', _cachedContextRestoredListener, false, ); _offscreenCanvas!.removeEventListener( 'webglcontextlost', _cachedContextLostListener, false, ); _offscreenCanvas = null; _cachedContextRestoredListener = null; _cachedContextLostListener = null; } else if (_canvasElement != null) { _canvasElement!.removeEventListener( 'webglcontextrestored', _cachedContextRestoredListener, false, ); _canvasElement!.removeEventListener( 'webglcontextlost', _cachedContextLostListener, false, ); _canvasElement!.remove(); _canvasElement = null; _cachedContextRestoredListener = null; _cachedContextLostListener = null; } // If `physicalSize` is not precise, use a slightly bigger canvas. This way // we ensure that the rendred picture covers the entire browser window. _pixelWidth = physicalSize.width.ceil(); _pixelHeight = physicalSize.height.ceil(); DomEventTarget htmlCanvas; if (useOffscreenCanvas) { final DomOffscreenCanvas offscreenCanvas = createDomOffscreenCanvas( _pixelWidth, _pixelHeight, ); htmlCanvas = offscreenCanvas; _offscreenCanvas = offscreenCanvas; _canvasElement = null; } else { final DomCanvasElement canvas = createDomCanvasElement(width: _pixelWidth, height: _pixelHeight); htmlCanvas = canvas; _canvasElement = canvas; _offscreenCanvas = null; if (isDisplayCanvas) { _canvasElement!.setAttribute('aria-hidden', 'true'); _canvasElement!.style.position = 'absolute'; hostElement.append(_canvasElement!); _updateLogicalHtmlCanvasSize(); } } // When the browser tab using WebGL goes dormant the browser and/or OS may // decide to clear GPU resources to let other tabs/programs use the GPU. // When this happens, the browser sends the "webglcontextlost" event as a // notification. When we receive this notification we force a new context. 
// // See also: https://www.khronos.org/webgl/wiki/HandlingContextLost _cachedContextRestoredListener = createDomEventListener(_contextRestoredListener); _cachedContextLostListener = createDomEventListener(_contextLostListener); htmlCanvas.addEventListener( 'webglcontextlost', _cachedContextLostListener, false, ); htmlCanvas.addEventListener( 'webglcontextrestored', _cachedContextRestoredListener, false, ); _forceNewContext = false; _contextLost = false; if (webGLVersion != -1 && !configuration.canvasKitForceCpuOnly) { int glContext = 0; final SkWebGLContextOptions options = SkWebGLContextOptions( // Default to no anti-aliasing. Paint commands can be explicitly // anti-aliased by setting their `Paint` object's `antialias` property. antialias: _kUsingMSAA ? 1 : 0, majorVersion: webGLVersion.toDouble(), ); if (useOffscreenCanvas) { glContext = canvasKit.GetOffscreenWebGLContext( _offscreenCanvas!, options, ).toInt(); } else { glContext = canvasKit.GetWebGLContext( _canvasElement!, options, ).toInt(); } _glContext = glContext; if (_glContext != 0) { _grContext = canvasKit.MakeGrContext(glContext.toDouble()); if (_grContext == null) { throw CanvasKitError('Failed to initialize CanvasKit. ' 'CanvasKit.MakeGrContext returned null.'); } if (_sampleCount == -1 || _stencilBits == -1) { _initWebglParams(); } // Set the cache byte limit for this grContext, if not specified it will // use CanvasKit's default. 
_syncCacheBytes(); } } } void _initWebglParams() { WebGLContext gl; if (useOffscreenCanvas) { gl = _offscreenCanvas!.getGlContext(webGLVersion); } else { gl = _canvasElement!.getGlContext(webGLVersion); } _sampleCount = gl.getParameter(gl.samples); _stencilBits = gl.getParameter(gl.stencilBits); } CkSurface _createNewSurface(ui.Size size) { assert(_offscreenCanvas != null || _canvasElement != null); if (webGLVersion == -1) { return _makeSoftwareCanvasSurface('WebGL support not detected'); } else if (configuration.canvasKitForceCpuOnly) { return _makeSoftwareCanvasSurface('CPU rendering forced by application'); } else if (_glContext == 0) { return _makeSoftwareCanvasSurface('Failed to initialize WebGL context'); } else { final SkSurface? skSurface = canvasKit.MakeOnScreenGLSurface( _grContext!, size.width.ceilToDouble(), size.height.ceilToDouble(), SkColorSpaceSRGB, _sampleCount, _stencilBits); if (skSurface == null) { return _makeSoftwareCanvasSurface('Failed to initialize WebGL surface'); } return CkSurface(skSurface, _glContext); } } static bool _didWarnAboutWebGlInitializationFailure = false; CkSurface _makeSoftwareCanvasSurface(String reason) { if (!_didWarnAboutWebGlInitializationFailure) { printWarning('WARNING: Falling back to CPU-only rendering. 
$reason.'); _didWarnAboutWebGlInitializationFailure = true; } SkSurface surface; if (useOffscreenCanvas) { surface = canvasKit.MakeOffscreenSWCanvasSurface(_offscreenCanvas!); } else { surface = canvasKit.MakeSWCanvasSurface(_canvasElement!); } return CkSurface( surface, null, ); } bool _presentSurface() { _surface!.flush(); return true; } @override bool get isConnected => _canvasElement!.isConnected!; @override void initialize() { ensureSurface(); } @override void dispose() { _offscreenCanvas?.removeEventListener( 'webglcontextlost', _cachedContextLostListener, false); _offscreenCanvas?.removeEventListener( 'webglcontextrestored', _cachedContextRestoredListener, false); _cachedContextLostListener = null; _cachedContextRestoredListener = null; _surface?.dispose(); } /// Safari 15 doesn't support OffscreenCanvas at all. Safari 16 supports /// OffscreenCanvas, but only with the context2d API, not WebGL. static bool get offscreenCanvasSupported => browserSupportsOffscreenCanvas && !isSafari; } /// A Dart wrapper around Skia's CkSurface. class CkSurface { CkSurface(this.surface, this._glContext); CkCanvas getCanvas() { assert(!_isDisposed, 'Attempting to use the canvas of a disposed surface'); return CkCanvas(surface.getCanvas()); } /// The underlying CanvasKit surface object. /// /// Only borrow this value temporarily. Do not store it as it may be deleted /// at any moment. Storing it may lead to dangling pointer bugs. final SkSurface surface; final int? _glContext; /// Flushes the graphics to be rendered on screen. void flush() { surface.flush(); } int? get context => _glContext; int width() => surface.width().ceil(); int height() => surface.height().ceil(); void dispose() { if (_isDisposed) { return; } surface.dispose(); _isDisposed = true; } bool _isDisposed = false; }
engine/lib/web_ui/lib/src/engine/canvaskit/surface.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/canvaskit/surface.dart", "repo_id": "engine", "token_count": 6990 }
294
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;

/// Accumulates the timestamps of a single frame (vsync, build start/finish,
/// raster start/finish) and periodically reports the collected
/// [ui.FrameTiming]s to the framework.
class FrameTimingRecorder {
  // Per-frame timestamps in microseconds. The vsync and build-start values
  // are latched from the static fields at construction time.
  final int _vsyncStartMicros = _currentFrameVsyncStart;
  final int _buildStartMicros = _currentFrameBuildStart;
  int? _buildFinishMicros;
  int? _rasterStartMicros;
  int? _rasterFinishMicros;

  /// Collects frame timings from frames.
  ///
  /// This list is periodically reported to the framework (see
  /// [_kFrameTimingsSubmitInterval]).
  static List<ui.FrameTiming> _frameTimings = <ui.FrameTiming>[];

  /// These two metrics are collected early in the process, before the
  /// respective scene builders are created. These are instead treated as
  /// global state, which are used to initialize any recorders that are
  /// created by the scene builders.
  static int _currentFrameVsyncStart = 0;
  static int _currentFrameBuildStart = 0;

  /// Latches the current time as this frame's vsync timestamp, if timings
  /// collection is enabled.
  static void recordCurrentFrameVsync() {
    if (!frameTimingsEnabled) {
      return;
    }
    _currentFrameVsyncStart = _nowMicros();
  }

  /// Latches the current time as this frame's build-start timestamp, if
  /// timings collection is enabled.
  static void recordCurrentFrameBuildStart() {
    if (!frameTimingsEnabled) {
      return;
    }
    _currentFrameBuildStart = _nowMicros();
  }

  /// The last time (in microseconds) we submitted frame timings.
  static int _frameTimingsLastSubmitTime = _nowMicros();

  /// The amount of time in microseconds we wait between submitting
  /// frame timings.
  static const int _kFrameTimingsSubmitInterval = 100000; // 100 milliseconds

  /// Whether we are collecting [ui.FrameTiming]s, i.e. whether the framework
  /// has registered an `onReportTimings` callback.
  static bool get frameTimingsEnabled =>
      EnginePlatformDispatcher.instance.onReportTimings != null;

  /// Current timestamp in microseconds taken from the high-precision
  /// monotonically increasing timer.
  ///
  /// See also:
  ///
  /// * https://developer.mozilla.org/en-US/docs/Web/API/Performance/now,
  ///   particularly notes about Firefox rounding to 1ms for security reasons,
  ///   which can be bypassed in tests by setting certain browser options.
  static int _nowMicros() => (domWindow.performance.now() * 1000).toInt();

  /// Records when the frame finished building. Defaults to "now".
  void recordBuildFinish([int? micros]) {
    assert(_buildFinishMicros == null,
        "can't record build finish more than once");
    _buildFinishMicros = micros ?? _nowMicros();
  }

  /// Records when rasterization of the frame started. Defaults to "now".
  void recordRasterStart([int? micros]) {
    assert(_rasterStartMicros == null,
        "can't record raster start more than once");
    _rasterStartMicros = micros ?? _nowMicros();
  }

  /// Records when rasterization of the frame finished. Defaults to "now".
  void recordRasterFinish([int? micros]) {
    assert(_rasterFinishMicros == null,
        "can't record raster finish more than once");
    _rasterFinishMicros = micros ?? _nowMicros();
  }

  /// Converts the recorded timestamps into a [ui.FrameTiming], queues it, and
  /// flushes the queue to the framework once the submit interval has elapsed.
  void submitTimings() {
    assert(
      _buildFinishMicros != null &&
          _rasterStartMicros != null &&
          _rasterFinishMicros != null,
      'Attempted to submit an incomplete timings.',
    );
    _frameTimings.add(ui.FrameTiming(
      vsyncStart: _vsyncStartMicros,
      buildStart: _buildStartMicros,
      buildFinish: _buildFinishMicros!,
      rasterStart: _rasterStartMicros!,
      rasterFinish: _rasterFinishMicros!,
      rasterFinishWallTime: _rasterFinishMicros!,
    ));
    final int nowMicros = _nowMicros();
    if (nowMicros - _frameTimingsLastSubmitTime >
        _kFrameTimingsSubmitInterval) {
      _frameTimingsLastSubmitTime = nowMicros;
      EnginePlatformDispatcher.instance.invokeOnReportTimings(_frameTimings);
      _frameTimings = <ui.FrameTiming>[];
    }
  }
}
engine/lib/web_ui/lib/src/engine/frame_timing_recorder.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/frame_timing_recorder.dart", "repo_id": "engine", "token_count": 1145 }
295
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:collection' show IterableBase; import 'dart:math' as math; import 'dart:typed_data'; import 'package:ui/ui.dart' as ui; import 'conic.dart'; import 'cubic.dart'; import 'path_iterator.dart'; import 'path_ref.dart'; import 'path_utils.dart'; const double kEpsilon = 0.000000001; /// An iterable collection of [PathMetric] objects describing a [Path]. /// /// A [PathMetrics] object is created by using the [Path.computeMetrics] method, /// and represents the path as it stood at the time of the call. Subsequent /// modifications of the path do not affect the [PathMetrics] object. /// /// Each path metric corresponds to a segment, or contour, of a path. /// /// For example, a path consisting of a [Path.lineTo], a [Path.moveTo], and /// another [Path.lineTo] will contain two contours and thus be represented by /// two [PathMetric] objects. /// /// When iterating across a [PathMetrics]' contours, the [PathMetric] objects /// are only valid until the next one is obtained. class SurfacePathMetrics extends IterableBase<ui.PathMetric> implements ui.PathMetrics { SurfacePathMetrics(PathRef path, bool forceClosed) : _iterator = SurfacePathMetricIterator._(_SurfacePathMeasure(PathRef.shallowCopy(path), forceClosed)); final SurfacePathMetricIterator _iterator; @override Iterator<ui.PathMetric> get iterator => _iterator; } /// Maintains a single instance of computed segments for set of PathMetric /// objects exposed through iterator. class _SurfacePathMeasure { _SurfacePathMeasure(this._path, this.forceClosed) : // nextContour will increment this to the zero based index. 
_currentContourIndex = -1, _pathIterator = PathIterator(_path, forceClosed); final PathRef _path; final PathIterator _pathIterator; final List<_PathContourMeasure> _contours = <_PathContourMeasure>[]; // If the contour ends with a call to [Path.close] (which may // have been implied when using [Path.addRect]) final bool forceClosed; int _currentContourIndex; int get currentContourIndex => _currentContourIndex; double length(int contourIndex) { assert(contourIndex <= currentContourIndex, 'Iterator must be advanced before index $contourIndex can be used.'); return _contours[contourIndex].length; } /// Computes the position of hte current contour at the given offset, and the /// angle of the path at that point. /// /// For example, calling this method with a distance of 1.41 for a line from /// 0.0,0.0 to 2.0,2.0 would give a point 1.0,1.0 and the angle 45 degrees /// (but in radians). /// /// Returns null if the contour has zero [length]. /// /// The distance is clamped to the [length] of the current contour. ui.Tangent? getTangentForOffset(int contourIndex, double distance) { return _contours[contourIndex].getTangentForOffset(distance); } bool isClosed(int contourIndex) => _contours[contourIndex].isClosed; // Move to the next contour in the path. // // A path can have a next contour if [Path.moveTo] was called after drawing began. // Return true if one exists, or false. bool _nextContour() { final bool next = _nativeNextContour(); if (next) { _currentContourIndex++; } return next; } // Iterator index into next contour. int _verbIterIndex = 0; // Move to the next contour in the path. // // A path can have a next contour if [Path.moveTo] was called after drawing // began. Return true if one exists, or false. // // This is not exactly congruent with a regular [Iterator.moveNext]. // Typically, [Iterator.moveNext] should be called before accessing the // [Iterator.current]. 
In this case, the [PathMetric] is valid before // calling `_moveNext` - `_moveNext` should be called after the first // iteration is done instead of before. bool _nativeNextContour() { if (_verbIterIndex == _path.countVerbs()) { return false; } final _PathContourMeasure measure = _PathContourMeasure(_path, _pathIterator, forceClosed); _verbIterIndex = measure.verbEndIndex; _contours.add(measure); return true; } ui.Path extractPath(int contourIndex, double start, double end, {bool startWithMoveTo = true}) { return _contours[contourIndex].extractPath(start, end, startWithMoveTo); } } /// Builds segments for a single contour to measure distance, compute tangent /// and extract a sub path. class _PathContourMeasure { _PathContourMeasure(this.pathRef, PathIterator iter, this.forceClosed) { _verbEndIndex = _buildSegments(iter); } final PathRef pathRef; int _verbEndIndex = 0; final List<_PathSegment> _segments = <_PathSegment>[]; // Allocate buffer large enough for returning cubic curve chop result. // 2 floats for each coordinate x (start, end & control point 1 & 2). static final Float32List _buffer = Float32List(8); final bool forceClosed; double get length => _contourLength; bool get isClosed => _isClosed; int get verbEndIndex => _verbEndIndex; double _contourLength = 0.0; bool _isClosed = false; ui.Tangent? getTangentForOffset(double distance) { final int segmentIndex = _segmentIndexAtDistance(distance); if (segmentIndex == -1) { return null; } return _getPosTan(segmentIndex, distance); } // Returns segment at [distance]. int _segmentIndexAtDistance(double distance) { if (distance.isNaN) { return -1; } // Pin distance to legal range. if (distance < 0.0) { distance = 0.0; } else if (distance > _contourLength) { distance = _contourLength; } // Binary search through segments to find segment at distance. 
if (_segments.isEmpty) { return -1; } int lo = 0; int hi = _segments.length - 1; while (lo < hi) { final int mid = (lo + hi) >> 1; if (_segments[mid].distance < distance) { lo = mid + 1; } else { hi = mid; } } if (_segments[hi].distance < distance) { hi++; } return hi; } _SurfaceTangent _getPosTan(int segmentIndex, double distance) { final _PathSegment segment = _segments[segmentIndex]; // Compute distance to segment. Since distance is cumulative to find // t = 0..1 on the segment, we need to calculate start distance using prior // segment. final double startDistance = segmentIndex == 0 ? 0 : _segments[segmentIndex - 1].distance; final double totalDistance = segment.distance - startDistance; final double t = totalDistance < kEpsilon ? 0 : (distance - startDistance) / totalDistance; return segment.computeTangent(t); } ui.Path extractPath( double startDistance, double stopDistance, bool startWithMoveTo) { if (startDistance < 0) { startDistance = 0; } if (stopDistance > _contourLength) { stopDistance = _contourLength; } final ui.Path path = ui.Path(); if (startDistance > stopDistance || _segments.isEmpty) { return path; } final int startSegmentIndex = _segmentIndexAtDistance(startDistance); final int stopSegmentIndex = _segmentIndexAtDistance(stopDistance); if (startSegmentIndex == -1 || stopSegmentIndex == -1) { return path; } int currentSegmentIndex = startSegmentIndex; _PathSegment seg = _segments[currentSegmentIndex]; final _SurfaceTangent startTangent = _getPosTan(startSegmentIndex, startDistance); if (startWithMoveTo) { final ui.Offset startPosition = startTangent.position; path.moveTo(startPosition.dx, startPosition.dy); } final _SurfaceTangent stopTangent = _getPosTan(stopSegmentIndex, stopDistance); double startT = startTangent.t; final double stopT = stopTangent.t; if (startSegmentIndex == stopSegmentIndex) { // We only have a single segment that covers the complete distance. 
_outputSegmentTo(seg, startT, stopT, path); } else { do { // Write this segment from startT to end (t = 1.0). _outputSegmentTo(seg, startT, 1.0, path); // Move to next segment until we hit stop segment. ++currentSegmentIndex; seg = _segments[currentSegmentIndex]; startT = 0; } while (currentSegmentIndex != stopSegmentIndex); // Final write last segment from t=0.0 to t=stopT. _outputSegmentTo(seg, 0.0, stopT, path); } return path; } // Chops the segment at startT and endT and writes it to output [path]. void _outputSegmentTo( _PathSegment segment, double startT, double stopT, ui.Path path) { final List<double> points = segment.points; switch (segment.segmentType) { case SPath.kLineVerb: final double toX = (points[2] * stopT) + (points[0] * (1.0 - stopT)); final double toY = (points[3] * stopT) + (points[1] * (1.0 - stopT)); path.lineTo(toX, toY); case SPath.kCubicVerb: chopCubicBetweenT(points, startT, stopT, _buffer); path.cubicTo(_buffer[2], _buffer[3], _buffer[4], _buffer[5], _buffer[6], _buffer[7]); case SPath.kQuadVerb: _chopQuadBetweenT(points, startT, stopT, _buffer); path.quadraticBezierTo(_buffer[2], _buffer[3], _buffer[4], _buffer[5]); case SPath.kConicVerb: // Implement this once we start writing out conic segments. throw UnimplementedError(); default: throw UnsupportedError('Invalid segment type'); } } /// Builds segments from contour starting at verb [_verbStartIndex] and /// returns next contour verb index. 
int _buildSegments(PathIterator iter) { assert(_segments.isEmpty, '_buildSegments should be called once'); _isClosed = false; double distance = 0.0; bool haveSeenMoveTo = false; void lineToHandler(double fromX, double fromY, double x, double y) { final double dx = fromX - x; final double dy = fromY - y; final double prevDistance = distance; distance += math.sqrt(dx * dx + dy * dy); // As we accumulate distance, we have to check that the result of += // actually made it larger, since a very small delta might be > 0, but // still have no effect on distance (if distance >>> delta). if (distance > prevDistance) { _segments.add( _PathSegment(SPath.kLineVerb, distance, <double>[fromX, fromY, x, y]), ); } } int verb = 0; final Float32List points = Float32List(PathRefIterator.kMaxBufferSize); do { if (iter.peek() == SPath.kMoveVerb && haveSeenMoveTo) { break; } verb = iter.next(points); switch (verb) { case SPath.kMoveVerb: haveSeenMoveTo = true; case SPath.kLineVerb: assert(haveSeenMoveTo); lineToHandler(points[0], points[1], points[2], points[3]); case SPath.kCubicVerb: assert(haveSeenMoveTo); // Compute cubic curve distance. 
distance = _computeCubicSegments( points[0], points[1], points[2], points[3], points[4], points[5], points[6], points[7], distance, 0, _kMaxTValue, _segments); case SPath.kConicVerb: assert(haveSeenMoveTo); final double w = iter.conicWeight; final Conic conic = Conic(points[0], points[1], points[2], points[3], points[4], points[5], w); final List<ui.Offset> conicPoints = conic.toQuads(); final int len = conicPoints.length; double startX = conicPoints[0].dx; double startY = conicPoints[0].dy; for (int i = 1; i < len; i += 2) { final double p1x = conicPoints[i].dx; final double p1y = conicPoints[i].dy; final double p2x = conicPoints[i + 1].dx; final double p2y = conicPoints[i + 1].dy; distance = _computeQuadSegments( startX, startY, p1x, p1y, p2x, p2y, distance, 0, _kMaxTValue); startX = p2x; startY = p2y; } case SPath.kQuadVerb: assert(haveSeenMoveTo); // Compute quad curve distance. distance = _computeQuadSegments(points[0], points[1], points[2], points[3], points[4], points[5], distance, 0, _kMaxTValue); case SPath.kCloseVerb: _contourLength = distance; return iter.pathVerbIndex; default: break; } } while (verb != SPath.kDoneVerb); _contourLength = distance; return iter.pathVerbIndex; } static bool _tspanBigEnough(int tSpan) => (tSpan >> 10) != 0; static bool _cubicTooCurvy(double x0, double y0, double x1, double y1, double x2, double y2, double x3, double y3) { // Measure distance from start-end line at 1/3 and 2/3rds to control // points. If distance is less than _fTolerance we should continue // subdividing curve. Uses approx distance for speed. // // p1 = point 1/3rd between start,end points. final double p1x = (x0 * 2 / 3) + (x3 / 3); final double p1y = (y0 * 2 / 3) + (y3 / 3); if ((p1x - x1).abs() > _fTolerance) { return true; } if ((p1y - y1).abs() > _fTolerance) { return true; } // p2 = point 2/3rd between start,end points. 
final double p2x = (x0 / 3) + (x3 * 2 / 3); final double p2y = (y0 / 3) + (y3 * 2 / 3); if ((p2x - x2).abs() > _fTolerance) { return true; } if ((p2y - y2).abs() > _fTolerance) { return true; } return false; } // Recursively subdivides cubic and adds segments. static double _computeCubicSegments( double x0, double y0, double x1, double y1, double x2, double y2, double x3, double y3, double distance, int tMin, int tMax, List<_PathSegment> segments) { if (_tspanBigEnough(tMax - tMin) && _cubicTooCurvy(x0, y0, x1, y1, x2, y2, x3, y3)) { // Chop cubic into two halves (De Cateljau's algorithm) // See https://en.wikipedia.org/wiki/De_Casteljau%27s_algorithm final double abX = (x0 + x1) / 2; final double abY = (y0 + y1) / 2; final double bcX = (x1 + x2) / 2; final double bcY = (y1 + y2) / 2; final double cdX = (x2 + x3) / 2; final double cdY = (y2 + y3) / 2; final double abcX = (abX + bcX) / 2; final double abcY = (abY + bcY) / 2; final double bcdX = (bcX + cdX) / 2; final double bcdY = (bcY + cdY) / 2; final double abcdX = (abcX + bcdX) / 2; final double abcdY = (abcY + bcdY) / 2; final int tHalf = (tMin + tMax) >> 1; distance = _computeCubicSegments(x0, y0, abX, abY, abcX, abcY, abcdX, abcdY, distance, tMin, tHalf, segments); distance = _computeCubicSegments(abcdX, abcdY, bcdX, bcdY, cdX, cdY, x3, y3, distance, tHalf, tMax, segments); } else { final double dx = x0 - x3; final double dy = y0 - y3; final double startToEndDistance = math.sqrt(dx * dx + dy * dy); final double prevDistance = distance; distance += startToEndDistance; if (distance > prevDistance) { segments.add(_PathSegment(SPath.kCubicVerb, distance, <double>[x0, y0, x1, y1, x2, y2, x3, y3])); } } return distance; } static bool _quadTooCurvy( double x0, double y0, double x1, double y1, double x2, double y2) { // (a/4 + b/2 + c/4) - (a/2 + c/2) = -a/4 + b/2 - c/4 final double dx = (x1 / 2) - (x0 + x2) / 4; if (dx.abs() > _fTolerance) { return true; } final double dy = (y1 / 2) - (y0 + y2) / 4; if (dy.abs() > 
_fTolerance) { return true; } return false; } double _computeQuadSegments(double x0, double y0, double x1, double y1, double x2, double y2, double distance, int tMin, int tMax) { if (_tspanBigEnough(tMax - tMin) && _quadTooCurvy(x0, y0, x1, y1, x2, y2)) { final double p01x = (x0 + x1) / 2; final double p01y = (y0 + y1) / 2; final double p12x = (x1 + x2) / 2; final double p12y = (y1 + y2) / 2; final double p012x = (p01x + p12x) / 2; final double p012y = (p01y + p12y) / 2; final int tHalf = (tMin + tMax) >> 1; distance = _computeQuadSegments( x0, y0, p01x, p01y, p012x, p012y, distance, tMin, tHalf); distance = _computeQuadSegments( p012x, p012y, p12x, p12y, x2, y2, distance, tMin, tHalf); } else { final double dx = x0 - x2; final double dy = y0 - y2; final double startToEndDistance = math.sqrt(dx * dx + dy * dy); final double prevDistance = distance; distance += startToEndDistance; if (distance > prevDistance) { _segments.add(_PathSegment( SPath.kQuadVerb, distance, <double>[x0, y0, x1, y1, x2, y2])); } } return distance; } } /// Tracks iteration from one segment of a path to the next for measurement. class SurfacePathMetricIterator implements Iterator<ui.PathMetric> { SurfacePathMetricIterator._(this._pathMeasure); SurfacePathMetric? _pathMetric; final _SurfacePathMeasure _pathMeasure; @override SurfacePathMetric get current { if (_pathMetric == null) { throw RangeError( 'PathMetricIterator is not pointing to a PathMetric. This can happen in two situations:\n' '- The iteration has not started yet. If so, call "moveNext" to start iteration.\n' '- The iterator ran out of elements. If so, check that "moveNext" returns true prior to calling "current".'); } return _pathMetric!; } @override bool moveNext() { if (_pathMeasure._nextContour()) { _pathMetric = SurfacePathMetric._(_pathMeasure); return true; } _pathMetric = null; return false; } } // Maximum range value used in curve subdivision using Casteljau algorithm. 
const int _kMaxTValue = 0x3FFFFFFF;
// Distance at which we stop subdividing cubic and quadratic curves.
const double _fTolerance = 0.5;

/// Utilities for measuring a [Path] and extracting sub-paths.
///
/// Iterate over the object returned by [Path.computeMetrics] to obtain
/// [PathMetric] objects. Callers that want to randomly access elements or
/// iterate multiple times should use `path.computeMetrics().toList()`, since
/// [PathMetrics] does not memoize.
///
/// Once created, the metrics are only valid for the path as it was specified
/// when [Path.computeMetrics] was called. If additional contours are added or
/// any contours are updated, the metrics need to be recomputed. Previously
/// created metrics will still refer to a snapshot of the path at the time they
/// were computed, rather than to the actual metrics for the new mutations to
/// the path.
///
/// Implementation is based on
/// https://github.com/google/skia/blob/main/src/core/SkContourMeasure.cpp
/// to maintain consistency with native platforms.
class SurfacePathMetric implements ui.PathMetric {
  // Snapshots the measurements of the contour [_measure] currently points at.
  SurfacePathMetric._(this._measure)
      : length = _measure.length(_measure.currentContourIndex),
        isClosed = _measure.isClosed(_measure.currentContourIndex),
        contourIndex = _measure.currentContourIndex;

  /// Return the total length of the current contour.
  @override
  final double length;

  /// Whether the contour is closed.
  ///
  /// Returns true if the contour ends with a call to [Path.close] (which may
  /// have been implied when using methods like [Path.addRect]) or if
  /// `forceClosed` was specified as true in the call to [Path.computeMetrics].
  /// Returns false otherwise.
  @override
  final bool isClosed;

  /// The zero-based index of the contour.
  ///
  /// [Path] objects are made up of zero or more contours. The first contour is
  /// created once a drawing command (e.g. [Path.lineTo]) is issued. A
  /// [Path.moveTo] command after a drawing command may create a new contour,
  /// although it may not if optimizations are applied that determine the move
  /// command did not actually result in moving the pen.
  ///
  /// This property is only valid with reference to its original iterator and
  /// the contours of the path at the time the path's metrics were computed. If
  /// additional contours were added or existing contours updated, this metric
  /// will be invalid for the current state of the path.
  @override
  final int contourIndex;

  // Shared measurement engine; all queries delegate to it with [contourIndex].
  final _SurfacePathMeasure _measure;

  /// Computes the position of the current contour at the given offset, and the
  /// angle of the path at that point.
  ///
  /// For example, calling this method with a distance of 1.41 for a line from
  /// 0.0,0.0 to 2.0,2.0 would give a point 1.0,1.0 and the angle 45 degrees
  /// (but in radians).
  ///
  /// Returns null if the contour has zero [length].
  ///
  /// The distance is clamped to the [length] of the current contour.
  @override
  ui.Tangent? getTangentForOffset(double distance) {
    return _measure.getTangentForOffset(contourIndex, distance);
  }

  /// Given a start and end distance, return the intervening segment(s).
  ///
  /// `start` and `end` are pinned to legal values (0..[length])
  /// Begin the segment with a moveTo if `startWithMoveTo` is true.
  @override
  ui.Path extractPath(double start, double end,
      {bool startWithMoveTo = true}) {
    return _measure.extractPath(contourIndex, start, end,
        startWithMoveTo: startWithMoveTo);
  }

  @override
  String toString() => 'PathMetric';
}

// Given a vector dx, dy representing slope, normalize and return as [ui.Offset].
ui.Offset _normalizeSlope(double dx, double dy) {
  final double length = math.sqrt(dx * dx + dy * dy);
  // Guard against division by (near) zero for degenerate slopes.
  return length < kEpsilon
      ? ui.Offset.zero
      : ui.Offset(dx / length, dy / length);
}

// A [ui.Tangent] that also records the normalized parameter `t` at which the
// tangent was sampled.
class _SurfaceTangent extends ui.Tangent {
  const _SurfaceTangent(super.position, super.vector, this.t);

  // Normalized distance of tangent point from start of a contour.
  final double t;
}

// A flattened piece of a contour: a line/quad/cubic verb paired with the
// cumulative contour distance at its end point.
class _PathSegment {
  _PathSegment(this.segmentType, this.distance, this.points);

  // One of SPath.kLineVerb, SPath.kQuadVerb, SPath.kCubicVerb.
  final int segmentType;
  // Cumulative distance along the contour measured at the segment's end.
  final double distance;
  // Control points of the segment laid out as x,y pairs.
  final List<double> points;

  // Computes position and unit tangent at parameter `t` within this segment.
  _SurfaceTangent computeTangent(double t) {
    switch (segmentType) {
      case SPath.kLineVerb:
        // Simple line. Position is simple interpolation from start to end point.
        final double xAtDistance = (points[2] * t) + (points[0] * (1.0 - t));
        final double yAtDistance = (points[3] * t) + (points[1] * (1.0 - t));
        return _SurfaceTangent(ui.Offset(xAtDistance, yAtDistance),
            _normalizeSlope(points[2] - points[0], points[3] - points[1]), t);
      case SPath.kCubicVerb:
        return tangentForCubicAt(t, points[0], points[1], points[2], points[3],
            points[4], points[5], points[6], points[7]);
      case SPath.kQuadVerb:
        return tangentForQuadAt(t, points[0], points[1], points[2], points[3],
            points[4], points[5]);
      default:
        throw UnsupportedError('Invalid segment type');
    }
  }

  // Position and tangent of the quadratic at `t`.
  _SurfaceTangent tangentForQuadAt(double t, double x0, double y0, double x1,
      double y1, double x2, double y2) {
    assert(t >= 0 && t <= 1);
    final SkQuadCoefficients quadEval =
        SkQuadCoefficients(x0, y0, x1, y1, x2, y2);
    final ui.Offset pos = ui.Offset(quadEval.evalX(t), quadEval.evalY(t));
    // Derivative of quad curve is 2(b - a + (a - 2b + c)t).
    // If control point is at start or end point, this yields 0 for t = 0 and
    // t = 1. In that case use the quad end points to compute tangent instead
    // of derivative.
    final ui.Offset tangentVector =
        ((t == 0 && x0 == x1 && y0 == y1) || (t == 1 && x1 == x2 && y1 == y2))
            ? _normalizeSlope(x2 - x0, y2 - y0)
            : _normalizeSlope(2 * ((x2 - x0) * t + (x1 - x0)),
                2 * ((y2 - y0) * t + (y1 - y0)));
    return _SurfaceTangent(pos, tangentVector, t);
  }

  // Position and tangent of the cubic at `t`.
  _SurfaceTangent tangentForCubicAt(double t, double x0, double y0, double x1,
      double y1, double x2, double y2, double x3, double y3) {
    assert(t >= 0 && t <= 1);
    final _SkCubicCoefficients cubicEval =
        _SkCubicCoefficients(x0, y0, x1, y1, x2, y2, x3, y3);
    final ui.Offset pos = ui.Offset(cubicEval.evalX(t), cubicEval.evalY(t));
    // Derivative of cubic is zero when t = 0 or 1 and adjacent control point
    // is on the start or end point of curve. Use the other control point
    // to compute the tangent or if both control points are on end points
    // use end points for tangent.
    final bool tAtZero = t == 0;
    ui.Offset tangentVector;
    if ((tAtZero && x0 == x1 && y0 == y1) || (t == 1 && x2 == x3 && y2 == y3)) {
      double dx = tAtZero ? x2 - x0 : x3 - x1;
      double dy = tAtZero ? y2 - y0 : y3 - y1;
      if (dx == 0 && dy == 0) {
        dx = x3 - x0;
        dy = y3 - y0;
      }
      tangentVector = _normalizeSlope(dx, dy);
    } else {
      // Evaluate the derivative At^2 + Bt + C of the cubic directly.
      final double ax = x3 + (3 * (x1 - x2)) - x0;
      final double ay = y3 + (3 * (y1 - y2)) - y0;
      final double bx = 2 * (x2 - (2 * x1) + x0);
      final double by = 2 * (y2 - (2 * y1) + y0);
      final double cx = x1 - x0;
      final double cy = y1 - y0;
      final double tx = (ax * t + bx) * t + cx;
      final double ty = (ay * t + by) * t + cy;
      tangentVector = _normalizeSlope(tx, ty);
    }
    return _SurfaceTangent(pos, tangentVector, t);
  }
}

// Evaluates A * t^3 + B * t^2 + Ct + D = 0 for cubic curve.
class _SkCubicCoefficients {
  _SkCubicCoefficients(double x0, double y0, double x1, double y1, double x2,
      double y2, double x3, double y3)
      : ax = x3 + (3 * (x1 - x2)) - x0,
        ay = y3 + (3 * (y1 - y2)) - y0,
        bx = 3 * (x2 - (2 * x1) + x0),
        by = 3 * (y2 - (2 * y1) + y0),
        cx = 3 * (x1 - x0),
        cy = 3 * (y1 - y0),
        dx = x0,
        dy = y0;

  // Power-basis coefficients for the x and y cubic polynomials.
  final double ax, ay, bx, by, cx, cy, dx, dy;

  // Evaluates the x polynomial at `t` using Horner's method.
  double evalX(double t) => (((ax * t + bx) * t) + cx) * t + dx;

  // Evaluates the y polynomial at `t` using Horner's method.
  double evalY(double t) => (((ay * t + by) * t) + cy) * t + dy;
}

/// Chops quadratic curve at startT and stopT and writes result to buffer.
///
/// `points` holds the 3 control points as x,y pairs; the chopped sub-curve
/// (also 3 points) is written to the first 6 slots of `buffer`.
void _chopQuadBetweenT(
    List<double> points, double startT, double stopT, Float32List buffer) {
  assert(startT != 0 || stopT != 0);
  final double p2y = points[5];
  final double p0x = points[0];
  final double p0y = points[1];
  final double p1x = points[2];
  final double p1y = points[3];
  final double p2x = points[4];
  // If startT == 0 chop at end point and return curve.
  final bool chopStart = startT != 0;
  final double t = chopStart ? startT : stopT;

  // First chop of the curve at `t` (De Casteljau).
  final double ab1x = interpolate(p0x, p1x, t);
  final double ab1y = interpolate(p0y, p1y, t);
  final double bc1x = interpolate(p1x, p2x, t);
  final double bc1y = interpolate(p1y, p2y, t);
  final double abc1x = interpolate(ab1x, bc1x, t);
  final double abc1y = interpolate(ab1y, bc1y, t);
  if (!chopStart) {
    // Return left side of curve.
    buffer[0] = p0x;
    buffer[1] = p0y;
    buffer[2] = ab1x;
    buffer[3] = ab1y;
    buffer[4] = abc1x;
    buffer[5] = abc1y;
    return;
  }
  if (stopT == 1) {
    // Return right side of curve.
    buffer[0] = abc1x;
    buffer[1] = abc1y;
    buffer[2] = bc1x;
    buffer[3] = bc1y;
    buffer[4] = p2x;
    buffer[5] = p2y;
    return;
  }
  // We chopped at startT, now the right hand side of curve is at
  // abc1x, abc1y, bc1x, bc1y, p2x, p2y
  // Remap stopT into the right-hand side's parameter space and chop again.
  final double endT = (stopT - startT) / (1 - startT);
  final double ab2x = interpolate(abc1x, bc1x, endT);
  final double ab2y = interpolate(abc1y, bc1y, endT);
  final double bc2x = interpolate(bc1x, p2x, endT);
  final double bc2y = interpolate(bc1y, p2y, endT);
  final double abc2x = interpolate(ab2x, bc2x, endT);
  final double abc2y = interpolate(ab2y, bc2y, endT);
  buffer[0] = abc1x;
  buffer[1] = abc1y;
  buffer[2] = ab2x;
  buffer[3] = ab2y;
  buffer[4] = abc2x;
  buffer[5] = abc2y;
}
engine/lib/web_ui/lib/src/engine/html/path/path_metrics.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/html/path/path_metrics.dart", "repo_id": "engine", "token_count": 11186 }
296
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:typed_data';

import 'package:ui/ui.dart' as ui;

import '../../browser_detection.dart';
import '../../dom.dart';
import '../../html_image_codec.dart';
import '../../safe_browser_api.dart';
import '../../vector_math.dart';
import '../render_vertices.dart';
import 'vertex_shaders.dart';

/// HTML-renderer implementation of [ui.ImageShader].
///
/// Repeat-only tiling is served directly by Canvas2D `createPattern`; any
/// clamp/decal tiling falls back to rendering the tiles with a WebGL shader.
class EngineImageShader implements ui.ImageShader {
  EngineImageShader(ui.Image image, this.tileModeX, this.tileModeY,
      Float64List matrix4, this.filterQuality)
      : image = image as HtmlImage,
        matrix4 = Float32List.fromList(matrix4);

  final ui.TileMode tileModeX;
  final ui.TileMode tileModeY;
  // Local transform applied to the shader, converted to 32-bit floats.
  final Float32List matrix4;
  final ui.FilterQuality? filterQuality;
  final HtmlImage image;

  /// Whether fill pattern requires transform to shift tiling offset.
  bool requiresTileOffset = false;

  // Returns a Canvas2D fill/stroke style (a pattern) for this shader.
  //
  // `shaderBounds` is required (non-null) for the WebGL fallback path.
  Object createPaintStyle(DomCanvasRenderingContext2D context,
      ui.Rect? shaderBounds, double density) {
    /// Creates a canvas rendering context pattern based on image and tile modes.
    final ui.TileMode tileX = tileModeX;
    final ui.TileMode tileY = tileModeY;
    if (tileX != ui.TileMode.clamp && tileY != ui.TileMode.clamp) {
      // Pure repeat/mirror tiling: Canvas2D patterns can do this natively
      // (mirroring is pre-baked into the source image below).
      return context.createPattern(
          _resolveTiledImageSource(image, tileX, tileY)!,
          _tileModeToHtmlRepeatAttribute(tileX, tileY))!;
    } else {
      initWebGl();
      return _createGlShader(context, shaderBounds!, density);
    }
  }

  /// Converts tilemode to CSS repeat attribute.
  ///
  /// CSS and Canvas2D createPattern apis only support repeated tiles.
  /// For mirroring we create a new image with mirror builtin so both
  /// repeated and mirrored modes can be supported when applied to
  /// html element background-image or used by createPattern api.
  String _tileModeToHtmlRepeatAttribute(
      ui.TileMode tileModeX, ui.TileMode tileModeY) {
    final bool repeatX =
        tileModeX == ui.TileMode.repeated || tileModeX == ui.TileMode.mirror;
    final bool repeatY =
        tileModeY == ui.TileMode.repeated || tileModeY == ui.TileMode.mirror;
    return repeatX
        ? (repeatY ? 'repeat' : 'repeat-x')
        : (repeatY ? 'repeat-y' : 'no-repeat');
  }

  /// Tiles the image and returns an image or canvas element to be used as
  /// source for a repeated pattern.
  ///
  /// Other alternative was to create a webgl shader for the area and
  /// tile in the shader, but that will generate a much larger image footprint
  /// when the pattern is small. So we opt here for mirroring by
  /// redrawing the image 2 or 4 times into a new bitmap.
  Object? _resolveTiledImageSource(
      HtmlImage image, ui.TileMode tileX, ui.TileMode tileY) {
    // Mirroring doubles the tile along the mirrored axis.
    final int mirrorX = tileX == ui.TileMode.mirror ? 2 : 1;
    final int mirrorY = tileY == ui.TileMode.mirror ? 2 : 1;

    /// If we have no mirror, we can use image directly as pattern.
    if (mirrorX == 1 && mirrorY == 1) {
      return image.imgElement;
    }

    /// Create a new image by mirroring.
    final int imageWidth = image.width;
    final int imageHeight = image.height;
    final int newWidth = imageWidth * mirrorX;
    final int newHeight = imageHeight * mirrorY;
    final OffScreenCanvas offscreenCanvas =
        OffScreenCanvas(newWidth, newHeight);
    final Object renderContext = offscreenCanvas.getContext2d()!;
    for (int y = 0; y < mirrorY; y++) {
      for (int x = 0; x < mirrorX; x++) {
        final int flipX = x != 0 ? -1 : 1;
        final int flipY = y != 0 ? -1 : 1;

        /// To draw image flipped we set translate and scale and pass
        /// negative width/height to drawImage.
        if (flipX != 1 || flipY != 1) {
          scaleCanvas2D(renderContext, flipX, flipY);
        }
        drawImageCanvas2D(
          renderContext,
          image.imgElement,
          x == 0 ? 0 : -2 * imageWidth,
          y == 0 ? 0 : -2 * imageHeight,
        );
        if (flipX != 1 || flipY != 1) {
          /// Restore transform. This is faster than save/restore on context.
          // Scaling by -1 again undoes the flip applied above.
          scaleCanvas2D(renderContext, flipX, flipY);
        }
      }
    }
    // When using OffscreenCanvas and transferToImageBitmap is supported by
    // browser create ImageBitmap otherwise use more expensive canvas
    // allocation.
    if (OffScreenCanvas.supported &&
        offscreenCanvas.transferToImageBitmapSupported) {
      return offscreenCanvas.transferToImageBitmap();
    } else {
      final DomCanvasElement canvas =
          createDomCanvasElement(width: newWidth, height: newHeight);
      final DomCanvasRenderingContext2D ctx = canvas.context2D;
      offscreenCanvas.transferImage(ctx);
      return canvas;
    }
  }

  /// Creates an image with tiled/transformed images.
  // Renders the tiled image with a WebGL program into an offscreen bitmap,
  // then wraps the bitmap in a non-repeating Canvas2D pattern.
  DomCanvasPattern _createGlShader(DomCanvasRenderingContext2D? context,
      ui.Rect shaderBounds, double density) {
    final Matrix4 transform = Matrix4.fromFloat32List(matrix4);
    final double dpr = ui.window.devicePixelRatio;

    final int widthInPixels = (shaderBounds.width * dpr).ceil();
    final int heightInPixels = (shaderBounds.height * dpr).ceil();
    assert(widthInPixels > 0 && heightInPixels > 0);

    /// Render tiles into a bitmap and create a canvas pattern.
    final bool isWebGl2 = webGLVersion == WebGLVersion.webgl2;

    final String vertexShader = VertexShaders.writeTextureVertexShader();
    final String fragmentShader = FragmentShaders.writeTextureFragmentShader(
        isWebGl2, tileModeX, tileModeY);

    /// Render gradient into a bitmap and create a canvas pattern.
    final OffScreenCanvas offScreenCanvas =
        OffScreenCanvas(widthInPixels, heightInPixels);
    final GlContext gl = GlContext(offScreenCanvas);
    gl.setViewportSize(widthInPixels, heightInPixels);

    final GlProgram glProgram = gl.cacheProgram(vertexShader, fragmentShader);
    gl.useProgram(glProgram);

    // Two triangles covering the full shader rect (6 vertices, x/y each).
    const int vertexCount = 6;
    final Float32List vertices = Float32List(vertexCount * 2);
    final ui.Rect vRect =
        shaderBounds.translate(-shaderBounds.left, -shaderBounds.top);
    vertices[0] = vRect.left;
    vertices[1] = vRect.top;
    vertices[2] = vRect.right;
    vertices[3] = vRect.top;
    vertices[4] = vRect.right;
    vertices[5] = vRect.bottom;
    vertices[6] = vRect.right;
    vertices[7] = vRect.bottom;
    vertices[8] = vRect.left;
    vertices[9] = vRect.bottom;
    vertices[10] = vRect.left;
    vertices[11] = vRect.top;

    final Object positionAttributeLocation =
        gl.getAttributeLocation(glProgram.program, 'position');

    setupVertexTransforms(gl, glProgram, 0, 0, widthInPixels.toDouble(),
        heightInPixels.toDouble(), transform);

    requiresTileOffset = shaderBounds.left != 0 || shaderBounds.top != 0;

    /// To map from vertex position to texture coordinate in 0..1 range,
    /// we setup scalar to be used in vertex shader.
    setupTextureTransform(gl, glProgram, shaderBounds.left, shaderBounds.top,
        1.0 / image.width.toDouble(), 1.0 / image.height.toDouble());

    /// Setup geometry.
    ///
    /// Create buffer for vertex coordinates.
    final Object positionsBuffer = gl.createBuffer()!;

    Object? vao;
    if (isWebGl2) {
      /// Create a vertex array object.
      vao = gl.createVertexArray();

      /// Set vertex array object as active one.
      gl.bindVertexArray(vao!);
    }

    /// Turn on position attribute.
    gl.enableVertexAttribArray(positionAttributeLocation);

    /// Bind buffer as position buffer and transfer data.
    gl.bindArrayBuffer(positionsBuffer);
    bufferVertexData(gl, vertices, ui.window.devicePixelRatio);

    /// Setup data format for attribute.
    vertexAttribPointerGlContext(
      gl.glContext,
      positionAttributeLocation,
      2,
      gl.kFloat,
      false,
      0,
      0,
    );

    /// Copy image to the texture.
    final Object? texture = gl.createTexture();

    /// Texture units are a global array of references to the textures.
    /// By setting activeTexture, we associate the bound texture to a unit.
    /// Every time we call a texture function such as texImage2D with a target
    /// like TEXTURE_2D, it looks up texture by using the currently active
    /// unit.
    /// In our case we have a single texture unit 0.
    gl.activeTexture(gl.kTexture0);
    gl.bindTexture(gl.kTexture2D, texture);

    gl.texImage2D(gl.kTexture2D, 0, gl.kRGBA, gl.kRGBA, gl.kUnsignedByte,
        image.imgElement);

    if (isWebGl2) {
      /// Texture REPEAT and MIRROR is only supported in WebGL 2, for
      /// WebGL 1.0 we let shader compute correct uv coordinates.
      gl.texParameteri(gl.kTexture2D, gl.kTextureWrapS,
          tileModeToGlWrapping(gl, tileModeX));

      gl.texParameteri(gl.kTexture2D, gl.kTextureWrapT,
          tileModeToGlWrapping(gl, tileModeY));

      /// Mipmapping saves your texture in different resolutions
      /// so the graphics card can choose which resolution is optimal
      /// without artifacts.
      gl.generateMipmap(gl.kTexture2D);
    } else {
      /// For webgl1, if a texture is not mipmap complete, then the return
      /// value of a texel fetch is (0, 0, 0, 1), so we have to set
      /// minifying function to filter.
      /// See https://www.khronos.org/registry/webgl/specs/1.0.0/#5.13.8.
      gl.texParameteri(gl.kTexture2D, gl.kTextureWrapS, gl.kClampToEdge);
      gl.texParameteri(gl.kTexture2D, gl.kTextureWrapT, gl.kClampToEdge);
      gl.texParameteri(gl.kTexture2D, gl.kTextureMinFilter, gl.kLinear);
    }

    /// Finally render triangles.
    gl.clear();

    gl.drawTriangles(vertexCount, ui.VertexMode.triangles);

    if (vao != null) {
      gl.unbindVertexArray();
    }

    final Object? bitmapImage = gl.readPatternData(false);
    gl.bindArrayBuffer(null);
    gl.bindElementArrayBuffer(null);
    return context!.createPattern(bitmapImage!, 'no-repeat')!;
  }

  // Debug-only disposal flag; only mutated inside asserts.
  bool _disposed = false;

  @override
  bool get debugDisposed {
    late bool disposed;
    assert(() {
      disposed = _disposed;
      return true;
    }());
    return disposed;
  }

  @override
  void dispose() {
    assert(() {
      _disposed = true;
      return true;
    }());
    image.dispose();
  }
}
{ "file_path": "engine/lib/web_ui/lib/src/engine/html/shaders/image_shader.dart", "repo_id": "engine", "token_count": 3842 }
297
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:js_interop'; import 'package:meta/meta.dart'; import 'package:ui/ui.dart' as ui; import 'package:web_locale_keymap/web_locale_keymap.dart' as locale_keymap; import '../engine.dart' show registerHotRestartListener; import 'browser_detection.dart'; import 'dom.dart'; import 'key_map.g.dart'; import 'platform_dispatcher.dart'; import 'raw_keyboard.dart'; import 'semantics.dart'; typedef _VoidCallback = void Function(); typedef ValueGetter<T> = T Function(); typedef _ModifierGetter = bool Function(FlutterHtmlKeyboardEvent event); // Set this flag to true to see all the fired events in the console. const bool _debugLogKeyEvents = false; const int _kLocationLeft = 1; const int _kLocationRight = 2; final int _kLogicalAltLeft = kWebLogicalLocationMap['Alt']![_kLocationLeft]!; final int _kLogicalAltRight = kWebLogicalLocationMap['Alt']![_kLocationRight]!; final int _kLogicalControlLeft = kWebLogicalLocationMap['Control']![_kLocationLeft]!; final int _kLogicalControlRight = kWebLogicalLocationMap['Control']![_kLocationRight]!; final int _kLogicalShiftLeft = kWebLogicalLocationMap['Shift']![_kLocationLeft]!; final int _kLogicalShiftRight = kWebLogicalLocationMap['Shift']![_kLocationRight]!; final int _kLogicalMetaLeft = kWebLogicalLocationMap['Meta']![_kLocationLeft]!; final int _kLogicalMetaRight = kWebLogicalLocationMap['Meta']![_kLocationRight]!; final int _kPhysicalAltLeft = kWebToPhysicalKey['AltLeft']!; final int _kPhysicalAltRight = kWebToPhysicalKey['AltRight']!; final int kPhysicalControlLeft = kWebToPhysicalKey['ControlLeft']!; final int kPhysicalControlRight = kWebToPhysicalKey['ControlRight']!; final int _kPhysicalShiftLeft = kWebToPhysicalKey['ShiftLeft']!; final int _kPhysicalShiftRight = kWebToPhysicalKey['ShiftRight']!; final int _kPhysicalMetaLeft = kWebToPhysicalKey['MetaLeft']!; 
final int _kPhysicalMetaRight = kWebToPhysicalKey['MetaRight']!; // Map logical keys for modifier keys to the functions that can get their // modifier flag out of an event. final Map<int, _ModifierGetter> _kLogicalKeyToModifierGetter = <int, _ModifierGetter>{ _kLogicalAltLeft: (FlutterHtmlKeyboardEvent event) => event.altKey, _kLogicalAltRight: (FlutterHtmlKeyboardEvent event) => event.altKey, _kLogicalControlLeft: (FlutterHtmlKeyboardEvent event) => event.ctrlKey, _kLogicalControlRight: (FlutterHtmlKeyboardEvent event) => event.ctrlKey, _kLogicalShiftLeft: (FlutterHtmlKeyboardEvent event) => event.shiftKey, _kLogicalShiftRight: (FlutterHtmlKeyboardEvent event) => event.shiftKey, _kLogicalMetaLeft: (FlutterHtmlKeyboardEvent event) => event.metaKey, _kLogicalMetaRight: (FlutterHtmlKeyboardEvent event) => event.metaKey, }; const String _kPhysicalCapsLock = 'CapsLock'; const String _kLogicalDead = 'Dead'; const int _kWebKeyIdPlane = 0x1700000000; // Bits in a Flutter logical event to generate the logical key for dead keys. // // Logical keys for dead keys are generated by annotating physical keys with // modifiers (see `_getLogicalCode`). const int _kDeadKeyCtrl = 0x10000000; const int _kDeadKeyShift = 0x20000000; const int _kDeadKeyAlt = 0x40000000; const int _kDeadKeyMeta = 0x80000000; const ui.KeyData _emptyKeyData = ui.KeyData( type: ui.KeyEventType.down, timeStamp: Duration.zero, logical: 0, physical: 0, character: null, synthesized: false, ); typedef DispatchKeyData = bool Function(ui.KeyData data); /// Converts a floating number timestamp (in milliseconds) to a [Duration] by /// splitting it into two integer components: milliseconds + microseconds. 
Duration _eventTimeStampToDuration(num milliseconds) { final int ms = milliseconds.toInt(); final int micro = ((milliseconds - ms) * Duration.microsecondsPerMillisecond).toInt(); return Duration(milliseconds: ms, microseconds: micro); } // Returns a function that caches the result of `body`, ensuring that `body` is // only run once. ValueGetter<T> _cached<T>(ValueGetter<T> body) { T? cache; return () { return cache ??= body(); }; } class KeyboardBinding { KeyboardBinding._() { _addEventListener('keydown', (DomEvent domEvent) { final FlutterHtmlKeyboardEvent event = FlutterHtmlKeyboardEvent(domEvent as DomKeyboardEvent); _converter.handleEvent(event); RawKeyboard.instance?.handleHtmlEvent(domEvent); }); _addEventListener('keyup', (DomEvent domEvent) { final FlutterHtmlKeyboardEvent event = FlutterHtmlKeyboardEvent(domEvent as DomKeyboardEvent); _converter.handleEvent(event); RawKeyboard.instance?.handleHtmlEvent(domEvent); }); } /// The singleton instance of this object. static KeyboardBinding? get instance => _instance; static KeyboardBinding? _instance; static void initInstance() { if (_instance == null) { _instance = KeyboardBinding._(); assert(() { registerHotRestartListener(_instance!._reset); return true; }()); } } /// The platform as used in the initialization. /// /// By default it is derived from [operatingSystem]. 
@protected OperatingSystem get localPlatform { return operatingSystem; } KeyboardConverter get converter => _converter; late final KeyboardConverter _converter = KeyboardConverter( _onKeyData, localPlatform, ); final Map<String, DomEventListener> _listeners = <String, DomEventListener>{}; void _addEventListener(String eventName, DartDomEventListener handler) { JSVoid loggedHandler(DomEvent event) { if (_debugLogKeyEvents) { print(event.type); } if (EngineSemantics.instance.receiveGlobalEvent(event)) { handler(event); } } final DomEventListener wrappedHandler = createDomEventListener(loggedHandler); assert(!_listeners.containsKey(eventName)); _listeners[eventName] = wrappedHandler; domWindow.addEventListener(eventName, wrappedHandler, true); } /// Remove all active event listeners. void _clearListeners() { _listeners.forEach((String eventName, DomEventListener listener) { domWindow.removeEventListener(eventName, listener, true); }); _listeners.clear(); } bool _onKeyData(ui.KeyData data) { bool? result; // This callback is designed to be invoked synchronously. This is enforced // by `result`, which starts null and is asserted non-null when returned. EnginePlatformDispatcher.instance.invokeOnKeyData(data, (bool handled) { result = handled; }); return result!; } void _reset() { _clearListeners(); _converter.dispose(); } } class AsyncKeyboardDispatching { AsyncKeyboardDispatching({ required this.keyData, this.callback, }); final ui.KeyData keyData; final _VoidCallback? callback; } // A wrapper of [DomKeyboardEvent] with reduced methods delegated to the event // for the convenience of testing. class FlutterHtmlKeyboardEvent { FlutterHtmlKeyboardEvent(this._event); final DomKeyboardEvent _event; String get type => _event.type; String? get code => _event.code; String? get key => _event.key; int get keyCode => _event.keyCode.toInt(); bool? get repeat => _event.repeat; int? get location => _event.location.toInt(); num? 
get timeStamp => _event.timeStamp; bool get altKey => _event.altKey; bool get ctrlKey => _event.ctrlKey; bool get shiftKey => _event.shiftKey; bool get metaKey => _event.metaKey; bool get isComposing => _event.isComposing; bool getModifierState(String key) => _event.getModifierState(key); void preventDefault() => _event.preventDefault(); void stopPropagation() => _event.stopPropagation(); bool get defaultPrevented => _event.defaultPrevented; } // Reads [DomKeyboardEvent], then [dispatches ui.KeyData] accordingly. // // The events are read through [handleEvent], and dispatched through the // [dispatchKeyData] as given in the constructor. Some key data might be // dispatched asynchronously. class KeyboardConverter { KeyboardConverter(this.performDispatchKeyData, OperatingSystem platform) : onDarwin = platform == OperatingSystem.macOs || platform == OperatingSystem.iOs, _mapping = _mappingFromPlatform(platform); final DispatchKeyData performDispatchKeyData; /// Whether the current platform is macOS or iOS, which affects how certain key /// events are comprehended, including CapsLock and key guarding. final bool onDarwin; /// Maps logical keys from key event properties. final locale_keymap.LocaleKeymap _mapping; static locale_keymap.LocaleKeymap _mappingFromPlatform(OperatingSystem platform) { switch (platform) { case OperatingSystem.iOs: case OperatingSystem.macOs: return locale_keymap.LocaleKeymap.darwin(); case OperatingSystem.windows: return locale_keymap.LocaleKeymap.win(); case OperatingSystem.android: case OperatingSystem.linux: case OperatingSystem.unknown: return locale_keymap.LocaleKeymap.linux(); } } // The `performDispatchKeyData` wrapped with tracking logic. // // It is non-null only during `handleEvent`. All events during `handleEvent` // should be dispatched with `_dispatchKeyData`, others with // `performDispatchKeyData`. DispatchKeyData? 
_dispatchKeyData; bool _disposed = false; void dispose() { _disposed = true; clearPressedKeys(); } // On macOS, CapsLock behaves differently in that, a keydown event occurs when // the key is pressed and the light turns on, while a keyup event occurs when the // key is pressed and the light turns off. Flutter considers both events as // key down, and synthesizes immediate cancel events following them. The state // of "whether CapsLock is on" should be accessed by "activeLocks". bool _shouldSynthesizeCapsLockUp() { return onDarwin; } // ## About Key guards // // When the user enters a browser/system shortcut (e.g. `Cmd+Alt+i`) the // browser doesn't send a keyup for it. This puts the framework in a corrupt // state because it thinks the key was never released. // // To avoid this, we rely on the fact that browsers send repeat events // while the key is held down by the user. If we don't receive a repeat // event within a specific duration (_kKeydownCancelDurationMac) we assume // the user has released the key and we synthesize a keyup event. bool _shouldDoKeyGuard() { return onDarwin; } /// After a keydown is received, this is the duration we wait for a repeat event /// before we decide to synthesize a keyup event. /// /// This value is only for macOS, where the keyboard repeat delay goes up to /// 2000ms. static const Duration _kKeydownCancelDurationMac = Duration(milliseconds: 2000); static int _getPhysicalCode(String code) { if (code.isEmpty) { return _kWebKeyIdPlane; } return kWebToPhysicalKey[code] ?? (code.hashCode + _kWebKeyIdPlane); } static int _getModifierMask(FlutterHtmlKeyboardEvent event) { final bool altDown = event.altKey; final bool ctrlDown = event.ctrlKey; final bool shiftDown = event.shiftKey; final bool metaDown = event.metaKey; return (altDown ? _kDeadKeyAlt : 0) + (ctrlDown ? _kDeadKeyCtrl : 0) + (shiftDown ? _kDeadKeyShift : 0) + (metaDown ? 
_kDeadKeyMeta : 0); } // Whether `event.key` is a key name, such as "Shift", or otherwise a // character, such as "S" or "ж". // // A key name always has more than 1 code unit, and they are all alnums. // Character keys, however, can also have more than 1 code unit: en-in // maps KeyL to L̥/l̥. To resolve this, we check the second code unit. static bool _eventKeyIsKeyName(String key) { return key.length > 1 && key.codeUnitAt(0) < 0x7F && key.codeUnitAt(1) < 0x7F; } static int _deadKeyToLogicalKey(int physicalKey, FlutterHtmlKeyboardEvent event) { // 'Dead' is used to represent dead keys, such as a diacritic to the // following base letter (such as Option-e results in ´). // // Assume they can be told apart with the physical key and the modifiers // pressed. return physicalKey + _getModifierMask(event) + _kWebKeyIdPlane; } // Map from pressed physical key to corresponding pressed logical key. // // Multiple physical keys can be mapped to the same logical key, usually due // to positioned keys (left/right/numpad) or multiple keyboards. final Map<int, int> _pressingRecords = <int, int>{}; // Schedule the dispatching of an event in the future. The `callback` will // invoked before that. // // Returns a callback that cancels the schedule. Disposal of // `KeyBoardConverter` also cancels the shedule automatically. _VoidCallback _scheduleAsyncEvent(Duration duration, ValueGetter<ui.KeyData> getData, _VoidCallback callback) { bool canceled = false; Future<void>.delayed(duration).then<void>((_) { if (!canceled && !_disposed) { callback(); // This dispatch is performed asynchronously, therefore should not use // `_dispatchKeyData`. performDispatchKeyData(getData()); } }); return () { canceled = true; }; } final Map<int, _VoidCallback> _keyGuards = <int, _VoidCallback>{}; // Call this method on the down or repeated event of a non-modifier key. 
void _startGuardingKey(int physicalKey, int logicalKey, Duration currentTimeStamp) {
    if (!_shouldDoKeyGuard()) {
      return;
    }
    // Schedule a synthesized keyup in case the browser never sends one (e.g.
    // the key was consumed by a system shortcut). A later down/repeat event
    // replaces the guard; an up event cancels it (see [_stopGuardingKey]).
    final _VoidCallback cancelingCallback = _scheduleAsyncEvent(
      _kKeydownCancelDurationMac,
      () => ui.KeyData(
        timeStamp: currentTimeStamp + _kKeydownCancelDurationMac,
        type: ui.KeyEventType.up,
        physical: physicalKey,
        logical: logicalKey,
        character: null,
        synthesized: true,
      ),
      () {
        _pressingRecords.remove(physicalKey);
      }
    );
    _keyGuards.remove(physicalKey)?.call();
    _keyGuards[physicalKey] = cancelingCallback;
  }

  // Call this method on an up event of a non-modifier key.
  void _stopGuardingKey(int physicalKey) {
    _keyGuards.remove(physicalKey)?.call();
  }

  // Core conversion: translates one DOM event into Flutter key data, keeping
  // [_pressingRecords] consistent and synthesizing events where the browser's
  // event stream is incomplete. Must only be called via [handleEvent], which
  // sets up [_dispatchKeyData].
  void _handleEvent(FlutterHtmlKeyboardEvent event) {
    final Duration timeStamp = _eventTimeStampToDuration(event.timeStamp!);
    final String eventKey = event.key!;
    final int physicalKey = _getPhysicalCode(event.code!);
    final bool logicalKeyIsCharacter = !_eventKeyIsKeyName(eventKey);
    // The function body might or might not be evaluated. If the event is a key
    // up event, the resulting event will simply use the currently pressed
    // logical key.
    final ValueGetter<int> logicalKey = _cached<int>(() {
      // Mapped logical keys, such as ArrowLeft, Escape, AudioVolumeDown.
      final int? mappedLogicalKey = kWebToLogicalKey[eventKey];
      if (mappedLogicalKey != null) {
        return mappedLogicalKey;
      }
      // Keys with locations, such as modifier keys (Shift) or numpad keys.
      if (kWebLogicalLocationMap.containsKey(event.key)) {
        final int? result = kWebLogicalLocationMap[event.key!]?[event.location!];
        assert(result != null, 'Invalid modifier location: ${event.key}, ${event.location}');
        return result!;
      }
      // Locale-sensitive keys: letters, digits, and certain symbols.
      if (logicalKeyIsCharacter) {
        final int? localeLogicalKeys = _mapping.getLogicalKey(event.code, event.key, event.keyCode);
        if (localeLogicalKeys != null) {
          return localeLogicalKeys;
        }
      }
      // Dead keys that are not handled by the locale mapping.
      if (eventKey == _kLogicalDead) {
        return _deadKeyToLogicalKey(physicalKey, event);
      }
      // Minted logical keys.
      return eventKey.hashCode + _kWebKeyIdPlane;
    });

    assert(event.type == 'keydown' || event.type == 'keyup');
    final bool isPhysicalDown = event.type == 'keydown' ||
        // On macOS, both keydown and keyup events of CapsLock should be considered keydown,
        // followed by an immediate cancel event.
        (_shouldSynthesizeCapsLockUp() && event.code! == _kPhysicalCapsLock);

    final ui.KeyEventType type;

    if (_shouldSynthesizeCapsLockUp() && event.code! == _kPhysicalCapsLock) {
      // Case 1: Handle CapsLock on macOS
      //
      // On macOS, both keydown and keyup events of CapsLock are considered
      // keydown, followed by an immediate synchronized up event.
      _scheduleAsyncEvent(
        Duration.zero,
        () => ui.KeyData(
          timeStamp: timeStamp,
          type: ui.KeyEventType.up,
          physical: physicalKey,
          logical: logicalKey(),
          character: null,
          synthesized: true,
        ),
        () {
          _pressingRecords.remove(physicalKey);
        }
      );
      type = ui.KeyEventType.down;
    } else if (isPhysicalDown) {
      // Case 2: Handle key down of normal keys
      if (_pressingRecords[physicalKey] != null) {
        // This physical key is being pressed according to the record.
        if (event.repeat ?? false) {
          // A normal repeated key.
          type = ui.KeyEventType.repeat;
        } else {
          // A non-repeated key has been pressed that has the exact physical key as
          // a currently pressed one. This can mean one of the following cases:
          //
          // * Multiple keyboards are pressing keys with the same physical key.
          // * The up event was lost during a loss of focus.
          // * The previous down event was a system shortcut and its release
          //   was skipped (see `_startGuardingKey`,) such as holding Ctrl and
          //   pressing V then V, within the "guard window".
          //
          // The three cases can't be distinguished, and in the 3rd case, the
          // latter event must be dispatched as down events for the framework to
          // correctly recognize and choose to not to handle. Therefore, an up
          // event is synthesized before it.
          _dispatchKeyData!(ui.KeyData(
            timeStamp: timeStamp,
            type: ui.KeyEventType.up,
            physical: physicalKey,
            logical: _pressingRecords[physicalKey]!,
            character: null,
            synthesized: true,
          ));
          _pressingRecords.remove(physicalKey);
          type = ui.KeyEventType.down;
        }
      } else {
        // This physical key is not being pressed according to the record. It's a
        // normal down event, whether the system event is a repeat or not.
        type = ui.KeyEventType.down;
      }
    } else { // isPhysicalDown is false and not CapsLock
      // Case 3: Handle key up of normal keys
      if (_pressingRecords[physicalKey] == null) {
        // The physical key has been released before. It indicates multiple
        // keyboards pressed keys with the same physical key. Ignore the up event.
        event.preventDefault();
        return;
      }
      type = ui.KeyEventType.up;
    }

    // The _pressingRecords[physicalKey] might have been changed during the last
    // `if` clause.
    final int? lastLogicalRecord = _pressingRecords[physicalKey];
    final int? nextLogicalRecord;
    switch (type) {
      case ui.KeyEventType.down:
        assert(lastLogicalRecord == null);
        nextLogicalRecord = logicalKey();
      case ui.KeyEventType.up:
        assert(lastLogicalRecord != null);
        nextLogicalRecord = null;
      case ui.KeyEventType.repeat:
        assert(lastLogicalRecord != null);
        nextLogicalRecord = lastLogicalRecord;
    }
    if (nextLogicalRecord == null) {
      _pressingRecords.remove(physicalKey);
    } else {
      _pressingRecords[physicalKey] = nextLogicalRecord;
    }

    // After updating _pressingRecords, synchronize modifier states. The
    // `event.***Key` fields can be used to reduce some omitted modifier key
    // events. We can synthesize key up events if they are false. Key down
    // events can not be synthesized since we don't know which physical key they
    // represent.
    _kLogicalKeyToModifierGetter.forEach((int testeeLogicalKey, _ModifierGetter getModifier) {
      // Do not synthesize for the key of the current event. The event is the
      // ground truth.
      if (logicalKey() == testeeLogicalKey) {
        return;
      }
      if (_pressingRecords.containsValue(testeeLogicalKey) && !getModifier(event)) {
        _pressingRecords.removeWhere((int physicalKey, int logicalRecord) {
          if (logicalRecord != testeeLogicalKey) {
            return false;
          }
          _dispatchKeyData!(ui.KeyData(
            timeStamp: timeStamp,
            type: ui.KeyEventType.up,
            physical: physicalKey,
            logical: testeeLogicalKey,
            character: null,
            synthesized: true,
          ));
          return true;
        });
      }
    });

    // Update key guards
    if (logicalKeyIsCharacter) {
      if (nextLogicalRecord != null) {
        _startGuardingKey(physicalKey, logicalKey(), timeStamp);
      } else {
        _stopGuardingKey(physicalKey);
      }
    }

    final String? character = logicalKeyIsCharacter ? eventKey : null;
    final ui.KeyData keyData = ui.KeyData(
      timeStamp: timeStamp,
      type: type,
      physical: physicalKey,
      logical: lastLogicalRecord ?? logicalKey(),
      character: type == ui.KeyEventType.up ? null : character,
      synthesized: false,
    );
    final bool primaryHandled = _dispatchKeyData!(keyData);
    if (primaryHandled) {
      event.preventDefault();
    }
  }

  // Parse the HTML event, update states, and dispatch Flutter key data through
  // [performDispatchKeyData].
  //
  // * The method might dispatch some synthesized key data first to update states,
  //   results discarded.
  // * Then it dispatches exactly one non-synthesized key data that corresponds
  //   to the `event`, i.e. the primary key data. If this dispatching returns
  //   true, then `preventDefault` will be invoked on this event.
  // * Some key data might be synthesized to update states after the main key
  //   data. They are always scheduled asynchronously with results discarded.
  void handleEvent(FlutterHtmlKeyboardEvent event) {
    // Autofill on Chrome sends keyboard events whose key and code are null.
    if (event.key == null || event.code == null) {
      return;
    }
    assert(_dispatchKeyData == null);
    bool sentAnyEvents = false;
    _dispatchKeyData = (ui.KeyData data) {
      sentAnyEvents = true;
      return performDispatchKeyData(data);
    };
    try {
      _handleEvent(event);
    } finally {
      // Guarantee at least one dispatch per event so the framework's event
      // bookkeeping stays balanced, then tear down the per-event dispatcher.
      if (!sentAnyEvents) {
        _dispatchKeyData!(_emptyKeyData);
      }
      _dispatchKeyData = null;
    }
  }

  // Synthesize modifier keys up or down events only when the known pressing states are different.
  void synthesizeModifiersIfNeeded(
    bool altPressed,
    bool controlPressed,
    bool metaPressed,
    bool shiftPressed,
    num eventTimestamp,
  ) {
    _synthesizeModifierIfNeeded(
      _kPhysicalAltLeft,
      _kPhysicalAltRight,
      _kLogicalAltLeft,
      altPressed ? ui.KeyEventType.down : ui.KeyEventType.up,
      eventTimestamp,
    );
    _synthesizeModifierIfNeeded(
      kPhysicalControlLeft,
      kPhysicalControlRight,
      _kLogicalControlLeft,
      controlPressed ? ui.KeyEventType.down : ui.KeyEventType.up,
      eventTimestamp,
    );
    _synthesizeModifierIfNeeded(
      _kPhysicalMetaLeft,
      _kPhysicalMetaRight,
      _kLogicalMetaLeft,
      metaPressed ? ui.KeyEventType.down : ui.KeyEventType.up,
      eventTimestamp,
    );
    _synthesizeModifierIfNeeded(
      _kPhysicalShiftLeft,
      _kPhysicalShiftRight,
      _kLogicalShiftLeft,
      shiftPressed ? ui.KeyEventType.down : ui.KeyEventType.up,
      eventTimestamp,
    );
  }

  // Reconciles one left/right modifier pair against the desired state `type`.
  void _synthesizeModifierIfNeeded(
    int physicalLeft,
    int physicalRight,
    int logicalLeft,
    ui.KeyEventType type,
    num domTimestamp,
  ) {
    final bool leftPressed = _pressingRecords.containsKey(physicalLeft);
    final bool rightPressed = _pressingRecords.containsKey(physicalRight);
    final bool alreadyPressed = leftPressed || rightPressed;
    final bool synthesizeDown = type == ui.KeyEventType.down && !alreadyPressed;
    final bool synthesizeUp = type == ui.KeyEventType.up && alreadyPressed;

    // Synthesize a down event only for the left key if right and left are not pressed
    if (synthesizeDown) {
      _synthesizeKeyDownEvent(domTimestamp, physicalLeft, logicalLeft);
    }

    // Synthesize an up event for left key if pressed
    if (synthesizeUp && leftPressed) {
      final int knownLogicalKey = _pressingRecords[physicalLeft]!;
      _synthesizeKeyUpEvent(domTimestamp, physicalLeft, knownLogicalKey);
    }

    // Synthesize an up event for right key if pressed
    if (synthesizeUp && rightPressed) {
      final int knownLogicalKey = _pressingRecords[physicalRight]!;
      _synthesizeKeyUpEvent(domTimestamp, physicalRight, knownLogicalKey);
    }
  }

  void _synthesizeKeyDownEvent(num domTimestamp, int physical, int logical) {
    performDispatchKeyData(ui.KeyData(
      timeStamp: _eventTimeStampToDuration(domTimestamp),
      type: ui.KeyEventType.down,
      physical: physical,
      logical: logical,
      character: null,
      synthesized: true,
    ));
    // Update pressing state
    _pressingRecords[physical] = logical;
  }

  void _synthesizeKeyUpEvent(num domTimestamp, int physical, int logical) {
    performDispatchKeyData(ui.KeyData(
      timeStamp: _eventTimeStampToDuration(domTimestamp),
      type: ui.KeyEventType.up,
      physical: physical,
      logical: logical,
      character: null,
      synthesized: true,
    ));
    // Update pressing states
    _pressingRecords.remove(physical);
  }

  // Whether the given physical key is currently recorded as pressed.
  bool keyIsPressed(int physical) {
    return _pressingRecords.containsKey(physical);
  }

  void clearPressedKeys() {
    _pressingRecords.clear();
  }
}
engine/lib/web_ui/lib/src/engine/keyboard_binding.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/keyboard_binding.dart", "repo_id": "engine", "token_count": 9241 }
298
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import '../dom.dart';

/// Returns the name of a slot from its `viewId`.
///
/// This is used by the [renderContent] function of the [PlatformViewManager]
/// class, and the [createPlatformViewSlot] method below, to keep the slot name
/// attribute consistent across the framework.
String getPlatformViewSlotName(int viewId) => 'flt-pv-slot-$viewId';

/// Returns the value of the HTML "id" attribute set on the wrapper element that
/// hosts the platform view content.
String getPlatformViewDomId(int viewId) => 'flt-pv-$viewId';

/// Creates the HTML markup for the `slot` of a Platform View.
///
/// The resulting DOM for a `slot` looks like this:
///
/// ```html
/// <flt-platform-view-slot style="...">
///   <slot name="..." />
/// </flt-platform-view-slot>
/// ```
///
/// The inner `SLOT` tag is standard HTML to reveal an element that is rendered
/// elsewhere in the DOM. Its `name` attribute must match the value of the `slot`
/// attribute of the contents being revealed (see [getPlatformViewSlotName].)
///
/// The outer `flt-platform-view-slot` tag is a simple wrapper that the framework
/// can position/style as needed.
///
/// (When the framework accesses a `slot`, it's really accessing its wrapper
/// `flt-platform-view-slot` tag)
DomElement createPlatformViewSlot(int viewId) {
  // Build the inner <slot> first; its name ties it to the platform view
  // content rendered elsewhere in the DOM.
  final DomElement slot = domDocument.createElement('slot')
    ..setAttribute('name', getPlatformViewSlotName(viewId));

  // Wrap it so the framework has a stylable, positionable handle.
  return domDocument.createElement('flt-platform-view-slot')
    ..style.pointerEvents = 'auto'
    ..append(slot);
}
engine/lib/web_ui/lib/src/engine/platform_views/slots.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/platform_views/slots.dart", "repo_id": "engine", "token_count": 520 }
299
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import '../dom.dart';
import '../semantics.dart';
import '../util.dart';

/// Provides accessibility for dialogs.
///
/// See also [Role.dialog].
class Dialog extends PrimaryRoleManager {
  Dialog(SemanticsObject semanticsObject) : super.blank(PrimaryRole.dialog, semanticsObject) {
    // The following secondary roles can coexist with dialog. Generic `RouteName`
    // and `LabelAndValue` are not used by this role because when the dialog
    // names its own route an `aria-label` is used instead of `aria-describedby`.
    addFocusManagement();
    addLiveRegion();

    // When a route/dialog shows up it is expected that the screen reader will
    // focus on something inside it. There could be two possibilities:
    //
    // 1. The framework explicitly marked a node inside the dialog as focused
    //    via the `isFocusable` and `isFocused` flags. In this case, the node
    //    will request focus directly and there's nothing to do on top of that.
    // 2. No node inside the route takes focus explicitly. In this case, the
    //    expectation is to look through all nodes in traversal order and focus
    //    on the first one.
    semanticsObject.owner.addOneTimePostUpdateCallback(() {
      if (semanticsObject.owner.hasNodeRequestingFocus) {
        // Case 1: a node requested explicit focus. Nothing extra to do.
        return;
      }

      // Case 2: nothing requested explicit focus. Focus on the first descendant.
      _setDefaultFocus();
    });
  }

  // Walks descendants in traversal order and stops at the first one whose role
  // manager accepts default route focus.
  void _setDefaultFocus() {
    semanticsObject.visitDepthFirstInTraversalOrder((SemanticsObject node) {
      final PrimaryRoleManager? roleManager = node.primaryRole;
      if (roleManager == null) {
        return true;
      }

      // If the node does not take focus (e.g. focusing on it does not make
      // sense at all). Despair not. Keep looking.
      final bool didTakeFocus = roleManager.focusAsRouteDefault();
      return !didTakeFocus;
    });
  }

  @override
  void update() {
    super.update();

    // If semantic object corresponding to the dialog also provides the label
    // for itself it is applied as `aria-label`. See also [describeBy].
    if (semanticsObject.namesRoute) {
      final String? label = semanticsObject.label;
      assert(() {
        if (label == null || label.trim().isEmpty) {
          printWarning(
            'Semantic node ${semanticsObject.id} had both scopesRoute and '
            'namesRoute set, indicating a self-labelled dialog, but it is '
            'missing the label. A dialog should be labelled either by setting '
            'namesRoute on itself and providing a label, or by containing a '
            'child node with namesRoute that can describe it with its content.'
          );
        }
        return true;
      }());
      setAttribute('aria-label', label ?? '');
      setAriaRole('dialog');
    }
  }

  /// Sets the description of this dialog based on a [RouteName] descendant
  /// node, unless the dialog provides its own label.
  void describeBy(RouteName routeName) {
    if (semanticsObject.namesRoute) {
      // The dialog provides its own label, which takes precedence.
      return;
    }

    setAriaRole('dialog');
    setAttribute(
      'aria-describedby',
      routeName.semanticsObject.element.id,
    );
  }

  @override
  bool focusAsRouteDefault() {
    // Dialogs are the ones that look inside themselves to find elements to
    // focus on. It doesn't make sense to focus on the dialog itself.
    return false;
  }
}

/// Supplies a description for the nearest ancestor [Dialog].
class RouteName extends RoleManager {
  RouteName(
    SemanticsObject semanticsObject,
    PrimaryRoleManager owner,
  ) : super(Role.routeName, semanticsObject, owner);

  // Cached nearest ancestor dialog, resolved lazily after the DOM tree is
  // established (see [update]).
  Dialog? _dialog;

  @override
  void update() {
    // NOTE(yjbanov): this does not handle the case when the node structure
    // changes such that this RouteName is no longer attached to the same
    // dialog. While this is technically expressible using the semantics API,
    // after discussing this case with customers I decided that this case is not
    // interesting enough to support. A tree restructure like this is likely to
    // confuse screen readers, and it would add complexity to the engine's
    // semantics code. Since reparenting can be done with no update to either
    // the Dialog or RouteName we'd have to scan intermediate nodes for
    // structural changes.
    if (!semanticsObject.namesRoute) {
      return;
    }

    if (semanticsObject.isLabelDirty) {
      final Dialog? dialog = _dialog;
      if (dialog != null) {
        // Already attached to a dialog, just update the description.
        dialog.describeBy(this);
      } else {
        // Setting the label for the first time. Wait for the DOM tree to be
        // established, then find the nearest dialog and update its label.
        semanticsObject.owner.addOneTimePostUpdateCallback(() {
          if (!isDisposed) {
            _lookUpNearestAncestorDialog();
            _dialog?.describeBy(this);
          }
        });
      }
    }
  }

  void _lookUpNearestAncestorDialog() {
    SemanticsObject? parent = semanticsObject.parent;
    while (parent != null && parent.primaryRole?.role != PrimaryRole.dialog) {
      parent = parent.parent;
    }
    if (parent != null && parent.primaryRole?.role == PrimaryRole.dialog) {
      _dialog = parent.primaryRole! as Dialog;
    }
  }
}
engine/lib/web_ui/lib/src/engine/semantics/dialog.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/semantics/dialog.dart", "repo_id": "engine", "token_count": 1866 }
300
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:convert'; import 'dart:typed_data'; import 'message_codec.dart'; import 'serialization.dart'; /// [MessageCodec] with unencoded binary messages represented using [ByteData]. /// /// On Android, messages will be represented using `java.nio.ByteBuffer`. /// On iOS, messages will be represented using `NSData`. class BinaryCodec implements MessageCodec<ByteData> { /// Creates a [MessageCodec] with unencoded binary messages represented using /// [ByteData]. const BinaryCodec(); @override ByteData decodeMessage(ByteData message) => message; @override ByteData encodeMessage(ByteData message) => message; } /// [MessageCodec] with UTF-8 encoded String messages. /// /// On Android, messages will be represented using `java.util.String`. /// On iOS, messages will be represented using `NSString`. class StringCodec implements MessageCodec<String> { /// Creates a [MessageCodec] with UTF-8 encoded String messages. const StringCodec(); @override String decodeMessage(ByteData message) { return utf8.decoder.convert(message.buffer.asUint8List()); } @override ByteData encodeMessage(String message) { final Uint8List encoded = utf8.encode(message); return encoded.buffer.asByteData(); } } /// [MessageCodec] with UTF-8 encoded JSON messages. /// /// Supported messages are acyclic values of these forms: /// /// * null /// * [bool]s /// * [num]s /// * [String]s /// * [List]s of supported values /// * [Map]s from strings to supported values /// /// On Android, messages are decoded using the `org.json` library. /// On iOS, messages are decoded using the `NSJSONSerialization` library. /// In both cases, the use of top-level simple messages (null, [bool], [num], /// and [String]) is supported (by the Flutter SDK). 
The decoded value will be /// null/nil for null, and identical to what would result from decoding a /// singleton JSON array with a Boolean, number, or string value, and then /// extracting its single element. class JSONMessageCodec implements MessageCodec<dynamic> { // The codec serializes messages as defined by the JSON codec of the // dart:convert package. The format used must match the Android and // iOS counterparts. /// Creates a [MessageCodec] with UTF-8 encoded JSON messages. const JSONMessageCodec(); @override ByteData? encodeMessage(dynamic message) { if (message == null) { return null; } return const StringCodec().encodeMessage(json.encode(message)); } @override dynamic decodeMessage(ByteData? message) { if (message == null) { return message; } return json.decode(const StringCodec().decodeMessage(message)); } } /// [MethodCodec] with UTF-8 encoded JSON method calls and result envelopes. /// /// Values supported as method arguments and result payloads are those supported /// by [JSONMessageCodec]. class JSONMethodCodec implements MethodCodec { // The codec serializes method calls, and result envelopes as outlined below. // This format must match the Android and iOS counterparts. // // * Individual values are serialized as defined by the JSON codec of the // dart:convert package. // * Method calls are serialized as two-element maps, with the method name // keyed by 'method' and the arguments keyed by 'args'. // * Reply envelopes are serialized as either: // * one-element lists containing the successful result as its single // element, or // * three-element lists containing, in order, an error code String, an // error message String, and an error details value. /// Creates a [MethodCodec] with UTF-8 encoded JSON method calls and result /// envelopes. const JSONMethodCodec(); @override ByteData? 
encodeMethodCall(MethodCall call) { return const JSONMessageCodec().encodeMessage(<String, dynamic>{ 'method': call.method, 'args': call.arguments, }); } @override MethodCall decodeMethodCall(ByteData? methodCall) { final dynamic decoded = const JSONMessageCodec().decodeMessage(methodCall); if (decoded is! Map) { throw FormatException('Expected method call Map, got $decoded'); } final dynamic method = decoded['method']; final dynamic arguments = decoded['args']; if (method is String) { return MethodCall(method, arguments); } throw FormatException('Invalid method call: $decoded'); } @override dynamic decodeEnvelope(ByteData envelope) { final dynamic decoded = const JSONMessageCodec().decodeMessage(envelope); if (decoded is! List) { throw FormatException('Expected envelope List, got $decoded'); } if (decoded.length == 1) { return decoded[0]; } if (decoded.length == 3 && decoded[0] is String && (decoded[1] == null || decoded[1] is String)) { throw PlatformException( code: decoded[0] as String, message: decoded[1] as String?, details: decoded[2], ); } throw FormatException('Invalid envelope: $decoded'); } @override ByteData? encodeSuccessEnvelope(dynamic result) { return const JSONMessageCodec().encodeMessage(<dynamic>[result]); } @override ByteData? encodeErrorEnvelope( {required String code, String? message, dynamic details}) { return const JSONMessageCodec() .encodeMessage(<dynamic>[code, message, details]); } } /// [MessageCodec] using the Flutter standard binary encoding. /// /// Supported messages are acyclic values of these forms: /// /// * null /// * [bool]s /// * [num]s /// * [String]s /// * [Uint8List]s, [Int32List]s, [Int64List]s, [Float64List]s /// * [List]s of supported values /// * [Map]s from supported values to supported values /// /// Decoded values will use `List<dynamic>` and `Map<dynamic, dynamic>` /// irrespective of content. 
/// /// On Android, messages are represented as follows: /// /// * null: null /// * [bool]\: `java.lang.Boolean` /// * [int]\: `java.lang.Integer` for values that are representable using 32-bit /// two's complement; `java.lang.Long` otherwise /// * [double]\: `java.lang.Double` /// * [String]\: `java.lang.String` /// * [Uint8List]\: `byte[]` /// * [Int32List]\: `int[]` /// * [Int64List]\: `long[]` /// * [Float64List]\: `double[]` /// * [List]\: `java.util.ArrayList` /// * [Map]\: `java.util.HashMap` /// /// On iOS, messages are represented as follows: /// /// * null: nil /// * [bool]\: `NSNumber numberWithBool:` /// * [int]\: `NSNumber numberWithInt:` for values that are representable using /// 32-bit two's complement; `NSNumber numberWithLong:` otherwise /// * [double]\: `NSNumber numberWithDouble:` /// * [String]\: `NSString` /// * [Uint8List], [Int32List], [Int64List], [Float64List]\: /// `FlutterStandardTypedData` /// * [List]\: `NSArray` /// * [Map]\: `NSDictionary` /// /// The codec is extensible by subclasses overriding [writeValue] and /// [readValueOfType]. class StandardMessageCodec implements MessageCodec<dynamic> { /// Creates a [MessageCodec] using the Flutter standard binary encoding. const StandardMessageCodec(); // The codec serializes messages as outlined below. This format must // match the Android and iOS counterparts. // // * A single byte with one of the constant values below determines the // type of the value. // * The serialization of the value itself follows the type byte. // * Numbers are represented using the host endianness throughout. 
// * Lengths and sizes of serialized parts are encoded using an expanding // format optimized for the common case of small non-negative integers: // * values 0..253 inclusive using one byte with that value; // * values 254..2^16 inclusive using three bytes, the first of which is // 254, the next two the usual unsigned representation of the value; // * values 2^16+1..2^32 inclusive using five bytes, the first of which is // 255, the next four the usual unsigned representation of the value. // * null, true, and false have empty serialization; they are encoded directly // in the type byte (using _kNull, _kTrue, _kFalse) // * Integers representable in 32 bits are encoded using 4 bytes two's // complement representation. // * Larger integers are encoded using 8 bytes two's complement // representation. // * doubles are encoded using the IEEE 754 64-bit double-precision binary // format. // * Strings are encoded using their UTF-8 representation. First the length // of that in bytes is encoded using the expanding format, then follows the // UTF-8 encoding itself. // * Uint8Lists, Int32Lists, Int64Lists, and Float64Lists are encoded by first // encoding the list's element count in the expanding format, then the // smallest number of zero bytes needed to align the position in the full // message with a multiple of the number of bytes per element, then the // encoding of the list elements themselves, end-to-end with no additional // type information, using two's complement or IEEE 754 as applicable. // * Lists are encoded by first encoding their length in the expanding format, // then follows the recursive encoding of each element value, including the // type byte (Lists are assumed to be heterogeneous). // * Maps are encoded by first encoding their length in the expanding format, // then follows the recursive encoding of each key/value pair, including the // type byte for both (Maps are assumed to be heterogeneous). 
static const int _valueNull = 0; static const int _valueTrue = 1; static const int _valueFalse = 2; static const int _valueInt32 = 3; static const int _valueInt64 = 4; static const int _valueLargeInt = 5; static const int _valueFloat64 = 6; static const int _valueString = 7; static const int _valueUint8List = 8; static const int _valueInt32List = 9; static const int _valueInt64List = 10; static const int _valueFloat64List = 11; static const int _valueList = 12; static const int _valueMap = 13; @override ByteData? encodeMessage(dynamic message) { if (message == null) { return null; } final WriteBuffer buffer = WriteBuffer(); writeValue(buffer, message); return buffer.done(); } @override dynamic decodeMessage(ByteData? message) { if (message == null) { return null; } final ReadBuffer buffer = ReadBuffer(message); final dynamic result = readValue(buffer); if (buffer.hasRemaining) { throw const FormatException('Message corrupted'); } return result; } /// Writes [value] to [buffer] by first writing a type discriminator /// byte, then the value itself. /// /// This method may be called recursively to serialize container values. /// /// Type discriminators 0 through 127 inclusive are reserved for use by the /// base class. /// /// The codec can be extended by overriding this method, calling super /// for values that the extension does not handle. Type discriminators /// used by extensions must be greater than or equal to 128 in order to avoid /// clashes with any later extensions to the base class. void writeValue(WriteBuffer buffer, dynamic value) { if (value == null) { buffer.putUint8(_valueNull); } else if (value is bool) { buffer.putUint8(value ? _valueTrue : _valueFalse); // TODO(hterkelsen): upstream double/int if/else swap. 
} else if (value is double) { buffer.putUint8(_valueFloat64); buffer.putFloat64(value); } else if (value is int) { // ignore: avoid_double_and_int_checks if (-0x7fffffff - 1 <= value && value <= 0x7fffffff) { buffer.putUint8(_valueInt32); buffer.putInt32(value); } else { buffer.putUint8(_valueInt64); buffer.putInt64(value); } } else if (value is String) { buffer.putUint8(_valueString); final List<int> bytes = utf8.encode(value); writeSize(buffer, bytes.length); buffer.putUint8List(bytes as Uint8List); } else if (value is Uint8List) { buffer.putUint8(_valueUint8List); writeSize(buffer, value.length); buffer.putUint8List(value); } else if (value is Int32List) { buffer.putUint8(_valueInt32List); writeSize(buffer, value.length); buffer.putInt32List(value); } else if (value is Int64List) { buffer.putUint8(_valueInt64List); writeSize(buffer, value.length); buffer.putInt64List(value); } else if (value is Float64List) { buffer.putUint8(_valueFloat64List); writeSize(buffer, value.length); buffer.putFloat64List(value); } else if (value is List) { buffer.putUint8(_valueList); writeSize(buffer, value.length); for (final dynamic item in value) { writeValue(buffer, item); } } else if (value is Map) { buffer.putUint8(_valueMap); writeSize(buffer, value.length); value.forEach((dynamic key, dynamic value) { writeValue(buffer, key); writeValue(buffer, value); }); } else { throw ArgumentError.value(value); } } /// Reads a value from [buffer] as written by [writeValue]. /// /// This method is intended for use by subclasses overriding /// [readValueOfType]. dynamic readValue(ReadBuffer buffer) { if (!buffer.hasRemaining) { throw const FormatException('Message corrupted'); } final int type = buffer.getUint8(); return readValueOfType(type, buffer); } /// Reads a value of the indicated [type] from [buffer]. /// /// The codec can be extended by overriding this method, calling super /// for types that the extension does not handle. 
dynamic readValueOfType(int type, ReadBuffer buffer) { dynamic result; switch (type) { case _valueNull: result = null; case _valueTrue: result = true; case _valueFalse: result = false; case _valueInt32: result = buffer.getInt32(); case _valueInt64: result = buffer.getInt64(); case _valueLargeInt: // Flutter Engine APIs to use large ints have been deprecated on // 2018-01-09 and will be made unavailable. // TODO(mravn): remove this case once the APIs are unavailable. final int length = readSize(buffer); final String hex = utf8.decoder.convert(buffer.getUint8List(length)); result = int.parse(hex, radix: 16); case _valueFloat64: result = buffer.getFloat64(); case _valueString: final int length = readSize(buffer); result = utf8.decoder.convert(buffer.getUint8List(length)); case _valueUint8List: final int length = readSize(buffer); result = buffer.getUint8List(length); case _valueInt32List: final int length = readSize(buffer); result = buffer.getInt32List(length); case _valueInt64List: final int length = readSize(buffer); result = buffer.getInt64List(length); case _valueFloat64List: final int length = readSize(buffer); result = buffer.getFloat64List(length); case _valueList: final int length = readSize(buffer); result = <dynamic>[]; for (int i = 0; i < length; i++) { result.add(readValue(buffer)); } case _valueMap: final int length = readSize(buffer); result = <dynamic, dynamic>{}; for (int i = 0; i < length; i++) { result[readValue(buffer)] = readValue(buffer); } default: throw const FormatException('Message corrupted'); } return result; } /// Writes a non-negative 32-bit integer [value] to [buffer] /// using an expanding 1-5 byte encoding that optimizes for small values. /// /// This method is intended for use by subclasses overriding /// [writeValue]. 
void writeSize(WriteBuffer buffer, int value) {
  assert(0 <= value && value <= 0xffffffff);
  if (value < 254) {
    // Small sizes fit directly in a single byte.
    buffer.putUint8(value);
  } else if (value <= 0xffff) {
    // The marker byte 254 announces a 16-bit size in the next two bytes.
    buffer.putUint8(254);
    buffer.putUint16(value);
  } else {
    // The marker byte 255 announces a 32-bit size in the next four bytes.
    buffer.putUint8(255);
    buffer.putUint32(value);
  }
}

/// Reads a non-negative int from [buffer] as written by [writeSize].
///
/// This method is intended for use by subclasses overriding
/// [readValueOfType].
int readSize(ReadBuffer buffer) {
  final int value = buffer.getUint8();
  switch (value) {
    case 254:
      return buffer.getUint16();
    case 255:
      return buffer.getUint32();
    default:
      // Bytes below 254 encode the size directly.
      return value;
  }
}
}

/// [MethodCodec] using the Flutter standard binary encoding.
///
/// The standard codec is guaranteed to be compatible with the corresponding
/// standard codec for FlutterMethodChannels on the host platform. These parts
/// of the Flutter SDK are evolved synchronously.
///
/// Values supported as method arguments and result payloads are those supported
/// by [StandardMessageCodec].
class StandardMethodCodec implements MethodCodec {
  // The codec method calls, and result envelopes as outlined below. This format
  // must match the Android and iOS counterparts.
  //
  // * Individual values are encoded using [StandardMessageCodec].
  // * Method calls are encoded using the concatenation of the encoding
  //   of the method name String and the arguments value.
  // * Reply envelopes are encoded using first a single byte to distinguish the
  //   success case (0) from the error case (1). Then follows:
  //   * In the success case, the encoding of the result value.
  //   * In the error case, the concatenation of the encoding of the error code
  //     string, the error message string, and the error details value.

  /// Creates a [MethodCodec] using the Flutter standard binary encoding.
  const StandardMethodCodec([this.messageCodec = const StandardMessageCodec()]);

  /// The message codec that this method codec uses for encoding values.
  final StandardMessageCodec messageCodec;

  @override
  ByteData encodeMethodCall(MethodCall call) {
    final WriteBuffer buffer = WriteBuffer();
    // A method call is the method name followed by the arguments value.
    messageCodec.writeValue(buffer, call.method);
    messageCodec.writeValue(buffer, call.arguments);
    return buffer.done();
  }

  @override
  MethodCall decodeMethodCall(ByteData? methodCall) {
    final ReadBuffer buffer = ReadBuffer(methodCall!);
    final dynamic method = messageCodec.readValue(buffer);
    final dynamic arguments = messageCodec.readValue(buffer);
    // A well-formed call decodes to a String method name and consumes the
    // entire buffer.
    if (method is String && !buffer.hasRemaining) {
      return MethodCall(method, arguments);
    } else {
      throw const FormatException('Invalid method call');
    }
  }

  @override
  ByteData encodeSuccessEnvelope(dynamic result) {
    final WriteBuffer buffer = WriteBuffer();
    buffer.putUint8(0); // 0 marks the success case.
    messageCodec.writeValue(buffer, result);
    return buffer.done();
  }

  @override
  ByteData encodeErrorEnvelope(
      {required String code, String? message, dynamic details}) {
    final WriteBuffer buffer = WriteBuffer();
    buffer.putUint8(1); // 1 marks the error case.
    messageCodec.writeValue(buffer, code);
    messageCodec.writeValue(buffer, message);
    messageCodec.writeValue(buffer, details);
    return buffer.done();
  }

  @override
  dynamic decodeEnvelope(ByteData envelope) {
    // First byte is zero in success case, and non-zero otherwise.
    if (envelope.lengthInBytes == 0) {
      throw const FormatException('Expected envelope, got nothing');
    }
    final ReadBuffer buffer = ReadBuffer(envelope);
    if (buffer.getUint8() == 0) {
      return messageCodec.readValue(buffer);
    }
    // Error case: decode code/message/details and rethrow as a
    // PlatformException for the caller to handle.
    final dynamic errorCode = messageCodec.readValue(buffer);
    final dynamic errorMessage = messageCodec.readValue(buffer);
    final dynamic errorDetails = messageCodec.readValue(buffer);
    if (errorCode is String &&
        (errorMessage == null || errorMessage is String) &&
        !buffer.hasRemaining) {
      throw PlatformException(
        code: errorCode,
        message: errorMessage as String?,
        details: errorDetails,
      );
    } else {
      throw const FormatException('Invalid envelope');
    }
  }
}
engine/lib/web_ui/lib/src/engine/services/message_codecs.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/services/message_codecs.dart", "repo_id": "engine", "token_count": 6687 }
301
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'package:ui/src/engine.dart';
import 'package:ui/src/engine/skwasm/skwasm_impl.dart';
import 'package:ui/ui.dart' as ui;

/// A picture implementation backed by a native Skwasm picture handle.
class SkwasmPicture extends SkwasmObjectWrapper<RawPicture> implements ScenePicture {
  /// Wraps an existing native picture [handle].
  SkwasmPicture.fromHandle(PictureHandle handle) : super(handle, _registry);

  // Registry that disposes the native picture when the Dart wrapper is
  // garbage-collected without an explicit dispose() call.
  static final SkwasmFinalizationRegistry<RawPicture> _registry =
      SkwasmFinalizationRegistry<RawPicture>(pictureDispose);

  // Note: the "async" version simply wraps the synchronous rasterization.
  @override
  Future<ui.Image> toImage(int width, int height) async =>
      toImageSync(width, height);

  @override
  int get approximateBytesUsed => pictureApproximateBytesUsed(handle);

  @override
  void dispose() {
    super.dispose();
    // Notify the framework-level dispose hook, if one is installed.
    ui.Picture.onDispose?.call(this);
  }

  @override
  ui.Image toImageSync(int width, int height) =>
      SkwasmImage(imageCreateFromPicture(handle, width, height));

  @override
  ui.Rect get cullRect {
    return withStackScope((StackScope s) {
      // Allocate a temporary 4-float buffer in the stack scope, let the native
      // side fill it with the cull rect, then convert it to a Dart rect.
      final RawRect rect = s.allocFloatArray(4);
      pictureGetCullRect(handle, rect);
      return s.convertRectFromNative(rect);
    });
  }
}

/// A picture recorder backed by a native Skwasm recorder handle.
class SkwasmPictureRecorder extends SkwasmObjectWrapper<RawPictureRecorder>
    implements ui.PictureRecorder {
  SkwasmPictureRecorder() : super(pictureRecorderCreate(), _registry);

  // Registry that disposes the native recorder when the Dart wrapper is
  // garbage-collected without an explicit dispose() call.
  static final SkwasmFinalizationRegistry<RawPictureRecorder> _registry =
      SkwasmFinalizationRegistry<RawPictureRecorder>(pictureRecorderDispose);

  @override
  SkwasmPicture endRecording() {
    isRecording = false;

    final SkwasmPicture picture = SkwasmPicture.fromHandle(
      pictureRecorderEndRecording(handle)
    );

    // Notify the framework-level creation hook, if one is installed.
    ui.Picture.onCreate?.call(picture);

    return picture;
  }

  @override
  bool isRecording = true;
}
engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/picture.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/picture.dart", "repo_id": "engine", "token_count": 610 }
302
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:_wasm';
import 'dart:js_interop';

/// JS-interop binding for a `WebAssembly.Memory` object.
@JS()
@staticInterop
class WebAssemblyMemory {}

extension WebAssemblyMemoryExtension on WebAssemblyMemory {
  /// The backing `ArrayBuffer` of the wasm memory.
  external JSArrayBuffer get buffer;
}

/// JS-interop binding for the instantiated skwasm module.
@JS()
@staticInterop
class SkwasmInstance {}

extension SkwasmInstanceExtension on SkwasmInstance {
  /// The wasm memory used by the skwasm instance.
  external WebAssemblyMemory get wasmMemory;
}

/// The global skwasm instance, published on `window._flutter_skwasmInstance`.
@JS('window._flutter_skwasmInstance')
external SkwasmInstance get skwasmInstance;

/// Registers [function] via `skwasmWrapper.addFunction` and returns an integer
/// handle to it (presumably a wasm function-table index, as with Emscripten's
/// `addFunction` — TODO confirm against the skwasm JS wrapper).
@pragma('wasm:import', 'skwasmWrapper.addFunction')
external WasmI32 addFunction(WasmFuncRef function);
engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/skwasm_module.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/skwasm_module.dart", "repo_id": "engine", "token_count": 215 }
303
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'package:ui/ui.dart' as ui;

import '../dom.dart';
import '../html/bitmap_canvas.dart';
import '../profiler.dart';
import '../view_embedder/style_manager.dart';
import 'layout_fragmenter.dart';
import 'layout_service.dart';
import 'paint_service.dart';
import 'paragraph.dart';
import 'word_breaker.dart';

/// The Unicode "object replacement character" (U+FFFC), used to represent
/// placeholders in the paragraph's plain text.
final String placeholderChar = String.fromCharCode(0xFFFC);

/// A paragraph made up of a flat list of text spans and placeholders.
///
/// [CanvasParagraph] doesn't use a DOM element to represent the structure of
/// its spans and styles. Instead it uses a flat list of [ParagraphSpan]
/// objects.
class CanvasParagraph implements ui.Paragraph {
  /// This class is created by the engine, and should not be instantiated
  /// or extended directly.
  ///
  /// To create a [CanvasParagraph] object, use a [CanvasParagraphBuilder].
  CanvasParagraph(
    this.spans, {
    required this.paragraphStyle,
    required this.plainText,
    required this.canDrawOnCanvas,
  }) : assert(spans.isNotEmpty);

  /// The flat list of spans that make up this paragraph.
  final List<ParagraphSpan> spans;

  /// General styling information for this paragraph.
  final EngineParagraphStyle paragraphStyle;

  /// The full textual content of the paragraph.
  final String plainText;

  /// Whether this paragraph can be drawn on a bitmap canvas.
  ///
  /// Some text features cannot be rendered into a 2D canvas and must use HTML,
  /// such as font features and text decorations.
  final bool canDrawOnCanvas;

  @override
  double get width => _layoutService.width;

  @override
  double get height => _layoutService.height;

  @override
  double get longestLine => _layoutService.longestLine?.width ?? 0.0;

  @override
  double get minIntrinsicWidth => _layoutService.minIntrinsicWidth;

  @override
  double get maxIntrinsicWidth => _layoutService.maxIntrinsicWidth;

  @override
  double get alphabeticBaseline => _layoutService.alphabeticBaseline;

  @override
  double get ideographicBaseline => _layoutService.ideographicBaseline;

  @override
  bool get didExceedMaxLines => _layoutService.didExceedMaxLines;

  List<ParagraphLine> get lines => _layoutService.lines;

  /// The bounds that contain the text painted inside this paragraph.
  ui.Rect get paintBounds => _layoutService.paintBounds;

  /// Whether this paragraph has been laid out or not.
  bool isLaidOut = false;

  ui.ParagraphConstraints? _lastUsedConstraints;

  late final TextLayoutService _layoutService = TextLayoutService(this);
  late final TextPaintService _paintService = TextPaintService(this);

  @override
  void layout(ui.ParagraphConstraints constraints) {
    // Laying out again with identical constraints is a no-op.
    if (constraints == _lastUsedConstraints) {
      return;
    }

    late Stopwatch stopwatch;
    if (Profiler.isBenchmarkMode) {
      stopwatch = Stopwatch()..start();
    }
    _layoutService.performLayout(constraints);
    if (Profiler.isBenchmarkMode) {
      stopwatch.stop();
      Profiler.instance
          .benchmark('text_layout', stopwatch.elapsedMicroseconds.toDouble());
    }

    isLaidOut = true;
    _lastUsedConstraints = constraints;
    // Any cached DOM representation is invalidated by a re-layout.
    _cachedDomElement = null;
  }

  // TODO(mdebbar): Returning true means we always require a bitmap canvas. Revisit
  // this decision once `CanvasParagraph` is fully implemented.
  /// Whether this paragraph is doing arbitrary paint operations that require
  /// a bitmap canvas, and can't be expressed in a DOM canvas.
  bool get hasArbitraryPaint => true;

  /// Paints this paragraph instance on a [canvas] at the given [offset].
  void paint(BitmapCanvas canvas, ui.Offset offset) {
    _paintService.paint(canvas, offset);
  }

  DomElement? _cachedDomElement;

  /// Returns a DOM element that represents the entire paragraph and its
  /// children.
  ///
  /// Generates a new DOM element on every invocation.
  DomElement toDomElement() {
    assert(isLaidOut);
    final DomElement? domElement = _cachedDomElement;
    if (domElement == null) {
      // NOTE(review): the first invocation hands out the cached element itself,
      // while later invocations return clones of it — confirm callers don't
      // mutate the returned element.
      return _cachedDomElement ??= _createDomElement();
    }
    return domElement.cloneNode(true) as DomElement;
  }

  DomElement _createDomElement() {
    final DomElement rootElement = domDocument.createElement('flt-paragraph');

    // 1. Set paragraph-level styles.
    final DomCSSStyleDeclaration cssStyle = rootElement.style;
    cssStyle
      ..position = 'absolute'
      // Prevent the browser from doing any line breaks in the paragraph. We want
      // to have full control of the paragraph layout.
      ..whiteSpace = 'pre';

    // 2. Append all spans to the paragraph.
    for (int i = 0; i < lines.length; i++) {
      final ParagraphLine line = lines[i];
      for (final LayoutFragment fragment in line.fragments) {
        if (fragment.isPlaceholder) {
          continue;
        }

        final String text = fragment.getText(this);
        if (text.isEmpty) {
          continue;
        }

        final DomElement spanElement = domDocument.createElement('flt-span');
        if (fragment.textDirection == ui.TextDirection.rtl) {
          spanElement.setAttribute('dir', 'rtl');
        }
        applyTextStyleToElement(element: spanElement, style: fragment.style);
        _positionSpanElement(spanElement, line, fragment);

        spanElement.appendText(text);
        rootElement.append(spanElement);
      }
    }

    return rootElement;
  }

  @override
  List<ui.TextBox> getBoxesForPlaceholders() {
    return _layoutService.getBoxesForPlaceholders();
  }

  @override
  List<ui.TextBox> getBoxesForRange(
    int start,
    int end, {
    ui.BoxHeightStyle boxHeightStyle = ui.BoxHeightStyle.tight,
    ui.BoxWidthStyle boxWidthStyle = ui.BoxWidthStyle.tight,
  }) {
    return _layoutService.getBoxesForRange(start, end, boxHeightStyle, boxWidthStyle);
  }

  @override
  ui.TextPosition getPositionForOffset(ui.Offset offset) {
    return _layoutService.getPositionForOffset(offset);
  }

  @override
  ui.GlyphInfo? getClosestGlyphInfoForOffset(ui.Offset offset) =>
      _layoutService.getClosestGlyphInfo(offset);

  @override
  ui.GlyphInfo? getGlyphInfoAt(int codeUnitOffset) {
    final int? lineNumber = _findLine(codeUnitOffset, 0, numberOfLines);
    if (lineNumber == null) {
      return null;
    }
    final ParagraphLine line = lines[lineNumber];
    final ui.TextRange? range = line.getCharacterRangeAt(codeUnitOffset);
    if (range == null) {
      return null;
    }
    assert(line.overlapsWith(range.start, range.end));
    for (final LayoutFragment fragment in line.fragments) {
      if (fragment.overlapsWith(range.start, range.end)) {
        // If the grapheme cluster is split into multiple fragments (which really
        // shouldn't happen but currently if they are in different TextSpans they
        // don't combine), use the layout box of the first base character as its
        // layout box has a better chance to be not that far-off.
        final ui.TextBox textBox = fragment.toTextBox(start: range.start, end: range.end);
        return ui.GlyphInfo(textBox.toRect(), range, textBox.direction);
      }
    }
    assert(false, 'This should not be reachable.');
    return null;
  }

  @override
  ui.TextRange getWordBoundary(ui.TextPosition position) {
    final int characterPosition;
    switch (position.affinity) {
      case ui.TextAffinity.upstream:
        characterPosition = position.offset - 1;
      case ui.TextAffinity.downstream:
        characterPosition = position.offset;
    }
    final int start = WordBreaker.prevBreakIndex(plainText, characterPosition + 1);
    final int end = WordBreaker.nextBreakIndex(plainText, characterPosition);
    return ui.TextRange(start: start, end: end);
  }

  @override
  ui.TextRange getLineBoundary(ui.TextPosition position) {
    if (lines.isEmpty) {
      return ui.TextRange.empty;
    }
    final int? lineNumber = getLineNumberAt(position.offset);
    // Fallback to the last line for backward compatibility.
    final ParagraphLine line = lineNumber != null ? lines[lineNumber] : lines.last;
    return ui.TextRange(start: line.startIndex, end: line.endIndex - line.trailingNewlines);
  }

  @override
  List<EngineLineMetrics> computeLineMetrics() {
    return lines.map((ParagraphLine line) => line.lineMetrics).toList();
  }

  @override
  EngineLineMetrics? getLineMetricsAt(int lineNumber) {
    return 0 <= lineNumber && lineNumber < lines.length
      ? lines[lineNumber].lineMetrics
      : null;
  }

  @override
  int get numberOfLines => lines.length;

  @override
  int? getLineNumberAt(int codeUnitOffset) => _findLine(codeUnitOffset, 0, lines.length);

  /// Binary-searches the half-open range of lines [startLine, endLine) for the
  /// line containing [codeUnitOffset].
  ///
  /// Returns null if the offset falls outside the searched lines, or past the
  /// visible end of the found line.
  int? _findLine(int codeUnitOffset, int startLine, int endLine) {
    assert(endLine <= lines.length);
    final bool isOutOfBounds = endLine <= startLine
      || codeUnitOffset < lines[startLine].startIndex
      || (endLine < numberOfLines && lines[endLine].startIndex <= codeUnitOffset);
    if (isOutOfBounds) {
      return null;
    }

    if (endLine == startLine + 1) {
      assert(lines[startLine].startIndex <= codeUnitOffset);
      assert(endLine == numberOfLines || codeUnitOffset < lines[endLine].startIndex);
      return codeUnitOffset >= lines[startLine].visibleEndIndex ? null : startLine;
    }
    // endLine >= startLine + 2 thus we have
    // startLine + 1 <= midIndex <= endLine - 1
    final int midIndex = (startLine + endLine) ~/ 2;
    return _findLine(codeUnitOffset, midIndex, endLine) ?? _findLine(codeUnitOffset, startLine, midIndex);
  }

  bool _disposed = false;

  @override
  void dispose() {
    // TODO(dnfield): It should be possible to clear resources here, but would
    // need refcounting done on any surfaces/pictures holding references to this
    // object.
    _disposed = true;
  }

  @override
  bool get debugDisposed {
    bool? result;
    assert(() {
      result = _disposed;
      return true;
    }());
    if (result != null) {
      return result!;
    }
    // Fixed typo in the error message ("avialalbe" -> "available").
    throw StateError('Paragraph.debugDisposed is only available when asserts are enabled.');
  }
}

/// Positions [element] to match the painted box of [fragment] within [line].
void _positionSpanElement(DomElement element, ParagraphLine line, LayoutFragment fragment) {
  final ui.Rect boxRect = fragment.toPaintingTextBox().toRect();
  element.style
    ..position = 'absolute'
    ..top = '${boxRect.top}px'
    ..left = '${boxRect.left}px'
    // This is needed for space-only spans that are used to justify the paragraph.
    ..width = '${boxRect.width}px'
    // Makes sure the baseline of each span is positioned as expected.
    ..lineHeight = '${boxRect.height}px';
}

/// Represents a span in the paragraph.
///
/// Instead of keeping spans and styles in a tree hierarchy like the framework
/// does, we flatten the structure and resolve/merge all the styles from parent
/// nodes.
///
/// These spans are stored as a flat list in the paragraph object.
class ParagraphSpan {
  /// Creates a [ParagraphSpan] with the given [style], representing the span of
  /// text in the range between [start] and [end].
  ParagraphSpan({
    required this.style,
    required this.start,
    required this.end,
  });

  /// The resolved style of the span.
  final EngineTextStyle style;

  /// The index of the beginning of the range of text represented by this span.
  final int start;

  /// The index of the end of the range of text represented by this span.
  final int end;
}

/// A span that represents an inline placeholder rather than text.
class PlaceholderSpan extends ParagraphPlaceholder implements ParagraphSpan {
  PlaceholderSpan(
    this.style,
    this.start,
    this.end,
    double width,
    double height,
    ui.PlaceholderAlignment alignment, {
    required double baselineOffset,
    required ui.TextBaseline baseline,
  }) : super(
          width,
          height,
          alignment,
          baselineOffset: baselineOffset,
          baseline: baseline,
        );

  @override
  final EngineTextStyle style;

  @override
  final int start;

  @override
  final int end;
}

/// Represents a node in the tree of text styles pushed to [ui.ParagraphBuilder].
///
/// The [ui.ParagraphBuilder.pushText] and [ui.ParagraphBuilder.pop] operations
/// represent the entire tree of styles in the paragraph. In our implementation,
/// we don't need to keep the entire tree structure in memory. At any point in
/// time, we only need a stack of nodes that represent the current branch in the
/// tree. The items in the stack are [StyleNode] objects.
abstract class StyleNode {
  /// Create a child for this style node.
  ///
  /// We are not creating a tree structure, hence there's no need to keep track
  /// of the children.
  ChildStyleNode createChild(EngineTextStyle style) {
    return ChildStyleNode(parent: this, style: style);
  }

  // Caches the result of [resolveStyle] so the ancestor chain is only walked
  // once per node.
  EngineTextStyle? _cachedStyle;

  /// Generates the final text style to be applied to the text span.
  ///
  /// The resolved text style is equivalent to the entire ascendent chain of
  /// parent style nodes.
  EngineTextStyle resolveStyle() {
    final EngineTextStyle? style = _cachedStyle;
    if (style == null) {
      return _cachedStyle ??= EngineTextStyle(
        color: _color,
        decoration: _decoration,
        decorationColor: _decorationColor,
        decorationStyle: _decorationStyle,
        decorationThickness: _decorationThickness,
        fontWeight: _fontWeight,
        fontStyle: _fontStyle,
        textBaseline: _textBaseline,
        fontFamily: _fontFamily,
        fontFamilyFallback: _fontFamilyFallback,
        fontFeatures: _fontFeatures,
        fontVariations: _fontVariations,
        fontSize: _fontSize,
        letterSpacing: _letterSpacing,
        wordSpacing: _wordSpacing,
        height: _height,
        leadingDistribution: _leadingDistribution,
        locale: _locale,
        background: _background,
        foreground: _foreground,
        shadows: _shadows,
      );
    }
    return style;
  }

  ui.Color? get _color;
  ui.TextDecoration? get _decoration;
  ui.Color? get _decorationColor;
  ui.TextDecorationStyle? get _decorationStyle;
  double? get _decorationThickness;
  ui.FontWeight? get _fontWeight;
  ui.FontStyle? get _fontStyle;
  ui.TextBaseline? get _textBaseline;
  String get _fontFamily;
  List<String>? get _fontFamilyFallback;
  List<ui.FontFeature>? get _fontFeatures;
  List<ui.FontVariation>? get _fontVariations;
  double get _fontSize;
  double? get _letterSpacing;
  double? get _wordSpacing;
  double? get _height;
  ui.TextLeadingDistribution? get _leadingDistribution;
  ui.Locale? get _locale;
  ui.Paint? get _background;
  ui.Paint? get _foreground;
  List<ui.Shadow>? get _shadows;
}

/// Represents a non-root [StyleNode].
class ChildStyleNode extends StyleNode {
  /// Creates a [ChildStyleNode] with the given [parent] and [style].
  ChildStyleNode({required this.parent, required this.style});

  /// The parent node to be used when resolving text styles.
  final StyleNode parent;

  /// The text style associated with the current node.
  final EngineTextStyle style;

  // Read these properties from the TextStyle associated with this node. If the
  // property isn't defined, go to the parent node.

  @override
  ui.Color? get _color => style.color ?? (_foreground == null ? parent._color : null);

  @override
  ui.TextDecoration? get _decoration => style.decoration ?? parent._decoration;

  @override
  ui.Color? get _decorationColor => style.decorationColor ?? parent._decorationColor;

  @override
  ui.TextDecorationStyle? get _decorationStyle => style.decorationStyle ?? parent._decorationStyle;

  @override
  double? get _decorationThickness => style.decorationThickness ?? parent._decorationThickness;

  @override
  ui.FontWeight? get _fontWeight => style.fontWeight ?? parent._fontWeight;

  @override
  ui.FontStyle? get _fontStyle => style.fontStyle ?? parent._fontStyle;

  @override
  ui.TextBaseline? get _textBaseline => style.textBaseline ?? parent._textBaseline;

  @override
  List<String>? get _fontFamilyFallback => style.fontFamilyFallback ?? parent._fontFamilyFallback;

  @override
  List<ui.FontFeature>? get _fontFeatures => style.fontFeatures ?? parent._fontFeatures;

  @override
  List<ui.FontVariation>? get _fontVariations => style.fontVariations ?? parent._fontVariations;

  @override
  double get _fontSize => style.fontSize ?? parent._fontSize;

  @override
  double? get _letterSpacing => style.letterSpacing ?? parent._letterSpacing;

  @override
  double? get _wordSpacing => style.wordSpacing ?? parent._wordSpacing;

  @override
  double? get _height => style.height ?? parent._height;

  @override
  ui.TextLeadingDistribution? get _leadingDistribution => style.leadingDistribution ?? parent._leadingDistribution;

  @override
  ui.Locale? get _locale => style.locale ?? parent._locale;

  @override
  ui.Paint? get _background => style.background ?? parent._background;

  @override
  ui.Paint? get _foreground => style.foreground ?? parent._foreground;

  @override
  List<ui.Shadow>? get _shadows => style.shadows ?? parent._shadows;

  // Font family is slightly different from the other properties above. It's
  // never null on the TextStyle object, so we use `isFontFamilyProvided` to
  // check if font family is defined or not.
  @override
  String get _fontFamily => style.isFontFamilyProvided ? style.fontFamily : parent._fontFamily;
}

/// The root style node for the paragraph.
///
/// The style of the root is derived from a [ui.ParagraphStyle] and is the root
/// style for all spans in the paragraph.
class RootStyleNode extends StyleNode {
  /// Creates a [RootStyleNode] from [paragraphStyle].
  RootStyleNode(this.paragraphStyle);

  /// The style of the paragraph being built.
  final EngineParagraphStyle paragraphStyle;

  @override
  ui.Color? get _color => null;

  @override
  ui.TextDecoration? get _decoration => null;

  @override
  ui.Color? get _decorationColor => null;

  @override
  ui.TextDecorationStyle? get _decorationStyle => null;

  @override
  double? get _decorationThickness => null;

  @override
  ui.FontWeight? get _fontWeight => paragraphStyle.fontWeight;

  @override
  ui.FontStyle? get _fontStyle => paragraphStyle.fontStyle;

  @override
  ui.TextBaseline? get _textBaseline => null;

  @override
  String get _fontFamily => paragraphStyle.fontFamily ?? StyleManager.defaultFontFamily;

  @override
  List<String>? get _fontFamilyFallback => null;

  @override
  List<ui.FontFeature>? get _fontFeatures => null;

  @override
  List<ui.FontVariation>? get _fontVariations => null;

  @override
  double get _fontSize => paragraphStyle.fontSize ?? StyleManager.defaultFontSize;

  @override
  double? get _letterSpacing => null;

  @override
  double? get _wordSpacing => null;

  @override
  double? get _height => paragraphStyle.height;

  @override
  ui.TextLeadingDistribution? get _leadingDistribution => null;

  @override
  ui.Locale? get _locale => paragraphStyle.locale;

  @override
  ui.Paint? get _background => null;

  @override
  ui.Paint? get _foreground => null;

  @override
  List<ui.Shadow>? get _shadows => null;
}

/// Builds a [CanvasParagraph] containing text with the given styling
/// information.
class CanvasParagraphBuilder implements ui.ParagraphBuilder {
  /// Creates a [CanvasParagraphBuilder] object, which is used to create a
  /// [CanvasParagraph].
  CanvasParagraphBuilder(EngineParagraphStyle style)
      : _paragraphStyle = style,
        _rootStyleNode = RootStyleNode(style);

  final StringBuffer _plainTextBuffer = StringBuffer();
  final EngineParagraphStyle _paragraphStyle;

  final List<ParagraphSpan> _spans = <ParagraphSpan>[];
  final List<StyleNode> _styleStack = <StyleNode>[];

  final RootStyleNode _rootStyleNode;
  StyleNode get _currentStyleNode => _styleStack.isEmpty
      ? _rootStyleNode
      : _styleStack[_styleStack.length - 1];

  @override
  int get placeholderCount => _placeholderCount;
  int _placeholderCount = 0;

  @override
  List<double> get placeholderScales => _placeholderScales;
  final List<double> _placeholderScales = <double>[];

  @override
  void addPlaceholder(
    double width,
    double height,
    ui.PlaceholderAlignment alignment, {
    double scale = 1.0,
    double? baselineOffset,
    ui.TextBaseline? baseline,
  }) {
    // Require a baseline to be specified if using a baseline-based alignment.
    assert(!(alignment == ui.PlaceholderAlignment.aboveBaseline ||
            alignment == ui.PlaceholderAlignment.belowBaseline ||
            alignment == ui.PlaceholderAlignment.baseline) || baseline != null);

    // The placeholder occupies one object-replacement character in the plain
    // text.
    final int start = _plainTextBuffer.length;
    _plainTextBuffer.write(placeholderChar);
    final int end = _plainTextBuffer.length;

    final EngineTextStyle style = _currentStyleNode.resolveStyle();
    _updateCanDrawOnCanvas(style);

    _placeholderCount++;
    _placeholderScales.add(scale);
    _spans.add(PlaceholderSpan(
      style,
      start,
      end,
      width * scale,
      height * scale,
      alignment,
      baselineOffset: (baselineOffset ?? height) * scale,
      baseline: baseline ?? ui.TextBaseline.alphabetic,
    ));
  }

  @override
  void pushStyle(ui.TextStyle style) {
    _styleStack.add(_currentStyleNode.createChild(style as EngineTextStyle));
  }

  @override
  void pop() {
    if (_styleStack.isNotEmpty) {
      _styleStack.removeLast();
    }
  }

  bool _canDrawOnCanvas = true;

  @override
  void addText(String text) {
    final int start = _plainTextBuffer.length;
    _plainTextBuffer.write(text);
    final int end = _plainTextBuffer.length;

    final EngineTextStyle style = _currentStyleNode.resolveStyle();
    _updateCanDrawOnCanvas(style);

    _spans.add(ParagraphSpan(style: style, start: start, end: end));
  }

  // Downgrades the paragraph to HTML rendering when [style] uses features a 2D
  // canvas cannot express (letter spacing, decorations, font features or
  // variations).
  void _updateCanDrawOnCanvas(EngineTextStyle style) {
    if (!_canDrawOnCanvas) {
      return;
    }

    final double? letterSpacing = style.letterSpacing;
    if (letterSpacing != null && letterSpacing != 0.0) {
      _canDrawOnCanvas = false;
      return;
    }

    final ui.TextDecoration? decoration = style.decoration;
    if (decoration != null && decoration != ui.TextDecoration.none) {
      _canDrawOnCanvas = false;
      return;
    }

    final List<ui.FontFeature>? fontFeatures = style.fontFeatures;
    if (fontFeatures != null && fontFeatures.isNotEmpty) {
      _canDrawOnCanvas = false;
      return;
    }

    final List<ui.FontVariation>? fontVariations = style.fontVariations;
    if (fontVariations != null && fontVariations.isNotEmpty) {
      _canDrawOnCanvas = false;
      return;
    }
  }

  @override
  CanvasParagraph build() {
    if (_spans.isEmpty) {
      // In case `addText` and `addPlaceholder` were never called.
      //
      // We want the paragraph to always have a non-empty list of spans to match
      // the expectations of the [LayoutFragmenter].
      _spans.add(
        ParagraphSpan(style: _rootStyleNode.resolveStyle(), start: 0, end: 0),
      );
    }

    return CanvasParagraph(
      _spans,
      paragraphStyle: _paragraphStyle,
      plainText: _plainTextBuffer.toString(),
      canDrawOnCanvas: _canDrawOnCanvas,
    );
  }
}
{ "file_path": "engine/lib/web_ui/lib/src/engine/text/canvas_paragraph.dart", "repo_id": "engine", "token_count": 7828 }
304
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:js_interop';

import '../dom.dart';
import 'text_editing.dart';

/// Provides default functionality for listening to HTML composition events.
///
/// A class with this mixin generally calls [determineCompositionState] in order to update
/// an [EditingState] with new composition values; namely, [EditingState.composingBaseOffset]
/// and [EditingState.composingExtentOffset].
///
/// A class with this mixin should call [addCompositionEventHandlers] on initialization, and
/// [removeCompositionEventHandlers] on deinitialization.
///
/// See also:
///
///   * [EditingState], the state of a text field that [CompositionAwareMixin] updates.
///   * [DefaultTextEditingStrategy], the primary implementer of [CompositionAwareMixin].
mixin CompositionAwareMixin {
  /// The name of the HTML composition event type that triggers on starting a composition.
  static const String _kCompositionStart = 'compositionstart';

  /// The name of the browser composition event type that triggers on updating a composition.
  static const String _kCompositionUpdate = 'compositionupdate';

  /// The name of the browser composition event type that triggers on ending a composition.
  static const String _kCompositionEnd = 'compositionend';

  // The listeners are created lazily and kept so the same instances can be
  // passed to both addEventListener and removeEventListener.
  late final DomEventListener _compositionStartListener =
      createDomEventListener(_handleCompositionStart);
  late final DomEventListener _compositionUpdateListener =
      createDomEventListener(_handleCompositionUpdate);
  late final DomEventListener _compositionEndListener =
      createDomEventListener(_handleCompositionEnd);

  /// The currently composing text in the `domElement`.
  ///
  /// Will be null if composing just started, ended, or no composing is being done.
  /// This member is kept up to date provided compositionEventHandlers are in place,
  /// so it is safe to reference it to get the current composingText.
  String? composingText;

  /// Attaches the composition event listeners to [domElement].
  void addCompositionEventHandlers(DomHTMLElement domElement) {
    domElement.addEventListener(_kCompositionStart, _compositionStartListener);
    domElement.addEventListener(_kCompositionUpdate, _compositionUpdateListener);
    domElement.addEventListener(_kCompositionEnd, _compositionEndListener);
  }

  /// Detaches the composition event listeners previously attached by
  /// [addCompositionEventHandlers].
  void removeCompositionEventHandlers(DomHTMLElement domElement) {
    domElement.removeEventListener(_kCompositionStart, _compositionStartListener);
    domElement.removeEventListener(_kCompositionUpdate, _compositionUpdateListener);
    domElement.removeEventListener(_kCompositionEnd, _compositionEndListener);
  }

  JSVoid _handleCompositionStart(DomEvent event) {
    composingText = null;
  }

  JSVoid _handleCompositionUpdate(DomEvent event) {
    if (domInstanceOfString(event, 'CompositionEvent')) {
      composingText = (event as DomCompositionEvent).data;
    }
  }

  JSVoid _handleCompositionEnd(DomEvent event) {
    composingText = null;
  }

  /// Returns a copy of [editingState] with composing offsets derived from
  /// [composingText], or [editingState] unchanged when there is no active
  /// composition or not enough information to compute the offsets.
  EditingState determineCompositionState(EditingState editingState) {
    if (editingState.extentOffset == null || composingText == null || editingState.text == null) {
      return editingState;
    }
    // The composing region ends at the selection extent and spans the length
    // of the currently composing text.
    final int composingBase = editingState.extentOffset! - composingText!.length;
    if (composingBase < 0) {
      return editingState;
    }
    return editingState.copyWith(
      composingBaseOffset: composingBase,
      composingExtentOffset: composingBase + composingText!.length,
    );
  }
}
{ "file_path": "engine/lib/web_ui/lib/src/engine/text_editing/composition_aware_mixin.dart", "repo_id": "engine", "token_count": 986 }
305
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:async';

import 'package:ui/src/engine.dart';

/// Encapsulates view objects, and their optional metadata indexed by `viewId`.
class FlutterViewManager {
  FlutterViewManager(this._dispatcher);

  final EnginePlatformDispatcher _dispatcher;

  /// Maps a `viewId` to its registered [EngineFlutterView].
  final Map<int, EngineFlutterView> _viewData = <int, EngineFlutterView>{};

  /// Maps a `viewId` to the (optional) [JsFlutterViewOptions] it was
  /// registered with.
  final Map<int, JsFlutterViewOptions> _jsViewOptions =
      <int, JsFlutterViewOptions>{};

  // Backs [onViewCreated].
  final StreamController<int> _onViewCreatedController =
      StreamController<int>.broadcast(sync: true);
  // Backs [onViewDisposed].
  final StreamController<int> _onViewDisposedController =
      StreamController<int>.broadcast(sync: true);

  /// A stream of viewIds that will fire when a view is created.
  Stream<int> get onViewCreated => _onViewCreatedController.stream;

  /// A stream of viewIds that will fire when a view is disposed.
  Stream<int> get onViewDisposed => _onViewDisposedController.stream;

  /// Exposes all the [EngineFlutterView]s registered so far.
  Iterable<EngineFlutterView> get views => _viewData.values;

  /// Retrieves an [EngineFlutterView] by its `viewId`, or null if unknown.
  EngineFlutterView? operator [](int viewId) => _viewData[viewId];

  /// Creates an [EngineFlutterView] from [jsViewOptions] and registers it.
  EngineFlutterView createAndRegisterView(
    JsFlutterViewOptions jsViewOptions,
  ) {
    final EngineFlutterView newView = EngineFlutterView(
      _dispatcher,
      jsViewOptions.hostElement,
      viewConstraints: jsViewOptions.viewConstraints,
    );
    registerView(newView, jsViewOptions: jsViewOptions);
    return newView;
  }

  /// Stores a [view] and its (optional) [jsViewOptions], indexed by `viewId`.
  ///
  /// Returns the registered [view].
  EngineFlutterView registerView(
    EngineFlutterView view, {
    JsFlutterViewOptions? jsViewOptions,
  }) {
    final int viewId = view.viewId;
    assert(!_viewData.containsKey(viewId)); // Adding the same view twice?

    _viewData[viewId] = view;
    if (jsViewOptions != null) {
      _jsViewOptions[viewId] = jsViewOptions;
    }
    _onViewCreatedController.add(viewId);

    return view;
  }

  /// Disposes of the view identified by [viewId] and unregisters it.
  ///
  /// Returns the options the view was registered with (if any), or null when
  /// [viewId] is unknown.
  JsFlutterViewOptions? disposeAndUnregisterView(int viewId) {
    final EngineFlutterView? view = _viewData[viewId];
    if (view == null) {
      return null;
    }
    final JsFlutterViewOptions? options = unregisterView(viewId);
    view.dispose();
    return options;
  }

  /// Un-registers [viewId].
  ///
  /// Returns its [JsFlutterViewOptions] (if any).
  JsFlutterViewOptions? unregisterView(int viewId) {
    _viewData.remove(viewId);
    final JsFlutterViewOptions? jsViewOptions = _jsViewOptions.remove(viewId);
    _onViewDisposedController.add(viewId);
    return jsViewOptions;
  }

  /// Returns the [JsFlutterViewOptions] associated to `viewId` (if any).
  ///
  /// This is useful for plugins and apps that need this information, and can
  /// be exposed through a method in ui_web.
  JsFlutterViewOptions? getOptions(int viewId) => _jsViewOptions[viewId];

  /// Returns the [viewId] if [rootElement] corresponds to any of the [views].
  int? viewIdForRootElement(DomElement rootElement) {
    for (final EngineFlutterView view in views) {
      if (view.dom.rootElement == rootElement) {
        return view.viewId;
      }
    }
    return null;
  }

  void dispose() {
    // `toList()` snapshots the keys so the loop doesn't concurrently modify
    // _viewData while iterating over it.
    _viewData.keys.toList().forEach(disposeAndUnregisterView);
    // Let listeners receive the unregistration events from the loop above,
    // then close the streams.
    _onViewCreatedController.close();
    _onViewDisposedController.close();
  }
}
engine/lib/web_ui/lib/src/engine/view_embedder/flutter_view_manager.dart/0
{ "file_path": "engine/lib/web_ui/lib/src/engine/view_embedder/flutter_view_manager.dart", "repo_id": "engine", "token_count": 1342 }
306
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'package:ui/src/engine.dart';

/// A function which takes a unique `id` and some `params` and creates an HTML
/// element.
typedef ParameterizedPlatformViewFactory = Object Function(
  int viewId, {
  Object? params,
});

/// A function which takes a unique `id` and creates an HTML element.
typedef PlatformViewFactory = Object Function(int viewId);

/// The platform view registry for this app.
final PlatformViewRegistry platformViewRegistry = PlatformViewRegistry();

/// A registry for factories that create platform views.
///
/// All calls delegate to the singleton [PlatformViewManager.instance].
class PlatformViewRegistry {
  /// The view type of the built-in factory that creates visible platform view
  /// DOM elements.
  ///
  /// There's no need to register this view type with [PlatformViewRegistry]
  /// because it is registered by default.
  static const String defaultVisibleViewType =
      '_default_document_create_element_visible';

  /// The view type of the built-in factory that creates invisible platform view
  /// DOM elements.
  ///
  /// There's no need to register this view type with [PlatformViewRegistry]
  /// because it is registered by default.
  static const String defaultInvisibleViewType =
      '_default_document_create_element_invisible';

  /// Register [viewType] as being created by the given [viewFactory].
  ///
  /// [viewFactory] can be any function that takes an integer and optional
  /// `params` and returns an `HTMLElement` DOM object.
  bool registerViewFactory(
    String viewType,
    Function viewFactory, {
    bool isVisible = true,
  }) =>
      PlatformViewManager.instance.registerFactory(
        viewType,
        viewFactory,
        isVisible: isVisible,
      );

  /// Returns the view previously created for [viewId].
  ///
  /// Throws if no view has been created for [viewId].
  Object getViewById(int viewId) =>
      PlatformViewManager.instance.getViewById(viewId);
}
engine/lib/web_ui/lib/ui_web/src/ui_web/platform_view_registry.dart/0
{ "file_path": "engine/lib/web_ui/lib/ui_web/src/ui_web/platform_view_registry.dart", "repo_id": "engine", "token_count": 562 }
307
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "export.h" #include "helpers.h" #include "third_party/skia/include/core/SkPath.h" #include "third_party/skia/include/pathops/SkPathOps.h" using namespace Skwasm; SKWASM_EXPORT SkPath* path_create() { return new SkPath(); } SKWASM_EXPORT void path_dispose(SkPath* path) { delete path; } SKWASM_EXPORT SkPath* path_copy(SkPath* path) { return new SkPath(*path); } SKWASM_EXPORT void path_setFillType(SkPath* path, SkPathFillType fillType) { path->setFillType(fillType); } SKWASM_EXPORT SkPathFillType path_getFillType(SkPath* path) { return path->getFillType(); } SKWASM_EXPORT void path_moveTo(SkPath* path, SkScalar x, SkScalar y) { path->moveTo(x, y); } SKWASM_EXPORT void path_relativeMoveTo(SkPath* path, SkScalar x, SkScalar y) { path->rMoveTo(x, y); } SKWASM_EXPORT void path_lineTo(SkPath* path, SkScalar x, SkScalar y) { path->lineTo(x, y); } SKWASM_EXPORT void path_relativeLineTo(SkPath* path, SkScalar x, SkScalar y) { path->rLineTo(x, y); } SKWASM_EXPORT void path_quadraticBezierTo(SkPath* path, SkScalar x1, SkScalar y1, SkScalar x2, SkScalar y2) { path->quadTo(x1, y1, x2, y2); } SKWASM_EXPORT void path_relativeQuadraticBezierTo(SkPath* path, SkScalar x1, SkScalar y1, SkScalar x2, SkScalar y2) { path->rQuadTo(x1, y1, x2, y2); } SKWASM_EXPORT void path_cubicTo(SkPath* path, SkScalar x1, SkScalar y1, SkScalar x2, SkScalar y2, SkScalar x3, SkScalar y3) { path->cubicTo(x1, y1, x2, y2, x3, y3); } SKWASM_EXPORT void path_relativeCubicTo(SkPath* path, SkScalar x1, SkScalar y1, SkScalar x2, SkScalar y2, SkScalar x3, SkScalar y3) { path->rCubicTo(x1, y1, x2, y2, x3, y3); } SKWASM_EXPORT void path_conicTo(SkPath* path, SkScalar x1, SkScalar y1, SkScalar x2, SkScalar y2, SkScalar w) { path->conicTo(x1, y1, x2, y2, w); } SKWASM_EXPORT void path_relativeConicTo(SkPath* path, SkScalar x1, SkScalar y1, SkScalar 
x2, SkScalar y2, SkScalar w) { path->rConicTo(x1, y1, x2, y2, w); } SKWASM_EXPORT void path_arcToOval(SkPath* path, const SkRect* rect, SkScalar startAngle, SkScalar sweepAngle, bool forceMoveTo) { path->arcTo(*rect, startAngle, sweepAngle, forceMoveTo); } SKWASM_EXPORT void path_arcToRotated(SkPath* path, SkScalar rx, SkScalar ry, SkScalar xAxisRotate, SkPath::ArcSize arcSize, SkPathDirection pathDirection, SkScalar x, SkScalar y) { path->arcTo(rx, ry, xAxisRotate, arcSize, pathDirection, x, y); } SKWASM_EXPORT void path_relativeArcToRotated(SkPath* path, SkScalar rx, SkScalar ry, SkScalar xAxisRotate, SkPath::ArcSize arcSize, SkPathDirection pathDirection, SkScalar x, SkScalar y) { path->rArcTo(rx, ry, xAxisRotate, arcSize, pathDirection, x, y); } SKWASM_EXPORT void path_addRect(SkPath* path, const SkRect* rect) { path->addRect(*rect); } SKWASM_EXPORT void path_addOval(SkPath* path, const SkRect* oval) { path->addOval(*oval, SkPathDirection::kCW, 1); } SKWASM_EXPORT void path_addArc(SkPath* path, const SkRect* oval, SkScalar startAngle, SkScalar sweepAngle) { path->addArc(*oval, startAngle, sweepAngle); } SKWASM_EXPORT void path_addPolygon(SkPath* path, const SkPoint* points, int count, bool close) { path->addPoly(points, count, close); } SKWASM_EXPORT void path_addRRect(SkPath* path, const SkScalar* rrectValues) { path->addRRect(createRRect(rrectValues), SkPathDirection::kCW); } SKWASM_EXPORT void path_addPath(SkPath* path, const SkPath* other, const SkScalar* matrix33, SkPath::AddPathMode extendPath) { path->addPath(*other, createMatrix(matrix33), extendPath); } SKWASM_EXPORT void path_close(SkPath* path) { path->close(); } SKWASM_EXPORT void path_reset(SkPath* path) { path->reset(); } SKWASM_EXPORT bool path_contains(SkPath* path, SkScalar x, SkScalar y) { return path->contains(x, y); } SKWASM_EXPORT void path_transform(SkPath* path, const SkScalar* matrix33) { path->transform(createMatrix(matrix33)); } SKWASM_EXPORT void path_getBounds(SkPath* path, SkRect* 
rect) { *rect = path->getBounds(); } SKWASM_EXPORT SkPath* path_combine(SkPathOp operation, const SkPath* path1, const SkPath* path2) { SkPath* output = new SkPath(); if (Op(*path1, *path2, operation, output)) { output->setFillType(path1->getFillType()); return output; } else { delete output; return nullptr; } }
engine/lib/web_ui/skwasm/path.cpp/0
{ "file_path": "engine/lib/web_ui/skwasm/path.cpp", "repo_id": "engine", "token_count": 4111 }
308
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import 'package:ui/ui_web/src/ui_web.dart' as ui_web;

import 'common.dart';

void main() {
  internalBootstrapBrowserTest(() => testMain);
}

const ui.Rect region = ui.Rect.fromLTRB(0, 0, 500, 500);

/// Records a red-on-white checkerboard picture covering [bounds].
///
/// Used as a backdrop so that the blur applied by a backdrop filter is
/// clearly visible in the goldens. Extracted because the same drawing
/// sequence was previously duplicated verbatim in two tests.
CkPicture _makeCheckerboard(ui.Rect bounds) {
  final CkPictureRecorder recorder = CkPictureRecorder();
  final CkCanvas canvas = recorder.beginRecording(bounds);
  canvas.drawColor(const ui.Color(0xffffffff), ui.BlendMode.srcOver);
  final double sideLength = bounds.width / 20;
  final int rows = (bounds.height / sideLength).ceil();
  for (int row = 0; row < rows; row++) {
    for (int column = 0; column < 10; column++) {
      // Offset every other row by one square to produce the checker pattern.
      final ui.Rect rect = ui.Rect.fromLTWH(
        row.isEven
            ? (column * 2) * sideLength
            : (column * 2 + 1) * sideLength,
        row * sideLength,
        sideLength,
        sideLength,
      );
      canvas.drawRect(rect, CkPaint()..color = const ui.Color(0xffff0000));
    }
  }
  return recorder.endRecording();
}

void testMain() {
  group('BackdropFilter', () {
    setUpCanvasKitTest(withImplicitView: true);

    setUp(() {
      EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(1);
    });

    tearDown(() {
      PlatformViewManager.instance.debugClear();
      CanvasKitRenderer.instance.debugClear();
    });

    test('blur renders to the edges', () async {
      // Make a checkerboard picture so we can see the blur.
      final CkPicture checkerboard = _makeCheckerboard(region);

      final LayerSceneBuilder builder = LayerSceneBuilder();
      builder.pushOffset(0, 0);
      builder.addPicture(ui.Offset.zero, checkerboard);
      builder.pushBackdropFilter(ui.ImageFilter.blur(sigmaX: 10, sigmaY: 10));

      await matchSceneGolden(
          'canvaskit_backdropfilter_blur_edges.png', builder.build(),
          region: region);
    });

    test('ImageFilter with ColorFilter as child', () async {
      final LayerSceneBuilder builder = LayerSceneBuilder();
      const ui.Rect region = ui.Rect.fromLTRB(0, 0, 500, 250);
      builder.pushOffset(0, 0);
      final CkPictureRecorder recorder = CkPictureRecorder();
      final CkCanvas canvas = recorder.beginRecording(region);
      final ui.ColorFilter colorFilter = ui.ColorFilter.mode(
          const ui.Color(0XFF00FF00).withOpacity(0.55), ui.BlendMode.darken);
      // using a colorFilter as an imageFilter for backDrop filter
      builder.pushBackdropFilter(colorFilter);
      canvas.drawCircle(
        const ui.Offset(75, 125),
        50,
        CkPaint()..color = const ui.Color.fromARGB(255, 255, 0, 0),
      );
      final CkPicture redCircle1 = recorder.endRecording();
      builder.addPicture(ui.Offset.zero, redCircle1);

      await matchSceneGolden(
          'canvaskit_red_circle_green_backdrop_colorFilter.png',
          builder.build(),
          region: region);
    });

    test('works with an invisible platform view inside', () async {
      ui_web.platformViewRegistry.registerViewFactory(
        'test-platform-view',
        (int viewId) => createDomHTMLDivElement()..id = 'view-0',
        isVisible: false,
      );
      await createPlatformView(0, 'test-platform-view');

      // Make a checkerboard picture so we can see the blur.
      final CkPicture checkerboard = _makeCheckerboard(region);

      final LayerSceneBuilder builder = LayerSceneBuilder();
      builder.pushOffset(0, 0);
      builder.addPicture(ui.Offset.zero, checkerboard);
      builder.pushBackdropFilter(ui.ImageFilter.blur(sigmaX: 10, sigmaY: 10));

      // Draw a green rectangle, then an invisible platform view, then a blue
      // rectangle. Both rectangles should not be blurred.
      final CkPictureRecorder greenRectRecorder = CkPictureRecorder();
      final CkCanvas greenRectCanvas = greenRectRecorder.beginRecording(region);
      final CkPaint greenPaint = CkPaint()..color = const ui.Color(0xff00ff00);
      greenRectCanvas.drawRect(
          ui.Rect.fromCenter(
              center: ui.Offset(region.width / 3, region.height / 2),
              width: region.width / 6,
              height: region.height / 6),
          greenPaint);
      final CkPicture greenRectPicture = greenRectRecorder.endRecording();

      final CkPictureRecorder blueRectRecorder = CkPictureRecorder();
      final CkCanvas blueRectCanvas = blueRectRecorder.beginRecording(region);
      final CkPaint bluePaint = CkPaint()..color = const ui.Color(0xff0000ff);
      blueRectCanvas.drawRect(
          ui.Rect.fromCenter(
              center: ui.Offset(2 * region.width / 3, region.height / 2),
              width: region.width / 6,
              height: region.height / 6),
          bluePaint);
      final CkPicture blueRectPicture = blueRectRecorder.endRecording();

      builder.addPicture(ui.Offset.zero, greenRectPicture);
      builder.addPlatformView(0, width: 10, height: 10);
      builder.addPicture(ui.Offset.zero, blueRectPicture);
      // Pop the backdrop filter layer.
      builder.pop();

      await matchSceneGolden(
          'canvaskit_backdropfilter_with_platformview.png', builder.build(),
          region: region);
    });
    // TODO(hterkelsen): https://github.com/flutter/flutter/issues/71520
  }, skip: isSafari || isFirefox);
}
engine/lib/web_ui/test/canvaskit/backdrop_filter_golden_test.dart/0
{ "file_path": "engine/lib/web_ui/test/canvaskit/backdrop_filter_golden_test.dart", "repo_id": "engine", "token_count": 2577 }
309
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:async';

import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';

void main() {
  internalBootstrapBrowserTest(() => testMain);
}

void testMain() {
  // Verifies the two-phase bootstrap: initializing engine services must not
  // touch the DOM or input bindings; only initializing the UI may do so.
  test('services are initialized separately from UI', () async {
    final JsFlutterConfiguration? config = await bootstrapAndExtractConfig();
    expect(scheduleFrameCallback, isNull);
    expect(findGlassPane(), isNull);
    expect(RawKeyboard.instance, isNull);
    expect(KeyboardBinding.instance, isNull);
    expect(EnginePlatformDispatcher.instance.implicitView, isNull);

    // After initializing services the UI should remain intact.
    await initializeEngineServices(jsConfiguration: config);
    expect(scheduleFrameCallback, isNotNull);
    expect(windowFlutterCanvasKit, isNotNull);
    expect(findGlassPane(), isNull);
    expect(RawKeyboard.instance, isNull);
    expect(KeyboardBinding.instance, isNull);
    expect(EnginePlatformDispatcher.instance.implicitView, isNull);

    // Now UI should be taken over by Flutter.
    await initializeEngineUi();
    expect(findGlassPane(), isNotNull);
    expect(RawKeyboard.instance, isNotNull);
    expect(KeyboardBinding.instance, isNotNull);
    expect(EnginePlatformDispatcher.instance.implicitView, isNotNull);
  });
}

/// Returns the glass pane element if Flutter has taken over the DOM, or null.
DomElement? findGlassPane() {
  return domDocument.querySelector('flt-glass-pane');
}

/// Bootstraps the app without initializing the engine, and resolves with the
/// [JsFlutterConfiguration] that flutter.js would pass to `initializeEngine`.
Future<JsFlutterConfiguration?> bootstrapAndExtractConfig() {
  // Since this test is explicitly checking each part of the bootstrapping process,
  // we can't use the standard bootstrapper here. However, we do need the flutter
  // configuration object that is passed into flutter.js to actually initialize the
  // engine with, so here we do a little no-op bootstrap that just retrieves the
  // configuration that is passed into the `initializeEngine` callback.
  final Completer<JsFlutterConfiguration?> configCompleter =
      Completer<JsFlutterConfiguration?>();
  final AppBootstrap bootstrap = AppBootstrap(
      initializeEngine: ([JsFlutterConfiguration? config]) async =>
          configCompleter.complete(config),
      runApp: () {});

  final FlutterLoader? loader = flutter?.loader;
  if (loader == null || loader.isAutoStart) {
    // TODO(jacksongardner): Unit tests under dart2wasm still use the old way which
    // doesn't invoke flutter.js directly, so we autostart here. Once dart2wasm tests
    // work with flutter.js, we can remove this code path.
    bootstrap.autoStart();
  } else {
    loader.didCreateEngineInitializer(bootstrap.prepareEngineInitializer());
  }
  return configCompleter.future;
}
engine/lib/web_ui/test/canvaskit/initialization/services_vs_ui_test.dart/0
{ "file_path": "engine/lib/web_ui/test/canvaskit/initialization/services_vs_ui_test.dart", "repo_id": "engine", "token_count": 843 }
310
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:typed_data';

import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;

import 'common.dart';
import 'test_data.dart';

void main() {
  internalBootstrapBrowserTest(() => testMain);
}

void testMain() {
  group('CanvasKit shaders', () {
    setUpCanvasKitTest();

    // Each gradient test asserts that the `ui.Gradient` constructor produces
    // the CanvasKit-backed implementation and that it can mint an SkShader.
    test('Sweep gradient', () {
      final CkGradientSweep gradient = ui.Gradient.sweep(
        ui.Offset.zero,
        testColors,
      ) as CkGradientSweep;
      expect(gradient.getSkShader(ui.FilterQuality.none), isNotNull);
    });

    test('Linear gradient', () {
      final CkGradientLinear gradient = ui.Gradient.linear(
        ui.Offset.zero,
        const ui.Offset(0, 1),
        testColors,
      ) as CkGradientLinear;
      expect(gradient.getSkShader(ui.FilterQuality.none), isNotNull);
    });

    test('Radial gradient', () {
      final CkGradientRadial gradient = ui.Gradient.radial(
        ui.Offset.zero,
        10,
        testColors,
      ) as CkGradientRadial;
      expect(gradient.getSkShader(ui.FilterQuality.none), isNotNull);
    });

    test('Conical gradient', () {
      // ui.Gradient.radial with a non-null focal point produces a
      // two-point-conical gradient.
      final CkGradientConical gradient = ui.Gradient.radial(
        ui.Offset.zero,
        10,
        testColors,
        null,
        ui.TileMode.clamp,
        null,
        const ui.Offset(10, 10),
        40,
      ) as CkGradientConical;
      expect(gradient.getSkShader(ui.FilterQuality.none), isNotNull);
    });

    test('Image shader initialize/dispose cycle', () {
      final SkImage skImage =
          canvasKit.MakeAnimatedImageFromEncoded(kTransparentImage)!.makeImageAtCurrentFrame();
      final CkImage image = CkImage(skImage);
      final CkImageShader imageShader = ui.ImageShader(
        image,
        ui.TileMode.clamp,
        ui.TileMode.repeated,
        Float64List.fromList(Matrix4.diagonal3Values(1, 2, 3).storage),
      ) as CkImageShader;
      expect(imageShader, isA<CkImageShader>());
      final UniqueRef<SkShader> ref = imageShader.ref!;

      // Before dispose: the shader hands out the same native object it holds,
      // and neither the shader, its ref, nor the backing image is disposed.
      expect(imageShader.debugDisposed, false);
      expect(imageShader.getSkShader(ui.FilterQuality.none), same(ref.nativeObject));
      expect(ref.isDisposed, false);
      expect(image.debugDisposed, false);

      // Disposing the shader must release the native ref and the image.
      imageShader.dispose();
      expect(imageShader.debugDisposed, true);
      expect(ref.isDisposed, true);
      expect(imageShader.ref, isNull);
      expect(image.debugDisposed, true);
    });

    test('Image shader withQuality', () {
      final SkImage skImage =
          canvasKit.MakeAnimatedImageFromEncoded(kTransparentImage)!.makeImageAtCurrentFrame();
      final CkImage image = CkImage(skImage);
      final CkImageShader imageShader = ui.ImageShader(
        image,
        ui.TileMode.clamp,
        ui.TileMode.repeated,
        Float64List.fromList(Matrix4.diagonal3Values(1, 2, 3).storage),
      ) as CkImageShader;
      expect(imageShader, isA<CkImageShader>());
      final UniqueRef<SkShader> ref1 = imageShader.ref!;
      expect(imageShader.getSkShader(ui.FilterQuality.none), same(ref1.nativeObject));

      // Request the same quality as the default quality (none).
      // The cached native shader must be reused (same UniqueRef instance).
      expect(imageShader.getSkShader(ui.FilterQuality.none), isNotNull);
      final UniqueRef<SkShader> ref2 = imageShader.ref!;
      expect(ref1, same(ref2));
      expect(ref1.isDisposed, false);
      expect(image.debugDisposed, false);

      // Change quality to medium.
      // A new native shader is created; the old ref must be released, but the
      // backing image stays alive.
      expect(imageShader.getSkShader(ui.FilterQuality.medium), isNotNull);
      final UniqueRef<SkShader> ref3 = imageShader.ref!;
      expect(ref1, isNot(same(ref3)));
      expect(ref1.isDisposed, true,
          reason: 'The previous reference must be released to avoid a memory leak');
      expect(image.debugDisposed, false);
      expect(imageShader.ref!.nativeObject, same(ref3.nativeObject));

      // Ask for medium again.
      // No new native shader; the medium-quality ref is reused as-is.
      expect(imageShader.getSkShader(ui.FilterQuality.medium), isNotNull);
      final UniqueRef<SkShader> ref4 = imageShader.ref!;
      expect(ref4, same(ref3));
      expect(ref3.isDisposed, false);
      expect(image.debugDisposed, false);
      expect(imageShader.ref!.nativeObject, same(ref4.nativeObject));

      // Done with the shader.
      // Disposal releases the last ref and the backing image.
      imageShader.dispose();
      expect(imageShader.debugDisposed, true);
      expect(ref4.isDisposed, true);
      expect(imageShader.ref, isNull);
      expect(image.debugDisposed, true);
    });
  });
}

// Shared two-stop color list used by all gradient tests above.
const List<ui.Color> testColors = <ui.Color>[ui.Color(0xFFFFFF00), ui.Color(0xFFFFFFFF)];
engine/lib/web_ui/test/canvaskit/shader_test.dart/0
{ "file_path": "engine/lib/web_ui/test/canvaskit/shader_test.dart", "repo_id": "engine", "token_count": 1925 }
311
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

@TestOn('browser')
library;

import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui_web/src/ui_web.dart' as ui_web;

import '../common/matchers.dart';

void main() {
  internalBootstrapBrowserTest(() => testMain);
}

void testMain() {
  group('AssetManager getAssetUrl', () {
    setUp(() {
      // Start each test without an assetBase meta-tag in the DOM.
      removeAssetBaseMeta();
    });

    test('initializes with default values', () {
      final ui_web.AssetManager manager = ui_web.AssetManager();
      expect(
        manager.getAssetUrl('asset.txt'),
        'assets/asset.txt',
        reason: 'Default `assetsDir` is "assets".',
      );
    });

    test('assetsDir changes the directory where assets are stored', () {
      final ui_web.AssetManager manager =
          ui_web.AssetManager(assetsDir: 'static');
      expect(manager.getAssetUrl('asset.txt'), 'static/asset.txt');
    });

    test('assetBase must end with slash', () {
      expect(() {
        ui_web.AssetManager(assetBase: '/deployment');
      }, throwsAssertionError);
    });

    test('assetBase can be relative', () {
      final ui_web.AssetManager manager =
          ui_web.AssetManager(assetBase: 'base/');
      expect(manager.getAssetUrl('asset.txt'), 'base/assets/asset.txt');
    });

    test('assetBase can be absolute', () {
      final ui_web.AssetManager manager = ui_web.AssetManager(
        assetBase: 'https://www.gstatic.com/my-app/',
      );
      expect(
        manager.getAssetUrl('asset.txt'),
        'https://www.gstatic.com/my-app/assets/asset.txt',
      );
    });

    test('assetBase in conjunction with assetsDir, fully custom paths', () {
      final ui_web.AssetManager manager = ui_web.AssetManager(
        assetBase: '/asset/base/',
        assetsDir: 'static',
      );
      expect(manager.getAssetUrl('asset.txt'), '/asset/base/static/asset.txt');
    });

    test('Fully-qualified asset URLs are untouched', () {
      final ui_web.AssetManager manager = ui_web.AssetManager();
      expect(
        manager.getAssetUrl('https://static.my-app.com/favicon.ico'),
        'https://static.my-app.com/favicon.ico',
      );
    });

    test('Fully-qualified asset URLs are untouched (even with assetBase)', () {
      final ui_web.AssetManager manager = ui_web.AssetManager(
        assetBase: 'https://static.my-app.com/',
      );
      expect(
        manager.getAssetUrl('https://static.my-app.com/favicon.ico'),
        'https://static.my-app.com/favicon.ico',
      );
    });
  });

  group('AssetManager getAssetUrl with <meta name=assetBase> tag', () {
    setUp(() {
      removeAssetBaseMeta();
      addAssetBaseMeta('/dom/base/');
    });

    test('reads value from DOM', () {
      final ui_web.AssetManager manager = ui_web.AssetManager();
      expect(manager.getAssetUrl('asset.txt'), '/dom/base/assets/asset.txt');
    });

    test('reads value from DOM (only once!)', () {
      final ui_web.AssetManager firstManager = ui_web.AssetManager();
      expect(
        firstManager.getAssetUrl('asset.txt'),
        '/dom/base/assets/asset.txt',
      );

      removeAssetBaseMeta();
      final ui_web.AssetManager anotherManager = ui_web.AssetManager();

      expect(
        firstManager.getAssetUrl('asset.txt'),
        '/dom/base/assets/asset.txt',
        reason: 'The old value of the assetBase meta should be cached.',
      );
      expect(anotherManager.getAssetUrl('asset.txt'), 'assets/asset.txt');
    });
  });
}

/// Removes all meta-tags with name=assetBase.
void removeAssetBaseMeta() {
  for (final DomElement element
      in domWindow.document.querySelectorAll('meta[name=assetBase]')) {
    element.remove();
  }
}

/// Adds a meta-tag with name=assetBase and the passed-in [value].
void addAssetBaseMeta(String value) {
  final DomHTMLMetaElement meta = createDomHTMLMetaElement()
    ..name = 'assetBase'
    ..content = value;
  domDocument.head!.append(meta);
}
engine/lib/web_ui/test/engine/assets_test.dart/0
{ "file_path": "engine/lib/web_ui/test/engine/assets_test.dart", "repo_id": "engine", "token_count": 1641 }
312
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/ui.dart' as ui;

void main() {
  internalBootstrapBrowserTest(() => testMain);
}

Future<void> testMain() async {
  test('toImage succeeds', () async {
    final ui.Image image = await _createImage();
    expect(image.runtimeType.toString(), equals('HtmlImage'));
    image.dispose();
    // TODO(polina-c): unskip the test when bug is fixed:
    // https://github.com/flutter/flutter/issues/110599
  }, skip: true);

  test('Image constructor invokes onCreate once', () async {
    int createCount = 0;
    ui.Image? lastCreated;
    ui.Image.onCreate = (ui.Image image) {
      createCount++;
      lastCreated = image;
    };

    final ui.Image image1 = await _createImage();
    expect(createCount, 1);
    expect(lastCreated, image1);

    final ui.Image image2 = await _createImage();
    expect(createCount, 2);
    expect(lastCreated, image2);

    ui.Image.onCreate = null;
    // TODO(polina-c): unskip the test when bug is fixed:
    // https://github.com/flutter/flutter/issues/110599
  }, skip: true);

  test('dispose() invokes onDispose once', () async {
    int disposeCount = 0;
    ui.Image? lastDisposed;
    ui.Image.onDispose = (ui.Image image) {
      disposeCount++;
      lastDisposed = image;
    };

    final ui.Image image1 = await _createImage()
      ..dispose();
    expect(disposeCount, 1);
    expect(lastDisposed, image1);

    final ui.Image image2 = await _createImage()
      ..dispose();
    expect(disposeCount, 2);
    expect(lastDisposed, image2);

    ui.Image.onDispose = null;
    // TODO(polina-c): unskip the test when bug is fixed:
    // https://github.com/flutter/flutter/issues/110599
  }, skip: true);
}

/// Rasterizes a trivial picture into a 10x10 image.
Future<ui.Image> _createImage() {
  return _createPicture().toImage(10, 10);
}

/// Records a picture that only clips to a 100x100 rect (no drawing).
ui.Picture _createPicture() {
  final ui.PictureRecorder recorder = ui.PictureRecorder();
  final ui.Canvas canvas = ui.Canvas(recorder);
  const ui.Rect rect = ui.Rect.fromLTWH(0.0, 0.0, 100.0, 100.0);
  canvas.clipRect(rect);
  return recorder.endRecording();
}
engine/lib/web_ui/test/engine/image/image_test.dart/0
{ "file_path": "engine/lib/web_ui/test/engine/image/image_test.dart", "repo_id": "engine", "token_count": 834 }
313
// Browser tests for PlatformViewMessageHandler.handlePlatformViewCall.
//
// NOTE: this source was collapsed onto the long lines below; apart from the
// one fixed test description (see below) all tokens are preserved verbatim.
//
// Coverage:
//  - '"create" message' group:
//      * unregistered viewType -> PlatformException 'unregistered_view_type'
//        mentioning the type and 'registerViewFactory'.
//      * duplicate viewId (already rendered) -> 'recreating_view' with the id.
//      * normal creation -> success envelope (decodes to null).
//      * created view is cached in the PlatformViewManager
//        (knowsViewId/getViewById) but NOT injected into the DOM.
//      * creation params (absent, map, string, list) are forwarded to the
//        registered factory, verified via the PlatformViewFactoryCall record.
//      * factory returning a non-DOM object -> TypeError.
//  - '"dispose" message' group:
//      * always answers with a success envelope, even for unknown viewIds.
//      * forwards the viewId to PlatformViewManager.clearPlatformView,
//        observed through _FakePlatformViewManager.
//        FIX: this test previously reused the description of the preceding
//        test ('never fails, even for unknown viewIds') by copy-paste; it is
//        renamed to describe what it actually asserts.
//  - _FakePlatformViewManager: overrides clearPlatformView to report the id.
//  - _getCreateArguments: builds the {'id', 'viewType', 'params'?} map.
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:async'; import 'dart:typed_data'; import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; void main() { internalBootstrapBrowserTest(() => testMain); } const MethodCodec codec = StandardMethodCodec(); typedef PlatformViewFactoryCall = ({int viewId, Object? params}); void testMain() { group('PlatformViewMessageHandler', () { group('handlePlatformViewCall', () { const String platformViewType = 'forTest'; const int platformViewId = 6; late PlatformViewManager contentManager; late Completer<ByteData?> completer; setUp(() { contentManager = PlatformViewManager(); completer = Completer<ByteData?>(); }); group('"create" message', () { test('unregistered viewType, fails with descriptive exception', () async { final PlatformViewMessageHandler messageHandler = PlatformViewMessageHandler( contentManager: contentManager, ); final Map<dynamic, dynamic> arguments = _getCreateArguments(platformViewType, platformViewId); messageHandler.handlePlatformViewCall('create', arguments, completer.complete); final ByteData?
response = await completer.future; try { codec.decodeEnvelope(response!); } on PlatformException catch (e) { expect(e.code, 'unregistered_view_type'); expect(e.message, contains(platformViewType)); expect(e.details, contains('registerViewFactory')); } }); test('duplicate viewId, fails with descriptive exception', () async { contentManager.registerFactory( platformViewType, (int id) => createDomHTMLDivElement()); contentManager.renderContent(platformViewType, platformViewId, null); final PlatformViewMessageHandler messageHandler = PlatformViewMessageHandler( contentManager: contentManager, ); final Map<dynamic, dynamic> arguments = _getCreateArguments(platformViewType, platformViewId); messageHandler.handlePlatformViewCall('create', arguments, completer.complete); final ByteData? response = await completer.future; try { codec.decodeEnvelope(response!); } on PlatformException catch (e) { expect(e.code, 'recreating_view'); expect(e.details, contains('$platformViewId')); } }); test('returns a successEnvelope when the view is created normally', () async { contentManager.registerFactory( platformViewType, (int id) => createDomHTMLDivElement()..id = 'success'); final PlatformViewMessageHandler messageHandler = PlatformViewMessageHandler( contentManager: contentManager, ); final Map<dynamic, dynamic> arguments = _getCreateArguments(platformViewType, platformViewId); messageHandler.handlePlatformViewCall('create', arguments, completer.complete); final ByteData?
response = await completer.future; expect(codec.decodeEnvelope(response!), isNull, reason: 'The response should be a success envelope, with null in it.'); }); test('caches the created view so it can be retrieved (not on the DOM)', () async { final DomElement platformViewsContainer = createDomElement('pv-container'); contentManager.registerFactory( platformViewType, (int id) => createDomHTMLDivElement()..id = 'success'); final PlatformViewMessageHandler messageHandler = PlatformViewMessageHandler( contentManager: contentManager, ); final Map<dynamic, dynamic> arguments = _getCreateArguments(platformViewType, platformViewId); messageHandler.handlePlatformViewCall('create', arguments, completer.complete); final ByteData? response = await completer.future; expect( codec.decodeEnvelope(response!), isNull, reason: 'The response should be a success envelope, with null in it.', ); expect( contentManager.knowsViewId(platformViewId), isTrue, reason: 'The contentManager should have pre-rendered the platformViewId.' ); expect( contentManager.getViewById(platformViewId).matches('div#success'), isNotNull, reason: 'The element created by the factory should be retrievable.', ); expect( platformViewsContainer.children, hasLength(0), reason: 'The view should not have been injected into the DOM', ); }); test('passes creation params to the factory', () async { final List<PlatformViewFactoryCall> factoryCalls = <PlatformViewFactoryCall>[]; contentManager.registerFactory(platformViewType, (int viewId, {Object?
params}) { factoryCalls.add((viewId: viewId, params: params)); return createDomHTMLDivElement(); }); final PlatformViewMessageHandler messageHandler = PlatformViewMessageHandler( contentManager: contentManager, ); final List<Completer<ByteData?>> completers = <Completer<ByteData?>>[]; completers.add(Completer<ByteData?>()); messageHandler.handlePlatformViewCall( 'create', _getCreateArguments(platformViewType, 111), completers.last.complete, ); completers.add(Completer<ByteData?>()); messageHandler.handlePlatformViewCall( 'create', _getCreateArguments(platformViewType, 222, <dynamic, dynamic>{'foo': 'bar'}), completers.last.complete, ); completers.add(Completer<ByteData?>()); messageHandler.handlePlatformViewCall( 'create', _getCreateArguments(platformViewType, 333, 'foobar'), completers.last.complete, ); completers.add(Completer<ByteData?>()); messageHandler.handlePlatformViewCall( 'create', _getCreateArguments(platformViewType, 444, <dynamic>[1, null, 'str']), completers.last.complete, ); final List<ByteData?> responses = await Future.wait( completers.map((Completer<ByteData?> c) => c.future), ); for (final ByteData? response in responses) { expect( codec.decodeEnvelope(response!), isNull, reason: 'The response should be a success envelope, with null in it.', ); } expect(factoryCalls, hasLength(4)); expect(factoryCalls[0].viewId, 111); expect(factoryCalls[0].params, isNull); expect(factoryCalls[1].viewId, 222); expect(factoryCalls[1].params, <dynamic, dynamic>{'foo': 'bar'}); expect(factoryCalls[2].viewId, 333); expect(factoryCalls[2].params, 'foobar'); expect(factoryCalls[3].viewId, 444); expect(factoryCalls[3].params, <dynamic>[1, null, 'str']); }); test('fails if the factory returns a non-DOM object', () async { contentManager.registerFactory(platformViewType, (int viewId) { // Return an object that's not a DOM element.
return Object(); }); final PlatformViewMessageHandler messageHandler = PlatformViewMessageHandler( contentManager: contentManager, ); final Map<dynamic, dynamic> arguments = _getCreateArguments(platformViewType, platformViewId); expect(() { messageHandler.handlePlatformViewCall('create', arguments, (_) {}); }, throwsA(isA<TypeError>())); }); }); group('"dispose" message', () { late Completer<int> viewIdCompleter; setUp(() { viewIdCompleter = Completer<int>(); }); test('never fails, even for unknown viewIds', () async { final PlatformViewMessageHandler messageHandler = PlatformViewMessageHandler( contentManager: contentManager, ); messageHandler.handlePlatformViewCall('dispose', platformViewId, completer.complete); final ByteData? response = await completer.future; expect(codec.decodeEnvelope(response!), isNull, reason: 'The response should be a success envelope, with null in it.'); }); test('passes the viewId to be disposed to the contentManager', () async { final PlatformViewMessageHandler messageHandler = PlatformViewMessageHandler( contentManager: _FakePlatformViewManager(viewIdCompleter.complete), ); messageHandler.handlePlatformViewCall('dispose', platformViewId, completer.complete); final int disposedViewId = await viewIdCompleter.future; expect(disposedViewId, platformViewId, reason: 'The viewId to dispose should be passed to the contentManager'); }); }); }); }); } class _FakePlatformViewManager extends PlatformViewManager { _FakePlatformViewManager(void Function(int) clearFunction) : _clearPlatformView = clearFunction; final void Function(int) _clearPlatformView; @override void clearPlatformView(int viewId) { return _clearPlatformView(viewId); } } Map<dynamic, dynamic> _getCreateArguments(String viewType, int viewId, [Object? params]) { return <String, dynamic>{ 'id': viewId, 'viewType': viewType, if (params != null) 'params': params, }; }
engine/lib/web_ui/test/engine/platform_views/message_handler_test.dart/0
{ "file_path": "engine/lib/web_ui/test/engine/platform_views/message_handler_test.dart", "repo_id": "engine", "token_count": 4033 }
314
// Browser test: each EngineFlutterView owns an independent semantics tree.
//
// NOTE: this source was collapsed onto the two long lines below; the original
// tokens (including the exact-match semantics HTML golden strings) are
// preserved byte-for-byte.
//
// Flow of the single test 'Can create multiple views each with its own
// semantics tree':
//  1. Enables EngineSemantics, creates two host elements and registers two
//     EngineFlutterViews with the platform dispatcher, each with its own
//     SemanticsTester.
//  2. Applies a root node (id 0) to both trees and checks the DOM: two
//     <flutter-view> elements (one per flt-view-id), two
//     flt-semantic-node-0 elements, and each semantics tree attached to its
//     own view's semanticsHost.
//  3. Adds a tappable button child (id 1) to view1 and an increase/label
//     node (id 2, rendered as a slider input) to view2, then matches each
//     view's semantics HTML against the expected golden string.
//  4. Disposes/unregisters view1 and verifies only view2's DOM and semantics
//     nodes remain.
//  5. Disables semantics and verifies the remaining view stays in the DOM
//     while its semantics nodes are removed.
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:async'; import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import 'package:ui/ui.dart' as ui; import '../../common/test_initialization.dart'; import 'semantics_tester.dart'; void main() { internalBootstrapBrowserTest(() { return testMain; }); } Future<void> testMain() async { await bootstrapAndRunApp(); test('Can create multiple views each with its own semantics tree', () async { EngineSemantics.instance.semanticsEnabled = true; final DomElement host1 = createDomElement('view-host'); domDocument.body!.append(host1); final EngineFlutterView view1 = EngineFlutterView(EnginePlatformDispatcher.instance, host1); EnginePlatformDispatcher.instance.viewManager.registerView(view1); final SemanticsTester tester1 = SemanticsTester(view1.semantics); final DomElement host2 = createDomElement('view-host'); domDocument.body!.append(host2); final EngineFlutterView view2 = EngineFlutterView(EnginePlatformDispatcher.instance, host2); EnginePlatformDispatcher.instance.viewManager.registerView(view2); final SemanticsTester tester2 = SemanticsTester(view2.semantics); tester1.updateNode(id: 0); tester1.apply(); tester2.updateNode(id: 0); tester2.apply(); // Check that we have both root nodes in the DOM (root nodes have id == 0) expect(domDocument.querySelectorAll('flutter-view'), hasLength(2)); expect(domDocument.querySelectorAll('flutter-view[flt-view-id="${view1.viewId}"]'), hasLength(1)); expect(domDocument.querySelectorAll('flutter-view[flt-view-id="${view2.viewId}"]'), hasLength(1)); expect(domDocument.querySelectorAll('flt-semantics[id=flt-semantic-node-0]'), hasLength(2)); // Check that each is attached to its own view expect(view1.semantics.semanticsHost, view1.dom.semanticsHost); expect(view2.semantics.semanticsHost,
view2.dom.semanticsHost); // Check semantics expectSemanticsTree(view1.semantics, '<sem style="filter: opacity(0%); color: rgba(0, 0, 0, 0)"></sem>'); expectSemanticsTree(view2.semantics, '<sem style="filter: opacity(0%); color: rgba(0, 0, 0, 0)"></sem>'); // Add some children tester1.updateNode( id: 0, children: <SemanticsNodeUpdate>[ tester1.updateNode( id: 1, isFocusable: true, hasTap: true, hasEnabledState: true, isEnabled: true, isButton: true, rect: const ui.Rect.fromLTRB(0, 0, 100, 50), ) ], ); tester1.apply(); tester2.updateNode( id: 0, children: <SemanticsNodeUpdate>[ tester2.updateNode( id: 2, hasIncrease: true, label: 'd', rect: const ui.Rect.fromLTRB(0, 0, 100, 50), ), ], ); tester2.apply(); // Test that each view renders its own semantics tree. expectSemanticsTree( view1.semantics, ''' <sem style="filter: opacity(0%); color: rgba(0, 0, 0, 0)"> <sem-c> <sem flt-tappable="" role="button"></sem> </sem-c> </sem>''', ); expectSemanticsTree( view2.semantics, ''' <sem style="filter: opacity(0%); color: rgba(0, 0, 0, 0)"> <sem-c> <sem aria-label="d"><input aria-valuemax="1" aria-valuemin="1" aria-valuenow="1" aria-valuetext="" role="slider"></sem> </sem-c> </sem> '''); // Remove the first view, but keep the second one. EnginePlatformDispatcher.instance.viewManager.disposeAndUnregisterView(view1.viewId); expect(domDocument.querySelectorAll('flutter-view'), hasLength(1)); expect(domDocument.querySelectorAll('flt-semantics[id=flt-semantic-node-0]'), hasLength(1)); expect(domDocument.querySelectorAll('flt-semantics[id=flt-semantic-node-2]'), hasLength(1)); // Disable semantics; make sure the view is there but semantics is removed. EngineSemantics.instance.semanticsEnabled = false; expect(domDocument.querySelectorAll('flutter-view'), hasLength(1)); expect(domDocument.querySelectorAll('flt-semantics[id=flt-semantic-node-0]'), isEmpty); }); }
engine/lib/web_ui/test/engine/semantics/semantics_multi_view_test.dart/0
{ "file_path": "engine/lib/web_ui/test/engine/semantics/semantics_multi_view_test.dart", "repo_id": "engine", "token_count": 1659 }
315
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:typed_data'; import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine/vector_math.dart'; void main() { internalBootstrapBrowserTest(() => testMain); } void testMain() { test('toMatrix32', () { final List<double> data = <double>[ 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 ]; final Float32List m32 = toMatrix32(Float64List.fromList(data)); expect(m32, Float32List.fromList(data)); }); test('FastMatrix32.transform', () { final FastMatrix32 fast = FastMatrix32(Float32List.fromList(<double>[ 2, 1, 0, 0, 1, 3, 0, 0, 0, 0, 0, 0, 4, 5, 0, 1 ])); fast.transform(6, 7); // Just make sure that the fast version produces a result consistent with // the slow version. final Matrix4 slow = Matrix4.fromFloat32List(fast.matrix); final Float32List slowTransformed = Float32List.fromList(<double>[ 6, 7, 0 ]); slow.transform3(slowTransformed); expect(fast.transformedX, slowTransformed[0]); expect(fast.transformedY, slowTransformed[1]); }); }
engine/lib/web_ui/test/engine/vector_math_test.dart/0
{ "file_path": "engine/lib/web_ui/test/engine/vector_math_test.dart", "repo_id": "engine", "token_count": 489 }
316
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import '../common/test_initialization.dart'; import '../ui/utils.dart'; void main() { internalBootstrapBrowserTest(() => testMain); } Future<void> testMain() async { setUpUnitTests( setUpTestViewDimensions: false, ); test('bootstrapper selects correct builds', () { if (browserEngine == BrowserEngine.blink) { expect(isWasm, isTrue); expect(isSkwasm, isTrue); } else { expect(isWasm, isFalse); expect(isCanvasKit, isTrue); } }); }
engine/lib/web_ui/test/fallbacks/fallbacks_test.dart/0
{ "file_path": "engine/lib/web_ui/test/fallbacks/fallbacks_test.dart", "repo_id": "engine", "token_count": 273 }
317
// Browser tests for adjustRectForDom (HTML renderer rect adjustment).
//
// NOTE: this source was collapsed onto the two long lines below; tokens are
// preserved byte-for-byte.
//
// Cases covered, each with exact expected Rects:
//  - 'does not change rect when not necessary': fill style, or stroke with
//    strokeWidth 0, leaves the rect untouched.
//  - 'takes stroke width into consideration': stroked rects are inset by
//    half the stroke width on each edge (e.g. width 140 -> 139 at stroke 1,
//    130 at stroke 10, 125 at stroke 15).
//  - 'flips rect when necessary': negative width and/or height are
//    normalized so the rect has non-negative dimensions with the same area.
//  - 'handles stroke width greater than width or height': dimensions clamp
//    at 0 instead of going negative when the stroke exceeds them.
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'package:test/bootstrap/browser.dart'; import 'package:test/test.dart'; import 'package:ui/src/engine.dart'; import 'package:ui/ui.dart'; void main() { internalBootstrapBrowserTest(() => testMain); } Future<void> testMain() async { group('$adjustRectForDom', () { test('does not change rect when not necessary', () async { const Rect rect = Rect.fromLTWH(10, 20, 140, 160); expect( adjustRectForDom(rect, SurfacePaintData()..style=PaintingStyle.fill), rect, ); expect( adjustRectForDom(rect, SurfacePaintData()..style=PaintingStyle.stroke..strokeWidth=0), rect, ); }); test('takes stroke width into consideration', () async { const Rect rect = Rect.fromLTWH(10, 20, 140, 160); expect( adjustRectForDom(rect, SurfacePaintData()..style=PaintingStyle.stroke..strokeWidth=1), const Rect.fromLTWH(9.5, 19.5, 139, 159), ); expect( adjustRectForDom(rect, SurfacePaintData()..style=PaintingStyle.stroke..strokeWidth=10), const Rect.fromLTWH(5, 15, 130, 150), ); expect( adjustRectForDom(rect, SurfacePaintData()..style=PaintingStyle.stroke..strokeWidth=15), const Rect.fromLTWH(2.5, 12.5, 125, 145), ); }); test('flips rect when necessary', () { Rect rect = const Rect.fromLTWH(100, 200, -40, -60); expect( adjustRectForDom(rect, SurfacePaintData()..style=PaintingStyle.fill), const Rect.fromLTWH(60, 140, 40, 60), ); rect = const Rect.fromLTWH(100, 200, 40, -60); expect( adjustRectForDom(rect, SurfacePaintData()..style=PaintingStyle.fill), const Rect.fromLTWH(100, 140, 40, 60), ); rect = const Rect.fromLTWH(100, 200, -40, 60); expect( adjustRectForDom(rect, SurfacePaintData()..style=PaintingStyle.fill), const Rect.fromLTWH(60, 200, 40, 60), ); }); test('handles stroke width greater than width or height', () { const Rect rect = Rect.fromLTWH(100, 200, 20, 70); expect( adjustRectForDom(rect,
SurfacePaintData()..style=PaintingStyle.stroke..strokeWidth=50), const Rect.fromLTWH(75, 175, 0, 20), ); expect( adjustRectForDom(rect, SurfacePaintData()..style=PaintingStyle.stroke..strokeWidth=80), const Rect.fromLTWH(60, 160, 0, 0), ); }); }); }
engine/lib/web_ui/test/html/dom_canvas_test.dart/0
{ "file_path": "engine/lib/web_ui/test/html/dom_canvas_test.dart", "repo_id": "engine", "token_count": 1057 }
318
// Shared helpers and fixtures for the HTML paragraph golden tests.
//
// NOTE: this source was collapsed onto the three long lines below; tokens
// (including the existing /// doc comments) are preserved byte-for-byte.
//
// Contents:
//  - Shorthand constants for LineBreakType, TextDirection, FragmentFlow,
//    two RTL Arabic sample words, and a palette of named Colors.
//  - ahemStyle: 10px Ahem paragraph style (Ahem gives predictable metrics).
//  - constrain(width): builds ParagraphConstraints of the given width.
//  - plain(style, text, {textStyle}): builds a single-span CanvasParagraph.
//  - rich(style, callback): builds a CanvasParagraph via a builder callback.
//  - takeScreenshot(canvas, region, fileName): mounts the canvas in a
//    temporary <flt-scene> (scaled down on iOS Safari), matches the golden
//    file, and always removes the element afterwards.
//  - fillPlaceholder / fillBoxes: paint placeholder/text boxes for visual
//    inspection in goldens.
//  - getSpanText(paragraph, span): the plain-text substring of a span.
// Copyright 2013 The Flutter Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. import 'dart:async'; import 'package:ui/src/engine.dart'; import 'package:ui/ui.dart'; import 'package:web_engine_tester/golden_tester.dart'; typedef CanvasTest = FutureOr<void> Function(EngineCanvas canvas); const LineBreakType prohibited = LineBreakType.prohibited; const LineBreakType opportunity = LineBreakType.opportunity; const LineBreakType mandatory = LineBreakType.mandatory; const LineBreakType endOfText = LineBreakType.endOfText; const TextDirection ltr = TextDirection.ltr; const TextDirection rtl = TextDirection.rtl; const FragmentFlow ffLtr = FragmentFlow.ltr; const FragmentFlow ffRtl = FragmentFlow.rtl; const FragmentFlow ffPrevious = FragmentFlow.previous; const FragmentFlow ffSandwich = FragmentFlow.sandwich; const String rtlWord1 = 'واحدة'; const String rtlWord2 = 'ثنتان'; const Color white = Color(0xFFFFFFFF); const Color black = Color(0xFF000000); const Color red = Color(0xFFFF0000); const Color lightGreen = Color(0xFFDCEDC8); const Color green = Color(0xFF00FF00); const Color lightBlue = Color(0xFFB3E5FC); const Color blue = Color(0xFF0000FF); const Color yellow = Color(0xFFFFEB3B); const Color lightPurple = Color(0xFFE1BEE7); final EngineParagraphStyle ahemStyle = EngineParagraphStyle( fontFamily: 'Ahem', fontSize: 10, ); ParagraphConstraints constrain(double width) { return ParagraphConstraints(width: width); } CanvasParagraph plain( EngineParagraphStyle style, String text, { EngineTextStyle?
textStyle, }) { final CanvasParagraphBuilder builder = CanvasParagraphBuilder(style); if (textStyle != null) { builder.pushStyle(textStyle); } builder.addText(text); return builder.build(); } CanvasParagraph rich( EngineParagraphStyle style, void Function(CanvasParagraphBuilder) callback, ) { final CanvasParagraphBuilder builder = CanvasParagraphBuilder(style); callback(builder); return builder.build(); } Future<void> takeScreenshot( EngineCanvas canvas, Rect region, String fileName, ) async { final DomElement sceneElement = createDomElement('flt-scene'); if (isIosSafari) { // Shrink to fit on the iPhone screen. sceneElement.style.position = 'absolute'; sceneElement.style.transformOrigin = '0 0 0'; sceneElement.style.transform = 'scale(0.3)'; } try { sceneElement.append(canvas.rootElement); domDocument.body!.append(sceneElement); await matchGoldenFile('$fileName.png', region: region); } finally { // The page is reused across tests, so remove the element after taking the // screenshot. sceneElement.remove(); } } /// Fills the single placeholder in the given [paragraph] with a red rectangle. /// /// The placeholder is filled relative to [offset]. /// /// Throws if the paragraph contains more than one placeholder. void fillPlaceholder( EngineCanvas canvas, Offset offset, CanvasParagraph paragraph, ) { final TextBox placeholderBox = paragraph.getBoxesForPlaceholders().single; final SurfacePaint paint = SurfacePaint()..color = red; canvas.drawRect(placeholderBox.toRect().shift(offset), paint.paintData); } /// Fill the given [boxes] with rectangles of the given [color]. /// /// All rectangles are filled relative to [offset].
void fillBoxes(EngineCanvas canvas, Offset offset, List<TextBox> boxes, Color color) { for (final TextBox box in boxes) { final Rect rect = box.toRect().shift(offset); canvas.drawRect(rect, SurfacePaintData()..color = color.value); } } String getSpanText(CanvasParagraph paragraph, ParagraphSpan span) { return paragraph.plainText.substring(span.start, span.end); }
engine/lib/web_ui/test/html/paragraph/helper.dart/0
{ "file_path": "engine/lib/web_ui/test/html/paragraph/helper.dart", "repo_id": "engine", "token_count": 1181 }
319