text (stringlengths 6–13.6M) | id (stringlengths 13–176) | metadata (dict) | __index_level_0__ (int64 0–1.69k)
---|---|---|---|
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "impeller/renderer/backend/metal/render_pass_mtl.h"
#include "flutter/fml/closure.h"
#include "flutter/fml/logging.h"
#include "flutter/fml/make_copyable.h"
#include "fml/status.h"
#include "impeller/base/backend_cast.h"
#include "impeller/core/formats.h"
#include "impeller/core/host_buffer.h"
#include "impeller/core/shader_types.h"
#include "impeller/renderer/backend/metal/context_mtl.h"
#include "impeller/renderer/backend/metal/device_buffer_mtl.h"
#include "impeller/renderer/backend/metal/formats_mtl.h"
#include "impeller/renderer/backend/metal/pipeline_mtl.h"
#include "impeller/renderer/backend/metal/sampler_mtl.h"
#include "impeller/renderer/backend/metal/texture_mtl.h"
#include "impeller/renderer/command.h"
#include "impeller/renderer/vertex_descriptor.h"
namespace impeller {
static bool ConfigureResolveTextureAttachment(
const Attachment& desc,
MTLRenderPassAttachmentDescriptor* attachment) {
bool needs_resolve =
desc.store_action == StoreAction::kMultisampleResolve ||
desc.store_action == StoreAction::kStoreAndMultisampleResolve;
if (needs_resolve && !desc.resolve_texture) {
VALIDATION_LOG << "Resolve store action specified on attachment but no "
"resolve texture was specified.";
return false;
}
if (desc.resolve_texture && !needs_resolve) {
VALIDATION_LOG << "A resolve texture was specified even though the store "
"action doesn't require it.";
return false;
}
if (!desc.resolve_texture) {
return true;
}
attachment.resolveTexture =
TextureMTL::Cast(*desc.resolve_texture).GetMTLTexture();
return true;
}
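// Illustrative sketch (not part of the original file): a color attachment that
// satisfies the validation above. A resolve store action must be paired with a
// resolve texture, and vice versa. Both texture arguments are assumed to be
// valid, with the MSAA texture having a sample count greater than one and the
// resolve texture matching its size and format.
[[maybe_unused]] static ColorAttachment MakeResolvingColorAttachmentSketch(
    std::shared_ptr<Texture> msaa_texture,
    std::shared_ptr<Texture> resolve_texture) {
  ColorAttachment color;
  color.texture = std::move(msaa_texture);
  color.resolve_texture = std::move(resolve_texture);
  color.load_action = LoadAction::kClear;
  // StoreAction::kStoreAndMultisampleResolve would also pass the checks above.
  color.store_action = StoreAction::kMultisampleResolve;
  return color;
}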
static bool ConfigureAttachment(const Attachment& desc,
MTLRenderPassAttachmentDescriptor* attachment) {
if (!desc.texture) {
return false;
}
attachment.texture = TextureMTL::Cast(*desc.texture).GetMTLTexture();
attachment.loadAction = ToMTLLoadAction(desc.load_action);
attachment.storeAction = ToMTLStoreAction(desc.store_action);
if (!ConfigureResolveTextureAttachment(desc, attachment)) {
return false;
}
return true;
}
static bool ConfigureColorAttachment(
const ColorAttachment& desc,
MTLRenderPassColorAttachmentDescriptor* attachment) {
if (!ConfigureAttachment(desc, attachment)) {
return false;
}
attachment.clearColor = ToMTLClearColor(desc.clear_color);
return true;
}
static bool ConfigureDepthAttachment(
const DepthAttachment& desc,
MTLRenderPassDepthAttachmentDescriptor* attachment) {
if (!ConfigureAttachment(desc, attachment)) {
return false;
}
attachment.clearDepth = desc.clear_depth;
return true;
}
static bool ConfigureStencilAttachment(
const StencilAttachment& desc,
MTLRenderPassStencilAttachmentDescriptor* attachment) {
if (!ConfigureAttachment(desc, attachment)) {
return false;
}
attachment.clearStencil = desc.clear_stencil;
return true;
}
// TODO(csg): Move this to formats_mtl.h
static MTLRenderPassDescriptor* ToMTLRenderPassDescriptor(
const RenderTarget& desc) {
auto result = [MTLRenderPassDescriptor renderPassDescriptor];
const auto& colors = desc.GetColorAttachments();
for (const auto& color : colors) {
if (!ConfigureColorAttachment(color.second,
result.colorAttachments[color.first])) {
VALIDATION_LOG << "Could not configure color attachment at index "
<< color.first;
return nil;
}
}
const auto& depth = desc.GetDepthAttachment();
if (depth.has_value() &&
!ConfigureDepthAttachment(depth.value(), result.depthAttachment)) {
VALIDATION_LOG << "Could not configure depth attachment.";
return nil;
}
const auto& stencil = desc.GetStencilAttachment();
if (stencil.has_value() &&
!ConfigureStencilAttachment(stencil.value(), result.stencilAttachment)) {
VALIDATION_LOG << "Could not configure stencil attachment.";
return nil;
}
return result;
}
RenderPassMTL::RenderPassMTL(std::shared_ptr<const Context> context,
const RenderTarget& target,
id<MTLCommandBuffer> buffer)
: RenderPass(std::move(context), target),
buffer_(buffer),
desc_(ToMTLRenderPassDescriptor(GetRenderTarget())) {
if (!buffer_ || !desc_ || !render_target_.IsValid()) {
return;
}
encoder_ = [buffer_ renderCommandEncoderWithDescriptor:desc_];
if (!encoder_) {
return;
}
#ifdef IMPELLER_DEBUG
is_metal_trace_active_ =
[[MTLCaptureManager sharedCaptureManager] isCapturing];
#endif // IMPELLER_DEBUG
pass_bindings_.SetEncoder(encoder_);
pass_bindings_.SetViewport(
Viewport{.rect = Rect::MakeSize(GetRenderTargetSize())});
pass_bindings_.SetScissor(IRect::MakeSize(GetRenderTargetSize()));
is_valid_ = true;
}
RenderPassMTL::~RenderPassMTL() {
if (!did_finish_encoding_) {
[encoder_ endEncoding];
did_finish_encoding_ = true;
}
}
bool RenderPassMTL::IsValid() const {
return is_valid_;
}
void RenderPassMTL::OnSetLabel(std::string label) {
#ifdef IMPELLER_DEBUG
if (label.empty()) {
return;
}
encoder_.label = @(std::string(label).c_str());
#endif // IMPELLER_DEBUG
}
bool RenderPassMTL::OnEncodeCommands(const Context& context) const {
did_finish_encoding_ = true;
[encoder_ endEncoding];
return true;
}
static bool Bind(PassBindingsCacheMTL& pass,
ShaderStage stage,
size_t bind_index,
const BufferView& view) {
if (!view.buffer) {
return false;
}
auto buffer = DeviceBufferMTL::Cast(*view.buffer).GetMTLBuffer();
// The Metal call returns void, so guard against invoking it with a nil buffer.
if (!buffer) {
return false;
}
return pass.SetBuffer(stage, bind_index, view.range.offset, buffer);
}
static bool Bind(PassBindingsCacheMTL& pass,
ShaderStage stage,
size_t bind_index,
const std::unique_ptr<const Sampler>& sampler,
const Texture& texture) {
if (!sampler || !texture.IsValid()) {
return false;
}
if (texture.NeedsMipmapGeneration()) {
// TODO(127697): generate mips when the GPU is available on iOS.
#if !FML_OS_IOS
VALIDATION_LOG
<< "Texture at binding index " << bind_index
<< " has a mip count > 1, but the mipmap has not been generated.";
return false;
#endif // !FML_OS_IOS
}
return pass.SetTexture(stage, bind_index,
TextureMTL::Cast(texture).GetMTLTexture()) &&
pass.SetSampler(stage, bind_index,
SamplerMTL::Cast(*sampler).GetMTLSamplerState());
}
// |RenderPass|
void RenderPassMTL::SetPipeline(
const std::shared_ptr<Pipeline<PipelineDescriptor>>& pipeline) {
const PipelineDescriptor& pipeline_desc = pipeline->GetDescriptor();
primitive_type_ = pipeline_desc.GetPrimitiveType();
pass_bindings_.SetRenderPipelineState(
PipelineMTL::Cast(*pipeline).GetMTLRenderPipelineState());
pass_bindings_.SetDepthStencilState(
PipelineMTL::Cast(*pipeline).GetMTLDepthStencilState());
[encoder_ setFrontFacingWinding:pipeline_desc.GetWindingOrder() ==
WindingOrder::kClockwise
? MTLWindingClockwise
: MTLWindingCounterClockwise];
[encoder_ setCullMode:ToMTLCullMode(pipeline_desc.GetCullMode())];
[encoder_ setTriangleFillMode:ToMTLTriangleFillMode(
pipeline_desc.GetPolygonMode())];
has_valid_pipeline_ = true;
}
// |RenderPass|
void RenderPassMTL::SetCommandLabel(std::string_view label) {
#ifdef IMPELLER_DEBUG
if (is_metal_trace_active_) {
has_label_ = true;
std::string label_copy(label);
[encoder_ pushDebugGroup:@(label_copy.c_str())];
}
#endif // IMPELLER_DEBUG
}
// |RenderPass|
void RenderPassMTL::SetStencilReference(uint32_t value) {
[encoder_ setStencilReferenceValue:value];
}
// |RenderPass|
void RenderPassMTL::SetBaseVertex(uint64_t value) {
base_vertex_ = value;
}
// |RenderPass|
void RenderPassMTL::SetViewport(Viewport viewport) {
pass_bindings_.SetViewport(viewport);
}
// |RenderPass|
void RenderPassMTL::SetScissor(IRect scissor) {
pass_bindings_.SetScissor(scissor);
}
// |RenderPass|
void RenderPassMTL::SetInstanceCount(size_t count) {
instance_count_ = count;
}
// |RenderPass|
bool RenderPassMTL::SetVertexBuffer(VertexBuffer buffer) {
if (buffer.index_type == IndexType::kUnknown) {
return false;
}
if (!Bind(pass_bindings_, ShaderStage::kVertex,
VertexDescriptor::kReservedVertexBufferIndex,
buffer.vertex_buffer)) {
return false;
}
vertex_count_ = buffer.vertex_count;
if (buffer.index_type != IndexType::kNone) {
index_type_ = ToMTLIndexType(buffer.index_type);
index_buffer_ = std::move(buffer.index_buffer);
}
return true;
}
// |RenderPass|
fml::Status RenderPassMTL::Draw() {
if (!has_valid_pipeline_) {
return fml::Status(fml::StatusCode::kCancelled, "Invalid pipeline.");
}
if (!index_buffer_) {
if (instance_count_ != 1u) {
[encoder_ drawPrimitives:ToMTLPrimitiveType(primitive_type_)
vertexStart:base_vertex_
vertexCount:vertex_count_
instanceCount:instance_count_
baseInstance:0u];
} else {
[encoder_ drawPrimitives:ToMTLPrimitiveType(primitive_type_)
vertexStart:base_vertex_
vertexCount:vertex_count_];
}
} else {
id<MTLBuffer> mtl_index_buffer =
DeviceBufferMTL::Cast(*index_buffer_.buffer).GetMTLBuffer();
if (instance_count_ != 1u) {
[encoder_ drawIndexedPrimitives:ToMTLPrimitiveType(primitive_type_)
indexCount:vertex_count_
indexType:index_type_
indexBuffer:mtl_index_buffer
indexBufferOffset:index_buffer_.range.offset
instanceCount:instance_count_
baseVertex:base_vertex_
baseInstance:0u];
} else {
[encoder_ drawIndexedPrimitives:ToMTLPrimitiveType(primitive_type_)
indexCount:vertex_count_
indexType:index_type_
indexBuffer:mtl_index_buffer
indexBufferOffset:index_buffer_.range.offset];
}
}
#ifdef IMPELLER_DEBUG
if (has_label_) {
[encoder_ popDebugGroup];
}
#endif // IMPELLER_DEBUG
vertex_count_ = 0u;
base_vertex_ = 0u;
instance_count_ = 1u;
index_buffer_ = {};
has_valid_pipeline_ = false;
has_label_ = false;
return fml::Status();
}
// |RenderPass|
bool RenderPassMTL::BindResource(ShaderStage stage,
DescriptorType type,
const ShaderUniformSlot& slot,
const ShaderMetadata& metadata,
BufferView view) {
return Bind(pass_bindings_, stage, slot.ext_res_0, view);
}
// |RenderPass|
bool RenderPassMTL::BindResource(
ShaderStage stage,
DescriptorType type,
const ShaderUniformSlot& slot,
const std::shared_ptr<const ShaderMetadata>& metadata,
BufferView view) {
return Bind(pass_bindings_, stage, slot.ext_res_0, view);
}
// |RenderPass|
bool RenderPassMTL::BindResource(
ShaderStage stage,
DescriptorType type,
const SampledImageSlot& slot,
const ShaderMetadata& metadata,
std::shared_ptr<const Texture> texture,
const std::unique_ptr<const Sampler>& sampler) {
return Bind(pass_bindings_, stage, slot.texture_index, sampler, *texture);
}
} // namespace impeller
| engine/impeller/renderer/backend/metal/render_pass_mtl.mm/0 | {"file_path": "engine/impeller/renderer/backend/metal/render_pass_mtl.mm", "repo_id": "engine", "token_count": 4998} | 212 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_IMPELLER_RENDERER_BACKEND_METAL_VERTEX_DESCRIPTOR_MTL_H_
#define FLUTTER_IMPELLER_RENDERER_BACKEND_METAL_VERTEX_DESCRIPTOR_MTL_H_
#include <Metal/Metal.h>
#include <set>
#include "flutter/fml/macros.h"
#include "impeller/base/backend_cast.h"
#include "impeller/renderer/vertex_descriptor.h"
namespace impeller {
class VertexDescriptorMTL {
public:
VertexDescriptorMTL();
~VertexDescriptorMTL();
bool SetStageInputsAndLayout(
const std::vector<ShaderStageIOSlot>& inputs,
const std::vector<ShaderStageBufferLayout>& layouts);
MTLVertexDescriptor* GetMTLVertexDescriptor() const;
private:
MTLVertexDescriptor* descriptor_;
VertexDescriptorMTL(const VertexDescriptorMTL&) = delete;
VertexDescriptorMTL& operator=(const VertexDescriptorMTL&) = delete;
};
} // namespace impeller
#endif // FLUTTER_IMPELLER_RENDERER_BACKEND_METAL_VERTEX_DESCRIPTOR_MTL_H_
| engine/impeller/renderer/backend/metal/vertex_descriptor_mtl.h/0 | {"file_path": "engine/impeller/renderer/backend/metal/vertex_descriptor_mtl.h", "repo_id": "engine", "token_count": 410} | 213 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_CAPABILITIES_VK_H_
#define FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_CAPABILITIES_VK_H_
#include <cstdint>
#include <map>
#include <set>
#include <string>
#include <vector>
#include "impeller/base/backend_cast.h"
#include "impeller/renderer/backend/vulkan/vk.h"
#include "impeller/renderer/capabilities.h"
namespace impeller {
class ContextVK;
//------------------------------------------------------------------------------
/// @brief A device extension available on all platforms. Without the
/// presence of these extensions, context creation will fail.
///
enum class RequiredCommonDeviceExtensionVK : uint32_t {
//----------------------------------------------------------------------------
/// For displaying content in the window system.
///
/// https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_swapchain.html
///
kKHRSwapchain,
kLast,
};
//------------------------------------------------------------------------------
/// @brief A device extension available on all Android platforms. Without
/// the presence of these extensions on Android, context creation
/// will fail.
///
/// Platform agnostic code can still check if these Android
/// extensions are present.
///
enum class RequiredAndroidDeviceExtensionVK : uint32_t {
//----------------------------------------------------------------------------
/// For importing hardware buffers used in external texture composition.
///
/// https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_ANDROID_external_memory_android_hardware_buffer.html
///
kANDROIDExternalMemoryAndroidHardwareBuffer,
//----------------------------------------------------------------------------
/// Dependency of kANDROIDExternalMemoryAndroidHardwareBuffer.
///
/// https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_sampler_ycbcr_conversion.html
///
kKHRSamplerYcbcrConversion,
//----------------------------------------------------------------------------
/// Dependency of kANDROIDExternalMemoryAndroidHardwareBuffer.
///
/// https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_external_memory.html
///
kKHRExternalMemory,
//----------------------------------------------------------------------------
/// Dependency of kANDROIDExternalMemoryAndroidHardwareBuffer.
///
/// https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_EXT_queue_family_foreign.html
///
kEXTQueueFamilyForeign,
//----------------------------------------------------------------------------
/// Dependency of kANDROIDExternalMemoryAndroidHardwareBuffer.
///
/// https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_dedicated_allocation.html
///
kKHRDedicatedAllocation,
kLast,
};
//------------------------------------------------------------------------------
/// @brief A device extension enabled if available. Subsystems cannot
/// assume availability and must check if these extensions are
/// available.
///
/// @see `CapabilitiesVK::HasExtension`.
///
enum class OptionalDeviceExtensionVK : uint32_t {
//----------------------------------------------------------------------------
/// To instrument and profile PSO creation.
///
/// https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_EXT_pipeline_creation_feedback.html
///
kEXTPipelineCreationFeedback,
//----------------------------------------------------------------------------
/// To enable context creation on MoltenVK, a non-conformant Vulkan
/// implementation.
///
/// https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_KHR_portability_subset.html
///
kVKKHRPortabilitySubset,
kLast,
};
//------------------------------------------------------------------------------
/// @brief The Vulkan layers and extensions wrangler.
///
class CapabilitiesVK final : public Capabilities,
public BackendCast<CapabilitiesVK, Capabilities> {
public:
explicit CapabilitiesVK(bool enable_validations);
~CapabilitiesVK();
bool IsValid() const;
bool AreValidationsEnabled() const;
bool HasExtension(RequiredCommonDeviceExtensionVK ext) const;
bool HasExtension(RequiredAndroidDeviceExtensionVK ext) const;
bool HasExtension(OptionalDeviceExtensionVK ext) const;
std::optional<std::vector<std::string>> GetEnabledLayers() const;
std::optional<std::vector<std::string>> GetEnabledInstanceExtensions() const;
std::optional<std::vector<std::string>> GetEnabledDeviceExtensions(
const vk::PhysicalDevice& physical_device) const;
using PhysicalDeviceFeatures =
vk::StructureChain<vk::PhysicalDeviceFeatures2,
vk::PhysicalDeviceSamplerYcbcrConversionFeaturesKHR>;
std::optional<PhysicalDeviceFeatures> GetEnabledDeviceFeatures(
const vk::PhysicalDevice& physical_device) const;
[[nodiscard]] bool SetPhysicalDevice(
const vk::PhysicalDevice& physical_device);
const vk::PhysicalDeviceProperties& GetPhysicalDeviceProperties() const;
void SetOffscreenFormat(PixelFormat pixel_format) const;
// |Capabilities|
bool SupportsOffscreenMSAA() const override;
// |Capabilities|
bool SupportsImplicitResolvingMSAA() const override;
// |Capabilities|
bool SupportsSSBO() const override;
// |Capabilities|
bool SupportsBufferToTextureBlits() const override;
// |Capabilities|
bool SupportsTextureToTextureBlits() const override;
// |Capabilities|
bool SupportsFramebufferFetch() const override;
// |Capabilities|
bool SupportsCompute() const override;
// |Capabilities|
bool SupportsComputeSubgroups() const override;
// |Capabilities|
bool SupportsReadFromResolve() const override;
// |Capabilities|
bool SupportsDecalSamplerAddressMode() const override;
// |Capabilities|
bool SupportsDeviceTransientTextures() const override;
// |Capabilities|
PixelFormat GetDefaultColorFormat() const override;
// |Capabilities|
PixelFormat GetDefaultStencilFormat() const override;
// |Capabilities|
PixelFormat GetDefaultDepthStencilFormat() const override;
// |Capabilities|
PixelFormat GetDefaultGlyphAtlasFormat() const override;
private:
bool validations_enabled_ = false;
std::map<std::string, std::set<std::string>> exts_;
std::set<RequiredCommonDeviceExtensionVK> required_common_device_extensions_;
std::set<RequiredAndroidDeviceExtensionVK>
required_android_device_extensions_;
std::set<OptionalDeviceExtensionVK> optional_device_extensions_;
mutable PixelFormat default_color_format_ = PixelFormat::kUnknown;
PixelFormat default_stencil_format_ = PixelFormat::kUnknown;
PixelFormat default_depth_stencil_format_ = PixelFormat::kUnknown;
vk::PhysicalDeviceProperties device_properties_;
bool supports_compute_subgroups_ = false;
bool supports_device_transient_textures_ = false;
bool is_valid_ = false;
bool HasExtension(const std::string& ext) const;
bool HasLayer(const std::string& layer) const;
CapabilitiesVK(const CapabilitiesVK&) = delete;
CapabilitiesVK& operator=(const CapabilitiesVK&) = delete;
};
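// Illustrative sketch (not part of the original header): required extensions
// are guaranteed by successful context creation, but optional extensions must
// be queried before use. `capabilities` is assumed to be a valid
// CapabilitiesVK on which SetPhysicalDevice has already succeeded.
inline bool CanInstrumentPipelineCreationSketch(
    const CapabilitiesVK& capabilities) {
  return capabilities.HasExtension(
      OptionalDeviceExtensionVK::kEXTPipelineCreationFeedback);
}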
} // namespace impeller
#endif // FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_CAPABILITIES_VK_H_
| engine/impeller/renderer/backend/vulkan/capabilities_vk.h/0 | {"file_path": "engine/impeller/renderer/backend/vulkan/capabilities_vk.h", "repo_id": "engine", "token_count": 2223} | 214 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_CONTEXT_VK_H_
#define FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_CONTEXT_VK_H_
#include <memory>
#include "flutter/fml/concurrent_message_loop.h"
#include "flutter/fml/mapping.h"
#include "flutter/fml/unique_fd.h"
#include "fml/thread.h"
#include "impeller/base/backend_cast.h"
#include "impeller/core/formats.h"
#include "impeller/renderer/backend/vulkan/command_pool_vk.h"
#include "impeller/renderer/backend/vulkan/device_holder_vk.h"
#include "impeller/renderer/backend/vulkan/driver_info_vk.h"
#include "impeller/renderer/backend/vulkan/pipeline_library_vk.h"
#include "impeller/renderer/backend/vulkan/queue_vk.h"
#include "impeller/renderer/backend/vulkan/sampler_library_vk.h"
#include "impeller/renderer/backend/vulkan/shader_library_vk.h"
#include "impeller/renderer/capabilities.h"
#include "impeller/renderer/command_queue.h"
#include "impeller/renderer/context.h"
namespace impeller {
bool HasValidationLayers();
class CommandEncoderFactoryVK;
class CommandEncoderVK;
class CommandPoolRecyclerVK;
class DebugReportVK;
class FenceWaiterVK;
class ResourceManagerVK;
class SurfaceContextVK;
class GPUTracerVK;
class DescriptorPoolRecyclerVK;
class CommandQueueVK;
class ContextVK final : public Context,
public BackendCast<ContextVK, Context>,
public std::enable_shared_from_this<ContextVK> {
public:
struct Settings {
PFN_vkGetInstanceProcAddr proc_address_callback = nullptr;
std::vector<std::shared_ptr<fml::Mapping>> shader_libraries_data;
fml::UniqueFD cache_directory;
bool enable_validation = false;
bool enable_gpu_tracing = false;
Settings() = default;
Settings(Settings&&) = default;
};
/// Choose the number of worker threads the context_vk will create.
///
/// Visible for testing.
static size_t ChooseThreadCountForWorkers(size_t hardware_concurrency);
static std::shared_ptr<ContextVK> Create(Settings settings);
uint64_t GetHash() const { return hash_; }
// |Context|
~ContextVK() override;
// |Context|
BackendType GetBackendType() const override;
// |Context|
std::string DescribeGpuModel() const override;
// |Context|
bool IsValid() const override;
// |Context|
std::shared_ptr<Allocator> GetResourceAllocator() const override;
// |Context|
std::shared_ptr<ShaderLibrary> GetShaderLibrary() const override;
// |Context|
std::shared_ptr<SamplerLibrary> GetSamplerLibrary() const override;
// |Context|
std::shared_ptr<PipelineLibrary> GetPipelineLibrary() const override;
// |Context|
std::shared_ptr<CommandBuffer> CreateCommandBuffer() const override;
// |Context|
const std::shared_ptr<const Capabilities>& GetCapabilities() const override;
const std::shared_ptr<YUVConversionLibraryVK>& GetYUVConversionLibrary()
const;
// |Context|
void Shutdown() override;
void SetOffscreenFormat(PixelFormat pixel_format);
template <typename T>
bool SetDebugName(T handle, std::string_view label) const {
return SetDebugName(GetDevice(), handle, label);
}
template <typename T>
static bool SetDebugName(const vk::Device& device,
T handle,
std::string_view label) {
if (!HasValidationLayers()) {
// No-op if validation layers are not enabled.
return true;
}
auto c_handle = static_cast<typename T::CType>(handle);
vk::DebugUtilsObjectNameInfoEXT info;
info.objectType = T::objectType;
info.pObjectName = label.data();
info.objectHandle = reinterpret_cast<decltype(info.objectHandle)>(c_handle);
if (device.setDebugUtilsObjectNameEXT(info) != vk::Result::eSuccess) {
VALIDATION_LOG << "Unable to set debug name: " << label;
return false;
}
return true;
}
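  // Illustrative sketch (not part of the original header): tagging a Vulkan
  // handle so it shows up in validation messages and captures. `buffer` is a
  // hypothetical vk::Buffer owned by the caller. This is a no-op unless
  // validation layers are enabled.
  //
  //   context.SetDebugName(buffer, "Transient staging buffer");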
std::shared_ptr<DeviceHolderVK> GetDeviceHolder() const {
return device_holder_;
}
vk::Instance GetInstance() const;
const vk::Device& GetDevice() const;
const std::unique_ptr<DriverInfoVK>& GetDriverInfo() const;
const std::shared_ptr<fml::ConcurrentTaskRunner>
GetConcurrentWorkerTaskRunner() const;
std::shared_ptr<SurfaceContextVK> CreateSurfaceContext();
const std::shared_ptr<QueueVK>& GetGraphicsQueue() const;
vk::PhysicalDevice GetPhysicalDevice() const;
std::shared_ptr<FenceWaiterVK> GetFenceWaiter() const;
std::shared_ptr<ResourceManagerVK> GetResourceManager() const;
std::shared_ptr<CommandPoolRecyclerVK> GetCommandPoolRecycler() const;
std::shared_ptr<DescriptorPoolRecyclerVK> GetDescriptorPoolRecycler() const;
std::shared_ptr<CommandQueue> GetCommandQueue() const override;
std::shared_ptr<GPUTracerVK> GetGPUTracer() const;
void RecordFrameEndTime() const;
void InitializeCommonlyUsedShadersIfNeeded() const override;
private:
struct DeviceHolderImpl : public DeviceHolderVK {
// |DeviceHolder|
const vk::Device& GetDevice() const override { return device.get(); }
// |DeviceHolder|
const vk::PhysicalDevice& GetPhysicalDevice() const override {
return physical_device;
}
vk::UniqueInstance instance;
vk::PhysicalDevice physical_device;
vk::UniqueDevice device;
};
std::shared_ptr<DeviceHolderImpl> device_holder_;
std::unique_ptr<DriverInfoVK> driver_info_;
std::unique_ptr<DebugReportVK> debug_report_;
std::shared_ptr<Allocator> allocator_;
std::shared_ptr<ShaderLibraryVK> shader_library_;
std::shared_ptr<SamplerLibraryVK> sampler_library_;
std::shared_ptr<PipelineLibraryVK> pipeline_library_;
std::shared_ptr<YUVConversionLibraryVK> yuv_conversion_library_;
QueuesVK queues_;
std::shared_ptr<const Capabilities> device_capabilities_;
std::shared_ptr<FenceWaiterVK> fence_waiter_;
std::shared_ptr<ResourceManagerVK> resource_manager_;
std::shared_ptr<CommandPoolRecyclerVK> command_pool_recycler_;
std::string device_name_;
std::shared_ptr<fml::ConcurrentMessageLoop> raster_message_loop_;
std::shared_ptr<GPUTracerVK> gpu_tracer_;
std::shared_ptr<DescriptorPoolRecyclerVK> descriptor_pool_recycler_;
std::shared_ptr<CommandQueue> command_queue_vk_;
const uint64_t hash_;
bool is_valid_ = false;
ContextVK();
void Setup(Settings settings);
std::unique_ptr<CommandEncoderFactoryVK> CreateGraphicsCommandEncoderFactory()
const;
ContextVK(const ContextVK&) = delete;
ContextVK& operator=(const ContextVK&) = delete;
};
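// Illustrative sketch (not part of the original header): minimal context
// bring-up. The instance proc address callback and the reflected shader
// library mappings are assumed to be supplied by the embedder; every other
// setting keeps its default.
inline std::shared_ptr<ContextVK> CreateContextSketch(
    PFN_vkGetInstanceProcAddr proc_address_callback,
    std::vector<std::shared_ptr<fml::Mapping>> shader_libraries_data) {
  ContextVK::Settings settings;
  settings.proc_address_callback = proc_address_callback;
  settings.shader_libraries_data = std::move(shader_libraries_data);
  settings.enable_validation = false;  // Enable only while debugging.
  auto context = ContextVK::Create(std::move(settings));
  if (!context || !context->IsValid()) {
    return nullptr;
  }
  return context;
}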
} // namespace impeller
#endif // FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_CONTEXT_VK_H_
| engine/impeller/renderer/backend/vulkan/context_vk.h/0 | {"file_path": "engine/impeller/renderer/backend/vulkan/context_vk.h", "repo_id": "engine", "token_count": 2361} | 215 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "impeller/renderer/backend/vulkan/formats_vk.h"
namespace impeller {
vk::PipelineDepthStencilStateCreateInfo ToVKPipelineDepthStencilStateCreateInfo(
std::optional<DepthAttachmentDescriptor> depth,
std::optional<StencilAttachmentDescriptor> front,
std::optional<StencilAttachmentDescriptor> back) {
vk::PipelineDepthStencilStateCreateInfo info;
if (depth.has_value()) {
info.depthTestEnable = true;
info.depthWriteEnable = depth->depth_write_enabled;
info.depthCompareOp = ToVKCompareOp(depth->depth_compare);
info.minDepthBounds = 0.0f;
info.maxDepthBounds = 1.0f;
}
if (front.has_value()) {
info.stencilTestEnable = true;
info.front = ToVKStencilOpState(*front);
}
if (back.has_value()) {
info.stencilTestEnable = true;
info.back = ToVKStencilOpState(*back);
}
return info;
}
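// Illustrative sketch (not part of the original file): a depth-only state with
// no stencil faces, so stencilTestEnable remains false in the returned struct.
// The compare function choice is arbitrary and only for illustration.
[[maybe_unused]] static vk::PipelineDepthStencilStateCreateInfo
MakeDepthOnlyStateSketch() {
  DepthAttachmentDescriptor depth;
  depth.depth_write_enabled = true;
  depth.depth_compare = CompareFunction::kLessEqual;
  return ToVKPipelineDepthStencilStateCreateInfo(depth, std::nullopt,
                                                 std::nullopt);
}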
} // namespace impeller
| engine/impeller/renderer/backend/vulkan/formats_vk.cc/0 | {"file_path": "engine/impeller/renderer/backend/vulkan/formats_vk.cc", "repo_id": "engine", "token_count": 378} | 216 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "impeller/renderer/backend/vulkan/render_pass_vk.h"
#include <array>
#include <cstdint>
#include <vector>
#include "fml/status.h"
#include "impeller/base/validation.h"
#include "impeller/core/device_buffer.h"
#include "impeller/core/formats.h"
#include "impeller/core/texture.h"
#include "impeller/renderer/backend/vulkan/barrier_vk.h"
#include "impeller/renderer/backend/vulkan/command_buffer_vk.h"
#include "impeller/renderer/backend/vulkan/command_encoder_vk.h"
#include "impeller/renderer/backend/vulkan/context_vk.h"
#include "impeller/renderer/backend/vulkan/device_buffer_vk.h"
#include "impeller/renderer/backend/vulkan/formats_vk.h"
#include "impeller/renderer/backend/vulkan/pipeline_vk.h"
#include "impeller/renderer/backend/vulkan/render_pass_builder_vk.h"
#include "impeller/renderer/backend/vulkan/sampler_vk.h"
#include "impeller/renderer/backend/vulkan/shared_object_vk.h"
#include "impeller/renderer/backend/vulkan/texture_vk.h"
#include "impeller/renderer/backend/vulkan/vk.h"
#include "vulkan/vulkan_handles.hpp"
namespace impeller {
// Warning: if any of the constant values or layouts are changed in the
// framebuffer fetch shader, then this input binding may need to be
// manually changed.
//
// See: impeller/entity/shaders/blending/framebuffer_blend.frag
static constexpr size_t kMagicSubpassInputBinding = 64u;
static vk::ClearColorValue VKClearValueFromColor(Color color) {
vk::ClearColorValue value;
value.setFloat32(
std::array<float, 4>{color.red, color.green, color.blue, color.alpha});
return value;
}
static vk::ClearDepthStencilValue VKClearValueFromDepthStencil(uint32_t stencil,
Scalar depth) {
vk::ClearDepthStencilValue value;
value.depth = depth;
value.stencil = stencil;
return value;
}
static std::vector<vk::ClearValue> GetVKClearValues(
const RenderTarget& target) {
std::vector<vk::ClearValue> clears;
for (const auto& [_, color] : target.GetColorAttachments()) {
clears.emplace_back(VKClearValueFromColor(color.clear_color));
if (color.resolve_texture) {
clears.emplace_back(VKClearValueFromColor(color.clear_color));
}
}
const auto& depth = target.GetDepthAttachment();
const auto& stencil = target.GetStencilAttachment();
if (depth.has_value()) {
clears.emplace_back(VKClearValueFromDepthStencil(
stencil ? stencil->clear_stencil : 0u, depth->clear_depth));
} else if (stencil.has_value()) {
clears.emplace_back(VKClearValueFromDepthStencil(
stencil->clear_stencil, depth ? depth->clear_depth : 0.0f));
}
return clears;
}
SharedHandleVK<vk::RenderPass> RenderPassVK::CreateVKRenderPass(
const ContextVK& context,
const SharedHandleVK<vk::RenderPass>& recycled_renderpass,
const std::shared_ptr<CommandBufferVK>& command_buffer) const {
BarrierVK barrier;
barrier.new_layout = vk::ImageLayout::eGeneral;
barrier.cmd_buffer = command_buffer->GetEncoder()->GetCommandBuffer();
barrier.src_access = vk::AccessFlagBits::eShaderRead;
barrier.src_stage = vk::PipelineStageFlagBits::eFragmentShader;
barrier.dst_access = vk::AccessFlagBits::eColorAttachmentWrite |
vk::AccessFlagBits::eTransferWrite;
barrier.dst_stage = vk::PipelineStageFlagBits::eColorAttachmentOutput |
vk::PipelineStageFlagBits::eTransfer;
RenderPassBuilderVK builder;
for (const auto& [bind_point, color] : render_target_.GetColorAttachments()) {
builder.SetColorAttachment(
bind_point, //
color.texture->GetTextureDescriptor().format, //
color.texture->GetTextureDescriptor().sample_count, //
color.load_action, //
color.store_action //
);
TextureVK::Cast(*color.texture).SetLayout(barrier);
if (color.resolve_texture) {
TextureVK::Cast(*color.resolve_texture).SetLayout(barrier);
}
}
if (auto depth = render_target_.GetDepthAttachment(); depth.has_value()) {
builder.SetDepthStencilAttachment(
depth->texture->GetTextureDescriptor().format, //
depth->texture->GetTextureDescriptor().sample_count, //
depth->load_action, //
depth->store_action //
);
TextureVK::Cast(*depth->texture).SetLayout(barrier);
} else if (auto stencil = render_target_.GetStencilAttachment();
stencil.has_value()) {
builder.SetStencilAttachment(
stencil->texture->GetTextureDescriptor().format, //
stencil->texture->GetTextureDescriptor().sample_count, //
stencil->load_action, //
stencil->store_action //
);
TextureVK::Cast(*stencil->texture).SetLayout(barrier);
}
if (recycled_renderpass != nullptr) {
return recycled_renderpass;
}
auto pass = builder.Build(context.GetDevice());
if (!pass) {
VALIDATION_LOG << "Failed to create render pass for framebuffer.";
return {};
}
context.SetDebugName(pass.get(), debug_label_.c_str());
return MakeSharedVK(std::move(pass));
}
RenderPassVK::RenderPassVK(const std::shared_ptr<const Context>& context,
const RenderTarget& target,
std::shared_ptr<CommandBufferVK> command_buffer)
: RenderPass(context, target), command_buffer_(std::move(command_buffer)) {
color_image_vk_ =
render_target_.GetColorAttachments().find(0u)->second.texture;
resolve_image_vk_ =
render_target_.GetColorAttachments().find(0u)->second.resolve_texture;
const auto& vk_context = ContextVK::Cast(*context);
const std::shared_ptr<CommandEncoderVK>& encoder =
command_buffer_->GetEncoder();
command_buffer_vk_ = encoder->GetCommandBuffer();
render_target_.IterateAllAttachments(
[&encoder](const auto& attachment) -> bool {
encoder->Track(attachment.texture);
encoder->Track(attachment.resolve_texture);
return true;
});
SharedHandleVK<vk::RenderPass> recycled_render_pass;
SharedHandleVK<vk::Framebuffer> recycled_framebuffer;
if (resolve_image_vk_) {
recycled_render_pass =
TextureVK::Cast(*resolve_image_vk_).GetCachedRenderPass();
recycled_framebuffer =
TextureVK::Cast(*resolve_image_vk_).GetCachedFramebuffer();
}
const auto& target_size = render_target_.GetRenderTargetSize();
render_pass_ =
CreateVKRenderPass(vk_context, recycled_render_pass, command_buffer_);
if (!render_pass_) {
VALIDATION_LOG << "Could not create renderpass.";
is_valid_ = false;
return;
}
auto framebuffer = (recycled_framebuffer == nullptr)
? CreateVKFramebuffer(vk_context, *render_pass_)
: recycled_framebuffer;
if (!framebuffer) {
VALIDATION_LOG << "Could not create framebuffer.";
is_valid_ = false;
return;
}
if (!encoder->Track(framebuffer) || !encoder->Track(render_pass_)) {
is_valid_ = false;
return;
}
if (resolve_image_vk_) {
TextureVK::Cast(*resolve_image_vk_).SetCachedFramebuffer(framebuffer);
TextureVK::Cast(*resolve_image_vk_).SetCachedRenderPass(render_pass_);
}
auto clear_values = GetVKClearValues(render_target_);
vk::RenderPassBeginInfo pass_info;
pass_info.renderPass = *render_pass_;
pass_info.framebuffer = *framebuffer;
pass_info.renderArea.extent.width = static_cast<uint32_t>(target_size.width);
pass_info.renderArea.extent.height =
static_cast<uint32_t>(target_size.height);
pass_info.setClearValues(clear_values);
command_buffer_vk_.beginRenderPass(pass_info, vk::SubpassContents::eInline);
// Set the initial viewport.
const auto vp = Viewport{.rect = Rect::MakeSize(target_size)};
vk::Viewport viewport = vk::Viewport()
.setWidth(vp.rect.GetWidth())
.setHeight(-vp.rect.GetHeight())
.setY(vp.rect.GetHeight())
.setMinDepth(0.0f)
.setMaxDepth(1.0f);
command_buffer_vk_.setViewport(0, 1, &viewport);
// Set the initial scissor.
const auto sc = IRect::MakeSize(target_size);
vk::Rect2D scissor =
vk::Rect2D()
.setOffset(vk::Offset2D(sc.GetX(), sc.GetY()))
.setExtent(vk::Extent2D(sc.GetWidth(), sc.GetHeight()));
command_buffer_vk_.setScissor(0, 1, &scissor);
// Set the initial stencil reference.
command_buffer_vk_.setStencilReference(
vk::StencilFaceFlagBits::eVkStencilFrontAndBack, 0u);
is_valid_ = true;
}
RenderPassVK::~RenderPassVK() = default;
bool RenderPassVK::IsValid() const {
return is_valid_;
}
void RenderPassVK::OnSetLabel(std::string label) {
#ifdef IMPELLER_DEBUG
ContextVK::Cast(*context_).SetDebugName(render_pass_->Get(),
std::string(label).c_str());
#endif // IMPELLER_DEBUG
}
SharedHandleVK<vk::Framebuffer> RenderPassVK::CreateVKFramebuffer(
const ContextVK& context,
const vk::RenderPass& pass) const {
vk::FramebufferCreateInfo fb_info;
fb_info.renderPass = pass;
const auto target_size = render_target_.GetRenderTargetSize();
fb_info.width = target_size.width;
fb_info.height = target_size.height;
fb_info.layers = 1u;
std::vector<vk::ImageView> attachments;
// This bit must be consistent to ensure compatibility with the pass created
// earlier. Follow this order: Color attachments, then depth-stencil, then
// stencil.
for (const auto& [_, color] : render_target_.GetColorAttachments()) {
// The bind point doesn't matter here since that information is present in
// the render pass.
attachments.emplace_back(
TextureVK::Cast(*color.texture).GetRenderTargetView());
if (color.resolve_texture) {
attachments.emplace_back(
TextureVK::Cast(*color.resolve_texture).GetRenderTargetView());
}
}
if (auto depth = render_target_.GetDepthAttachment(); depth.has_value()) {
attachments.emplace_back(
TextureVK::Cast(*depth->texture).GetRenderTargetView());
} else if (auto stencil = render_target_.GetStencilAttachment();
stencil.has_value()) {
attachments.emplace_back(
TextureVK::Cast(*stencil->texture).GetRenderTargetView());
}
fb_info.setAttachments(attachments);
auto [result, framebuffer] =
context.GetDevice().createFramebufferUnique(fb_info);
if (result != vk::Result::eSuccess) {
VALIDATION_LOG << "Could not create framebuffer: " << vk::to_string(result);
return {};
}
return MakeSharedVK(std::move(framebuffer));
}
// |RenderPass|
void RenderPassVK::SetPipeline(
const std::shared_ptr<Pipeline<PipelineDescriptor>>& pipeline) {
pipeline_ = pipeline;
if (!pipeline_) {
return;
}
pipeline_uses_input_attachments_ =
pipeline_->GetDescriptor().GetVertexDescriptor()->UsesInputAttacments();
if (pipeline_uses_input_attachments_) {
if (bound_image_offset_ >= kMaxBindings) {
pipeline_ = nullptr;
return;
}
vk::DescriptorImageInfo image_info;
image_info.imageLayout = vk::ImageLayout::eGeneral;
image_info.sampler = VK_NULL_HANDLE;
image_info.imageView = TextureVK::Cast(*color_image_vk_).GetImageView();
image_workspace_[bound_image_offset_++] = image_info;
vk::WriteDescriptorSet write_set;
write_set.dstBinding = kMagicSubpassInputBinding;
write_set.descriptorCount = 1u;
write_set.descriptorType = vk::DescriptorType::eInputAttachment;
write_set.pImageInfo = &image_workspace_[bound_image_offset_ - 1];
write_workspace_[descriptor_write_offset_++] = write_set;
}
}
// |RenderPass|
void RenderPassVK::SetCommandLabel(std::string_view label) {
#ifdef IMPELLER_DEBUG
command_buffer_->GetEncoder()->PushDebugGroup(label);
has_label_ = true;
#endif // IMPELLER_DEBUG
}
// |RenderPass|
void RenderPassVK::SetStencilReference(uint32_t value) {
command_buffer_vk_.setStencilReference(
vk::StencilFaceFlagBits::eVkStencilFrontAndBack, value);
}
// |RenderPass|
void RenderPassVK::SetBaseVertex(uint64_t value) {
base_vertex_ = value;
}
// |RenderPass|
void RenderPassVK::SetViewport(Viewport viewport) {
vk::Viewport viewport_vk = vk::Viewport()
.setWidth(viewport.rect.GetWidth())
.setHeight(-viewport.rect.GetHeight())
.setY(viewport.rect.GetHeight())
.setMinDepth(0.0f)
.setMaxDepth(1.0f);
command_buffer_vk_.setViewport(0, 1, &viewport_vk);
}
// |RenderPass|
void RenderPassVK::SetScissor(IRect scissor) {
vk::Rect2D scissor_vk =
vk::Rect2D()
.setOffset(vk::Offset2D(scissor.GetX(), scissor.GetY()))
.setExtent(vk::Extent2D(scissor.GetWidth(), scissor.GetHeight()));
command_buffer_vk_.setScissor(0, 1, &scissor_vk);
}
// |RenderPass|
void RenderPassVK::SetInstanceCount(size_t count) {
instance_count_ = count;
}
// |RenderPass|
bool RenderPassVK::SetVertexBuffer(VertexBuffer buffer) {
vertex_count_ = buffer.vertex_count;
if (buffer.index_type == IndexType::kUnknown || !buffer.vertex_buffer) {
return false;
}
if (!command_buffer_->GetEncoder()->Track(buffer.vertex_buffer.buffer)) {
return false;
}
// Bind the vertex buffer.
vk::Buffer vertex_buffer_handle =
DeviceBufferVK::Cast(*buffer.vertex_buffer.buffer).GetBuffer();
vk::Buffer vertex_buffers[] = {vertex_buffer_handle};
vk::DeviceSize vertex_buffer_offsets[] = {buffer.vertex_buffer.range.offset};
command_buffer_vk_.bindVertexBuffers(0u, 1u, vertex_buffers,
vertex_buffer_offsets);
// Bind the index buffer.
if (buffer.index_type != IndexType::kNone) {
has_index_buffer_ = true;
const BufferView& index_buffer_view = buffer.index_buffer;
if (!index_buffer_view) {
return false;
}
const std::shared_ptr<const DeviceBuffer>& index_buffer =
index_buffer_view.buffer;
if (!index_buffer) {
VALIDATION_LOG << "Failed to acquire device buffer"
<< " for index buffer view";
return false;
}
if (!command_buffer_->GetEncoder()->Track(index_buffer)) {
return false;
}
vk::Buffer index_buffer_handle =
DeviceBufferVK::Cast(*index_buffer).GetBuffer();
command_buffer_vk_.bindIndexBuffer(index_buffer_handle,
index_buffer_view.range.offset,
ToVKIndexType(buffer.index_type));
} else {
has_index_buffer_ = false;
}
return true;
}
// |RenderPass|
fml::Status RenderPassVK::Draw() {
if (!pipeline_) {
return fml::Status(fml::StatusCode::kCancelled,
"No valid pipeline is bound to the RenderPass.");
}
//----------------------------------------------------------------------------
/// If there are immutable samplers referenced in the render pass, the base
/// pipeline variant is no longer valid and needs to be re-constructed to
/// reference the samplers.
///
/// This is an instance of JIT creation of PSOs that can cause jank. It is
/// unavoidable because it isn't possible to know all possible combinations of
/// target YUV conversions. Fortunately, this will only ever happen when
/// rendering to external textures, such as Android Hardware Buffers on
/// Android.
///
/// Even when JIT creation is unavoidable, pipelines will cache their variants
/// when able and all pipeline creation will happen via a base pipeline cache
/// anyway. So the jank is mostly ameliorated and should only ever happen when
/// the first unknown YUV conversion is encountered.
///
/// Jank can be completely eliminated by pre-populating known YUV conversion
/// pipelines.
if (immutable_sampler_) {
std::shared_ptr<PipelineVK> pipeline_variant =
PipelineVK::Cast(*pipeline_)
.CreateVariantForImmutableSamplers(immutable_sampler_);
if (!pipeline_variant) {
return fml::Status(
fml::StatusCode::kAborted,
"Could not create pipeline variant with immutable sampler.");
}
pipeline_ = std::move(pipeline_variant);
}
const auto& context_vk = ContextVK::Cast(*context_);
const auto& pipeline_vk = PipelineVK::Cast(*pipeline_);
auto descriptor_result =
command_buffer_->GetEncoder()->AllocateDescriptorSets(
pipeline_vk.GetDescriptorSetLayout(), context_vk);
if (!descriptor_result.ok()) {
return fml::Status(fml::StatusCode::kAborted,
"Could not allocate descriptor sets.");
}
const auto descriptor_set = descriptor_result.value();
const auto pipeline_layout = pipeline_vk.GetPipelineLayout();
command_buffer_vk_.bindPipeline(vk::PipelineBindPoint::eGraphics,
pipeline_vk.GetPipeline());
for (auto i = 0u; i < descriptor_write_offset_; i++) {
write_workspace_[i].dstSet = descriptor_set;
}
context_vk.GetDevice().updateDescriptorSets(descriptor_write_offset_,
write_workspace_.data(), 0u, {});
command_buffer_vk_.bindDescriptorSets(
vk::PipelineBindPoint::eGraphics, // bind point
pipeline_layout, // layout
0, // first set
1, // set count
&descriptor_set, // sets
0, // offset count
nullptr // offsets
);
if (pipeline_uses_input_attachments_) {
InsertBarrierForInputAttachmentRead(
command_buffer_vk_, TextureVK::Cast(*color_image_vk_).GetImage());
}
if (has_index_buffer_) {
command_buffer_vk_.drawIndexed(vertex_count_, // index count
instance_count_, // instance count
0u, // first index
base_vertex_, // vertex offset
0u // first instance
);
} else {
command_buffer_vk_.draw(vertex_count_, // vertex count
instance_count_, // instance count
base_vertex_, // vertex offset
0u // first instance
);
}
#ifdef IMPELLER_DEBUG
if (has_label_) {
command_buffer_->GetEncoder()->PopDebugGroup();
}
#endif // IMPELLER_DEBUG
has_label_ = false;
has_index_buffer_ = false;
bound_image_offset_ = 0u;
bound_buffer_offset_ = 0u;
descriptor_write_offset_ = 0u;
instance_count_ = 1u;
base_vertex_ = 0u;
vertex_count_ = 0u;
pipeline_ = nullptr;
pipeline_uses_input_attachments_ = false;
immutable_sampler_ = nullptr;
return fml::Status();
}
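// Illustrative sketch (not part of the original file): the per-draw sequence a
// caller drives against this render pass. `pass`, `pipeline`, and
// `vertex_buffer` are hypothetical and assumed valid. Draw() resets the
// per-draw state (bound pipeline, vertex/index bindings, instance count), so
// every draw re-binds what it needs.
//
//   pass.SetPipeline(pipeline);
//   pass.SetVertexBuffer(vertex_buffer);
//   if (!pass.Draw().ok()) {
//     // Either no valid pipeline was bound or descriptor set allocation
//     // failed; see the returned status for details.
//   }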
// The RenderPassVK binding methods only need the binding, set, and buffer type
// information.
bool RenderPassVK::BindResource(ShaderStage stage,
DescriptorType type,
const ShaderUniformSlot& slot,
const ShaderMetadata& metadata,
BufferView view) {
return BindResource(slot.binding, type, view);
}
bool RenderPassVK::BindResource(
ShaderStage stage,
DescriptorType type,
const ShaderUniformSlot& slot,
const std::shared_ptr<const ShaderMetadata>& metadata,
BufferView view) {
return BindResource(slot.binding, type, view);
}
bool RenderPassVK::BindResource(size_t binding,
DescriptorType type,
const BufferView& view) {
if (bound_buffer_offset_ >= kMaxBindings) {
return false;
}
const std::shared_ptr<const DeviceBuffer>& device_buffer = view.buffer;
auto buffer = DeviceBufferVK::Cast(*device_buffer).GetBuffer();
if (!buffer) {
return false;
}
if (!command_buffer_->GetEncoder()->Track(device_buffer)) {
return false;
}
uint32_t offset = view.range.offset;
vk::DescriptorBufferInfo buffer_info;
buffer_info.buffer = buffer;
buffer_info.offset = offset;
buffer_info.range = view.range.length;
buffer_workspace_[bound_buffer_offset_++] = buffer_info;
vk::WriteDescriptorSet write_set;
write_set.dstBinding = binding;
write_set.descriptorCount = 1u;
write_set.descriptorType = ToVKDescriptorType(type);
write_set.pBufferInfo = &buffer_workspace_[bound_buffer_offset_ - 1];
write_workspace_[descriptor_write_offset_++] = write_set;
return true;
}
bool RenderPassVK::BindResource(ShaderStage stage,
DescriptorType type,
const SampledImageSlot& slot,
const ShaderMetadata& metadata,
std::shared_ptr<const Texture> texture,
const std::unique_ptr<const Sampler>& sampler) {
if (bound_buffer_offset_ >= kMaxBindings) {
return false;
}
if (!texture->IsValid() || !sampler) {
return false;
}
const TextureVK& texture_vk = TextureVK::Cast(*texture);
const SamplerVK& sampler_vk = SamplerVK::Cast(*sampler);
if (!command_buffer_->GetEncoder()->Track(texture)) {
return false;
}
if (!immutable_sampler_) {
immutable_sampler_ = texture_vk.GetImmutableSamplerVariant(sampler_vk);
}
vk::DescriptorImageInfo image_info;
image_info.imageLayout = vk::ImageLayout::eShaderReadOnlyOptimal;
image_info.sampler = sampler_vk.GetSampler();
image_info.imageView = texture_vk.GetImageView();
image_workspace_[bound_image_offset_++] = image_info;
vk::WriteDescriptorSet write_set;
write_set.dstBinding = slot.binding;
write_set.descriptorCount = 1u;
write_set.descriptorType = vk::DescriptorType::eCombinedImageSampler;
write_set.pImageInfo = &image_workspace_[bound_image_offset_ - 1];
write_workspace_[descriptor_write_offset_++] = write_set;
return true;
}
bool RenderPassVK::OnEncodeCommands(const Context& context) const {
command_buffer_->GetEncoder()->GetCommandBuffer().endRenderPass();
// If this render target will be consumed by a subsequent render pass,
// perform a layout transition to a shader read state.
const std::shared_ptr<Texture>& result_texture =
resolve_image_vk_ ? resolve_image_vk_ : color_image_vk_;
if (result_texture->GetTextureDescriptor().usage &
TextureUsage::kShaderRead) {
BarrierVK barrier;
barrier.cmd_buffer = command_buffer_vk_;
barrier.src_access = vk::AccessFlagBits::eColorAttachmentWrite |
vk::AccessFlagBits::eTransferWrite;
barrier.src_stage = vk::PipelineStageFlagBits::eColorAttachmentOutput |
vk::PipelineStageFlagBits::eTransfer;
barrier.dst_access = vk::AccessFlagBits::eShaderRead;
barrier.dst_stage = vk::PipelineStageFlagBits::eFragmentShader;
barrier.new_layout = vk::ImageLayout::eShaderReadOnlyOptimal;
if (!TextureVK::Cast(*result_texture).SetLayout(barrier)) {
return false;
}
}
return true;
}
} // namespace impeller
| engine/impeller/renderer/backend/vulkan/render_pass_vk.cc/0 | {"file_path": "engine/impeller/renderer/backend/vulkan/render_pass_vk.cc", "repo_id": "engine", "token_count": 9740} | 217 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_SURFACE_CONTEXT_VK_H_
#define FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_SURFACE_CONTEXT_VK_H_
#include <memory>
#include "impeller/base/backend_cast.h"
#include "impeller/renderer/backend/vulkan/vk.h"
#include "impeller/renderer/command_queue.h"
#include "impeller/renderer/context.h"
namespace impeller {
class ContextVK;
class Surface;
class KHRSwapchainVK;
/// For Vulkan, there is both a ContextVK that implements Context and a
/// SurfaceContextVK that also implements Context and takes a ContextVK as its
/// parent. There is a one-to-many relationship between ContextVK and
/// SurfaceContextVK.
///
/// Most operations in this class are delegated to the parent ContextVK.
/// This class specifically manages swapchains and creation of VkSurfaces on
/// Android. By maintaining the swapchain this way, it is possible to have
/// multiple surfaces sharing the same ContextVK without stepping on each
/// other's swapchains.
class SurfaceContextVK : public Context,
public BackendCast<SurfaceContextVK, Context> {
public:
explicit SurfaceContextVK(const std::shared_ptr<ContextVK>& parent);
// |Context|
~SurfaceContextVK() override;
// |Context|
BackendType GetBackendType() const override;
// |Context|
std::string DescribeGpuModel() const override;
// |Context|
bool IsValid() const override;
// |Context|
std::shared_ptr<Allocator> GetResourceAllocator() const override;
// |Context|
std::shared_ptr<ShaderLibrary> GetShaderLibrary() const override;
// |Context|
std::shared_ptr<SamplerLibrary> GetSamplerLibrary() const override;
// |Context|
std::shared_ptr<PipelineLibrary> GetPipelineLibrary() const override;
// |Context|
std::shared_ptr<CommandBuffer> CreateCommandBuffer() const override;
// |Context|
const std::shared_ptr<const Capabilities>& GetCapabilities() const override;
// |Context|
std::shared_ptr<CommandQueue> GetCommandQueue() const override;
// |Context|
void Shutdown() override;
[[nodiscard]] bool SetWindowSurface(vk::UniqueSurfaceKHR surface,
const ISize& size);
std::unique_ptr<Surface> AcquireNextSurface();
/// @brief Mark the current swapchain configuration as dirty, forcing it to be
/// recreated on the next frame.
void UpdateSurfaceSize(const ISize& size) const;
void InitializeCommonlyUsedShadersIfNeeded() const override;
#ifdef FML_OS_ANDROID
vk::UniqueSurfaceKHR CreateAndroidSurface(ANativeWindow* window) const;
#endif // FML_OS_ANDROID
const vk::Device& GetDevice() const;
const ContextVK& GetParent() const;
private:
std::shared_ptr<ContextVK> parent_;
std::shared_ptr<KHRSwapchainVK> swapchain_;
};
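// Illustrative sketch (not part of the original header): the per-window flow
// described in the class comment. `surface_context` is assumed to have been
// obtained from ContextVK::CreateSurfaceContext, and `window_surface` from the
// embedder (e.g. CreateAndroidSurface on Android). Error handling is minimal.
inline std::unique_ptr<Surface> AcquireFrameSketch(
    SurfaceContextVK& surface_context,
    vk::UniqueSurfaceKHR window_surface,
    const ISize& size) {
  // Binding the window surface (re)creates the swapchain at the given size.
  if (!surface_context.SetWindowSurface(std::move(window_surface), size)) {
    return nullptr;
  }
  // One drawable surface per frame; returns nullptr if acquisition fails.
  return surface_context.AcquireNextSurface();
}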
} // namespace impeller
#endif // FLUTTER_IMPELLER_RENDERER_BACKEND_VULKAN_SURFACE_CONTEXT_VK_H_
| engine/impeller/renderer/backend/vulkan/surface_context_vk.h/0 | {"file_path": "engine/impeller/renderer/backend/vulkan/surface_context_vk.h", "repo_id": "engine", "token_count": 991} | 218 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "impeller/renderer/backend/vulkan/texture_source_vk.h"
namespace impeller {
TextureSourceVK::TextureSourceVK(TextureDescriptor desc) : desc_(desc) {}
TextureSourceVK::~TextureSourceVK() = default;
const TextureDescriptor& TextureSourceVK::GetTextureDescriptor() const {
return desc_;
}
std::shared_ptr<YUVConversionVK> TextureSourceVK::GetYUVConversion() const {
return nullptr;
}
vk::ImageLayout TextureSourceVK::GetLayout() const {
ReaderLock lock(layout_mutex_);
return layout_;
}
vk::ImageLayout TextureSourceVK::SetLayoutWithoutEncoding(
vk::ImageLayout layout) const {
WriterLock lock(layout_mutex_);
const auto old_layout = layout_;
layout_ = layout;
return old_layout;
}
fml::Status TextureSourceVK::SetLayout(const BarrierVK& barrier) const {
const auto old_layout = SetLayoutWithoutEncoding(barrier.new_layout);
if (barrier.new_layout == old_layout) {
return {};
}
vk::ImageMemoryBarrier image_barrier;
image_barrier.srcAccessMask = barrier.src_access;
image_barrier.dstAccessMask = barrier.dst_access;
image_barrier.oldLayout = old_layout;
image_barrier.newLayout = barrier.new_layout;
image_barrier.image = GetImage();
image_barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
image_barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
image_barrier.subresourceRange.aspectMask = ToImageAspectFlags(desc_.format);
image_barrier.subresourceRange.baseMipLevel = 0u;
image_barrier.subresourceRange.levelCount = desc_.mip_count;
image_barrier.subresourceRange.baseArrayLayer = 0u;
image_barrier.subresourceRange.layerCount = ToArrayLayerCount(desc_.type);
barrier.cmd_buffer.pipelineBarrier(barrier.src_stage, // src stage
barrier.dst_stage, // dst stage
{}, // dependency flags
nullptr, // memory barriers
nullptr, // buffer barriers
image_barrier // image barriers
);
return {};
}
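// Illustrative sketch (not part of the original file): transitioning a texture
// to a shader-readable layout after rendering, mirroring the barrier fields
// used by the render pass backend. `texture` and `cmd_buffer` are assumed to
// be valid, and BarrierVK is assumed to be visible via texture_source_vk.h.
[[maybe_unused]] static bool TransitionToShaderReadSketch(
    const TextureSourceVK& texture,
    const vk::CommandBuffer& cmd_buffer) {
  BarrierVK barrier;
  barrier.cmd_buffer = cmd_buffer;
  barrier.src_access = vk::AccessFlagBits::eColorAttachmentWrite;
  barrier.src_stage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
  barrier.dst_access = vk::AccessFlagBits::eShaderRead;
  barrier.dst_stage = vk::PipelineStageFlagBits::eFragmentShader;
  barrier.new_layout = vk::ImageLayout::eShaderReadOnlyOptimal;
  // SetLayout records the pipeline barrier and updates the tracked layout.
  return texture.SetLayout(barrier).ok();
}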
void TextureSourceVK::SetCachedFramebuffer(
const SharedHandleVK<vk::Framebuffer>& framebuffer) {
framebuffer_ = framebuffer;
}
void TextureSourceVK::SetCachedRenderPass(
const SharedHandleVK<vk::RenderPass>& render_pass) {
render_pass_ = render_pass;
}
SharedHandleVK<vk::Framebuffer> TextureSourceVK::GetCachedFramebuffer() const {
return framebuffer_;
}
SharedHandleVK<vk::RenderPass> TextureSourceVK::GetCachedRenderPass() const {
return render_pass_;
}
} // namespace impeller
| engine/impeller/renderer/backend/vulkan/texture_source_vk.cc/0 | {"file_path": "engine/impeller/renderer/backend/vulkan/texture_source_vk.cc", "repo_id": "engine", "token_count": 1061} | 219 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_IMPELLER_RENDERER_BLIT_COMMAND_H_
#define FLUTTER_IMPELLER_RENDERER_BLIT_COMMAND_H_
#include "impeller/core/device_buffer.h"
#include "impeller/core/texture.h"
#include "impeller/geometry/rect.h"
namespace impeller {
struct BlitCommand {
std::string label;
};
struct BlitCopyTextureToTextureCommand : public BlitCommand {
std::shared_ptr<Texture> source;
std::shared_ptr<Texture> destination;
IRect source_region;
IPoint destination_origin;
};
struct BlitCopyTextureToBufferCommand : public BlitCommand {
std::shared_ptr<Texture> source;
std::shared_ptr<DeviceBuffer> destination;
IRect source_region;
size_t destination_offset;
};
struct BlitCopyBufferToTextureCommand : public BlitCommand {
BufferView source;
std::shared_ptr<Texture> destination;
IPoint destination_origin;
};
struct BlitGenerateMipmapCommand : public BlitCommand {
std::shared_ptr<Texture> texture;
};
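// Illustrative sketch (not part of the original header): filling out a
// texture-to-texture copy that covers the whole source. Texture::GetSize() is
// assumed to report the source dimensions; both textures are assumed to be
// valid and compatible. The command is consumed by a backend blit pass.
inline BlitCopyTextureToTextureCommand MakeFullCopyCommandSketch(
    std::shared_ptr<Texture> source,
    std::shared_ptr<Texture> destination) {
  BlitCopyTextureToTextureCommand command;
  command.label = "Full texture copy (sketch)";
  command.source_region = IRect::MakeSize(source->GetSize());
  command.destination_origin = IPoint{0, 0};
  command.source = std::move(source);
  command.destination = std::move(destination);
  return command;
}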
} // namespace impeller
#endif // FLUTTER_IMPELLER_RENDERER_BLIT_COMMAND_H_
| engine/impeller/renderer/blit_command.h/0 | {"file_path": "engine/impeller/renderer/blit_command.h", "repo_id": "engine", "token_count": 395} | 220 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_IMPELLER_RENDERER_COMPUTE_PIPELINE_BUILDER_H_
#define FLUTTER_IMPELLER_RENDERER_COMPUTE_PIPELINE_BUILDER_H_
#include "impeller/base/strings.h"
#include "impeller/base/validation.h"
#include "impeller/renderer/compute_pipeline_descriptor.h"
#include "impeller/renderer/context.h"
#include "impeller/renderer/shader_library.h"
namespace impeller {
//------------------------------------------------------------------------------
/// @brief An optional (but highly recommended) utility for creating
/// pipelines from reflected shader information.
///
/// @tparam ComputeShader_ The reflected compute shader information. Found
/// in a generated header file called
/// <shader_name>.comp.h.
///
template <class ComputeShader_>
struct ComputePipelineBuilder {
public:
using ComputeShader = ComputeShader_;
//----------------------------------------------------------------------------
/// @brief Create a default pipeline descriptor using the combination of
/// reflected shader information. The descriptor can be configured
/// further before a pipeline state object is created using it.
///
/// @param[in] context The context
///
/// @return If the combination of reflected shader information is
/// compatible and the requisite functions can be found in the
/// context, a pipeline descriptor.
///
static std::optional<ComputePipelineDescriptor> MakeDefaultPipelineDescriptor(
const Context& context) {
ComputePipelineDescriptor desc;
if (InitializePipelineDescriptorDefaults(context, desc)) {
return {std::move(desc)};
}
return std::nullopt;
}
[[nodiscard]] static bool InitializePipelineDescriptorDefaults(
const Context& context,
ComputePipelineDescriptor& desc) {
// Setup debug instrumentation.
desc.SetLabel(SPrintF("%s Pipeline", ComputeShader::kLabel.data()));
// Resolve pipeline entrypoints.
{
auto compute_function = context.GetShaderLibrary()->GetFunction(
ComputeShader::kEntrypointName, ShaderStage::kCompute);
if (!compute_function) {
VALIDATION_LOG << "Could not resolve compute pipeline entrypoint '"
<< ComputeShader::kEntrypointName
<< "' for pipeline named '" << ComputeShader::kLabel
<< "'.";
return false;
}
if (!desc.RegisterDescriptorSetLayouts(
ComputeShader::kDescriptorSetLayouts)) {
VALIDATION_LOG << "Could not configure compute descriptor set layout "
"for pipeline named '"
<< ComputeShader::kLabel << "'.";
return false;
}
desc.SetStageEntrypoint(std::move(compute_function));
}
return true;
}
};
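// Illustrative sketch (not part of the original header): typical use with a
// reflection-generated shader type. `ExampleComputeShader` stands in for a
// generated <shader_name>.comp.h type and is hypothetical; `context` is
// assumed to be a valid Context.
//
//   using ExamplePipeline = ComputePipelineBuilder<ExampleComputeShader>;
//   auto desc = ExamplePipeline::MakeDefaultPipelineDescriptor(context);
//   if (!desc.has_value()) {
//     // The entrypoint or descriptor set layouts could not be resolved.
//   }
//   // The descriptor can be configured further before creating the pipeline.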
} // namespace impeller
#endif // FLUTTER_IMPELLER_RENDERER_COMPUTE_PIPELINE_BUILDER_H_
| engine/impeller/renderer/compute_pipeline_builder.h/0 | {"file_path": "engine/impeller/renderer/compute_pipeline_builder.h", "repo_id": "engine", "token_count": 1196} | 221 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_IMPELLER_RENDERER_PIPELINE_DESCRIPTOR_H_
#define FLUTTER_IMPELLER_RENDERER_PIPELINE_DESCRIPTOR_H_
#include <map>
#include <memory>
#include <string>
#include "impeller/base/comparable.h"
#include "impeller/core/formats.h"
#include "impeller/core/shader_types.h"
#include "impeller/tessellator/tessellator.h"
namespace impeller {
class ShaderFunction;
class VertexDescriptor;
template <typename T>
class Pipeline;
class PipelineDescriptor final : public Comparable<PipelineDescriptor> {
public:
PipelineDescriptor();
~PipelineDescriptor();
PipelineDescriptor& SetLabel(std::string label);
const std::string& GetLabel() const;
PipelineDescriptor& SetSampleCount(SampleCount samples);
SampleCount GetSampleCount() const { return sample_count_; }
PipelineDescriptor& AddStageEntrypoint(
std::shared_ptr<const ShaderFunction> function);
const std::map<ShaderStage, std::shared_ptr<const ShaderFunction>>&
GetStageEntrypoints() const;
std::shared_ptr<const ShaderFunction> GetEntrypointForStage(
ShaderStage stage) const;
PipelineDescriptor& SetVertexDescriptor(
std::shared_ptr<VertexDescriptor> vertex_descriptor);
const std::shared_ptr<VertexDescriptor>& GetVertexDescriptor() const;
size_t GetMaxColorAttacmentBindIndex() const;
PipelineDescriptor& SetColorAttachmentDescriptor(
size_t index,
ColorAttachmentDescriptor desc);
PipelineDescriptor& SetColorAttachmentDescriptors(
std::map<size_t /* index */, ColorAttachmentDescriptor> descriptors);
const ColorAttachmentDescriptor* GetColorAttachmentDescriptor(
size_t index) const;
const std::map<size_t /* index */, ColorAttachmentDescriptor>&
GetColorAttachmentDescriptors() const;
const ColorAttachmentDescriptor* GetLegacyCompatibleColorAttachment() const;
PipelineDescriptor& SetDepthStencilAttachmentDescriptor(
std::optional<DepthAttachmentDescriptor> desc);
std::optional<DepthAttachmentDescriptor> GetDepthStencilAttachmentDescriptor()
const;
PipelineDescriptor& SetStencilAttachmentDescriptors(
std::optional<StencilAttachmentDescriptor> front_and_back);
PipelineDescriptor& SetStencilAttachmentDescriptors(
std::optional<StencilAttachmentDescriptor> front,
std::optional<StencilAttachmentDescriptor> back);
void ClearStencilAttachments();
void ClearDepthAttachment();
void ClearColorAttachment(size_t index);
std::optional<StencilAttachmentDescriptor>
GetFrontStencilAttachmentDescriptor() const;
std::optional<StencilAttachmentDescriptor>
GetBackStencilAttachmentDescriptor() const;
bool HasStencilAttachmentDescriptors() const;
PipelineDescriptor& SetDepthPixelFormat(PixelFormat format);
PixelFormat GetDepthPixelFormat() const;
PipelineDescriptor& SetStencilPixelFormat(PixelFormat format);
PixelFormat GetStencilPixelFormat() const;
// Comparable<PipelineDescriptor>
std::size_t GetHash() const override;
// Comparable<PipelineDescriptor>
bool IsEqual(const PipelineDescriptor& other) const override;
void ResetAttachments();
void SetCullMode(CullMode mode);
CullMode GetCullMode() const;
void SetWindingOrder(WindingOrder order);
WindingOrder GetWindingOrder() const;
void SetPrimitiveType(PrimitiveType type);
PrimitiveType GetPrimitiveType() const;
void SetPolygonMode(PolygonMode mode);
PolygonMode GetPolygonMode() const;
void SetSpecializationConstants(std::vector<Scalar> values);
const std::vector<Scalar>& GetSpecializationConstants() const;
private:
std::string label_;
SampleCount sample_count_ = SampleCount::kCount1;
WindingOrder winding_order_ = WindingOrder::kClockwise;
CullMode cull_mode_ = CullMode::kNone;
std::map<ShaderStage, std::shared_ptr<const ShaderFunction>> entrypoints_;
std::map<size_t /* index */, ColorAttachmentDescriptor>
color_attachment_descriptors_;
std::shared_ptr<VertexDescriptor> vertex_descriptor_;
PixelFormat depth_pixel_format_ = PixelFormat::kUnknown;
PixelFormat stencil_pixel_format_ = PixelFormat::kUnknown;
std::optional<DepthAttachmentDescriptor> depth_attachment_descriptor_;
std::optional<StencilAttachmentDescriptor>
front_stencil_attachment_descriptor_;
std::optional<StencilAttachmentDescriptor>
back_stencil_attachment_descriptor_;
PrimitiveType primitive_type_ = PrimitiveType::kTriangle;
PolygonMode polygon_mode_ = PolygonMode::kFill;
std::vector<Scalar> specialization_constants_;
};
} // namespace impeller
#endif // FLUTTER_IMPELLER_RENDERER_PIPELINE_DESCRIPTOR_H_
| engine/impeller/renderer/pipeline_descriptor.h/0 | {
"file_path": "engine/impeller/renderer/pipeline_descriptor.h",
"repo_id": "engine",
"token_count": 1558
} | 222 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_IMPELLER_RENDERER_SAMPLER_LIBRARY_H_
#define FLUTTER_IMPELLER_RENDERER_SAMPLER_LIBRARY_H_
#include "impeller/core/sampler.h"
#include "impeller/core/sampler_descriptor.h"
namespace impeller {
class SamplerLibrary {
public:
virtual ~SamplerLibrary();
/// @brief Retrieve a backend specific sampler object for the given sampler
/// descriptor.
///
/// If the descriptor is invalid or there is a loss of rendering
  /// context, the returned sampler may be null.
///
/// The sampler library implementations must cache this sampler object
/// and guarantee that the reference will continue to be valid
/// throughout the lifetime of the Impeller context.
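  ///
  /// A minimal usage sketch (illustrative only; `context` is assumed to be a
  /// valid Impeller context):
  ///
  ///   SamplerDescriptor desc;
  ///   const std::unique_ptr<const Sampler>& sampler =
  ///       context->GetSamplerLibrary()->GetSampler(desc);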
virtual const std::unique_ptr<const Sampler>& GetSampler(
SamplerDescriptor descriptor) = 0;
protected:
SamplerLibrary();
private:
SamplerLibrary(const SamplerLibrary&) = delete;
SamplerLibrary& operator=(const SamplerLibrary&) = delete;
};
} // namespace impeller
#endif // FLUTTER_IMPELLER_RENDERER_SAMPLER_LIBRARY_H_
| engine/impeller/renderer/sampler_library.h/0 | {
"file_path": "engine/impeller/renderer/sampler_library.h",
"repo_id": "engine",
"token_count": 417
} | 223 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "impeller/scene/animation/animation_clip.h"
#include <algorithm>
#include <cmath>
#include <memory>
#include <valarray>
#include "impeller/scene/node.h"
namespace impeller {
namespace scene {
AnimationClip::AnimationClip(std::shared_ptr<Animation> animation,
Node* bind_target)
: animation_(std::move(animation)) {
BindToTarget(bind_target);
}
AnimationClip::~AnimationClip() = default;
AnimationClip::AnimationClip(AnimationClip&&) = default;
AnimationClip& AnimationClip::operator=(AnimationClip&&) = default;
bool AnimationClip::IsPlaying() const {
return playing_;
}
void AnimationClip::SetPlaying(bool playing) {
playing_ = playing;
}
void AnimationClip::Play() {
SetPlaying(true);
}
void AnimationClip::Pause() {
SetPlaying(false);
}
void AnimationClip::Stop() {
SetPlaying(false);
Seek(SecondsF::zero());
}
bool AnimationClip::GetLoop() const {
return loop_;
}
void AnimationClip::SetLoop(bool looping) {
loop_ = looping;
}
Scalar AnimationClip::GetPlaybackTimeScale() const {
return playback_time_scale_;
}
void AnimationClip::SetPlaybackTimeScale(Scalar playback_speed) {
playback_time_scale_ = playback_speed;
}
Scalar AnimationClip::GetWeight() const {
return weight_;
}
void AnimationClip::SetWeight(Scalar weight) {
weight_ = std::max(0.0f, weight);
}
SecondsF AnimationClip::GetPlaybackTime() const {
return playback_time_;
}
void AnimationClip::Seek(SecondsF time) {
playback_time_ = std::clamp(time, SecondsF::zero(), animation_->GetEndTime());
}
void AnimationClip::Advance(SecondsF delta_time) {
if (!playing_ || delta_time <= SecondsF::zero()) {
return;
}
delta_time *= playback_time_scale_;
playback_time_ += delta_time;
  // Handle looping behavior.
auto end_time = animation_->GetEndTime();
if (end_time == SecondsF::zero()) {
playback_time_ = SecondsF::zero();
return;
}
if (!loop_ &&
(playback_time_ < SecondsF::zero() || playback_time_ > end_time)) {
// If looping is disabled, clamp to the end (or beginning, if playing in
// reverse) and pause.
Pause();
playback_time_ = std::clamp(playback_time_, SecondsF::zero(), end_time);
} else if (/* loop && */ playback_time_ > end_time) {
// If looping is enabled and we ran off the end, loop to the beginning.
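    // For example (illustrative values): with end_time = 2s and
    // playback_time_ = 5s, the playback time wraps to fmod(5, 2) = 1s.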
playback_time_ =
SecondsF(std::fmod(std::abs(playback_time_.count()), end_time.count()));
} else if (/* loop && */ playback_time_ < SecondsF::zero()) {
// If looping is enabled and we ran off the beginning, loop to the end.
playback_time_ =
end_time -
SecondsF(std::fmod(std::abs(playback_time_.count()), end_time.count()));
}
}
void AnimationClip::ApplyToBindings(
std::unordered_map<Node*, AnimationTransforms>& transform_decomps,
Scalar weight_multiplier) const {
for (auto& binding : bindings_) {
auto transforms = transform_decomps.find(binding.node);
if (transforms == transform_decomps.end()) {
continue;
}
binding.channel.resolver->Apply(transforms->second, playback_time_,
weight_ * weight_multiplier);
}
}
void AnimationClip::BindToTarget(Node* node) {
const auto& channels = animation_->GetChannels();
bindings_.clear();
bindings_.reserve(channels.size());
for (const auto& channel : channels) {
Node* channel_target;
if (channel.bind_target.node_name == node->GetName()) {
channel_target = node;
} else if (auto result =
node->FindChildByName(channel.bind_target.node_name, true)) {
channel_target = result.get();
} else {
continue;
}
bindings_.push_back(
ChannelBinding{.channel = channel, .node = channel_target});
}
}
} // namespace scene
} // namespace impeller
| engine/impeller/scene/animation/animation_clip.cc/0 | {
"file_path": "engine/impeller/scene/animation/animation_clip.cc",
"repo_id": "engine",
"token_count": 1442
} | 224 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/testing/testing.h"
#include "impeller/geometry/geometry_asserts.h"
#include "impeller/geometry/matrix.h"
#include "impeller/scene/importer/conversions.h"
#include "impeller/scene/importer/importer.h"
#include "impeller/scene/importer/scene_flatbuffers.h"
namespace impeller {
namespace scene {
namespace importer {
namespace testing {
TEST(ImporterTest, CanParseUnskinnedGLTF) {
auto mapping =
flutter::testing::OpenFixtureAsMapping("flutter_logo_baked.glb");
fb::SceneT scene;
ASSERT_TRUE(ParseGLTF(*mapping, scene));
ASSERT_EQ(scene.children.size(), 1u);
auto& node = scene.nodes[scene.children[0]];
Matrix node_transform = ToMatrix(*node->transform);
ASSERT_MATRIX_NEAR(node_transform, Matrix());
ASSERT_EQ(node->mesh_primitives.size(), 1u);
auto& mesh = *node->mesh_primitives[0];
ASSERT_EQ(mesh.indices->count, 918u);
uint16_t first_index =
*reinterpret_cast<uint16_t*>(mesh.indices->data.data());
ASSERT_EQ(first_index, 45u);
ASSERT_EQ(mesh.vertices.type, fb::VertexBuffer::UnskinnedVertexBuffer);
auto& vertices = mesh.vertices.AsUnskinnedVertexBuffer()->vertices;
ASSERT_EQ(vertices.size(), 260u);
auto& vertex = vertices[0];
Vector3 position = ToVector3(vertex.position());
ASSERT_VECTOR3_NEAR(position, Vector3(-0.0100185, -0.522907, 0.133178));
Vector3 normal = ToVector3(vertex.normal());
ASSERT_VECTOR3_NEAR(normal, Vector3(0.556997, -0.810833, 0.179733));
Vector4 tangent = ToVector4(vertex.tangent());
ASSERT_VECTOR4_NEAR(tangent, Vector4(0.155901, -0.110485, -0.981574, 1));
Vector2 texture_coords = ToVector2(vertex.texture_coords());
ASSERT_POINT_NEAR(texture_coords, Vector2(0.727937, 0.713817));
Color color = ToColor(vertex.color());
ASSERT_COLOR_NEAR(color, Color(0.0221714, 0.467781, 0.921584, 1));
}
TEST(ImporterTest, CanParseSkinnedGLTF) {
auto mapping = flutter::testing::OpenFixtureAsMapping("two_triangles.glb");
fb::SceneT scene;
ASSERT_TRUE(ParseGLTF(*mapping, scene));
ASSERT_EQ(scene.children.size(), 1u);
auto& node = scene.nodes[scene.children[0]];
Matrix node_transform = ToMatrix(*node->transform);
ASSERT_MATRIX_NEAR(node_transform, Matrix());
ASSERT_EQ(node->mesh_primitives.size(), 0u);
ASSERT_EQ(node->children.size(), 2u);
// The skinned node contains both a skeleton and skinned mesh primitives that
// reference bones in the skeleton.
auto& skinned_node = scene.nodes[node->children[0]];
ASSERT_NE(skinned_node->skin, nullptr);
ASSERT_EQ(skinned_node->mesh_primitives.size(), 2u);
auto& bottom_triangle = *skinned_node->mesh_primitives[0];
ASSERT_EQ(bottom_triangle.indices->count, 3u);
ASSERT_EQ(bottom_triangle.vertices.type,
fb::VertexBuffer::SkinnedVertexBuffer);
auto& vertices = bottom_triangle.vertices.AsSkinnedVertexBuffer()->vertices;
ASSERT_EQ(vertices.size(), 3u);
auto& vertex = vertices[0];
Vector3 position = ToVector3(vertex.vertex().position());
ASSERT_VECTOR3_NEAR(position, Vector3(1, 1, 0));
Vector3 normal = ToVector3(vertex.vertex().normal());
ASSERT_VECTOR3_NEAR(normal, Vector3(0, 0, 1));
Vector4 tangent = ToVector4(vertex.vertex().tangent());
ASSERT_VECTOR4_NEAR(tangent, Vector4(1, 0, 0, -1));
Vector2 texture_coords = ToVector2(vertex.vertex().texture_coords());
ASSERT_POINT_NEAR(texture_coords, Vector2(0, 1));
Color color = ToColor(vertex.vertex().color());
ASSERT_COLOR_NEAR(color, Color(1, 1, 1, 1));
Vector4 joints = ToVector4(vertex.joints());
ASSERT_VECTOR4_NEAR(joints, Vector4(0, 0, 0, 0));
Vector4 weights = ToVector4(vertex.weights());
ASSERT_VECTOR4_NEAR(weights, Vector4(1, 0, 0, 0));
ASSERT_EQ(scene.animations.size(), 2u);
ASSERT_EQ(scene.animations[0]->name, "Idle");
ASSERT_EQ(scene.animations[1]->name, "Metronome");
ASSERT_EQ(scene.animations[1]->channels.size(), 6u);
auto& channel = scene.animations[1]->channels[3];
ASSERT_EQ(channel->keyframes.type, fb::Keyframes::RotationKeyframes);
auto* keyframes = channel->keyframes.AsRotationKeyframes();
ASSERT_EQ(keyframes->values.size(), 40u);
ASSERT_VECTOR4_NEAR(ToVector4(keyframes->values[0]),
Vector4(0.653281, -0.270598, 0.270598, 0.653281));
ASSERT_VECTOR4_NEAR(ToVector4(keyframes->values[10]),
Vector4(0.700151, 0.0989373, -0.0989373, 0.700151));
}
} // namespace testing
} // namespace importer
} // namespace scene
} // namespace impeller
| engine/impeller/scene/importer/importer_unittests.cc/0 | {
"file_path": "engine/impeller/scene/importer/importer_unittests.cc",
"repo_id": "engine",
"token_count": 1814
} | 225 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_IMPELLER_SCENE_SCENE_H_
#define FLUTTER_IMPELLER_SCENE_SCENE_H_
#include <memory>
#include <vector>
#include "flutter/fml/macros.h"
#include "impeller/renderer/render_target.h"
#include "impeller/scene/camera.h"
#include "impeller/scene/node.h"
#include "impeller/scene/scene_context.h"
namespace impeller {
namespace scene {
class Scene {
public:
Scene() = delete;
explicit Scene(std::shared_ptr<SceneContext> scene_context);
~Scene();
Node& GetRoot();
bool Render(const RenderTarget& render_target,
const Matrix& camera_transform);
bool Render(const RenderTarget& render_target, const Camera& camera);
private:
std::shared_ptr<SceneContext> scene_context_;
Node root_;
Scene(const Scene&) = delete;
Scene& operator=(const Scene&) = delete;
};
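// A minimal usage sketch (illustrative only; `scene_context`, `render_target`,
// and `camera_transform` are assumed to exist):
//
//   Scene scene(scene_context);
//   // Populate scene.GetRoot() with nodes, then render each frame:
//   scene.Render(render_target, camera_transform);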
} // namespace scene
} // namespace impeller
#endif // FLUTTER_IMPELLER_SCENE_SCENE_H_
| engine/impeller/scene/scene.h/0 | {
"file_path": "engine/impeller/scene/scene.h",
"repo_id": "engine",
"token_count": 371
} | 226 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "impeller/shader_archive/multi_arch_shader_archive_writer.h"
#include "impeller/base/validation.h"
#include "impeller/shader_archive/multi_arch_shader_archive_flatbuffers.h"
namespace impeller {
MultiArchShaderArchiveWriter::MultiArchShaderArchiveWriter() = default;
MultiArchShaderArchiveWriter::~MultiArchShaderArchiveWriter() = default;
bool MultiArchShaderArchiveWriter::RegisterShaderArchive(
ArchiveRenderingBackend backend,
std::shared_ptr<const fml::Mapping> mapping) {
if (!mapping || mapping->GetMapping() == nullptr) {
return false;
}
if (archives_.find(backend) != archives_.end()) {
VALIDATION_LOG << "Multi-archive already has a shader library registered "
"for that backend.";
return false;
}
archives_[backend] = std::move(mapping);
return true;
}
constexpr fb::RenderingBackend ToRenderingBackend(
ArchiveRenderingBackend backend) {
switch (backend) {
case ArchiveRenderingBackend::kMetal:
return fb::RenderingBackend::kMetal;
case ArchiveRenderingBackend::kVulkan:
return fb::RenderingBackend::kVulkan;
case ArchiveRenderingBackend::kOpenGLES:
return fb::RenderingBackend::kOpenGLES;
}
FML_UNREACHABLE();
}
std::shared_ptr<fml::Mapping> MultiArchShaderArchiveWriter::CreateMapping()
const {
fb::MultiArchShaderArchiveT multi_archive;
for (const auto& archive : archives_) {
auto archive_blob = std::make_unique<fb::ShaderArchiveBlobT>();
archive_blob->rendering_backend = ToRenderingBackend(archive.first);
archive_blob->mapping = {
archive.second->GetMapping(),
archive.second->GetMapping() + archive.second->GetSize()};
multi_archive.items.emplace_back(std::move(archive_blob));
}
auto builder = std::make_shared<flatbuffers::FlatBufferBuilder>();
builder->Finish(
fb::MultiArchShaderArchive::Pack(*builder.get(), &multi_archive),
fb::MultiArchShaderArchiveIdentifier());
return std::make_shared<fml::NonOwnedMapping>(builder->GetBufferPointer(),
builder->GetSize(),
[builder](auto, auto) {});
}
} // namespace impeller
| engine/impeller/shader_archive/multi_arch_shader_archive_writer.cc/0 | {
"file_path": "engine/impeller/shader_archive/multi_arch_shader_archive_writer.cc",
"repo_id": "engine",
"token_count": 910
} | 227 |
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
name: tessellator
description: A Dart FFI wrapper for Impeller's tessellator.
version: 0.0.0
publish_to: none
homepage: https://github.com/flutter/impeller/tree/main/tessellator/dart
environment:
sdk: '>=3.2.0-0 <4.0.0'
| engine/impeller/tessellator/dart/pubspec.yaml/0 | {
"file_path": "engine/impeller/tessellator/dart/pubspec.yaml",
"repo_id": "engine",
"token_count": 134
} | 228 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/impeller/toolkit/android/surface_transaction.h"
#include "flutter/impeller/toolkit/android/hardware_buffer.h"
#include "flutter/impeller/toolkit/android/surface_control.h"
#include "impeller/base/validation.h"
namespace impeller::android {
SurfaceTransaction::SurfaceTransaction()
: transaction_(GetProcTable().ASurfaceTransaction_create()) {}
SurfaceTransaction::~SurfaceTransaction() = default;
bool SurfaceTransaction::IsValid() const {
return transaction_.is_valid();
}
struct TransactionInFlightData {
SurfaceTransaction::OnCompleteCallback callback;
};
bool SurfaceTransaction::Apply(OnCompleteCallback callback) {
if (!IsValid()) {
return false;
}
if (!callback) {
callback = []() {};
}
const auto& proc_table = GetProcTable();
auto data = std::make_unique<TransactionInFlightData>();
data->callback = callback;
proc_table.ASurfaceTransaction_setOnComplete(
transaction_.get(), //
data.release(), //
[](void* context, ASurfaceTransactionStats* stats) -> void {
auto data = reinterpret_cast<TransactionInFlightData*>(context);
data->callback();
delete data;
});
proc_table.ASurfaceTransaction_apply(transaction_.get());
  // A transaction may only be applied once; release the handle so this
  // transaction cannot be applied again.
transaction_.reset();
return true;
}
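// A minimal usage sketch (illustrative only; `control` and `buffer` are
// assumed to be valid SurfaceControl and HardwareBuffer objects). Note that a
// transaction is single-use:
//
//   SurfaceTransaction transaction;
//   if (transaction.SetContents(control.get(), buffer.get())) {
//     transaction.Apply([]() { /* The contents are now on-screen. */ });
//   }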
bool SurfaceTransaction::SetContents(const SurfaceControl* control,
const HardwareBuffer* buffer) {
if (control == nullptr || buffer == nullptr) {
VALIDATION_LOG << "Invalid control or buffer.";
return false;
}
GetProcTable().ASurfaceTransaction_setBuffer(transaction_.get(), //
control->GetHandle(), //
buffer->GetHandle(), //
-1);
return true;
}
bool SurfaceTransaction::SetBackgroundColor(const SurfaceControl& control,
const Color& color) {
if (!IsValid() || !control.IsValid()) {
return false;
}
GetProcTable().ASurfaceTransaction_setColor(transaction_.get(), //
control.GetHandle(), //
color.red, //
color.green, //
color.blue, //
color.alpha, //
ADATASPACE_SRGB_LINEAR //
);
return true;
}
bool SurfaceTransaction::SetParent(const SurfaceControl& control,
const SurfaceControl* new_parent) {
if (!IsValid() || !control.IsValid()) {
return false;
}
if (new_parent && !new_parent->IsValid()) {
return false;
}
GetProcTable().ASurfaceTransaction_reparent(
transaction_.get(), //
control.GetHandle(), //
new_parent == nullptr ? nullptr : new_parent->GetHandle() //
);
return true;
}
bool SurfaceTransaction::IsAvailableOnPlatform() {
return GetProcTable().IsValid() &&
GetProcTable().ASurfaceTransaction_create.IsAvailable();
}
} // namespace impeller::android
| engine/impeller/toolkit/android/surface_transaction.cc/0 | {
"file_path": "engine/impeller/toolkit/android/surface_transaction.cc",
"repo_id": "engine",
"token_count": 1578
} | 229 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_IMPELLER_TYPOGRAPHER_BACKENDS_SKIA_GLYPH_ATLAS_CONTEXT_SKIA_H_
#define FLUTTER_IMPELLER_TYPOGRAPHER_BACKENDS_SKIA_GLYPH_ATLAS_CONTEXT_SKIA_H_
#include "impeller/base/backend_cast.h"
#include "impeller/typographer/glyph_atlas.h"
class SkBitmap;
namespace impeller {
//------------------------------------------------------------------------------
/// @brief A container for caching a glyph atlas across frames.
///
class GlyphAtlasContextSkia
: public GlyphAtlasContext,
public BackendCast<GlyphAtlasContextSkia, GlyphAtlasContext> {
public:
GlyphAtlasContextSkia();
~GlyphAtlasContextSkia() override;
//----------------------------------------------------------------------------
/// @brief Retrieve the previous (if any) SkBitmap instance.
std::shared_ptr<SkBitmap> GetBitmap() const;
void UpdateBitmap(std::shared_ptr<SkBitmap> bitmap);
private:
std::shared_ptr<SkBitmap> bitmap_;
GlyphAtlasContextSkia(const GlyphAtlasContextSkia&) = delete;
GlyphAtlasContextSkia& operator=(const GlyphAtlasContextSkia&) = delete;
};
} // namespace impeller
#endif // FLUTTER_IMPELLER_TYPOGRAPHER_BACKENDS_SKIA_GLYPH_ATLAS_CONTEXT_SKIA_H_
| engine/impeller/typographer/backends/skia/glyph_atlas_context_skia.h/0 | {
"file_path": "engine/impeller/typographer/backends/skia/glyph_atlas_context_skia.h",
"repo_id": "engine",
"token_count": 468
} | 230 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "impeller/typographer/font.h"
namespace impeller {
Font::Font(std::shared_ptr<Typeface> typeface, Metrics metrics)
: typeface_(std::move(typeface)), metrics_(metrics) {
if (!typeface_) {
return;
}
is_valid_ = true;
}
Font::~Font() = default;
bool Font::IsValid() const {
return is_valid_;
}
const std::shared_ptr<Typeface>& Font::GetTypeface() const {
return typeface_;
}
std::size_t Font::GetHash() const {
return fml::HashCombine(is_valid_, typeface_ ? typeface_->GetHash() : 0u,
metrics_);
}
bool Font::IsEqual(const Font& other) const {
return DeepComparePointer(typeface_, other.typeface_) &&
is_valid_ == other.is_valid_ && metrics_ == other.metrics_;
}
const Font::Metrics& Font::GetMetrics() const {
return metrics_;
}
} // namespace impeller
| engine/impeller/typographer/font.cc/0 | {
"file_path": "engine/impeller/typographer/font.cc",
"repo_id": "engine",
"token_count": 370
} | 231 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_LIB_GPU_FIXTURES_H_
#define FLUTTER_LIB_GPU_FIXTURES_H_
#include "impeller/core/shader_types.h"
struct FlutterGPUUnlitVertexShader {
struct PerVertexData {
impeller::Point position; // (offset 0, size 8)
}; // struct PerVertexData (size 8)
static constexpr auto kInputPosition = impeller::ShaderStageIOSlot{
// position
"position", // name
0u, // attribute location
0u, // attribute set
0u, // attribute binding
impeller::ShaderType::kFloat, // type
32u, // bit width of type
2u, // vec size
1u, // number of columns
0u, // offset for interleaved layout
};
static constexpr std::array<const impeller::ShaderStageIOSlot*, 1>
kAllShaderStageInputs = {
&kInputPosition, // position
};
static constexpr auto kInterleavedLayout = impeller::ShaderStageBufferLayout{
sizeof(PerVertexData), // stride for interleaved layout
0u, // attribute binding
};
static constexpr std::array<const impeller::ShaderStageBufferLayout*, 1>
kInterleavedBufferLayout = {&kInterleavedLayout};
};
constexpr unsigned int kFlutterGPUUnlitVertIPLRLength = 856;
extern unsigned char kFlutterGPUUnlitVertIPLR[];
constexpr unsigned int kFlutterGPUUnlitFragIPLRLength = 556;
extern unsigned char kFlutterGPUUnlitFragIPLR[];
struct FlutterGPUTextureVertexShader {
struct PerVertexData {
impeller::Vector3 position; // (offset 0, size 12)
impeller::Point texture_coords; // (offset 12, size 8)
impeller::Vector4 color; // (offset 20, size 16)
}; // struct PerVertexData (size 36)
static constexpr auto kInputTextureCoords = impeller::ShaderStageIOSlot{
// texture_coords
"texture_coords", // name
1u, // attribute location
0u, // attribute set
0u, // attribute binding
impeller::ShaderType::kFloat, // type
32u, // bit width of type
2u, // vec size
1u, // number of columns
12u, // offset for interleaved layout
};
static constexpr auto kInputColor = impeller::ShaderStageIOSlot{
// color
"color", // name
2u, // attribute location
0u, // attribute set
0u, // attribute binding
impeller::ShaderType::kFloat, // type
32u, // bit width of type
4u, // vec size
1u, // number of columns
20u, // offset for interleaved layout
};
static constexpr auto kInputPosition = impeller::ShaderStageIOSlot{
// position
"position", // name
0u, // attribute location
0u, // attribute set
0u, // attribute binding
impeller::ShaderType::kFloat, // type
32u, // bit width of type
3u, // vec size
1u, // number of columns
0u, // offset for interleaved layout
};
static constexpr std::array<const impeller::ShaderStageIOSlot*, 3>
kAllShaderStageInputs = {
&kInputTextureCoords, // texture_coords
&kInputColor, // color
&kInputPosition, // position
};
static constexpr auto kInterleavedLayout = impeller::ShaderStageBufferLayout{
sizeof(PerVertexData), // stride for interleaved layout
0u, // attribute binding
};
static constexpr std::array<const impeller::ShaderStageBufferLayout*, 1>
kInterleavedBufferLayout = {&kInterleavedLayout};
};
constexpr unsigned int kFlutterGPUTextureVertIPLRLength = 920;
extern unsigned char kFlutterGPUTextureVertIPLR[];
constexpr unsigned int kFlutterGPUTextureFragIPLRLength = 800;
extern unsigned char kFlutterGPUTextureFragIPLR[];
#endif // FLUTTER_LIB_GPU_FIXTURES_H_
| engine/lib/gpu/fixtures.h/0 | {
"file_path": "engine/lib/gpu/fixtures.h",
"repo_id": "engine",
"token_count": 2280
} | 232 |
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
name: flutter_gpu
description: Flutter GPU, a low-level graphics API for Flutter.
homepage: https://flutter.dev
environment:
sdk: '>=3.2.0-0 <4.0.0'
dependencies:
sky_engine:
sdk: flutter
| engine/lib/gpu/pubspec.yaml/0 | {
"file_path": "engine/lib/gpu/pubspec.yaml",
"repo_id": "engine",
"token_count": 115
} | 233 |
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import("//build/compiled_action.gni")
import("//flutter/build/bin_to_obj.gni")
import("//flutter/common/config.gni")
import("//flutter/impeller/tools/impeller.gni")
import("//flutter/lib/ui/dart_ui.gni")
import("$dart_src/utils/compile_platform.gni")
if (is_fuchsia) {
import("//flutter/tools/fuchsia/gn-sdk/src/gn_configs.gni")
}
# Generates the Dart/Flutter core platform files and tools.
#
# This target generates the platform-specific snapshots and snapshot-related
# tooling for a given target CPU.
#
# Outputs:
# * Core platform compiled to kernel bytecode
# * Core platform compiled to target_cpu-specific binary snapshot
# * target_cpu-specific gen_snapshot
# * target_cpu-specific analyze_snapshot
group("generate_snapshot_bins") {
deps = [
":generate_snapshot_bin",
":kernel_platform_files",
]
# Build gen_snapshot for the currently specified target_cpu.
#
# For macOS target builds: needed for both target CPUs (arm64, x64).
# For iOS, Android target builds: all AOT target CPUs are arm/arm64.
if (host_os == "mac" && target_os == "mac") {
deps += [ ":create_macos_gen_snapshots" ]
} else if (host_os == "mac" &&
(target_cpu == "arm" || target_cpu == "arm64")) {
deps += [ ":create_arm_gen_snapshot" ]
}
# Build analyze_snapshot for 64-bit target CPUs.
if (host_os == "linux" && (target_cpu == "x64" || target_cpu == "arm64")) {
deps += [ "$dart_src/runtime/bin:analyze_snapshot($host_toolchain)" ]
}
}
# Compiles a binary snapshot of the core Dart/Flutter platform.
#
# Inputs:
# * platform_strong.dill
#
# Tools:
# * gen_snapshot
#
# Outputs:
# * vm_snapshot_data.bin
# * vm_snapshot_instructions.bin
# * isolate_snapshot_data.bin
# * isolate_snapshot_instructions.bin
#
# See: `bin_to_linkable` rules below that build these outputs into linkable form
# See: https://github.com/flutter/flutter/wiki/Flutter-engine-operation-in-AOT-Mode
compiled_action("generate_snapshot_bin") {
if (target_cpu == "x86" && host_os == "linux") {
# By default Dart will create a 32-bit gen_snapshot host binary if the target
# platform is 32-bit. Override this to create a 64-bit gen_snapshot for x86
# targets because some host platforms may not support 32-bit binaries.
tool = "$dart_src/runtime/bin:gen_snapshot_host_targeting_host"
toolchain = "//build/toolchain/$host_os:clang_x64"
} else {
tool = "$dart_src/runtime/bin:gen_snapshot"
}
platform_kernel = "$root_out_dir/flutter_patched_sdk/platform_strong.dill"
inputs = [ platform_kernel ]
deps = [ ":kernel_platform_files" ]
vm_snapshot_data = "$target_gen_dir/vm_isolate_snapshot.bin"
vm_snapshot_instructions = "$target_gen_dir/vm_snapshot_instructions.bin"
isolate_snapshot_data = "$target_gen_dir/isolate_snapshot.bin"
isolate_snapshot_instructions =
"$target_gen_dir/isolate_snapshot_instructions.bin"
outputs = [
vm_snapshot_data,
vm_snapshot_instructions,
isolate_snapshot_data,
isolate_snapshot_instructions,
]
args = [
"--snapshot_kind=core",
"--enable_mirrors=false",
"--vm_snapshot_data=" + rebase_path(vm_snapshot_data),
"--vm_snapshot_instructions=" + rebase_path(vm_snapshot_instructions),
"--isolate_snapshot_data=" + rebase_path(isolate_snapshot_data),
"--isolate_snapshot_instructions=" +
rebase_path(isolate_snapshot_instructions),
]
if (is_debug && flutter_runtime_mode != "profile" &&
flutter_runtime_mode != "release" &&
flutter_runtime_mode != "jit_release") {
args += [ "--enable_asserts" ]
}
args += [ rebase_path(platform_kernel) ]
metadata = {
entitlement_file_path = [ "gen_snapshot" ]
}
}
bin_to_linkable("vm_snapshot_data_linkable") {
deps = [ ":generate_snapshot_bin" ]
input = "$target_gen_dir/vm_isolate_snapshot.bin"
symbol = "kDartVmSnapshotData"
executable = false
}
bin_to_linkable("vm_snapshot_instructions_linkable") {
deps = [ ":generate_snapshot_bin" ]
input = "$target_gen_dir/vm_snapshot_instructions.bin"
symbol = "kDartVmSnapshotInstructions"
executable = true
}
bin_to_linkable("isolate_snapshot_data_linkable") {
deps = [ ":generate_snapshot_bin" ]
input = "$target_gen_dir/isolate_snapshot.bin"
symbol = "kDartIsolateSnapshotData"
executable = false
}
bin_to_linkable("isolate_snapshot_instructions_linkable") {
deps = [ ":generate_snapshot_bin" ]
input = "$target_gen_dir/isolate_snapshot_instructions.bin"
symbol = "kDartIsolateSnapshotInstructions"
executable = true
}
bin_to_linkable("platform_strong_dill_linkable") {
deps = [ ":kernel_platform_files" ]
input = "$root_out_dir/flutter_patched_sdk/platform_strong.dill"
symbol = "kPlatformStrongDill"
size_symbol = "kPlatformStrongDillSize"
executable = false
}
# Creates a `gen_snapshot` binary suffixed with the target CPU architecture.
#
# Builds gen_snapshot using the host toolchain then copies the resulting binary
# to `gen_snapshot_armv7` or `gen_snapshot_arm64` depending on the target
# platform.
#
# This target is used for builds targeting iOS/Android OS.
if (host_os == "mac" && target_os != "mac" &&
(target_cpu == "arm" || target_cpu == "arm64")) {
copy("create_arm_gen_snapshot") {
# The toolchain-specific output directory. For cross-compiles, this is a
# clang-x64 or clang-arm64 subdirectory of the top-level build directory.
host_output_dir =
get_label_info("$dart_src/runtime/bin:gen_snapshot($host_toolchain)",
"root_out_dir")
# Determine suffixed output gen_snapshot name.
target_cpu_suffix = target_cpu
if (target_cpu == "arm") {
target_cpu_suffix = "armv7"
}
sources = [ "${host_output_dir}/gen_snapshot" ]
outputs = [ "${host_output_dir}/gen_snapshot_${target_cpu_suffix}" ]
deps = [ "$dart_src/runtime/bin:gen_snapshot($host_toolchain)" ]
visibility = [ ":*" ]
}
}
# Creates a `gen_snapshot` binary suffixed with the target CPU architecture.
#
# Builds gen_snapshot using the host toolchain then copies the resulting binary
# to `gen_snapshot_arm64` or `gen_snapshot_x64` depending on the target
# platform.
#
# This target is used for builds targeting macOS.
if (host_os == "mac" && target_os == "mac") {
copy("create_macos_gen_snapshots") {
# The toolchain-specific output directory. For cross-compiles, this is a
# clang-x64 or clang-arm64 subdirectory of the top-level build directory.
host_output_dir =
get_label_info("$dart_src/runtime/bin:gen_snapshot($host_toolchain)",
"root_out_dir")
sources = [ "${host_output_dir}/gen_snapshot" ]
outputs = [ "${root_out_dir}/gen_snapshot_${target_cpu}" ]
deps = [ "$dart_src/runtime/bin:gen_snapshot($host_toolchain)" ]
}
}
source_set("snapshot") {
deps = [
":isolate_snapshot_data_linkable",
":isolate_snapshot_instructions_linkable",
":platform_strong_dill_linkable",
":vm_snapshot_data_linkable",
":vm_snapshot_instructions_linkable",
]
sources = get_target_outputs(":isolate_snapshot_data_linkable") +
get_target_outputs(":isolate_snapshot_instructions_linkable") +
get_target_outputs(":vm_snapshot_data_linkable") +
get_target_outputs(":vm_snapshot_instructions_linkable") +
get_target_outputs(":platform_strong_dill_linkable")
}
# Compiles the Dart/Flutter core libraries to kernel bytecode.
compile_platform("strong_platform") {
single_root_scheme = "org-dartlang-sdk"
single_root_base = rebase_path("../../../")
if (impeller_enable_3d) {
libraries_specification_uri =
"org-dartlang-sdk:///flutter/lib/snapshot/libraries_experimental.json"
} else {
libraries_specification_uri =
"org-dartlang-sdk:///flutter/lib/snapshot/libraries.json"
}
outputs = [
"$root_out_dir/flutter_patched_sdk/platform_strong.dill",
"$root_out_dir/flutter_patched_sdk/vm_outline_strong.dill",
]
pool = "//flutter/build/dart:dart_pool"
is_runtime_mode_release =
flutter_runtime_mode == "release" || flutter_runtime_mode == "jit_release"
args = [
"--enable-experiment=generic-metadata",
"--nnbd-agnostic",
"--target=flutter",
"-Ddart.vm.product=$is_runtime_mode_release",
"-Ddart.isVM=true",
"dart:core",
]
}
# Fuchsia's snapshot requires a different platform with extra dart: libraries.
group("kernel_platform_files") {
public_deps = [ ":strong_platform" ]
}
| engine/lib/snapshot/BUILD.gn/0 | {
"file_path": "engine/lib/snapshot/BUILD.gn",
"repo_id": "engine",
"token_count": 3311
} | 234 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/lib/ui/dart_runtime_hooks.h"
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <sstream>
#include "flutter/common/settings.h"
#include "flutter/fml/build_config.h"
#include "flutter/fml/logging.h"
#include "flutter/lib/ui/plugins/callback_cache.h"
#include "flutter/lib/ui/ui_dart_state.h"
#include "flutter/runtime/dart_plugin_registrant.h"
#include "third_party/dart/runtime/include/bin/dart_io_api.h"
#include "third_party/dart/runtime/include/dart_api.h"
#include "third_party/dart/runtime/include/dart_tools_api.h"
#include "third_party/tonic/converter/dart_converter.h"
#include "third_party/tonic/dart_library_natives.h"
#include "third_party/tonic/dart_microtask_queue.h"
#include "third_party/tonic/dart_state.h"
#include "third_party/tonic/logging/dart_error.h"
#include "third_party/tonic/logging/dart_invoke.h"
#include "third_party/tonic/scopes/dart_api_scope.h"
#include "third_party/tonic/scopes/dart_isolate_scope.h"
using tonic::DartConverter;
using tonic::ToDart;
namespace flutter {
static void PropagateIfError(Dart_Handle result) {
if (Dart_IsError(result)) {
FML_LOG(ERROR) << "Dart Error: " << ::Dart_GetError(result);
Dart_PropagateError(result);
}
}
static Dart_Handle InvokeFunction(Dart_Handle builtin_library,
const char* name) {
Dart_Handle getter_name = ToDart(name);
return Dart_Invoke(builtin_library, getter_name, 0, nullptr);
}
static void InitDartInternal(Dart_Handle builtin_library, bool is_ui_isolate) {
Dart_Handle print = InvokeFunction(builtin_library, "_getPrintClosure");
Dart_Handle internal_library = Dart_LookupLibrary(ToDart("dart:_internal"));
Dart_Handle result =
Dart_SetField(internal_library, ToDart("_printClosure"), print);
PropagateIfError(result);
if (is_ui_isolate) {
// Call |_setupHooks| to configure |VMLibraryHooks|.
Dart_Handle method_name = Dart_NewStringFromCString("_setupHooks");
result = Dart_Invoke(builtin_library, method_name, 0, NULL);
PropagateIfError(result);
}
Dart_Handle setup_hooks = Dart_NewStringFromCString("_setupHooks");
Dart_Handle io_lib = Dart_LookupLibrary(ToDart("dart:io"));
result = Dart_Invoke(io_lib, setup_hooks, 0, NULL);
PropagateIfError(result);
Dart_Handle isolate_lib = Dart_LookupLibrary(ToDart("dart:isolate"));
result = Dart_Invoke(isolate_lib, setup_hooks, 0, NULL);
PropagateIfError(result);
}
static void InitDartCore(Dart_Handle builtin, const std::string& script_uri) {
Dart_Handle io_lib = Dart_LookupLibrary(ToDart("dart:io"));
Dart_Handle get_base_url =
Dart_Invoke(io_lib, ToDart("_getUriBaseClosure"), 0, NULL);
Dart_Handle core_library = Dart_LookupLibrary(ToDart("dart:core"));
Dart_Handle result =
Dart_SetField(core_library, ToDart("_uriBaseClosure"), get_base_url);
PropagateIfError(result);
}
static void InitDartAsync(Dart_Handle builtin_library, bool is_ui_isolate) {
Dart_Handle schedule_microtask;
if (is_ui_isolate) {
schedule_microtask =
InvokeFunction(builtin_library, "_getScheduleMicrotaskClosure");
} else {
Dart_Handle isolate_lib = Dart_LookupLibrary(ToDart("dart:isolate"));
Dart_Handle method_name =
Dart_NewStringFromCString("_getIsolateScheduleImmediateClosure");
schedule_microtask = Dart_Invoke(isolate_lib, method_name, 0, NULL);
}
Dart_Handle async_library = Dart_LookupLibrary(ToDart("dart:async"));
Dart_Handle set_schedule_microtask = ToDart("_setScheduleImmediateClosure");
Dart_Handle result = Dart_Invoke(async_library, set_schedule_microtask, 1,
&schedule_microtask);
PropagateIfError(result);
}
static void InitDartIO(Dart_Handle builtin_library,
const std::string& script_uri) {
Dart_Handle io_lib = Dart_LookupLibrary(ToDart("dart:io"));
Dart_Handle platform_type =
Dart_GetNonNullableType(io_lib, ToDart("_Platform"), 0, nullptr);
if (!script_uri.empty()) {
Dart_Handle result = Dart_SetField(platform_type, ToDart("_nativeScript"),
ToDart(script_uri));
PropagateIfError(result);
}
// typedef _LocaleClosure = String Function();
Dart_Handle /* _LocaleClosure? */ locale_closure =
InvokeFunction(builtin_library, "_getLocaleClosure");
PropagateIfError(locale_closure);
// static String Function()? _localeClosure;
Dart_Handle result =
Dart_SetField(platform_type, ToDart("_localeClosure"), locale_closure);
PropagateIfError(result);
#if !FLUTTER_RELEASE
// Register dart:io service extensions used for network profiling.
Dart_Handle network_profiling_type =
Dart_GetNonNullableType(io_lib, ToDart("_NetworkProfiling"), 0, nullptr);
PropagateIfError(network_profiling_type);
result = Dart_Invoke(network_profiling_type,
ToDart("_registerServiceExtension"), 0, nullptr);
PropagateIfError(result);
#endif // !FLUTTER_RELEASE
}
void DartRuntimeHooks::Install(bool is_ui_isolate,
const std::string& script_uri) {
Dart_Handle builtin = Dart_LookupLibrary(ToDart("dart:ui"));
InitDartInternal(builtin, is_ui_isolate);
InitDartCore(builtin, script_uri);
InitDartAsync(builtin, is_ui_isolate);
InitDartIO(builtin, script_uri);
}
void DartRuntimeHooks::Logger_PrintDebugString(const std::string& message) {
#ifndef NDEBUG
DartRuntimeHooks::Logger_PrintString(message);
#endif
}
void DartRuntimeHooks::Logger_PrintString(const std::string& message) {
const auto& tag = UIDartState::Current()->logger_prefix();
UIDartState::Current()->LogMessage(tag, message);
if (dart::bin::ShouldCaptureStdout()) {
std::stringstream stream;
if (!tag.empty()) {
stream << tag << ": ";
}
stream << message;
std::string log = stream.str();
// For now we report print output on the Stdout stream.
uint8_t newline[] = {'\n'};
Dart_ServiceSendDataEvent("Stdout", "WriteEvent",
reinterpret_cast<const uint8_t*>(log.c_str()),
log.size());
Dart_ServiceSendDataEvent("Stdout", "WriteEvent", newline, sizeof(newline));
}
}
void DartRuntimeHooks::ScheduleMicrotask(Dart_Handle closure) {
UIDartState::Current()->ScheduleMicrotask(closure);
}
static std::string GetFunctionLibraryUrl(Dart_Handle closure) {
if (Dart_IsClosure(closure)) {
closure = Dart_ClosureFunction(closure);
PropagateIfError(closure);
}
if (!Dart_IsFunction(closure)) {
return "";
}
Dart_Handle url = Dart_Null();
Dart_Handle owner = Dart_FunctionOwner(closure);
if (Dart_IsInstance(owner)) {
owner = Dart_ClassLibrary(owner);
}
if (Dart_IsLibrary(owner)) {
url = Dart_LibraryUrl(owner);
PropagateIfError(url);
}
return DartConverter<std::string>::FromDart(url);
}
static std::string GetFunctionClassName(Dart_Handle closure) {
Dart_Handle result;
if (Dart_IsClosure(closure)) {
closure = Dart_ClosureFunction(closure);
PropagateIfError(closure);
}
if (!Dart_IsFunction(closure)) {
return "";
}
bool is_static = false;
result = Dart_FunctionIsStatic(closure, &is_static);
PropagateIfError(result);
if (!is_static) {
return "";
}
result = Dart_FunctionOwner(closure);
PropagateIfError(result);
if (Dart_IsLibrary(result) || !Dart_IsInstance(result)) {
return "";
}
return DartConverter<std::string>::FromDart(Dart_ClassName(result));
}
static std::string GetFunctionName(Dart_Handle func) {
if (Dart_IsClosure(func)) {
func = Dart_ClosureFunction(func);
PropagateIfError(func);
}
if (!Dart_IsFunction(func)) {
return "";
}
bool is_static = false;
Dart_Handle result = Dart_FunctionIsStatic(func, &is_static);
PropagateIfError(result);
if (!is_static) {
return "";
}
result = Dart_FunctionName(func);
PropagateIfError(result);
return DartConverter<std::string>::FromDart(result);
}
Dart_Handle DartRuntimeHooks::GetCallbackHandle(Dart_Handle func) {
std::string name = GetFunctionName(func);
std::string class_name = GetFunctionClassName(func);
std::string library_path = GetFunctionLibraryUrl(func);
// `name` is empty if `func` can't be used as a callback. This is the case
// when `func` is not a function object or is not a static function. Anonymous
// closures (e.g. `(int a, int b) => a + b;`) also cannot be used as
// callbacks, so `func` must be a tear-off of a named static function.
if (!Dart_IsTearOff(func) || name.empty()) {
return Dart_Null();
}
return DartConverter<int64_t>::ToDart(
DartCallbackCache::GetCallbackHandle(name, class_name, library_path));
}
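// For illustration (Dart side, hypothetical): only a tear-off of a named
// static or top-level function yields a usable handle.
//
//   static void onEvent() {}
//   PluginUtilities.getCallbackHandle(onEvent);  // Valid.
//   PluginUtilities.getCallbackHandle(() {});    // Null: anonymous closure.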
Dart_Handle DartRuntimeHooks::GetCallbackFromHandle(int64_t handle) {
Dart_Handle result = DartCallbackCache::GetCallback(handle);
PropagateIfError(result);
return result;
}
void DartPluginRegistrant_EnsureInitialized() {
tonic::DartApiScope api_scope;
FindAndInvokeDartPluginRegistrant();
}
} // namespace flutter
| engine/lib/ui/dart_runtime_hooks.cc/0 | {
"file_path": "engine/lib/ui/dart_runtime_hooks.cc",
"repo_id": "engine",
"token_count": 3452
} | 235 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
part of dart.ui;
/// Base class for [Size] and [Offset], which are both ways to describe
/// a distance as a two-dimensional axis-aligned vector.
abstract class OffsetBase {
/// Abstract const constructor. This constructor enables subclasses to provide
/// const constructors so that they can be used in const expressions.
///
/// The first argument sets the horizontal component, and the second the
/// vertical component.
const OffsetBase(this._dx, this._dy);
final double _dx;
final double _dy;
/// Returns true if either component is [double.infinity], and false if both
/// are finite (or negative infinity, or NaN).
///
/// This is different than comparing for equality with an instance that has
/// _both_ components set to [double.infinity].
///
/// See also:
///
/// * [isFinite], which is true if both components are finite (and not NaN).
bool get isInfinite => _dx >= double.infinity || _dy >= double.infinity;
/// Whether both components are finite (neither infinite nor NaN).
///
/// See also:
///
/// * [isInfinite], which returns true if either component is equal to
/// positive infinity.
bool get isFinite => _dx.isFinite && _dy.isFinite;
/// Less-than operator. Compares an [Offset] or [Size] to another [Offset] or
/// [Size], and returns true if both the horizontal and vertical values of the
/// left-hand-side operand are smaller than the horizontal and vertical values
/// of the right-hand-side operand respectively. Returns false otherwise.
///
/// This is a partial ordering. It is possible for two values to be neither
/// less, nor greater than, nor equal to, another.
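  ///
  /// For example (illustrative):
  ///
  /// ```dart
  /// const Offset(1.0, 2.0) < const Offset(2.0, 3.0); // true
  /// const Offset(1.0, 3.0) < const Offset(2.0, 2.0); // false (and `>` is also false)
  /// ```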
bool operator <(OffsetBase other) => _dx < other._dx && _dy < other._dy;
/// Less-than-or-equal-to operator. Compares an [Offset] or [Size] to another
/// [Offset] or [Size], and returns true if both the horizontal and vertical
/// values of the left-hand-side operand are smaller than or equal to the
/// horizontal and vertical values of the right-hand-side operand
/// respectively. Returns false otherwise.
///
/// This is a partial ordering. It is possible for two values to be neither
/// less, nor greater than, nor equal to, another.
bool operator <=(OffsetBase other) => _dx <= other._dx && _dy <= other._dy;
/// Greater-than operator. Compares an [Offset] or [Size] to another [Offset]
/// or [Size], and returns true if both the horizontal and vertical values of
/// the left-hand-side operand are bigger than the horizontal and vertical
/// values of the right-hand-side operand respectively. Returns false
/// otherwise.
///
/// This is a partial ordering. It is possible for two values to be neither
/// less, nor greater than, nor equal to, another.
bool operator >(OffsetBase other) => _dx > other._dx && _dy > other._dy;
/// Greater-than-or-equal-to operator. Compares an [Offset] or [Size] to
/// another [Offset] or [Size], and returns true if both the horizontal and
/// vertical values of the left-hand-side operand are bigger than or equal to
/// the horizontal and vertical values of the right-hand-side operand
/// respectively. Returns false otherwise.
///
/// This is a partial ordering. It is possible for two values to be neither
/// less, nor greater than, nor equal to, another.
bool operator >=(OffsetBase other) => _dx >= other._dx && _dy >= other._dy;
/// Equality operator. Compares an [Offset] or [Size] to another [Offset] or
/// [Size], and returns true if the horizontal and vertical values of the
/// left-hand-side operand are equal to the horizontal and vertical values of
/// the right-hand-side operand respectively. Returns false otherwise.
@override
bool operator ==(Object other) {
return other is OffsetBase
&& other._dx == _dx
&& other._dy == _dy;
}
@override
int get hashCode => Object.hash(_dx, _dy);
@override
String toString() => 'OffsetBase(${_dx.toStringAsFixed(1)}, ${_dy.toStringAsFixed(1)})';
}
/// An immutable 2D floating-point offset.
///
/// Generally speaking, Offsets can be interpreted in two ways:
///
/// 1. As representing a point in Cartesian space a specified distance from a
/// separately-maintained origin. For example, the top-left position of
/// children in the [RenderBox] protocol is typically represented as an
/// [Offset] from the top left of the parent box.
///
/// 2. As a vector that can be applied to coordinates. For example, when
/// painting a [RenderObject], the parent is passed an [Offset] from the
/// screen's origin which it can add to the offsets of its children to find
/// the [Offset] from the screen's origin to each of the children.
///
/// Because a particular [Offset] can be interpreted as one sense at one time
/// then as the other sense at a later time, the same class is used for both
/// senses.
///
/// See also:
///
/// * [Size], which represents a vector describing the size of a rectangle.
class Offset extends OffsetBase {
/// Creates an offset. The first argument sets [dx], the horizontal component,
/// and the second sets [dy], the vertical component.
const Offset(super.dx, super.dy);
/// Creates an offset from its [direction] and [distance].
///
/// The direction is in radians clockwise from the positive x-axis.
///
/// The distance can be omitted, to create a unit vector (distance = 1.0).
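  ///
  /// For example (illustrative):
  ///
  /// ```dart
  /// // Points down the positive y-axis with magnitude 2.0.
  /// Offset down = Offset.fromDirection(math.pi / 2, 2.0); // approximately Offset(0.0, 2.0)
  /// ```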
factory Offset.fromDirection(double direction, [ double distance = 1.0 ]) {
return Offset(distance * math.cos(direction), distance * math.sin(direction));
}
/// The x component of the offset.
///
/// The y component is given by [dy].
double get dx => _dx;
/// The y component of the offset.
///
/// The x component is given by [dx].
double get dy => _dy;
/// The magnitude of the offset.
///
/// If you need this value to compare it to another [Offset]'s distance,
/// consider using [distanceSquared] instead, since it is cheaper to compute.
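  ///
  /// For example (illustrative):
  ///
  /// ```dart
  /// const Offset(3.0, 4.0).distance; // 5.0
  /// ```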
double get distance => math.sqrt(dx * dx + dy * dy);
/// The square of the magnitude of the offset.
///
/// This is cheaper than computing the [distance] itself.
double get distanceSquared => dx * dx + dy * dy;
/// The angle of this offset as radians clockwise from the positive x-axis, in
/// the range -[pi] to [pi], assuming positive values of the x-axis go to the
/// right and positive values of the y-axis go down.
///
/// Zero means that [dy] is zero and [dx] is zero or positive.
///
/// Values from zero to [pi]/2 indicate positive values of [dx] and [dy], the
/// bottom-right quadrant.
///
/// Values from [pi]/2 to [pi] indicate negative values of [dx] and positive
/// values of [dy], the bottom-left quadrant.
///
/// Values from zero to -[pi]/2 indicate positive values of [dx] and negative
/// values of [dy], the top-right quadrant.
///
/// Values from -[pi]/2 to -[pi] indicate negative values of [dx] and [dy],
/// the top-left quadrant.
///
/// When [dy] is zero and [dx] is negative, the [direction] is [pi].
///
/// When [dx] is zero, [direction] is [pi]/2 if [dy] is positive and -[pi]/2
/// if [dy] is negative.
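  ///
  /// For example (illustrative):
  ///
  /// ```dart
  /// const Offset(0.0, 2.0).direction;  // pi / 2 (pointing down)
  /// const Offset(-2.0, 0.0).direction; // pi (pointing left)
  /// ```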
///
/// See also:
///
/// * [distance], to compute the magnitude of the vector.
/// * [Canvas.rotate], which uses the same convention for its angle.
double get direction => math.atan2(dy, dx);
/// An offset with zero magnitude.
///
/// This can be used to represent the origin of a coordinate space.
static const Offset zero = Offset(0.0, 0.0);
/// An offset with infinite x and y components.
///
/// See also:
///
/// * [isInfinite], which checks whether either component is infinite.
/// * [isFinite], which checks whether both components are finite.
// This is included for completeness, because [Size.infinite] exists.
static const Offset infinite = Offset(double.infinity, double.infinity);
/// Returns a new offset with the x component scaled by `scaleX` and the y
/// component scaled by `scaleY`.
///
/// If the two scale arguments are the same, consider using the `*` operator
/// instead:
///
/// ```dart
/// Offset a = const Offset(10.0, 10.0);
/// Offset b = a * 2.0; // same as: a.scale(2.0, 2.0)
/// ```
///
/// If the two arguments are -1, consider using the unary `-` operator
/// instead:
///
/// ```dart
/// Offset a = const Offset(10.0, 10.0);
/// Offset b = -a; // same as: a.scale(-1.0, -1.0)
/// ```
Offset scale(double scaleX, double scaleY) => Offset(dx * scaleX, dy * scaleY);
/// Returns a new offset with translateX added to the x component and
/// translateY added to the y component.
///
/// If the arguments come from another [Offset], consider using the `+` or `-`
/// operators instead:
///
/// ```dart
/// Offset a = const Offset(10.0, 10.0);
/// Offset b = const Offset(10.0, 10.0);
/// Offset c = a + b; // same as: a.translate(b.dx, b.dy)
/// Offset d = a - b; // same as: a.translate(-b.dx, -b.dy)
/// ```
Offset translate(double translateX, double translateY) => Offset(dx + translateX, dy + translateY);
/// Unary negation operator.
///
/// Returns an offset with the coordinates negated.
///
/// If the [Offset] represents an arrow on a plane, this operator returns the
/// same arrow but pointing in the reverse direction.
Offset operator -() => Offset(-dx, -dy);
/// Binary subtraction operator.
///
/// Returns an offset whose [dx] value is the left-hand-side operand's [dx]
/// minus the right-hand-side operand's [dx] and whose [dy] value is the
/// left-hand-side operand's [dy] minus the right-hand-side operand's [dy].
///
/// See also [translate].
Offset operator -(Offset other) => Offset(dx - other.dx, dy - other.dy);
/// Binary addition operator.
///
/// Returns an offset whose [dx] value is the sum of the [dx] values of the
/// two operands, and whose [dy] value is the sum of the [dy] values of the
/// two operands.
///
/// See also [translate].
Offset operator +(Offset other) => Offset(dx + other.dx, dy + other.dy);
/// Multiplication operator.
///
/// Returns an offset whose coordinates are the coordinates of the
/// left-hand-side operand (an Offset) multiplied by the scalar
/// right-hand-side operand (a double).
///
/// See also [scale].
Offset operator *(double operand) => Offset(dx * operand, dy * operand);
/// Division operator.
///
/// Returns an offset whose coordinates are the coordinates of the
/// left-hand-side operand (an Offset) divided by the scalar right-hand-side
/// operand (a double).
///
/// See also [scale].
Offset operator /(double operand) => Offset(dx / operand, dy / operand);
/// Integer (truncating) division operator.
///
/// Returns an offset whose coordinates are the coordinates of the
/// left-hand-side operand (an Offset) divided by the scalar right-hand-side
/// operand (a double), rounded towards zero.
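  ///
  /// For example (illustrative):
  ///
  /// ```dart
  /// const Offset(7.0, -7.0) ~/ 2.0; // Offset(3.0, -3.0)
  /// ```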
Offset operator ~/(double operand) => Offset((dx ~/ operand).toDouble(), (dy ~/ operand).toDouble());
/// Modulo (remainder) operator.
///
/// Returns an offset whose coordinates are the remainder of dividing the
/// coordinates of the left-hand-side operand (an Offset) by the scalar
/// right-hand-side operand (a double).
Offset operator %(double operand) => Offset(dx % operand, dy % operand);
/// Rectangle constructor operator.
///
/// Combines an [Offset] and a [Size] to form a [Rect] whose top-left
/// coordinate is the point given by adding this offset, the left-hand-side
/// operand, to the origin, and whose size is the right-hand-side operand.
///
/// ```dart
/// Rect myRect = Offset.zero & const Size(100.0, 100.0);
/// // same as: Rect.fromLTWH(0.0, 0.0, 100.0, 100.0)
/// ```
Rect operator &(Size other) => Rect.fromLTWH(dx, dy, other.width, other.height);
/// Linearly interpolate between two offsets.
///
/// If either offset is null, this function interpolates from [Offset.zero].
///
/// The `t` argument represents position on the timeline, with 0.0 meaning
/// that the interpolation has not started, returning `a` (or something
/// equivalent to `a`), 1.0 meaning that the interpolation has finished,
/// returning `b` (or something equivalent to `b`), and values in between
/// meaning that the interpolation is at the relevant point on the timeline
/// between `a` and `b`. The interpolation can be extrapolated beyond 0.0 and
/// 1.0, so negative values and values greater than 1.0 are valid (and can
/// easily be generated by curves such as [Curves.elasticInOut]).
///
/// Values for `t` are usually obtained from an [Animation<double>], such as
/// an [AnimationController].
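  ///
  /// For example (illustrative):
  ///
  /// ```dart
  /// // Halfway between the origin and (10, 20).
  /// Offset? halfway = Offset.lerp(Offset.zero, const Offset(10.0, 20.0), 0.5); // Offset(5.0, 10.0)
  /// ```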
static Offset? lerp(Offset? a, Offset? b, double t) {
if (b == null) {
if (a == null) {
return null;
} else {
return a * (1.0 - t);
}
} else {
if (a == null) {
return b * t;
} else {
return Offset(_lerpDouble(a.dx, b.dx, t), _lerpDouble(a.dy, b.dy, t));
}
}
}
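  // Illustrative sketch of the interpolation behavior described above (the
  // values here are examples, not taken from this file):
  //
  //   Offset.lerp(Offset.zero, const Offset(10.0, 20.0), 0.25); // Offset(2.5, 5.0)
  //   Offset.lerp(null, const Offset(10.0, 20.0), 0.5);         // Offset(5.0, 10.0),
  //   // because a null endpoint is treated as Offset.zero.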
/// Compares two Offsets for equality.
@override
bool operator ==(Object other) {
return other is Offset
&& other.dx == dx
&& other.dy == dy;
}
@override
int get hashCode => Object.hash(dx, dy);
@override
String toString() => 'Offset(${dx.toStringAsFixed(1)}, ${dy.toStringAsFixed(1)})';
}
/// Holds a 2D floating-point size.
///
/// You can think of this as an [Offset] from the origin.
class Size extends OffsetBase {
/// Creates a [Size] with the given [width] and [height].
const Size(super.width, super.height);
/// Creates an instance of [Size] that has the same values as another.
// Used by the rendering library's _DebugSize hack.
Size.copy(Size source) : super(source.width, source.height);
/// Creates a square [Size] whose [width] and [height] are the given dimension.
///
/// See also:
///
/// * [Size.fromRadius], which is more convenient when the available size
/// is the radius of a circle.
const Size.square(double dimension) : super(dimension, dimension); // ignore: use_super_parameters
/// Creates a [Size] with the given [width] and an infinite [height].
const Size.fromWidth(double width) : super(width, double.infinity);
/// Creates a [Size] with the given [height] and an infinite [width].
const Size.fromHeight(double height) : super(double.infinity, height);
/// Creates a square [Size] whose [width] and [height] are twice the given
/// dimension.
///
/// This is a square that contains a circle with the given radius.
///
/// See also:
///
/// * [Size.square], which creates a square with the given dimension.
const Size.fromRadius(double radius) : super(radius * 2.0, radius * 2.0);
/// The horizontal extent of this size.
double get width => _dx;
/// The vertical extent of this size.
double get height => _dy;
/// The aspect ratio of this size.
///
/// This returns the [width] divided by the [height].
///
/// If the [width] is zero, the result will be zero. If the [height] is zero
/// (and the [width] is not), the result will be [double.infinity] or
/// [double.negativeInfinity] as determined by the sign of [width].
///
/// See also:
///
/// * [AspectRatio], a widget for giving a child widget a specific aspect
/// ratio.
/// * [FittedBox], a widget that (in most modes) attempts to maintain a
/// child widget's aspect ratio while changing its size.
double get aspectRatio {
if (height != 0.0) {
return width / height;
}
if (width > 0.0) {
return double.infinity;
}
if (width < 0.0) {
return double.negativeInfinity;
}
return 0.0;
}
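  // Illustrative values for the cases documented above (examples only):
  //
  //   const Size(16.0, 9.0).aspectRatio; // 1.777...
  //   const Size(16.0, 0.0).aspectRatio; // double.infinity
  //   const Size(0.0, 9.0).aspectRatio;  // 0.0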
/// An empty size, one with a zero width and a zero height.
static const Size zero = Size(0.0, 0.0);
/// A size whose [width] and [height] are infinite.
///
/// See also:
///
/// * [isInfinite], which checks whether either dimension is infinite.
/// * [isFinite], which checks whether both dimensions are finite.
static const Size infinite = Size(double.infinity, double.infinity);
/// Whether this size encloses a non-zero area.
///
/// Negative areas are considered empty.
bool get isEmpty => width <= 0.0 || height <= 0.0;
/// Binary subtraction operator for [Size].
///
/// Subtracting a [Size] from a [Size] returns the [Offset] that describes how
/// much bigger the left-hand-side operand is than the right-hand-side
/// operand. Adding that resulting [Offset] to the [Size] that was the
/// right-hand-side operand would return a [Size] equal to the [Size] that was
/// the left-hand-side operand. (i.e. if `sizeA - sizeB -> offsetA`, then
/// `offsetA + sizeB -> sizeA`)
///
/// Subtracting an [Offset] from a [Size] returns the [Size] that is smaller than
/// the [Size] operand by the difference given by the [Offset] operand. In other
/// words, the returned [Size] has a [width] consisting of the [width] of the
/// left-hand-side operand minus the [Offset.dx] dimension of the
/// right-hand-side operand, and a [height] consisting of the [height] of the
/// left-hand-side operand minus the [Offset.dy] dimension of the
/// right-hand-side operand.
OffsetBase operator -(OffsetBase other) {
if (other is Size) {
return Offset(width - other.width, height - other.height);
}
if (other is Offset) {
return Size(width - other.dx, height - other.dy);
}
throw ArgumentError(other);
}
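  // A short sketch of the two cases described above (example values only):
  //
  //   const Size(10.0, 10.0) - const Size(4.0, 6.0);   // Offset(6.0, 4.0)
  //   const Size(10.0, 10.0) - const Offset(4.0, 6.0); // Size(6.0, 4.0)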
/// Binary addition operator for adding an [Offset] to a [Size].
///
/// Returns a [Size] whose [width] is the sum of the [width] of the
/// left-hand-side operand, a [Size], and the [Offset.dx] dimension of the
/// right-hand-side operand, an [Offset], and whose [height] is the sum of the
/// [height] of the left-hand-side operand and the [Offset.dy] dimension of
/// the right-hand-side operand.
Size operator +(Offset other) => Size(width + other.dx, height + other.dy);
/// Multiplication operator.
///
/// Returns a [Size] whose dimensions are the dimensions of the left-hand-side
/// operand (a [Size]) multiplied by the scalar right-hand-side operand (a
/// [double]).
Size operator *(double operand) => Size(width * operand, height * operand);
/// Division operator.
///
/// Returns a [Size] whose dimensions are the dimensions of the left-hand-side
/// operand (a [Size]) divided by the scalar right-hand-side operand (a
/// [double]).
Size operator /(double operand) => Size(width / operand, height / operand);
/// Integer (truncating) division operator.
///
/// Returns a [Size] whose dimensions are the dimensions of the left-hand-side
/// operand (a [Size]) divided by the scalar right-hand-side operand (a
/// [double]), rounded towards zero.
Size operator ~/(double operand) => Size((width ~/ operand).toDouble(), (height ~/ operand).toDouble());
/// Modulo (remainder) operator.
///
/// Returns a [Size] whose dimensions are the remainder of dividing the
/// left-hand-side operand (a [Size]) by the scalar right-hand-side operand (a
/// [double]).
Size operator %(double operand) => Size(width % operand, height % operand);
/// The lesser of the magnitudes of the [width] and the [height].
double get shortestSide => math.min(width.abs(), height.abs());
/// The greater of the magnitudes of the [width] and the [height].
double get longestSide => math.max(width.abs(), height.abs());
// Convenience methods that do the equivalent of calling the similarly named
// methods on a Rect constructed from the given origin and this size.
/// The offset to the intersection of the top and left edges of the rectangle
/// described by the given [Offset] (which is interpreted as the top-left corner)
/// and this [Size].
///
/// See also [Rect.topLeft].
Offset topLeft(Offset origin) => origin;
/// The offset to the center of the top edge of the rectangle described by the
/// given offset (which is interpreted as the top-left corner) and this size.
///
/// See also [Rect.topCenter].
Offset topCenter(Offset origin) => Offset(origin.dx + width / 2.0, origin.dy);
/// The offset to the intersection of the top and right edges of the rectangle
/// described by the given offset (which is interpreted as the top-left corner)
/// and this size.
///
/// See also [Rect.topRight].
Offset topRight(Offset origin) => Offset(origin.dx + width, origin.dy);
/// The offset to the center of the left edge of the rectangle described by the
/// given offset (which is interpreted as the top-left corner) and this size.
///
/// See also [Rect.centerLeft].
Offset centerLeft(Offset origin) => Offset(origin.dx, origin.dy + height / 2.0);
/// The offset to the point halfway between the left and right and the top and
/// bottom edges of the rectangle described by the given offset (which is
/// interpreted as the top-left corner) and this size.
///
/// See also [Rect.center].
Offset center(Offset origin) => Offset(origin.dx + width / 2.0, origin.dy + height / 2.0);
/// The offset to the center of the right edge of the rectangle described by the
/// given offset (which is interpreted as the top-left corner) and this size.
///
  /// See also [Rect.centerRight].
Offset centerRight(Offset origin) => Offset(origin.dx + width, origin.dy + height / 2.0);
/// The offset to the intersection of the bottom and left edges of the
/// rectangle described by the given offset (which is interpreted as the
/// top-left corner) and this size.
///
/// See also [Rect.bottomLeft].
Offset bottomLeft(Offset origin) => Offset(origin.dx, origin.dy + height);
/// The offset to the center of the bottom edge of the rectangle described by
/// the given offset (which is interpreted as the top-left corner) and this
/// size.
///
  /// See also [Rect.bottomCenter].
Offset bottomCenter(Offset origin) => Offset(origin.dx + width / 2.0, origin.dy + height);
/// The offset to the intersection of the bottom and right edges of the
/// rectangle described by the given offset (which is interpreted as the
/// top-left corner) and this size.
///
/// See also [Rect.bottomRight].
Offset bottomRight(Offset origin) => Offset(origin.dx + width, origin.dy + height);
/// Whether the point specified by the given offset (which is assumed to be
/// relative to the top left of the size) lies between the left and right and
/// the top and bottom edges of a rectangle of this size.
///
/// Rectangles include their top and left edges but exclude their bottom and
/// right edges.
bool contains(Offset offset) {
return offset.dx >= 0.0 && offset.dx < width && offset.dy >= 0.0 && offset.dy < height;
}
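  // Edge semantics sketch (example values only): the top and left edges are
  // inside, the bottom and right edges are not.
  //
  //   const Size(100.0, 100.0).contains(Offset.zero);              // true
  //   const Size(100.0, 100.0).contains(const Offset(100.0, 0.0)); // false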
/// A [Size] with the [width] and [height] swapped.
Size get flipped => Size(height, width);
  /// Linearly interpolate between two sizes.
///
/// If either size is null, this function interpolates from [Size.zero].
///
/// The `t` argument represents position on the timeline, with 0.0 meaning
/// that the interpolation has not started, returning `a` (or something
/// equivalent to `a`), 1.0 meaning that the interpolation has finished,
/// returning `b` (or something equivalent to `b`), and values in between
/// meaning that the interpolation is at the relevant point on the timeline
/// between `a` and `b`. The interpolation can be extrapolated beyond 0.0 and
/// 1.0, so negative values and values greater than 1.0 are valid (and can
/// easily be generated by curves such as [Curves.elasticInOut]).
///
/// Values for `t` are usually obtained from an [Animation<double>], such as
/// an [AnimationController].
static Size? lerp(Size? a, Size? b, double t) {
if (b == null) {
if (a == null) {
return null;
} else {
return a * (1.0 - t);
}
} else {
if (a == null) {
return b * t;
} else {
return Size(_lerpDouble(a.width, b.width, t), _lerpDouble(a.height, b.height, t));
}
}
}
/// Compares two Sizes for equality.
// We don't compare the runtimeType because of _DebugSize in the framework.
@override
bool operator ==(Object other) {
return other is Size
&& other._dx == _dx
&& other._dy == _dy;
}
@override
int get hashCode => Object.hash(_dx, _dy);
@override
String toString() => 'Size(${width.toStringAsFixed(1)}, ${height.toStringAsFixed(1)})';
}
/// An immutable, 2D, axis-aligned, floating-point rectangle whose coordinates
/// are relative to a given origin.
///
/// A Rect can be created with one of its constructors or from an [Offset] and a
/// [Size] using the `&` operator:
///
/// ```dart
/// Rect myRect = const Offset(1.0, 2.0) & const Size(3.0, 4.0);
/// ```
class Rect {
/// Construct a rectangle from its left, top, right, and bottom edges.
///
/// 
/// 
const Rect.fromLTRB(this.left, this.top, this.right, this.bottom);
/// Construct a rectangle from its left and top edges, its width, and its
/// height.
///
/// To construct a [Rect] from an [Offset] and a [Size], you can use the
/// rectangle constructor operator `&`. See [Offset.&].
///
/// 
/// 
const Rect.fromLTWH(double left, double top, double width, double height) : this.fromLTRB(left, top, left + width, top + height);
/// Construct a rectangle that bounds the given circle.
///
/// The `center` argument is assumed to be an offset from the origin.
///
/// 
/// 
Rect.fromCircle({ required Offset center, required double radius }) : this.fromCenter(
center: center,
width: radius * 2,
height: radius * 2,
);
/// Constructs a rectangle from its center point, width, and height.
///
/// The `center` argument is assumed to be an offset from the origin.
///
/// 
/// 
Rect.fromCenter({ required Offset center, required double width, required double height }) : this.fromLTRB(
center.dx - width / 2,
center.dy - height / 2,
center.dx + width / 2,
center.dy + height / 2,
);
/// Construct the smallest rectangle that encloses the given offsets, treating
/// them as vectors from the origin.
///
/// 
/// 
Rect.fromPoints(Offset a, Offset b) : this.fromLTRB(
math.min(a.dx, b.dx),
math.min(a.dy, b.dy),
math.max(a.dx, b.dx),
math.max(a.dy, b.dy),
);
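  // Illustrative sketch: the two points may be given in any order, and the
  // constructor normalizes them into a non-negative width and height.
  //
  //   Rect.fromPoints(const Offset(10.0, 2.0), const Offset(3.0, 8.0));
  //   // == Rect.fromLTRB(3.0, 2.0, 10.0, 8.0)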
Float32List _getValue32() {
final Float32List result = Float32List(4);
result[0] = left;
result[1] = top;
result[2] = right;
result[3] = bottom;
return result;
}
/// The offset of the left edge of this rectangle from the x axis.
final double left;
/// The offset of the top edge of this rectangle from the y axis.
final double top;
/// The offset of the right edge of this rectangle from the x axis.
final double right;
/// The offset of the bottom edge of this rectangle from the y axis.
final double bottom;
/// The distance between the left and right edges of this rectangle.
double get width => right - left;
/// The distance between the top and bottom edges of this rectangle.
double get height => bottom - top;
/// The distance between the upper-left corner and the lower-right corner of
/// this rectangle.
Size get size => Size(width, height);
/// Whether any of the dimensions are `NaN`.
bool get hasNaN => left.isNaN || top.isNaN || right.isNaN || bottom.isNaN;
/// A rectangle with left, top, right, and bottom edges all at zero.
static const Rect zero = Rect.fromLTRB(0.0, 0.0, 0.0, 0.0);
static const double _giantScalar = 1.0E+9; // matches kGiantRect from layer.h
/// A rectangle that covers the entire coordinate space.
///
/// This covers the space from -1e9,-1e9 to 1e9,1e9.
/// This is the space over which graphics operations are valid.
static const Rect largest = Rect.fromLTRB(-_giantScalar, -_giantScalar, _giantScalar, _giantScalar);
/// Whether any of the coordinates of this rectangle are equal to positive infinity.
// included for consistency with Offset and Size
bool get isInfinite {
return left >= double.infinity
|| top >= double.infinity
|| right >= double.infinity
|| bottom >= double.infinity;
}
/// Whether all coordinates of this rectangle are finite.
bool get isFinite => left.isFinite && top.isFinite && right.isFinite && bottom.isFinite;
/// Whether this rectangle encloses a non-zero area. Negative areas are
/// considered empty.
bool get isEmpty => left >= right || top >= bottom;
/// Returns a new rectangle translated by the given offset.
///
/// To translate a rectangle by separate x and y components rather than by an
/// [Offset], consider [translate].
Rect shift(Offset offset) {
return Rect.fromLTRB(left + offset.dx, top + offset.dy, right + offset.dx, bottom + offset.dy);
}
/// Returns a new rectangle with translateX added to the x components and
/// translateY added to the y components.
///
/// To translate a rectangle by an [Offset] rather than by separate x and y
/// components, consider [shift].
Rect translate(double translateX, double translateY) {
return Rect.fromLTRB(left + translateX, top + translateY, right + translateX, bottom + translateY);
}
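  // Illustrative equivalence between shift and translate (example values only):
  //
  //   Rect.fromLTRB(0.0, 0.0, 10.0, 10.0).shift(const Offset(2.0, 3.0));
  //   Rect.fromLTRB(0.0, 0.0, 10.0, 10.0).translate(2.0, 3.0);
  //   // Both produce Rect.fromLTRB(2.0, 3.0, 12.0, 13.0).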
/// Returns a new rectangle with edges moved outwards by the given delta.
Rect inflate(double delta) {
return Rect.fromLTRB(left - delta, top - delta, right + delta, bottom + delta);
}
/// Returns a new rectangle with edges moved inwards by the given delta.
Rect deflate(double delta) => inflate(-delta);
/// Returns a new rectangle that is the intersection of the given
/// rectangle and this rectangle. The two rectangles must overlap
/// for this to be meaningful. If the two rectangles do not overlap,
/// then the resulting Rect will have a negative width or height.
Rect intersect(Rect other) {
return Rect.fromLTRB(
math.max(left, other.left),
math.max(top, other.top),
math.min(right, other.right),
math.min(bottom, other.bottom)
);
}
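  // Illustrative sketch of the non-overlapping case described above (example
  // values only):
  //
  //   Rect.fromLTRB(0.0, 0.0, 4.0, 4.0).intersect(Rect.fromLTRB(6.0, 6.0, 8.0, 8.0));
  //   // == Rect.fromLTRB(6.0, 6.0, 4.0, 4.0), whose width and height are
  //   // negative, so isEmpty is true.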
/// Returns a new rectangle which is the bounding box containing this
/// rectangle and the given rectangle.
Rect expandToInclude(Rect other) {
return Rect.fromLTRB(
math.min(left, other.left),
math.min(top, other.top),
math.max(right, other.right),
math.max(bottom, other.bottom),
);
}
/// Whether `other` has a nonzero area of overlap with this rectangle.
bool overlaps(Rect other) {
if (right <= other.left || other.right <= left) {
return false;
}
if (bottom <= other.top || other.bottom <= top) {
return false;
}
return true;
}
/// The lesser of the magnitudes of the [width] and the [height] of this
/// rectangle.
double get shortestSide => math.min(width.abs(), height.abs());
/// The greater of the magnitudes of the [width] and the [height] of this
/// rectangle.
double get longestSide => math.max(width.abs(), height.abs());
/// The offset to the intersection of the top and left edges of this rectangle.
///
/// See also [Size.topLeft].
Offset get topLeft => Offset(left, top);
/// The offset to the center of the top edge of this rectangle.
///
/// See also [Size.topCenter].
Offset get topCenter => Offset(left + width / 2.0, top);
/// The offset to the intersection of the top and right edges of this rectangle.
///
/// See also [Size.topRight].
Offset get topRight => Offset(right, top);
/// The offset to the center of the left edge of this rectangle.
///
/// See also [Size.centerLeft].
Offset get centerLeft => Offset(left, top + height / 2.0);
/// The offset to the point halfway between the left and right and the top and
/// bottom edges of this rectangle.
///
/// See also [Size.center].
Offset get center => Offset(left + width / 2.0, top + height / 2.0);
/// The offset to the center of the right edge of this rectangle.
///
  /// See also [Size.centerRight].
Offset get centerRight => Offset(right, top + height / 2.0);
/// The offset to the intersection of the bottom and left edges of this rectangle.
///
/// See also [Size.bottomLeft].
Offset get bottomLeft => Offset(left, bottom);
/// The offset to the center of the bottom edge of this rectangle.
///
  /// See also [Size.bottomCenter].
Offset get bottomCenter => Offset(left + width / 2.0, bottom);
/// The offset to the intersection of the bottom and right edges of this rectangle.
///
/// See also [Size.bottomRight].
Offset get bottomRight => Offset(right, bottom);
/// Whether the point specified by the given offset (which is assumed to be
/// relative to the origin) lies between the left and right and the top and
/// bottom edges of this rectangle.
///
/// Rectangles include their top and left edges but exclude their bottom and
/// right edges.
bool contains(Offset offset) {
return offset.dx >= left && offset.dx < right && offset.dy >= top && offset.dy < bottom;
}
/// Linearly interpolate between two rectangles.
///
/// If either rect is null, [Rect.zero] is used as a substitute.
///
/// The `t` argument represents position on the timeline, with 0.0 meaning
/// that the interpolation has not started, returning `a` (or something
/// equivalent to `a`), 1.0 meaning that the interpolation has finished,
/// returning `b` (or something equivalent to `b`), and values in between
/// meaning that the interpolation is at the relevant point on the timeline
/// between `a` and `b`. The interpolation can be extrapolated beyond 0.0 and
/// 1.0, so negative values and values greater than 1.0 are valid (and can
/// easily be generated by curves such as [Curves.elasticInOut]).
///
/// Values for `t` are usually obtained from an [Animation<double>], such as
/// an [AnimationController].
static Rect? lerp(Rect? a, Rect? b, double t) {
if (b == null) {
if (a == null) {
return null;
} else {
final double k = 1.0 - t;
return Rect.fromLTRB(a.left * k, a.top * k, a.right * k, a.bottom * k);
}
} else {
if (a == null) {
return Rect.fromLTRB(b.left * t, b.top * t, b.right * t, b.bottom * t);
} else {
return Rect.fromLTRB(
_lerpDouble(a.left, b.left, t),
_lerpDouble(a.top, b.top, t),
_lerpDouble(a.right, b.right, t),
_lerpDouble(a.bottom, b.bottom, t),
);
}
}
}
@override
bool operator ==(Object other) {
if (identical(this, other)) {
return true;
}
if (runtimeType != other.runtimeType) {
return false;
}
return other is Rect
&& other.left == left
&& other.top == top
&& other.right == right
&& other.bottom == bottom;
}
@override
int get hashCode => Object.hash(left, top, right, bottom);
@override
String toString() => 'Rect.fromLTRB(${left.toStringAsFixed(1)}, ${top.toStringAsFixed(1)}, ${right.toStringAsFixed(1)}, ${bottom.toStringAsFixed(1)})';
}
/// A radius for either circular or elliptical shapes.
class Radius {
/// Constructs a circular radius. [x] and [y] will have the same radius value.
///
/// 
/// 
const Radius.circular(double radius) : this.elliptical(radius, radius);
/// Constructs an elliptical radius with the given radii.
///
/// 
/// 
const Radius.elliptical(this.x, this.y);
/// The radius value on the horizontal axis.
final double x;
/// The radius value on the vertical axis.
final double y;
/// A radius with [x] and [y] values set to zero.
///
/// You can use [Radius.zero] with [RRect] to have right-angle corners.
static const Radius zero = Radius.circular(0.0);
/// Returns this [Radius], with values clamped to the given min and max
/// [Radius] values.
///
/// The `min` value defaults to `Radius.circular(-double.infinity)`, and
/// the `max` value defaults to `Radius.circular(double.infinity)`.
Radius clamp({Radius? minimum, Radius? maximum}) {
minimum ??= const Radius.circular(-double.infinity);
maximum ??= const Radius.circular(double.infinity);
return Radius.elliptical(
clampDouble(x, minimum.x, maximum.x),
clampDouble(y, minimum.y, maximum.y),
);
}
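  // Illustrative sketch (example values only):
  //
  //   const Radius.elliptical(-5.0, 30.0)
  //       .clamp(minimum: Radius.zero, maximum: const Radius.circular(20.0));
  //   // == Radius.elliptical(0.0, 20.0)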
/// Returns this [Radius], with values clamped to the given min and max
/// values in each dimension
///
/// The `minimumX` and `minimumY` values default to `-double.infinity`, and
/// the `maximumX` and `maximumY` values default to `double.infinity`.
Radius clampValues({
double? minimumX,
double? minimumY,
double? maximumX,
double? maximumY,
}) {
return Radius.elliptical(
clampDouble(x, minimumX ?? -double.infinity, maximumX ?? double.infinity),
clampDouble(y, minimumY ?? -double.infinity, maximumY ?? double.infinity),
);
}
/// Unary negation operator.
///
/// Returns a Radius with the distances negated.
///
/// Radiuses with negative values aren't geometrically meaningful, but could
/// occur as part of expressions. For example, negating a radius of one pixel
/// and then adding the result to another radius is equivalent to subtracting
/// a radius of one pixel from the other.
Radius operator -() => Radius.elliptical(-x, -y);
/// Binary subtraction operator.
///
/// Returns a radius whose [x] value is the left-hand-side operand's [x]
/// minus the right-hand-side operand's [x] and whose [y] value is the
/// left-hand-side operand's [y] minus the right-hand-side operand's [y].
Radius operator -(Radius other) => Radius.elliptical(x - other.x, y - other.y);
/// Binary addition operator.
///
/// Returns a radius whose [x] value is the sum of the [x] values of the
/// two operands, and whose [y] value is the sum of the [y] values of the
/// two operands.
Radius operator +(Radius other) => Radius.elliptical(x + other.x, y + other.y);
/// Multiplication operator.
///
/// Returns a radius whose coordinates are the coordinates of the
/// left-hand-side operand (a radius) multiplied by the scalar
/// right-hand-side operand (a double).
Radius operator *(double operand) => Radius.elliptical(x * operand, y * operand);
/// Division operator.
///
/// Returns a radius whose coordinates are the coordinates of the
/// left-hand-side operand (a radius) divided by the scalar right-hand-side
/// operand (a double).
Radius operator /(double operand) => Radius.elliptical(x / operand, y / operand);
/// Integer (truncating) division operator.
///
/// Returns a radius whose coordinates are the coordinates of the
/// left-hand-side operand (a radius) divided by the scalar right-hand-side
/// operand (a double), rounded towards zero.
Radius operator ~/(double operand) => Radius.elliptical((x ~/ operand).toDouble(), (y ~/ operand).toDouble());
/// Modulo (remainder) operator.
///
/// Returns a radius whose coordinates are the remainder of dividing the
/// coordinates of the left-hand-side operand (a radius) by the scalar
/// right-hand-side operand (a double).
Radius operator %(double operand) => Radius.elliptical(x % operand, y % operand);
/// Linearly interpolate between two radii.
///
/// If either is null, this function substitutes [Radius.zero] instead.
///
/// The `t` argument represents position on the timeline, with 0.0 meaning
/// that the interpolation has not started, returning `a` (or something
/// equivalent to `a`), 1.0 meaning that the interpolation has finished,
/// returning `b` (or something equivalent to `b`), and values in between
/// meaning that the interpolation is at the relevant point on the timeline
/// between `a` and `b`. The interpolation can be extrapolated beyond 0.0 and
/// 1.0, so negative values and values greater than 1.0 are valid (and can
/// easily be generated by curves such as [Curves.elasticInOut]).
///
/// Values for `t` are usually obtained from an [Animation<double>], such as
/// an [AnimationController].
static Radius? lerp(Radius? a, Radius? b, double t) {
if (b == null) {
if (a == null) {
return null;
} else {
final double k = 1.0 - t;
return Radius.elliptical(a.x * k, a.y * k);
}
} else {
if (a == null) {
return Radius.elliptical(b.x * t, b.y * t);
} else {
return Radius.elliptical(
_lerpDouble(a.x, b.x, t),
_lerpDouble(a.y, b.y, t),
);
}
}
}
@override
bool operator ==(Object other) {
if (identical(this, other)) {
return true;
}
if (runtimeType != other.runtimeType) {
return false;
}
return other is Radius
&& other.x == x
&& other.y == y;
}
@override
int get hashCode => Object.hash(x, y);
@override
String toString() {
return x == y ? 'Radius.circular(${x.toStringAsFixed(1)})' :
'Radius.elliptical(${x.toStringAsFixed(1)}, '
'${y.toStringAsFixed(1)})';
}
}
/// An immutable rounded rectangle with the custom radii for all four corners.
class RRect {
/// Construct a rounded rectangle from its left, top, right, and bottom edges,
/// and the same radii along its horizontal axis and its vertical axis.
///
  /// Will assert in debug mode if `radiusX` or `radiusY` is negative.
const RRect.fromLTRBXY(
double left,
double top,
double right,
double bottom,
double radiusX,
double radiusY,
) : this._raw(
top: top,
left: left,
right: right,
bottom: bottom,
tlRadiusX: radiusX,
tlRadiusY: radiusY,
trRadiusX: radiusX,
trRadiusY: radiusY,
blRadiusX: radiusX,
blRadiusY: radiusY,
brRadiusX: radiusX,
brRadiusY: radiusY,
);
/// Construct a rounded rectangle from its left, top, right, and bottom edges,
/// and the same radius in each corner.
///
/// Will assert in debug mode if the `radius` is negative in either x or y.
RRect.fromLTRBR(
double left,
double top,
double right,
double bottom,
Radius radius,
)
: this._raw(
top: top,
left: left,
right: right,
bottom: bottom,
tlRadiusX: radius.x,
tlRadiusY: radius.y,
trRadiusX: radius.x,
trRadiusY: radius.y,
blRadiusX: radius.x,
blRadiusY: radius.y,
brRadiusX: radius.x,
brRadiusY: radius.y,
);
/// Construct a rounded rectangle from its bounding box and the same radii
/// along its horizontal axis and its vertical axis.
///
  /// Will assert in debug mode if `radiusX` or `radiusY` is negative.
RRect.fromRectXY(Rect rect, double radiusX, double radiusY)
: this._raw(
top: rect.top,
left: rect.left,
right: rect.right,
bottom: rect.bottom,
tlRadiusX: radiusX,
tlRadiusY: radiusY,
trRadiusX: radiusX,
trRadiusY: radiusY,
blRadiusX: radiusX,
blRadiusY: radiusY,
brRadiusX: radiusX,
brRadiusY: radiusY,
);
/// Construct a rounded rectangle from its bounding box and a radius that is
/// the same in each corner.
///
/// Will assert in debug mode if the `radius` is negative in either x or y.
RRect.fromRectAndRadius(Rect rect, Radius radius)
: this._raw(
top: rect.top,
left: rect.left,
right: rect.right,
bottom: rect.bottom,
tlRadiusX: radius.x,
tlRadiusY: radius.y,
trRadiusX: radius.x,
trRadiusY: radius.y,
blRadiusX: radius.x,
blRadiusY: radius.y,
brRadiusX: radius.x,
brRadiusY: radius.y,
);
/// Construct a rounded rectangle from its left, top, right, and bottom edges,
/// and topLeft, topRight, bottomRight, and bottomLeft radii.
///
/// The corner radii default to [Radius.zero], i.e. right-angled corners. Will
/// assert in debug mode if any of the radii are negative in either x or y.
RRect.fromLTRBAndCorners(
double left,
double top,
double right,
double bottom, {
Radius topLeft = Radius.zero,
Radius topRight = Radius.zero,
Radius bottomRight = Radius.zero,
Radius bottomLeft = Radius.zero,
}) : this._raw(
top: top,
left: left,
right: right,
bottom: bottom,
tlRadiusX: topLeft.x,
tlRadiusY: topLeft.y,
trRadiusX: topRight.x,
trRadiusY: topRight.y,
blRadiusX: bottomLeft.x,
blRadiusY: bottomLeft.y,
brRadiusX: bottomRight.x,
brRadiusY: bottomRight.y,
);
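  // Usage sketch (example values only): round only the top corners.
  //
  //   RRect.fromLTRBAndCorners(
  //     0.0, 0.0, 100.0, 50.0,
  //     topLeft: const Radius.circular(8.0),
  //     topRight: const Radius.circular(8.0),
  //   );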
/// Construct a rounded rectangle from its bounding box and topLeft,
/// topRight, bottomRight, and bottomLeft radii.
///
/// The corner radii default to [Radius.zero], i.e. right-angled corners. Will
/// assert in debug mode if any of the radii are negative in either x or y.
RRect.fromRectAndCorners(
Rect rect,
{
Radius topLeft = Radius.zero,
Radius topRight = Radius.zero,
Radius bottomRight = Radius.zero,
Radius bottomLeft = Radius.zero
}
) : this._raw(
top: rect.top,
left: rect.left,
right: rect.right,
bottom: rect.bottom,
tlRadiusX: topLeft.x,
tlRadiusY: topLeft.y,
trRadiusX: topRight.x,
trRadiusY: topRight.y,
blRadiusX: bottomLeft.x,
blRadiusY: bottomLeft.y,
brRadiusX: bottomRight.x,
brRadiusY: bottomRight.y,
);
const RRect._raw({
this.left = 0.0,
this.top = 0.0,
this.right = 0.0,
this.bottom = 0.0,
this.tlRadiusX = 0.0,
this.tlRadiusY = 0.0,
this.trRadiusX = 0.0,
this.trRadiusY = 0.0,
this.brRadiusX = 0.0,
this.brRadiusY = 0.0,
this.blRadiusX = 0.0,
this.blRadiusY = 0.0,
}) : assert(tlRadiusX >= 0),
assert(tlRadiusY >= 0),
assert(trRadiusX >= 0),
assert(trRadiusY >= 0),
assert(brRadiusX >= 0),
assert(brRadiusY >= 0),
assert(blRadiusX >= 0),
assert(blRadiusY >= 0);
Float32List _getValue32() {
final Float32List result = Float32List(12);
result[0] = left;
result[1] = top;
result[2] = right;
result[3] = bottom;
result[4] = tlRadiusX;
result[5] = tlRadiusY;
result[6] = trRadiusX;
result[7] = trRadiusY;
result[8] = brRadiusX;
result[9] = brRadiusY;
result[10] = blRadiusX;
result[11] = blRadiusY;
return result;
}
/// The offset of the left edge of this rectangle from the x axis.
final double left;
/// The offset of the top edge of this rectangle from the y axis.
final double top;
/// The offset of the right edge of this rectangle from the x axis.
final double right;
/// The offset of the bottom edge of this rectangle from the y axis.
final double bottom;
/// The top-left horizontal radius.
final double tlRadiusX;
/// The top-left vertical radius.
final double tlRadiusY;
/// The top-left [Radius].
Radius get tlRadius => Radius.elliptical(tlRadiusX, tlRadiusY);
/// The top-right horizontal radius.
final double trRadiusX;
/// The top-right vertical radius.
final double trRadiusY;
/// The top-right [Radius].
Radius get trRadius => Radius.elliptical(trRadiusX, trRadiusY);
/// The bottom-right horizontal radius.
final double brRadiusX;
/// The bottom-right vertical radius.
final double brRadiusY;
/// The bottom-right [Radius].
Radius get brRadius => Radius.elliptical(brRadiusX, brRadiusY);
/// The bottom-left horizontal radius.
final double blRadiusX;
/// The bottom-left vertical radius.
final double blRadiusY;
/// The bottom-left [Radius].
Radius get blRadius => Radius.elliptical(blRadiusX, blRadiusY);
/// A rounded rectangle with all the values set to zero.
static const RRect zero = RRect._raw();
/// Returns a new [RRect] translated by the given offset.
RRect shift(Offset offset) {
return RRect._raw(
left: left + offset.dx,
top: top + offset.dy,
right: right + offset.dx,
bottom: bottom + offset.dy,
tlRadiusX: tlRadiusX,
tlRadiusY: tlRadiusY,
trRadiusX: trRadiusX,
trRadiusY: trRadiusY,
blRadiusX: blRadiusX,
blRadiusY: blRadiusY,
brRadiusX: brRadiusX,
brRadiusY: brRadiusY,
);
}
/// Returns a new [RRect] with edges and radii moved outwards by the given
/// delta.
RRect inflate(double delta) {
return RRect._raw(
left: left - delta,
top: top - delta,
right: right + delta,
bottom: bottom + delta,
tlRadiusX: math.max(0, tlRadiusX + delta),
tlRadiusY: math.max(0, tlRadiusY + delta),
trRadiusX: math.max(0, trRadiusX + delta),
trRadiusY: math.max(0, trRadiusY + delta),
blRadiusX: math.max(0, blRadiusX + delta),
blRadiusY: math.max(0, blRadiusY + delta),
brRadiusX: math.max(0, brRadiusX + delta),
brRadiusY: math.max(0, brRadiusY + delta),
);
}
/// Returns a new [RRect] with edges and radii moved inwards by the given delta.
RRect deflate(double delta) => inflate(-delta);
/// The distance between the left and right edges of this rectangle.
double get width => right - left;
/// The distance between the top and bottom edges of this rectangle.
double get height => bottom - top;
/// The bounding box of this rounded rectangle (the rectangle with no rounded corners).
Rect get outerRect => Rect.fromLTRB(left, top, right, bottom);
/// The non-rounded rectangle that is constrained by the smaller of the two
/// diagonals, with each diagonal traveling through the middle of the curve
/// corners. The middle of a corner is the intersection of the curve with its
/// respective quadrant bisector.
Rect get safeInnerRect {
const double kInsetFactor = 0.29289321881; // 1-cos(pi/4)
final double leftRadius = math.max(blRadiusX, tlRadiusX);
final double topRadius = math.max(tlRadiusY, trRadiusY);
final double rightRadius = math.max(trRadiusX, brRadiusX);
final double bottomRadius = math.max(brRadiusY, blRadiusY);
return Rect.fromLTRB(
left + leftRadius * kInsetFactor,
top + topRadius * kInsetFactor,
right - rightRadius * kInsetFactor,
bottom - bottomRadius * kInsetFactor
);
}
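  // Note on the constant above (explanatory, not from the original source):
  // the "middle" of a corner curve lies a fraction cos(pi/4) of the radius away
  // from the corner's ellipse center, so the safe inset from the outer edge is
  // radius * (1 - cos(pi/4)) ~= radius * 0.29289321881.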
/// The rectangle that would be formed using the axis-aligned intersection of
/// the sides of the rectangle, i.e., the rectangle formed from the
/// inner-most centers of the ellipses that form the corners. This is the
/// intersection of the [wideMiddleRect] and the [tallMiddleRect]. If any of
/// the intersections are void, the resulting [Rect] will have negative width
/// or height.
Rect get middleRect {
final double leftRadius = math.max(blRadiusX, tlRadiusX);
final double topRadius = math.max(tlRadiusY, trRadiusY);
final double rightRadius = math.max(trRadiusX, brRadiusX);
final double bottomRadius = math.max(brRadiusY, blRadiusY);
return Rect.fromLTRB(
left + leftRadius,
top + topRadius,
right - rightRadius,
bottom - bottomRadius
);
}
/// The biggest rectangle that is entirely inside the rounded rectangle and
/// has the full width of the rounded rectangle. If the rounded rectangle does
/// not have an axis-aligned intersection of its left and right side, the
/// resulting [Rect] will have negative width or height.
Rect get wideMiddleRect {
final double topRadius = math.max(tlRadiusY, trRadiusY);
final double bottomRadius = math.max(brRadiusY, blRadiusY);
return Rect.fromLTRB(
left,
top + topRadius,
right,
bottom - bottomRadius
);
}
/// The biggest rectangle that is entirely inside the rounded rectangle and
/// has the full height of the rounded rectangle. If the rounded rectangle
/// does not have an axis-aligned intersection of its top and bottom side, the
/// resulting [Rect] will have negative width or height.
Rect get tallMiddleRect {
final double leftRadius = math.max(blRadiusX, tlRadiusX);
final double rightRadius = math.max(trRadiusX, brRadiusX);
return Rect.fromLTRB(
left + leftRadius,
top,
right - rightRadius,
bottom
);
}
/// Whether this rounded rectangle encloses a non-zero area.
/// Negative areas are considered empty.
bool get isEmpty => left >= right || top >= bottom;
/// Whether all coordinates of this rounded rectangle are finite.
bool get isFinite => left.isFinite && top.isFinite && right.isFinite && bottom.isFinite;
/// Whether this rounded rectangle is a simple rectangle with zero
/// corner radii.
bool get isRect {
return (tlRadiusX == 0.0 || tlRadiusY == 0.0) &&
(trRadiusX == 0.0 || trRadiusY == 0.0) &&
(blRadiusX == 0.0 || blRadiusY == 0.0) &&
(brRadiusX == 0.0 || brRadiusY == 0.0);
}
/// Whether this rounded rectangle has a side with no straight section.
bool get isStadium {
return tlRadius == trRadius
&& trRadius == brRadius
&& brRadius == blRadius
&& (width <= 2.0 * tlRadiusX || height <= 2.0 * tlRadiusY);
}
/// Whether this rounded rectangle has no side with a straight section.
bool get isEllipse {
return tlRadius == trRadius
&& trRadius == brRadius
&& brRadius == blRadius
&& width <= 2.0 * tlRadiusX
&& height <= 2.0 * tlRadiusY;
}
/// Whether this rounded rectangle would draw as a circle.
bool get isCircle => width == height && isEllipse;
/// The lesser of the magnitudes of the [width] and the [height] of this
/// rounded rectangle.
double get shortestSide => math.min(width.abs(), height.abs());
/// The greater of the magnitudes of the [width] and the [height] of this
/// rounded rectangle.
double get longestSide => math.max(width.abs(), height.abs());
/// Whether any of the dimensions are `NaN`.
bool get hasNaN => left.isNaN || top.isNaN || right.isNaN || bottom.isNaN ||
trRadiusX.isNaN || trRadiusY.isNaN || tlRadiusX.isNaN || tlRadiusY.isNaN ||
brRadiusX.isNaN || brRadiusY.isNaN || blRadiusX.isNaN || blRadiusY.isNaN;
/// The offset to the point halfway between the left and right and the top and
/// bottom edges of this rectangle.
Offset get center => Offset(left + width / 2.0, top + height / 2.0);
  // Returns the lesser of `min` and the scale factor by which `radius1` and
  // `radius2` would need to be multiplied so that their sum does not exceed
  // `limit`.
double _getMin(double min, double radius1, double radius2, double limit) {
final double sum = radius1 + radius2;
if (sum > limit && sum != 0.0) {
return math.min(min, limit / sum);
}
return min;
}
/// Scales all radii so that on each side their sum will not exceed the size
/// of the width/height.
///
/// Skia already handles RRects with radii that are too large in this way.
/// Therefore, this method is only needed for RRect use cases that require
/// the appropriately scaled radii values.
///
/// See the [Skia scaling implementation](https://github.com/google/skia/blob/main/src/core/SkRRect.cpp)
/// for more details.
RRect scaleRadii() {
double scale = 1.0;
scale = _getMin(scale, blRadiusY, tlRadiusY, height);
scale = _getMin(scale, tlRadiusX, trRadiusX, width);
scale = _getMin(scale, trRadiusY, brRadiusY, height);
scale = _getMin(scale, brRadiusX, blRadiusX, width);
assert(scale >= 0);
if (scale < 1.0) {
return RRect._raw(
top: top,
left: left,
right: right,
bottom: bottom,
tlRadiusX: tlRadiusX * scale,
tlRadiusY: tlRadiusY * scale,
trRadiusX: trRadiusX * scale,
trRadiusY: trRadiusY * scale,
blRadiusX: blRadiusX * scale,
blRadiusY: blRadiusY * scale,
brRadiusX: brRadiusX * scale,
brRadiusY: brRadiusY * scale,
);
}
return RRect._raw(
top: top,
left: left,
right: right,
bottom: bottom,
tlRadiusX: tlRadiusX,
tlRadiusY: tlRadiusY,
trRadiusX: trRadiusX,
trRadiusY: trRadiusY,
blRadiusX: blRadiusX,
blRadiusY: blRadiusY,
brRadiusX: brRadiusX,
brRadiusY: brRadiusY,
);
}
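  // A worked sketch of the scaling (example values only): for
  // RRect.fromLTRBXY(0, 0, 100, 20, 30, 30) the vertical radii on each side
  // sum to 60 while the height is only 20, so the limiting scale is
  // 20 / 60 = 1 / 3 and every radius in the returned RRect becomes 10.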
/// Whether the point specified by the given offset (which is assumed to be
/// relative to the origin) lies inside the rounded rectangle.
///
/// This method may allocate (and cache) a copy of the object with normalized
/// radii the first time it is called on a particular [RRect] instance. When
/// using this method, prefer to reuse existing [RRect]s rather than
/// recreating the object each time.
bool contains(Offset point) {
if (point.dx < left || point.dx >= right || point.dy < top || point.dy >= bottom) {
return false;
} // outside bounding box
final RRect scaled = scaleRadii();
double x;
double y;
double radiusX;
double radiusY;
// check whether point is in one of the rounded corner areas
// x, y -> translate to ellipse center
if (point.dx < left + scaled.tlRadiusX &&
point.dy < top + scaled.tlRadiusY) {
x = point.dx - left - scaled.tlRadiusX;
y = point.dy - top - scaled.tlRadiusY;
radiusX = scaled.tlRadiusX;
radiusY = scaled.tlRadiusY;
} else if (point.dx > right - scaled.trRadiusX &&
point.dy < top + scaled.trRadiusY) {
x = point.dx - right + scaled.trRadiusX;
y = point.dy - top - scaled.trRadiusY;
radiusX = scaled.trRadiusX;
radiusY = scaled.trRadiusY;
} else if (point.dx > right - scaled.brRadiusX &&
point.dy > bottom - scaled.brRadiusY) {
x = point.dx - right + scaled.brRadiusX;
y = point.dy - bottom + scaled.brRadiusY;
radiusX = scaled.brRadiusX;
radiusY = scaled.brRadiusY;
} else if (point.dx < left + scaled.blRadiusX &&
point.dy > bottom - scaled.blRadiusY) {
x = point.dx - left - scaled.blRadiusX;
y = point.dy - bottom + scaled.blRadiusY;
radiusX = scaled.blRadiusX;
radiusY = scaled.blRadiusY;
} else {
return true; // inside and not within the rounded corner area
}
x = x / radiusX;
y = y / radiusY;
// check if the point is outside the unit circle
if (x * x + y * y > 1.0) {
return false;
}
return true;
}
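  // Illustrative sketch of the corner test above (example values only):
  //
  //   final RRect r =
  //       RRect.fromLTRBR(0.0, 0.0, 100.0, 100.0, const Radius.circular(20.0));
  //   r.contains(const Offset(2.0, 2.0));   // false: inside the bounds, but
  //                                         // outside the top-left corner curve
  //   r.contains(const Offset(50.0, 50.0)); // true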
/// Linearly interpolate between two rounded rectangles.
///
/// If either is null, this function substitutes [RRect.zero] instead.
///
/// The `t` argument represents position on the timeline, with 0.0 meaning
/// that the interpolation has not started, returning `a` (or something
/// equivalent to `a`), 1.0 meaning that the interpolation has finished,
/// returning `b` (or something equivalent to `b`), and values in between
/// meaning that the interpolation is at the relevant point on the timeline
/// between `a` and `b`. The interpolation can be extrapolated beyond 0.0 and
/// 1.0, so negative values and values greater than 1.0 are valid (and can
/// easily be generated by curves such as [Curves.elasticInOut]).
///
/// Values for `t` are usually obtained from an [Animation<double>], such as
/// an [AnimationController].
static RRect? lerp(RRect? a, RRect? b, double t) {
if (b == null) {
if (a == null) {
return null;
} else {
final double k = 1.0 - t;
return RRect._raw(
left: a.left * k,
top: a.top * k,
right: a.right * k,
bottom: a.bottom * k,
tlRadiusX: math.max(0, a.tlRadiusX * k),
tlRadiusY: math.max(0, a.tlRadiusY * k),
trRadiusX: math.max(0, a.trRadiusX * k),
trRadiusY: math.max(0, a.trRadiusY * k),
brRadiusX: math.max(0, a.brRadiusX * k),
brRadiusY: math.max(0, a.brRadiusY * k),
blRadiusX: math.max(0, a.blRadiusX * k),
blRadiusY: math.max(0, a.blRadiusY * k),
);
}
} else {
if (a == null) {
return RRect._raw(
left: b.left * t,
top: b.top * t,
right: b.right * t,
bottom: b.bottom * t,
tlRadiusX: math.max(0, b.tlRadiusX * t),
tlRadiusY: math.max(0, b.tlRadiusY * t),
trRadiusX: math.max(0, b.trRadiusX * t),
trRadiusY: math.max(0, b.trRadiusY * t),
brRadiusX: math.max(0, b.brRadiusX * t),
brRadiusY: math.max(0, b.brRadiusY * t),
blRadiusX: math.max(0, b.blRadiusX * t),
blRadiusY: math.max(0, b.blRadiusY * t),
);
} else {
return RRect._raw(
left: _lerpDouble(a.left, b.left, t),
top: _lerpDouble(a.top, b.top, t),
right: _lerpDouble(a.right, b.right, t),
bottom: _lerpDouble(a.bottom, b.bottom, t),
tlRadiusX: math.max(0, _lerpDouble(a.tlRadiusX, b.tlRadiusX, t)),
tlRadiusY: math.max(0, _lerpDouble(a.tlRadiusY, b.tlRadiusY, t)),
trRadiusX: math.max(0, _lerpDouble(a.trRadiusX, b.trRadiusX, t)),
trRadiusY: math.max(0, _lerpDouble(a.trRadiusY, b.trRadiusY, t)),
brRadiusX: math.max(0, _lerpDouble(a.brRadiusX, b.brRadiusX, t)),
brRadiusY: math.max(0, _lerpDouble(a.brRadiusY, b.brRadiusY, t)),
blRadiusX: math.max(0, _lerpDouble(a.blRadiusX, b.blRadiusX, t)),
blRadiusY: math.max(0, _lerpDouble(a.blRadiusY, b.blRadiusY, t)),
);
}
}
}
@override
bool operator ==(Object other) {
if (identical(this, other)) {
return true;
}
if (runtimeType != other.runtimeType) {
return false;
}
return other is RRect
&& other.left == left
&& other.top == top
&& other.right == right
&& other.bottom == bottom
&& other.tlRadiusX == tlRadiusX
&& other.tlRadiusY == tlRadiusY
&& other.trRadiusX == trRadiusX
&& other.trRadiusY == trRadiusY
&& other.blRadiusX == blRadiusX
&& other.blRadiusY == blRadiusY
&& other.brRadiusX == brRadiusX
&& other.brRadiusY == brRadiusY;
}
@override
int get hashCode => Object.hash(left, top, right, bottom,
tlRadiusX, tlRadiusY, trRadiusX, trRadiusY,
blRadiusX, blRadiusY, brRadiusX, brRadiusY);
@override
String toString() {
final String rect = '${left.toStringAsFixed(1)}, '
'${top.toStringAsFixed(1)}, '
'${right.toStringAsFixed(1)}, '
'${bottom.toStringAsFixed(1)}';
if (tlRadius == trRadius &&
trRadius == brRadius &&
brRadius == blRadius) {
if (tlRadius.x == tlRadius.y) {
return 'RRect.fromLTRBR($rect, ${tlRadius.x.toStringAsFixed(1)})';
}
return 'RRect.fromLTRBXY($rect, ${tlRadius.x.toStringAsFixed(1)}, ${tlRadius.y.toStringAsFixed(1)})';
}
return 'RRect.fromLTRBAndCorners('
'$rect, '
'topLeft: $tlRadius, '
'topRight: $trRadius, '
'bottomRight: $brRadius, '
'bottomLeft: $blRadius'
')';
}
}
/// A transform consisting of a translation, a rotation, and a uniform scale.
///
/// Used by [Canvas.drawAtlas]. This is a more efficient way to represent these
/// simple transformations than a full matrix.
// Modeled after Skia's SkRSXform.
class RSTransform {
/// Creates an RSTransform.
///
/// An [RSTransform] expresses the combination of a translation, a rotation
/// around a particular point, and a scale factor.
///
/// The first argument, `scos`, is the cosine of the rotation, multiplied by
/// the scale factor.
///
/// The second argument, `ssin`, is the sine of the rotation, multiplied by
/// that same scale factor.
///
/// The third argument is the x coordinate of the translation, minus the
/// `scos` argument multiplied by the x-coordinate of the rotation point, plus
/// the `ssin` argument multiplied by the y-coordinate of the rotation point.
///
/// The fourth argument is the y coordinate of the translation, minus the `ssin`
/// argument multiplied by the x-coordinate of the rotation point, minus the
/// `scos` argument multiplied by the y-coordinate of the rotation point.
///
/// The [RSTransform.fromComponents] method may be a simpler way to
/// construct these values. However, if there is a way to factor out the
/// computations of the sine and cosine of the rotation so that they can be
/// reused over multiple calls to this constructor, it may be more efficient
/// to directly use this constructor instead.
RSTransform(double scos, double ssin, double tx, double ty) {
_value
..[0] = scos
..[1] = ssin
..[2] = tx
..[3] = ty;
}
/// Creates an RSTransform from its individual components.
///
/// The `rotation` parameter gives the rotation in radians.
///
/// The `scale` parameter describes the uniform scale factor.
///
/// The `anchorX` and `anchorY` parameters give the coordinate of the point
/// around which to rotate.
///
/// The `translateX` and `translateY` parameters give the coordinate of the
/// offset by which to translate.
///
/// This constructor computes the arguments of the [RSTransform.new]
/// constructor and then defers to that constructor to actually create the
/// object. If many [RSTransform] objects are being created and there is a way
/// to factor out the computations of the sine and cosine of the rotation
/// (which are computed each time this constructor is called) and reuse them
/// over multiple [RSTransform] objects, it may be more efficient to directly
/// use the more direct [RSTransform.new] constructor instead.
factory RSTransform.fromComponents({
required double rotation,
required double scale,
required double anchorX,
required double anchorY,
required double translateX,
required double translateY
}) {
final double scos = math.cos(rotation) * scale;
final double ssin = math.sin(rotation) * scale;
final double tx = translateX + -scos * anchorX + ssin * anchorY;
final double ty = translateY + -ssin * anchorX - scos * anchorY;
return RSTransform(scos, ssin, tx, ty);
}
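  // A worked sketch of the formulas above (example values only):
  // rotation = pi / 2, scale = 2, anchor = (1, 0), translation = (10, 10) gives
  //   scos = cos(pi / 2) * 2 = 0
  //   ssin = sin(pi / 2) * 2 = 2
  //   tx   = 10 + -0 * 1 + 2 * 0 = 10
  //   ty   = 10 + -2 * 1 - 0 * 0 = 8
  // which is the same as constructing RSTransform(0, 2, 10, 8) directly.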
final Float32List _value = Float32List(4);
/// The cosine of the rotation multiplied by the scale factor.
double get scos => _value[0];
/// The sine of the rotation multiplied by that same scale factor.
double get ssin => _value[1];
/// The x coordinate of the translation, minus [scos] multiplied by the
/// x-coordinate of the rotation point, plus [ssin] multiplied by the
/// y-coordinate of the rotation point.
double get tx => _value[2];
/// The y coordinate of the translation, minus [ssin] multiplied by the
/// x-coordinate of the rotation point, minus [scos] multiplied by the
/// y-coordinate of the rotation point.
double get ty => _value[3];
}
// ---- end of engine/lib/ui/geometry.dart ----
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/lib/ui/painting/canvas.h"
#include <cmath>
#include "flutter/display_list/dl_builder.h"
#include "flutter/lib/ui/floating_point.h"
#include "flutter/lib/ui/painting/image.h"
#include "flutter/lib/ui/painting/image_filter.h"
#include "flutter/lib/ui/painting/paint.h"
#include "flutter/lib/ui/ui_dart_state.h"
#include "flutter/lib/ui/window/platform_configuration.h"
using tonic::ToDart;
namespace flutter {
IMPLEMENT_WRAPPERTYPEINFO(ui, Canvas);
void Canvas::Create(Dart_Handle wrapper,
PictureRecorder* recorder,
double left,
double top,
double right,
double bottom) {
UIDartState::ThrowIfUIOperationsProhibited();
if (!recorder) {
Dart_ThrowException(
ToDart("Canvas constructor called with non-genuine PictureRecorder."));
return;
}
fml::RefPtr<Canvas> canvas =
fml::MakeRefCounted<Canvas>(recorder->BeginRecording(
SkRect::MakeLTRB(SafeNarrow(left), SafeNarrow(top), SafeNarrow(right),
SafeNarrow(bottom))));
recorder->set_canvas(canvas);
canvas->AssociateWithDartWrapper(wrapper);
}
Canvas::Canvas(sk_sp<DisplayListBuilder> builder)
: display_list_builder_(std::move(builder)) {}
Canvas::~Canvas() {}
void Canvas::save() {
if (display_list_builder_) {
builder()->Save();
}
}
void Canvas::saveLayerWithoutBounds(Dart_Handle paint_objects,
Dart_Handle paint_data) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (display_list_builder_) {
DlPaint dl_paint;
const DlPaint* save_paint = paint.paint(dl_paint, kSaveLayerWithPaintFlags);
FML_DCHECK(save_paint);
TRACE_EVENT0("flutter", "ui.Canvas::saveLayer (Recorded)");
builder()->SaveLayer(nullptr, save_paint);
}
}
void Canvas::saveLayer(double left,
double top,
double right,
double bottom,
Dart_Handle paint_objects,
Dart_Handle paint_data) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
SkRect bounds = SkRect::MakeLTRB(SafeNarrow(left), SafeNarrow(top),
SafeNarrow(right), SafeNarrow(bottom));
if (display_list_builder_) {
DlPaint dl_paint;
const DlPaint* save_paint = paint.paint(dl_paint, kSaveLayerWithPaintFlags);
FML_DCHECK(save_paint);
TRACE_EVENT0("flutter", "ui.Canvas::saveLayer (Recorded)");
builder()->SaveLayer(&bounds, save_paint);
}
}
void Canvas::restore() {
if (display_list_builder_) {
builder()->Restore();
}
}
int Canvas::getSaveCount() {
if (display_list_builder_) {
return builder()->GetSaveCount();
} else {
return 0;
}
}
void Canvas::restoreToCount(int count) {
if (display_list_builder_ && count < getSaveCount()) {
builder()->RestoreToCount(count);
}
}
void Canvas::translate(double dx, double dy) {
if (display_list_builder_) {
builder()->Translate(SafeNarrow(dx), SafeNarrow(dy));
}
}
void Canvas::scale(double sx, double sy) {
if (display_list_builder_) {
builder()->Scale(SafeNarrow(sx), SafeNarrow(sy));
}
}
void Canvas::rotate(double radians) {
if (display_list_builder_) {
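    // DlCanvas::Rotate expects degrees, while the Dart-side API supplies
    // radians, hence the conversion below.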
builder()->Rotate(SafeNarrow(radians) * 180.0f / static_cast<float>(M_PI));
}
}
void Canvas::skew(double sx, double sy) {
if (display_list_builder_) {
builder()->Skew(SafeNarrow(sx), SafeNarrow(sy));
}
}
void Canvas::transform(const tonic::Float64List& matrix4) {
// The Float array stored by Dart Matrix4 is in column-major order
// Both DisplayList and SkM44 constructor take row-major matrix order
if (display_list_builder_) {
// clang-format off
builder()->TransformFullPerspective(
SafeNarrow(matrix4[ 0]), SafeNarrow(matrix4[ 4]), SafeNarrow(matrix4[ 8]), SafeNarrow(matrix4[12]),
SafeNarrow(matrix4[ 1]), SafeNarrow(matrix4[ 5]), SafeNarrow(matrix4[ 9]), SafeNarrow(matrix4[13]),
SafeNarrow(matrix4[ 2]), SafeNarrow(matrix4[ 6]), SafeNarrow(matrix4[10]), SafeNarrow(matrix4[14]),
SafeNarrow(matrix4[ 3]), SafeNarrow(matrix4[ 7]), SafeNarrow(matrix4[11]), SafeNarrow(matrix4[15]));
// clang-format on
}
}
void Canvas::getTransform(Dart_Handle matrix4_handle) {
if (display_list_builder_) {
SkM44 sk_m44 = builder()->GetTransformFullPerspective();
SkScalar m44_values[16];
// The Float array stored by Dart Matrix4 is in column-major order
sk_m44.getColMajor(m44_values);
auto matrix4 = tonic::Float64List(matrix4_handle);
for (int i = 0; i < 16; i++) {
matrix4[i] = m44_values[i];
}
}
}
void Canvas::clipRect(double left,
double top,
double right,
double bottom,
DlCanvas::ClipOp clipOp,
bool doAntiAlias) {
if (display_list_builder_) {
builder()->ClipRect(SkRect::MakeLTRB(SafeNarrow(left), SafeNarrow(top),
SafeNarrow(right), SafeNarrow(bottom)),
clipOp, doAntiAlias);
}
}
void Canvas::clipRRect(const RRect& rrect, bool doAntiAlias) {
if (display_list_builder_) {
builder()->ClipRRect(rrect.sk_rrect, DlCanvas::ClipOp::kIntersect,
doAntiAlias);
}
}
void Canvas::clipPath(const CanvasPath* path, bool doAntiAlias) {
if (!path) {
Dart_ThrowException(
ToDart("Canvas.clipPath called with non-genuine Path."));
return;
}
if (display_list_builder_) {
builder()->ClipPath(path->path(), DlCanvas::ClipOp::kIntersect,
doAntiAlias);
}
}
void Canvas::getDestinationClipBounds(Dart_Handle rect_handle) {
if (display_list_builder_) {
auto rect = tonic::Float64List(rect_handle);
SkRect bounds = builder()->GetDestinationClipBounds();
rect[0] = bounds.fLeft;
rect[1] = bounds.fTop;
rect[2] = bounds.fRight;
rect[3] = bounds.fBottom;
}
}
void Canvas::getLocalClipBounds(Dart_Handle rect_handle) {
if (display_list_builder_) {
auto rect = tonic::Float64List(rect_handle);
SkRect bounds = builder()->GetLocalClipBounds();
rect[0] = bounds.fLeft;
rect[1] = bounds.fTop;
rect[2] = bounds.fRight;
rect[3] = bounds.fBottom;
}
}
void Canvas::drawColor(SkColor color, DlBlendMode blend_mode) {
if (display_list_builder_) {
builder()->DrawColor(DlColor(color), blend_mode);
}
}
void Canvas::drawLine(double x1,
double y1,
double x2,
double y2,
Dart_Handle paint_objects,
Dart_Handle paint_data) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (display_list_builder_) {
DlPaint dl_paint;
paint.paint(dl_paint, kDrawLineFlags);
builder()->DrawLine(SkPoint::Make(SafeNarrow(x1), SafeNarrow(y1)),
SkPoint::Make(SafeNarrow(x2), SafeNarrow(y2)),
dl_paint);
}
}
void Canvas::drawPaint(Dart_Handle paint_objects, Dart_Handle paint_data) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (display_list_builder_) {
DlPaint dl_paint;
paint.paint(dl_paint, kDrawPaintFlags);
std::shared_ptr<const DlImageFilter> filter = dl_paint.getImageFilter();
if (filter && !filter->asColorFilter()) {
// drawPaint does an implicit saveLayer if an SkImageFilter is
// present that cannot be replaced by an SkColorFilter.
TRACE_EVENT0("flutter", "ui.Canvas::saveLayer (Recorded)");
}
builder()->DrawPaint(dl_paint);
}
}
void Canvas::drawRect(double left,
double top,
double right,
double bottom,
Dart_Handle paint_objects,
Dart_Handle paint_data) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (display_list_builder_) {
DlPaint dl_paint;
paint.paint(dl_paint, kDrawRectFlags);
builder()->DrawRect(SkRect::MakeLTRB(SafeNarrow(left), SafeNarrow(top),
SafeNarrow(right), SafeNarrow(bottom)),
dl_paint);
}
}
void Canvas::drawRRect(const RRect& rrect,
Dart_Handle paint_objects,
Dart_Handle paint_data) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (display_list_builder_) {
DlPaint dl_paint;
paint.paint(dl_paint, kDrawRRectFlags);
builder()->DrawRRect(rrect.sk_rrect, dl_paint);
}
}
void Canvas::drawDRRect(const RRect& outer,
const RRect& inner,
Dart_Handle paint_objects,
Dart_Handle paint_data) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (display_list_builder_) {
DlPaint dl_paint;
paint.paint(dl_paint, kDrawDRRectFlags);
builder()->DrawDRRect(outer.sk_rrect, inner.sk_rrect, dl_paint);
}
}
void Canvas::drawOval(double left,
double top,
double right,
double bottom,
Dart_Handle paint_objects,
Dart_Handle paint_data) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (display_list_builder_) {
DlPaint dl_paint;
paint.paint(dl_paint, kDrawOvalFlags);
builder()->DrawOval(SkRect::MakeLTRB(SafeNarrow(left), SafeNarrow(top),
SafeNarrow(right), SafeNarrow(bottom)),
dl_paint);
}
}
void Canvas::drawCircle(double x,
double y,
double radius,
Dart_Handle paint_objects,
Dart_Handle paint_data) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (display_list_builder_) {
DlPaint dl_paint;
paint.paint(dl_paint, kDrawCircleFlags);
builder()->DrawCircle(SkPoint::Make(SafeNarrow(x), SafeNarrow(y)),
SafeNarrow(radius), dl_paint);
}
}
void Canvas::drawArc(double left,
double top,
double right,
double bottom,
double startAngle,
double sweepAngle,
bool useCenter,
Dart_Handle paint_objects,
Dart_Handle paint_data) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (display_list_builder_) {
DlPaint dl_paint;
paint.paint(dl_paint, useCenter //
? kDrawArcWithCenterFlags
: kDrawArcNoCenterFlags);
builder()->DrawArc(
SkRect::MakeLTRB(SafeNarrow(left), SafeNarrow(top), SafeNarrow(right),
SafeNarrow(bottom)),
SafeNarrow(startAngle) * 180.0f / static_cast<float>(M_PI),
SafeNarrow(sweepAngle) * 180.0f / static_cast<float>(M_PI), useCenter,
dl_paint);
}
}
void Canvas::drawPath(const CanvasPath* path,
Dart_Handle paint_objects,
Dart_Handle paint_data) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (!path) {
Dart_ThrowException(
ToDart("Canvas.drawPath called with non-genuine Path."));
return;
}
if (display_list_builder_) {
DlPaint dl_paint;
paint.paint(dl_paint, kDrawPathFlags);
builder()->DrawPath(path->path(), dl_paint);
}
}
Dart_Handle Canvas::drawImage(const CanvasImage* image,
double x,
double y,
Dart_Handle paint_objects,
Dart_Handle paint_data,
int filterQualityIndex) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (!image) {
return ToDart("Canvas.drawImage called with non-genuine Image.");
}
auto dl_image = image->image();
if (!dl_image) {
return Dart_Null();
}
auto error = dl_image->get_error();
if (error) {
return ToDart(error.value());
}
auto sampling = ImageFilter::SamplingFromIndex(filterQualityIndex);
if (display_list_builder_) {
DlPaint dl_paint;
const DlPaint* opt_paint = paint.paint(dl_paint, kDrawImageWithPaintFlags);
builder()->DrawImage(dl_image, SkPoint::Make(SafeNarrow(x), SafeNarrow(y)),
sampling, opt_paint);
}
return Dart_Null();
}
Dart_Handle Canvas::drawImageRect(const CanvasImage* image,
double src_left,
double src_top,
double src_right,
double src_bottom,
double dst_left,
double dst_top,
double dst_right,
double dst_bottom,
Dart_Handle paint_objects,
Dart_Handle paint_data,
int filterQualityIndex) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (!image) {
return ToDart("Canvas.drawImageRect called with non-genuine Image.");
}
auto dl_image = image->image();
if (!dl_image) {
return Dart_Null();
}
auto error = dl_image->get_error();
if (error) {
return ToDart(error.value());
}
SkRect src = SkRect::MakeLTRB(SafeNarrow(src_left), SafeNarrow(src_top),
SafeNarrow(src_right), SafeNarrow(src_bottom));
SkRect dst = SkRect::MakeLTRB(SafeNarrow(dst_left), SafeNarrow(dst_top),
SafeNarrow(dst_right), SafeNarrow(dst_bottom));
auto sampling = ImageFilter::SamplingFromIndex(filterQualityIndex);
if (display_list_builder_) {
DlPaint dl_paint;
const DlPaint* opt_paint =
paint.paint(dl_paint, kDrawImageRectWithPaintFlags);
builder()->DrawImageRect(dl_image, src, dst, sampling, opt_paint,
DlCanvas::SrcRectConstraint::kFast);
}
return Dart_Null();
}
Dart_Handle Canvas::drawImageNine(const CanvasImage* image,
double center_left,
double center_top,
double center_right,
double center_bottom,
double dst_left,
double dst_top,
double dst_right,
double dst_bottom,
Dart_Handle paint_objects,
Dart_Handle paint_data,
int bitmapSamplingIndex) {
Paint paint(paint_objects, paint_data);
FML_DCHECK(paint.isNotNull());
if (!image) {
return ToDart("Canvas.drawImageNine called with non-genuine Image.");
}
auto dl_image = image->image();
if (!dl_image) {
return Dart_Null();
}
auto error = dl_image->get_error();
if (error) {
return ToDart(error.value());
}
SkRect center =
SkRect::MakeLTRB(SafeNarrow(center_left), SafeNarrow(center_top),
SafeNarrow(center_right), SafeNarrow(center_bottom));
SkIRect icenter;
center.round(&icenter);
SkRect dst = SkRect::MakeLTRB(SafeNarrow(dst_left), SafeNarrow(dst_top),
SafeNarrow(dst_right), SafeNarrow(dst_bottom));
auto filter = ImageFilter::FilterModeFromIndex(bitmapSamplingIndex);
if (display_list_builder_) {
DlPaint dl_paint;
const DlPaint* opt_paint =
paint.paint(dl_paint, kDrawImageNineWithPaintFlags);
builder()->DrawImageNine(dl_image, icenter, dst, filter, opt_paint);
}
return Dart_Null();
}
void Canvas::drawPicture(Picture* picture) {
if (!picture) {
Dart_ThrowException(
ToDart("Canvas.drawPicture called with non-genuine Picture."));
return;
}
if (picture->display_list()) {
if (display_list_builder_) {
builder()->DrawDisplayList(picture->display_list());
}
} else {
FML_DCHECK(false);
}
}
void Canvas::drawPoints(Dart_Handle paint_objects,
Dart_Handle paint_data,
DlCanvas::PointMode point_mode,
const tonic::Float32List& points) {
Paint paint(paint_objects, paint_data);
static_assert(sizeof(SkPoint) == sizeof(float) * 2,
"SkPoint doesn't use floats.");
FML_DCHECK(paint.isNotNull());
if (display_list_builder_) {
DlPaint dl_paint;
switch (point_mode) {
case DlCanvas::PointMode::kPoints:
paint.paint(dl_paint, kDrawPointsAsPointsFlags);
break;
case DlCanvas::PointMode::kLines:
paint.paint(dl_paint, kDrawPointsAsLinesFlags);
break;
case DlCanvas::PointMode::kPolygon:
paint.paint(dl_paint, kDrawPointsAsPolygonFlags);
break;
}
builder()->DrawPoints(point_mode,
points.num_elements() / 2, // SkPoints have 2 floats
reinterpret_cast<const SkPoint*>(points.data()),
dl_paint);
}
}
void Canvas::drawVertices(const Vertices* vertices,
DlBlendMode blend_mode,
Dart_Handle paint_objects,
Dart_Handle paint_data) {
Paint paint(paint_objects, paint_data);
if (!vertices) {
Dart_ThrowException(
ToDart("Canvas.drawVertices called with non-genuine Vertices."));
return;
}
FML_DCHECK(paint.isNotNull());
if (display_list_builder_) {
DlPaint dl_paint;
paint.paint(dl_paint, kDrawVerticesFlags);
builder()->DrawVertices(vertices->vertices(), blend_mode, dl_paint);
}
}
Dart_Handle Canvas::drawAtlas(Dart_Handle paint_objects,
Dart_Handle paint_data,
int filterQualityIndex,
CanvasImage* atlas,
Dart_Handle transforms_handle,
Dart_Handle rects_handle,
Dart_Handle colors_handle,
DlBlendMode blend_mode,
Dart_Handle cull_rect_handle) {
Paint paint(paint_objects, paint_data);
if (!atlas) {
return ToDart(
"Canvas.drawAtlas or Canvas.drawRawAtlas called with "
"non-genuine Image.");
}
auto dl_image = atlas->image();
auto error = dl_image->get_error();
if (error) {
return ToDart(error.value());
}
static_assert(sizeof(SkRSXform) == sizeof(float) * 4,
"SkRSXform doesn't use floats.");
static_assert(sizeof(SkRect) == sizeof(float) * 4,
"SkRect doesn't use floats.");
auto sampling = ImageFilter::SamplingFromIndex(filterQualityIndex);
FML_DCHECK(paint.isNotNull());
if (display_list_builder_) {
tonic::Float32List transforms(transforms_handle);
tonic::Float32List rects(rects_handle);
tonic::Int32List colors(colors_handle);
tonic::Float32List cull_rect(cull_rect_handle);
DlPaint dl_paint;
const DlPaint* opt_paint = paint.paint(dl_paint, kDrawAtlasWithPaintFlags);
builder()->DrawAtlas(
dl_image, reinterpret_cast<const SkRSXform*>(transforms.data()),
reinterpret_cast<const SkRect*>(rects.data()),
reinterpret_cast<const DlColor*>(colors.data()),
rects.num_elements() / 4, // Each SkRect has four floats.
blend_mode, sampling, reinterpret_cast<const SkRect*>(cull_rect.data()),
opt_paint);
}
return Dart_Null();
}
void Canvas::drawShadow(const CanvasPath* path,
SkColor color,
double elevation,
bool transparentOccluder) {
if (!path) {
Dart_ThrowException(
ToDart("Canvas.drawShader called with non-genuine Path."));
return;
}
// Not using SafeNarrow because DPR will always be a relatively small number.
const ViewportMetrics* metrics =
UIDartState::Current()->platform_configuration()->GetMetrics(0);
SkScalar dpr;
// TODO(dkwingsmt): We should support rendering shadows on non-implicit views.
// However, currently this method has no way to get the target view ID.
if (metrics == nullptr) {
dpr = 1.0f;
} else {
dpr = static_cast<float>(metrics->device_pixel_ratio);
}
if (display_list_builder_) {
// The DrawShadow mechanism results in non-public operations being
// performed on the canvas involving an SkDrawShadowRec. Since we
// cannot include the header that defines that structure, we cannot
// record an operation that it injects into an SkCanvas. To prevent
// that situation we bypass the canvas interface and inject the
// shadow parameters directly into the underlying DisplayList.
// See: https://bugs.chromium.org/p/skia/issues/detail?id=12125
builder()->DrawShadow(path->path(), DlColor(color), SafeNarrow(elevation),
transparentOccluder, dpr);
}
}
void Canvas::Invalidate() {
display_list_builder_ = nullptr;
if (dart_wrapper()) {
ClearDartWrapper();
}
}
} // namespace flutter
| engine/lib/ui/painting/canvas.cc/0 | {
"file_path": "engine/lib/ui/painting/canvas.cc",
"repo_id": "engine",
"token_count": 10286
} | 237 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <memory>
#include <utility>
#include "flutter/lib/ui/painting/fragment_shader.h"
#include "flutter/display_list/dl_tile_mode.h"
#include "flutter/display_list/effects/dl_color_source.h"
#include "flutter/lib/ui/dart_wrapper.h"
#include "flutter/lib/ui/painting/fragment_program.h"
#include "flutter/lib/ui/ui_dart_state.h"
#include "third_party/skia/include/core/SkString.h"
#include "third_party/tonic/converter/dart_converter.h"
#include "third_party/tonic/dart_args.h"
#include "third_party/tonic/dart_binding_macros.h"
#include "third_party/tonic/dart_library_natives.h"
#include "third_party/tonic/typed_data/typed_list.h"
namespace flutter {
IMPLEMENT_WRAPPERTYPEINFO(ui, ReusableFragmentShader);
ReusableFragmentShader::ReusableFragmentShader(
fml::RefPtr<FragmentProgram> program,
uint64_t float_count,
uint64_t sampler_count)
: program_(std::move(program)),
uniform_data_(SkData::MakeUninitialized(
(float_count + 2 * sampler_count) * sizeof(float))),
samplers_(sampler_count),
float_count_(float_count) {}
Dart_Handle ReusableFragmentShader::Create(Dart_Handle wrapper,
Dart_Handle program,
Dart_Handle float_count_handle,
Dart_Handle sampler_count_handle) {
auto* fragment_program =
tonic::DartConverter<FragmentProgram*>::FromDart(program);
uint64_t float_count =
tonic::DartConverter<uint64_t>::FromDart(float_count_handle);
uint64_t sampler_count =
tonic::DartConverter<uint64_t>::FromDart(sampler_count_handle);
auto res = fml::MakeRefCounted<ReusableFragmentShader>(
fml::Ref(fragment_program), float_count, sampler_count);
res->AssociateWithDartWrapper(wrapper);
void* raw_uniform_data =
reinterpret_cast<void*>(res->uniform_data_->writable_data());
return Dart_NewExternalTypedData(Dart_TypedData_kFloat32, raw_uniform_data,
float_count);
}
bool ReusableFragmentShader::ValidateSamplers() {
for (auto i = 0u; i < samplers_.size(); i += 1) {
if (samplers_[i] == nullptr) {
return false;
}
// The samplers should have been checked as they were added; this
// is a double sanity check.
FML_DCHECK(samplers_[i]->isUIThreadSafe());
}
return true;
}
void ReusableFragmentShader::SetImageSampler(Dart_Handle index_handle,
Dart_Handle image_handle) {
uint64_t index = tonic::DartConverter<uint64_t>::FromDart(index_handle);
CanvasImage* image =
tonic::DartConverter<CanvasImage*>::FromDart(image_handle);
if (index >= samplers_.size()) {
Dart_ThrowException(tonic::ToDart("Sampler index out of bounds"));
}
if (!image->image()->isUIThreadSafe()) {
Dart_ThrowException(tonic::ToDart("Image is not thread-safe"));
}
// TODO(115794): Once the DlImageSampling enum is replaced, expose the
// sampling options as a new default parameter for users.
samplers_[index] = std::make_shared<DlImageColorSource>(
image->image(), DlTileMode::kClamp, DlTileMode::kClamp,
DlImageSampling::kNearestNeighbor, nullptr);
// This should be true since we already checked the image above, but
// we check again for sanity.
FML_DCHECK(samplers_[index]->isUIThreadSafe());
auto* uniform_floats =
reinterpret_cast<float*>(uniform_data_->writable_data());
uniform_floats[float_count_ + 2 * index] = image->width();
uniform_floats[float_count_ + 2 * index + 1] = image->height();
}
std::shared_ptr<DlColorSource> ReusableFragmentShader::shader(
DlImageSampling sampling) {
FML_CHECK(program_);
// The lifetime of this object is longer than a frame, and the uniforms can be
// continually changed on the UI thread. So we take a copy of the uniforms
// before handing it to the DisplayList for consumption on the render thread.
auto uniform_data = std::make_shared<std::vector<uint8_t>>();
uniform_data->resize(uniform_data_->size());
memcpy(uniform_data->data(), uniform_data_->bytes(), uniform_data->size());
auto source = program_->MakeDlColorSource(std::move(uniform_data), samplers_);
// The samplers should have been checked as they were added; this
// is a double sanity check.
FML_DCHECK(source->isUIThreadSafe());
return source;
}
void ReusableFragmentShader::Dispose() {
uniform_data_.reset();
program_ = nullptr;
samplers_.clear();
ClearDartWrapper();
}
ReusableFragmentShader::~ReusableFragmentShader() = default;
} // namespace flutter
| engine/lib/ui/painting/fragment_shader.cc/0 | {
"file_path": "engine/lib/ui/painting/fragment_shader.cc",
"repo_id": "engine",
"token_count": 1869
} | 238 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_LIB_UI_PAINTING_IMAGE_DESCRIPTOR_H_
#define FLUTTER_LIB_UI_PAINTING_IMAGE_DESCRIPTOR_H_
#include <cstdint>
#include <memory>
#include <optional>
#include "flutter/fml/macros.h"
#include "flutter/lib/ui/dart_wrapper.h"
#include "flutter/lib/ui/painting/image_generator_registry.h"
#include "flutter/lib/ui/painting/immutable_buffer.h"
#include "third_party/skia/include/core/SkData.h"
#include "third_party/skia/include/core/SkImage.h"
#include "third_party/skia/include/core/SkImageInfo.h"
#include "third_party/skia/include/core/SkPixmap.h"
#include "third_party/skia/include/core/SkSize.h"
#include "third_party/tonic/dart_library_natives.h"
namespace flutter {
/// @brief Creates an image descriptor for encoded or decoded image data,
/// describing the width, height, and bytes per pixel for that image.
/// This class will hold a reference on the underlying image data, and
/// in the case of compressed data, an `ImageGenerator` for the data.
/// The Codec initialization actually happens in initEncoded, making
/// `instantiateCodec` a lightweight operation.
/// @see `ImageGenerator`
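///
/// A hedged sketch of the Dart-side flow that typically reaches this class
/// (the dart:ui calls below are an assumption about the caller, not part of
/// this header):
///
///     final buffer = await ui.ImmutableBuffer.fromUint8List(bytes);
///     final descriptor = await ui.ImageDescriptor.encoded(buffer);
///     final codec = await descriptor.instantiateCodec();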
class ImageDescriptor : public RefCountedDartWrappable<ImageDescriptor> {
public:
~ImageDescriptor() override = default;
// This must be kept in sync with the enum in painting.dart
enum PixelFormat {
kRGBA8888,
kBGRA8888,
kRGBAFloat32,
};
/// @brief Asynchronously initializes an ImageDescriptor for an encoded
/// image, as long as the format is recognized by a decoder installed
/// in the `ImageGeneratorRegistry`. Calling this method will create
/// an `ImageGenerator` and read EXIF corrected dimensions from the
/// image data.
/// @see `ImageGeneratorRegistry`
static Dart_Handle initEncoded(Dart_Handle descriptor_handle,
ImmutableBuffer* immutable_buffer,
Dart_Handle callback_handle);
/// @brief Synchronously initializes an `ImageDescriptor` for decompressed
/// image data as specified by the `PixelFormat`.
static void initRaw(Dart_Handle descriptor_handle,
const fml::RefPtr<ImmutableBuffer>& data,
int width,
int height,
int row_bytes,
PixelFormat pixel_format);
/// @brief Associates a flutter::Codec object with the dart.ui Codec handle.
void instantiateCodec(Dart_Handle codec, int target_width, int target_height);
/// @brief The width of this image, EXIF oriented if applicable.
int width() const { return image_info_.width(); }
/// @brief The height of this image, EXIF oriented if applicable.
int height() const { return image_info_.height(); }
/// @brief The bytes per pixel of the image.
int bytesPerPixel() const { return image_info_.bytesPerPixel(); }
/// @brief The byte length of the first row of the image.
/// Defaults to width() * bytesPerPixel().
int row_bytes() const {
return row_bytes_.value_or(
static_cast<size_t>(image_info_.width() * image_info_.bytesPerPixel()));
}
/// @brief Whether the given `target_width` or `target_height` differ from
/// `width()` and `height()` respectively.
bool should_resize(int target_width, int target_height) const {
return target_width != width() || target_height != height();
}
/// @brief The underlying buffer for this image.
sk_sp<SkData> data() const { return buffer_; }
sk_sp<SkImage> image() const;
/// @brief Whether this descriptor represents compressed (encoded) data or
/// not.
bool is_compressed() const { return !!generator_; }
/// @brief The orientation corrected image info for this image.
const SkImageInfo& image_info() const { return image_info_; }
/// @brief Gets the scaled dimensions of this image, if backed by an
/// `ImageGenerator` that can perform efficient subpixel scaling.
/// @see `ImageGenerator::GetScaledDimensions`
SkISize get_scaled_dimensions(float scale) {
if (generator_) {
return generator_->GetScaledDimensions(scale);
}
return image_info_.dimensions();
}
/// @brief Gets pixels for this image transformed based on the EXIF
/// orientation tag, if applicable.
bool get_pixels(const SkPixmap& pixmap) const;
void dispose() {
buffer_.reset();
generator_.reset();
ClearDartWrapper();
}
private:
ImageDescriptor(sk_sp<SkData> buffer,
const SkImageInfo& image_info,
std::optional<size_t> row_bytes);
ImageDescriptor(sk_sp<SkData> buffer,
std::shared_ptr<ImageGenerator> generator);
sk_sp<SkData> buffer_;
std::shared_ptr<ImageGenerator> generator_;
const SkImageInfo image_info_;
std::optional<size_t> row_bytes_;
const SkImageInfo CreateImageInfo() const;
DEFINE_WRAPPERTYPEINFO();
FML_FRIEND_MAKE_REF_COUNTED(ImageDescriptor);
FML_DISALLOW_COPY_AND_ASSIGN(ImageDescriptor);
friend class ImageDecoderFixtureTest;
};
} // namespace flutter
#endif // FLUTTER_LIB_UI_PAINTING_IMAGE_DESCRIPTOR_H_
| engine/lib/ui/painting/image_descriptor.h/0 | {
"file_path": "engine/lib/ui/painting/image_descriptor.h",
"repo_id": "engine",
"token_count": 1952
} | 239 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <algorithm>
#include <utility>
#include "flutter/lib/ui/painting/image_generator_registry.h"
#include "third_party/skia/include/codec/SkCodec.h"
#include "third_party/skia/include/core/SkImageGenerator.h"
#ifdef FML_OS_MACOSX
#include "third_party/skia/include/ports/SkImageGeneratorCG.h"
#elif FML_OS_WIN
#include "third_party/skia/include/ports/SkImageGeneratorWIC.h"
#endif
#include "image_generator_apng.h"
namespace flutter {
ImageGeneratorRegistry::ImageGeneratorRegistry() : weak_factory_(this) {
AddFactory(
[](sk_sp<SkData> buffer) {
return APNGImageGenerator::MakeFromData(std::move(buffer));
},
0);
AddFactory(
[](sk_sp<SkData> buffer) {
return BuiltinSkiaCodecImageGenerator::MakeFromData(std::move(buffer));
},
0);
// TODO(bdero): https://github.com/flutter/flutter/issues/82603
#ifdef FML_OS_MACOSX
AddFactory(
[](sk_sp<SkData> buffer) {
auto generator =
SkImageGeneratorCG::MakeFromEncodedCG(std::move(buffer));
return BuiltinSkiaImageGenerator::MakeFromGenerator(
std::move(generator));
},
0);
#elif FML_OS_WIN
AddFactory(
[](sk_sp<SkData> buffer) {
auto generator = SkImageGeneratorWIC::MakeFromEncodedWIC(buffer);
return BuiltinSkiaImageGenerator::MakeFromGenerator(
std::move(generator));
},
0);
#endif
}
ImageGeneratorRegistry::~ImageGeneratorRegistry() = default;
void ImageGeneratorRegistry::AddFactory(ImageGeneratorFactory factory,
int32_t priority) {
image_generator_factories_.insert({std::move(factory), priority, ++nonce_});
}
std::shared_ptr<ImageGenerator>
ImageGeneratorRegistry::CreateCompatibleGenerator(const sk_sp<SkData>& buffer) {
if (image_generator_factories_.empty()) {
FML_LOG(WARNING)
<< "There are currently no image decoders installed. If you're writing "
"your own platform embedding, you can register new image decoders "
"via `ImageGeneratorRegistry::AddFactory` on the "
"`ImageGeneratorRegistry` provided by the engine. Otherwise, please "
"file a bug on https://github.com/flutter/flutter/issues.";
}
for (auto& factory : image_generator_factories_) {
std::shared_ptr<ImageGenerator> result = factory.callback(buffer);
if (result) {
return result;
}
}
return nullptr;
}
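// A hedged sketch of how a custom embedder could register its own decoder, as
// suggested by the warning above. `MyImageGenerator` is a hypothetical type
// implementing `ImageGenerator`; only `AddFactory` is API defined in this
// file, and `registry` is assumed to be the engine-provided instance.
//
//   registry.AddFactory(
//       [](sk_sp<SkData> buffer) -> std::shared_ptr<ImageGenerator> {
//         return MyImageGenerator::MakeFromData(std::move(buffer));
//       },
//       /*priority=*/1);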
fml::WeakPtr<ImageGeneratorRegistry> ImageGeneratorRegistry::GetWeakPtr()
const {
return weak_factory_.GetWeakPtr();
}
} // namespace flutter
| engine/lib/ui/painting/image_generator_registry.cc/0 | {
"file_path": "engine/lib/ui/painting/image_generator_registry.cc",
"repo_id": "engine",
"token_count": 1085
} | 240 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/lib/ui/painting/path_measure.h"
#include <cmath>
#include "flutter/lib/ui/floating_point.h"
#include "flutter/lib/ui/painting/matrix.h"
#include "flutter/lib/ui/ui_dart_state.h"
#include "third_party/tonic/converter/dart_converter.h"
#include "third_party/tonic/dart_args.h"
#include "third_party/tonic/dart_binding_macros.h"
#include "third_party/tonic/dart_library_natives.h"
namespace flutter {
typedef CanvasPathMeasure PathMeasure;
IMPLEMENT_WRAPPERTYPEINFO(ui, PathMeasure);
void CanvasPathMeasure::Create(Dart_Handle wrapper,
const CanvasPath* path,
bool forceClosed) {
UIDartState::ThrowIfUIOperationsProhibited();
fml::RefPtr<CanvasPathMeasure> pathMeasure =
fml::MakeRefCounted<CanvasPathMeasure>();
if (path) {
const SkPath& skPath = path->path();
SkScalar resScale = 1;
pathMeasure->path_measure_ =
std::make_unique<SkContourMeasureIter>(skPath, forceClosed, resScale);
} else {
pathMeasure->path_measure_ = std::make_unique<SkContourMeasureIter>();
}
pathMeasure->AssociateWithDartWrapper(wrapper);
}
CanvasPathMeasure::CanvasPathMeasure() {}
CanvasPathMeasure::~CanvasPathMeasure() {}
void CanvasPathMeasure::setPath(const CanvasPath* path, bool isClosed) {
const SkPath& skPath = path->path();
path_measure_->reset(skPath, isClosed);
}
double CanvasPathMeasure::getLength(int contour_index) {
if (static_cast<std::vector<sk_sp<SkContourMeasure>>::size_type>(
contour_index) < measures_.size()) {
return measures_[contour_index]->length();
}
return -1;
}
tonic::Float32List CanvasPathMeasure::getPosTan(int contour_index,
double distance) {
tonic::Float32List posTan(Dart_NewTypedData(Dart_TypedData_kFloat32, 5));
posTan[0] = 0; // dart code will check for this for failure
if (static_cast<std::vector<sk_sp<SkContourMeasure>>::size_type>(
contour_index) >= measures_.size()) {
return posTan;
}
SkPoint pos;
SkVector tan;
float fdistance = SafeNarrow(distance);
bool success = measures_[contour_index]->getPosTan(fdistance, &pos, &tan);
if (success) {
posTan[0] = 1; // dart code will check for this for success
posTan[1] = pos.x();
posTan[2] = pos.y();
posTan[3] = tan.x();
posTan[4] = tan.y();
}
return posTan;
}
void CanvasPathMeasure::getSegment(Dart_Handle path_handle,
int contour_index,
double start_d,
double stop_d,
bool start_with_move_to) {
if (static_cast<std::vector<sk_sp<SkContourMeasure>>::size_type>(
contour_index) >= measures_.size()) {
CanvasPath::Create(path_handle);
return;
}
SkPath dst;
bool success = measures_[contour_index]->getSegment(
SafeNarrow(start_d), SafeNarrow(stop_d), &dst, start_with_move_to);
if (!success) {
CanvasPath::Create(path_handle);
} else {
CanvasPath::CreateFrom(path_handle, dst);
}
}
bool CanvasPathMeasure::isClosed(int contour_index) {
if (static_cast<std::vector<sk_sp<SkContourMeasure>>::size_type>(
contour_index) < measures_.size()) {
return measures_[contour_index]->isClosed();
}
return false;
}
bool CanvasPathMeasure::nextContour() {
auto measure = path_measure_->next();
if (measure) {
measures_.push_back(std::move(measure));
return true;
}
return false;
}
} // namespace flutter
| engine/lib/ui/painting/path_measure.cc/0 | {
"file_path": "engine/lib/ui/painting/path_measure.cc",
"repo_id": "engine",
"token_count": 1553
} | 241 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_LIB_UI_PAINTING_SINGLE_FRAME_CODEC_H_
#define FLUTTER_LIB_UI_PAINTING_SINGLE_FRAME_CODEC_H_
#include "flutter/fml/macros.h"
#include "flutter/lib/ui/painting/codec.h"
#include "flutter/lib/ui/painting/image.h"
#include "flutter/lib/ui/painting/image_decoder.h"
#include "flutter/lib/ui/painting/image_descriptor.h"
namespace flutter {
class SingleFrameCodec : public Codec {
public:
SingleFrameCodec(const fml::RefPtr<ImageDescriptor>& descriptor,
uint32_t target_width,
uint32_t target_height);
~SingleFrameCodec() override;
// |Codec|
int frameCount() const override;
// |Codec|
int repetitionCount() const override;
// |Codec|
Dart_Handle getNextFrame(Dart_Handle args) override;
private:
enum class Status { kNew, kInProgress, kComplete };
Status status_ = Status::kNew;
fml::RefPtr<ImageDescriptor> descriptor_;
uint32_t target_width_;
uint32_t target_height_;
fml::RefPtr<CanvasImage> cached_image_;
std::vector<tonic::DartPersistentValue> pending_callbacks_;
FML_FRIEND_MAKE_REF_COUNTED(SingleFrameCodec);
FML_FRIEND_REF_COUNTED_THREAD_SAFE(SingleFrameCodec);
};
} // namespace flutter
#endif // FLUTTER_LIB_UI_PAINTING_SINGLE_FRAME_CODEC_H_
| engine/lib/ui/painting/single_frame_codec.h/0 | {
"file_path": "engine/lib/ui/painting/single_frame_codec.h",
"repo_id": "engine",
"token_count": 552
} | 242 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_LIB_UI_SEMANTICS_SEMANTICS_UPDATE_H_
#define FLUTTER_LIB_UI_SEMANTICS_SEMANTICS_UPDATE_H_
#include "flutter/lib/ui/dart_wrapper.h"
#include "flutter/lib/ui/semantics/custom_accessibility_action.h"
#include "flutter/lib/ui/semantics/semantics_node.h"
namespace flutter {
class SemanticsUpdate : public RefCountedDartWrappable<SemanticsUpdate> {
DEFINE_WRAPPERTYPEINFO();
FML_FRIEND_MAKE_REF_COUNTED(SemanticsUpdate);
public:
~SemanticsUpdate() override;
static void create(Dart_Handle semantics_update_handle,
SemanticsNodeUpdates nodes,
CustomAccessibilityActionUpdates actions);
SemanticsNodeUpdates takeNodes();
CustomAccessibilityActionUpdates takeActions();
void dispose();
private:
explicit SemanticsUpdate(SemanticsNodeUpdates nodes,
CustomAccessibilityActionUpdates updates);
SemanticsNodeUpdates nodes_;
CustomAccessibilityActionUpdates actions_;
};
} // namespace flutter
#endif // FLUTTER_LIB_UI_SEMANTICS_SEMANTICS_UPDATE_H_
| engine/lib/ui/semantics/semantics_update.h/0 | {
"file_path": "engine/lib/ui/semantics/semantics_update.h",
"repo_id": "engine",
"token_count": 442
} | 243 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_LIB_UI_TEXT_PARAGRAPH_BUILDER_H_
#define FLUTTER_LIB_UI_TEXT_PARAGRAPH_BUILDER_H_
#include <memory>
#include "flutter/lib/ui/dart_wrapper.h"
#include "flutter/lib/ui/painting/paint.h"
#include "flutter/lib/ui/text/paragraph.h"
#include "flutter/third_party/txt/src/txt/paragraph_builder.h"
#include "third_party/tonic/typed_data/typed_list.h"
namespace flutter {
class Paragraph;
class ParagraphBuilder : public RefCountedDartWrappable<ParagraphBuilder> {
DEFINE_WRAPPERTYPEINFO();
FML_FRIEND_MAKE_REF_COUNTED(ParagraphBuilder);
public:
static void Create(Dart_Handle wrapper,
Dart_Handle encoded_handle,
Dart_Handle strutData,
const std::string& fontFamily,
const std::vector<std::string>& strutFontFamilies,
double fontSize,
double height,
const std::u16string& ellipsis,
const std::string& locale);
~ParagraphBuilder() override;
void pushStyle(const tonic::Int32List& encoded,
const std::vector<std::string>& fontFamilies,
double fontSize,
double letterSpacing,
double wordSpacing,
double height,
double decorationThickness,
const std::string& locale,
Dart_Handle background_objects,
Dart_Handle background_data,
Dart_Handle foreground_objects,
Dart_Handle foreground_data,
Dart_Handle shadows_data,
Dart_Handle font_features_data,
Dart_Handle font_variations_data);
void pop();
Dart_Handle addText(const std::u16string& text);
// Pushes the information required to leave an open space into which Flutter
// may draw a custom placeholder.
//
// Internally, this method adds a single object replacement character (0xFFFC)
// and emplaces a new PlaceholderRun instance to the vector of inline
// placeholders.
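//
// A hedged sketch of the corresponding dart:ui call that ends up here (the
// exact Dart-side signature is an assumption of this comment, not defined in
// this header):
//
//   builder.addPlaceholder(40.0, 20.0, ui.PlaceholderAlignment.middle);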
void addPlaceholder(double width,
double height,
unsigned alignment,
double baseline_offset,
unsigned baseline);
void build(Dart_Handle paragraph_handle);
private:
explicit ParagraphBuilder(Dart_Handle encoded,
Dart_Handle strutData,
const std::string& fontFamily,
const std::vector<std::string>& strutFontFamilies,
double fontSize,
double height,
const std::u16string& ellipsis,
const std::string& locale);
std::unique_ptr<txt::ParagraphBuilder> m_paragraph_builder_;
};
} // namespace flutter
#endif // FLUTTER_LIB_UI_TEXT_PARAGRAPH_BUILDER_H_
| engine/lib/ui/text/paragraph_builder.h/0 | {
"file_path": "engine/lib/ui/text/paragraph_builder.h",
"repo_id": "engine",
"token_count": 1419
} | 244 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_LIB_UI_WINDOW_POINTER_DATA_PACKET_CONVERTER_H_
#define FLUTTER_LIB_UI_WINDOW_POINTER_DATA_PACKET_CONVERTER_H_
#include <cstring>
#include <map>
#include <memory>
#include <vector>
#include "flutter/fml/macros.h"
#include "flutter/lib/ui/window/pointer_data_packet.h"
namespace flutter {
//------------------------------------------------------------------------------
/// The current information about a pointer.
///
/// This struct is used by PointerDataPacketConverter to fill in necessary
/// information for the raw pointer packet sent from embedding. This struct also
/// stores the button state of the last pointer down, up, move, or hover event.
/// When an embedder issues a pointer up or down event where the pointer's
/// position has changed since the last move or hover event,
/// PointerDataPacketConverter generates a synthetic move or hover to notify the
/// framework. In these cases, these events must be issued with the button state
/// prior to the pointer up or down.
///
struct PointerState {
int64_t pointer_identifier;
bool is_down;
bool is_pan_zoom_active;
double physical_x;
double physical_y;
double pan_x;
double pan_y;
double scale;
double rotation;
int64_t buttons;
};
//------------------------------------------------------------------------------
/// Converter to convert the raw pointer data packet from the platforms.
///
/// The framework requires certain information to process pointer data, e.g. a
/// pointer identifier and the delta of pointer movement. The converter keeps
/// track of each pointer's state and fills in that information appropriately.
///
/// The converter is also responsible for providing a clean pointer data
/// stream. It will attempt to correct the stream if it contains illegal
/// pointer transitions.
///
/// Example 1 Missing Add:
///
/// Down(position x) -> Up(position x)
///
/// ###After Conversion###
///
/// Synthesized_Add(position x) -> Down(position x) -> Up(position x)
///
/// Example 2 Missing Move:
///
/// Add(position x) -> Down(position x) -> Move(position y) ->
/// Up(position z)
///
/// ###After Conversion###
///
/// Add(position x) -> Down(position x) -> Move(position y) ->
/// Synthesized_Move(position z) -> Up(position z)
///
/// Platform view is the only client that uses this class to convert all the
/// incoming pointer packets and is responsible for the life cycle of its
/// instance.
///
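/// A minimal usage sketch, assuming the platform view owns the converter and
/// receives raw packets from the embedder:
///
///   PointerDataPacketConverter converter;
///   std::unique_ptr<PointerDataPacket> converted =
///       converter.Convert(std::move(raw_packet));
///
/// `converted` then carries pointer identifiers, deltas, and any synthesized
/// add/move events needed to keep the stream legal.
///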
class PointerDataPacketConverter {
public:
PointerDataPacketConverter();
~PointerDataPacketConverter();
//----------------------------------------------------------------------------
/// @brief Converts a pointer data packet into a form that the framework
/// understands. The raw pointer data packet from the embedding does
/// not have sufficient information and may contain illegal
/// pointer transitions. This method will fill out that
/// information and attempt to correct pointer transitions.
///
/// @param[in] packet The raw pointer packet sent from
/// embedding.
///
/// @return A full converted packet with all the required information
/// filled.
/// It may contain synthetic pointer data as the result of
/// converter's attempt to correct illegal pointer transitions.
///
std::unique_ptr<PointerDataPacket> Convert(
std::unique_ptr<PointerDataPacket> packet);
private:
std::map<int64_t, PointerState> states_;
int64_t pointer_ = 0;
void ConvertPointerData(PointerData pointer_data,
std::vector<PointerData>& converted_pointers);
PointerState EnsurePointerState(PointerData pointer_data);
void UpdateDeltaAndState(PointerData& pointer_data, PointerState& state);
void UpdatePointerIdentifier(PointerData& pointer_data,
PointerState& state,
bool start_new_pointer);
bool LocationNeedsUpdate(const PointerData pointer_data,
const PointerState state);
FML_DISALLOW_COPY_AND_ASSIGN(PointerDataPacketConverter);
};
} // namespace flutter
#endif // FLUTTER_LIB_UI_WINDOW_POINTER_DATA_PACKET_CONVERTER_H_
| engine/lib/ui/window/pointer_data_packet_converter.h/0 | {
"file_path": "engine/lib/ui/window/pointer_data_packet_converter.h",
"repo_id": "engine",
"token_count": 1407
} | 245 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'package:args/command_runner.dart';
import 'package:path/path.dart' as path;
import 'package:watcher/watcher.dart';
import 'environment.dart';
import 'exceptions.dart';
import 'pipeline.dart';
import 'utils.dart';
const Map<String, String> targetAliases = <String, String>{
'sdk': 'flutter/web_sdk',
'web_sdk': 'flutter/web_sdk',
'canvaskit': 'flutter/third_party/canvaskit:canvaskit_group',
'canvaskit_chromium': 'flutter/third_party/canvaskit:canvaskit_chromium_group',
'skwasm': 'flutter/third_party/canvaskit:skwasm_group',
'archive': 'flutter/web_sdk:flutter_web_sdk_archive',
};
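// An illustrative felt invocation that exercises these aliases (the exact
// alias set may change; see the map above):
//
//   felt build canvaskit sdk
//
// which expands to the GN labels
// `flutter/third_party/canvaskit:canvaskit_group` and `flutter/web_sdk`.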
class BuildCommand extends Command<bool> with ArgUtils<bool> {
BuildCommand() {
argParser.addFlag(
'watch',
abbr: 'w',
help: 'Run the build in watch mode so it rebuilds whenever a change is '
'made. Disabled by default.',
);
argParser.addFlag(
'host',
help: 'Build the host build instead of the wasm build, which is '
'currently needed for `flutter run --local-engine` to work.'
);
argParser.addFlag(
'profile',
help: 'Build in profile mode instead of release mode. In this mode, the '
'output will be located at "out/wasm_profile".\nThis only applies to '
'the wasm build. The host build is always built in release mode.',
);
argParser.addFlag(
'debug',
help: 'Build in debug mode instead of release mode. In this mode, the '
'output will be located at "out/wasm_debug".\nThis only applies to '
'the wasm build. The host build is always built in release mode.',
);
argParser.addFlag(
'dwarf',
help: 'Embed DWARF debugging info into the output wasm modules. This is '
'only valid in debug mode.',
);
}
@override
String get name => 'build';
@override
String get description => 'Build the Flutter web engine.';
bool get isWatchMode => boolArg('watch');
bool get host => boolArg('host');
List<String> get targets => argResults?.rest ?? <String>[];
bool get embedDwarf => boolArg('dwarf');
@override
FutureOr<bool> run() async {
if (embedDwarf && runtimeMode != RuntimeMode.debug) {
throw ToolExit('Embedding DWARF data requires debug runtime mode.');
}
final FilePath libPath = FilePath.fromWebUi('lib');
final List<PipelineStep> steps = <PipelineStep>[
GnPipelineStep(
host: host,
runtimeMode: runtimeMode,
embedDwarf: embedDwarf,
),
NinjaPipelineStep(
host: host,
runtimeMode: runtimeMode,
targets: targets.map((String target) => targetAliases[target] ?? target),
),
];
final Pipeline buildPipeline = Pipeline(steps: steps);
await buildPipeline.run();
if (isWatchMode) {
print('Initial build done!');
print('Watching directory: ${libPath.relativeToCwd}/');
await PipelineWatcher(
dir: libPath.absolute,
pipeline: buildPipeline,
// Ignore font files that are copied whenever tests run.
ignore: (WatchEvent event) => event.path.endsWith('.ttf'),
).start();
}
return true;
}
}
/// Runs `gn`.
///
/// Not safe to interrupt as it may leave the `out/` directory in a corrupted
/// state. GN is pretty quick though, so it's OK to not support interruption.
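///
/// As a rough illustration (assuming a release wasm build and the default
/// flags computed below), this step ends up invoking something like:
///
///     flutter/tools/gn --web --runtime-mode=release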
class GnPipelineStep extends ProcessStep {
GnPipelineStep({
required this.host,
required this.runtimeMode,
required this.embedDwarf,
});
final bool host;
final RuntimeMode runtimeMode;
final bool embedDwarf;
@override
String get description => 'gn';
@override
bool get isSafeToInterrupt => false;
List<String> get _gnArgs {
if (host) {
return <String>[
'--unoptimized',
'--full-dart-sdk',
];
} else {
return <String>[
'--web',
'--runtime-mode=${runtimeMode.name}',
if (runtimeMode == RuntimeMode.debug)
'--unoptimized',
if (embedDwarf)
'--wasm-use-dwarf',
];
}
}
@override
Future<ProcessManager> createProcess() {
print('Running gn...');
return startProcess(
path.join(environment.flutterDirectory.path, 'tools', 'gn'),
_gnArgs,
);
}
}
/// Runs `autoninja`.
///
/// Can be safely interrupted.
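///
/// As a rough illustration (assuming a release wasm build whose output
/// directory is `out/wasm_release` and the `archive` target alias), this step
/// invokes something like:
///
///     autoninja -C out/wasm_release flutter/web_sdk:flutter_web_sdk_archive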
class NinjaPipelineStep extends ProcessStep {
NinjaPipelineStep({
required this.host,
required this.runtimeMode,
required this.targets,
});
@override
String get description => 'ninja';
@override
bool get isSafeToInterrupt => true;
final bool host;
final Iterable<String> targets;
final RuntimeMode runtimeMode;
String get buildDirectory {
if (host) {
return environment.hostDebugUnoptDir.path;
}
return getBuildDirectoryForRuntimeMode(runtimeMode).path;
}
@override
Future<ProcessManager> createProcess() {
print('Running autoninja...');
return startProcess(
'autoninja',
<String>[
'-C',
buildDirectory,
...targets,
],
);
}
}
| engine/lib/web_ui/dev/build.dart/0 | {
"file_path": "engine/lib/web_ui/dev/build.dart",
"repo_id": "engine",
"token_count": 2014
} | 246 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:convert';
import 'felt_config.dart';
String generateBuilderJson(FeltConfig config) {
final Map<String, dynamic> outputJson = <String, dynamic>{
'_comment': 'THIS IS A GENERATED FILE. Do not edit this file directly.',
'_comment2': 'See `generate_builder_json.dart` for the generator code',
'builds': <dynamic>[
_getArtifactBuildStep(),
for (final TestBundle bundle in config.testBundles)
_getBundleBuildStep(bundle),
],
'tests': _getAllTestSteps(config.testSuites)
};
return const JsonEncoder.withIndent(' ').convert(outputJson);
}
Map<String, dynamic> _getArtifactBuildStep() {
return <String, dynamic>{
'name': 'web_tests/artifacts',
'drone_dimensions': <String>[
'device_type=none',
'os=Linux',
'cores=32'
],
'gclient_variables': <String, dynamic>{
'download_android_deps': false,
'download_emsdk': true,
},
'gn': <String>[
'--web',
'--runtime-mode=release',
'--no-goma',
],
'ninja': <String, dynamic>{
'config': 'wasm_release',
'targets': <String>[
'flutter/web_sdk:flutter_web_sdk_archive'
]
},
'archives': <dynamic>[
<String, dynamic>{
'name': 'wasm_release',
'base_path': 'out/wasm_release/zip_archives/',
'type': 'gcs',
'include_paths': <String>[
'out/wasm_release/zip_archives/flutter-web-sdk.zip'
],
'realm': 'production',
}
],
'generators': <String, dynamic>{
'tasks': <dynamic>[
<String, dynamic>{
'name': 'check licenses',
'parameters': <String>[
'check-licenses'
],
'scripts': <String>[ 'flutter/lib/web_ui/dev/felt' ],
},
<String, dynamic>{
'name': 'web engine analysis',
'parameters': <String>[
'analyze'
],
'scripts': <String>[ 'flutter/lib/web_ui/dev/felt' ],
},
<String, dynamic>{
'name': 'copy artifacts for web tests',
'parameters': <String>[
'test',
'--copy-artifacts',
],
'scripts': <String>[ 'flutter/lib/web_ui/dev/felt' ],
},
]
},
};
}
Map<String, dynamic> _getBundleBuildStep(TestBundle bundle) {
return <String, dynamic>{
'name': 'web_tests/test_bundles/${bundle.name}',
'drone_dimensions': <String>[
'device_type=none',
'os=Linux',
],
'generators': <String, dynamic>{
'tasks': <dynamic>[
<String, dynamic>{
'name': 'compile bundle ${bundle.name}',
'parameters': <String>[
'test',
'--compile',
'--bundle=${bundle.name}',
],
'scripts': <String>[ 'flutter/lib/web_ui/dev/felt' ],
}
]
},
};
}
Iterable<dynamic> _getAllTestSteps(List<TestSuite> suites) {
return <dynamic>[
..._getTestStepsForPlatform(suites, 'Linux', (TestSuite suite) =>
suite.runConfig.browser == BrowserName.chrome ||
suite.runConfig.browser == BrowserName.firefox
),
// TODO(jacksongardner): Stop filtering to Mac-12 after macOS 13 issues are fixed:
// https://github.com/flutter/flutter/issues/136274,
// https://github.com/flutter/flutter/issues/136279
..._getTestStepsForPlatform(suites, 'Mac', specificOS: 'Mac-13', cpu: 'arm64', (TestSuite suite) =>
suite.runConfig.browser == BrowserName.safari
),
..._getTestStepsForPlatform(suites, 'Windows', (TestSuite suite) =>
suite.runConfig.browser == BrowserName.chrome
),
];
}
Iterable<dynamic> _getTestStepsForPlatform(
List<TestSuite> suites,
String platform,
bool Function(TestSuite suite) filter, {
String? specificOS,
String? cpu,
}) {
return suites
.where(filter)
.map((TestSuite suite) => <String, dynamic>{
'name': '$platform run ${suite.name} suite',
'recipe': 'engine_v2/tester_engine',
'drone_dimensions': <String>[
'device_type=none',
'os=${specificOS ?? platform}',
if (cpu != null) 'cpu=$cpu',
],
'gclient_variables': <String, dynamic>{
'download_android_deps': false,
},
'dependencies': <String>[
'web_tests/artifacts',
'web_tests/test_bundles/${suite.testBundle.name}',
],
'test_dependencies': <dynamic>[
<String, dynamic>{
'dependency': 'goldctl',
'version': 'git_revision:720a542f6fe4f92922c3b8f0fdcc4d2ac6bb83cd',
},
if (suite.runConfig.browser == BrowserName.chrome)
<String, dynamic>{
'dependency': 'chrome_and_driver',
'version': '119.0.6045.9',
},
if (suite.runConfig.browser == BrowserName.firefox)
<String, dynamic>{
'dependency': 'firefox',
'version': 'version:106.0',
}
],
'tasks': <dynamic>[
<String, dynamic>{
'name': 'run suite ${suite.name}',
'parameters': <String>[
'test',
'--run',
'--suite=${suite.name}'
],
'script': 'flutter/lib/web_ui/dev/felt',
}
]
}
);
}
| engine/lib/web_ui/dev/generate_builder_json.dart/0 | {
"file_path": "engine/lib/web_ui/dev/generate_builder_json.dart",
"repo_id": "engine",
"token_count": 2606
} | 247 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'dart:math';
import 'package:image/image.dart';
import 'package:webdriver/async_io.dart' show WebDriver, createDriver;
import 'browser.dart';
abstract class WebDriverBrowserEnvironment extends BrowserEnvironment {
late int portNumber;
late final Process _driverProcess;
Future<Process> spawnDriverProcess();
Uri get driverUri;
/// Finds and returns an unused port on the test host in the local port range.
Future<int> pickUnusedPort() async {
// Use bind to allocate an unused port, then unbind from that port to
// make it available for use.
final ServerSocket socket = await ServerSocket.bind('localhost', 0);
final int port = socket.port;
await socket.close();
return port;
}
@override
Future<void> prepare() async {
portNumber = await pickUnusedPort();
_driverProcess = await spawnDriverProcess();
_driverProcess.stderr
.transform(utf8.decoder)
.transform(const LineSplitter())
.listen((String error) {
print('[Webdriver][Error] $error');
});
_driverProcess.stdout
.transform(utf8.decoder)
.transform(const LineSplitter())
.listen((String log) {
print('[Webdriver] $log');
});
}
@override
Future<void> cleanup() async {
_driverProcess.kill();
}
@override
Future<Browser> launchBrowserInstance(Uri url, {bool debug = false}) async {
while (true) {
try {
final WebDriver driver = await createDriver(
uri: driverUri, desired: <String, dynamic>{'browserName': packageTestRuntime.identifier});
return WebDriverBrowser(driver, url);
} on SocketException {
// Sometimes we may try to connect before the web driver port is ready.
// So we should retry here. Note that if there was some issue with the
// webdriver process, we may loop infinitely here, so we're relying on
// the test timeout to kill us if it takes too long to connect.
print('Failed to connect to webdriver process. Retrying in 100 ms');
await Future<void>.delayed(const Duration(milliseconds: 100));
} catch (exception) {
rethrow;
}
}
}
}
class WebDriverBrowser extends Browser {
WebDriverBrowser(this._driver, this._url) {
_driver.get(_url);
}
final WebDriver _driver;
final Uri _url;
final Completer<void> _onExitCompleter = Completer<void>();
@override
Future<void> close() async {
await (await _driver.window).close();
if (!_onExitCompleter.isCompleted) {
_onExitCompleter.complete();
}
}
@override
Future<void> get onExit => _onExitCompleter.future;
@override
bool get supportsScreenshots => true;
@override
Future<Image> captureScreenshot(Rectangle<num> region) async {
final Image image = decodePng(await _driver.captureScreenshotAsList())!;
return copyCrop(image, region.left.round(), region.top.round(),
region.width.round(), region.height.round());
}
}
| engine/lib/web_ui/dev/webdriver_browser.dart/0 | {
"file_path": "engine/lib/web_ui/dev/webdriver_browser.dart",
"repo_id": "engine",
"token_count": 1108
} | 248 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This is identical to ../../../../ui/channel_buffers.dart with the
// following exceptions:
//
// * All comments except this one are removed.
// * _invokeX is replaced with engine.invokeX (X=1,2)
// * _printDebug is replaced with print in an assert.
part of ui;
typedef DrainChannelCallback = Future<void> Function(ByteData? data, PlatformMessageResponseCallback callback);
typedef ChannelCallback = void Function(ByteData? data, PlatformMessageResponseCallback callback);
class _ChannelCallbackRecord {
_ChannelCallbackRecord(this._callback) : _zone = Zone.current;
final ChannelCallback _callback;
final Zone _zone;
void invoke(ByteData? dataArg, PlatformMessageResponseCallback callbackArg) {
engine.invoke2<ByteData?, PlatformMessageResponseCallback>(_callback, _zone, dataArg, callbackArg);
}
}
class _StoredMessage {
_StoredMessage(this.data, this._callback) : _zone = Zone.current;
final ByteData? data;
final PlatformMessageResponseCallback _callback;
final Zone _zone;
void invoke(ByteData? dataArg) {
engine.invoke1(_callback, _zone, dataArg);
}
}
class _Channel {
_Channel([ this._capacity = ChannelBuffers.kDefaultBufferSize ])
: _queue = collection.ListQueue<_StoredMessage>(_capacity);
final collection.ListQueue<_StoredMessage> _queue;
int get length => _queue.length;
bool debugEnableDiscardWarnings = true;
int get capacity => _capacity;
int _capacity;
set capacity(int newSize) {
_capacity = newSize;
_dropOverflowMessages(newSize);
}
bool _draining = false;
bool push(_StoredMessage message) {
if (!_draining && _channelCallbackRecord != null) {
assert(_queue.isEmpty);
_channelCallbackRecord!.invoke(message.data, message.invoke);
return false;
}
if (_capacity <= 0) {
return debugEnableDiscardWarnings;
}
final bool result = _dropOverflowMessages(_capacity - 1);
_queue.addLast(message);
return result;
}
_StoredMessage pop() => _queue.removeFirst();
bool _dropOverflowMessages(int lengthLimit) {
bool result = false;
while (_queue.length > lengthLimit) {
final _StoredMessage message = _queue.removeFirst();
message.invoke(null); // send empty reply to the plugin side
result = true;
}
return result;
}
_ChannelCallbackRecord? _channelCallbackRecord;
void setListener(ChannelCallback callback) {
final bool needDrain = _channelCallbackRecord == null;
_channelCallbackRecord = _ChannelCallbackRecord(callback);
if (needDrain && !_draining) {
_drain();
}
}
void clearListener() {
_channelCallbackRecord = null;
}
void _drain() {
assert(!_draining);
_draining = true;
scheduleMicrotask(_drainStep);
}
void _drainStep() {
assert(_draining);
if (_queue.isNotEmpty && _channelCallbackRecord != null) {
final _StoredMessage message = pop();
_channelCallbackRecord!.invoke(message.data, message.invoke);
scheduleMicrotask(_drainStep);
} else {
_draining = false;
}
}
}
class ChannelBuffers {
ChannelBuffers();
static const int kDefaultBufferSize = 1;
static const String kControlChannelName = 'dev.flutter/channel-buffers';
final Map<String, _Channel> _channels = <String, _Channel>{};
void push(String name, ByteData? data, PlatformMessageResponseCallback callback) {
final _Channel channel = _channels.putIfAbsent(name, () => _Channel());
if (channel.push(_StoredMessage(data, callback))) {
assert(() {
print(
'A message on the $name channel was discarded before it could be handled.\n'
'This happens when a plugin sends messages to the framework side before the '
'framework has had an opportunity to register a listener. See the ChannelBuffers '
'API documentation for details on how to configure the channel to expect more '
'messages, or to expect messages to get discarded:\n'
' https://api.flutter.dev/flutter/dart-ui/ChannelBuffers-class.html'
);
return true;
}());
}
}
void setListener(String name, ChannelCallback callback) {
final _Channel channel = _channels.putIfAbsent(name, () => _Channel());
channel.setListener(callback);
}
void clearListener(String name) {
final _Channel? channel = _channels[name];
if (channel != null) {
channel.clearListener();
}
}
Future<void> drain(String name, DrainChannelCallback callback) async {
final _Channel? channel = _channels[name];
while (channel != null && !channel._queue.isEmpty) {
final _StoredMessage message = channel.pop();
await callback(message.data, message.invoke);
}
}
void handleMessage(ByteData data) {
final Uint8List bytes = data.buffer.asUint8List(data.offsetInBytes, data.lengthInBytes);
if (bytes[0] == 0x07) { // 7 = value code for string
final int methodNameLength = bytes[1];
if (methodNameLength >= 254) {
throw Exception('Unrecognized message sent to $kControlChannelName (method name too long)');
}
int index = 2; // where we are in reading the bytes
final String methodName = utf8.decode(bytes.sublist(index, index + methodNameLength));
index += methodNameLength;
switch (methodName) {
case 'resize':
if (bytes[index] != 0x0C) {
throw Exception("Invalid arguments for 'resize' method sent to $kControlChannelName (arguments must be a two-element list, channel name and new capacity)");
}
index += 1;
if (bytes[index] < 0x02) {
throw Exception("Invalid arguments for 'resize' method sent to $kControlChannelName (arguments must be a two-element list, channel name and new capacity)");
}
index += 1;
if (bytes[index] != 0x07) {
throw Exception("Invalid arguments for 'resize' method sent to $kControlChannelName (first argument must be a string)");
}
index += 1;
final int channelNameLength = bytes[index];
if (channelNameLength >= 254) {
throw Exception("Invalid arguments for 'resize' method sent to $kControlChannelName (channel name must be less than 254 characters long)");
}
index += 1;
final String channelName = utf8.decode(bytes.sublist(index, index + channelNameLength));
index += channelNameLength;
if (bytes[index] != 0x03) {
throw Exception("Invalid arguments for 'resize' method sent to $kControlChannelName (second argument must be an integer in the range 0 to 2147483647)");
}
index += 1;
resize(channelName, data.getUint32(index, Endian.host));
case 'overflow':
if (bytes[index] != 0x0C) {
throw Exception("Invalid arguments for 'overflow' method sent to $kControlChannelName (arguments must be a two-element list, channel name and flag state)");
}
index += 1;
if (bytes[index] < 0x02) {
throw Exception("Invalid arguments for 'overflow' method sent to $kControlChannelName (arguments must be a two-element list, channel name and flag state)");
}
index += 1;
if (bytes[index] != 0x07) {
throw Exception("Invalid arguments for 'overflow' method sent to $kControlChannelName (first argument must be a string)");
}
index += 1;
final int channelNameLength = bytes[index];
if (channelNameLength >= 254) {
throw Exception("Invalid arguments for 'overflow' method sent to $kControlChannelName (channel name must be less than 254 characters long)");
}
index += 1;
final String channelName = utf8.decode(bytes.sublist(index, index + channelNameLength));
index += channelNameLength;
if (bytes[index] != 0x01 && bytes[index] != 0x02) {
throw Exception("Invalid arguments for 'overflow' method sent to $kControlChannelName (second argument must be a boolean)");
}
allowOverflow(channelName, bytes[index] == 0x01);
default:
throw Exception("Unrecognized method '$methodName' sent to $kControlChannelName");
}
} else {
final List<String> parts = utf8.decode(bytes).split('\r');
if (parts.length == 1 + /*arity=*/2 && parts[0] == 'resize') {
resize(parts[1], int.parse(parts[2]));
} else {
throw Exception('Unrecognized message $parts sent to $kControlChannelName.');
}
}
}
void resize(String name, int newSize) {
_Channel? channel = _channels[name];
if (channel == null) {
channel = _Channel(newSize);
_channels[name] = channel;
} else {
channel.capacity = newSize;
}
}
void allowOverflow(String name, bool allowed) {
assert(() {
_Channel? channel = _channels[name];
if (channel == null && allowed) {
channel = _Channel();
_channels[name] = channel;
}
channel?.debugEnableDiscardWarnings = !allowed;
return true;
}());
}
void sendChannelUpdate(String name, {required bool listening}) {}
}
final ChannelBuffers channelBuffers = ChannelBuffers();
| engine/lib/web_ui/lib/channel_buffers.dart/0 | {
"file_path": "engine/lib/web_ui/lib/channel_buffers.dart",
"repo_id": "engine",
"token_count": 3377
} | 249 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file is transformed during the build process into a single library with
// part files (`dart:_engine`) by performing the following:
//
// - Replace all exports with part directives.
// - Rewrite the libraries into `part of` part files without imports.
// - Add imports to this file sufficient to cover the needs of `dart:_engine`.
//
// The code that performs the transformations lives in:
//
// - https://github.com/flutter/engine/blob/main/web_sdk/sdk_rewriter.dart
// ignore: unnecessary_library_directive
library engine;
export 'engine/alarm_clock.dart';
export 'engine/app_bootstrap.dart';
export 'engine/browser_detection.dart';
export 'engine/canvas_pool.dart';
export 'engine/canvaskit/canvas.dart';
export 'engine/canvaskit/canvaskit_api.dart';
export 'engine/canvaskit/canvaskit_canvas.dart';
export 'engine/canvaskit/color_filter.dart';
export 'engine/canvaskit/display_canvas_factory.dart';
export 'engine/canvaskit/embedded_views.dart';
export 'engine/canvaskit/fonts.dart';
export 'engine/canvaskit/image.dart';
export 'engine/canvaskit/image_filter.dart';
export 'engine/canvaskit/image_wasm_codecs.dart';
export 'engine/canvaskit/image_web_codecs.dart';
export 'engine/canvaskit/layer.dart';
export 'engine/canvaskit/layer_scene_builder.dart';
export 'engine/canvaskit/layer_tree.dart';
export 'engine/canvaskit/mask_filter.dart';
export 'engine/canvaskit/multi_surface_rasterizer.dart';
export 'engine/canvaskit/n_way_canvas.dart';
export 'engine/canvaskit/native_memory.dart';
export 'engine/canvaskit/offscreen_canvas_rasterizer.dart';
export 'engine/canvaskit/overlay_scene_optimizer.dart';
export 'engine/canvaskit/painting.dart';
export 'engine/canvaskit/path.dart';
export 'engine/canvaskit/path_metrics.dart';
export 'engine/canvaskit/picture.dart';
export 'engine/canvaskit/picture_recorder.dart';
export 'engine/canvaskit/raster_cache.dart';
export 'engine/canvaskit/rasterizer.dart';
export 'engine/canvaskit/render_canvas.dart';
export 'engine/canvaskit/renderer.dart';
export 'engine/canvaskit/shader.dart';
export 'engine/canvaskit/surface.dart';
export 'engine/canvaskit/text.dart';
export 'engine/canvaskit/text_fragmenter.dart';
export 'engine/canvaskit/util.dart';
export 'engine/canvaskit/vertices.dart';
export 'engine/clipboard.dart';
export 'engine/color_filter.dart';
export 'engine/configuration.dart';
export 'engine/display.dart';
export 'engine/dom.dart';
export 'engine/engine_canvas.dart';
export 'engine/font_change_util.dart';
export 'engine/font_fallback_data.dart';
export 'engine/font_fallbacks.dart';
export 'engine/fonts.dart';
export 'engine/frame_reference.dart';
export 'engine/frame_timing_recorder.dart';
export 'engine/html/backdrop_filter.dart';
export 'engine/html/bitmap_canvas.dart';
export 'engine/html/canvas.dart';
export 'engine/html/clip.dart';
export 'engine/html/color_filter.dart';
export 'engine/html/debug_canvas_reuse_overlay.dart';
export 'engine/html/dom_canvas.dart';
export 'engine/html/image_filter.dart';
export 'engine/html/offset.dart';
export 'engine/html/opacity.dart';
export 'engine/html/painting.dart';
export 'engine/html/path/conic.dart';
export 'engine/html/path/cubic.dart';
export 'engine/html/path/path.dart';
export 'engine/html/path/path_iterator.dart';
export 'engine/html/path/path_metrics.dart';
export 'engine/html/path/path_ref.dart';
export 'engine/html/path/path_to_svg.dart';
export 'engine/html/path/path_utils.dart';
export 'engine/html/path/path_windings.dart';
export 'engine/html/path/tangent.dart';
export 'engine/html/path_to_svg_clip.dart';
export 'engine/html/picture.dart';
export 'engine/html/platform_view.dart';
export 'engine/html/recording_canvas.dart';
export 'engine/html/render_vertices.dart';
export 'engine/html/renderer.dart';
export 'engine/html/resource_manager.dart';
export 'engine/html/scene.dart';
export 'engine/html/scene_builder.dart';
export 'engine/html/shader_mask.dart';
export 'engine/html/shaders/image_shader.dart';
export 'engine/html/shaders/normalized_gradient.dart';
export 'engine/html/shaders/shader.dart';
export 'engine/html/shaders/shader_builder.dart';
export 'engine/html/shaders/vertex_shaders.dart';
export 'engine/html/surface.dart';
export 'engine/html/surface_stats.dart';
export 'engine/html/transform.dart';
export 'engine/html_image_codec.dart';
export 'engine/image_decoder.dart';
export 'engine/initialization.dart';
export 'engine/js_interop/js_app.dart';
export 'engine/js_interop/js_loader.dart';
export 'engine/js_interop/js_promise.dart';
export 'engine/js_interop/js_typed_data.dart';
export 'engine/key_map.g.dart';
export 'engine/keyboard_binding.dart';
export 'engine/layers.dart';
export 'engine/mouse/context_menu.dart';
export 'engine/mouse/cursor.dart';
export 'engine/mouse/prevent_default.dart';
export 'engine/navigation/history.dart';
export 'engine/noto_font.dart';
export 'engine/noto_font_encoding.dart';
export 'engine/onscreen_logging.dart';
export 'engine/picture.dart';
export 'engine/platform_dispatcher.dart';
export 'engine/platform_dispatcher/app_lifecycle_state.dart';
export 'engine/platform_dispatcher/view_focus_binding.dart';
export 'engine/platform_views.dart';
export 'engine/platform_views/content_manager.dart';
export 'engine/platform_views/message_handler.dart';
export 'engine/platform_views/slots.dart';
export 'engine/plugins.dart';
export 'engine/pointer_binding.dart';
export 'engine/pointer_binding/event_position_helper.dart';
export 'engine/pointer_converter.dart';
export 'engine/profiler.dart';
export 'engine/raw_keyboard.dart';
export 'engine/renderer.dart';
export 'engine/rrect_renderer.dart';
export 'engine/safe_browser_api.dart';
export 'engine/scene_builder.dart';
export 'engine/scene_painting.dart';
export 'engine/scene_view.dart';
export 'engine/semantics/accessibility.dart';
export 'engine/semantics/checkable.dart';
export 'engine/semantics/dialog.dart';
export 'engine/semantics/focusable.dart';
export 'engine/semantics/image.dart';
export 'engine/semantics/incrementable.dart';
export 'engine/semantics/label_and_value.dart';
export 'engine/semantics/link.dart';
export 'engine/semantics/live_region.dart';
export 'engine/semantics/platform_view.dart';
export 'engine/semantics/scrollable.dart';
export 'engine/semantics/semantics.dart';
export 'engine/semantics/semantics_helper.dart';
export 'engine/semantics/tappable.dart';
export 'engine/semantics/text_field.dart';
export 'engine/services/buffers.dart';
export 'engine/services/message_codec.dart';
export 'engine/services/message_codecs.dart';
export 'engine/services/serialization.dart';
export 'engine/shader_data.dart';
export 'engine/shadow.dart';
export 'engine/svg.dart';
export 'engine/test_embedding.dart';
export 'engine/text/canvas_paragraph.dart';
export 'engine/text/font_collection.dart';
export 'engine/text/fragmenter.dart';
export 'engine/text/layout_fragmenter.dart';
export 'engine/text/layout_service.dart';
export 'engine/text/line_break_properties.dart';
export 'engine/text/line_breaker.dart';
export 'engine/text/measurement.dart';
export 'engine/text/paint_service.dart';
export 'engine/text/paragraph.dart';
export 'engine/text/ruler.dart';
export 'engine/text/text_direction.dart';
export 'engine/text/unicode_range.dart';
export 'engine/text/word_break_properties.dart';
export 'engine/text/word_breaker.dart';
export 'engine/text_editing/autofill_hint.dart';
export 'engine/text_editing/composition_aware_mixin.dart';
export 'engine/text_editing/input_action.dart';
export 'engine/text_editing/input_type.dart';
export 'engine/text_editing/text_capitalization.dart';
export 'engine/text_editing/text_editing.dart';
export 'engine/util.dart';
export 'engine/validators.dart';
export 'engine/vector_math.dart';
export 'engine/view_embedder/dimensions_provider/custom_element_dimensions_provider.dart';
export 'engine/view_embedder/dimensions_provider/dimensions_provider.dart';
export 'engine/view_embedder/dimensions_provider/full_page_dimensions_provider.dart';
export 'engine/view_embedder/display_dpr_stream.dart';
export 'engine/view_embedder/dom_manager.dart';
export 'engine/view_embedder/embedding_strategy/custom_element_embedding_strategy.dart';
export 'engine/view_embedder/embedding_strategy/embedding_strategy.dart';
export 'engine/view_embedder/embedding_strategy/full_page_embedding_strategy.dart';
export 'engine/view_embedder/flutter_view_manager.dart';
export 'engine/view_embedder/global_html_attributes.dart';
export 'engine/view_embedder/hot_restart_cache_handler.dart';
export 'engine/view_embedder/style_manager.dart';
export 'engine/window.dart';
| engine/lib/web_ui/lib/src/engine.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine.dart",
"repo_id": "engine",
"token_count": 3107
} | 250 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:ui/ui.dart' as ui;
import '../vector_math.dart';
import 'canvas.dart';
import 'canvaskit_api.dart';
import 'embedded_views.dart';
import 'image_filter.dart';
import 'n_way_canvas.dart';
import 'painting.dart';
import 'path.dart';
import 'picture.dart';
import 'raster_cache.dart';
/// A layer to be composed into a scene.
///
/// A layer is the lowest-level rendering primitive. It represents an atomic
/// painting command.
abstract class Layer implements ui.EngineLayer {
/// The layer that contains us as a child.
ContainerLayer? parent;
/// An estimated rectangle that this layer will draw into.
ui.Rect paintBounds = ui.Rect.zero;
/// Whether or not this layer actually needs to be painted in the scene.
bool get needsPainting => !paintBounds.isEmpty;
/// Pre-process this layer before painting.
///
/// In this step, we compute the estimated [paintBounds] as well as
/// apply heuristics to prepare the render cache for pictures that
/// should be cached.
void preroll(PrerollContext prerollContext, Matrix4 matrix);
/// Paint this layer into the scene.
void paint(PaintContext paintContext);
// TODO(dnfield): Implement ui.EngineLayer.dispose for CanvasKit.
// https://github.com/flutter/flutter/issues/82878
@override
void dispose() {}
}
/// A context shared by all layers during the preroll pass.
class PrerollContext {
PrerollContext(this.rasterCache, this.viewEmbedder);
/// A raster cache. Used to register candidates for caching.
final RasterCache? rasterCache;
/// A compositor for embedded HTML views.
final HtmlViewEmbedder? viewEmbedder;
final MutatorsStack mutatorsStack = MutatorsStack();
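/// The cull rect implied by the clip mutators currently on [mutatorsStack]:
/// the intersection of every pushed clip rect, clip rrect (its outer rect),
/// and clip path (its bounds). Non-clip mutators are ignored.
///
/// Illustrative sketch (hypothetical values): a clipRect of (0, 0, 100, 100)
/// combined with a clipRRect whose outer rect is (50, 50, 200, 200) yields a
/// cull rect of (50, 50, 100, 100).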
ui.Rect get cullRect {
ui.Rect cullRect = ui.Rect.largest;
for (final Mutator m in mutatorsStack) {
ui.Rect clipRect;
switch (m.type) {
case MutatorType.clipRect:
clipRect = m.rect!;
case MutatorType.clipRRect:
clipRect = m.rrect!.outerRect;
case MutatorType.clipPath:
clipRect = m.path!.getBounds();
default:
continue;
}
cullRect = cullRect.intersect(clipRect);
}
return cullRect;
}
}
/// A context shared by all layers during the paint pass.
class PaintContext {
PaintContext(
this.internalNodesCanvas,
this.leafNodesCanvas,
this.rasterCache,
this.viewEmbedder,
);
/// A multi-canvas that applies clips, transforms, and opacity
/// operations to all canvases (root canvas and overlay canvases for the
/// platform views).
CkNWayCanvas internalNodesCanvas;
/// The canvas for leaf nodes to paint to.
CkCanvas? leafNodesCanvas;
/// A raster cache potentially containing pre-rendered pictures.
final RasterCache? rasterCache;
/// A compositor for embedded HTML views.
final HtmlViewEmbedder? viewEmbedder;
}
/// A layer that contains child layers.
abstract class ContainerLayer extends Layer {
final List<Layer> _layers = <Layer>[];
/// The list of child layers.
///
/// Useful in tests.
List<Layer> get debugLayers => _layers;
/// Register [child] as a child of this layer.
void add(Layer child) {
child.parent = this;
_layers.add(child);
}
@override
void preroll(PrerollContext prerollContext, Matrix4 matrix) {
paintBounds = prerollChildren(prerollContext, matrix);
}
/// Run [preroll] on all of the child layers.
///
/// Returns a [Rect] that covers the paint bounds of all of the child layers.
/// If all of the child layers have empty paint bounds, then the returned
/// [Rect] is empty.
ui.Rect prerollChildren(PrerollContext context, Matrix4 childMatrix) {
ui.Rect childPaintBounds = ui.Rect.zero;
for (final Layer layer in _layers) {
layer.preroll(context, childMatrix);
if (childPaintBounds.isEmpty) {
childPaintBounds = layer.paintBounds;
} else if (!layer.paintBounds.isEmpty) {
childPaintBounds = childPaintBounds.expandToInclude(layer.paintBounds);
}
}
return childPaintBounds;
}
/// Calls [paint] on all child layers that need painting.
void paintChildren(PaintContext context) {
assert(needsPainting);
for (final Layer layer in _layers) {
if (layer.needsPainting) {
layer.paint(context);
}
}
}
}
/// The top-most layer in the layer tree.
///
/// This layer does not draw anything. It's only used so we can add leaf layers
/// to [LayerSceneBuilder] without requiring a [ContainerLayer].
class RootLayer extends ContainerLayer {
@override
void paint(PaintContext paintContext) {
paintChildren(paintContext);
}
}
class BackdropFilterEngineLayer extends ContainerLayer
implements ui.BackdropFilterEngineLayer {
BackdropFilterEngineLayer(this._filter, this._blendMode);
final ui.ImageFilter _filter;
final ui.BlendMode _blendMode;
@override
void preroll(PrerollContext prerollContext, Matrix4 matrix) {
final ui.Rect childBounds = prerollChildren(prerollContext, matrix);
paintBounds = childBounds.expandToInclude(prerollContext.cullRect);
}
@override
void paint(PaintContext paintContext) {
final CkPaint paint = CkPaint()..blendMode = _blendMode;
// Only apply the backdrop filter to the current canvas. If we apply the
// backdrop filter to every canvas (i.e. by applying it to the
// [internalNodesCanvas]), then later when we compose the canvases into a
// single canvas, the backdrop filter will be applied multiple times.
final CkCanvas currentCanvas = paintContext.leafNodesCanvas!;
currentCanvas.saveLayerWithFilter(paintBounds, _filter, paint);
paint.dispose();
paintChildren(paintContext);
currentCanvas.restore();
}
// TODO(dnfield): dispose of the _filter
// https://github.com/flutter/flutter/issues/82832
}
/// A layer that clips its child layers by a given [Path].
class ClipPathEngineLayer extends ContainerLayer
implements ui.ClipPathEngineLayer {
ClipPathEngineLayer(this._clipPath, this._clipBehavior)
: assert(_clipBehavior != ui.Clip.none);
/// The path used to clip child layers.
final CkPath _clipPath;
final ui.Clip _clipBehavior;
@override
void preroll(PrerollContext prerollContext, Matrix4 matrix) {
prerollContext.mutatorsStack.pushClipPath(_clipPath);
final ui.Rect childPaintBounds = prerollChildren(prerollContext, matrix);
final ui.Rect clipBounds = _clipPath.getBounds();
if (childPaintBounds.overlaps(clipBounds)) {
paintBounds = childPaintBounds.intersect(clipBounds);
}
prerollContext.mutatorsStack.pop();
}
@override
void paint(PaintContext paintContext) {
assert(needsPainting);
paintContext.internalNodesCanvas.save();
paintContext.internalNodesCanvas
.clipPath(_clipPath, _clipBehavior != ui.Clip.hardEdge);
if (_clipBehavior == ui.Clip.antiAliasWithSaveLayer) {
paintContext.internalNodesCanvas.saveLayer(paintBounds, null);
}
paintChildren(paintContext);
if (_clipBehavior == ui.Clip.antiAliasWithSaveLayer) {
paintContext.internalNodesCanvas.restore();
}
paintContext.internalNodesCanvas.restore();
}
}
/// A layer that clips its child layers by a given [Rect].
class ClipRectEngineLayer extends ContainerLayer
implements ui.ClipRectEngineLayer {
ClipRectEngineLayer(this._clipRect, this._clipBehavior)
: assert(_clipBehavior != ui.Clip.none);
/// The rectangle used to clip child layers.
final ui.Rect _clipRect;
final ui.Clip _clipBehavior;
@override
void preroll(PrerollContext prerollContext, Matrix4 matrix) {
prerollContext.mutatorsStack.pushClipRect(_clipRect);
final ui.Rect childPaintBounds = prerollChildren(prerollContext, matrix);
if (childPaintBounds.overlaps(_clipRect)) {
paintBounds = childPaintBounds.intersect(_clipRect);
}
prerollContext.mutatorsStack.pop();
}
@override
void paint(PaintContext paintContext) {
assert(needsPainting);
paintContext.internalNodesCanvas.save();
paintContext.internalNodesCanvas.clipRect(
_clipRect,
ui.ClipOp.intersect,
_clipBehavior != ui.Clip.hardEdge,
);
if (_clipBehavior == ui.Clip.antiAliasWithSaveLayer) {
paintContext.internalNodesCanvas.saveLayer(_clipRect, null);
}
paintChildren(paintContext);
if (_clipBehavior == ui.Clip.antiAliasWithSaveLayer) {
paintContext.internalNodesCanvas.restore();
}
paintContext.internalNodesCanvas.restore();
}
}
/// A layer that clips its child layers by a given [RRect].
class ClipRRectEngineLayer extends ContainerLayer
implements ui.ClipRRectEngineLayer {
ClipRRectEngineLayer(this._clipRRect, this._clipBehavior)
: assert(_clipBehavior != ui.Clip.none);
/// The rounded rectangle used to clip child layers.
final ui.RRect _clipRRect;
final ui.Clip? _clipBehavior;
@override
void preroll(PrerollContext prerollContext, Matrix4 matrix) {
prerollContext.mutatorsStack.pushClipRRect(_clipRRect);
final ui.Rect childPaintBounds = prerollChildren(prerollContext, matrix);
if (childPaintBounds.overlaps(_clipRRect.outerRect)) {
paintBounds = childPaintBounds.intersect(_clipRRect.outerRect);
}
prerollContext.mutatorsStack.pop();
}
@override
void paint(PaintContext paintContext) {
assert(needsPainting);
paintContext.internalNodesCanvas.save();
paintContext.internalNodesCanvas
.clipRRect(_clipRRect, _clipBehavior != ui.Clip.hardEdge);
if (_clipBehavior == ui.Clip.antiAliasWithSaveLayer) {
paintContext.internalNodesCanvas.saveLayer(paintBounds, null);
}
paintChildren(paintContext);
if (_clipBehavior == ui.Clip.antiAliasWithSaveLayer) {
paintContext.internalNodesCanvas.restore();
}
paintContext.internalNodesCanvas.restore();
}
}
/// A layer that paints its children with the given opacity.
class OpacityEngineLayer extends ContainerLayer
implements ui.OpacityEngineLayer {
OpacityEngineLayer(this._alpha, this._offset);
final int _alpha;
final ui.Offset _offset;
@override
void preroll(PrerollContext prerollContext, Matrix4 matrix) {
final Matrix4 childMatrix = Matrix4.copy(matrix);
childMatrix.translate(_offset.dx, _offset.dy);
prerollContext.mutatorsStack
.pushTransform(Matrix4.translationValues(_offset.dx, _offset.dy, 0.0));
prerollContext.mutatorsStack.pushOpacity(_alpha);
super.preroll(prerollContext, childMatrix);
prerollContext.mutatorsStack.pop();
prerollContext.mutatorsStack.pop();
paintBounds = paintBounds.translate(_offset.dx, _offset.dy);
}
@override
void paint(PaintContext paintContext) {
assert(needsPainting);
final CkPaint paint = CkPaint();
paint.color = ui.Color.fromARGB(_alpha, 0, 0, 0);
paintContext.internalNodesCanvas.save();
paintContext.internalNodesCanvas.translate(_offset.dx, _offset.dy);
final ui.Rect saveLayerBounds = paintBounds.shift(-_offset);
paintContext.internalNodesCanvas.saveLayer(saveLayerBounds, paint);
paint.dispose();
paintChildren(paintContext);
// Restore twice: once for the translate and once for the saveLayer.
paintContext.internalNodesCanvas.restore();
paintContext.internalNodesCanvas.restore();
}
}
/// A layer that transforms its child layers by the given transform matrix.
class TransformEngineLayer extends ContainerLayer
implements ui.TransformEngineLayer {
TransformEngineLayer(this._transform);
/// The matrix with which to transform the child layers.
final Matrix4 _transform;
@override
void preroll(PrerollContext prerollContext, Matrix4 matrix) {
final Matrix4 childMatrix = matrix.multiplied(_transform);
prerollContext.mutatorsStack.pushTransform(_transform);
final ui.Rect childPaintBounds =
prerollChildren(prerollContext, childMatrix);
paintBounds = _transform.transformRect(childPaintBounds);
prerollContext.mutatorsStack.pop();
}
@override
void paint(PaintContext paintContext) {
assert(needsPainting);
paintContext.internalNodesCanvas.save();
paintContext.internalNodesCanvas.transform(_transform.storage);
paintChildren(paintContext);
paintContext.internalNodesCanvas.restore();
}
}
/// Translates its children along x and y coordinates.
///
/// This is a thin wrapper over [TransformEngineLayer] just so the framework
/// gets the "OffsetEngineLayer" when calling `runtimeType.toString()`. This is
/// better for debugging.
class OffsetEngineLayer extends TransformEngineLayer
implements ui.OffsetEngineLayer {
OffsetEngineLayer(double dx, double dy)
: super(Matrix4.translationValues(dx, dy, 0.0));
}
/// A layer that applies an [ui.ImageFilter] to its children.
class ImageFilterEngineLayer extends ContainerLayer
implements ui.ImageFilterEngineLayer {
ImageFilterEngineLayer(this._filter, this._offset);
final ui.Offset _offset;
final ui.ImageFilter _filter;
@override
void preroll(PrerollContext prerollContext, Matrix4 matrix) {
final Matrix4 childMatrix = Matrix4.copy(matrix);
childMatrix.translate(_offset.dx, _offset.dy);
prerollContext.mutatorsStack
.pushTransform(Matrix4.translationValues(_offset.dx, _offset.dy, 0.0));
final ui.Rect childPaintBounds =
prerollChildren(prerollContext, childMatrix);
(_filter as CkManagedSkImageFilterConvertible)
.imageFilter((SkImageFilter filter) {
paintBounds =
rectFromSkIRect(filter.getOutputBounds(toSkRect(childPaintBounds)));
});
prerollContext.mutatorsStack.pop();
}
@override
void paint(PaintContext paintContext) {
assert(needsPainting);
paintContext.internalNodesCanvas.save();
paintContext.internalNodesCanvas.translate(_offset.dx, _offset.dy);
final CkPaint paint = CkPaint();
paint.imageFilter = _filter;
paintContext.internalNodesCanvas.saveLayer(paintBounds, paint);
paint.dispose();
paintChildren(paintContext);
paintContext.internalNodesCanvas.restore();
paintContext.internalNodesCanvas.restore();
}
// TODO(dnfield): dispose of the _filter
// https://github.com/flutter/flutter/issues/82832
}
class ShaderMaskEngineLayer extends ContainerLayer
implements ui.ShaderMaskEngineLayer {
ShaderMaskEngineLayer(
this.shader, this.maskRect, this.blendMode, this.filterQuality);
final ui.Shader shader;
final ui.Rect maskRect;
final ui.BlendMode blendMode;
final ui.FilterQuality filterQuality;
@override
void paint(PaintContext paintContext) {
assert(needsPainting);
paintContext.internalNodesCanvas.saveLayer(paintBounds, null);
paintChildren(paintContext);
final CkPaint paint = CkPaint();
paint.shader = shader;
paint.blendMode = blendMode;
paint.filterQuality = filterQuality;
paintContext.leafNodesCanvas!.save();
paintContext.leafNodesCanvas!.translate(maskRect.left, maskRect.top);
paintContext.leafNodesCanvas!.drawRect(
ui.Rect.fromLTWH(0, 0, maskRect.width, maskRect.height), paint);
paint.dispose();
paintContext.leafNodesCanvas!.restore();
paintContext.internalNodesCanvas.restore();
}
}
/// A layer containing a [Picture].
class PictureLayer extends Layer {
PictureLayer(this.picture, this.offset, this.isComplex, this.willChange);
/// The picture to paint into the canvas.
final CkPicture picture;
/// The offset at which to paint the picture.
final ui.Offset offset;
/// A hint to the compositor about whether this picture is complex.
final bool isComplex;
/// A hint to the compositor that this picture is likely to change.
final bool willChange;
@override
void preroll(PrerollContext prerollContext, Matrix4 matrix) {
paintBounds = picture.cullRect.shift(offset);
}
@override
void paint(PaintContext paintContext) {
assert(needsPainting);
paintContext.leafNodesCanvas!.save();
paintContext.leafNodesCanvas!.translate(offset.dx, offset.dy);
paintContext.leafNodesCanvas!.drawPicture(picture);
paintContext.leafNodesCanvas!.restore();
}
}
/// A layer which contains a [ui.ColorFilter].
class ColorFilterEngineLayer extends ContainerLayer
implements ui.ColorFilterEngineLayer {
ColorFilterEngineLayer(this.filter);
final ui.ColorFilter filter;
@override
void paint(PaintContext paintContext) {
assert(needsPainting);
final CkPaint paint = CkPaint();
paint.colorFilter = filter;
paintContext.internalNodesCanvas.saveLayer(paintBounds, paint);
paint.dispose();
paintChildren(paintContext);
paintContext.internalNodesCanvas.restore();
}
}
/// A layer which renders a platform view (an HTML element in this case).
class PlatformViewLayer extends Layer {
PlatformViewLayer(this.viewId, this.offset, this.width, this.height);
final int viewId;
final ui.Offset offset;
final double width;
final double height;
@override
void preroll(PrerollContext prerollContext, Matrix4 matrix) {
paintBounds = ui.Rect.fromLTWH(offset.dx, offset.dy, width, height);
// The view embedder is set to null when screenshotting; in that case skip
// prerolling the embedded platform view.
prerollContext.viewEmbedder?.prerollCompositeEmbeddedView(
viewId,
EmbeddedViewParams(
offset,
ui.Size(width, height),
prerollContext.mutatorsStack,
),
);
}
@override
void paint(PaintContext paintContext) {
final CkCanvas? canvas =
paintContext.viewEmbedder?.compositeEmbeddedView(viewId);
if (canvas != null) {
paintContext.leafNodesCanvas = canvas;
}
}
}
| engine/lib/web_ui/lib/src/engine/canvaskit/layer.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/canvaskit/layer.dart",
"repo_id": "engine",
"token_count": 6002
} | 251 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
abstract class Rasterizer {
/// Creates a [ViewRasterizer] for a given [view].
ViewRasterizer createViewRasterizer(EngineFlutterView view);
/// Sets the maximum size of the resource cache to [bytes].
void setResourceCacheMaxBytes(int bytes);
/// Disposes this rasterizer and all [ViewRasterizer]s that it created.
void dispose();
}
abstract class ViewRasterizer {
ViewRasterizer(this.view);
/// The view this rasterizer renders into.
final EngineFlutterView view;
/// The queue of render requests for this view.
final RenderQueue queue = RenderQueue();
/// The size of the current frame being rasterized.
ui.Size currentFrameSize = ui.Size.zero;
/// The context which is persisted between frames.
final CompositorContext context = CompositorContext();
/// The platform view embedder.
late final HtmlViewEmbedder viewEmbedder = HtmlViewEmbedder(sceneHost, this);
/// A factory for creating overlays.
DisplayCanvasFactory<DisplayCanvas> get displayFactory;
/// The scene host which this rasterizer should raster into.
DomElement get sceneHost => view.dom.sceneHost;
/// Draws the [layerTree] to the screen for the view associated with this
/// rasterizer.
Future<void> draw(LayerTree layerTree) async {
final ui.Size frameSize = view.physicalSize;
if (frameSize.isEmpty) {
// Available drawing area is empty. Skip drawing.
return;
}
currentFrameSize = frameSize;
prepareToDraw();
viewEmbedder.frameSize = currentFrameSize;
final CkPictureRecorder pictureRecorder = CkPictureRecorder();
pictureRecorder.beginRecording(ui.Offset.zero & currentFrameSize);
final Frame compositorFrame =
context.acquireFrame(pictureRecorder.recordingCanvas!, viewEmbedder);
compositorFrame.raster(layerTree, ignoreRasterCache: true);
await viewEmbedder.submitFrame(pictureRecorder.endRecording());
}
/// Do some initialization to prepare to draw a frame.
///
/// For example, in the [OffscreenCanvasRasterizer], this ensures the backing
/// [OffscreenCanvas] is the correct size to draw the frame.
void prepareToDraw();
/// Rasterize the [pictures] to the given [canvas].
Future<void> rasterizeToCanvas(
DisplayCanvas canvas, List<CkPicture> pictures);
/// Get a [DisplayCanvas] to use as an overlay.
DisplayCanvas getOverlay() {
return displayFactory.getCanvas();
}
/// Release the given [overlay] so it may be reused.
void releaseOverlay(DisplayCanvas overlay) {
displayFactory.releaseCanvas(overlay);
}
/// Release all overlays.
void releaseOverlays() {
displayFactory.releaseCanvases();
}
/// Remove all overlays that have been created from the DOM.
void removeOverlaysFromDom() {
displayFactory.removeCanvasesFromDom();
}
/// Disposes this rasterizer.
void dispose() {
viewEmbedder.dispose();
displayFactory.dispose();
}
/// Clears the state. Used in tests.
void debugClear() {
viewEmbedder.debugClear();
}
}
/// A [DisplayCanvas] is an abstraction for a canvas element which displays
/// Skia-drawn pictures to the screen. They are also sometimes called "overlays"
/// because they can be overlaid on top of platform views, which are HTML
/// content that isn't rendered by Skia.
///
/// [DisplayCanvas]es are drawn into with [ViewRasterizer.rasterizeToCanvas].
abstract class DisplayCanvas {
/// The DOM element which, when appended to the scene host, will display the
/// Skia-rendered content to the screen.
DomElement get hostElement;
/// Whether or not this overlay canvas is attached to the DOM.
bool get isConnected;
/// Initialize the overlay.
void initialize();
/// Disposes this overlay.
void dispose();
}
/// Encapsulates a request to render a [ui.Scene]. Contains the scene to render
/// and a [Completer] which completes when the scene has been rendered.
typedef RenderRequest = ({
ui.Scene scene,
Completer<void> completer,
FrameTimingRecorder? recorder,
});
/// A per-view queue of render requests. Only contains the current render
/// request and the next render request. If a new render request is made before
/// the current request is complete, then the next render request is replaced
/// with the most recently requested render and the other one is dropped.
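///
/// Illustrative sketch (hypothetical [RenderRequest] values):
///
/// ```dart
/// final RenderQueue queue = RenderQueue();
/// queue.current = firstRequest;
/// // Arrives while `firstRequest` is still rendering, so it becomes `next`.
/// queue.next = secondRequest;
/// // A later arrival overwrites `next`; `secondRequest` is dropped.
/// queue.next = thirdRequest;
/// ```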
class RenderQueue {
RenderQueue();
RenderRequest? current;
RenderRequest? next;
}
| engine/lib/web_ui/lib/src/engine/canvaskit/rasterizer.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/canvaskit/rasterizer.dart",
"repo_id": "engine",
"token_count": 1363
} | 252 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'dart:typed_data';
import 'package:ui/ui.dart' as ui;
import 'path_utils.dart';
/// Converts conic curve to a list of quadratic curves for rendering on
/// canvas or conversion to svg.
///
/// See "High order approximation of conic sections by quadratic splines"
/// by Michael Floater, 1993.
/// Skia implementation reference:
/// https://github.com/google/skia/blob/main/src/core/SkGeometry.cpp
class Conic {
Conic(this.p0x, this.p0y, this.p1x, this.p1y, this.p2x, this.p2y, this.fW);
double p0x, p0y, p1x, p1y, p2x, p2y;
final double fW;
static const int _maxSubdivisionCount = 5;
/// Returns array of points for the approximation of the conic as quad(s).
///
/// First offset is start Point. Each pair of offsets after are quadratic
/// control and end points.
List<ui.Offset> toQuads() {
final List<ui.Offset> pointList = <ui.Offset>[];
// This value specifies the error bound.
const double conicTolerance = 1.0 / 4.0;
// Based on the error bound, compute how many times we should subdivide.
final int subdivideCount = _computeSubdivisionCount(conicTolerance);
// Split the conic into quads, writing the quad coordinates into [pointList].
// Subdividing [subdivideCount] times produces 2^subdivideCount quads.
assert(subdivideCount >= 0 && subdivideCount <= _maxSubdivisionCount);
int quadCount = 1 << subdivideCount;
bool skipSubdivide = false;
pointList.add(ui.Offset(p0x, p0y));
if (subdivideCount == _maxSubdivisionCount) {
// We have an extreme number of quads, chop this conic and check if
// it generates a pair of lines, in which case we should not subdivide.
final _ConicPair dst = _ConicPair();
_chop(dst);
final Conic conic0 = dst.first!;
final Conic conic1 = dst.second!;
// If this chop generates a pair of lines, there is no need to subdivide.
if (conic0.p1x == conic0.p2x &&
conic0.p1y == conic0.p2y &&
conic1.p0x == conic1.p1x &&
conic1.p0y == conic1.p1y) {
final ui.Offset controlPointOffset = ui.Offset(conic0.p1x, conic0.p1y);
pointList.add(controlPointOffset);
pointList.add(controlPointOffset);
pointList.add(controlPointOffset);
pointList.add(ui.Offset(conic1.p2x, conic1.p2y));
quadCount = 2;
skipSubdivide = true;
}
}
if (!skipSubdivide) {
_subdivide(this, subdivideCount, pointList);
}
// If there are any non-finite generated points, pin to middle of hull.
final int pointCount = 2 * quadCount + 1;
bool hasNonFinitePoints = false;
for (int p = 0; p < pointCount; ++p) {
if (pointList[p].dx.isNaN || pointList[p].dy.isNaN) {
hasNonFinitePoints = true;
break;
}
}
if (hasNonFinitePoints) {
for (int p = 1; p < pointCount - 1; ++p) {
pointList[p] = ui.Offset(p1x, p1y);
}
}
return pointList;
}
// Subdivides a conic and writes to points list.
static void _subdivide(Conic src, int level, List<ui.Offset> pointList) {
assert(level >= 0);
if (0 == level) {
// At lowest subdivision point, copy control point and end point to
// target.
pointList.add(ui.Offset(src.p1x, src.p1y));
pointList.add(ui.Offset(src.p2x, src.p2y));
return;
}
final _ConicPair dst = _ConicPair();
src._chop(dst);
final Conic conic0 = dst.first!;
final Conic conic1 = dst.second!;
final double startY = src.p0y;
final double endY = src.p2y;
final double cpY = src.p1y;
if (SPath.between(startY, cpY, endY)) {
// Ensure that chopped conics maintain their y-order.
final double midY = conic0.p2y;
if (!SPath.between(startY, midY, endY)) {
// The computed midpoint is outside end points, move it to
// closer one.
final double closerY =
(midY - startY).abs() < (midY - endY).abs() ? startY : endY;
conic0.p2y = conic1.p0y = closerY;
}
if (!SPath.between(startY, conic0.p1y, conic0.p2y)) {
// First control point not between start and end points, move it
// to start.
conic0.p1y = startY;
}
if (!SPath.between(conic1.p0y, conic1.p1y, endY)) {
// Second control point not between start and end points, move it
// to end.
conic1.p1y = endY;
}
// Verify that conics points are ordered.
assert(SPath.between(startY, conic0.p1y, conic0.p2y));
assert(SPath.between(conic0.p1y, conic0.p2y, conic1.p1y));
assert(SPath.between(conic0.p2y, conic1.p1y, endY));
}
--level;
_subdivide(conic0, level, pointList);
_subdivide(conic1, level, pointList);
}
static double _subdivideWeightValue(double w) {
return math.sqrt(0.5 + w * 0.5);
}
// Splits conic into 2 parts based on weight.
void _chop(_ConicPair pair) {
final double scale = 1.0 / (1.0 + fW);
final double newW = _subdivideWeightValue(fW);
final ui.Offset wp1 = ui.Offset(fW * p1x, fW * p1y);
ui.Offset m = ui.Offset((p0x + (2 * wp1.dx) + p2x) * scale * 0.5,
(p0y + 2 * wp1.dy + p2y) * scale * 0.5);
if (m.dx.isNaN || m.dy.isNaN) {
final double w2 = fW * 2;
final double scaleHalf = 1.0 / (1 + fW) * 0.5;
m = ui.Offset((p0x + (w2 * p1x) + p2x) * scaleHalf,
(p0y + (w2 * p1y) + p2y) * scaleHalf);
}
pair.first = Conic(p0x, p0y, (p0x + wp1.dx) * scale, (p0y + wp1.dy) * scale,
m.dx, m.dy, newW);
pair.second = Conic(m.dx, m.dy, (p2x + wp1.dx) * scale,
(p2y + wp1.dy) * scale, p2x, p2y, newW);
}
void chopAtYExtrema(List<Conic> dst) {
final double? t = _findYExtrema();
if (t == null) {
dst.add(this);
return;
}
if (!_chopAt(t, dst, cleanupMiddle: true)) {
// If chop can't return finite values, don't chop.
dst.add(this);
return;
}
}
///////////////////////////////////////////////////////////////////////////////
//
// NURB representation for conics. Helpful explanations at:
//
// http://citeseerx.ist.psu.edu/viewdoc/
// download?doi=10.1.1.44.5740&rep=rep1&type=ps
// and
// http://www.cs.mtu.edu/~shene/COURSES/cs3621/NOTES/spline/NURBS/RB-conics.html
//
// F = (A (1 - t)^2 + C t^2 + 2 B (1 - t) t w)
// ------------------------------------------
// ((1 - t)^2 + t^2 + 2 (1 - t) t w)
//
// = {t^2 (P0 + P2 - 2 P1 w), t (-2 P0 + 2 P1 w), P0}
// ------------------------------------------------
// {t^2 (2 - 2 w), t (-2 + 2 w), 1}
//
// F' = 2 (C t (1 + t (-1 + w)) - A (-1 + t) (t (-1 + w) - w) + B (1 - 2 t) w)
//
// t^2 : (2 P0 - 2 P2 - 2 P0 w + 2 P2 w)
// t^1 : (-2 P0 + 2 P2 + 4 P0 w - 4 P1 w)
// t^0 : -2 P0 w + 2 P1 w
//
// We disregard magnitude, so we can freely ignore the denominator of F', and
// divide the numerator by 2
//
// coeff[0] for t^2
// coeff[1] for t^1
// coeff[2] for t^0
//
double? _findYExtrema() {
final double p20 = p2y - p0y;
final double p10 = p1y - p0y;
final double wP10 = fW * p10;
final double coeff0 = fW * p20 - p20;
final double coeff1 = p20 - 2 * wP10;
final double coeff2 = wP10;
final QuadRoots quadRoots = QuadRoots();
final int rootCount = quadRoots.findRoots(coeff0, coeff1, coeff2);
assert(rootCount == 0 || rootCount == 1);
if (rootCount == 1) {
return quadRoots.root0;
}
return null;
}
bool _chopAt(double t, List<Conic> dst, {bool cleanupMiddle = false}) {
// Map conic to 3D.
final double tx0 = p0x;
final double ty0 = p0y;
const double tz0 = 1;
final double tx1 = p1x * fW;
final double ty1 = p1y * fW;
final double tz1 = fW;
final double tx2 = p2x;
final double ty2 = p2y;
const double tz2 = 1;
// Now interpolate each dimension.
final double dx0 = tx0 + (tx1 - tx0) * t;
final double dx2 = tx1 + (tx2 - tx1) * t;
final double dx1 = dx0 + (dx2 - dx0) * t;
final double dy0 = ty0 + (ty1 - ty0) * t;
final double dy2 = ty1 + (ty2 - ty1) * t;
final double dy1 = dy0 + (dy2 - dy0) * t;
final double dz0 = tz0 + (tz1 - tz0) * t;
final double dz2 = tz1 + (tz2 - tz1) * t;
final double dz1 = dz0 + (dz2 - dz0) * t;
// Compute new weights.
final double root = math.sqrt(dz1);
if (SPath.nearlyEqual(root, 0)) {
return false;
}
final double w0 = dz0 / root;
final double w2 = dz2 / root;
if (SPath.nearlyEqual(dz0, 0) || SPath.nearlyEqual(dz1, 0) || SPath.nearlyEqual(dz2, 0)) {
return false;
}
// Now we can construct the 2 conics by projecting 3D down to 2D.
final double chopPointX = dx1 / dz1;
final double chopPointY = dy1 / dz1;
double cp0y = dy0 / dz0;
double cp1y = dy2 / dz2;
if (cleanupMiddle) {
// Clean-up the middle, since we know t was meant to be at
// an Y-extrema.
cp0y = chopPointY;
cp1y = chopPointY;
}
final Conic conic0 =
Conic(p0x, p0y, dx0 / dz0, cp0y, chopPointX, chopPointY, w0);
final Conic conic1 =
Conic(chopPointX, chopPointY, dx2 / dz2, cp1y, p2x, p2y, w2);
dst.add(conic0);
dst.add(conic1);
return true;
}
/// Computes the number of binary subdivisions of the curve given the
/// tolerance.
///
/// The number of subdivisions never exceeds [_maxSubdivisionCount].
int _computeSubdivisionCount(double tolerance) {
assert(tolerance.isFinite);
// Expecting finite coordinates.
assert(p0x.isFinite &&
p1x.isFinite &&
p2x.isFinite &&
p0y.isFinite &&
p1y.isFinite &&
p2y.isFinite);
if (tolerance < 0) {
return 0;
}
// See "High order approximation of conic sections by quadratic splines"
// by Michael Floater, 1993.
// Error bound e0 = |a| |p0 - 2p1 + p2| / 4(2 + a).
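// Each binary subdivision reduces this bound by roughly a factor of four,
// which is why the loop below multiplies `error` by 0.25 per level until it
// drops below `tolerance` or the [_maxSubdivisionCount] cap is reached.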
final double a = fW - 1;
final double k = a / (4.0 * (2.0 + a));
final double x = k * (p0x - 2 * p1x + p2x);
final double y = k * (p0y - 2 * p1y + p2y);
double error = math.sqrt(x * x + y * y);
int pow2 = 0;
for (; pow2 < _maxSubdivisionCount; ++pow2) {
if (error <= tolerance) {
break;
}
error *= 0.25;
}
return pow2;
}
ui.Offset evalTangentAt(double t) {
// The derivative equation returns a zero tangent vector when t is 0 or 1,
// and the control point is equal to the end point.
// In this case, use the conic endpoints to compute the tangent.
if ((t == 0 && p0x == p1x && p0y == p1y) ||
(t == 1 && p1x == p2x && p1y == p2y)) {
return ui.Offset(p2x - p0x, p2y - p0y);
}
final double p20x = p2x - p0x;
final double p20y = p2y - p0y;
final double p10x = p1x - p0x;
final double p10y = p1y - p0y;
final double cx = fW * p10x;
final double cy = fW * p10y;
final double ax = fW * p20x - p20x;
final double ay = fW * p20y - p20y;
final double bx = p20x - cx - cx;
final double by = p20y - cy - cy;
final SkQuadCoefficients quadC = SkQuadCoefficients(ax, ay, bx, by, cx, cy);
return ui.Offset(quadC.evalX(t), quadC.evalY(t));
}
static double evalNumerator(
double p0, double p1, double p2, double w, double t) {
assert(t >= 0 && t <= 1);
final double src2w = p1 * w;
final double C = p0;
final double A = p2 - 2 * src2w + C;
final double B = 2 * (src2w - C);
return polyEval(A, B, C, t);
}
static double evalDenominator(double w, double t) {
final double B = 2 * (w - 1);
const double C = 1;
final double A = -B;
return polyEval(A, B, C, t);
}
}
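// Illustrative sketch (hypothetical helper, not part of the engine API):
// converts the conic describing a quarter of the unit circle into quadratic
// segments. The weight sqrt(2)/2 is the standard rational weight for a
// 90-degree circular arc.
List<ui.Offset> debugQuarterCircleAsQuads() {
final Conic arc = Conic(1, 0, 1, 1, 0, 1, math.sqrt(2) / 2);
// The first offset is the start point; each following pair is a quadratic
// control point and its end point.
return arc.toQuads();
}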
class QuadBounds {
double minX = 0;
double minY = 0;
double maxX = 0;
double maxY = 0;
void calculateBounds(Float32List points, int pointIndex) {
final double x1 = points[pointIndex++];
final double y1 = points[pointIndex++];
final double cpX = points[pointIndex++];
final double cpY = points[pointIndex++];
final double x2 = points[pointIndex++];
final double y2 = points[pointIndex++];
minX = math.min(x1, x2);
minY = math.min(y1, y2);
maxX = math.max(x1, x2);
maxY = math.max(y1, y2);
// At an extremum the derivative is 0. For the x component:
// d/dt [(1 - t)^2 x1 + 2t(1 - t) cpX + t^2 x2]
//   = -2x1 + 2t*x1 + 2cpX - 4t*cpX + 2t*x2 = 0
// -2x1 + 2cpX + 2t(x1 - 2cpX + x2) = 0
// t = (x1 - cpX) / (x1 - 2cpX + x2)
double denom = x1 - (2 * cpX) + x2;
if (denom.abs() > SPath.scalarNearlyZero) {
final double t1 = (x1 - cpX) / denom;
if ((t1 >= 0) && (t1 <= 1.0)) {
// Solve (x,y) for curve at t = tx to find extrema
final double tprime = 1.0 - t1;
final double extremaX =
(tprime * tprime * x1) + (2 * t1 * tprime * cpX) + (t1 * t1 * x2);
final double extremaY =
(tprime * tprime * y1) + (2 * t1 * tprime * cpY) + (t1 * t1 * y2);
// Expand bounds.
minX = math.min(minX, extremaX);
maxX = math.max(maxX, extremaX);
minY = math.min(minY, extremaY);
maxY = math.max(maxY, extremaY);
}
}
// Now calculate dy/dt = 0
denom = y1 - (2 * cpY) + y2;
if (denom.abs() > SPath.scalarNearlyZero) {
final double t2 = (y1 - cpY) / denom;
if ((t2 >= 0) && (t2 <= 1.0)) {
final double tprime2 = 1.0 - t2;
final double extrema2X = (tprime2 * tprime2 * x1) +
(2 * t2 * tprime2 * cpX) +
(t2 * t2 * x2);
final double extrema2Y = (tprime2 * tprime2 * y1) +
(2 * t2 * tprime2 * cpY) +
(t2 * t2 * y2);
// Expand bounds.
minX = math.min(minX, extrema2X);
maxX = math.max(maxX, extrema2X);
minY = math.min(minY, extrema2Y);
maxY = math.max(maxY, extrema2Y);
}
}
}
}
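// Illustrative sketch (hypothetical helper, not part of the engine API):
// computes the bounds of the quadratic with endpoints (0, 0) and (100, 0)
// and control point (50, 100). The extremum at t = 0.5 lifts maxY to 50.
QuadBounds debugExampleQuadBounds() {
final QuadBounds bounds = QuadBounds();
bounds.calculateBounds(Float32List.fromList(<double>[0, 0, 50, 100, 100, 0]), 0);
return bounds;
}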
class ConicBounds {
double minX = 0;
double minY = 0;
double maxX = 0;
double maxY = 0;
void calculateBounds(Float32List points, double w, int pointIndex) {
final double x1 = points[pointIndex++];
final double y1 = points[pointIndex++];
final double cpX = points[pointIndex++];
final double cpY = points[pointIndex++];
final double x2 = points[pointIndex++];
final double y2 = points[pointIndex++];
minX = math.min(x1, x2);
minY = math.min(y1, y2);
maxX = math.max(x1, x2);
maxY = math.max(y1, y2);
// {t^2 (P0 + P2 - 2 P1 w), t (-2 P0 + 2 P1 w), P0}
// ------------------------------------------------
// {t^2 (2 - 2 w), t (-2 + 2 w), 1}
// Calculate coefficients and solve root.
final QuadRoots roots = QuadRoots();
final double p20x = x2 - x1;
final double p10x = cpX - x1;
final double wP10x = w * p10x;
final double ax = w * p20x - p20x;
final double bx = p20x - 2 * wP10x;
final double cx = wP10x;
int n = roots.findRoots(ax, bx, cx);
if (n != 0) {
final double t1 = roots.root0!;
if ((t1 >= 0) && (t1 <= 1.0)) {
final double denom = Conic.evalDenominator(w, t1);
double numerator = Conic.evalNumerator(x1, cpX, x2, w, t1);
final double extremaX = numerator / denom;
numerator = Conic.evalNumerator(y1, cpY, y2, w, t1);
final double extremaY = numerator / denom;
// Expand bounds.
minX = math.min(minX, extremaX);
maxX = math.max(maxX, extremaX);
minY = math.min(minY, extremaY);
maxY = math.max(maxY, extremaY);
}
}
final double p20y = y2 - y1;
final double p10y = cpY - y1;
final double wP10y = w * p10y;
final double a = w * p20y - p20y;
final double b = p20y - 2 * wP10y;
final double c = wP10y;
n = roots.findRoots(a, b, c);
if (n != 0) {
final double t2 = roots.root0!;
if ((t2 >= 0) && (t2 <= 1.0)) {
final double denom = Conic.evalDenominator(w, t2);
double numerator = Conic.evalNumerator(x1, cpX, x2, w, t2);
final double extrema2X = numerator / denom;
numerator = Conic.evalNumerator(y1, cpY, y2, w, t2);
final double extrema2Y = numerator / denom;
// Expand bounds.
minX = math.min(minX, extrema2X);
maxX = math.max(maxX, extrema2X);
minY = math.min(minY, extrema2Y);
maxY = math.max(maxY, extrema2Y);
}
}
}
}
class _ConicPair {
Conic? first;
Conic? second;
}
| engine/lib/web_ui/lib/src/engine/html/path/conic.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/html/path/conic.dart",
"repo_id": "engine",
"token_count": 7540
} | 253 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import '../browser_detection.dart';
import '../dom.dart';
import '../view_embedder/dom_manager.dart';
import '../window.dart';
/// Manages global resources for the singleton Flutter Window in the HTML
/// renderer.
///
/// It's used for resources that are referenced by CSS, such as svg filters.
class ResourceManager {
ResourceManager(this._domManager);
final DomManager _domManager;
static const String resourcesHostTagName = 'flt-svg-filters';
static ResourceManager? _instance;
static ResourceManager get instance {
assert(_instance != null, 'ResourceManager has not been initialized.');
return _instance!;
}
/// A child element of <body>, outside the shadow root, that hosts global
/// resources such as SVG filters and clip paths when using WebKit.
DomElement? _resourcesHost;
/// Add an element as a global resource to be referenced by CSS.
///
/// Creates a global resource host element on demand and places it either as
/// a sibling of the root element (WebKit) or inside the rendering host's
/// shadow root (other browsers), to make sure URL resolution works correctly
/// when the content is inside a shadow root.
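///
/// Illustrative sketch (hypothetical resource element):
///
/// ```dart
/// final DomElement filter = createDomElement('svg');
/// // The element is now attached to the resource host and visible to CSS.
/// ResourceManager.instance.addResource(filter);
/// ```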
void addResource(DomElement element) {
_getOrCreateResourcesHost().append(element);
}
DomElement _getOrCreateResourcesHost() {
if (_resourcesHost != null) {
return _resourcesHost!;
}
final DomElement resourcesHost = domDocument.createElement(resourcesHostTagName);
resourcesHost.style.visibility = 'hidden';
_resourcesHost = resourcesHost;
if (browserEngine == BrowserEngine.webkit) {
final DomElement rootElement = _domManager.rootElement;
// The resourcesHost *must* be a sibling of the rootElement.
rootElement.parent!.prepend(resourcesHost);
} else {
final DomShadowRoot renderingHost = _domManager.renderingHost;
renderingHost.prepend(resourcesHost);
}
return resourcesHost;
}
/// Removes a global resource element.
void removeResource(DomElement? element) {
if (element == null) {
return;
}
assert(element.parentNode == _resourcesHost);
element.remove();
}
}
/// Initializes the [ResourceManager.instance] singleton if it hasn't been
/// initialized yet.
void ensureResourceManagerInitialized(EngineFlutterWindow implicitView) {
if (ResourceManager._instance != null) {
return;
}
ResourceManager._instance = ResourceManager(implicitView.dom);
}
| engine/lib/web_ui/lib/src/engine/html/resource_manager.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/html/resource_manager.dart",
"repo_id": "engine",
"token_count": 742
} | 254 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@JS()
library js_loader;
import 'dart:js_interop';
import 'package:js/js_util.dart' as js_util;
import 'package:ui/src/engine.dart';
@JS()
@staticInterop
class FlutterJS {}
extension FlutterJSExtension on FlutterJS {
external FlutterLoader? get loader;
}
// Both `flutter` and `loader` (i.e. `_flutter.loader`) must be checked for
// null before `didCreateEngineInitializer` can be safely accessed.
@JS('_flutter')
external FlutterJS? get flutter;
@JS()
@staticInterop
class FlutterLoader {}
extension FlutterLoaderExtension on FlutterLoader {
external void didCreateEngineInitializer(FlutterEngineInitializer initializer);
bool get isAutoStart => !js_util.hasProperty(this, 'didCreateEngineInitializer');
}
/// Typedef for the function that initializes the flutter engine.
///
/// [JsFlutterConfiguration] comes from `../configuration.dart`. It is the same
/// object that can be used to configure flutter "inline", through the
/// (to be deprecated) `window.flutterConfiguration` object.
typedef InitializeEngineFn = Future<FlutterAppRunner> Function([JsFlutterConfiguration?]);
/// Typedef for the `autoStart` function that can be called straight from an engine initializer instance.
/// (Similar to [RunAppFn], but taking no specific "runApp" parameters).
typedef ImmediateRunAppFn = Future<FlutterApp> Function();
// FlutterEngineInitializer
/// An object that allows the user to initialize the Engine of a Flutter App.
///
/// As a convenience method, [autoStart] allows the user to immediately initialize
/// and run a Flutter Web app, from JavaScript.
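///
/// Minimal illustrative sketch (hypothetical `myAppRunner` and `myApp`
/// objects standing in for real [FlutterAppRunner] and [FlutterApp]
/// instances):
///
/// ```dart
/// flutter?.loader?.didCreateEngineInitializer(FlutterEngineInitializer(
///   initializeEngine: ([JsFlutterConfiguration? config]) async => myAppRunner,
///   autoStart: () async => myApp,
/// ));
/// ```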
@JS()
@anonymous
@staticInterop
abstract class FlutterEngineInitializer {
factory FlutterEngineInitializer({
required InitializeEngineFn initializeEngine,
required ImmediateRunAppFn autoStart,
}) => FlutterEngineInitializer._(
initializeEngine: (([JsFlutterConfiguration? config]) => futureToPromise(initializeEngine(config) as Future<JSObject>)).toJS,
autoStart: (() => futureToPromise(autoStart() as Future<JSObject>)).toJS,
);
external factory FlutterEngineInitializer._({
required JSFunction initializeEngine,
required JSFunction autoStart,
});
}
// FlutterAppRunner
/// A class that exposes a function that runs the Flutter app,
/// and returns a promise of a FlutterAppCleaner.
@JS()
@anonymous
@staticInterop
abstract class FlutterAppRunner {
factory FlutterAppRunner({required RunAppFn runApp,}) => FlutterAppRunner._(
runApp: (([RunAppFnParameters? args]) => futureToPromise(runApp(args) as Future<JSObject>)).toJS
);
/// Runs a flutter app
external factory FlutterAppRunner._({
required JSFunction runApp, // Returns an App
});
}
/// The shape of the object that can be passed as parameter to the
/// runApp function of the FlutterAppRunner object (from JS).
@JS()
@anonymous
@staticInterop
abstract class RunAppFnParameters {
}
/// Typedef for the function that runs the flutter app main entrypoint.
typedef RunAppFn = Future<FlutterApp> Function([RunAppFnParameters?]);
| engine/lib/web_ui/lib/src/engine/js_interop/js_loader.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/js_interop/js_loader.dart",
"repo_id": "engine",
"token_count": 938
} | 255 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
/// Tracks the [FlutterView]s focus changes.
final class ViewFocusBinding {
ViewFocusBinding(this._viewManager, this._onViewFocusChange);
final FlutterViewManager _viewManager;
final ui.ViewFocusChangeCallback _onViewFocusChange;
int? _lastViewId;
ui.ViewFocusDirection _viewFocusDirection = ui.ViewFocusDirection.forward;
StreamSubscription<int>? _onViewCreatedListener;
void init() {
domDocument.body?.addEventListener(_keyDown, _handleKeyDown);
domDocument.body?.addEventListener(_keyUp, _handleKeyUp);
domDocument.body?.addEventListener(_focusin, _handleFocusin);
domDocument.body?.addEventListener(_focusout, _handleFocusout);
_onViewCreatedListener = _viewManager.onViewCreated.listen(_handleViewCreated);
}
void dispose() {
domDocument.body?.removeEventListener(_keyDown, _handleKeyDown);
domDocument.body?.removeEventListener(_keyUp, _handleKeyUp);
domDocument.body?.removeEventListener(_focusin, _handleFocusin);
domDocument.body?.removeEventListener(_focusout, _handleFocusout);
_onViewCreatedListener?.cancel();
}
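/// Programmatically moves focus to (or away from) the root element of the
/// view identified by [viewId].
///
/// When focusing, the root element is focused only if nothing inside the
/// view currently has focus, so focus is never stolen from the view's own
/// contents.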
void changeViewFocus(int viewId, ui.ViewFocusState state) {
final DomElement? viewElement = _viewManager[viewId]?.dom.rootElement;
if (state == ui.ViewFocusState.focused) {
// Only move the focus to the flutter view if nothing inside it is focused already.
if (viewId != _viewId(domDocument.activeElement)) {
viewElement?.focus();
}
} else {
viewElement?.blur();
}
}
late final DomEventListener _handleFocusin = createDomEventListener((DomEvent event) {
event as DomFocusEvent;
_handleFocusChange(event.target as DomElement?);
});
late final DomEventListener _handleFocusout = createDomEventListener((DomEvent event) {
event as DomFocusEvent;
_handleFocusChange(event.relatedTarget as DomElement?);
});
late final DomEventListener _handleKeyDown = createDomEventListener((DomEvent event) {
event as DomKeyboardEvent;
if (event.shiftKey) {
_viewFocusDirection = ui.ViewFocusDirection.backward;
}
});
late final DomEventListener _handleKeyUp = createDomEventListener((DomEvent event) {
_viewFocusDirection = ui.ViewFocusDirection.forward;
});
void _handleFocusChange(DomElement? focusedElement) {
final int? viewId = _viewId(focusedElement);
if (viewId == _lastViewId) {
return;
}
final ui.ViewFocusEvent event;
if (viewId == null) {
event = ui.ViewFocusEvent(
viewId: _lastViewId!,
state: ui.ViewFocusState.unfocused,
direction: ui.ViewFocusDirection.undefined,
);
} else {
event = ui.ViewFocusEvent(
viewId: viewId,
state: ui.ViewFocusState.focused,
direction: _viewFocusDirection,
);
}
_maybeMarkViewAsFocusable(_lastViewId, reachableByKeyboard: true);
_maybeMarkViewAsFocusable(viewId, reachableByKeyboard: false);
_lastViewId = viewId;
_onViewFocusChange(event);
}
int? _viewId(DomElement? element) {
final DomElement? rootElement = element?.closest(DomManager.flutterViewTagName);
if (rootElement == null) {
return null;
}
return _viewManager.viewIdForRootElement(rootElement);
}
void _handleViewCreated(int viewId) {
_maybeMarkViewAsFocusable(viewId, reachableByKeyboard: true);
}
void _maybeMarkViewAsFocusable(
int? viewId, {
required bool reachableByKeyboard,
}) {
if (viewId == null) {
return;
}
final DomElement? rootElement = _viewManager[viewId]?.dom.rootElement;
if (EngineSemantics.instance.semanticsEnabled) {
rootElement?.removeAttribute('tabindex');
} else {
// A tabindex of zero means the DOM element can be reached with the keyboard
// (tab, shift + tab). A value of -1 keeps the element focusable, but it can
// no longer receive focus as a result of keyboard events. This is especially
// important when the semantics tree is enabled, since it puts DOM nodes
// inside the flutter view, and a zero tabindex on the view would mess up the
// focus traversal order when pressing tab or shift + tab.
rootElement?.setAttribute('tabindex', reachableByKeyboard ? 0 : -1);
}
}
static const String _focusin = 'focusin';
static const String _focusout = 'focusout';
static const String _keyDown = 'keydown';
static const String _keyUp = 'keyup';
}
| engine/lib/web_ui/lib/src/engine/platform_dispatcher/view_focus_binding.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/platform_dispatcher/view_focus_binding.dart",
"repo_id": "engine",
"token_count": 1609
} | 256 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
const String kCanvasContainerTag = 'flt-canvas-container';
typedef RenderResult = ({
List<DomImageBitmap> imageBitmaps,
int rasterStartMicros,
int rasterEndMicros,
});
// This is an interface that renders a `ScenePicture` as a `DomImageBitmap`.
// It is optionally asynchronous. It is required for the `EngineSceneView` to
// composite pictures into the canvases in the DOM tree it builds.
abstract class PictureRenderer {
FutureOr<RenderResult> renderPictures(List<ScenePicture> picture);
ScenePicture clipPicture(ScenePicture picture, ui.Rect clip);
}
class _SceneRender {
_SceneRender(
this.scene,
this._completer, {
this.recorder,
}) {
scene.beginRender();
}
final EngineScene scene;
final Completer<void> _completer;
final FrameTimingRecorder? recorder;
void done() {
scene.endRender();
_completer.complete();
}
}
// This class builds a DOM tree that composites an `EngineScene`.
class EngineSceneView {
factory EngineSceneView(PictureRenderer pictureRenderer, ui.FlutterView flutterView) {
final DomElement sceneElement = createDomElement('flt-scene');
return EngineSceneView._(pictureRenderer, flutterView, sceneElement);
}
EngineSceneView._(this.pictureRenderer, this.flutterView, this.sceneElement);
final PictureRenderer pictureRenderer;
final DomElement sceneElement;
final ui.FlutterView flutterView;
List<SliceContainer> containers = <SliceContainer>[];
_SceneRender? _currentRender;
_SceneRender? _nextRender;
Future<void> renderScene(EngineScene scene, FrameTimingRecorder? recorder) {
if (_currentRender != null) {
// If a scene is already queued up, drop it and queue this one up instead
// so that the scene view always displays the most recently requested scene.
_nextRender?.done();
final Completer<void> completer = Completer<void>();
_nextRender = _SceneRender(scene, completer, recorder: recorder);
return completer.future;
}
final Completer<void> completer = Completer<void>();
_currentRender = _SceneRender(scene, completer, recorder: recorder);
_kickRenderLoop();
return completer.future;
}
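/// Renders the current scene, completes its request, then promotes the
/// queued `next` render (if any) to `current` and repeats until the queue
/// drains.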
Future<void> _kickRenderLoop() async {
final _SceneRender current = _currentRender!;
await _renderScene(current.scene, current.recorder);
current.done();
_currentRender = _nextRender;
_nextRender = null;
if (_currentRender == null) {
return;
} else {
return _kickRenderLoop();
}
}
Future<void> _renderScene(EngineScene scene, FrameTimingRecorder? recorder) async {
final ui.Rect screenBounds = ui.Rect.fromLTWH(
0,
0,
flutterView.physicalSize.width,
flutterView.physicalSize.height,
);
final List<LayerSlice> slices = scene.rootLayer.slices;
final List<ScenePicture> picturesToRender = <ScenePicture>[];
final List<ScenePicture> originalPicturesToRender = <ScenePicture>[];
for (final LayerSlice slice in slices) {
if (slice is PictureSlice) {
final ui.Rect clippedRect = slice.picture.cullRect.intersect(screenBounds);
if (clippedRect.isEmpty) {
// This picture is completely offscreen, so don't render it at all
continue;
} else if (clippedRect == slice.picture.cullRect) {
// The picture doesn't need to be clipped, just render the original
originalPicturesToRender.add(slice.picture);
picturesToRender.add(slice.picture);
} else {
originalPicturesToRender.add(slice.picture);
picturesToRender.add(pictureRenderer.clipPicture(slice.picture, clippedRect));
}
}
}
final Map<ScenePicture, DomImageBitmap> renderMap;
if (picturesToRender.isNotEmpty) {
final RenderResult renderResult = await pictureRenderer.renderPictures(picturesToRender);
renderMap = <ScenePicture, DomImageBitmap>{
for (int i = 0; i < picturesToRender.length; i++)
originalPicturesToRender[i]: renderResult.imageBitmaps[i],
};
recorder?.recordRasterStart(renderResult.rasterStartMicros);
recorder?.recordRasterFinish(renderResult.rasterEndMicros);
} else {
renderMap = <ScenePicture, DomImageBitmap>{};
recorder?.recordRasterStart();
recorder?.recordRasterFinish();
}
recorder?.submitTimings();
final List<SliceContainer?> reusableContainers = List<SliceContainer?>.from(containers);
final List<SliceContainer> newContainers = <SliceContainer>[];
for (final LayerSlice slice in slices) {
switch (slice) {
case PictureSlice():
final DomImageBitmap? bitmap = renderMap[slice.picture];
if (bitmap == null) {
// We didn't render this slice because no part of it is visible.
continue;
}
PictureSliceContainer? container;
for (int j = 0; j < reusableContainers.length; j++) {
final SliceContainer? candidate = reusableContainers[j];
if (candidate is PictureSliceContainer) {
container = candidate;
reusableContainers[j] = null;
break;
}
}
final ui.Rect clippedBounds = slice.picture.cullRect.intersect(screenBounds);
if (container != null) {
container.bounds = clippedBounds;
} else {
container = PictureSliceContainer(clippedBounds);
}
container.updateContents();
container.renderBitmap(bitmap);
newContainers.add(container);
case PlatformViewSlice():
for (final PlatformView view in slice.views) {
// TODO(harryterkelsen): Inject the FlutterView instance from `renderScene`,
// instead of using `EnginePlatformDispatcher...implicitView` directly,
// or make the FlutterView "register" like in canvaskit.
// Ensure the platform view contents are injected in the DOM.
EnginePlatformDispatcher.instance.implicitView?.dom.injectPlatformView(view.viewId);
// Attempt to reuse a container for the existing view
PlatformViewContainer? container;
for (int j = 0; j < reusableContainers.length; j++) {
final SliceContainer? candidate = reusableContainers[j];
if (candidate is PlatformViewContainer && candidate.viewId == view.viewId) {
container = candidate;
reusableContainers[j] = null;
break;
}
}
container ??= PlatformViewContainer(view.viewId);
container.size = view.size;
container.styling = view.styling;
container.updateContents();
newContainers.add(container);
}
}
}
containers = newContainers;
DomElement? currentElement = sceneElement.firstElementChild;
for (final SliceContainer container in containers) {
if (currentElement == null) {
sceneElement.appendChild(container.container);
} else if (currentElement == container.container) {
currentElement = currentElement.nextElementSibling;
} else {
sceneElement.insertBefore(container.container, currentElement);
}
}
// Remove any other unused containers
while (currentElement != null) {
final DomElement? sibling = currentElement.nextElementSibling;
sceneElement.removeChild(currentElement);
currentElement = sibling;
}
}
}
sealed class SliceContainer {
DomElement get container;
void updateContents();
}
final class PictureSliceContainer extends SliceContainer {
factory PictureSliceContainer(ui.Rect bounds) {
final DomElement container = domDocument.createElement(kCanvasContainerTag);
final DomCanvasElement canvas = createDomCanvasElement(
width: bounds.width.toInt(),
height: bounds.height.toInt()
);
container.appendChild(canvas);
return PictureSliceContainer._(bounds, container, canvas);
}
PictureSliceContainer._(this._bounds, this.container, this.canvas);
ui.Rect _bounds;
bool _dirty = true;
ui.Rect get bounds => _bounds;
set bounds(ui.Rect bounds) {
if (_bounds != bounds) {
_bounds = bounds;
_dirty = true;
}
}
@override
void updateContents() {
if (_dirty) {
_dirty = false;
final ui.Rect roundedOutBounds = ui.Rect.fromLTRB(
bounds.left.floorToDouble(),
bounds.top.floorToDouble(),
bounds.right.ceilToDouble(),
bounds.bottom.ceilToDouble()
);
final DomCSSStyleDeclaration style = canvas.style;
final double devicePixelRatio = EngineFlutterDisplay.instance.devicePixelRatio;
final double logicalWidth = roundedOutBounds.width / devicePixelRatio;
final double logicalHeight = roundedOutBounds.height / devicePixelRatio;
final double logicalLeft = roundedOutBounds.left / devicePixelRatio;
final double logicalTop = roundedOutBounds.top / devicePixelRatio;
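      // For example, with a devicePixelRatio of 2.0, a 200x100 physical-pixel
      // bounds maps to a 100x50 logical-pixel CSS box.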
style.width = '${logicalWidth}px';
style.height = '${logicalHeight}px';
style.position = 'absolute';
style.left = '${logicalLeft}px';
style.top = '${logicalTop}px';
canvas.width = roundedOutBounds.width.ceilToDouble();
canvas.height = roundedOutBounds.height.ceilToDouble();
}
}
void renderBitmap(DomImageBitmap bitmap) {
final DomCanvasRenderingContextBitmapRenderer ctx = canvas.contextBitmapRenderer;
ctx.transferFromImageBitmap(bitmap);
}
@override
final DomElement container;
final DomCanvasElement canvas;
}
final class PlatformViewContainer extends SliceContainer {
PlatformViewContainer(this.viewId) : container = createPlatformViewSlot(viewId);
final int viewId;
PlatformViewStyling? _styling;
ui.Size? _size;
bool _dirty = false;
@override
final DomElement container;
set styling(PlatformViewStyling styling) {
if (_styling != styling) {
_styling = styling;
_dirty = true;
}
}
set size(ui.Size size) {
if (_size != size) {
_size = size;
_dirty = true;
}
}
@override
void updateContents() {
assert(_styling != null);
assert(_size != null);
if (_dirty) {
final DomCSSStyleDeclaration style = container.style;
final double devicePixelRatio = EngineFlutterDisplay.instance.devicePixelRatio;
final double logicalWidth = _size!.width / devicePixelRatio;
final double logicalHeight = _size!.height / devicePixelRatio;
style.width = '${logicalWidth}px';
style.height = '${logicalHeight}px';
style.position = 'absolute';
final ui.Offset? offset = _styling!.position.offset;
final double logicalLeft = (offset?.dx ?? 0) / devicePixelRatio;
final double logicalTop = (offset?.dy ?? 0) / devicePixelRatio;
style.left = '${logicalLeft}px';
style.top = '${logicalTop}px';
final Matrix4? transform = _styling!.position.transform;
style.transform = transform != null ? float64ListToCssTransform3d(transform.storage) : '';
style.opacity = _styling!.opacity != 1.0 ? '${_styling!.opacity}' : '';
// TODO(jacksongardner): Implement clip styling for platform views
_dirty = false;
}
}
}
| engine/lib/web_ui/lib/src/engine/scene_view.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/scene_view.dart",
"repo_id": "engine",
"token_count": 4254
} | 257 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'package:ui/ui.dart' as ui;
import '../browser_detection.dart';
import '../dom.dart';
import '../platform_dispatcher.dart';
import '../text_editing/text_editing.dart';
import 'semantics.dart';
/// Text editing used by accessibility mode.
///
/// [SemanticsTextEditingStrategy] assumes the caller will own the creation,
/// insertion, and disposal of the DOM element. Because of this,
/// [initializeElementPlacement], [initializeTextEditing], and [disable] are
/// handled differently.
///
/// This class is still responsible for hooking up the DOM element with the
/// [HybridTextEditing] instance so that changes are communicated to Flutter.
class SemanticsTextEditingStrategy extends DefaultTextEditingStrategy {
/// Creates a [SemanticsTextEditingStrategy] that eagerly instantiates
/// [domElement] so the caller can insert it before calling
/// [SemanticsTextEditingStrategy.enable].
SemanticsTextEditingStrategy(super.owner);
/// Initializes the [SemanticsTextEditingStrategy] singleton.
///
/// This method must be called prior to accessing [instance].
static SemanticsTextEditingStrategy ensureInitialized(
HybridTextEditing owner) {
if (_instance != null && _instance?.owner == owner) {
return _instance!;
}
return _instance = SemanticsTextEditingStrategy(owner);
}
/// The [SemanticsTextEditingStrategy] singleton.
static SemanticsTextEditingStrategy get instance => _instance!;
static SemanticsTextEditingStrategy? _instance;
/// The text field whose DOM element is currently used for editing.
///
/// If this field is null, no editing takes place.
TextField? activeTextField;
/// Current input configuration supplied by the "flutter/textinput" channel.
InputConfiguration? inputConfig;
/// The semantics implementation does not operate on DOM nodes, but only
/// remembers the config and callbacks. This is because the DOM nodes are
/// supplied in the semantics update and enabled by [activate].
@override
void enable(
InputConfiguration inputConfig, {
required OnChangeCallback onChange,
required OnActionCallback onAction,
}) {
this.inputConfig = inputConfig;
this.onChange = onChange;
this.onAction = onAction;
}
/// Attaches the DOM element owned by [textField] to the text editing
/// strategy.
///
  /// This method must be called after [enable] to make sure that [inputConfig],
/// [onChange], and [onAction] are not null.
void activate(TextField textField) {
assert(
inputConfig != null && onChange != null && onAction != null,
'"enable" should be called before "enableFromSemantics" and initialize input configuration',
);
if (activeTextField == textField) {
// The specified field is already active. Skip.
return;
} else if (activeTextField != null) {
// Another text field is currently active. Deactivate it before switching.
disable();
}
activeTextField = textField;
domElement = textField.editableElement;
_syncStyle();
super.enable(inputConfig!, onChange: onChange!, onAction: onAction!);
}
/// Detaches the DOM element owned by [textField] from this text editing
/// strategy.
///
/// Typically at this point the element loses focus (blurs) and stops being
/// used for editing.
void deactivate(TextField textField) {
if (activeTextField == textField) {
disable();
}
}
@override
void disable() {
    // We don't want to remove the DOM element because the caller is responsible
    // for that. However, we still want to stop editing and clean up the handlers.
if (!isEnabled) {
return;
}
isEnabled = false;
style = null;
geometry = null;
for (int i = 0; i < subscriptions.length; i++) {
subscriptions[i].cancel();
}
subscriptions.clear();
lastEditingState = null;
// If the text element still has focus, remove focus from the editable
// element to cause the on-screen keyboard, if any, to hide (e.g. on iOS,
// Android).
// Otherwise, the keyboard stays on screen even when the user navigates to
// a different screen (e.g. by hitting the "back" button).
domElement?.blur();
domElement = null;
activeTextField = null;
_queuedStyle = null;
}
@override
void addEventHandlers() {
if (inputConfiguration.autofillGroup != null) {
subscriptions
.addAll(inputConfiguration.autofillGroup!.addInputEventListeners());
}
// Subscribe to text and selection changes.
subscriptions.add(
DomSubscription(activeDomElement, 'input', handleChange));
subscriptions.add(
DomSubscription(activeDomElement, 'keydown',
maybeSendAction));
subscriptions.add(
DomSubscription(domDocument, 'selectionchange',
handleChange));
preventDefaultForMouseEvents();
}
@override
void initializeTextEditing(InputConfiguration inputConfig,
{OnChangeCallback? onChange, OnActionCallback? onAction}) {
isEnabled = true;
inputConfiguration = inputConfig;
applyConfiguration(inputConfig);
}
@override
void placeElement() {
// If this text editing element is a part of an autofill group.
if (hasAutofillGroup) {
placeForm();
}
activeDomElement.focus();
}
@override
void initializeElementPlacement() {
// Element placement is done by [TextField].
}
@override
void placeForm() {
}
@override
void updateElementPlacement(EditableTextGeometry textGeometry) {
// Element placement is done by [TextField].
}
EditableTextStyle? _queuedStyle;
@override
void updateElementStyle(EditableTextStyle textStyle) {
_queuedStyle = textStyle;
_syncStyle();
}
  /// Applies the style to the element, if both the style and the element are
  /// available.
///
/// Because style is supplied by the "flutter/textinput" channel and the DOM
/// element is supplied by the semantics tree, the existence of both at the
/// same time is not guaranteed.
void _syncStyle() {
if (_queuedStyle == null || domElement == null) {
return;
}
super.updateElementStyle(_queuedStyle!);
}
}
/// Manages semantics objects that represent editable text fields.
///
/// This role is implemented via a content-editable HTML element. This role does
/// not proactively switch modes depending on the current
/// [EngineSemanticsOwner.gestureMode]. However, in Chrome on Android it ignores
/// browser gestures when in pointer mode. In Safari on iOS, pointer events are
/// used to detect text box invocation. This is because Safari issues touch
/// events even when VoiceOver is enabled.
class TextField extends PrimaryRoleManager {
TextField(SemanticsObject semanticsObject) : super.blank(PrimaryRole.textField, semanticsObject) {
_setupDomElement();
}
/// The element used for editing, e.g. `<input>`, `<textarea>`.
DomHTMLElement? editableElement;
/// Same as [editableElement] but null-checked.
DomHTMLElement get activeEditableElement {
assert(
editableElement != null,
'The textField does not have an active editable element',
);
return editableElement!;
}
@override
bool focusAsRouteDefault() {
final DomHTMLElement? editableElement = this.editableElement;
if (editableElement == null) {
return false;
}
editableElement.focus();
return true;
}
  /// Timer that controls when to set the location of the input element.
  ///
  /// This is only used on iOS, where the virtual keyboard shifts the screen.
  /// There is no callback to know whether the keyboard is up or how much the
  /// screen has shifted. Therefore, instead of listening to the shift and
  /// passing this information to the Flutter framework, we try to prevent the
  /// shift altogether.
  ///
  /// On iOS, the virtual keyboard shifts the screen up if the focused input
  /// element is under the keyboard or very close to it. Before focus is
  /// requested, the input is positioned offscreen; its location is then set to
  /// the correct place 100ms after focus. This timer is used to time that
  /// delay.
Timer? _positionInputElementTimer;
static const Duration _delayBeforePlacement = Duration(milliseconds: 100);
void _initializeEditableElement() {
assert(editableElement == null,
'Editable element has already been initialized');
editableElement = semanticsObject.hasFlag(ui.SemanticsFlag.isMultiline)
? createDomHTMLTextAreaElement()
: createDomHTMLInputElement();
// On iOS, even though the semantic text field is transparent, the cursor
// and text highlighting are still visible. The cursor and text selection
// are made invisible by CSS in [StyleManager.attachGlobalStyles].
// But there's one more case where iOS highlights text. That's when there's
    // an autocorrect suggestion. To disable that, we have to do the following:
activeEditableElement
..spellcheck = false
..setAttribute('autocorrect', 'off')
..setAttribute('autocomplete', 'off')
..setAttribute('data-semantics-role', 'text-field');
activeEditableElement.style
..position = 'absolute'
// `top` and `left` are intentionally set to zero here.
//
// The text field would live inside a `<flt-semantics>` which should
// already be positioned using semantics.rect.
//
// See also:
//
// * [SemanticsObject.recomputePositionAndSize], which sets the position
// and size of the parent `<flt-semantics>` element.
..top = '0'
..left = '0'
..width = '${semanticsObject.rect!.width}px'
..height = '${semanticsObject.rect!.height}px';
append(activeEditableElement);
}
void _setupDomElement() {
switch (browserEngine) {
case BrowserEngine.blink:
case BrowserEngine.firefox:
_initializeForBlink();
case BrowserEngine.webkit:
_initializeForWebkit();
}
}
/// Chrome on Android reports text field activation as a "click" event.
///
/// When in browser gesture mode, the focus is forwarded to the framework as
/// a tap to initialize editing.
void _initializeForBlink() {
_initializeEditableElement();
activeEditableElement.addEventListener('focus',
createDomEventListener((DomEvent event) {
if (EngineSemantics.instance.gestureMode != GestureMode.browserGestures) {
return;
}
EnginePlatformDispatcher.instance.invokeOnSemanticsAction(
semanticsObject.id, ui.SemanticsAction.didGainAccessibilityFocus, null);
}));
activeEditableElement.addEventListener('blur',
createDomEventListener((DomEvent event) {
if (EngineSemantics.instance.gestureMode != GestureMode.browserGestures) {
return;
}
EnginePlatformDispatcher.instance.invokeOnSemanticsAction(
semanticsObject.id, ui.SemanticsAction.didLoseAccessibilityFocus, null);
}));
}
/// Safari on iOS reports text field activation via pointer events.
///
/// This emulates a tap recognizer to detect the activation. Because pointer
/// events are present regardless of whether accessibility is enabled or not,
/// this mode is always enabled.
///
/// In iOS, the virtual keyboard shifts the screen up if the focused input
/// element is under the keyboard or very close to the keyboard. To avoid the shift,
/// the creation of the editable element is delayed until a tap is detected.
///
  /// In the absence of an editable DOM element, the role of 'textbox' is
  /// assigned to the semanticsObject.element to communicate to assistive
  /// technologies that the user can start editing by tapping on the element.
  /// Once a tap is detected, the editable element gets created and the
  /// 'textbox' role is removed from semanticsObject.element to avoid confusing
  /// VoiceOver.
void _initializeForWebkit() {
    // Desktop Safari is initialized in the same way as the other browsers.
if (operatingSystem == OperatingSystem.macOs) {
_initializeForBlink();
return;
}
setAttribute('role', 'textbox');
setAttribute('contenteditable', 'false');
setAttribute('tabindex', '0');
num? lastPointerDownOffsetX;
num? lastPointerDownOffsetY;
addEventListener('pointerdown',
createDomEventListener((DomEvent event) {
final DomPointerEvent pointerEvent = event as DomPointerEvent;
lastPointerDownOffsetX = pointerEvent.clientX;
lastPointerDownOffsetY = pointerEvent.clientY;
}), true);
addEventListener('pointerup',
createDomEventListener((DomEvent event) {
final DomPointerEvent pointerEvent = event as DomPointerEvent;
if (lastPointerDownOffsetX != null) {
assert(lastPointerDownOffsetY != null);
final num deltaX = pointerEvent.clientX - lastPointerDownOffsetX!;
final num deltaY = pointerEvent.clientY - lastPointerDownOffsetY!;
// This should match the similar constant defined in:
//
// lib/src/gestures/constants.dart
//
// The value is pre-squared so we have to do less math at runtime.
const double kTouchSlop = 18.0 * 18.0; // Logical pixels squared
if (deltaX * deltaX + deltaY * deltaY < kTouchSlop) {
// Recognize it as a tap that requires a keyboard.
EnginePlatformDispatcher.instance.invokeOnSemanticsAction(
semanticsObject.id, ui.SemanticsAction.tap, null);
_invokeIosWorkaround();
}
} else {
assert(lastPointerDownOffsetY == null);
}
lastPointerDownOffsetX = null;
lastPointerDownOffsetY = null;
}), true);
}
void _invokeIosWorkaround() {
if (editableElement != null) {
return;
}
_initializeEditableElement();
activeEditableElement.style.transform = 'translate(${offScreenOffset}px, ${offScreenOffset}px)';
_positionInputElementTimer?.cancel();
_positionInputElementTimer = Timer(_delayBeforePlacement, () {
editableElement?.style.transform = '';
_positionInputElementTimer = null;
});
    // We cannot have both activeEditableElement and semanticsObject.element
    // represent the same text field. It would confuse VoiceOver, so the `role`
    // attribute needs to be assigned and removed based on whether or not
    // editableElement exists.
activeEditableElement.focus();
removeAttribute('role');
activeEditableElement.addEventListener('blur',
createDomEventListener((DomEvent event) {
setAttribute('role', 'textbox');
activeEditableElement.remove();
SemanticsTextEditingStrategy._instance?.deactivate(this);
      // Move focus back to the semantics element so that the user can continue
      // navigating the page with the assistive technology.
element.focus();
editableElement = null;
}));
}
@override
void update() {
super.update();
// Ignore the update if editableElement has not been created yet.
// On iOS Safari, when the user dismisses the keyboard using the 'done' button,
    // we receive a `blur` event from the browser and a semantic update with
// [hasFocus] set to true from the framework. In this case, we ignore the update
// and wait for a tap event before invoking the iOS workaround and creating
// the editable element.
if (editableElement != null) {
activeEditableElement.style
..width = '${semanticsObject.rect!.width}px'
..height = '${semanticsObject.rect!.height}px';
if (semanticsObject.hasFocus) {
if (domDocument.activeElement !=
activeEditableElement) {
semanticsObject.owner.addOneTimePostUpdateCallback(() {
activeEditableElement.focus();
});
}
SemanticsTextEditingStrategy._instance?.activate(this);
} else if (domDocument.activeElement ==
activeEditableElement) {
if (!isIosSafari) {
SemanticsTextEditingStrategy._instance?.deactivate(this);
// Only apply text, because this node is not focused.
}
activeEditableElement.blur();
}
}
final DomElement element = editableElement ?? this.element;
if (semanticsObject.hasLabel) {
element.setAttribute(
'aria-label',
semanticsObject.label!,
);
} else {
element.removeAttribute('aria-label');
}
}
@override
void dispose() {
super.dispose();
_positionInputElementTimer?.cancel();
_positionInputElementTimer = null;
// on iOS, the `blur` event listener callback will remove the element.
if (!isIosSafari) {
editableElement?.remove();
}
SemanticsTextEditingStrategy._instance?.deactivate(this);
}
}
| engine/lib/web_ui/lib/src/engine/semantics/text_field.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/semantics/text_field.dart",
"repo_id": "engine",
"token_count": 5525
} | 258 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:ffi';
import 'package:ui/src/engine.dart';
import 'package:ui/src/engine/skwasm/skwasm_impl.dart';
import 'package:ui/ui.dart' as ui;
class SkwasmPaint extends SkwasmObjectWrapper<RawPaint> implements ui.Paint {
SkwasmPaint() : super(paintCreate(), _registry);
static final SkwasmFinalizationRegistry<RawPaint> _registry =
SkwasmFinalizationRegistry<RawPaint>(paintDispose);
ui.BlendMode _cachedBlendMode = ui.BlendMode.srcOver;
SkwasmShader? _shader;
ui.ImageFilter? _imageFilter;
EngineColorFilter? _colorFilter;
ui.MaskFilter? _maskFilter;
bool _invertColors = false;
static final SkwasmColorFilter _invertColorFilter = SkwasmColorFilter.fromEngineColorFilter(
const EngineColorFilter.matrix(<double>[
-1.0, 0, 0, 1.0, 0, // row
0, -1.0, 0, 1.0, 0, // row
0, 0, -1.0, 1.0, 0, // row
1.0, 1.0, 1.0, 1.0, 0
])
);
@override
ui.BlendMode get blendMode {
return _cachedBlendMode;
}
@override
set blendMode(ui.BlendMode blendMode) {
if (_cachedBlendMode != blendMode) {
_cachedBlendMode = blendMode;
paintSetBlendMode(handle, blendMode.index);
}
}
@override
ui.PaintingStyle get style => ui.PaintingStyle.values[paintGetStyle(handle)];
@override
set style(ui.PaintingStyle style) => paintSetStyle(handle, style.index);
@override
double get strokeWidth => paintGetStrokeWidth(handle);
@override
set strokeWidth(double width) => paintSetStrokeWidth(handle, width);
@override
ui.StrokeCap get strokeCap => ui.StrokeCap.values[paintGetStrokeCap(handle)];
@override
set strokeCap(ui.StrokeCap cap) => paintSetStrokeCap(handle, cap.index);
@override
ui.StrokeJoin get strokeJoin => ui.StrokeJoin.values[paintGetStrokeJoin(handle)];
@override
set strokeJoin(ui.StrokeJoin join) => paintSetStrokeJoin(handle, join.index);
@override
bool get isAntiAlias => paintGetAntiAlias(handle);
@override
set isAntiAlias(bool value) => paintSetAntiAlias(handle, value);
@override
ui.Color get color => ui.Color(paintGetColorInt(handle));
@override
set color(ui.Color color) => paintSetColorInt(handle, color.value);
@override
double get strokeMiterLimit => paintGetMiterLimit(handle);
@override
set strokeMiterLimit(double limit) => paintSetMiterLimit(handle, limit);
@override
ui.Shader? get shader => _shader;
@override
set shader(ui.Shader? uiShader) {
final SkwasmShader? skwasmShader = uiShader as SkwasmShader?;
_shader = skwasmShader;
final ShaderHandle shaderHandle =
skwasmShader != null ? skwasmShader.handle : nullptr;
paintSetShader(handle, shaderHandle);
}
@override
ui.FilterQuality filterQuality = ui.FilterQuality.none;
@override
ui.ImageFilter? get imageFilter => _imageFilter;
@override
set imageFilter(ui.ImageFilter? filter) {
_imageFilter = filter;
final SkwasmImageFilter? nativeImageFilter = filter != null
? SkwasmImageFilter.fromUiFilter(filter)
: null;
paintSetImageFilter(handle, nativeImageFilter != null ? nativeImageFilter.handle : nullptr);
}
@override
ui.ColorFilter? get colorFilter => _colorFilter;
void _setEffectiveColorFilter() {
final SkwasmColorFilter? nativeFilter = _colorFilter != null
? SkwasmColorFilter.fromEngineColorFilter(_colorFilter!) : null;
if (_invertColors) {
if (nativeFilter != null) {
final SkwasmColorFilter composedFilter = SkwasmColorFilter.composed(_invertColorFilter, nativeFilter);
nativeFilter.dispose();
paintSetColorFilter(handle, composedFilter.handle);
composedFilter.dispose();
} else {
paintSetColorFilter(handle, _invertColorFilter.handle);
}
} else if (nativeFilter != null) {
paintSetColorFilter(handle, nativeFilter.handle);
nativeFilter.dispose();
} else {
paintSetColorFilter(handle, nullptr);
}
}
@override
set colorFilter(ui.ColorFilter? filter) {
_colorFilter = filter as EngineColorFilter?;
_setEffectiveColorFilter();
}
@override
ui.MaskFilter? get maskFilter => _maskFilter;
@override
set maskFilter(ui.MaskFilter? filter) {
_maskFilter = filter;
if (filter == null) {
paintSetMaskFilter(handle, nullptr);
} else {
final SkwasmMaskFilter nativeFilter = SkwasmMaskFilter.fromUiMaskFilter(filter);
paintSetMaskFilter(handle, nativeFilter.handle);
nativeFilter.dispose();
}
}
@override
bool get invertColors => _invertColors;
@override
set invertColors(bool invertColors) {
if (_invertColors == invertColors) {
return;
}
_invertColors = invertColors;
_setEffectiveColorFilter();
}
// TODO(yjbanov): https://github.com/flutter/flutter/issues/141639
@override
String toString() => 'Paint()';
}
| engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/paint.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/paint.dart",
"repo_id": "engine",
"token_count": 1852
} | 259 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@DefaultAsset('skwasm')
library skwasm_impl;
import 'dart:ffi';
final class RawSkData extends Opaque {}
typedef SkDataHandle = Pointer<RawSkData>;
@Native<SkDataHandle Function(Size)>(symbol: 'skData_create', isLeaf: true)
external SkDataHandle skDataCreate(int size);
@Native<Pointer<Void> Function(SkDataHandle)>(symbol: 'skData_getPointer', isLeaf: true)
external Pointer<Void> skDataGetPointer(SkDataHandle handle);
@Native<Pointer<Void> Function(SkDataHandle)>(symbol: 'skData_getConstPointer', isLeaf: true)
external Pointer<Void> skDataGetConstPointer(SkDataHandle handle);
@Native<Size Function(SkDataHandle)>(symbol: 'skData_getSize', isLeaf: true)
external int skDataGetSize(SkDataHandle handle);
@Native<Void Function(SkDataHandle)>(symbol: 'skData_dispose', isLeaf: true)
external void skDataDispose(SkDataHandle handle);
| engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/raw_skdata.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/skwasm/skwasm_impl/raw/raw_skdata.dart",
"repo_id": "engine",
"token_count": 331
} | 260 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
const int kChar_0 = 48;
const int kChar_9 = kChar_0 + 9;
const int kChar_A = 65;
const int kChar_Z = 90;
const int kChar_a = 97;
const int kChar_z = 122;
const int kCharBang = 33;
const int kMashriqi_0 = 0x660;
const int kMashriqi_9 = kMashriqi_0 + 9;
enum _ComparisonResult {
inside,
higher,
lower,
}
/// Each instance of [UnicodeRange] represents a range of unicode characters
/// that are assigned a [CharProperty]. For example, the following snippet:
///
/// ```dart
/// UnicodeRange(0x0041, 0x005A, CharProperty.ALetter);
/// ```
///
/// is saying that all characters between 0x0041 ("A") and 0x005A ("Z") are
/// assigned the property [CharProperty.ALetter].
///
/// Note that the Unicode spec uses inclusive ranges and we are doing the
/// same here.
class UnicodeRange<P> {
const UnicodeRange(this.start, this.end, this.property);
final int start;
final int end;
final P property;
  /// Compares a [value] to this range.
  ///
  /// The return value is either:
  /// - lower: The value is lower than the range.
  /// - higher: The value is higher than the range.
  /// - inside: The value is within the range.
_ComparisonResult compare(int value) {
if (value < start) {
return _ComparisonResult.lower;
}
if (value > end) {
return _ComparisonResult.higher;
}
return _ComparisonResult.inside;
}
}
/// Checks whether the given char code is a UTF-16 surrogate.
///
/// See:
/// - http://www.unicode.org/faq//utf_bom.html#utf16-2
bool isUtf16Surrogate(int char) {
return char & 0xF800 == 0xD800;
}
/// Combines a pair of UTF-16 surrogate into a single character code point.
///
/// The surrogate pair is expected to start at [index] in the [text].
///
/// See:
/// - http://www.unicode.org/faq//utf_bom.html#utf16-3
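///
/// For example, U+1F600 (the "grinning face" emoji) is encoded in UTF-16 as
/// the surrogate pair 0xD83D 0xDE00:
///
/// ```dart
/// const String text = '\u{1F600}';
/// assert(isUtf16Surrogate(text.codeUnitAt(0))); // 0xD83D
/// assert(combineSurrogatePair(text, 0) == 0x1F600);
/// ```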
int combineSurrogatePair(String text, int index) {
final int hi = text.codeUnitAt(index);
final int lo = text.codeUnitAt(index + 1);
final int x = (hi & ((1 << 6) - 1)) << 10 | lo & ((1 << 10) - 1);
final int w = (hi >> 6) & ((1 << 5) - 1);
final int u = w + 1;
return u << 16 | x;
}
/// Returns the code point from [text] at [index] and handles surrogate pairs
/// for cases that involve two UTF-16 codes.
int? getCodePoint(String text, int index) {
if (index < 0 || index >= text.length) {
return null;
}
final int char = text.codeUnitAt(index);
if (isUtf16Surrogate(char) && index < text.length - 1) {
return combineSurrogatePair(text, index);
}
return char;
}
/// Given a list of [UnicodeRange]s, this class performs efficient lookup
/// to find which range a value falls into.
///
/// The lookup algorithm expects the ranges to have the following constraints:
/// - Be sorted.
/// - No overlap between the ranges.
/// - Gaps between ranges are ok.
///
/// This is used in the context of unicode to find out what property a letter
/// has. The properties are then used to decide word boundaries, line break
/// opportunities, etc.
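///
/// A minimal usage sketch (the `CharProperty` values and ranges below are
/// illustrative, not the engine's real line-break data):
///
/// ```dart
/// final UnicodePropertyLookup<CharProperty> lookup =
///     UnicodePropertyLookup<CharProperty>(
///   <UnicodeRange<CharProperty>>[
///     const UnicodeRange<CharProperty>(0x0030, 0x0039, CharProperty.Numeric),
///     const UnicodeRange<CharProperty>(0x0041, 0x005A, CharProperty.ALetter),
///   ],
///   CharProperty.Unknown,
/// );
///
/// lookup.find('A1', 0); // CharProperty.ALetter
/// lookup.find('A1', 1); // CharProperty.Numeric
/// lookup.findForChar(0x2603); // CharProperty.Unknown (not in any range)
/// ```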
class UnicodePropertyLookup<P> {
UnicodePropertyLookup(this.ranges, this.defaultProperty);
/// Creates a [UnicodePropertyLookup] from packed line break data.
factory UnicodePropertyLookup.fromPackedData(
String packedData,
int singleRangesCount,
List<P> propertyEnumValues,
P defaultProperty,
) {
return UnicodePropertyLookup<P>(
_unpackProperties<P>(packedData, singleRangesCount, propertyEnumValues),
defaultProperty,
);
}
/// The list of unicode ranges and their associated properties.
final List<UnicodeRange<P>> ranges;
/// The default property to use when a character doesn't belong in any
/// known range.
final P defaultProperty;
/// Cache for lookup results.
final Map<int, P> _cache = <int, P>{};
  /// Takes a [text] and an [index], and returns the property of the character
  /// located at that [index].
  ///
  /// If the [index] is out of range, the [defaultProperty] is returned.
P find(String text, int index) {
final int? codePoint = getCodePoint(text, index);
return codePoint == null ? defaultProperty : findForChar(codePoint);
}
/// Takes one character as an integer code unit and returns its property.
///
/// If a property can't be found for the given character, then the default
/// property will be returned.
P findForChar(int? char) {
if (char == null) {
return defaultProperty;
}
final P? cacheHit = _cache[char];
if (cacheHit != null) {
return cacheHit;
}
final int rangeIndex = _binarySearch(char);
final P result = rangeIndex == -1 ? defaultProperty : ranges[rangeIndex].property;
// Cache the result.
_cache[char] = result;
return result;
}
int _binarySearch(int value) {
int min = 0;
int max = ranges.length;
while (min < max) {
final int mid = min + ((max - min) >> 1);
final UnicodeRange<P> range = ranges[mid];
switch (range.compare(value)) {
case _ComparisonResult.higher:
min = mid + 1;
case _ComparisonResult.lower:
max = mid;
case _ComparisonResult.inside:
return mid;
}
}
return -1;
}
}
List<UnicodeRange<P>> _unpackProperties<P>(
String packedData,
int singleRangesCount,
List<P> propertyEnumValues,
) {
// Packed data is mostly structured in chunks of 9 characters each:
//
// * [0..3]: Range start, encoded as a base36 integer.
// * [4..7]: Range end, encoded as a base36 integer.
// * [8]: Index of the property enum value, encoded as a single letter.
//
// When the range is a single number (i.e. range start == range end), it gets
// packed more efficiently in a chunk of 6 characters:
//
// * [0..3]: Range start (and range end), encoded as a base 36 integer.
// * [4]: "!" to indicate that there's no range end.
// * [5]: Index of the property enum value, encoded as a single letter.
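  //
  // For example (illustrative values, not real data): the 9-character chunk
  // "0001000zA" represents the range 1..35 (base36 "0001" to "000z") with the
  // first property enum value, while the 6-character chunk "0001!A" represents
  // the single value 1 with that same property.
  //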
// `packedData.length + singleRangesCount * 3` would have been the size of the
// packed data if the efficient packing of single-range items wasn't applied.
assert((packedData.length + singleRangesCount * 3) % 9 == 0);
final List<UnicodeRange<P>> ranges = <UnicodeRange<P>>[];
final int dataLength = packedData.length;
int i = 0;
while (i < dataLength) {
final int rangeStart = _consumeInt(packedData, i);
i += 4;
int rangeEnd;
if (packedData.codeUnitAt(i) == kCharBang) {
rangeEnd = rangeStart;
i++;
} else {
rangeEnd = _consumeInt(packedData, i);
i += 4;
}
final int charCode = packedData.codeUnitAt(i);
final P property =
propertyEnumValues[_getEnumIndexFromPackedValue(charCode)];
i++;
ranges.add(UnicodeRange<P>(rangeStart, rangeEnd, property));
}
return ranges;
}
int _getEnumIndexFromPackedValue(int charCode) {
// This has to stay in sync with [EnumValue.serialized] in
// `tool/unicode_sync_script.dart`.
assert((charCode >= kChar_A && charCode <= kChar_Z) ||
(charCode >= kChar_a && charCode <= kChar_z));
// Uppercase letters were assigned to the first 26 enum values.
if (charCode <= kChar_Z) {
return charCode - kChar_A;
}
// Lowercase letters were assigned to enum values above 26.
return 26 + charCode - kChar_a;
}
int _consumeInt(String packedData, int index) {
// The implementation is equivalent to:
//
// ```dart
// return int.tryParse(packedData.substring(index, index + 4), radix: 36);
// ```
//
// But using substring is slow when called too many times. This custom
// implementation makes the unpacking 25%-45% faster than using substring.
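  //
  // For example, the four characters "0041" decode to
  // 0 * 36^3 + 0 * 36^2 + 4 * 36 + 1 = 145.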
final int digit0 = getIntFromCharCode(packedData.codeUnitAt(index + 3));
final int digit1 = getIntFromCharCode(packedData.codeUnitAt(index + 2));
final int digit2 = getIntFromCharCode(packedData.codeUnitAt(index + 1));
final int digit3 = getIntFromCharCode(packedData.codeUnitAt(index));
return digit0 + (digit1 * 36) + (digit2 * 36 * 36) + (digit3 * 36 * 36 * 36);
}
/// Does the same thing as `int.parse(str, radix: 36)`, but takes only a single
/// character as a [charCode] integer.
int getIntFromCharCode(int charCode) {
assert((charCode >= kChar_0 && charCode <= kChar_9) ||
(charCode >= kChar_a && charCode <= kChar_z));
if (charCode <= kChar_9) {
return charCode - kChar_0;
}
// "a" starts from 10 and remaining letters go up from there.
return charCode - kChar_a + 10;
}
| engine/lib/web_ui/lib/src/engine/text/unicode_range.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/text/unicode_range.dart",
"repo_id": "engine",
"token_count": 2873
} | 261 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:ui/ui.dart' as ui;
import '../configuration.dart';
import '../dom.dart';
import '../platform_views/content_manager.dart';
import '../safe_browser_api.dart';
import 'style_manager.dart';
/// Manages DOM elements and the DOM structure for a [ui.FlutterView].
///
/// Here's the general DOM structure of a Flutter View:
///
/// [rootElement] <flutter-view>
/// |
/// +- [platformViewsHost] <flt-glass-pane>
/// | |
/// | +- [renderingHost] #shadow-root
/// | | |
/// | | +- <flt-semantics-placeholder>
/// | | |
/// | | +- [sceneHost] <flt-scene-host>
/// | | | |
/// | | | +- <flt-scene>
/// | | |
/// | | +- [announcementsHost] <flt-announcement-host>
/// | | |
/// | | +- <style>
/// | |
/// | +- ...platform views
/// |
/// +- [textEditingHost] <flt-text-editing-host>
/// | |
/// | +- ...text fields
/// |
/// +- [semanticsHost] <flt-semantics-host>
/// | |
/// | +- ...semantics nodes
/// |
/// +- <style>
///
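/// A minimal construction sketch (attaching [rootElement] to the document body
/// is an illustrative assumption; the engine decides where each view is
/// actually hosted):
///
/// ```dart
/// final DomManager domManager = DomManager(devicePixelRatio: 2.0);
/// domDocument.body!.append(domManager.rootElement);
/// ```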
class DomManager {
factory DomManager({required double devicePixelRatio}) {
final DomElement rootElement = domDocument.createElement(DomManager.flutterViewTagName);
final DomElement platformViewsHost = domDocument.createElement(DomManager.glassPaneTagName);
final DomShadowRoot renderingHost = _attachShadowRoot(platformViewsHost);
final DomElement sceneHost = domDocument.createElement(DomManager.sceneHostTagName);
final DomElement textEditingHost = domDocument.createElement(DomManager.textEditingHostTagName);
final DomElement semanticsHost = domDocument.createElement(DomManager.semanticsHostTagName);
final DomElement announcementsHost = createDomElement(DomManager.announcementsHostTagName);
// Root element children.
rootElement.appendChild(platformViewsHost);
rootElement.appendChild(textEditingHost);
// The semantic host goes last because hit-test order-wise it must be
// first. If semantics goes under the scene host, platform views will
// obscure semantic elements.
//
// You may be wondering: wouldn't semantics obscure platform views and
    // make them not accessible? At least with some careful planning, that
// should not be the case. The semantics tree makes all of its non-leaf
// elements transparent. This way, if a platform view appears among other
// interactive Flutter widgets, as long as those widgets do not intersect
// with the platform view, the platform view will be reachable.
rootElement.appendChild(semanticsHost);
// Rendering host (shadow root) children.
renderingHost.append(sceneHost);
renderingHost.append(announcementsHost);
// Styling.
StyleManager.attachGlobalStyles(
node: rootElement,
styleId: 'flt-text-editing-stylesheet',
styleNonce: configuration.nonce,
cssSelectorPrefix: DomManager.flutterViewTagName,
);
StyleManager.attachGlobalStyles(
node: renderingHost,
styleId: 'flt-internals-stylesheet',
styleNonce: configuration.nonce,
cssSelectorPrefix: '',
);
StyleManager.styleSceneHost(
sceneHost,
debugShowSemanticsNodes: configuration.debugShowSemanticsNodes,
);
StyleManager.styleSemanticsHost(
semanticsHost,
devicePixelRatio,
);
return DomManager._(
rootElement: rootElement,
platformViewsHost: platformViewsHost,
renderingHost: renderingHost,
sceneHost: sceneHost,
textEditingHost: textEditingHost,
semanticsHost: semanticsHost,
announcementsHost: announcementsHost,
);
}
DomManager._({
required this.rootElement,
required this.platformViewsHost,
required this.renderingHost,
required this.sceneHost,
required this.textEditingHost,
required this.semanticsHost,
required this.announcementsHost,
});
/// The tag name for the Flutter View root element.
static const String flutterViewTagName = 'flutter-view';
/// The tag name for the glass-pane.
static const String glassPaneTagName = 'flt-glass-pane';
/// The tag name for the scene host.
static const String sceneHostTagName = 'flt-scene-host';
/// The tag name for the text editing host.
static const String textEditingHostTagName = 'flt-text-editing-host';
/// The tag name for the semantics host.
static const String semanticsHostTagName = 'flt-semantics-host';
/// The tag name for the accessibility announcements host.
static const String announcementsHostTagName = 'flt-announcement-host';
/// The root DOM element for the entire Flutter View.
///
/// This is where input events are captured, such as pointer events.
///
/// If semantics is enabled, this element also contains the semantics DOM tree,
/// which captures semantics input events.
final DomElement rootElement;
/// Hosts all platform view elements.
final DomElement platformViewsHost;
/// Hosts all rendering elements and canvases.
final DomShadowRoot renderingHost;
/// Hosts the <flt-scene> element.
///
/// This element is created and inserted in the HTML DOM once. It is never
/// removed or moved. However the <flt-scene> inside of it may be replaced.
final DomElement sceneHost;
/// Hosts all text editing elements.
final DomElement textEditingHost;
/// Hosts the semantics tree.
///
/// This element is in front of the [renderingHost] and [platformViewsHost].
/// Otherwise, the phone will disable focusing by touch, only by tabbing
/// around the UI.
final DomElement semanticsHost;
/// This is where accessibility announcements are inserted.
final DomElement announcementsHost;
DomElement? _lastSceneElement;
/// Inserts the [sceneElement] into the DOM and removes the existing scene (if
/// any).
///
/// The [sceneElement] is inserted as a child of the <flt-scene-host> element
/// inside the [renderingHost].
///
/// If the [sceneElement] has already been inserted, this method does nothing
/// to avoid unnecessary DOM mutations. This is both faster and more correct,
/// because moving DOM nodes loses internal state, such as text selection.
void setScene(DomElement sceneElement) {
if (sceneElement != _lastSceneElement) {
_lastSceneElement?.remove();
_lastSceneElement = sceneElement;
sceneHost.append(sceneElement);
}
}
/// Injects a platform view with [platformViewId] into [platformViewsHost].
///
/// If the platform view is already injected, this method does *nothing*.
///
/// The `platformViewsHost` can only be different if `platformViewId` is moving
/// from one [FlutterView] to another. In that case, the browser will move the
/// slot contents from the old `platformViewsHost` to the new one, but that
  /// will cause the platform view to reset its state (an iframe will re-render,
  /// text selections will be lost, video playback will be interrupted, etc.).
///
/// Try not to move platform views across views!
void injectPlatformView(int platformViewId) {
    // For now, we don't need anything fancier. If needed, this could be
    // converted to a PlatformViewStrategy class for each web-renderer backend.
final DomElement? pv = PlatformViewManager.instance.getSlottedContent(platformViewId);
if (pv == null) {
domWindow.console.debug('Failed to inject Platform View Id: $platformViewId. '
'Render seems to be happening before a `flutter/platform_views:create` platform message!');
return;
}
// If pv is already a descendant of platformViewsHost -> noop
if (pv.parent == platformViewsHost) {
return;
}
platformViewsHost.append(pv);
}
}
DomShadowRoot _attachShadowRoot(DomElement element) {
assert(
getJsProperty<Object?>(element, 'attachShadow') != null,
'ShadowDOM is not supported in this browser.',
);
return element.attachShadow(<String, dynamic>{
'mode': 'open',
// This needs to stay false to prevent issues like this:
// - https://github.com/flutter/flutter/issues/85759
'delegatesFocus': false,
});
}
| engine/lib/web_ui/lib/src/engine/view_embedder/dom_manager.dart/0 | {
"file_path": "engine/lib/web_ui/lib/src/engine/view_embedder/dom_manager.dart",
"repo_id": "engine",
"token_count": 2649
} | 262 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:js_interop';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
/// Signature of the callback that receives progress updates as image chunks are
/// loaded.
typedef ImageCodecChunkCallback = void Function(
int cumulativeBytesLoaded,
int expectedTotalBytes,
);
/// Creates a [ui.Codec] for the image located at [uri].
///
/// The [chunkCallback] is called with progress updates as image chunks are
/// loaded.
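///
/// A usage sketch (the asset URL is hypothetical):
///
/// ```dart
/// final ui.Codec codec = await createImageCodecFromUrl(
///   Uri.parse('assets/images/photo.png'),
///   chunkCallback: (int loaded, int total) {
///     print('Loaded $loaded of $total bytes');
///   },
/// );
/// final ui.FrameInfo frame = await codec.getNextFrame();
/// ```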
Future<ui.Codec> createImageCodecFromUrl(
Uri uri, {
ImageCodecChunkCallback? chunkCallback,
}) {
return renderer.instantiateImageCodecFromUrl(
uri,
chunkCallback: chunkCallback,
);
}
/// Creates a [ui.Image] from an ImageBitmap object.
///
/// The contents of the ImageBitmap must have a premultiplied alpha.
/// The engine will take ownership of the ImageBitmap object and consume its
/// contents.
///
/// See https://developer.mozilla.org/en-US/docs/Web/API/ImageBitmap
FutureOr<ui.Image> createImageFromImageBitmap(JSAny imageSource) {
if (!domInstanceOfString(imageSource, 'ImageBitmap')) {
throw ArgumentError('Image source $imageSource is not an ImageBitmap.', 'imageSource');
}
return renderer.createImageFromImageBitmap(
imageSource as DomImageBitmap,
);
}
| engine/lib/web_ui/lib/ui_web/src/ui_web/images.dart/0 | {
"file_path": "engine/lib/web_ui/lib/ui_web/src/ui_web/images.dart",
"repo_id": "engine",
"token_count": 444
} | 263 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_LIB_WEB_UI_SKWASM_HELPERS_H_
#define FLUTTER_LIB_WEB_UI_SKWASM_HELPERS_H_
#include "third_party/skia/include/core/SkMatrix.h"
#include "third_party/skia/include/core/SkRRect.h"
#include "third_party/skia/include/core/SkSamplingOptions.h"
namespace Skwasm {
inline SkMatrix createMatrix(const SkScalar* f) {
return SkMatrix::MakeAll(f[0], f[1], f[2], f[3], f[4], f[5], f[6], f[7],
f[8]);
}
inline SkRRect createRRect(const SkScalar* f) {
const SkRect* rect = reinterpret_cast<const SkRect*>(f);
const SkVector* radiiValues = reinterpret_cast<const SkVector*>(f + 4);
SkRRect rr;
rr.setRectRadii(*rect, radiiValues);
return rr;
}
// This needs to be kept in sync with the "FilterQuality" enum in dart:ui
enum class FilterQuality {
none,
low,
medium,
high,
};
inline SkFilterMode filterModeForQuality(FilterQuality quality) {
switch (quality) {
case FilterQuality::none:
case FilterQuality::low:
return SkFilterMode::kNearest;
case FilterQuality::medium:
case FilterQuality::high:
return SkFilterMode::kLinear;
}
}
inline SkSamplingOptions samplingOptionsForQuality(FilterQuality quality) {
switch (quality) {
case FilterQuality::none:
return SkSamplingOptions(SkFilterMode::kNearest, SkMipmapMode::kNone);
case FilterQuality::low:
return SkSamplingOptions(SkFilterMode::kNearest, SkMipmapMode::kNearest);
case FilterQuality::medium:
return SkSamplingOptions(SkFilterMode::kLinear, SkMipmapMode::kLinear);
case FilterQuality::high:
// Cubic equation coefficients recommended by Mitchell & Netravali
// in their paper on cubic interpolation.
return SkSamplingOptions(SkCubicResampler::Mitchell());
}
}
} // namespace Skwasm
#endif // FLUTTER_LIB_WEB_UI_SKWASM_HELPERS_H_
| engine/lib/web_ui/skwasm/helpers.h/0 | {
"file_path": "engine/lib/web_ui/skwasm/helpers.h",
"repo_id": "engine",
"token_count": 748
} | 264 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "../export.h"
#include "../wrappers.h"
#include "third_party/skia/modules/skparagraph/include/Paragraph.h"
using namespace skia::textlayout;
using namespace Skwasm;
SKWASM_EXPORT TextStyle* textStyle_create() {
auto style = new TextStyle();
// Default color in flutter is black.
style->setColor(SK_ColorBLACK);
return style;
}
SKWASM_EXPORT TextStyle* textStyle_copy(TextStyle* style) {
return new TextStyle(*style);
}
SKWASM_EXPORT void textStyle_dispose(TextStyle* style) {
delete style;
}
SKWASM_EXPORT void textStyle_setColor(TextStyle* style, SkColor color) {
style->setColor(color);
}
SKWASM_EXPORT void textStyle_setDecoration(TextStyle* style,
TextDecoration decoration) {
style->setDecoration(decoration);
}
SKWASM_EXPORT void textStyle_setDecorationColor(TextStyle* style,
SkColor color) {
style->setDecorationColor(color);
}
SKWASM_EXPORT void textStyle_setDecorationStyle(
TextStyle* style,
TextDecorationStyle decorationStyle) {
style->setDecorationStyle(decorationStyle);
}
SKWASM_EXPORT void textStyle_setDecorationThickness(TextStyle* style,
SkScalar thickness) {
style->setDecorationThicknessMultiplier(thickness);
}
SKWASM_EXPORT void textStyle_setFontStyle(TextStyle* style,
int weight,
SkFontStyle::Slant slant) {
style->setFontStyle(SkFontStyle(weight, SkFontStyle::kNormal_Width, slant));
}
SKWASM_EXPORT void textStyle_setTextBaseline(TextStyle* style,
TextBaseline baseline) {
style->setTextBaseline(baseline);
}
SKWASM_EXPORT void textStyle_clearFontFamilies(TextStyle* style) {
style->setFontFamilies({});
}
SKWASM_EXPORT void textStyle_addFontFamilies(TextStyle* style,
SkString** fontFamilies,
int count) {
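  // Newly added families take precedence: they are placed ahead of any
  // families already set on the style.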
const std::vector<SkString>& currentFamilies = style->getFontFamilies();
std::vector<SkString> newFamilies;
newFamilies.reserve(currentFamilies.size() + count);
for (int i = 0; i < count; i++) {
newFamilies.push_back(*fontFamilies[i]);
}
for (const auto& family : currentFamilies) {
newFamilies.push_back(family);
}
style->setFontFamilies(std::move(newFamilies));
}
SKWASM_EXPORT void textStyle_setFontSize(TextStyle* style, SkScalar size) {
style->setFontSize(size);
}
SKWASM_EXPORT void textStyle_setLetterSpacing(TextStyle* style,
SkScalar letterSpacing) {
style->setLetterSpacing(letterSpacing);
}
SKWASM_EXPORT void textStyle_setWordSpacing(TextStyle* style,
SkScalar wordSpacing) {
style->setWordSpacing(wordSpacing);
}
SKWASM_EXPORT void textStyle_setHeight(TextStyle* style, SkScalar height) {
style->setHeight(height);
style->setHeightOverride(true);
}
SKWASM_EXPORT void textStyle_setHalfLeading(TextStyle* style,
bool halfLeading) {
style->setHalfLeading(halfLeading);
}
SKWASM_EXPORT void textStyle_setLocale(TextStyle* style, SkString* locale) {
style->setLocale(*locale);
}
SKWASM_EXPORT void textStyle_setBackground(TextStyle* style, SkPaint* paint) {
style->setBackgroundColor(*paint);
}
SKWASM_EXPORT void textStyle_setForeground(TextStyle* style, SkPaint* paint) {
style->setForegroundColor(*paint);
}
SKWASM_EXPORT void textStyle_addShadow(TextStyle* style,
SkColor color,
SkScalar offsetX,
SkScalar offsetY,
SkScalar blurSigma) {
style->addShadow(TextShadow(color, {offsetX, offsetY}, blurSigma));
}
SKWASM_EXPORT void textStyle_addFontFeature(TextStyle* style,
SkString* featureName,
int value) {
style->addFontFeature(*featureName, value);
}
SKWASM_EXPORT void textStyle_setFontVariations(TextStyle* style,
SkFourByteTag* axes,
float* values,
int count) {
std::vector<SkFontArguments::VariationPosition::Coordinate> coordinates;
for (int i = 0; i < count; i++) {
coordinates.push_back({axes[i], values[i]});
}
SkFontArguments::VariationPosition position = {
coordinates.data(), static_cast<int>(coordinates.size())};
style->setFontArguments(
SkFontArguments().setVariationDesignPosition(position));
}
| engine/lib/web_ui/skwasm/text/text_style.cpp/0 | {
"file_path": "engine/lib/web_ui/skwasm/text/text_style.cpp",
"repo_id": "engine",
"token_count": 2292
} | 265 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
void testMain() {
test('CanvasKit reuses the instance already set on `window`', () async {
// First initialization should make CanvasKit available through `window`.
await renderer.initialize();
expect(windowFlutterCanvasKit, isNotNull);
// Remember the initial instance.
final CanvasKit firstCanvasKitInstance = windowFlutterCanvasKit!;
// Try to load CanvasKit again.
await renderer.initialize();
// Should find the existing instance and reuse it.
expect(firstCanvasKitInstance, windowFlutterCanvasKit);
});
}
| engine/lib/web_ui/test/canvaskit/hot_restart_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/canvaskit/hot_restart_test.dart",
"repo_id": "engine",
"token_count": 271
} | 266 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import 'common.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
class TestRasterizer extends Rasterizer {
Map<EngineFlutterView, TestViewRasterizer> viewRasterizers =
<EngineFlutterView, TestViewRasterizer>{};
@override
TestViewRasterizer createViewRasterizer(EngineFlutterView view) {
return viewRasterizers.putIfAbsent(view, () => TestViewRasterizer(view));
}
@override
void dispose() {
// Do nothing
}
@override
void setResourceCacheMaxBytes(int bytes) {
// Do nothing
}
List<LayerTree> treesRenderedInView(EngineFlutterView view) {
return viewRasterizers[view]!.treesRendered;
}
}
class TestViewRasterizer extends ViewRasterizer {
TestViewRasterizer(super.view);
List<LayerTree> treesRendered = <LayerTree>[];
@override
DisplayCanvasFactory<DisplayCanvas> get displayFactory =>
throw UnimplementedError();
@override
void prepareToDraw() {
// Do nothing
}
@override
Future<void> draw(LayerTree tree) async {
treesRendered.add(tree);
return Future<void>.value();
}
@override
Future<void> rasterizeToCanvas(
DisplayCanvas canvas, List<CkPicture> pictures) {
// No-op
return Future<void>.value();
}
}
void testMain() {
group('Renderer', () {
setUpCanvasKitTest();
test('always renders most recent picture and skips intermediate pictures',
() async {
final TestRasterizer testRasterizer = TestRasterizer();
CanvasKitRenderer.instance.debugOverrideRasterizer(testRasterizer);
// Create another view to render into to force the renderer to make
// a [ViewRasterizer] for it.
final EngineFlutterView testView = EngineFlutterView(
EnginePlatformDispatcher.instance, createDomElement('test-view'));
EnginePlatformDispatcher.instance.viewManager.registerView(testView);
final List<LayerTree> treesToRender = <LayerTree>[];
final List<Future<void>> renderFutures = <Future<void>>[];
for (int i = 1; i < 20; i++) {
final ui.PictureRecorder recorder = ui.PictureRecorder();
final ui.Canvas canvas = ui.Canvas(recorder);
canvas.drawRect(const ui.Rect.fromLTWH(0, 0, 50, 50),
ui.Paint()..color = const ui.Color(0xff00ff00));
final ui.Picture picture = recorder.endRecording();
final ui.SceneBuilder builder = ui.SceneBuilder();
builder.addPicture(ui.Offset.zero, picture);
final ui.Scene scene = builder.build();
treesToRender.add((scene as LayerScene).layerTree);
renderFutures
.add(CanvasKitRenderer.instance.renderScene(scene, testView));
}
await Future.wait(renderFutures);
      // Should just render the first and last pictures and skip the ones in between.
final List<LayerTree> treesRendered =
testRasterizer.treesRenderedInView(testView);
expect(treesRendered.length, 2);
expect(treesRendered.first, treesToRender.first);
expect(treesRendered.last, treesToRender.last);
});
test('can render multiple frames at once into multiple views', () async {
final TestRasterizer testRasterizer = TestRasterizer();
CanvasKitRenderer.instance.debugOverrideRasterizer(testRasterizer);
// Create another view to render into to force the renderer to make
// a [ViewRasterizer] for it.
final EngineFlutterView testView1 = EngineFlutterView(
EnginePlatformDispatcher.instance, createDomElement('test-view'));
EnginePlatformDispatcher.instance.viewManager.registerView(testView1);
final EngineFlutterView testView2 = EngineFlutterView(
EnginePlatformDispatcher.instance, createDomElement('test-view'));
EnginePlatformDispatcher.instance.viewManager.registerView(testView2);
final EngineFlutterView testView3 = EngineFlutterView(
EnginePlatformDispatcher.instance, createDomElement('test-view'));
EnginePlatformDispatcher.instance.viewManager.registerView(testView3);
final Map<EngineFlutterView, List<LayerTree>> treesToRender =
<EngineFlutterView, List<LayerTree>>{};
treesToRender[testView1] = <LayerTree>[];
treesToRender[testView2] = <LayerTree>[];
treesToRender[testView3] = <LayerTree>[];
final List<Future<void>> renderFutures = <Future<void>>[];
for (int i = 1; i < 20; i++) {
for (final EngineFlutterView testView in <EngineFlutterView>[
testView1,
testView2,
testView3,
]) {
final ui.PictureRecorder recorder = ui.PictureRecorder();
final ui.Canvas canvas = ui.Canvas(recorder);
canvas.drawRect(const ui.Rect.fromLTWH(0, 0, 50, 50),
ui.Paint()..color = const ui.Color(0xff00ff00));
final ui.Picture picture = recorder.endRecording();
final ui.SceneBuilder builder = ui.SceneBuilder();
builder.addPicture(ui.Offset.zero, picture);
final ui.Scene scene = builder.build();
treesToRender[testView]!.add((scene as LayerScene).layerTree);
renderFutures
.add(CanvasKitRenderer.instance.renderScene(scene, testView));
}
}
await Future.wait(renderFutures);
      // Should just render the first and last pictures and skip the ones in between.
final List<LayerTree> treesRenderedInView1 =
testRasterizer.treesRenderedInView(testView1);
final List<LayerTree> treesToRenderInView1 = treesToRender[testView1]!;
expect(treesRenderedInView1.length, 2);
expect(treesRenderedInView1.first, treesToRenderInView1.first);
expect(treesRenderedInView1.last, treesToRenderInView1.last);
final List<LayerTree> treesRenderedInView2 =
testRasterizer.treesRenderedInView(testView2);
final List<LayerTree> treesToRenderInView2 = treesToRender[testView2]!;
expect(treesRenderedInView2.length, 2);
expect(treesRenderedInView2.first, treesToRenderInView2.first);
expect(treesRenderedInView2.last, treesToRenderInView2.last);
final List<LayerTree> treesRenderedInView3 =
testRasterizer.treesRenderedInView(testView3);
final List<LayerTree> treesToRenderInView3 = treesToRender[testView3]!;
expect(treesRenderedInView3.length, 2);
expect(treesRenderedInView3.first, treesToRenderInView3.first);
expect(treesRenderedInView3.last, treesToRenderInView3.last);
});
});
}
| engine/lib/web_ui/test/canvaskit/renderer_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/canvaskit/renderer_test.dart",
"repo_id": "engine",
"token_count": 2560
} | 267 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:typed_data';
import 'package:quiver/testing/async.dart';
import 'package:ui/src/engine.dart' hide window;
import 'package:ui/ui.dart';
/// Encapsulates the info of a platform message that was intercepted by
/// [PlatformMessagesSpy].
class PlatformMessage {
PlatformMessage(this.channel, this.methodCall);
/// The name of the channel on which the message was sent.
final String channel;
/// The [MethodCall] instance that was sent in the platform message.
final MethodCall methodCall;
/// Shorthand for getting the name of the method call.
String get methodName => methodCall.method;
/// Shorthand for getting the arguments of the method call.
dynamic get methodArguments => methodCall.arguments;
}
/// Intercepts platform messages sent from the engine to the framework.
///
/// It holds all intercepted platform messages in a [messages] list that can
/// be inspected in tests.
class PlatformMessagesSpy {
PlatformMessageCallback? _callback;
PlatformMessageCallback? _backup;
bool get _isActive => _callback != null;
/// List of intercepted messages since the last [setUp] call.
final List<PlatformMessage> messages = <PlatformMessage>[];
/// Start spying on platform messages.
///
/// This is typically called inside a test's `setUp` callback.
void setUp() {
assert(!_isActive);
_callback = (String channel, ByteData? data,
PlatformMessageResponseCallback? callback) {
messages.add(PlatformMessage(
channel,
const JSONMethodCodec().decodeMethodCall(data),
));
};
_backup = PlatformDispatcher.instance.onPlatformMessage;
PlatformDispatcher.instance.onPlatformMessage = _callback;
}
/// Stop spying on platform messages and clear all intercepted messages.
///
/// Make sure this is called after each test that uses [PlatformMessagesSpy].
void tearDown() {
assert(_isActive);
// Make sure [PlatformDispatcher.instance.onPlatformMessage] wasn't tampered with.
assert(PlatformDispatcher.instance.onPlatformMessage == _callback);
_callback = null;
messages.clear();
PlatformDispatcher.instance.onPlatformMessage = _backup;
}
}
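// A minimal usage sketch for [PlatformMessagesSpy] (the test body below is
// hypothetical and not part of this helper library; it assumes `package:test`
// is available in the importing test file):
//
//   final PlatformMessagesSpy spy = PlatformMessagesSpy();
//   setUp(spy.setUp);
//   tearDown(spy.tearDown);
//
//   test('engine reports something to the framework', () {
//     // ...exercise engine code that sends a platform message...
//     expect(spy.messages, hasLength(1));
//     expect(spy.messages.single.channel, isNotEmpty);
//     expect(spy.messages.single.methodName, isNotEmpty);
//   });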
/// Runs code in a [FakeAsync] zone and spies on what's going on in it.
class ZoneSpy {
final FakeAsync fakeAsync = FakeAsync();
final List<String> printLog = <String>[];
dynamic run(dynamic Function() function) {
final ZoneSpecification printInterceptor = ZoneSpecification(
print: (Zone self, ZoneDelegate parent, Zone zone, String line) {
printLog.add(line);
},
);
return Zone.current.fork(specification: printInterceptor).run<dynamic>(() {
return fakeAsync.run((FakeAsync self) {
return function();
});
});
}
}
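// A minimal usage sketch for [ZoneSpy] (illustrative only; the timer and the
// expected log entry are made up): run code under fake time and assert on the
// intercepted `print` calls.
//
//   final ZoneSpy spy = ZoneSpy();
//   spy.run(() {
//     Timer(const Duration(seconds: 1), () => print('timer fired'));
//     spy.fakeAsync.elapse(const Duration(seconds: 2));
//   });
//   expect(spy.printLog, <String>['timer fired']);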
| engine/lib/web_ui/test/common/spy.dart/0 | {
"file_path": "engine/lib/web_ui/test/common/spy.dart",
"repo_id": "engine",
"token_count": 884
} | 268 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
const String _kDefaultCssFont = '14px monospace';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
void testMain() {
late DomHTMLStyleElement styleElement;
setUp(() {
styleElement = createDomHTMLStyleElement(null);
applyGlobalCssRulesToSheet(
styleElement,
defaultCssFont: _kDefaultCssFont,
);
});
tearDown(() {
styleElement.remove();
});
test('createDomHTMLStyleElement sets a nonce value, when passed', () {
expect(styleElement.nonce, isEmpty);
final DomHTMLStyleElement style = createDomHTMLStyleElement('a-nonce-value');
expect(style.nonce, 'a-nonce-value');
});
test('(Self-test) hasCssRule can extract rules', () {
final bool hasRule = hasCssRule(styleElement,
selector: '.flt-text-editing::placeholder', declaration: 'opacity: 0');
final bool hasFakeRule = hasCssRule(styleElement,
selector: 'input::selection', declaration: 'color: #fabada;');
expect(hasRule, isTrue);
expect(hasFakeRule, isFalse);
});
test('Attaches styling to remove password reveal icons on Edge', () {
// Check that style.sheet! contains input::-ms-reveal rule
final bool hidesRevealIcons = hasCssRule(styleElement,
selector: 'input::-ms-reveal', declaration: 'display: none');
final bool codeRanInFakeyBrowser = hasCssRule(styleElement,
selector: 'input.fallback-for-fakey-browser-in-ci',
declaration: 'display: none');
if (codeRanInFakeyBrowser) {
print('Please, fix https://github.com/flutter/flutter/issues/116302');
}
expect(hidesRevealIcons || codeRanInFakeyBrowser, isTrue,
reason: 'In Edge, stylesheet must contain "input::-ms-reveal" rule.');
}, skip: !isEdge);
test('Does not attach the Edge-specific style tag on non-Edge browsers', () {
// Check that style.sheet! contains input::-ms-reveal rule
final bool hidesRevealIcons = hasCssRule(styleElement,
selector: 'input::-ms-reveal', declaration: 'display: none');
expect(hidesRevealIcons, isFalse);
}, skip: isEdge);
test(
'Attaches styles to hide the autofill overlay for browsers that support it',
() {
final String vendorPrefix = (isSafari || isFirefox) ? '' : '-webkit-';
final bool autofillOverlay = hasCssRule(styleElement,
selector: '.transparentTextEditing:${vendorPrefix}autofill',
declaration: 'opacity: 0 !important');
final bool autofillOverlayHovered = hasCssRule(styleElement,
selector: '.transparentTextEditing:${vendorPrefix}autofill:hover',
declaration: 'opacity: 0 !important');
final bool autofillOverlayFocused = hasCssRule(styleElement,
selector: '.transparentTextEditing:${vendorPrefix}autofill:focus',
declaration: 'opacity: 0 !important');
final bool autofillOverlayActive = hasCssRule(styleElement,
selector: '.transparentTextEditing:${vendorPrefix}autofill:active',
declaration: 'opacity: 0 !important');
expect(autofillOverlay, isTrue);
expect(autofillOverlayHovered, isTrue);
expect(autofillOverlayFocused, isTrue);
expect(autofillOverlayActive, isTrue);
}, skip: !browserHasAutofillOverlay());
}
/// Finds out whether a given CSS Rule ([selector] { [declaration]; }) exists in a [styleElement].
bool hasCssRule(
DomHTMLStyleElement styleElement, {
required String selector,
required String declaration,
}) {
domDocument.body!.append(styleElement);
assert(styleElement.sheet != null);
// regexr.com/740ff
final RegExp ruleLike =
RegExp('[^{]*(?:$selector)[^{]*{[^}]*(?:$declaration)[^}]*}');
final DomCSSStyleSheet sheet = styleElement.sheet! as DomCSSStyleSheet;
// Check that the cssText of any rule matches the ruleLike RegExp.
final bool result = sheet.cssRules
.map((DomCSSRule rule) => rule.cssText)
.any((String rule) => ruleLike.hasMatch(rule));
styleElement.remove();
return result;
}
| engine/lib/web_ui/test/engine/global_styles_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/global_styles_test.dart",
"repo_id": "engine",
"token_count": 1480
} | 269 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:typed_data';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import '../../common/test_initialization.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
void testMain() {
setUpAll(() async {
await bootstrapAndRunApp(withImplicitView: true);
});
group('PlatformDispatcher', () {
late EnginePlatformDispatcher dispatcher;
setUp(() {
dispatcher = EnginePlatformDispatcher();
});
tearDown(() {
dispatcher.dispose();
});
test('reports at least one display', () {
expect(ui.PlatformDispatcher.instance.displays.length, greaterThan(0));
});
test('high contrast in accessibilityFeatures has the correct value', () {
final MockHighContrastSupport mockHighContrast =
MockHighContrastSupport();
HighContrastSupport.instance = mockHighContrast;
final EnginePlatformDispatcher dispatcher =
EnginePlatformDispatcher();
expect(dispatcher.accessibilityFeatures.highContrast, isTrue);
mockHighContrast.isEnabled = false;
mockHighContrast.invokeListeners(mockHighContrast.isEnabled);
expect(dispatcher.accessibilityFeatures.highContrast, isFalse);
dispatcher.dispose();
});
test('AppLifecycleState transitions through all states', () {
final List<ui.AppLifecycleState> states = <ui.AppLifecycleState>[];
void listener(ui.AppLifecycleState state) {
states.add(state);
}
final MockAppLifecycleState mockAppLifecycleState =
MockAppLifecycleState();
expect(mockAppLifecycleState.appLifecycleState,
ui.AppLifecycleState.resumed);
mockAppLifecycleState.addListener(listener);
expect(mockAppLifecycleState.activeCallCount, 1);
expect(
states, equals(<ui.AppLifecycleState>[ui.AppLifecycleState.resumed]));
mockAppLifecycleState.inactive();
expect(mockAppLifecycleState.appLifecycleState,
ui.AppLifecycleState.inactive);
expect(
states,
equals(<ui.AppLifecycleState>[
ui.AppLifecycleState.resumed,
ui.AppLifecycleState.inactive
]));
// Consecutive duplicate states are skipped.
mockAppLifecycleState.inactive();
expect(
states,
equals(<ui.AppLifecycleState>[
ui.AppLifecycleState.resumed,
ui.AppLifecycleState.inactive
]));
mockAppLifecycleState.hidden();
expect(
mockAppLifecycleState.appLifecycleState, ui.AppLifecycleState.hidden);
expect(
states,
equals(<ui.AppLifecycleState>[
ui.AppLifecycleState.resumed,
ui.AppLifecycleState.inactive,
ui.AppLifecycleState.hidden
]));
mockAppLifecycleState.resume();
expect(mockAppLifecycleState.appLifecycleState,
ui.AppLifecycleState.resumed);
expect(
states,
equals(<ui.AppLifecycleState>[
ui.AppLifecycleState.resumed,
ui.AppLifecycleState.inactive,
ui.AppLifecycleState.hidden,
ui.AppLifecycleState.resumed
]));
mockAppLifecycleState.detach();
expect(mockAppLifecycleState.appLifecycleState,
ui.AppLifecycleState.detached);
expect(
states,
equals(<ui.AppLifecycleState>[
ui.AppLifecycleState.resumed,
ui.AppLifecycleState.inactive,
ui.AppLifecycleState.hidden,
ui.AppLifecycleState.resumed,
ui.AppLifecycleState.detached
]));
mockAppLifecycleState.removeListener(listener);
expect(mockAppLifecycleState.deactivateCallCount, 1);
// No more states should be recorded after the listener is removed.
mockAppLifecycleState.resume();
expect(
states,
equals(<ui.AppLifecycleState>[
ui.AppLifecycleState.resumed,
ui.AppLifecycleState.inactive,
ui.AppLifecycleState.hidden,
ui.AppLifecycleState.resumed,
ui.AppLifecycleState.detached
]));
});
test('responds to flutter/skia Skia.setResourceCacheMaxBytes', () async {
const MethodCodec codec = JSONMethodCodec();
final Completer<ByteData?> completer = Completer<ByteData?>();
ui.PlatformDispatcher.instance.sendPlatformMessage(
'flutter/skia',
codec.encodeMethodCall(const MethodCall(
'Skia.setResourceCacheMaxBytes',
512 * 1000 * 1000,
)),
completer.complete,
);
final ByteData? response = await completer.future;
expect(response, isNotNull);
expect(
codec.decodeEnvelope(response!),
<bool>[true],
);
});
test('responds to flutter/platform HapticFeedback.vibrate', () async {
const MethodCodec codec = JSONMethodCodec();
final Completer<ByteData?> completer = Completer<ByteData?>();
ui.PlatformDispatcher.instance.sendPlatformMessage(
'flutter/platform',
codec.encodeMethodCall(const MethodCall(
'HapticFeedback.vibrate',
)),
completer.complete,
);
final ByteData? response = await completer.future;
expect(response, isNotNull);
expect(
codec.decodeEnvelope(response!),
true,
);
});
test('responds to flutter/platform SystemChrome.setSystemUIOverlayStyle',
() async {
const MethodCodec codec = JSONMethodCodec();
final Completer<ByteData?> completer = Completer<ByteData?>();
ui.PlatformDispatcher.instance.sendPlatformMessage(
'flutter/platform',
codec.encodeMethodCall(const MethodCall(
'SystemChrome.setSystemUIOverlayStyle',
<String, dynamic>{},
)),
completer.complete,
);
final ByteData? response = await completer.future;
expect(response, isNotNull);
expect(
codec.decodeEnvelope(response!),
true,
);
});
test('responds to flutter/contextmenu enable', () async {
const MethodCodec codec = JSONMethodCodec();
final Completer<ByteData?> completer = Completer<ByteData?>();
ui.PlatformDispatcher.instance.sendPlatformMessage(
'flutter/contextmenu',
codec.encodeMethodCall(const MethodCall(
'enableContextMenu',
)),
completer.complete,
);
final ByteData? response = await completer.future;
expect(response, isNotNull);
expect(
codec.decodeEnvelope(response!),
true,
);
});
test('responds to flutter/contextmenu disable', () async {
const MethodCodec codec = JSONMethodCodec();
final Completer<ByteData?> completer = Completer<ByteData?>();
ui.PlatformDispatcher.instance.sendPlatformMessage(
'flutter/contextmenu',
codec.encodeMethodCall(const MethodCall(
'disableContextMenu',
)),
completer.complete,
);
final ByteData? response = await completer.future;
expect(response, isNotNull);
expect(
codec.decodeEnvelope(response!),
true,
);
});
test('can find text scale factor', () async {
const double deltaTolerance = 1e-5;
final DomElement root = domDocument.documentElement!;
final String oldFontSize = root.style.fontSize;
addTearDown(() {
root.style.fontSize = oldFontSize;
});
root.style.fontSize = '16px';
expect(findBrowserTextScaleFactor(), 1.0);
root.style.fontSize = '20px';
expect(findBrowserTextScaleFactor(), 1.25);
root.style.fontSize = '24px';
expect(findBrowserTextScaleFactor(), 1.5);
root.style.fontSize = '14.4px';
expect(findBrowserTextScaleFactor(), closeTo(0.9, deltaTolerance));
root.style.fontSize = '12.8px';
expect(findBrowserTextScaleFactor(), closeTo(0.8, deltaTolerance));
root.style.fontSize = '';
expect(findBrowserTextScaleFactor(), 1.0);
});
test(
"calls onTextScaleFactorChanged when the <html> element's font-size changes",
() async {
final DomElement root = domDocument.documentElement!;
final String oldFontSize = root.style.fontSize;
final ui.VoidCallback? oldCallback =
ui.PlatformDispatcher.instance.onTextScaleFactorChanged;
addTearDown(() {
root.style.fontSize = oldFontSize;
ui.PlatformDispatcher.instance.onTextScaleFactorChanged = oldCallback;
});
root.style.fontSize = '16px';
bool isCalled = false;
ui.PlatformDispatcher.instance.onTextScaleFactorChanged = () {
isCalled = true;
};
root.style.fontSize = '20px';
await Future<void>.delayed(Duration.zero);
expect(root.style.fontSize, '20px');
expect(isCalled, isTrue);
expect(ui.PlatformDispatcher.instance.textScaleFactor,
findBrowserTextScaleFactor());
isCalled = false;
root.style.fontSize = '16px';
await Future<void>.delayed(Duration.zero);
expect(root.style.fontSize, '16px');
expect(isCalled, isTrue);
expect(ui.PlatformDispatcher.instance.textScaleFactor,
findBrowserTextScaleFactor());
});
test('disposes all its views', () {
final EngineFlutterView view1 =
EngineFlutterView(dispatcher, createDomHTMLDivElement());
final EngineFlutterView view2 =
EngineFlutterView(dispatcher, createDomHTMLDivElement());
final EngineFlutterView view3 =
EngineFlutterView(dispatcher, createDomHTMLDivElement());
dispatcher.viewManager
..registerView(view1)
..registerView(view2)
..registerView(view3);
expect(view1.isDisposed, isFalse);
expect(view2.isDisposed, isFalse);
expect(view3.isDisposed, isFalse);
dispatcher.dispose();
expect(view1.isDisposed, isTrue);
expect(view2.isDisposed, isTrue);
expect(view3.isDisposed, isTrue);
});
test('connects view disposal to metrics changed event', () {
final EngineFlutterView view1 =
EngineFlutterView(dispatcher, createDomHTMLDivElement());
final EngineFlutterView view2 =
EngineFlutterView(dispatcher, createDomHTMLDivElement());
dispatcher.viewManager
..registerView(view1)
..registerView(view2);
expect(view1.isDisposed, isFalse);
expect(view2.isDisposed, isFalse);
bool onMetricsChangedCalled = false;
dispatcher.onMetricsChanged = () {
onMetricsChangedCalled = true;
};
expect(onMetricsChangedCalled, isFalse);
dispatcher.viewManager.disposeAndUnregisterView(view2.viewId);
expect(onMetricsChangedCalled, isTrue, reason: 'onMetricsChanged should have been called.');
dispatcher.dispose();
});
test('disconnects view disposal event on dispose', () {
final EngineFlutterView view1 =
EngineFlutterView(dispatcher, createDomHTMLDivElement());
dispatcher.viewManager.registerView(view1);
expect(view1.isDisposed, isFalse);
bool onMetricsChangedCalled = false;
dispatcher.onMetricsChanged = () {
onMetricsChangedCalled = true;
};
dispatcher.dispose();
expect(onMetricsChangedCalled, isFalse);
expect(view1.isDisposed, isTrue);
});
test('invokeOnViewFocusChange calls onViewFocusChange', () {
final List<ui.ViewFocusEvent> dispatchedViewFocusEvents = <ui.ViewFocusEvent>[];
const ui.ViewFocusEvent viewFocusEvent = ui.ViewFocusEvent(
viewId: 0,
state: ui.ViewFocusState.focused,
direction: ui.ViewFocusDirection.undefined,
);
dispatcher.onViewFocusChange = dispatchedViewFocusEvents.add;
dispatcher.invokeOnViewFocusChange(viewFocusEvent);
expect(dispatchedViewFocusEvents, hasLength(1));
expect(dispatchedViewFocusEvents.single, viewFocusEvent);
});
test('invokeOnViewFocusChange preserves the zone', () {
final Zone zone1 = Zone.current.fork();
final Zone zone2 = Zone.current.fork();
const ui.ViewFocusEvent viewFocusEvent = ui.ViewFocusEvent(
viewId: 0,
state: ui.ViewFocusState.focused,
direction: ui.ViewFocusDirection.undefined,
);
zone1.runGuarded(() {
dispatcher.onViewFocusChange = (_) {
expect(Zone.current, zone1);
};
});
zone2.runGuarded(() {
dispatcher.invokeOnViewFocusChange(viewFocusEvent);
});
});
test('adds the accessibility placeholder', () {
expect(dispatcher.accessibilityPlaceholder.isConnected, isTrue);
expect(domDocument.body!.children.first, dispatcher.accessibilityPlaceholder);
});
test('removes the accessibility placeholder', () {
dispatcher.dispose();
expect(dispatcher.accessibilityPlaceholder.isConnected, isFalse);
});
test('scheduleWarmUpFrame should call both callbacks', () async {
bool beginFrameCalled = false;
final Completer<void> drawFrameCalled = Completer<void>();
dispatcher.scheduleWarmUpFrame(beginFrame: () {
expect(drawFrameCalled.isCompleted, false);
expect(beginFrameCalled, false);
beginFrameCalled = true;
}, drawFrame: () {
expect(beginFrameCalled, true);
expect(drawFrameCalled.isCompleted, false);
drawFrameCalled.complete();
});
await drawFrameCalled.future;
expect(beginFrameCalled, true);
expect(drawFrameCalled.isCompleted, true);
});
});
}
class MockHighContrastSupport implements HighContrastSupport {
bool isEnabled = true;
final List<HighContrastListener> _listeners = <HighContrastListener>[];
@override
bool get isHighContrastEnabled => isEnabled;
void invokeListeners(bool val) {
for (final HighContrastListener listener in _listeners) {
listener(val);
}
}
@override
void addListener(HighContrastListener listener) {
_listeners.add(listener);
}
@override
void removeListener(HighContrastListener listener) {
_listeners.remove(listener);
}
}
class MockAppLifecycleState extends AppLifecycleState {
int activeCallCount = 0;
int deactivateCallCount = 0;
void detach() {
onAppLifecycleStateChange(ui.AppLifecycleState.detached);
}
void resume() {
onAppLifecycleStateChange(ui.AppLifecycleState.resumed);
}
void inactive() {
onAppLifecycleStateChange(ui.AppLifecycleState.inactive);
}
void hidden() {
onAppLifecycleStateChange(ui.AppLifecycleState.hidden);
}
@override
void activate() {
activeCallCount++;
}
@override
void deactivate() {
deactivateCallCount++;
}
}
| engine/lib/web_ui/test/engine/platform_dispatcher/platform_dispatcher_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/platform_dispatcher/platform_dispatcher_test.dart",
"repo_id": "engine",
"token_count": 6108
} | 270 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@TestOn('chrome || safari || firefox')
library;
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/ui.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
// The body of this file is the same as ../../../../../testing/dart/semantics_test.dart
// Please keep them in sync.
void testMain() {
// This must match the number of flags in lib/ui/semantics.dart
const int numSemanticsFlags = 28;
test('SemanticsFlag.values refers to all flags.', () async {
expect(SemanticsFlag.values.length, equals(numSemanticsFlags));
for (int index = 0; index < numSemanticsFlags; ++index) {
final int flag = 1 << index;
expect(SemanticsFlag.fromIndex(flag), isNotNull);
expect(SemanticsFlag.fromIndex(flag).toString(), startsWith('SemanticsFlag.'));
}
});
// This must match the number of actions in lib/ui/semantics.dart
const int numSemanticsActions = 22;
test('SemanticsAction.values refers to all actions.', () async {
expect(SemanticsAction.values.length, equals(numSemanticsActions));
for (int index = 0; index < numSemanticsActions; ++index) {
final int action = 1 << index;
expect(SemanticsAction.fromIndex(action), isNotNull);
expect(SemanticsAction.fromIndex(action).toString(), startsWith('SemanticsAction.'));
}
});
test('SpellOutStringAttribute.toString', () async {
expect(SpellOutStringAttribute(range: const TextRange(start: 2, end: 5)).toString(), 'SpellOutStringAttribute(TextRange(start: 2, end: 5))');
});
test('LocaleStringAttribute.toString', () async {
expect(LocaleStringAttribute(range: const TextRange(start: 2, end: 5), locale: const Locale('test')).toString(), 'LocaleStringAttribute(TextRange(start: 2, end: 5), test)');
});
}
| engine/lib/web_ui/test/engine/semantics/semantics_api_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/semantics/semantics_api_test.dart",
"repo_id": "engine",
"token_count": 634
} | 271 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import '../../../common/test_initialization.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
void testMain() {
const String mat2Sample = 'mat2(1.1, 2.1, 1.2, 2.2)';
const String mat3Sample = 'mat3(1.1, 2.1, 3.1, // first column (not row!)\n'
'1.2, 2.2, 3.2, // second column\n'
'1.3, 2.3, 3.3 // third column\n'
')';
const String mat4Sample = 'mat4(1.1, 2.1, 3.1, 4.1,\n'
'1.2, 2.2, 3.2, 4.2,\n'
'1.3, 2.3, 3.3, 4.3,\n'
'1.4, 2.4, 3.4, 4.4\n'
')';
setUpAll(() async {
await bootstrapAndRunApp();
});
group('Shader Declarations', () {
test('Constant declaration WebGL1', () {
final ShaderBuilder builder = ShaderBuilder(WebGLVersion.webgl1);
builder.addConst(ShaderType.kBool, 'false');
builder.addConst(ShaderType.kInt, '0');
builder.addConst(ShaderType.kFloat, '1.0');
builder.addConst(ShaderType.kBVec2, 'bvec2(false, false)');
builder.addConst(ShaderType.kBVec3, 'bvec3(false, false, true)');
builder.addConst(ShaderType.kBVec4, 'bvec4(true, true, false, false)');
builder.addConst(ShaderType.kIVec2, 'ivec2(1, 2)');
builder.addConst(ShaderType.kIVec3, 'ivec3(1, 2, 3)');
builder.addConst(ShaderType.kIVec4, 'ivec4(1, 2, 3, 4)');
builder.addConst(ShaderType.kVec2, 'vec2(1.0, 2.0)');
builder.addConst(ShaderType.kVec3, 'vec3(1.0, 2.0, 3.0)');
builder.addConst(ShaderType.kVec4, 'vec4(1.0, 2.0, 3.0, 4.0)');
builder.addConst(ShaderType.kMat2, mat2Sample);
builder.addConst(ShaderType.kMat2, mat2Sample, name: 'transform1');
builder.addConst(ShaderType.kMat3, mat3Sample);
builder.addConst(ShaderType.kMat4, mat4Sample);
expect(
builder.build(),
'const bool c_0 = false;\n'
'const int c_1 = 0;\n'
'const float c_2 = 1.0;\n'
'const bvec2 c_3 = bvec2(false, false);\n'
'const bvec3 c_4 = bvec3(false, false, true);\n'
'const bvec4 c_5 = bvec4(true, true, false, false);\n'
'const ivec2 c_6 = ivec2(1, 2);\n'
'const ivec3 c_7 = ivec3(1, 2, 3);\n'
'const ivec4 c_8 = ivec4(1, 2, 3, 4);\n'
'const vec2 c_9 = vec2(1.0, 2.0);\n'
'const vec3 c_10 = vec3(1.0, 2.0, 3.0);\n'
'const vec4 c_11 = vec4(1.0, 2.0, 3.0, 4.0);\n'
'const mat2 c_12 = $mat2Sample;\n'
'const mat2 transform1 = $mat2Sample;\n'
'const mat3 c_13 = $mat3Sample;\n'
'const mat4 c_14 = $mat4Sample;\n');
});
test('Constant declaration WebGL2', () {
final ShaderBuilder builder = ShaderBuilder(WebGLVersion.webgl2);
builder.addConst(ShaderType.kBool, 'false');
builder.addConst(ShaderType.kInt, '0');
builder.addConst(ShaderType.kFloat, '1.0');
builder.addConst(ShaderType.kBVec2, 'bvec2(false, false)');
builder.addConst(ShaderType.kBVec3, 'bvec3(false, false, true)');
builder.addConst(ShaderType.kBVec4, 'bvec4(true, true, false, false)');
builder.addConst(ShaderType.kIVec2, 'ivec2(1, 2)');
builder.addConst(ShaderType.kIVec3, 'ivec3(1, 2, 3)');
builder.addConst(ShaderType.kIVec4, 'ivec4(1, 2, 3, 4)');
builder.addConst(ShaderType.kVec2, 'vec2(1.0, 2.0)');
builder.addConst(ShaderType.kVec3, 'vec3(1.0, 2.0, 3.0)');
builder.addConst(ShaderType.kVec4, 'vec4(1.0, 2.0, 3.0, 4.0)');
builder.addConst(ShaderType.kMat2, mat2Sample);
builder.addConst(ShaderType.kMat2, mat2Sample, name: 'transform2');
builder.addConst(ShaderType.kMat3, mat3Sample);
builder.addConst(ShaderType.kMat4, mat4Sample);
expect(
builder.build(),
'#version 300 es\n'
'const bool c_0 = false;\n'
'const int c_1 = 0;\n'
'const float c_2 = 1.0;\n'
'const bvec2 c_3 = bvec2(false, false);\n'
'const bvec3 c_4 = bvec3(false, false, true);\n'
'const bvec4 c_5 = bvec4(true, true, false, false);\n'
'const ivec2 c_6 = ivec2(1, 2);\n'
'const ivec3 c_7 = ivec3(1, 2, 3);\n'
'const ivec4 c_8 = ivec4(1, 2, 3, 4);\n'
'const vec2 c_9 = vec2(1.0, 2.0);\n'
'const vec3 c_10 = vec3(1.0, 2.0, 3.0);\n'
'const vec4 c_11 = vec4(1.0, 2.0, 3.0, 4.0);\n'
'const mat2 c_12 = $mat2Sample;\n'
'const mat2 transform2 = $mat2Sample;\n'
'const mat3 c_13 = $mat3Sample;\n'
'const mat4 c_14 = $mat4Sample;\n');
});
test('Attribute declaration WebGL1', () {
final ShaderBuilder builder = ShaderBuilder(WebGLVersion.webgl1);
builder.addIn(ShaderType.kVec4, name: 'position');
builder.addIn(ShaderType.kVec4);
expect(
builder.build(),
'attribute vec4 position;\n'
'attribute vec4 attr_0;\n');
});
test('in declaration WebGL1', () {
final ShaderBuilder builder = ShaderBuilder.fragment(WebGLVersion.webgl1);
builder.addIn(ShaderType.kVec4, name: 'position');
builder.addIn(ShaderType.kVec4);
expect(
builder.build(),
'varying vec4 position;\n'
'varying vec4 attr_0;\n');
});
test('Attribute declaration WebGL2', () {
final ShaderBuilder builder = ShaderBuilder(WebGLVersion.webgl2);
builder.addIn(ShaderType.kVec4, name: 'position');
builder.addIn(ShaderType.kVec4);
expect(
builder.build(),
'#version 300 es\n'
'in vec4 position;\n'
'in vec4 attr_0;\n');
});
test('Uniform declaration WebGL1', () {
final ShaderBuilder builder = ShaderBuilder(WebGLVersion.webgl1);
final ShaderDeclaration variable =
builder.addUniform(ShaderType.kVec4, name: 'v1');
expect(variable.name, 'v1');
expect(variable.dataType, ShaderType.kVec4);
expect(variable.storage, ShaderStorageQualifier.kUniform);
builder.addUniform(ShaderType.kVec4);
expect(
builder.build(),
'uniform vec4 v1;\n'
'uniform vec4 uni_0;\n');
});
test('Uniform declaration WebGL2', () {
final ShaderBuilder builder = ShaderBuilder(WebGLVersion.webgl2);
final ShaderDeclaration variable =
builder.addUniform(ShaderType.kVec4, name: 'v1');
expect(variable.name, 'v1');
expect(variable.dataType, ShaderType.kVec4);
expect(variable.storage, ShaderStorageQualifier.kUniform);
builder.addUniform(ShaderType.kVec4);
expect(
builder.build(),
'#version 300 es\n'
'uniform vec4 v1;\n'
'uniform vec4 uni_0;\n');
});
test('Float precision', () {
final ShaderBuilder builder = ShaderBuilder(WebGLVersion.webgl2);
builder.floatPrecision = ShaderPrecision.kLow;
builder.addUniform(ShaderType.kFloat, name: 'f1');
expect(
builder.build(),
'#version 300 es\n'
'precision lowp float;\n'
'uniform float f1;\n');
});
test('Integer precision', () {
final ShaderBuilder builder = ShaderBuilder(WebGLVersion.webgl2);
builder.integerPrecision = ShaderPrecision.kLow;
builder.addUniform(ShaderType.kInt, name: 'i1');
expect(
builder.build(),
'#version 300 es\n'
'precision lowp int;\n'
'uniform int i1;\n');
});
test('Method', () {
final ShaderBuilder builder = ShaderBuilder(WebGLVersion.webgl2);
builder.floatPrecision = ShaderPrecision.kMedium;
final ShaderDeclaration variable =
builder.addUniform(ShaderType.kFloat, name: 'f1');
final ShaderMethod m = builder.addMethod('main');
m.addStatement('f1 = 5.0;');
expect(
builder.build(),
'#version 300 es\n'
'precision mediump float;\n'
'uniform float ${variable.name};\n'
'void main() {\n'
' f1 = 5.0;\n'
'}\n');
});
});
}
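// A minimal end-to-end sketch (pieced together from the ShaderBuilder API
// exercised above; the GLSL statement itself is illustrative) of assembling a
// complete vertex shader:
//
//   final ShaderBuilder builder = ShaderBuilder(WebGLVersion.webgl2);
//   builder.addIn(ShaderType.kVec4, name: 'position');
//   builder.addUniform(ShaderType.kMat4, name: 'transform');
//   final ShaderMethod method = builder.addMethod('main');
//   method.addStatement('gl_Position = transform * position;');
//   final String glsl = builder.build();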
| engine/lib/web_ui/test/engine/surface/shaders/shader_builder_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/surface/shaders/shader_builder_test.dart",
"repo_id": "engine",
"token_count": 4014
} | 272 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
void main() {
internalBootstrapBrowserTest(() => doTests);
}
void doTests() {
group('GlobalHtmlAttributes', () {
test('applies global attributes to the root and host elements', () {
final DomElement hostElement = createDomElement('host-element');
final DomElement rootElement = createDomElement('root-element');
final GlobalHtmlAttributes globalHtmlAttributes = GlobalHtmlAttributes(
rootElement: rootElement,
hostElement: hostElement,
);
globalHtmlAttributes.applyAttributes(
viewId: 123,
autoDetectRenderer: true,
rendererTag: 'canvaskit',
buildMode: 'release',
);
expect(rootElement.getAttribute('flt-view-id'), '123');
expect(hostElement.getAttribute('flt-renderer'), 'canvaskit (auto-selected)');
expect(hostElement.getAttribute('flt-build-mode'), 'release');
expect(hostElement.getAttribute('spellcheck'), 'false');
globalHtmlAttributes.applyAttributes(
viewId: 456,
autoDetectRenderer: false,
rendererTag: 'html',
buildMode: 'debug',
);
expect(rootElement.getAttribute('flt-view-id'), '456');
expect(hostElement.getAttribute('flt-renderer'), 'html (requested explicitly)');
expect(hostElement.getAttribute('flt-build-mode'), 'debug');
expect(hostElement.getAttribute('spellcheck'), 'false');
});
});
}
| engine/lib/web_ui/test/engine/view_embedder/global_html_attributes_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/engine/view_embedder/global_html_attributes_test.dart",
"repo_id": "engine",
"token_count": 606
} | 273 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:math' as math;
import 'dart:typed_data';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import '../../common/test_initialization.dart';
import '../screenshot.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
Future<void> testMain() async {
setUpUnitTests(
setUpTestViewDimensions: false,
);
tearDown(() {
ContextStateHandle.debugEmulateWebKitMaskFilter = false;
});
// Regression test for https://github.com/flutter/flutter/issues/55930
void testMaskFilterBlur({bool isWebkit = false}) {
final String browser = isWebkit ? 'Safari' : 'Chrome';
test('renders MaskFilter.blur in $browser', () async {
const double screenWidth = 800.0;
const double screenHeight = 150.0;
const ui.Rect screenRect = ui.Rect.fromLTWH(0, 0, screenWidth, screenHeight);
ContextStateHandle.debugEmulateWebKitMaskFilter = isWebkit;
final RecordingCanvas rc = RecordingCanvas(screenRect);
rc.translate(0, 75);
final SurfacePaint paint = SurfacePaint()
..maskFilter = const ui.MaskFilter.blur(ui.BlurStyle.normal, 5);
rc.translate(50, 0);
rc.drawRect(
ui.Rect.fromCircle(center: ui.Offset.zero, radius: 30),
paint,
);
rc.translate(100, 0);
paint.color = const ui.Color(0xFF00FF00);
rc.drawRRect(
ui.RRect.fromRectAndRadius(
ui.Rect.fromCircle(center: ui.Offset.zero, radius: 30),
const ui.Radius.circular(20),
),
paint,
);
rc.translate(100, 0);
paint.color = const ui.Color(0xFF0000FF);
rc.drawCircle(ui.Offset.zero, 30, paint);
rc.translate(100, 0);
paint.color = const ui.Color(0xFF00FFFF);
rc.drawPath(
SurfacePath()
..moveTo(-20, 0)
..lineTo(0, -50)
..lineTo(20, 0)
..lineTo(0, 50)
..close(),
paint,
);
rc.translate(100, 0);
paint.color = const ui.Color(0xFFFF00FF);
rc.drawOval(
ui.Rect.fromCenter(center: ui.Offset.zero, width: 40, height: 100),
paint,
);
rc.translate(100, 0);
paint.color = const ui.Color(0xFF888800);
paint.strokeWidth = 5;
rc.drawLine(
const ui.Offset(-20, -50),
const ui.Offset(20, 50),
paint,
);
rc.translate(100, 0);
paint.color = const ui.Color(0xFF888888);
rc.drawDRRect(
ui.RRect.fromRectAndRadius(
ui.Rect.fromCircle(center: ui.Offset.zero, radius: 35),
const ui.Radius.circular(20),
),
ui.RRect.fromRectAndRadius(
ui.Rect.fromCircle(center: ui.Offset.zero, radius: 15),
const ui.Radius.circular(7),
),
paint,
);
rc.translate(100, 0);
paint.color = const ui.Color(0xFF6500C9);
rc.drawRawPoints(
ui.PointMode.points,
Float32List.fromList(<double>[-10, -10, -10, 10, 10, -10, 10, 10]),
paint,
);
await canvasScreenshot(rc, 'mask_filter_$browser', region: screenRect);
});
test('renders transformed MaskFilter.blur in $browser', () async {
const double screenWidth = 300.0;
const double screenHeight = 300.0;
const ui.Rect screenRect = ui.Rect.fromLTWH(0, 0, screenWidth, screenHeight);
ContextStateHandle.debugEmulateWebKitMaskFilter = isWebkit;
final RecordingCanvas rc = RecordingCanvas(screenRect);
rc.translate(150, 150);
final SurfacePaint paint = SurfacePaint()
..maskFilter = const ui.MaskFilter.blur(ui.BlurStyle.normal, 5);
const List<ui.Color> colors = <ui.Color>[
ui.Color(0xFF000000),
ui.Color(0xFF00FF00),
ui.Color(0xFF0000FF),
ui.Color(0xFF00FFFF),
ui.Color(0xFFFF00FF),
ui.Color(0xFF888800),
ui.Color(0xFF888888),
ui.Color(0xFF6500C9),
];
for (final ui.Color color in colors) {
paint.color = color;
rc.rotate(math.pi / 4);
rc.drawRect(
ui.Rect.fromCircle(center: const ui.Offset(90, 0), radius: 20),
paint,
);
}
await canvasScreenshot(rc, 'mask_filter_transformed_$browser',
region: screenRect);
});
}
testMaskFilterBlur();
testMaskFilterBlur(isWebkit: true);
for (final int testDpr in <int>[1, 2, 4]) {
test('MaskFilter.blur blurs correctly for device-pixel ratio $testDpr', () async {
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(testDpr.toDouble());
const ui.Rect screenRect = ui.Rect.fromLTWH(0, 0, 150, 150);
final RecordingCanvas rc = RecordingCanvas(screenRect);
rc.translate(0, 75);
final SurfacePaint paint = SurfacePaint()
..maskFilter = const ui.MaskFilter.blur(ui.BlurStyle.normal, 5);
rc.translate(75, 0);
rc.drawRect(
ui.Rect.fromCircle(center: ui.Offset.zero, radius: 30),
paint,
);
await canvasScreenshot(rc, 'mask_filter_blur_dpr_$testDpr',
region: screenRect);
EngineFlutterDisplay.instance.debugOverrideDevicePixelRatio(1.0);
});
}
}
| engine/lib/web_ui/test/html/compositing/canvas_mask_filter_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/compositing/canvas_mask_filter_golden_test.dart",
"repo_id": "engine",
"token_count": 2434
} | 274 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:js_util' as js_util;
import 'dart:typed_data';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' hide ImageShader, TextStyle;
import '../../common/test_initialization.dart';
import '../screenshot.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
Future<void> testMain() async {
const double screenWidth = 600.0;
const double screenHeight = 800.0;
const Rect screenRect = Rect.fromLTWH(0, 0, screenWidth, screenHeight);
setUpUnitTests(
setUpTestViewDimensions: false,
);
setUp(() {
GlContextCache.dispose();
glRenderer = null;
});
Future<void> testVertices(
String fileName, Vertices vertices, BlendMode blendMode, Paint paint) async {
final RecordingCanvas rc =
RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500));
rc.drawVertices(
vertices as SurfaceVertices, blendMode, paint as SurfacePaint);
await canvasScreenshot(rc, fileName, canvasRect: screenRect);
}
test('Should draw green hairline triangles when colors array is null.',
() async {
final Vertices vertices = Vertices.raw(
VertexMode.triangles,
Float32List.fromList(<double>[
20.0,
20.0,
220.0,
10.0,
110.0,
220.0,
220.0,
320.0,
20.0,
310.0,
200.0,
420.0
]));
await testVertices('draw_vertices_hairline_triangle', vertices,
BlendMode.srcOver, Paint()..color = const Color.fromARGB(255, 0, 128, 0));
});
test(
'Should draw black hairline triangles when colors array is null'
' and Paint() has no color.', () async {
// ignore: unused_local_variable
final Int32List colors = Int32List.fromList(<int>[
0xFFFF0000,
0xFF00FF00,
0xFF0000FF,
0xFFFF0000,
0xFF00FF00,
0xFF0000FF,
0xFFFF0000,
0xFF00FF00,
0xFF0000FF,
0xFFFF0000,
0xFF00FF00,
0xFF0000FF
]);
final Vertices vertices = Vertices.raw(
VertexMode.triangles,
Float32List.fromList(<double>[
20.0,
20.0,
220.0,
10.0,
110.0,
220.0,
220.0,
320.0,
20.0,
310.0,
200.0,
420.0
]));
await testVertices('draw_vertices_hairline_triangle_black', vertices,
BlendMode.srcOver, Paint());
});
/// Regression test for https://github.com/flutter/flutter/issues/71442.
test(
'Should draw filled triangles when colors array is null'
' and Paint() has color.', () async {
// ignore: unused_local_variable
final Int32List colors = Int32List.fromList(<int>[
0xFFFF0000,
0xFF00FF00,
0xFF0000FF,
0xFFFF0000,
0xFF00FF00,
0xFF0000FF,
0xFFFF0000,
0xFF00FF00,
0xFF0000FF,
0xFFFF0000,
0xFF00FF00,
0xFF0000FF
]);
final Vertices vertices = Vertices.raw(
VertexMode.triangles,
Float32List.fromList(<double>[
20.0,
20.0,
220.0,
10.0,
110.0,
220.0,
220.0,
320.0,
20.0,
310.0,
200.0,
420.0
]));
await testVertices(
'draw_vertices_triangle_green_filled',
vertices,
BlendMode.srcOver,
Paint()
..style = PaintingStyle.fill
..color = const Color(0xFF00FF00));
},
// TODO(yjbanov): https://github.com/flutter/flutter/issues/86623
skip: isFirefox);
test('Should draw hairline triangleFan.', () async {
final Vertices vertices = Vertices.raw(
VertexMode.triangleFan,
Float32List.fromList(<double>[
150.0,
150.0,
20.0,
10.0,
80.0,
20.0,
220.0,
15.0,
280.0,
30.0,
300.0,
420.0
]));
await testVertices('draw_vertices_hairline_triangle_fan', vertices,
BlendMode.srcOver, Paint()..color = const Color.fromARGB(255, 0, 128, 0));
});
test('Should draw hairline triangleStrip.', () async {
final Vertices vertices = Vertices.raw(
VertexMode.triangleStrip,
Float32List.fromList(<double>[
20.0,
20.0,
220.0,
10.0,
110.0,
220.0,
220.0,
320.0,
20.0,
310.0,
200.0,
420.0
]));
await testVertices('draw_vertices_hairline_triangle_strip', vertices,
BlendMode.srcOver, Paint()..color = const Color.fromARGB(255, 0, 128, 0));
});
test('Should draw triangles with colors.', () async {
final Int32List colors = Int32List.fromList(<int>[
0xFFFF0000,
0xFF00FF00,
0xFF0000FF,
0xFFFF0000,
0xFF00FF00,
0xFF0000FF
]);
final Vertices vertices = Vertices.raw(
VertexMode.triangles,
Float32List.fromList(<double>[
150.0,
150.0,
20.0,
10.0,
80.0,
20.0,
220.0,
15.0,
280.0,
30.0,
300.0,
420.0
]),
colors: colors);
await testVertices('draw_vertices_triangles', vertices, BlendMode.srcOver,
Paint()..color = const Color.fromARGB(255, 0, 128, 0));
},
// TODO(yjbanov): https://github.com/flutter/flutter/issues/86623
skip: isFirefox);
test('Should draw triangles with colors and indices.', () async {
final Int32List colors = Int32List.fromList(
<int>[0xFFFF0000, 0xFF00FF00, 0xFF0000FF, 0xFFFF0000, 0xFF0000FF]);
final Uint16List indices = Uint16List.fromList(<int>[0, 1, 2, 3, 4, 0]);
final RecordingCanvas rc =
RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500));
final Vertices vertices = Vertices.raw(
VertexMode.triangles,
Float32List.fromList(<double>[
210.0,
150.0,
30.0,
110.0,
80.0,
30.0,
220.0,
15.0,
280.0,
30.0,
]),
colors: colors,
indices: indices);
rc.drawVertices(
vertices as SurfaceVertices, BlendMode.srcOver, SurfacePaint());
await canvasScreenshot(rc, 'draw_vertices_triangles_indexed', canvasRect: screenRect);
},
// TODO(yjbanov): https://github.com/flutter/flutter/issues/86623
skip: isFirefox);
test('Should draw triangleFan with colors.', () async {
final Int32List colors = Int32List.fromList(<int>[
0xFFFF0000,
0xFF00FF00,
0xFF0000FF,
0xFFFF0000,
0xFF00FF00,
0xFF0000FF
]);
final Vertices vertices = Vertices.raw(
VertexMode.triangleFan,
Float32List.fromList(<double>[
150.0,
150.0,
20.0,
10.0,
80.0,
20.0,
220.0,
15.0,
280.0,
30.0,
300.0,
420.0
]),
colors: colors);
await testVertices('draw_vertices_triangle_fan', vertices,
BlendMode.srcOver, Paint()..color = const Color.fromARGB(255, 0, 128, 0));
},
// TODO(yjbanov): https://github.com/flutter/flutter/issues/86623
skip: isFirefox);
test('Should draw triangleStrip with colors.', () async {
final Int32List colors = Int32List.fromList(<int>[
0xFFFF0000,
0xFF00FF00,
0xFF0000FF,
0xFFFF0000,
0xFF00FF00,
0xFF0000FF
]);
final Vertices vertices = Vertices.raw(
VertexMode.triangleStrip,
Float32List.fromList(<double>[
20.0,
20.0,
220.0,
10.0,
110.0,
220.0,
220.0,
320.0,
20.0,
310.0,
200.0,
420.0
]),
colors: colors);
await testVertices('draw_vertices_triangle_strip', vertices,
BlendMode.srcOver, Paint()..color = const Color.fromARGB(255, 0, 128, 0));
},
// TODO(yjbanov): https://github.com/flutter/flutter/issues/86623
skip: isFirefox);
Future<void> testTexture(TileMode tileMode, String filename) async {
final Uint16List indices = Uint16List.fromList(<int>[0, 1, 2, 3, 4, 0]);
final RecordingCanvas rc =
RecordingCanvas(const Rect.fromLTRB(0, 0, 500, 500));
final Vertices vertices = Vertices.raw(
VertexMode.triangles,
Float32List.fromList(<double>[
210.0,
150.0,
0.0,
0.0,
80.0,
30.0,
220.0,
15.0,
280.0,
30.0,
]),
indices: indices);
final Float32List matrix4 = Matrix4.identity().storage;
final HtmlImage img = await createTestImage();
final SurfacePaint paint = SurfacePaint();
final EngineImageShader imgShader = EngineImageShader(img, tileMode, tileMode,
Float64List.fromList(matrix4), FilterQuality.high);
paint.shader = imgShader;
rc.drawVertices(vertices as SurfaceVertices, BlendMode.srcOver, paint);
await canvasScreenshot(rc, filename, canvasRect: screenRect);
expect(imgShader.debugDisposed, false);
imgShader.dispose();
expect(imgShader.debugDisposed, true);
}
test('Should draw triangle with texture and indices (clamp tile mode)', () async {
await testTexture(TileMode.clamp, 'draw_vertices_texture');
},
// TODO(yjbanov): https://github.com/flutter/flutter/issues/86623
skip: isFirefox);
test('Should draw triangle with texture and indices (mirror tile mode)', () async {
await testTexture(TileMode.mirror, 'draw_vertices_texture_mirror');
},
// TODO(yjbanov): https://github.com/flutter/flutter/issues/86623
skip: isFirefox);
test('Should draw triangle with texture and indices (repeated tile mode)', () async {
await testTexture(TileMode.repeated, 'draw_vertices_texture_repeated');
},
// TODO(yjbanov): https://github.com/flutter/flutter/issues/86623
skip: isFirefox);
}
Future<HtmlImage> createTestImage({int width = 50, int height = 40}) {
final DomCanvasElement canvas =
createDomCanvasElement(width: width, height: height);
final DomCanvasRenderingContext2D ctx = canvas.context2D;
ctx.fillStyle = '#E04040';
ctx.fillRect(0, 0, width / 3, height);
ctx.fill();
ctx.fillStyle = '#40E080';
ctx.fillRect(width / 3, 0, width / 3, height);
ctx.fill();
ctx.fillStyle = '#2040E0';
ctx.fillRect(2 * width / 3, 0, width / 3, height);
ctx.fill();
final DomHTMLImageElement imageElement = createDomHTMLImageElement();
final Completer<HtmlImage> completer = Completer<HtmlImage>();
imageElement.addEventListener('load', createDomEventListener((DomEvent event) {
completer.complete(HtmlImage(imageElement, width, height));
}));
imageElement.src = js_util.callMethod<String>(canvas, 'toDataURL', <dynamic>[]);
return completer.future;
}
| engine/lib/web_ui/test/html/drawing/draw_vertices_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/drawing/draw_vertices_golden_test.dart",
"repo_id": "engine",
"token_count": 5301
} | 275 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart';
import 'package:web_engine_tester/golden_tester.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
enum PaintMode {
kStrokeAndFill,
kStroke,
kFill,
kStrokeWidthOnly,
}
Future<void> testMain() async {
const Rect region =
Rect.fromLTWH(8, 8, 600, 400); // Compensate for old golden tester padding
Future<void> testPath(Path path, String goldenFileName,
{SurfacePaint? paint,
PaintMode mode = PaintMode.kStrokeAndFill}) async {
const Rect canvasBounds = Rect.fromLTWH(0, 0, 600, 400);
final BitmapCanvas bitmapCanvas =
BitmapCanvas(canvasBounds, RenderStrategy());
final RecordingCanvas canvas = RecordingCanvas(canvasBounds);
final bool enableFill =
mode == PaintMode.kStrokeAndFill || mode == PaintMode.kFill;
if (enableFill) {
paint ??= SurfacePaint()
..color = const Color(0x807F7F7F)
..style = PaintingStyle.fill;
canvas.drawPath(path, paint);
}
if (mode == PaintMode.kStrokeAndFill || mode == PaintMode.kStroke) {
paint = SurfacePaint()
..strokeWidth = 2
..color = enableFill ? const Color(0xFFFF0000) : const Color(0xFF000000)
..style = PaintingStyle.stroke;
}
if (mode == PaintMode.kStrokeWidthOnly) {
paint = SurfacePaint()
..color = const Color(0xFF4060E0)
..strokeWidth = 10;
}
canvas.drawPath(path, paint!);
final DomElement svgElement = pathToSvgElement(path, paint, enableFill);
canvas.endRecording();
canvas.apply(bitmapCanvas, canvasBounds);
final DomElement sceneElement = createDomElement('flt-scene');
domDocument.body!.append(sceneElement);
if (isIosSafari) {
// Shrink to fit on the iPhone screen.
sceneElement.style.position = 'absolute';
sceneElement.style.transformOrigin = '0 0 0';
sceneElement.style.transform = 'scale(0.3)';
}
sceneElement.append(bitmapCanvas.rootElement);
sceneElement.append(svgElement);
await matchGoldenFile('$goldenFileName.png',
region: region);
bitmapCanvas.rootElement.remove();
svgElement.remove();
}
tearDown(() {
domDocument.body!.clearChildren();
});
test('render line strokes', () async {
final Path path = Path();
path.moveTo(50, 60);
path.lineTo(200, 300);
await testPath(path, 'svg_stroke_line',
paint: SurfacePaint()
..color = const Color(0xFFFF0000)
..strokeWidth = 2.0
..style = PaintingStyle.stroke);
});
test('render quad bezier curve', () async {
final Path path = Path();
path.moveTo(50, 60);
path.quadraticBezierTo(200, 60, 50, 200);
await testPath(path, 'svg_quad_bezier');
});
test('render cubic curve', () async {
final Path path = Path();
path.moveTo(50, 60);
path.cubicTo(200, 60, -100, -50, 150, 200);
await testPath(path, 'svg_cubic_bezier');
});
test('render arcs', () async {
final List<ArcSample> arcs = <ArcSample>[
ArcSample(Offset.zero, distance: 20),
ArcSample(const Offset(200, 0),
largeArc: true, distance: 20),
ArcSample(Offset.zero, clockwise: true, distance: 20),
ArcSample(const Offset(200, 0),
largeArc: true, clockwise: true, distance: 20),
ArcSample(Offset.zero, distance: -20),
ArcSample(const Offset(200, 0),
largeArc: true, distance: -20),
ArcSample(Offset.zero, clockwise: true, distance: -20),
ArcSample(const Offset(200, 0),
largeArc: true, clockwise: true, distance: -20)
];
int sampleIndex = 0;
for (final ArcSample sample in arcs) {
++sampleIndex;
final Path path = sample.createPath();
await testPath(path, 'svg_arc_$sampleIndex');
}
});
test('render rect', () async {
final Path path = Path();
path.addRect(const Rect.fromLTRB(15, 15, 60, 20));
path.addRect(const Rect.fromLTRB(35, 160, 15, 100));
await testPath(path, 'svg_rect');
});
test('render notch', () async {
final Path path = Path();
path.moveTo(0, 0);
path.lineTo(83, 0);
path.quadraticBezierTo(98, 0, 99.97, 7.8);
path.arcToPoint(const Offset(162, 7.8),
radius: const Radius.circular(32),
clockwise: false);
path.lineTo(200, 7.8);
path.lineTo(200, 80);
path.lineTo(0, 80);
path.lineTo(0, 10);
await testPath(path, 'svg_notch');
});
/// Regression test for https://github.com/flutter/flutter/issues/70980
test('render edit outline', () async {
const double w = 0.7;
final Path path = Path();
path.moveTo(0.5, 14);
path.conicTo(0.5, 10.5, 4, 10.5, w);
path.moveTo(4, 10.5);
path.lineTo(6.5, 10.5);
path.moveTo(36.0, 10.5);
path.lineTo(158, 10.5);
path.conicTo(161.5, 10.5, 161.5, 14, w);
path.moveTo(161.5, 14);
path.lineTo(161.5, 48);
path.conicTo(161.5, 51.5, 158, 51.5, w);
path.lineTo(4, 51.5);
path.conicTo(0.5, 51.5, 0.5, 48, w);
path.lineTo(0.5, 14);
await testPath(path, 'svg_editoutline', mode: PaintMode.kStroke);
});
/// Regression test for https://github.com/flutter/flutter/issues/74416
test('render stroke', () async {
final Path path = Path();
path.moveTo(20, 20);
path.lineTo(200, 200);
await testPath(path, 'svg_stroke_width', mode: PaintMode.kStrokeWidthOnly);
});
}
DomElement pathToSvgElement(Path path, Paint paint, bool enableFill) {
final Rect bounds = path.getBounds();
final SVGSVGElement root = createSVGSVGElement();
root.style.transform = 'translate(200px, 0px)';
root.setAttribute('viewBox', '0 0 ${bounds.right} ${bounds.bottom}');
root.width!.baseVal!.newValueSpecifiedUnits(svgLengthTypeNumber, bounds.right);
root.height!.baseVal!.newValueSpecifiedUnits(svgLengthTypeNumber, bounds.bottom);
final SVGPathElement pathElement = createSVGPathElement();
root.append(pathElement);
if (paint.style == PaintingStyle.stroke ||
paint.strokeWidth != 0.0) {
pathElement.setAttribute('stroke', paint.color.toCssString());
pathElement.setAttribute('stroke-width', paint.strokeWidth);
if (!enableFill) {
pathElement.setAttribute('fill', 'none');
}
}
if (paint.style == PaintingStyle.fill) {
pathElement.setAttribute('fill', paint.color.toCssString());
}
pathElement.setAttribute('d', pathToSvg((path as SurfacePath).pathRef)); // This is what we're testing!
return root;
}
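// For reference, a simple two-point path run through [pathToSvgElement] ends
// up as markup roughly like the following (illustrative only; the exact number
// formatting and attribute order are determined by pathToSvg and the DOM):
//
//   <svg viewBox="0 0 200 200" width="200" height="200">
//     <path stroke="#ff0000" stroke-width="2" fill="none" d="M 20 20 L 200 200"/>
//   </svg>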
class ArcSample {
ArcSample(this.offset,
{this.largeArc = false, this.clockwise = false, this.distance = 0});
final Offset offset;
final bool largeArc;
final bool clockwise;
final double distance;
Path createPath() {
final Offset startP =
Offset(75 - distance + offset.dx, 75 - distance + offset.dy);
final Offset endP =
Offset(75.0 + distance + offset.dx, 75.0 + distance + offset.dy);
final Path path = Path();
path.moveTo(startP.dx, startP.dy);
path.arcToPoint(endP,
rotation: 60,
radius: const Radius.elliptical(40, 60),
largeArc: largeArc,
clockwise: clockwise);
return path;
}
}
| engine/lib/web_ui/test/html/path_to_svg_golden_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/path_to_svg_golden_test.dart",
"repo_id": "engine",
"token_count": 2943
} | 276 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'dart:async';
import 'dart:convert';
import 'dart:typed_data';
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/src/engine.dart';
import 'package:ui/ui.dart' as ui;
import '../../common/test_initialization.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
Future<void> testMain() async {
setUpUnitTests(withImplicitView: true);
group('loadFontFromList', () {
const String testFontUrl = '/assets/fonts/ahem.ttf';
tearDown(() {
domDocument.fonts!.clear();
});
test('returns normally from invalid font buffer', () async {
await expectLater(
() async => ui.loadFontFromList(Uint8List(0), fontFamily: 'test-font'),
returnsNormally
);
},
// TODO(hterkelsen): https://github.com/flutter/flutter/issues/56702
skip: browserEngine == BrowserEngine.webkit);
test('loads Blehm font from buffer', () async {
expect(_containsFontFamily('Blehm'), isFalse);
final ByteBuffer response = await httpFetchByteBuffer(testFontUrl);
await ui.loadFontFromList(response.asUint8List(), fontFamily: 'Blehm');
expect(_containsFontFamily('Blehm'), isTrue);
},
// TODO(hterkelsen): https://github.com/flutter/flutter/issues/56702
skip: browserEngine == BrowserEngine.webkit);
test('loading font should clear measurement caches', () async {
final EngineParagraphStyle style = EngineParagraphStyle();
const ui.ParagraphConstraints constraints =
ui.ParagraphConstraints(width: 30.0);
final CanvasParagraphBuilder canvasBuilder = CanvasParagraphBuilder(style);
canvasBuilder.addText('test');
// Triggers the measuring and verifies the ruler cache has been populated.
canvasBuilder.build().layout(constraints);
expect(Spanometer.rulers.length, 1);
// Now load a new font using loadFontFromList. This should clear the
// measurement cache.
final ByteBuffer response = await httpFetchByteBuffer(testFontUrl);
await ui.loadFontFromList(response.asUint8List(), fontFamily: 'Blehm');
// Verifies the font is loaded, and the cache is cleaned.
expect(_containsFontFamily('Blehm'), isTrue);
expect(Spanometer.rulers.length, 0);
},
// TODO(hterkelsen): https://github.com/flutter/flutter/issues/56702
skip: browserEngine == BrowserEngine.webkit);
test('loading font should send font change message', () async {
final ui.PlatformMessageCallback? oldHandler = ui.PlatformDispatcher.instance.onPlatformMessage;
String? actualName;
String? message;
ui.PlatformDispatcher.instance.onPlatformMessage = (String name, ByteData? data,
ui.PlatformMessageResponseCallback? callback) {
actualName = name;
final ByteBuffer buffer = data!.buffer;
final Uint8List list =
buffer.asUint8List(data.offsetInBytes, data.lengthInBytes);
message = utf8.decode(list);
};
final ByteBuffer response = await httpFetchByteBuffer(testFontUrl);
await ui.loadFontFromList(response.asUint8List(), fontFamily: 'Blehm');
final Completer<void> completer = Completer<void>();
domWindow.requestAnimationFrame((_) { completer.complete();});
await completer.future;
ui.PlatformDispatcher.instance.onPlatformMessage = oldHandler;
expect(actualName, 'flutter/system');
expect(message, '{"type":"fontsChange"}');
},
// TODO(hterkelsen): https://github.com/flutter/flutter/issues/56702
skip: browserEngine == BrowserEngine.webkit);
});
}
bool _containsFontFamily(String family) {
bool found = false;
domDocument.fonts!.forEach((DomFontFace fontFace,
DomFontFace fontFaceAgain, DomFontFaceSet fontFaceSet) {
if (fontFace.family == family) {
found = true;
}
});
return found;
}
| engine/lib/web_ui/test/html/text/font_loading_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/html/text/font_loading_test.dart",
"repo_id": "engine",
"token_count": 1468
} | 277 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/ui.dart';
import '../common/test_initialization.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
class NotAColor extends Color {
const NotAColor(super.value);
}
Future<void> testMain() async {
setUpUnitTests();
test('color accessors should work', () {
const Color foo = Color(0x12345678);
expect(foo.alpha, equals(0x12));
expect(foo.red, equals(0x34));
expect(foo.green, equals(0x56));
expect(foo.blue, equals(0x78));
});
test('paint set to black', () {
const Color c = Color(0x00000000);
final Paint p = Paint();
p.color = c;
expect(c.toString(), equals('Color(0x00000000)'));
});
test('color created with out of bounds value', () {
const Color c = Color(0x100 << 24);
final Paint p = Paint();
p.color = c;
});
test('color created with wildly out of bounds value', () {
const Color c = Color(1 << 1000000);
final Paint p = Paint();
p.color = c;
});
test('two colors are only == if they have the same runtime type', () {
expect(const Color(123), equals(const Color(123)));
expect(const Color(123), equals(const Color(123)));
expect(const Color(123), isNot(equals(const Color(321))));
expect(const Color(123), isNot(equals(const NotAColor(123))));
expect(const NotAColor(123), isNot(equals(const Color(123))));
expect(const NotAColor(123), equals(const NotAColor(123)));
});
test('Color.lerp', () {
expect(
Color.lerp(const Color(0x00000000), const Color(0xFFFFFFFF), 0.0),
const Color(0x00000000),
);
expect(
Color.lerp(const Color(0x00000000), const Color(0xFFFFFFFF), 0.5),
const Color(0x7F7F7F7F),
);
expect(
Color.lerp(const Color(0x00000000), const Color(0xFFFFFFFF), 1.0),
const Color(0xFFFFFFFF),
);
expect(
Color.lerp(const Color(0x00000000), const Color(0xFFFFFFFF), -0.1),
const Color(0x00000000),
);
expect(
Color.lerp(const Color(0x00000000), const Color(0xFFFFFFFF), 1.1),
const Color(0xFFFFFFFF),
);
});
test('Color.alphaBlend', () {
expect(
Color.alphaBlend(const Color(0x00000000), const Color(0x00000000)),
const Color(0x00000000),
);
expect(
Color.alphaBlend(const Color(0x00000000), const Color(0xFFFFFFFF)),
const Color(0xFFFFFFFF),
);
expect(
Color.alphaBlend(const Color(0xFFFFFFFF), const Color(0x00000000)),
const Color(0xFFFFFFFF),
);
expect(
Color.alphaBlend(const Color(0xFFFFFFFF), const Color(0xFFFFFFFF)),
const Color(0xFFFFFFFF),
);
expect(
Color.alphaBlend(const Color(0x80FFFFFF), const Color(0xFF000000)),
const Color(0xFF808080),
);
expect(
Color.alphaBlend(const Color(0x80808080), const Color(0xFFFFFFFF)),
const Color(0xFFBFBFBF),
);
expect(
Color.alphaBlend(const Color(0x80808080), const Color(0xFF000000)),
const Color(0xFF404040),
);
expect(
Color.alphaBlend(const Color(0x01020304), const Color(0xFF000000)),
const Color(0xFF000000),
);
expect(
Color.alphaBlend(const Color(0x11223344), const Color(0xFF000000)),
const Color(0xFF020304),
);
expect(
Color.alphaBlend(const Color(0x11223344), const Color(0x80000000)),
const Color(0x88040608),
);
});
test('compute gray luminance', () {
// Each color component is at 20%.
const Color lightGray = Color(0xFF333333);
// Relative luminance's formula is just the linearized color value for gray.
// ((0.2 + 0.055) / 1.055) ^ 2.4.
expect(lightGray.computeLuminance(), equals(0.033104766570885055));
});
test('compute color luminance', () {
const Color brightRed = Color(0xFFFF3B30);
// 0.2126 * ((1.0 + 0.055) / 1.055) ^ 2.4 +
// 0.7152 * ((0.23137254902 + 0.055) / 1.055) ^ 2.4 +
// 0.0722 * ((0.18823529411 + 0.055) / 1.055) ^ 2.4
expect(brightRed.computeLuminance(), equals(0.24601329637099723));
});
// Regression test for https://github.com/flutter/flutter/issues/41257
// CupertinoDynamicColor was overriding base class and calling super(0).
test('subclass of Color can override value', () {
const DynamicColorClass color = DynamicColorClass(0xF0E0D0C0);
expect(color.value, 0xF0E0D0C0);
// Call base class member, make sure it uses overridden value.
expect(color.red, 0xE0);
});
test('Paint converts Color subclasses to plain Color', () {
const DynamicColorClass color = DynamicColorClass(0xF0E0D0C0);
final Paint paint = Paint()..color = color;
expect(paint.color.runtimeType, Color);
});
}
class DynamicColorClass extends Color {
const DynamicColorClass(int newValue) : _newValue = newValue, super(0);
final int _newValue;
@override
int get value => _newValue;
}
| engine/lib/web_ui/test/ui/color_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/ui/color_test.dart",
"repo_id": "engine",
"token_count": 1984
} | 278 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
import 'package:test/bootstrap/browser.dart';
import 'package:test/test.dart';
import 'package:ui/ui.dart' as ui;
import '../common/test_initialization.dart';
void main() {
internalBootstrapBrowserTest(() => testMain);
}
Future<void> testMain() async {
setUpUnitTests();
test('Picture construction invokes onCreate once', () async {
int onCreateInvokedCount = 0;
ui.Picture? createdPicture;
ui.Picture.onCreate = (ui.Picture picture) {
onCreateInvokedCount++;
createdPicture = picture;
};
final ui.Picture picture1 = _createPicture();
expect(onCreateInvokedCount, 1);
expect(createdPicture, picture1);
final ui.Picture picture2 = _createPicture();
expect(onCreateInvokedCount, 2);
expect(createdPicture, picture2);
ui.Picture.onCreate = null;
});
test('approximateBytesUsed is available for onCreate', () async {
int pictureSize = -1;
ui.Picture.onCreate = (ui.Picture picture) =>
pictureSize = picture.approximateBytesUsed;
_createPicture();
expect(pictureSize >= 0, true);
ui.Picture.onCreate = null;
});
test('dispose() invokes onDispose once', () async {
int onDisposeInvokedCount = 0;
ui.Picture? disposedPicture;
ui.Picture.onDispose = (ui.Picture picture) {
onDisposeInvokedCount++;
disposedPicture = picture;
};
final ui.Picture picture1 = _createPicture()..dispose();
expect(onDisposeInvokedCount, 1);
expect(disposedPicture, picture1);
final ui.Picture picture2 = _createPicture()..dispose();
expect(onDisposeInvokedCount, 2);
expect(disposedPicture, picture2);
ui.Picture.onDispose = null;
});
}
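// Records a minimal picture (a single clipRect call) so the tests above can
// exercise the Picture.onCreate and Picture.onDispose callbacks.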
ui.Picture _createPicture() {
final ui.PictureRecorder recorder = ui.PictureRecorder();
final ui.Canvas canvas = ui.Canvas(recorder);
const ui.Rect rect = ui.Rect.fromLTWH(0.0, 0.0, 100.0, 100.0);
canvas.clipRect(rect);
return recorder.endRecording();
}
| engine/lib/web_ui/test/ui/picture_test.dart/0 | {
"file_path": "engine/lib/web_ui/test/ui/picture_test.dart",
"repo_id": "engine",
"token_count": 740
} | 279 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/runtime/dart_isolate_group_data.h"
#include <utility>
#include "flutter/runtime/dart_snapshot.h"
namespace flutter {
DartIsolateGroupData::DartIsolateGroupData(
const Settings& settings,
fml::RefPtr<const DartSnapshot> isolate_snapshot,
std::string advisory_script_uri,
std::string advisory_script_entrypoint,
const ChildIsolatePreparer& child_isolate_preparer,
const fml::closure& isolate_create_callback,
const fml::closure& isolate_shutdown_callback)
: settings_(settings),
isolate_snapshot_(std::move(isolate_snapshot)),
advisory_script_uri_(std::move(advisory_script_uri)),
advisory_script_entrypoint_(std::move(advisory_script_entrypoint)),
child_isolate_preparer_(child_isolate_preparer),
isolate_create_callback_(isolate_create_callback),
isolate_shutdown_callback_(isolate_shutdown_callback) {
FML_DCHECK(isolate_snapshot_) << "Must contain a valid isolate snapshot.";
}
DartIsolateGroupData::~DartIsolateGroupData() = default;
const Settings& DartIsolateGroupData::GetSettings() const {
return settings_;
}
fml::RefPtr<const DartSnapshot> DartIsolateGroupData::GetIsolateSnapshot()
const {
return isolate_snapshot_;
}
const std::string& DartIsolateGroupData::GetAdvisoryScriptURI() const {
return advisory_script_uri_;
}
const std::string& DartIsolateGroupData::GetAdvisoryScriptEntrypoint() const {
return advisory_script_entrypoint_;
}
ChildIsolatePreparer DartIsolateGroupData::GetChildIsolatePreparer() const {
std::scoped_lock lock(child_isolate_preparer_mutex_);
return child_isolate_preparer_;
}
const fml::closure& DartIsolateGroupData::GetIsolateCreateCallback() const {
return isolate_create_callback_;
}
const fml::closure& DartIsolateGroupData::GetIsolateShutdownCallback() const {
return isolate_shutdown_callback_;
}
void DartIsolateGroupData::SetChildIsolatePreparer(
const ChildIsolatePreparer& value) {
std::scoped_lock lock(child_isolate_preparer_mutex_);
child_isolate_preparer_ = value;
}
void DartIsolateGroupData::SetPlatformMessageHandler(
int64_t root_isolate_token,
std::weak_ptr<PlatformMessageHandler> handler) {
std::scoped_lock lock(platform_message_handlers_mutex_);
platform_message_handlers_[root_isolate_token] = handler;
}
std::weak_ptr<PlatformMessageHandler>
DartIsolateGroupData::GetPlatformMessageHandler(
int64_t root_isolate_token) const {
std::scoped_lock lock(platform_message_handlers_mutex_);
auto it = platform_message_handlers_.find(root_isolate_token);
return it == platform_message_handlers_.end()
? std::weak_ptr<PlatformMessageHandler>()
: it->second;
}
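// Keeps the given kernel buffer mapping alive for the lifetime of the isolate
// group.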
void DartIsolateGroupData::AddKernelBuffer(
const std::shared_ptr<const fml::Mapping>& buffer) {
kernel_buffers_.push_back(buffer);
}
std::vector<std::shared_ptr<const fml::Mapping>>
DartIsolateGroupData::GetKernelBuffers() const {
return kernel_buffers_;
}
} // namespace flutter
| engine/runtime/dart_isolate_group_data.cc/0 | {
"file_path": "engine/runtime/dart_isolate_group_data.cc",
"repo_id": "engine",
"token_count": 1072
} | 280 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/runtime/dart_vm_data.h"
#include <utility>
namespace flutter {
std::shared_ptr<const DartVMData> DartVMData::Create(
const Settings& settings,
fml::RefPtr<const DartSnapshot> vm_snapshot,
fml::RefPtr<const DartSnapshot> isolate_snapshot) {
if (!vm_snapshot || !vm_snapshot->IsValid()) {
// Caller did not provide a valid VM snapshot. Attempt to infer one
// from the settings.
vm_snapshot = DartSnapshot::VMSnapshotFromSettings(settings);
if (!vm_snapshot) {
FML_LOG(ERROR)
<< "VM snapshot invalid and could not be inferred from settings.";
return {};
}
}
if (!isolate_snapshot || !isolate_snapshot->IsValid()) {
// Caller did not provide a valid isolate snapshot. Attempt to infer one
// from the settings.
isolate_snapshot = DartSnapshot::IsolateSnapshotFromSettings(settings);
if (!isolate_snapshot) {
FML_LOG(ERROR) << "Isolate snapshot invalid and could not be inferred "
"from settings.";
return {};
}
}
fml::RefPtr<const DartSnapshot> service_isolate_snapshot =
DartSnapshot::VMServiceIsolateSnapshotFromSettings(settings);
return std::shared_ptr<const DartVMData>(new DartVMData(
settings, //
std::move(vm_snapshot), //
std::move(isolate_snapshot), //
std::move(service_isolate_snapshot) //
));
}
DartVMData::DartVMData(const Settings& settings,
fml::RefPtr<const DartSnapshot> vm_snapshot,
fml::RefPtr<const DartSnapshot> isolate_snapshot,
fml::RefPtr<const DartSnapshot> service_isolate_snapshot)
: settings_(settings),
vm_snapshot_(std::move(vm_snapshot)),
isolate_snapshot_(std::move(isolate_snapshot)),
service_isolate_snapshot_(std::move(service_isolate_snapshot)) {}
DartVMData::~DartVMData() = default;
const Settings& DartVMData::GetSettings() const {
return settings_;
}
const DartSnapshot& DartVMData::GetVMSnapshot() const {
return *vm_snapshot_;
}
fml::RefPtr<const DartSnapshot> DartVMData::GetIsolateSnapshot() const {
return isolate_snapshot_;
}
fml::RefPtr<const DartSnapshot> DartVMData::GetServiceIsolateSnapshot() const {
// Use the specialized snapshot for the service isolate if the embedder
// provides one. Otherwise, use the application snapshot.
return service_isolate_snapshot_ ? service_isolate_snapshot_
: isolate_snapshot_;
}
bool DartVMData::GetServiceIsolateSnapshotNullSafety() const {
if (service_isolate_snapshot_) {
// The specialized snapshot for the service isolate is always built
// using null safety. However, calling Dart_DetectNullSafety on
// the service isolate snapshot will not work as expected - it will
// instead return a cached value representing the app snapshot.
return true;
} else {
return isolate_snapshot_->IsNullSafetyEnabled(nullptr);
}
}
} // namespace flutter
| engine/runtime/dart_vm_data.cc/0 | {
"file_path": "engine/runtime/dart_vm_data.cc",
"repo_id": "engine",
"token_count": 1192
} | 281 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_RUNTIME_TEST_FONT_DATA_H_
#define FLUTTER_RUNTIME_TEST_FONT_DATA_H_
#include <memory>
#include <string>
#include <vector>
#include "third_party/skia/include/core/SkStream.h"
#include "third_party/skia/include/core/SkTypeface.h"
namespace flutter {
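// Typefaces and family names for the fonts bundled with the engine for use in
// tests.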
std::vector<sk_sp<SkTypeface>> GetTestFontData();
std::vector<std::string> GetTestFontFamilyNames();
} // namespace flutter
#endif // FLUTTER_RUNTIME_TEST_FONT_DATA_H_
| engine/runtime/test_font_data.h/0 | {
"file_path": "engine/runtime/test_font_data.h",
"repo_id": "engine",
"token_count": 220
} | 282 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/common/display_manager.h"
#include "flutter/fml/logging.h"
#include "flutter/fml/macros.h"
namespace flutter {
DisplayManager::DisplayManager() = default;
DisplayManager::~DisplayManager() = default;
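// Returns the refresh rate of the first registered display, or
// kUnknownDisplayRefreshRate if no display updates have been received yet.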
double DisplayManager::GetMainDisplayRefreshRate() const {
std::scoped_lock lock(displays_mutex_);
if (displays_.empty()) {
return kUnknownDisplayRefreshRate;
} else {
return displays_[0]->GetRefreshRate();
}
}
void DisplayManager::HandleDisplayUpdates(
std::vector<std::unique_ptr<Display>> displays) {
FML_DCHECK(!displays.empty());
std::scoped_lock lock(displays_mutex_);
displays_ = std::move(displays);
}
} // namespace flutter
| engine/shell/common/display_manager.cc/0 | {
"file_path": "engine/shell/common/display_manager.cc",
"repo_id": "engine",
"token_count": 280
} | 283 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#define FML_USED_ON_EMBEDDER
#include "flutter/shell/common/pipeline.h"
#include <functional>
#include <future>
#include <memory>
#include "gtest/gtest.h"
namespace flutter {
namespace testing {
using IntPipeline = Pipeline<int>;
using Continuation = IntPipeline::ProducerContinuation;
TEST(PipelineTest, ConsumeOneVal) {
std::shared_ptr<IntPipeline> pipeline = std::make_shared<IntPipeline>(2);
Continuation continuation = pipeline->Produce();
const int test_val = 1;
PipelineProduceResult result =
continuation.Complete(std::make_unique<int>(test_val));
ASSERT_EQ(result.success, true);
ASSERT_EQ(result.is_first_item, true);
PipelineConsumeResult consume_result = pipeline->Consume(
[&test_val](std::unique_ptr<int> v) { ASSERT_EQ(*v, test_val); });
ASSERT_EQ(consume_result, PipelineConsumeResult::Done);
}
TEST(PipelineTest, ContinuationCanOnlyBeUsedOnce) {
std::shared_ptr<IntPipeline> pipeline = std::make_shared<IntPipeline>(2);
Continuation continuation = pipeline->Produce();
const int test_val = 1;
PipelineProduceResult result =
continuation.Complete(std::make_unique<int>(test_val));
ASSERT_EQ(result.success, true);
ASSERT_EQ(result.is_first_item, true);
PipelineConsumeResult consume_result_1 = pipeline->Consume(
[&test_val](std::unique_ptr<int> v) { ASSERT_EQ(*v, test_val); });
result = continuation.Complete(std::make_unique<int>(test_val));
ASSERT_EQ(result.success, false);
ASSERT_EQ(consume_result_1, PipelineConsumeResult::Done);
PipelineConsumeResult consume_result_2 =
pipeline->Consume([](std::unique_ptr<int> v) { FAIL(); });
result = continuation.Complete(std::make_unique<int>(test_val));
ASSERT_EQ(result.success, false);
ASSERT_EQ(consume_result_2, PipelineConsumeResult::NoneAvailable);
}
TEST(PipelineTest, PushingMoreThanDepthCompletesFirstSubmission) {
const int depth = 1;
std::shared_ptr<IntPipeline> pipeline = std::make_shared<IntPipeline>(depth);
Continuation continuation_1 = pipeline->Produce();
Continuation continuation_2 = pipeline->Produce();
const int test_val_1 = 1, test_val_2 = 2;
PipelineProduceResult result =
continuation_1.Complete(std::make_unique<int>(test_val_1));
ASSERT_EQ(result.success, true);
ASSERT_EQ(result.is_first_item, true);
result = continuation_2.Complete(std::make_unique<int>(test_val_2));
ASSERT_EQ(result.success, false);
PipelineConsumeResult consume_result_1 = pipeline->Consume(
[&test_val_1](std::unique_ptr<int> v) { ASSERT_EQ(*v, test_val_1); });
ASSERT_EQ(consume_result_1, PipelineConsumeResult::Done);
}
TEST(PipelineTest, PushingMultiProcessesInOrder) {
const int depth = 2;
std::shared_ptr<IntPipeline> pipeline = std::make_shared<IntPipeline>(depth);
Continuation continuation_1 = pipeline->Produce();
Continuation continuation_2 = pipeline->Produce();
const int test_val_1 = 1, test_val_2 = 2;
PipelineProduceResult result =
continuation_1.Complete(std::make_unique<int>(test_val_1));
ASSERT_EQ(result.success, true);
ASSERT_EQ(result.is_first_item, true);
result = continuation_2.Complete(std::make_unique<int>(test_val_2));
ASSERT_EQ(result.success, true);
ASSERT_EQ(result.is_first_item, false);
PipelineConsumeResult consume_result_1 = pipeline->Consume(
[&test_val_1](std::unique_ptr<int> v) { ASSERT_EQ(*v, test_val_1); });
ASSERT_EQ(consume_result_1, PipelineConsumeResult::MoreAvailable);
PipelineConsumeResult consume_result_2 = pipeline->Consume(
[&test_val_2](std::unique_ptr<int> v) { ASSERT_EQ(*v, test_val_2); });
ASSERT_EQ(consume_result_2, PipelineConsumeResult::Done);
}
TEST(PipelineTest, ProduceIfEmptyDoesNotConsumeWhenQueueIsNotEmpty) {
const int depth = 2;
std::shared_ptr<IntPipeline> pipeline = std::make_shared<IntPipeline>(depth);
Continuation continuation_1 = pipeline->Produce();
Continuation continuation_2 = pipeline->ProduceIfEmpty();
const int test_val_1 = 1, test_val_2 = 2;
PipelineProduceResult result =
continuation_1.Complete(std::make_unique<int>(test_val_1));
ASSERT_EQ(result.success, true);
ASSERT_EQ(result.is_first_item, true);
result = continuation_2.Complete(std::make_unique<int>(test_val_2));
ASSERT_EQ(result.success, false);
PipelineConsumeResult consume_result_1 = pipeline->Consume(
[&test_val_1](std::unique_ptr<int> v) { ASSERT_EQ(*v, test_val_1); });
ASSERT_EQ(consume_result_1, PipelineConsumeResult::Done);
}
TEST(PipelineTest, ProduceIfEmptySuccessfulIfQueueIsEmpty) {
const int depth = 1;
std::shared_ptr<IntPipeline> pipeline = std::make_shared<IntPipeline>(depth);
Continuation continuation_1 = pipeline->ProduceIfEmpty();
const int test_val_1 = 1;
PipelineProduceResult result =
continuation_1.Complete(std::make_unique<int>(test_val_1));
ASSERT_EQ(result.success, true);
ASSERT_EQ(result.is_first_item, true);
PipelineConsumeResult consume_result_1 = pipeline->Consume(
[&test_val_1](std::unique_ptr<int> v) { ASSERT_EQ(*v, test_val_1); });
ASSERT_EQ(consume_result_1, PipelineConsumeResult::Done);
}
} // namespace testing
} // namespace flutter
| engine/shell/common/pipeline_unittests.cc/0 | {
"file_path": "engine/shell/common/pipeline_unittests.cc",
"repo_id": "engine",
"token_count": 1891
} | 284 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#define RAPIDJSON_HAS_STDSTRING 1
#include "flutter/shell/common/shell.h"
#include <memory>
#include <sstream>
#include <utility>
#include <vector>
#include "flutter/assets/directory_asset_bundle.h"
#include "flutter/common/constants.h"
#include "flutter/common/graphics/persistent_cache.h"
#include "flutter/fml/base32.h"
#include "flutter/fml/file.h"
#include "flutter/fml/icu_util.h"
#include "flutter/fml/log_settings.h"
#include "flutter/fml/logging.h"
#include "flutter/fml/make_copyable.h"
#include "flutter/fml/message_loop.h"
#include "flutter/fml/paths.h"
#include "flutter/fml/trace_event.h"
#include "flutter/runtime/dart_vm.h"
#include "flutter/shell/common/base64.h"
#include "flutter/shell/common/engine.h"
#include "flutter/shell/common/skia_event_tracer_impl.h"
#include "flutter/shell/common/switches.h"
#include "flutter/shell/common/vsync_waiter.h"
#include "impeller/runtime_stage/runtime_stage.h"
#include "rapidjson/stringbuffer.h"
#include "rapidjson/writer.h"
#include "third_party/dart/runtime/include/dart_tools_api.h"
#include "third_party/skia/include/codec/SkBmpDecoder.h"
#include "third_party/skia/include/codec/SkCodec.h"
#include "third_party/skia/include/codec/SkGifDecoder.h"
#include "third_party/skia/include/codec/SkIcoDecoder.h"
#include "third_party/skia/include/codec/SkJpegDecoder.h"
#include "third_party/skia/include/codec/SkPngDecoder.h"
#include "third_party/skia/include/codec/SkWbmpDecoder.h"
#include "third_party/skia/include/codec/SkWebpDecoder.h"
#include "third_party/skia/include/core/SkGraphics.h"
#include "third_party/tonic/common/log.h"
namespace flutter {
constexpr char kSkiaChannel[] = "flutter/skia";
constexpr char kSystemChannel[] = "flutter/system";
constexpr char kTypeKey[] = "type";
constexpr char kFontChange[] = "fontsChange";
namespace {
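// The default engine factory used by Shell::Create. Shell::Spawn supplies a
// different factory that spawns from the already-running engine instead.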
std::unique_ptr<Engine> CreateEngine(
Engine::Delegate& delegate,
const PointerDataDispatcherMaker& dispatcher_maker,
DartVM& vm,
const fml::RefPtr<const DartSnapshot>& isolate_snapshot,
const TaskRunners& task_runners,
const PlatformData& platform_data,
const Settings& settings,
std::unique_ptr<Animator> animator,
const fml::WeakPtr<IOManager>& io_manager,
const fml::RefPtr<SkiaUnrefQueue>& unref_queue,
const fml::TaskRunnerAffineWeakPtr<SnapshotDelegate>& snapshot_delegate,
const std::shared_ptr<VolatilePathTracker>& volatile_path_tracker,
const std::shared_ptr<fml::SyncSwitch>& gpu_disabled_switch,
impeller::RuntimeStageBackend runtime_stage_backend) {
return std::make_unique<Engine>(delegate, //
dispatcher_maker, //
vm, //
isolate_snapshot, //
task_runners, //
platform_data, //
settings, //
std::move(animator), //
io_manager, //
unref_queue, //
snapshot_delegate, //
volatile_path_tracker, //
gpu_disabled_switch, //
runtime_stage_backend);
}
void RegisterCodecsWithSkia() {
// These are listed in the order in which decoding will be attempted.
// If we have data to back it up, we can order these by "frequency used in
// the wild" for a very small performance bump, but for now we mirror the
// order Skia had them in.
SkCodecs::Register(SkPngDecoder::Decoder());
SkCodecs::Register(SkJpegDecoder::Decoder());
SkCodecs::Register(SkWebpDecoder::Decoder());
SkCodecs::Register(SkGifDecoder::Decoder());
SkCodecs::Register(SkBmpDecoder::Decoder());
SkCodecs::Register(SkWbmpDecoder::Decoder());
SkCodecs::Register(SkIcoDecoder::Decoder());
}
// Though there can be multiple shells, some settings apply to all components in
// the process. These have to be set up before the shell or any of its
// sub-components can be initialized. In a perfect world, this would be empty.
// TODO(chinmaygarde): The unfortunate side effect of this call is that settings
// that cause shell initialization failures will still lead to some of their
// settings being applied.
void PerformInitializationTasks(Settings& settings) {
{
fml::LogSettings log_settings;
log_settings.min_log_level =
settings.verbose_logging ? fml::kLogInfo : fml::kLogError;
fml::SetLogSettings(log_settings);
}
static std::once_flag gShellSettingsInitialization = {};
std::call_once(gShellSettingsInitialization, [&settings] {
tonic::SetLogHandler(
[](const char* message) { FML_LOG(ERROR) << message; });
if (settings.trace_skia) {
InitSkiaEventTracer(settings.trace_skia, settings.trace_skia_allowlist);
}
if (!settings.trace_allowlist.empty()) {
fml::tracing::TraceSetAllowlist(settings.trace_allowlist);
}
if (!settings.skia_deterministic_rendering_on_cpu) {
SkGraphics::Init();
} else {
FML_DLOG(INFO) << "Skia deterministic rendering is enabled.";
}
RegisterCodecsWithSkia();
if (settings.icu_initialization_required) {
if (!settings.icu_data_path.empty()) {
fml::icu::InitializeICU(settings.icu_data_path);
} else if (settings.icu_mapper) {
fml::icu::InitializeICUFromMapping(settings.icu_mapper());
} else {
FML_DLOG(WARNING) << "Skipping ICU initialization in the shell.";
}
}
});
PersistentCache::SetCacheSkSL(settings.cache_sksl);
}
} // namespace
std::pair<DartVMRef, fml::RefPtr<const DartSnapshot>>
Shell::InferVmInitDataFromSettings(Settings& settings) {
// Always use the `vm_snapshot` and `isolate_snapshot` provided by the
// settings to launch the VM. If the VM is already running, the snapshot
// arguments are ignored.
auto vm_snapshot = DartSnapshot::VMSnapshotFromSettings(settings);
auto isolate_snapshot = DartSnapshot::IsolateSnapshotFromSettings(settings);
auto vm = DartVMRef::Create(settings, vm_snapshot, isolate_snapshot);
// If the settings did not specify an `isolate_snapshot`, fall back to the
// one the VM was launched with.
if (!isolate_snapshot) {
isolate_snapshot = vm->GetVMData()->GetIsolateSnapshot();
}
return {std::move(vm), isolate_snapshot};
}
std::unique_ptr<Shell> Shell::Create(
const PlatformData& platform_data,
const TaskRunners& task_runners,
Settings settings,
const Shell::CreateCallback<PlatformView>& on_create_platform_view,
const Shell::CreateCallback<Rasterizer>& on_create_rasterizer,
bool is_gpu_disabled) {
// This must come first as it initializes tracing.
PerformInitializationTasks(settings);
TRACE_EVENT0("flutter", "Shell::Create");
auto [vm, isolate_snapshot] = InferVmInitDataFromSettings(settings);
auto resource_cache_limit_calculator =
std::make_shared<ResourceCacheLimitCalculator>(
settings.resource_cache_max_bytes_threshold);
return CreateWithSnapshot(platform_data, //
task_runners, //
/*parent_thread_merger=*/nullptr, //
/*parent_io_manager=*/nullptr, //
resource_cache_limit_calculator, //
settings, //
std::move(vm), //
std::move(isolate_snapshot), //
on_create_platform_view, //
on_create_rasterizer, //
CreateEngine, is_gpu_disabled);
}
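// Selects the runtime stage backend matching the Impeller context's backend,
// falling back to SkSL when Impeller is not in use.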
static impeller::RuntimeStageBackend DetermineRuntimeStageBackend(
const std::shared_ptr<impeller::Context>& impeller_context) {
if (!impeller_context) {
return impeller::RuntimeStageBackend::kSkSL;
}
switch (impeller_context->GetBackendType()) {
case impeller::Context::BackendType::kMetal:
return impeller::RuntimeStageBackend::kMetal;
case impeller::Context::BackendType::kOpenGLES:
return impeller::RuntimeStageBackend::kOpenGLES;
case impeller::Context::BackendType::kVulkan:
return impeller::RuntimeStageBackend::kVulkan;
}
}
std::unique_ptr<Shell> Shell::CreateShellOnPlatformThread(
DartVMRef vm,
fml::RefPtr<fml::RasterThreadMerger> parent_merger,
std::shared_ptr<ShellIOManager> parent_io_manager,
const std::shared_ptr<ResourceCacheLimitCalculator>&
resource_cache_limit_calculator,
const TaskRunners& task_runners,
const PlatformData& platform_data,
const Settings& settings,
fml::RefPtr<const DartSnapshot> isolate_snapshot,
const Shell::CreateCallback<PlatformView>& on_create_platform_view,
const Shell::CreateCallback<Rasterizer>& on_create_rasterizer,
const Shell::EngineCreateCallback& on_create_engine,
bool is_gpu_disabled) {
if (!task_runners.IsValid()) {
FML_LOG(ERROR) << "Task runners to run the shell were invalid.";
return nullptr;
}
auto shell = std::unique_ptr<Shell>(
new Shell(std::move(vm), task_runners, std::move(parent_merger),
resource_cache_limit_calculator, settings,
std::make_shared<VolatilePathTracker>(
task_runners.GetUITaskRunner(),
!settings.skia_deterministic_rendering_on_cpu),
is_gpu_disabled));
// Create the platform view on the platform thread (this thread).
auto platform_view = on_create_platform_view(*shell.get());
if (!platform_view || !platform_view->GetWeakPtr()) {
return nullptr;
}
// Create the rasterizer on the raster thread.
std::promise<std::unique_ptr<Rasterizer>> rasterizer_promise;
auto rasterizer_future = rasterizer_promise.get_future();
std::promise<fml::TaskRunnerAffineWeakPtr<SnapshotDelegate>>
snapshot_delegate_promise;
auto snapshot_delegate_future = snapshot_delegate_promise.get_future();
fml::TaskRunner::RunNowOrPostTask(
task_runners.GetRasterTaskRunner(),
[&rasterizer_promise, //
&snapshot_delegate_promise,
on_create_rasterizer, //
shell = shell.get(), //
impeller_context = platform_view->GetImpellerContext() //
]() {
TRACE_EVENT0("flutter", "ShellSetupGPUSubsystem");
std::unique_ptr<Rasterizer> rasterizer(on_create_rasterizer(*shell));
rasterizer->SetImpellerContext(impeller_context);
snapshot_delegate_promise.set_value(rasterizer->GetSnapshotDelegate());
rasterizer_promise.set_value(std::move(rasterizer));
});
// Ask the platform view for the vsync waiter. This will be used by the engine
// to create the animator.
auto vsync_waiter = platform_view->CreateVSyncWaiter();
if (!vsync_waiter) {
return nullptr;
}
// Create the IO manager on the IO thread. The IO manager must be initialized
// first because it has state that the other subsystems depend on: it must be
// booted and the necessary references obtained before the other subsystems
// can be initialized.
std::promise<std::shared_ptr<ShellIOManager>> io_manager_promise;
auto io_manager_future = io_manager_promise.get_future();
std::promise<fml::WeakPtr<ShellIOManager>> weak_io_manager_promise;
auto weak_io_manager_future = weak_io_manager_promise.get_future();
std::promise<fml::RefPtr<SkiaUnrefQueue>> unref_queue_promise;
auto unref_queue_future = unref_queue_promise.get_future();
auto io_task_runner = shell->GetTaskRunners().GetIOTaskRunner();
// The platform_view will be stored into shell's platform_view_ in
// shell->Setup(std::move(platform_view), ...) at the end.
PlatformView* platform_view_ptr = platform_view.get();
fml::TaskRunner::RunNowOrPostTask(
io_task_runner,
[&io_manager_promise, //
&weak_io_manager_promise, //
&parent_io_manager, //
&unref_queue_promise, //
platform_view_ptr, //
io_task_runner, //
is_backgrounded_sync_switch = shell->GetIsGpuDisabledSyncSwitch() //
]() {
TRACE_EVENT0("flutter", "ShellSetupIOSubsystem");
std::shared_ptr<ShellIOManager> io_manager;
if (parent_io_manager) {
io_manager = parent_io_manager;
} else {
io_manager = std::make_shared<ShellIOManager>(
platform_view_ptr->CreateResourceContext(), // resource context
is_backgrounded_sync_switch, // sync switch
io_task_runner, // unref queue task runner
platform_view_ptr->GetImpellerContext() // impeller context
);
}
weak_io_manager_promise.set_value(io_manager->GetWeakPtr());
unref_queue_promise.set_value(io_manager->GetSkiaUnrefQueue());
io_manager_promise.set_value(io_manager);
});
// Send dispatcher_maker to the engine constructor because shell won't have
// platform_view set until Shell::Setup is called later.
auto dispatcher_maker = platform_view->GetDispatcherMaker();
// Create the engine on the UI thread.
std::promise<std::unique_ptr<Engine>> engine_promise;
auto engine_future = engine_promise.get_future();
fml::TaskRunner::RunNowOrPostTask(
shell->GetTaskRunners().GetUITaskRunner(),
fml::MakeCopyable([&engine_promise, //
shell = shell.get(), //
&dispatcher_maker, //
&platform_data, //
isolate_snapshot = std::move(isolate_snapshot), //
vsync_waiter = std::move(vsync_waiter), //
&weak_io_manager_future, //
&snapshot_delegate_future, //
&unref_queue_future, //
&on_create_engine,
runtime_stage_backend = DetermineRuntimeStageBackend(
platform_view->GetImpellerContext())]() mutable {
TRACE_EVENT0("flutter", "ShellSetupUISubsystem");
const auto& task_runners = shell->GetTaskRunners();
// The animator is owned by the UI thread but it gets its vsync pulses
// from the platform.
auto animator = std::make_unique<Animator>(*shell, task_runners,
std::move(vsync_waiter));
engine_promise.set_value(on_create_engine(
*shell, //
dispatcher_maker, //
*shell->GetDartVM(), //
std::move(isolate_snapshot), //
task_runners, //
platform_data, //
shell->GetSettings(), //
std::move(animator), //
weak_io_manager_future.get(), //
unref_queue_future.get(), //
snapshot_delegate_future.get(), //
shell->volatile_path_tracker_, //
shell->is_gpu_disabled_sync_switch_, //
runtime_stage_backend //
));
}));
if (!shell->Setup(std::move(platform_view), //
engine_future.get(), //
rasterizer_future.get(), //
io_manager_future.get()) //
) {
return nullptr;
}
return shell;
}
std::unique_ptr<Shell> Shell::CreateWithSnapshot(
const PlatformData& platform_data,
const TaskRunners& task_runners,
const fml::RefPtr<fml::RasterThreadMerger>& parent_thread_merger,
const std::shared_ptr<ShellIOManager>& parent_io_manager,
const std::shared_ptr<ResourceCacheLimitCalculator>&
resource_cache_limit_calculator,
Settings settings,
DartVMRef vm,
fml::RefPtr<const DartSnapshot> isolate_snapshot,
const Shell::CreateCallback<PlatformView>& on_create_platform_view,
const Shell::CreateCallback<Rasterizer>& on_create_rasterizer,
const Shell::EngineCreateCallback& on_create_engine,
bool is_gpu_disabled) {
// This must come first as it initializes tracing.
PerformInitializationTasks(settings);
TRACE_EVENT0("flutter", "Shell::CreateWithSnapshot");
const bool callbacks_valid =
on_create_platform_view && on_create_rasterizer && on_create_engine;
if (!task_runners.IsValid() || !callbacks_valid) {
return nullptr;
}
fml::AutoResetWaitableEvent latch;
std::unique_ptr<Shell> shell;
auto platform_task_runner = task_runners.GetPlatformTaskRunner();
fml::TaskRunner::RunNowOrPostTask(
platform_task_runner,
fml::MakeCopyable([&latch, //
&shell, //
parent_thread_merger, //
parent_io_manager, //
resource_cache_limit_calculator, //
task_runners = task_runners, //
platform_data = platform_data, //
settings = settings, //
vm = std::move(vm), //
isolate_snapshot = std::move(isolate_snapshot), //
on_create_platform_view = on_create_platform_view, //
on_create_rasterizer = on_create_rasterizer, //
on_create_engine = on_create_engine,
is_gpu_disabled]() mutable {
shell = CreateShellOnPlatformThread(std::move(vm), //
parent_thread_merger, //
parent_io_manager, //
resource_cache_limit_calculator, //
task_runners, //
platform_data, //
settings, //
std::move(isolate_snapshot), //
on_create_platform_view, //
on_create_rasterizer, //
on_create_engine, is_gpu_disabled);
latch.Signal();
}));
latch.Wait();
return shell;
}
Shell::Shell(DartVMRef vm,
const TaskRunners& task_runners,
fml::RefPtr<fml::RasterThreadMerger> parent_merger,
const std::shared_ptr<ResourceCacheLimitCalculator>&
resource_cache_limit_calculator,
const Settings& settings,
std::shared_ptr<VolatilePathTracker> volatile_path_tracker,
bool is_gpu_disabled)
: task_runners_(task_runners),
parent_raster_thread_merger_(std::move(parent_merger)),
resource_cache_limit_calculator_(resource_cache_limit_calculator),
settings_(settings),
vm_(std::move(vm)),
is_gpu_disabled_sync_switch_(new fml::SyncSwitch(is_gpu_disabled)),
volatile_path_tracker_(std::move(volatile_path_tracker)),
weak_factory_gpu_(nullptr),
weak_factory_(this) {
FML_CHECK(!settings.enable_software_rendering || !settings.enable_impeller)
<< "Software rendering is incompatible with Impeller.";
FML_CHECK(vm_) << "Must have access to VM to create a shell.";
FML_DCHECK(task_runners_.IsValid());
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
display_manager_ = std::make_unique<DisplayManager>();
resource_cache_limit_calculator->AddResourceCacheLimitItem(
weak_factory_.GetWeakPtr());
// Generate a WeakPtrFactory for use with the raster thread. This does not
// need to wait on a latch because it can only ever be used from the raster
// thread from this class, so we have ordering guarantees.
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetRasterTaskRunner(), fml::MakeCopyable([this]() mutable {
this->weak_factory_gpu_ =
std::make_unique<fml::TaskRunnerAffineWeakPtrFactory<Shell>>(this);
}));
// Install service protocol handlers.
service_protocol_handlers_[ServiceProtocol::kScreenshotExtensionName] = {
task_runners_.GetRasterTaskRunner(),
std::bind(&Shell::OnServiceProtocolScreenshot, this,
std::placeholders::_1, std::placeholders::_2)};
service_protocol_handlers_[ServiceProtocol::kScreenshotSkpExtensionName] = {
task_runners_.GetRasterTaskRunner(),
std::bind(&Shell::OnServiceProtocolScreenshotSKP, this,
std::placeholders::_1, std::placeholders::_2)};
service_protocol_handlers_[ServiceProtocol::kRunInViewExtensionName] = {
task_runners_.GetUITaskRunner(),
std::bind(&Shell::OnServiceProtocolRunInView, this, std::placeholders::_1,
std::placeholders::_2)};
service_protocol_handlers_
[ServiceProtocol::kFlushUIThreadTasksExtensionName] = {
task_runners_.GetUITaskRunner(),
std::bind(&Shell::OnServiceProtocolFlushUIThreadTasks, this,
std::placeholders::_1, std::placeholders::_2)};
service_protocol_handlers_
[ServiceProtocol::kSetAssetBundlePathExtensionName] = {
task_runners_.GetUITaskRunner(),
std::bind(&Shell::OnServiceProtocolSetAssetBundlePath, this,
std::placeholders::_1, std::placeholders::_2)};
service_protocol_handlers_
[ServiceProtocol::kGetDisplayRefreshRateExtensionName] = {
task_runners_.GetUITaskRunner(),
std::bind(&Shell::OnServiceProtocolGetDisplayRefreshRate, this,
std::placeholders::_1, std::placeholders::_2)};
service_protocol_handlers_[ServiceProtocol::kGetSkSLsExtensionName] = {
task_runners_.GetIOTaskRunner(),
std::bind(&Shell::OnServiceProtocolGetSkSLs, this, std::placeholders::_1,
std::placeholders::_2)};
service_protocol_handlers_
[ServiceProtocol::kEstimateRasterCacheMemoryExtensionName] = {
task_runners_.GetRasterTaskRunner(),
std::bind(&Shell::OnServiceProtocolEstimateRasterCacheMemory, this,
std::placeholders::_1, std::placeholders::_2)};
service_protocol_handlers_
[ServiceProtocol::kRenderFrameWithRasterStatsExtensionName] = {
task_runners_.GetRasterTaskRunner(),
std::bind(&Shell::OnServiceProtocolRenderFrameWithRasterStats, this,
std::placeholders::_1, std::placeholders::_2)};
service_protocol_handlers_[ServiceProtocol::kReloadAssetFonts] = {
task_runners_.GetPlatformTaskRunner(),
std::bind(&Shell::OnServiceProtocolReloadAssetFonts, this,
std::placeholders::_1, std::placeholders::_2)};
}
Shell::~Shell() {
PersistentCache::GetCacheForProcess()->RemoveWorkerTaskRunner(
task_runners_.GetIOTaskRunner());
vm_->GetServiceProtocol()->RemoveHandler(this);
fml::AutoResetWaitableEvent platiso_latch, ui_latch, gpu_latch,
platform_latch, io_latch;
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetPlatformTaskRunner(),
fml::MakeCopyable([this, &platiso_latch]() mutable {
engine_->ShutdownPlatformIsolates();
platiso_latch.Signal();
}));
platiso_latch.Wait();
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetUITaskRunner(),
fml::MakeCopyable([this, &ui_latch]() mutable {
engine_.reset();
ui_latch.Signal();
}));
ui_latch.Wait();
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetRasterTaskRunner(),
fml::MakeCopyable(
[this, rasterizer = std::move(rasterizer_), &gpu_latch]() mutable {
rasterizer.reset();
this->weak_factory_gpu_.reset();
gpu_latch.Signal();
}));
gpu_latch.Wait();
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetIOTaskRunner(),
fml::MakeCopyable([io_manager = std::move(io_manager_),
platform_view = platform_view_.get(),
&io_latch]() mutable {
io_manager.reset();
if (platform_view) {
platform_view->ReleaseResourceContext();
}
io_latch.Signal();
}));
io_latch.Wait();
// The platform view must go last because it may be holding onto platform side
// counterparts to resources owned by subsystems running on other threads. For
// example, the NSOpenGLContext on the Mac.
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetPlatformTaskRunner(),
fml::MakeCopyable([platform_view = std::move(platform_view_),
&platform_latch]() mutable {
platform_view.reset();
platform_latch.Signal();
}));
platform_latch.Wait();
}
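// Creates a new shell that shares this shell's task runners, VM, isolate
// snapshot, and IO manager, and runs the given configuration with the
// provided initial route.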
std::unique_ptr<Shell> Shell::Spawn(
RunConfiguration run_configuration,
const std::string& initial_route,
const CreateCallback<PlatformView>& on_create_platform_view,
const CreateCallback<Rasterizer>& on_create_rasterizer) const {
FML_DCHECK(task_runners_.IsValid());
// It's safe to store this value since it is set on the platform thread.
bool is_gpu_disabled = false;
GetIsGpuDisabledSyncSwitch()->Execute(
fml::SyncSwitch::Handlers()
.SetIfFalse([&is_gpu_disabled] { is_gpu_disabled = false; })
.SetIfTrue([&is_gpu_disabled] { is_gpu_disabled = true; }));
std::unique_ptr<Shell> result = CreateWithSnapshot(
PlatformData{}, task_runners_, rasterizer_->GetRasterThreadMerger(),
io_manager_, resource_cache_limit_calculator_, GetSettings(), vm_,
vm_->GetVMData()->GetIsolateSnapshot(), on_create_platform_view,
on_create_rasterizer,
[engine = this->engine_.get(), initial_route](
Engine::Delegate& delegate,
const PointerDataDispatcherMaker& dispatcher_maker, DartVM& vm,
const fml::RefPtr<const DartSnapshot>& isolate_snapshot,
const TaskRunners& task_runners, const PlatformData& platform_data,
const Settings& settings, std::unique_ptr<Animator> animator,
const fml::WeakPtr<IOManager>& io_manager,
const fml::RefPtr<SkiaUnrefQueue>& unref_queue,
fml::TaskRunnerAffineWeakPtr<SnapshotDelegate> snapshot_delegate,
const std::shared_ptr<VolatilePathTracker>& volatile_path_tracker,
const std::shared_ptr<fml::SyncSwitch>& is_gpu_disabled_sync_switch,
impeller::RuntimeStageBackend runtime_stage_backend) {
return engine->Spawn(
/*delegate=*/delegate,
/*dispatcher_maker=*/dispatcher_maker,
/*settings=*/settings,
/*animator=*/std::move(animator),
/*initial_route=*/initial_route,
/*io_manager=*/io_manager,
/*snapshot_delegate=*/std::move(snapshot_delegate),
/*gpu_disabled_switch=*/is_gpu_disabled_sync_switch);
},
is_gpu_disabled);
result->RunEngine(std::move(run_configuration));
return result;
}
void Shell::NotifyLowMemoryWarning() const {
auto trace_id = fml::tracing::TraceNonce();
TRACE_EVENT_ASYNC_BEGIN0("flutter", "Shell::NotifyLowMemoryWarning",
trace_id);
// This does not require a current isolate but does require a running VM.
// Since a valid shell will not be returned to the embedder without a valid
// DartVMRef, we can be certain that this is a safe spot to assume a VM is
// running.
::Dart_NotifyLowMemory();
task_runners_.GetRasterTaskRunner()->PostTask(
[rasterizer = rasterizer_->GetWeakPtr(), trace_id = trace_id]() {
if (rasterizer) {
rasterizer->NotifyLowMemoryWarning();
}
TRACE_EVENT_ASYNC_END0("flutter", "Shell::NotifyLowMemoryWarning",
trace_id);
});
// The IO Manager uses resource cache limits of 0, so it is not necessary
// to purge them.
}
void Shell::RunEngine(RunConfiguration run_configuration) {
RunEngine(std::move(run_configuration), nullptr);
}
void Shell::RunEngine(
RunConfiguration run_configuration,
const std::function<void(Engine::RunStatus)>& result_callback) {
auto result = [platform_runner = task_runners_.GetPlatformTaskRunner(),
result_callback](Engine::RunStatus run_result) {
if (!result_callback) {
return;
}
platform_runner->PostTask(
[result_callback, run_result]() { result_callback(run_result); });
};
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetUITaskRunner(),
fml::MakeCopyable(
[run_configuration = std::move(run_configuration),
weak_engine = weak_engine_, result]() mutable {
if (!weak_engine) {
FML_LOG(ERROR)
<< "Could not launch engine with configuration - no engine.";
result(Engine::RunStatus::Failure);
return;
}
auto run_result = weak_engine->Run(std::move(run_configuration));
if (run_result == flutter::Engine::RunStatus::Failure) {
FML_LOG(ERROR) << "Could not launch engine with configuration.";
}
result(run_result);
}));
}
std::optional<DartErrorCode> Shell::GetUIIsolateLastError() const {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
if (!weak_engine_) {
return std::nullopt;
}
switch (weak_engine_->GetUIIsolateLastError()) {
case tonic::kCompilationErrorType:
return DartErrorCode::CompilationError;
case tonic::kApiErrorType:
return DartErrorCode::ApiError;
case tonic::kUnknownErrorType:
return DartErrorCode::UnknownError;
case tonic::kNoError:
return DartErrorCode::NoError;
}
return DartErrorCode::UnknownError;
}
bool Shell::EngineHasLivePorts() const {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
if (!weak_engine_) {
return false;
}
return weak_engine_->UIIsolateHasLivePorts();
}
bool Shell::IsSetup() const {
return is_set_up_;
}
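// Completes two-phase shell initialization by taking ownership of the
// subsystems created on their respective task runners; returns false if any
// component is missing or the shell has already been set up.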
bool Shell::Setup(std::unique_ptr<PlatformView> platform_view,
std::unique_ptr<Engine> engine,
std::unique_ptr<Rasterizer> rasterizer,
const std::shared_ptr<ShellIOManager>& io_manager) {
if (is_set_up_) {
return false;
}
if (!platform_view || !engine || !rasterizer || !io_manager) {
return false;
}
platform_view_ = std::move(platform_view);
platform_message_handler_ = platform_view_->GetPlatformMessageHandler();
route_messages_through_platform_thread_.store(true);
task_runners_.GetPlatformTaskRunner()->PostTask(
[self = weak_factory_.GetWeakPtr()] {
if (self) {
self->route_messages_through_platform_thread_.store(false);
}
});
engine_ = std::move(engine);
rasterizer_ = std::move(rasterizer);
io_manager_ = io_manager;
// Set the external view embedder for the rasterizer.
auto view_embedder = platform_view_->CreateExternalViewEmbedder();
rasterizer_->SetExternalViewEmbedder(view_embedder);
rasterizer_->SetSnapshotSurfaceProducer(
platform_view_->CreateSnapshotSurfaceProducer());
// The weak pointers must be generated on the platform thread, which owns the
// unique pointers.
weak_engine_ = engine_->GetWeakPtr();
weak_rasterizer_ = rasterizer_->GetWeakPtr();
weak_platform_view_ = platform_view_->GetWeakPtr();
engine_->AddView(kFlutterImplicitViewId, ViewportMetrics{});
// Setup the time-consuming default font manager right after engine created.
if (!settings_.prefetched_default_font_manager) {
fml::TaskRunner::RunNowOrPostTask(task_runners_.GetUITaskRunner(),
[engine = weak_engine_] {
if (engine) {
engine->SetupDefaultFontManager();
}
});
}
is_set_up_ = true;
PersistentCache::GetCacheForProcess()->AddWorkerTaskRunner(
task_runners_.GetIOTaskRunner());
PersistentCache::GetCacheForProcess()->SetIsDumpingSkp(
settings_.dump_skp_on_shader_compilation);
if (settings_.purge_persistent_cache) {
PersistentCache::GetCacheForProcess()->Purge();
}
return true;
}
const Settings& Shell::GetSettings() const {
return settings_;
}
const TaskRunners& Shell::GetTaskRunners() const {
return task_runners_;
}
const fml::RefPtr<fml::RasterThreadMerger> Shell::GetParentRasterThreadMerger()
const {
return parent_raster_thread_merger_;
}
fml::TaskRunnerAffineWeakPtr<Rasterizer> Shell::GetRasterizer() const {
FML_DCHECK(is_set_up_);
return weak_rasterizer_;
}
fml::WeakPtr<Engine> Shell::GetEngine() {
FML_DCHECK(is_set_up_);
return weak_engine_;
}
fml::WeakPtr<PlatformView> Shell::GetPlatformView() {
FML_DCHECK(is_set_up_);
return weak_platform_view_;
}
fml::WeakPtr<ShellIOManager> Shell::GetIOManager() {
FML_DCHECK(is_set_up_);
return io_manager_->GetWeakPtr();
}
DartVM* Shell::GetDartVM() {
return &vm_;
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewCreated(std::unique_ptr<Surface> surface) {
TRACE_EVENT0("flutter", "Shell::OnPlatformViewCreated");
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
// Prevent any request to change the thread configuration for raster and
// platform queues while the platform view is being created.
//
// This prevents false positives such as this method starts assuming that the
// raster and platform queues have a given thread configuration, but then the
// configuration is changed by a task, and the assumption is no longer true.
//
// This incorrect assumption can lead to deadlock.
// See `should_post_raster_task` for more.
rasterizer_->DisableThreadMergerIfNeeded();
// The normal flow executed by this method is that the platform thread is
// starting the sequence and waiting on the latch. Later the UI thread posts
// raster_task to the raster thread which signals the latch. If the raster and
// the platform threads are the same this results in a deadlock as the
// raster_task will never be posted to the platform/raster thread that is
// blocked on a latch. To avoid the described deadlock, if the raster and the
// platform threads are the same, should_post_raster_task will be false, and
// then instead of posting a task to the raster thread, the ui thread just
// signals the latch and the platform/raster thread follows with executing
// raster_task.
const bool should_post_raster_task =
!task_runners_.GetRasterTaskRunner()->RunsTasksOnCurrentThread();
auto raster_task = fml::MakeCopyable(
[&waiting_for_first_frame = waiting_for_first_frame_, //
rasterizer = rasterizer_->GetWeakPtr(), //
surface = std::move(surface) //
]() mutable {
if (rasterizer) {
// Enables the thread merger which may be used by the external view
// embedder.
rasterizer->EnableThreadMergerIfNeeded();
rasterizer->Setup(std::move(surface));
}
waiting_for_first_frame.store(true);
});
auto ui_task = [engine = engine_->GetWeakPtr()] {
if (engine) {
engine->ScheduleFrame();
}
};
// Threading: Capture platform view by raw pointer and not the weak pointer.
// We are going to use the pointer on the IO thread which is not safe with a
// weak pointer. However, we are preventing the platform view from being
// collected by using a latch.
auto* platform_view = platform_view_.get();
FML_DCHECK(platform_view);
fml::AutoResetWaitableEvent latch;
auto io_task = [io_manager = io_manager_->GetWeakPtr(), platform_view,
ui_task_runner = task_runners_.GetUITaskRunner(), ui_task,
raster_task_runner = task_runners_.GetRasterTaskRunner(),
raster_task, should_post_raster_task, &latch] {
if (io_manager && !io_manager->GetResourceContext()) {
sk_sp<GrDirectContext> resource_context =
platform_view->CreateResourceContext();
io_manager->NotifyResourceContextAvailable(resource_context);
}
// Step 1: Post a task on the UI thread to tell the engine that it has
// an output surface.
fml::TaskRunner::RunNowOrPostTask(ui_task_runner, ui_task);
// Step 2: Tell the raster thread that it should create a surface for
// its rasterizer.
if (should_post_raster_task) {
fml::TaskRunner::RunNowOrPostTask(raster_task_runner, raster_task);
}
latch.Signal();
};
fml::TaskRunner::RunNowOrPostTask(task_runners_.GetIOTaskRunner(), io_task);
latch.Wait();
if (!should_post_raster_task) {
// See comment on should_post_raster_task, in this case the raster_task
// wasn't executed, and we just run it here as the platform thread
// is the raster thread.
raster_task();
}
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewDestroyed() {
TRACE_EVENT0("flutter", "Shell::OnPlatformViewDestroyed");
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
// Prevent any request to change the thread configuration for raster and
// platform queues while the platform view is being destroyed.
//
// This prevents false positives such as this method starts assuming that the
// raster and platform queues have a given thread configuration, but then the
// configuration is changed by a task, and the assumption is no longer true.
//
// This incorrect assumption can lead to deadlock.
rasterizer_->DisableThreadMergerIfNeeded();
// Notify the Dart VM that the PlatformView has been destroyed and some
// cleanup activity can be done (e.g: garbage collect the Dart heap).
task_runners_.GetUITaskRunner()->PostTask([engine = engine_->GetWeakPtr()]() {
if (engine) {
engine->NotifyDestroyed();
}
});
// Note:
// This is a synchronous operation because certain platforms depend on
// setup/suspension of all activities that may be interacting with the GPU in
// a synchronous fashion.
// The UI thread does not need to be serialized here - there is sufficient
// guardrailing in the rasterizer to allow the UI thread to post work to it
// even after the surface has been torn down.
fml::AutoResetWaitableEvent latch;
auto io_task = [io_manager = io_manager_.get(), &latch]() {
// Execute any pending Skia object deletions while GPU access is still
// allowed.
io_manager->GetIsGpuDisabledSyncSwitch()->Execute(
fml::SyncSwitch::Handlers().SetIfFalse(
[&] { io_manager->GetSkiaUnrefQueue()->Drain(); }));
// Step 3: All done. Signal the latch that the platform thread is waiting
// on.
latch.Signal();
};
auto raster_task = [rasterizer = rasterizer_->GetWeakPtr(),
io_task_runner = task_runners_.GetIOTaskRunner(),
io_task]() {
if (rasterizer) {
// Enables the thread merger which is required prior tearing down the
// rasterizer. If the raster and platform threads are merged, tearing down
// the rasterizer unmerges the threads.
rasterizer->EnableThreadMergerIfNeeded();
rasterizer->Teardown();
}
// Step 2: Tell the IO thread to complete its remaining work.
fml::TaskRunner::RunNowOrPostTask(io_task_runner, io_task);
};
// Step 1: Post a task to the Raster thread (possibly this thread) to tell the
// rasterizer the output surface is going away.
fml::TaskRunner::RunNowOrPostTask(task_runners_.GetRasterTaskRunner(),
raster_task);
latch.Wait();
// On Android, the external view embedder may post a task to the platform
// thread, and wait until it completes if overlay surfaces must be released.
// However, the platform thread might be blocked when Dart is initializing.
// In this situation, calling TeardownExternalViewEmbedder is safe because no
// platform views have been created before Flutter renders the first frame.
// Overall, the longer term plan is to remove this implementation once
// https://github.com/flutter/flutter/issues/96679 is fixed.
rasterizer_->TeardownExternalViewEmbedder();
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewScheduleFrame() {
TRACE_EVENT0("flutter", "Shell::OnPlatformViewScheduleFrame");
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
task_runners_.GetUITaskRunner()->PostTask([engine = engine_->GetWeakPtr()]() {
if (engine) {
engine->ScheduleFrame();
}
});
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewSetViewportMetrics(int64_t view_id,
const ViewportMetrics& metrics) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
if (metrics.device_pixel_ratio <= 0 || metrics.physical_width <= 0 ||
metrics.physical_height <= 0) {
// Ignore invalid viewport metrics.
return;
}
// This is the formula Android uses.
// https://android.googlesource.com/platform/frameworks/base/+/39ae5bac216757bc201490f4c7b8c0f63006c6cd/libs/hwui/renderthread/CacheManager.cpp#45
resource_cache_limit_ =
metrics.physical_width * metrics.physical_height * 12 * 4;
size_t resource_cache_max_bytes =
resource_cache_limit_calculator_->GetResourceCacheMaxBytes();
task_runners_.GetRasterTaskRunner()->PostTask(
[rasterizer = rasterizer_->GetWeakPtr(), resource_cache_max_bytes] {
if (rasterizer) {
rasterizer->SetResourceCacheMaxBytes(resource_cache_max_bytes, false);
}
});
task_runners_.GetUITaskRunner()->PostTask(
[engine = engine_->GetWeakPtr(), view_id, metrics]() {
if (engine) {
engine->SetViewportMetrics(view_id, metrics);
}
});
{
std::scoped_lock<std::mutex> lock(resize_mutex_);
expected_frame_sizes_[view_id] =
SkISize::Make(metrics.physical_width, metrics.physical_height);
device_pixel_ratio_ = metrics.device_pixel_ratio;
}
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewDispatchPlatformMessage(
std::unique_ptr<PlatformMessage> message) {
FML_DCHECK(is_set_up_);
#if FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_DEBUG
if (!task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread()) {
std::scoped_lock lock(misbehaving_message_channels_mutex_);
auto inserted = misbehaving_message_channels_.insert(message->channel());
if (inserted.second) {
FML_LOG(ERROR)
<< "The '" << message->channel()
<< "' channel sent a message from native to Flutter on a "
"non-platform thread. Platform channel messages must be sent on "
"the platform thread. Failure to do so may result in data loss or "
"crashes, and must be fixed in the plugin or application code "
"creating that channel.\n"
"See https://docs.flutter.dev/platform-integration/"
"platform-channels#channels-and-platform-threading for more "
"information.";
}
}
#endif // FLUTTER_RUNTIME_MODE == FLUTTER_RUNTIME_MODE_DEBUG
// The static leak checker gets confused by the use of fml::MakeCopyable.
// NOLINTNEXTLINE(clang-analyzer-cplusplus.NewDeleteLeaks)
task_runners_.GetUITaskRunner()->PostTask(fml::MakeCopyable(
[engine = engine_->GetWeakPtr(), message = std::move(message)]() mutable {
if (engine) {
engine->DispatchPlatformMessage(std::move(message));
}
}));
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewDispatchPointerDataPacket(
std::unique_ptr<PointerDataPacket> packet) {
TRACE_EVENT0_WITH_FLOW_IDS(
"flutter", "Shell::OnPlatformViewDispatchPointerDataPacket",
/*flow_id_count=*/1, /*flow_ids=*/&next_pointer_flow_id_);
TRACE_FLOW_BEGIN("flutter", "PointerEvent", next_pointer_flow_id_);
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
task_runners_.GetUITaskRunner()->PostTask(
fml::MakeCopyable([engine = weak_engine_, packet = std::move(packet),
flow_id = next_pointer_flow_id_]() mutable {
if (engine) {
engine->DispatchPointerDataPacket(std::move(packet), flow_id);
}
}));
next_pointer_flow_id_++;
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewDispatchSemanticsAction(int32_t node_id,
SemanticsAction action,
fml::MallocMapping args) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
task_runners_.GetUITaskRunner()->PostTask(
fml::MakeCopyable([engine = engine_->GetWeakPtr(), node_id, action,
args = std::move(args)]() mutable {
if (engine) {
engine->DispatchSemanticsAction(node_id, action, std::move(args));
}
}));
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewSetSemanticsEnabled(bool enabled) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
task_runners_.GetUITaskRunner()->PostTask(
[engine = engine_->GetWeakPtr(), enabled] {
if (engine) {
engine->SetSemanticsEnabled(enabled);
}
});
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewSetAccessibilityFeatures(int32_t flags) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
task_runners_.GetUITaskRunner()->PostTask(
[engine = engine_->GetWeakPtr(), flags] {
if (engine) {
engine->SetAccessibilityFeatures(flags);
}
});
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewRegisterTexture(
std::shared_ptr<flutter::Texture> texture) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
task_runners_.GetRasterTaskRunner()->PostTask(
[rasterizer = rasterizer_->GetWeakPtr(), texture] {
if (rasterizer) {
if (auto registry = rasterizer->GetTextureRegistry()) {
registry->RegisterTexture(texture);
}
}
});
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewUnregisterTexture(int64_t texture_id) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
task_runners_.GetRasterTaskRunner()->PostTask(
[rasterizer = rasterizer_->GetWeakPtr(), texture_id]() {
if (rasterizer) {
if (auto registry = rasterizer->GetTextureRegistry()) {
registry->UnregisterTexture(texture_id);
}
}
});
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewMarkTextureFrameAvailable(int64_t texture_id) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
// Tell the rasterizer that one of its textures has a new frame available.
task_runners_.GetRasterTaskRunner()->PostTask(
[rasterizer = rasterizer_->GetWeakPtr(), texture_id]() {
auto registry = rasterizer->GetTextureRegistry();
if (!registry) {
return;
}
auto texture = registry->GetTexture(texture_id);
if (!texture) {
return;
}
texture->MarkNewFrameAvailable();
});
// Schedule a new frame without having to rebuild the layer tree.
task_runners_.GetUITaskRunner()->PostTask([engine = engine_->GetWeakPtr()]() {
if (engine) {
engine->ScheduleFrame(false);
}
});
}
// |PlatformView::Delegate|
void Shell::OnPlatformViewSetNextFrameCallback(const fml::closure& closure) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
task_runners_.GetRasterTaskRunner()->PostTask(
[rasterizer = rasterizer_->GetWeakPtr(), closure = closure]() {
if (rasterizer) {
rasterizer->SetNextFrameCallback(closure);
}
});
}
// |PlatformView::Delegate|
const Settings& Shell::OnPlatformViewGetSettings() const {
return settings_;
}
// |Animator::Delegate|
void Shell::OnAnimatorBeginFrame(fml::TimePoint frame_target_time,
uint64_t frame_number) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
  // Record the target time for use by the rasterizer.
{
std::scoped_lock time_recorder_lock(time_recorder_mutex_);
latest_frame_target_time_.emplace(frame_target_time);
}
if (engine_) {
engine_->BeginFrame(frame_target_time, frame_number);
}
}
// |Animator::Delegate|
void Shell::OnAnimatorNotifyIdle(fml::TimeDelta deadline) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
if (engine_) {
engine_->NotifyIdle(deadline);
volatile_path_tracker_->OnFrame();
}
}
void Shell::OnAnimatorUpdateLatestFrameTargetTime(
fml::TimePoint frame_target_time) {
FML_DCHECK(is_set_up_);
  // Record the target time for use by the rasterizer.
{
std::scoped_lock time_recorder_lock(time_recorder_mutex_);
if (!latest_frame_target_time_) {
latest_frame_target_time_ = frame_target_time;
} else if (latest_frame_target_time_ < frame_target_time) {
latest_frame_target_time_ = frame_target_time;
}
}
}
// |Animator::Delegate|
void Shell::OnAnimatorDraw(std::shared_ptr<FramePipeline> pipeline) {
FML_DCHECK(is_set_up_);
task_runners_.GetRasterTaskRunner()->PostTask(fml::MakeCopyable(
[&waiting_for_first_frame = waiting_for_first_frame_,
&waiting_for_first_frame_condition = waiting_for_first_frame_condition_,
rasterizer = rasterizer_->GetWeakPtr(),
weak_pipeline = std::weak_ptr<FramePipeline>(pipeline)]() mutable {
if (rasterizer) {
std::shared_ptr<FramePipeline> pipeline = weak_pipeline.lock();
if (pipeline) {
rasterizer->Draw(pipeline);
}
if (waiting_for_first_frame.load()) {
waiting_for_first_frame.store(false);
waiting_for_first_frame_condition.notify_all();
}
}
}));
}
// |Animator::Delegate|
void Shell::OnAnimatorDrawLastLayerTrees(
std::unique_ptr<FrameTimingsRecorder> frame_timings_recorder) {
FML_DCHECK(is_set_up_);
auto task = fml::MakeCopyable(
[rasterizer = rasterizer_->GetWeakPtr(),
frame_timings_recorder = std::move(frame_timings_recorder)]() mutable {
if (rasterizer) {
rasterizer->DrawLastLayerTrees(std::move(frame_timings_recorder));
}
});
task_runners_.GetRasterTaskRunner()->PostTask(task);
}
// |Engine::Delegate|
void Shell::OnEngineUpdateSemantics(SemanticsNodeUpdates update,
CustomAccessibilityActionUpdates actions) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
task_runners_.GetPlatformTaskRunner()->PostTask(
[view = platform_view_->GetWeakPtr(), update = std::move(update),
actions = std::move(actions)] {
if (view) {
view->UpdateSemantics(update, actions);
}
});
}
// |Engine::Delegate|
void Shell::OnEngineHandlePlatformMessage(
std::unique_ptr<PlatformMessage> message) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
if (message->channel() == kSkiaChannel) {
HandleEngineSkiaMessage(std::move(message));
return;
}
if (platform_message_handler_) {
if (route_messages_through_platform_thread_ &&
!platform_message_handler_
->DoesHandlePlatformMessageOnPlatformThread()) {
#if _WIN32
// On Windows capturing a TaskRunner with a TaskRunner will cause an
// uncaught exception in process shutdown because of the deletion order of
// global variables. See also
// https://github.com/flutter/flutter/issues/111575.
// This won't be an issue until Windows supports background platform
// channels (https://github.com/flutter/flutter/issues/93945). Then this
// can potentially be addressed by capturing a weak_ptr to an object that
// retains the ui TaskRunner, instead of the TaskRunner directly.
FML_DCHECK(false);
#endif
// We route messages through the platform thread temporarily when the
// shell is being initialized to be backwards compatible with setting
// message handlers in the same event as starting the isolate, but after
// it is started.
auto ui_task_runner = task_runners_.GetUITaskRunner();
task_runners_.GetPlatformTaskRunner()->PostTask(fml::MakeCopyable(
[weak_platform_message_handler =
std::weak_ptr<PlatformMessageHandler>(platform_message_handler_),
message = std::move(message), ui_task_runner]() mutable {
ui_task_runner->PostTask(
fml::MakeCopyable([weak_platform_message_handler,
message = std::move(message)]() mutable {
auto platform_message_handler =
weak_platform_message_handler.lock();
if (platform_message_handler) {
platform_message_handler->HandlePlatformMessage(
std::move(message));
}
}));
}));
} else {
platform_message_handler_->HandlePlatformMessage(std::move(message));
}
} else {
task_runners_.GetPlatformTaskRunner()->PostTask(
fml::MakeCopyable([view = platform_view_->GetWeakPtr(),
message = std::move(message)]() mutable {
if (view) {
view->HandlePlatformMessage(std::move(message));
}
}));
}
}
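// Forwards a change in a platform channel's listening state from the engine to
// the platform view on the platform task runner.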
void Shell::OnEngineChannelUpdate(std::string name, bool listening) {
FML_DCHECK(is_set_up_);
task_runners_.GetPlatformTaskRunner()->PostTask(
[view = platform_view_->GetWeakPtr(), name = std::move(name), listening] {
if (view) {
view->SendChannelUpdate(name, listening);
}
});
}
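// Handles messages arriving on the Skia system channel. Currently the only
// supported method is "Skia.setResourceCacheMaxBytes", which is forwarded to
// the rasterizer on the raster task runner; when a response is requested, the
// JSON list [true] is returned to signal success.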
void Shell::HandleEngineSkiaMessage(std::unique_ptr<PlatformMessage> message) {
const auto& data = message->data();
rapidjson::Document document;
document.Parse(reinterpret_cast<const char*>(data.GetMapping()),
data.GetSize());
if (document.HasParseError() || !document.IsObject()) {
return;
}
auto root = document.GetObject();
auto method = root.FindMember("method");
if (method->value != "Skia.setResourceCacheMaxBytes") {
return;
}
auto args = root.FindMember("args");
if (args == root.MemberEnd() || !args->value.IsInt()) {
return;
}
task_runners_.GetRasterTaskRunner()->PostTask(
[rasterizer = rasterizer_->GetWeakPtr(), max_bytes = args->value.GetInt(),
response = message->response()] {
if (rasterizer) {
rasterizer->SetResourceCacheMaxBytes(static_cast<size_t>(max_bytes),
true);
}
if (response) {
// The framework side expects this to be valid json encoded as a list.
// Return `[true]` to signal success.
std::vector<uint8_t> data = {'[', 't', 'r', 'u', 'e', ']'};
response->Complete(
std::make_unique<fml::DataMapping>(std::move(data)));
}
});
}
// |Engine::Delegate|
void Shell::OnPreEngineRestart() {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
fml::AutoResetWaitableEvent latch;
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetPlatformTaskRunner(),
[view = platform_view_->GetWeakPtr(), &latch]() {
if (view) {
view->OnPreEngineRestart();
}
latch.Signal();
});
  // This is blocking as any embedded platform views have to be flushed before
  // we re-run the Dart code.
latch.Wait();
}
// |Engine::Delegate|
void Shell::OnRootIsolateCreated() {
if (is_added_to_service_protocol_) {
return;
}
auto description = GetServiceProtocolDescription();
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetPlatformTaskRunner(),
[self = weak_factory_.GetWeakPtr(),
description = std::move(description)]() {
if (self) {
self->vm_->GetServiceProtocol()->AddHandler(self.get(), description);
}
});
is_added_to_service_protocol_ = true;
}
// |Engine::Delegate|
void Shell::UpdateIsolateDescription(const std::string isolate_name,
int64_t isolate_port) {
Handler::Description description(isolate_port, isolate_name);
vm_->GetServiceProtocol()->SetHandlerDescription(this, description);
}
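// Enables or disables the batching of per-frame timings that are reported to
// the Dart side (see OnFrameRasterized and ReportTimings below).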
void Shell::SetNeedsReportTimings(bool value) {
needs_report_timings_ = value;
}
// |Engine::Delegate|
std::unique_ptr<std::vector<std::string>> Shell::ComputePlatformResolvedLocale(
const std::vector<std::string>& supported_locale_data) {
return platform_view_->ComputePlatformResolvedLocales(supported_locale_data);
}
void Shell::LoadDartDeferredLibrary(
intptr_t loading_unit_id,
std::unique_ptr<const fml::Mapping> snapshot_data,
std::unique_ptr<const fml::Mapping> snapshot_instructions) {
task_runners_.GetUITaskRunner()->PostTask(fml::MakeCopyable(
[engine = engine_->GetWeakPtr(), loading_unit_id,
data = std::move(snapshot_data),
instructions = std::move(snapshot_instructions)]() mutable {
if (engine) {
engine->LoadDartDeferredLibrary(loading_unit_id, std::move(data),
std::move(instructions));
}
}));
}
void Shell::LoadDartDeferredLibraryError(intptr_t loading_unit_id,
const std::string error_message,
bool transient) {
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetUITaskRunner(),
[engine = weak_engine_, loading_unit_id, error_message, transient] {
if (engine) {
engine->LoadDartDeferredLibraryError(loading_unit_id, error_message,
transient);
}
});
}
void Shell::UpdateAssetResolverByType(
std::unique_ptr<AssetResolver> updated_asset_resolver,
AssetResolver::AssetResolverType type) {
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetUITaskRunner(),
fml::MakeCopyable(
[engine = weak_engine_, type,
asset_resolver = std::move(updated_asset_resolver)]() mutable {
if (engine) {
engine->GetAssetManager()->UpdateResolverByType(
std::move(asset_resolver), type);
}
}));
}
// |Engine::Delegate|
void Shell::RequestDartDeferredLibrary(intptr_t loading_unit_id) {
task_runners_.GetPlatformTaskRunner()->PostTask(
[view = platform_view_->GetWeakPtr(), loading_unit_id] {
if (view) {
view->RequestDartDeferredLibrary(loading_unit_id);
}
});
}
// |Engine::Delegate|
double Shell::GetScaledFontSize(double unscaled_font_size,
int configuration_id) const {
return platform_view_->GetScaledFontSize(unscaled_font_size,
configuration_id);
}
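// Flushes the batch of unreported frame timings to the engine on the UI task
// runner. Must be called on the raster task runner.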
void Shell::ReportTimings() {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetRasterTaskRunner()->RunsTasksOnCurrentThread());
auto timings = std::move(unreported_timings_);
unreported_timings_ = {};
task_runners_.GetUITaskRunner()->PostTask([timings, engine = weak_engine_] {
if (engine) {
engine->ReportTimings(timings);
}
});
}
size_t Shell::UnreportedFramesCount() const {
// Check that this is running on the raster thread to avoid race conditions.
FML_DCHECK(task_runners_.GetRasterTaskRunner()->RunsTasksOnCurrentThread());
FML_DCHECK(unreported_timings_.size() % (FrameTiming::kStatisticsCount) == 0);
return unreported_timings_.size() / (FrameTiming::kStatisticsCount);
}
void Shell::OnFrameRasterized(const FrameTiming& timing) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetRasterTaskRunner()->RunsTasksOnCurrentThread());
  // The C++ callback defined in settings.h and set by the Flutter runner. This
  // is independent of the timings reported to the Dart side.
if (settings_.frame_rasterized_callback) {
settings_.frame_rasterized_callback(timing);
}
if (!needs_report_timings_) {
return;
}
size_t old_count = unreported_timings_.size();
(void)old_count;
for (auto phase : FrameTiming::kPhases) {
unreported_timings_.push_back(
timing.Get(phase).ToEpochDelta().ToMicroseconds());
}
unreported_timings_.push_back(timing.GetLayerCacheCount());
unreported_timings_.push_back(timing.GetLayerCacheBytes());
unreported_timings_.push_back(timing.GetPictureCacheCount());
unreported_timings_.push_back(timing.GetPictureCacheBytes());
unreported_timings_.push_back(timing.GetFrameNumber());
FML_DCHECK(unreported_timings_.size() ==
old_count + FrameTiming::kStatisticsCount);
  // In tests using iPhone 6S with profile mode, sending a batch of 1 frame or a
  // batch of 100 frames has roughly the same cost of less than 0.1ms. Sending
  // a batch of 500 frames costs about 0.2ms. The 1-second threshold usually
  // kicks in before we reach the following 100-frame threshold. The 100-frame
  // threshold here is mainly for unit tests (so we don't have to write a
  // 1-second unit test), and it makes sure that our vector won't grow too big
  // with future 120fps, 240fps, or 1000fps displays.
  //
  // In profile/debug mode, the timings are used by development tools which
  // require a latency of no more than 100ms. Hence we lower that 1-second
  // threshold to 100ms, because the performance overhead isn't that critical
  // in those cases.
if (!first_frame_rasterized_ || UnreportedFramesCount() >= 100) {
first_frame_rasterized_ = true;
ReportTimings();
} else if (!frame_timings_report_scheduled_) {
#if FLUTTER_RELEASE
constexpr int kBatchTimeInMilliseconds = 1000;
#else
constexpr int kBatchTimeInMilliseconds = 100;
#endif
// Also make sure that frame times get reported with a max latency of 1
// second. Otherwise, the timings of last few frames of an animation may
// never be reported until the next animation starts.
frame_timings_report_scheduled_ = true;
task_runners_.GetRasterTaskRunner()->PostDelayedTask(
[self = weak_factory_gpu_->GetWeakPtr()]() {
if (!self) {
return;
}
self->frame_timings_report_scheduled_ = false;
if (self->UnreportedFramesCount() > 0) {
self->ReportTimings();
}
},
fml::TimeDelta::FromMilliseconds(kBatchTimeInMilliseconds));
}
}
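// Derives the per-frame time budget from the main display's refresh rate,
// falling back to the default frame budget when the refresh rate is unknown.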
fml::Milliseconds Shell::GetFrameBudget() {
double display_refresh_rate = display_manager_->GetMainDisplayRefreshRate();
if (display_refresh_rate > 0) {
return fml::RefreshRateToFrameBudget(display_refresh_rate);
} else {
return fml::kDefaultFrameBudget;
}
}
fml::TimePoint Shell::GetLatestFrameTargetTime() const {
std::scoped_lock time_recorder_lock(time_recorder_mutex_);
FML_CHECK(latest_frame_target_time_.has_value())
<< "GetLatestFrameTargetTime called before OnAnimatorBeginFrame";
// Covered by FML_CHECK().
// NOLINTNEXTLINE(bugprone-unchecked-optional-access)
return latest_frame_target_time_.value();
}
// |Rasterizer::Delegate|
bool Shell::ShouldDiscardLayerTree(int64_t view_id,
const flutter::LayerTree& tree) {
std::scoped_lock<std::mutex> lock(resize_mutex_);
auto expected_frame_size = ExpectedFrameSize(view_id);
return !expected_frame_size.isEmpty() &&
tree.frame_size() != expected_frame_size;
}
// |ServiceProtocol::Handler|
fml::RefPtr<fml::TaskRunner> Shell::GetServiceProtocolHandlerTaskRunner(
std::string_view method) const {
FML_DCHECK(is_set_up_);
auto found = service_protocol_handlers_.find(method);
if (found != service_protocol_handlers_.end()) {
return found->second.first;
}
return task_runners_.GetUITaskRunner();
}
// |ServiceProtocol::Handler|
bool Shell::HandleServiceProtocolMessage(
    std::string_view method,  // one of the extension names specified above.
const ServiceProtocolMap& params,
rapidjson::Document* response) {
auto found = service_protocol_handlers_.find(method);
if (found != service_protocol_handlers_.end()) {
return found->second.second(params, response);
}
return false;
}
// |ServiceProtocol::Handler|
ServiceProtocol::Handler::Description Shell::GetServiceProtocolDescription()
const {
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
if (!weak_engine_) {
return ServiceProtocol::Handler::Description();
}
return {
weak_engine_->GetUIIsolateMainPort(),
weak_engine_->GetUIIsolateName(),
};
}
static void ServiceProtocolParameterError(rapidjson::Document* response,
std::string error_details) {
auto& allocator = response->GetAllocator();
response->SetObject();
const int64_t kInvalidParams = -32602;
response->AddMember("code", kInvalidParams, allocator);
response->AddMember("message", "Invalid params", allocator);
{
rapidjson::Value details(rapidjson::kObjectType);
details.AddMember("details", std::move(error_details), allocator);
response->AddMember("data", details, allocator);
}
}
static void ServiceProtocolFailureError(rapidjson::Document* response,
std::string message) {
auto& allocator = response->GetAllocator();
response->SetObject();
const int64_t kJsonServerError = -32000;
response->AddMember("code", kJsonServerError, allocator);
response->AddMember("message", std::move(message), allocator);
}
// Service protocol handler
bool Shell::OnServiceProtocolScreenshot(
const ServiceProtocol::Handler::ServiceProtocolMap& params,
rapidjson::Document* response) {
FML_DCHECK(task_runners_.GetRasterTaskRunner()->RunsTasksOnCurrentThread());
auto screenshot = rasterizer_->ScreenshotLastLayerTree(
Rasterizer::ScreenshotType::CompressedImage, true);
if (screenshot.data) {
response->SetObject();
auto& allocator = response->GetAllocator();
response->AddMember("type", "Screenshot", allocator);
rapidjson::Value image;
image.SetString(static_cast<const char*>(screenshot.data->data()),
screenshot.data->size(), allocator);
response->AddMember("screenshot", image, allocator);
return true;
}
ServiceProtocolFailureError(response, "Could not capture image screenshot.");
return false;
}
// Service protocol handler
bool Shell::OnServiceProtocolScreenshotSKP(
const ServiceProtocol::Handler::ServiceProtocolMap& params,
rapidjson::Document* response) {
FML_DCHECK(task_runners_.GetRasterTaskRunner()->RunsTasksOnCurrentThread());
if (settings_.enable_impeller) {
ServiceProtocolFailureError(
response, "Cannot capture SKP screenshot with Impeller enabled.");
return false;
}
auto screenshot = rasterizer_->ScreenshotLastLayerTree(
Rasterizer::ScreenshotType::SkiaPicture, true);
if (screenshot.data) {
response->SetObject();
auto& allocator = response->GetAllocator();
response->AddMember("type", "ScreenshotSkp", allocator);
rapidjson::Value skp;
skp.SetString(static_cast<const char*>(screenshot.data->data()),
screenshot.data->size(), allocator);
response->AddMember("skp", skp, allocator);
return true;
}
ServiceProtocolFailureError(response, "Could not capture SKP screenshot.");
return false;
}
// Service protocol handler
bool Shell::OnServiceProtocolRunInView(
const ServiceProtocol::Handler::ServiceProtocolMap& params,
rapidjson::Document* response) {
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
if (params.count("mainScript") == 0) {
ServiceProtocolParameterError(response,
"'mainScript' parameter is missing.");
return false;
}
if (params.count("assetDirectory") == 0) {
ServiceProtocolParameterError(response,
"'assetDirectory' parameter is missing.");
return false;
}
std::string main_script_path =
fml::paths::FromURI(params.at("mainScript").data());
std::string asset_directory_path =
fml::paths::FromURI(params.at("assetDirectory").data());
auto main_script_file_mapping =
std::make_unique<fml::FileMapping>(fml::OpenFile(
main_script_path.c_str(), false, fml::FilePermission::kRead));
auto isolate_configuration = IsolateConfiguration::CreateForKernel(
std::move(main_script_file_mapping));
RunConfiguration configuration(std::move(isolate_configuration));
configuration.SetEntrypointAndLibrary(engine_->GetLastEntrypoint(),
engine_->GetLastEntrypointLibrary());
configuration.SetEntrypointArgs(engine_->GetLastEntrypointArgs());
configuration.AddAssetResolver(std::make_unique<DirectoryAssetBundle>(
fml::OpenDirectory(asset_directory_path.c_str(), false,
fml::FilePermission::kRead),
false));
// Preserve any original asset resolvers to avoid syncing unchanged assets
// over the DevFS connection.
auto old_asset_manager = engine_->GetAssetManager();
if (old_asset_manager != nullptr) {
for (auto& old_resolver : old_asset_manager->TakeResolvers()) {
if (old_resolver->IsValidAfterAssetManagerChange()) {
configuration.AddAssetResolver(std::move(old_resolver));
}
}
}
auto& allocator = response->GetAllocator();
response->SetObject();
if (engine_->Restart(std::move(configuration))) {
response->AddMember("type", "Success", allocator);
auto new_description = GetServiceProtocolDescription();
rapidjson::Value view(rapidjson::kObjectType);
new_description.Write(this, view, allocator);
response->AddMember("view", view, allocator);
return true;
} else {
FML_DLOG(ERROR) << "Could not run configuration in engine.";
ServiceProtocolFailureError(response,
"Could not run configuration in engine.");
return false;
}
FML_DCHECK(false);
return false;
}
// Service protocol handler
bool Shell::OnServiceProtocolFlushUIThreadTasks(
const ServiceProtocol::Handler::ServiceProtocolMap& params,
rapidjson::Document* response) {
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
// This API should not be invoked by production code.
// It can potentially starve the service isolate if the main isolate pauses
// at a breakpoint or is in an infinite loop.
//
// It should be invoked from the VM Service and blocks it until UI thread
// tasks are processed.
response->SetObject();
response->AddMember("type", "Success", response->GetAllocator());
return true;
}
bool Shell::OnServiceProtocolGetDisplayRefreshRate(
const ServiceProtocol::Handler::ServiceProtocolMap& params,
rapidjson::Document* response) {
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
response->SetObject();
response->AddMember("type", "DisplayRefreshRate", response->GetAllocator());
response->AddMember("fps", display_manager_->GetMainDisplayRefreshRate(),
response->GetAllocator());
return true;
}
double Shell::GetMainDisplayRefreshRate() {
return display_manager_->GetMainDisplayRefreshRate();
}
void Shell::RegisterImageDecoder(ImageGeneratorFactory factory,
int32_t priority) {
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
FML_DCHECK(is_set_up_);
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetUITaskRunner(),
[engine = engine_->GetWeakPtr(), factory = std::move(factory),
priority]() {
if (engine) {
engine->GetImageGeneratorRegistry()->AddFactory(factory, priority);
}
});
}
bool Shell::OnServiceProtocolGetSkSLs(
const ServiceProtocol::Handler::ServiceProtocolMap& params,
rapidjson::Document* response) {
FML_DCHECK(task_runners_.GetIOTaskRunner()->RunsTasksOnCurrentThread());
response->SetObject();
response->AddMember("type", "GetSkSLs", response->GetAllocator());
rapidjson::Value shaders_json(rapidjson::kObjectType);
PersistentCache* persistent_cache = PersistentCache::GetCacheForProcess();
std::vector<PersistentCache::SkSLCache> sksls = persistent_cache->LoadSkSLs();
for (const auto& sksl : sksls) {
size_t b64_size = Base64::EncodedSize(sksl.value->size());
sk_sp<SkData> b64_data = SkData::MakeUninitialized(b64_size + 1);
char* b64_char = static_cast<char*>(b64_data->writable_data());
Base64::Encode(sksl.value->data(), sksl.value->size(), b64_char);
b64_char[b64_size] = 0; // make it null terminated for printing
rapidjson::Value shader_value(b64_char, response->GetAllocator());
std::string_view key_view(reinterpret_cast<const char*>(sksl.key->data()),
sksl.key->size());
auto encode_result = fml::Base32Encode(key_view);
if (!encode_result.first) {
continue;
}
rapidjson::Value shader_key(encode_result.second, response->GetAllocator());
shaders_json.AddMember(shader_key, shader_value, response->GetAllocator());
}
response->AddMember("SkSLs", shaders_json, response->GetAllocator());
return true;
}
bool Shell::OnServiceProtocolEstimateRasterCacheMemory(
const ServiceProtocol::Handler::ServiceProtocolMap& params,
rapidjson::Document* response) {
FML_DCHECK(task_runners_.GetRasterTaskRunner()->RunsTasksOnCurrentThread());
const auto& raster_cache = rasterizer_->compositor_context()->raster_cache();
response->SetObject();
response->AddMember("type", "EstimateRasterCacheMemory",
response->GetAllocator());
response->AddMember<uint64_t>("layerBytes",
raster_cache.EstimateLayerCacheByteSize(),
response->GetAllocator());
response->AddMember<uint64_t>("pictureBytes",
raster_cache.EstimatePictureCacheByteSize(),
response->GetAllocator());
return true;
}
// Service protocol handler
bool Shell::OnServiceProtocolSetAssetBundlePath(
const ServiceProtocol::Handler::ServiceProtocolMap& params,
rapidjson::Document* response) {
FML_DCHECK(task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread());
if (params.count("assetDirectory") == 0) {
ServiceProtocolParameterError(response,
"'assetDirectory' parameter is missing.");
return false;
}
auto& allocator = response->GetAllocator();
response->SetObject();
auto asset_manager = std::make_shared<AssetManager>();
if (!asset_manager->PushFront(std::make_unique<DirectoryAssetBundle>(
fml::OpenDirectory(params.at("assetDirectory").data(), false,
fml::FilePermission::kRead),
false))) {
// The new asset directory path was invalid.
FML_DLOG(ERROR) << "Could not update asset directory.";
ServiceProtocolFailureError(response, "Could not update asset directory.");
return false;
}
// Preserve any original asset resolvers to avoid syncing unchanged assets
// over the DevFS connection.
auto old_asset_manager = engine_->GetAssetManager();
if (old_asset_manager != nullptr) {
for (auto& old_resolver : old_asset_manager->TakeResolvers()) {
if (old_resolver->IsValidAfterAssetManagerChange()) {
asset_manager->PushBack(std::move(old_resolver));
}
}
}
if (engine_->UpdateAssetManager(asset_manager)) {
response->AddMember("type", "Success", allocator);
auto new_description = GetServiceProtocolDescription();
rapidjson::Value view(rapidjson::kObjectType);
new_description.Write(this, view, allocator);
response->AddMember("view", view, allocator);
return true;
} else {
FML_DLOG(ERROR) << "Could not update asset directory.";
ServiceProtocolFailureError(response, "Could not update asset directory.");
return false;
}
FML_DCHECK(false);
return false;
}
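// Serializes a single layer snapshot into a JSON object containing the layer
// id, the rasterization duration in microseconds, the layer bounds, and, when
// available, the raw snapshot bytes encoded as a JSON array.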
static rapidjson::Value SerializeLayerSnapshot(
double device_pixel_ratio,
const LayerSnapshotData& snapshot,
rapidjson::Document* response) {
auto& allocator = response->GetAllocator();
rapidjson::Value result;
result.SetObject();
result.AddMember("layer_unique_id", snapshot.GetLayerUniqueId(), allocator);
result.AddMember("duration_micros", snapshot.GetDuration().ToMicroseconds(),
allocator);
const SkRect bounds = snapshot.GetBounds();
result.AddMember("top", bounds.top(), allocator);
result.AddMember("left", bounds.left(), allocator);
result.AddMember("width", bounds.width(), allocator);
result.AddMember("height", bounds.height(), allocator);
sk_sp<SkData> snapshot_bytes = snapshot.GetSnapshot();
if (snapshot_bytes) {
rapidjson::Value image;
image.SetArray();
const uint8_t* data =
reinterpret_cast<const uint8_t*>(snapshot_bytes->data());
for (size_t i = 0; i < snapshot_bytes->size(); i++) {
image.PushBack(data[i], allocator);
}
result.AddMember("snapshot", image, allocator);
}
return result;
}
bool Shell::OnServiceProtocolRenderFrameWithRasterStats(
const ServiceProtocol::Handler::ServiceProtocolMap& params,
rapidjson::Document* response) {
FML_DCHECK(task_runners_.GetRasterTaskRunner()->RunsTasksOnCurrentThread());
// Impeller does not support this protocol method.
if (io_manager_->GetImpellerContext()) {
const char* error = "Raster status not supported on Impeller backend.";
ServiceProtocolFailureError(response, error);
return false;
}
// TODO(dkwingsmt): This method only handles view #0, including the snapshot
// and the frame size. We need to adapt this method to multi-view.
// https://github.com/flutter/flutter/issues/131892
int64_t view_id = kFlutterImplicitViewId;
if (auto last_layer_tree = rasterizer_->GetLastLayerTree(view_id)) {
auto& allocator = response->GetAllocator();
response->SetObject();
response->AddMember("type", "RenderFrameWithRasterStats", allocator);
    // When rendering the last layer tree, we do not need to build a frame;
    // however, invariants in FrameTimingsRecorder enforce that raster timings
    // cannot be set before build-end.
auto frame_timings_recorder = std::make_unique<FrameTimingsRecorder>();
const auto now = fml::TimePoint::Now();
frame_timings_recorder->RecordVsync(now, now);
frame_timings_recorder->RecordBuildStart(now);
frame_timings_recorder->RecordBuildEnd(now);
last_layer_tree->enable_leaf_layer_tracing(true);
rasterizer_->DrawLastLayerTrees(std::move(frame_timings_recorder));
last_layer_tree->enable_leaf_layer_tracing(false);
rapidjson::Value snapshots;
snapshots.SetArray();
LayerSnapshotStore& store =
rasterizer_->compositor_context()->snapshot_store();
for (const LayerSnapshotData& data : store) {
snapshots.PushBack(
SerializeLayerSnapshot(device_pixel_ratio_, data, response),
allocator);
}
response->AddMember("snapshots", snapshots, allocator);
const auto& frame_size = ExpectedFrameSize(view_id);
response->AddMember("frame_width", frame_size.width(), allocator);
response->AddMember("frame_height", frame_size.height(), allocator);
return true;
} else {
const char* error =
"Failed to render the last frame with raster stats."
" Rasterizer does not hold a valid last layer tree."
" This could happen if this method was invoked before a frame was "
"rendered";
FML_DLOG(ERROR) << error;
ServiceProtocolFailureError(response, error);
return false;
}
}
void Shell::SendFontChangeNotification() {
  // After system fonts are reloaded, we send a system channel message
  // to notify the Flutter framework.
rapidjson::Document document;
document.SetObject();
auto& allocator = document.GetAllocator();
rapidjson::Value message_value;
message_value.SetString(kFontChange, allocator);
document.AddMember(kTypeKey, message_value, allocator);
rapidjson::StringBuffer buffer;
rapidjson::Writer<rapidjson::StringBuffer> writer(buffer);
document.Accept(writer);
std::string message = buffer.GetString();
std::unique_ptr<PlatformMessage> fontsChangeMessage =
std::make_unique<flutter::PlatformMessage>(
kSystemChannel,
fml::MallocMapping::Copy(message.c_str(), message.length()), nullptr);
OnPlatformViewDispatchPlatformMessage(std::move(fontsChangeMessage));
}
bool Shell::OnServiceProtocolReloadAssetFonts(
const ServiceProtocol::Handler::ServiceProtocolMap& params,
rapidjson::Document* response) {
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
if (!engine_) {
return false;
}
engine_->GetFontCollection().RegisterFonts(engine_->GetAssetManager());
engine_->GetFontCollection().GetFontCollection()->ClearFontFamilyCache();
SendFontChangeNotification();
auto& allocator = response->GetAllocator();
response->SetObject();
response->AddMember("type", "Success", allocator);
return true;
}
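// Registers a new view with the engine on the UI task runner. The implicit
// view must never be added through this path.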
void Shell::AddView(int64_t view_id, const ViewportMetrics& viewport_metrics) {
TRACE_EVENT0("flutter", "Shell::AddView");
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
FML_DCHECK(view_id != kFlutterImplicitViewId)
<< "Unexpected request to add the implicit view #"
<< kFlutterImplicitViewId << ". This view should never be added.";
task_runners_.GetUITaskRunner()->PostTask([engine = engine_->GetWeakPtr(), //
viewport_metrics, //
view_id //
] {
if (engine) {
engine->AddView(view_id, viewport_metrics);
}
});
}
void Shell::RemoveView(int64_t view_id, RemoveViewCallback callback) {
TRACE_EVENT0("flutter", "Shell::RemoveView");
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
FML_DCHECK(view_id != kFlutterImplicitViewId)
<< "Unexpected request to remove the implicit view #"
<< kFlutterImplicitViewId << ". This view should never be removed.";
expected_frame_sizes_.erase(view_id);
task_runners_.GetUITaskRunner()->PostTask(
[&task_runners = task_runners_, //
engine = engine_->GetWeakPtr(), //
rasterizer = rasterizer_->GetWeakPtr(), //
view_id, //
callback = std::move(callback) //
] {
if (engine) {
bool removed = engine->RemoveView(view_id);
callback(removed);
}
// Don't wait for the raster task here, which only cleans up memory and
// does not affect functionality. Make sure it is done after Dart
// removes the view to avoid receiving another rasterization request
// that adds back the view record.
task_runners.GetRasterTaskRunner()->PostTask([rasterizer, view_id]() {
if (rasterizer) {
rasterizer->CollectView(view_id);
}
});
});
}
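// Synchronously captures a screenshot of the last layer tree, blocking the
// calling thread until the raster task runner has serviced the request. Note
// that SkiaPicture screenshots are not supported when Impeller is enabled.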
Rasterizer::Screenshot Shell::Screenshot(
Rasterizer::ScreenshotType screenshot_type,
bool base64_encode) {
if (settings_.enable_impeller) {
switch (screenshot_type) {
case Rasterizer::ScreenshotType::SkiaPicture:
FML_LOG(ERROR)
<< "Impeller backend cannot produce ScreenshotType::SkiaPicture.";
return {};
case Rasterizer::ScreenshotType::UncompressedImage:
case Rasterizer::ScreenshotType::CompressedImage:
case Rasterizer::ScreenshotType::SurfaceData:
break;
}
}
TRACE_EVENT0("flutter", "Shell::Screenshot");
fml::AutoResetWaitableEvent latch;
Rasterizer::Screenshot screenshot;
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetRasterTaskRunner(), [&latch, //
rasterizer = GetRasterizer(), //
&screenshot, //
screenshot_type, //
base64_encode //
]() {
if (rasterizer) {
screenshot = rasterizer->ScreenshotLastLayerTree(screenshot_type,
base64_encode);
}
latch.Signal();
});
latch.Wait();
return screenshot;
}
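// Blocks the calling thread until the first frame has been rasterized or the
// timeout expires. Returns an error if invoked on the UI or raster thread,
// since those threads are responsible for producing the frame being awaited.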
fml::Status Shell::WaitForFirstFrame(fml::TimeDelta timeout) {
FML_DCHECK(is_set_up_);
if (task_runners_.GetUITaskRunner()->RunsTasksOnCurrentThread() ||
task_runners_.GetRasterTaskRunner()->RunsTasksOnCurrentThread()) {
return fml::Status(fml::StatusCode::kFailedPrecondition,
"WaitForFirstFrame called from thread that can't wait "
"because it is responsible for generating the frame.");
}
// Check for overflow.
auto now = std::chrono::steady_clock::now();
auto max_duration = std::chrono::steady_clock::time_point::max() - now;
auto desired_duration = std::chrono::milliseconds(timeout.ToMilliseconds());
auto duration =
now + (desired_duration > max_duration ? max_duration : desired_duration);
std::unique_lock<std::mutex> lock(waiting_for_first_frame_mutex_);
bool success = waiting_for_first_frame_condition_.wait_until(
lock, duration, [&waiting_for_first_frame = waiting_for_first_frame_] {
return !waiting_for_first_frame.load();
});
if (success) {
return fml::Status();
} else {
return fml::Status(fml::StatusCode::kDeadlineExceeded, "timeout");
}
}
bool Shell::ReloadSystemFonts() {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
if (!engine_) {
return false;
}
engine_->SetupDefaultFontManager();
engine_->GetFontCollection().GetFontCollection()->ClearFontFamilyCache();
  // After system fonts are reloaded, we send a system channel message
  // to notify the Flutter framework.
SendFontChangeNotification();
return true;
}
std::shared_ptr<const fml::SyncSwitch> Shell::GetIsGpuDisabledSyncSwitch()
const {
return is_gpu_disabled_sync_switch_;
}
void Shell::SetGpuAvailability(GpuAvailability availability) {
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
switch (availability) {
case GpuAvailability::kAvailable:
is_gpu_disabled_sync_switch_->SetSwitch(false);
return;
case GpuAvailability::kFlushAndMakeUnavailable: {
fml::AutoResetWaitableEvent latch;
fml::TaskRunner::RunNowOrPostTask(
task_runners_.GetIOTaskRunner(),
[io_manager = io_manager_.get(), &latch]() {
io_manager->GetSkiaUnrefQueue()->Drain();
latch.Signal();
});
latch.Wait();
}
// FALLTHROUGH
case GpuAvailability::kUnavailable:
is_gpu_disabled_sync_switch_->SetSwitch(true);
return;
default:
FML_DCHECK(false);
}
}
void Shell::OnDisplayUpdates(std::vector<std::unique_ptr<Display>> displays) {
FML_DCHECK(is_set_up_);
FML_DCHECK(task_runners_.GetPlatformTaskRunner()->RunsTasksOnCurrentThread());
std::vector<DisplayData> display_data;
display_data.reserve(displays.size());
for (const auto& display : displays) {
display_data.push_back(display->GetDisplayData());
}
task_runners_.GetUITaskRunner()->PostTask(
[engine = engine_->GetWeakPtr(),
display_data = std::move(display_data)]() {
if (engine) {
engine->SetDisplays(display_data);
}
});
display_manager_->HandleDisplayUpdates(std::move(displays));
}
fml::TimePoint Shell::GetCurrentTimePoint() {
return fml::TimePoint::Now();
}
const std::shared_ptr<PlatformMessageHandler>&
Shell::GetPlatformMessageHandler() const {
return platform_message_handler_;
}
const std::weak_ptr<VsyncWaiter> Shell::GetVsyncWaiter() const {
if (!engine_) {
return {};
}
return engine_->GetVsyncWaiter();
}
const std::shared_ptr<fml::ConcurrentTaskRunner>
Shell::GetConcurrentWorkerTaskRunner() const {
FML_DCHECK(vm_);
if (!vm_) {
return nullptr;
}
return vm_->GetConcurrentWorkerTaskRunner();
}
SkISize Shell::ExpectedFrameSize(int64_t view_id) {
auto found = expected_frame_sizes_.find(view_id);
if (found == expected_frame_sizes_.end()) {
return SkISize::MakeEmpty();
}
return found->second;
}
} // namespace flutter
| engine/shell/common/shell.cc/0 | {
"file_path": "engine/shell/common/shell.cc",
"repo_id": "engine",
"token_count": 36560
} | 285 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/common/shell_test_platform_view_metal.h"
#import <Metal/Metal.h>
#include <utility>
#include "flutter/fml/platform/darwin/scoped_nsobject.h"
#include "flutter/shell/gpu/gpu_surface_metal_impeller.h"
#include "flutter/shell/gpu/gpu_surface_metal_skia.h"
#include "flutter/shell/platform/darwin/graphics/FlutterDarwinContextMetalImpeller.h"
#include "flutter/shell/platform/darwin/graphics/FlutterDarwinContextMetalSkia.h"
namespace flutter {
namespace testing {
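// Creates an 800x600 BGRA8 texture that stands in for an on-screen drawable;
// the test platform view renders into this offscreen texture instead.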
static fml::scoped_nsprotocol<id<MTLTexture>> CreateOffscreenTexture(id<MTLDevice> device) {
auto descriptor =
[MTLTextureDescriptor texture2DDescriptorWithPixelFormat:MTLPixelFormatBGRA8Unorm
width:800
height:600
mipmapped:NO];
descriptor.usage = MTLTextureUsageRenderTarget | MTLTextureUsageShaderRead;
return fml::scoped_nsprotocol<id<MTLTexture>>{[device newTextureWithDescriptor:descriptor]};
}
// This is out of the header so that shell_test_platform_view_metal.h can be included in
// non-Objective-C TUs.
class DarwinContextMetal {
public:
explicit DarwinContextMetal(
bool impeller,
const std::shared_ptr<const fml::SyncSwitch>& is_gpu_disabled_sync_switch)
: context_(impeller ? nil : [[FlutterDarwinContextMetalSkia alloc] initWithDefaultMTLDevice]),
impeller_context_(
impeller ? [[FlutterDarwinContextMetalImpeller alloc] init:is_gpu_disabled_sync_switch]
: nil),
offscreen_texture_(CreateOffscreenTexture(
impeller ? [impeller_context_ context]->GetMTLDevice() : [context_ device])) {}
~DarwinContextMetal() = default;
fml::scoped_nsobject<FlutterDarwinContextMetalImpeller> impeller_context() const {
return impeller_context_;
}
fml::scoped_nsobject<FlutterDarwinContextMetalSkia> context() const { return context_; }
fml::scoped_nsprotocol<id<MTLTexture>> offscreen_texture() const { return offscreen_texture_; }
GPUMTLTextureInfo offscreen_texture_info() const {
GPUMTLTextureInfo info = {};
info.texture_id = 0;
info.texture = reinterpret_cast<GPUMTLTextureHandle>(offscreen_texture_.get());
return info;
}
private:
const fml::scoped_nsobject<FlutterDarwinContextMetalSkia> context_;
const fml::scoped_nsobject<FlutterDarwinContextMetalImpeller> impeller_context_;
const fml::scoped_nsprotocol<id<MTLTexture>> offscreen_texture_;
FML_DISALLOW_COPY_AND_ASSIGN(DarwinContextMetal);
};
ShellTestPlatformViewMetal::ShellTestPlatformViewMetal(
PlatformView::Delegate& delegate,
const TaskRunners& task_runners,
std::shared_ptr<ShellTestVsyncClock> vsync_clock,
CreateVsyncWaiter create_vsync_waiter,
std::shared_ptr<ShellTestExternalViewEmbedder> shell_test_external_view_embedder,
const std::shared_ptr<const fml::SyncSwitch>& is_gpu_disabled_sync_switch)
: ShellTestPlatformView(delegate, task_runners),
GPUSurfaceMetalDelegate(MTLRenderTargetType::kMTLTexture),
metal_context_(std::make_unique<DarwinContextMetal>(GetSettings().enable_impeller,
is_gpu_disabled_sync_switch)),
create_vsync_waiter_(std::move(create_vsync_waiter)),
vsync_clock_(std::move(vsync_clock)),
shell_test_external_view_embedder_(std::move(shell_test_external_view_embedder)) {
if (GetSettings().enable_impeller) {
FML_CHECK([metal_context_->impeller_context() context] != nil);
} else {
FML_CHECK([metal_context_->context() mainContext] != nil);
}
}
ShellTestPlatformViewMetal::~ShellTestPlatformViewMetal() = default;
std::unique_ptr<VsyncWaiter> ShellTestPlatformViewMetal::CreateVSyncWaiter() {
return create_vsync_waiter_();
}
// |ShellTestPlatformView|
void ShellTestPlatformViewMetal::SimulateVSync() {
vsync_clock_->SimulateVSync();
}
// |PlatformView|
std::shared_ptr<ExternalViewEmbedder> ShellTestPlatformViewMetal::CreateExternalViewEmbedder() {
return shell_test_external_view_embedder_;
}
// |PlatformView|
PointerDataDispatcherMaker ShellTestPlatformViewMetal::GetDispatcherMaker() {
return [](DefaultPointerDataDispatcher::Delegate& delegate) {
return std::make_unique<SmoothPointerDataDispatcher>(delegate);
};
}
// |PlatformView|
std::unique_ptr<Surface> ShellTestPlatformViewMetal::CreateRenderingSurface() {
if (GetSettings().enable_impeller) {
return std::make_unique<GPUSurfaceMetalImpeller>(this,
[metal_context_->impeller_context() context]);
}
return std::make_unique<GPUSurfaceMetalSkia>(this, [metal_context_->context() mainContext],
MsaaSampleCount::kNone);
}
// |PlatformView|
std::shared_ptr<impeller::Context> ShellTestPlatformViewMetal::GetImpellerContext() const {
return [metal_context_->impeller_context() context];
}
// |GPUSurfaceMetalDelegate|
GPUCAMetalLayerHandle ShellTestPlatformViewMetal::GetCAMetalLayer(const SkISize& frame_info) const {
FML_CHECK(false) << "A Metal Delegate configured with MTLRenderTargetType::kMTLTexture was asked "
"to acquire a layer.";
return nullptr;
}
// |GPUSurfaceMetalDelegate|
bool ShellTestPlatformViewMetal::PresentDrawable(GrMTLHandle drawable) const {
FML_CHECK(false) << "A Metal Delegate configured with MTLRenderTargetType::kMTLTexture was asked "
"to present a layer drawable.";
return true;
}
// |GPUSurfaceMetalDelegate|
GPUMTLTextureInfo ShellTestPlatformViewMetal::GetMTLTexture(const SkISize& frame_info) const {
return metal_context_->offscreen_texture_info();
}
// |GPUSurfaceMetalDelegate|
bool ShellTestPlatformViewMetal::PresentTexture(GPUMTLTextureInfo texture) const {
// The texture resides offscreen. There is nothing to render to.
return true;
}
} // namespace testing
} // namespace flutter
| engine/shell/common/shell_test_platform_view_metal.mm/0 | {
"file_path": "engine/shell/common/shell_test_platform_view_metal.mm",
"repo_id": "engine",
"token_count": 2353
} | 286 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/common/thread_host.h"
#include <algorithm>
#include <memory>
#include <optional>
#include <string>
#include <utility>
namespace flutter {
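// Builds a thread name by appending a type-specific suffix (".platform",
// ".ui", ".io", ".raster", or ".profiler") to the supplied prefix.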
std::string ThreadHost::ThreadHostConfig::MakeThreadName(
Type type,
const std::string& prefix) {
switch (type) {
case Type::kPlatform:
return prefix + ".platform";
case Type::kUi:
return prefix + ".ui";
case Type::kIo:
return prefix + ".io";
case Type::kRaster:
return prefix + ".raster";
case Type::kProfiler:
return prefix + ".profiler";
}
}
void ThreadHost::ThreadHostConfig::SetIOConfig(const ThreadConfig& config) {
type_mask |= ThreadHost::Type::kIo;
io_config = config;
}
void ThreadHost::ThreadHostConfig::SetUIConfig(const ThreadConfig& config) {
type_mask |= ThreadHost::Type::kUi;
ui_config = config;
}
void ThreadHost::ThreadHostConfig::SetPlatformConfig(
const ThreadConfig& config) {
type_mask |= ThreadHost::Type::kPlatform;
platform_config = config;
}
void ThreadHost::ThreadHostConfig::SetRasterConfig(const ThreadConfig& config) {
type_mask |= ThreadHost::Type::kRaster;
raster_config = config;
}
void ThreadHost::ThreadHostConfig::SetProfilerConfig(
const ThreadConfig& config) {
type_mask |= ThreadHost::Type::kProfiler;
profiler_config = config;
}
std::unique_ptr<fml::Thread> ThreadHost::CreateThread(
Type type,
std::optional<ThreadConfig> thread_config,
const ThreadHostConfig& host_config) const {
  /// If no ThreadConfig was specified, create a default one.
if (!thread_config.has_value()) {
thread_config = ThreadConfig(
ThreadHostConfig::MakeThreadName(type, host_config.name_prefix));
}
return std::make_unique<fml::Thread>(host_config.config_setter,
thread_config.value());
}
ThreadHost::ThreadHost() = default;
ThreadHost::ThreadHost(ThreadHost&&) = default;
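// A hypothetical usage sketch (for illustration only): spin up the UI, raster,
// and IO threads with a shared name prefix.
//
//   ThreadHost host("io.flutter.test",
//                   ThreadHost::Type::kUi | ThreadHost::Type::kRaster |
//                       ThreadHost::Type::kIo);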
ThreadHost::ThreadHost(const std::string& name_prefix, uint64_t mask)
: ThreadHost(ThreadHostConfig(name_prefix, mask)) {}
ThreadHost::ThreadHost(const ThreadHostConfig& host_config)
: name_prefix(host_config.name_prefix) {
if (host_config.isThreadNeeded(ThreadHost::Type::kPlatform)) {
platform_thread =
CreateThread(Type::kPlatform, host_config.platform_config, host_config);
}
if (host_config.isThreadNeeded(ThreadHost::Type::kUi)) {
ui_thread = CreateThread(Type::kUi, host_config.ui_config, host_config);
}
if (host_config.isThreadNeeded(ThreadHost::Type::kRaster)) {
raster_thread =
CreateThread(Type::kRaster, host_config.raster_config, host_config);
}
if (host_config.isThreadNeeded(ThreadHost::Type::kIo)) {
io_thread = CreateThread(Type::kIo, host_config.io_config, host_config);
}
if (host_config.isThreadNeeded(ThreadHost::Type::kProfiler)) {
profiler_thread =
CreateThread(Type::kProfiler, host_config.profiler_config, host_config);
}
}
ThreadHost::~ThreadHost() = default;
} // namespace flutter
| engine/shell/common/thread_host.cc/0 | {
"file_path": "engine/shell/common/thread_host.cc",
"repo_id": "engine",
"token_count": 1129
} | 287 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/gpu/gpu_surface_gl_delegate.h"
#include <cstring>
#include "third_party/skia/include/gpu/gl/GrGLAssembleInterface.h"
namespace flutter {
GPUSurfaceGLDelegate::~GPUSurfaceGLDelegate() = default;
bool GPUSurfaceGLDelegate::GLContextFBOResetAfterPresent() const {
return false;
}
SurfaceFrame::FramebufferInfo GPUSurfaceGLDelegate::GLContextFramebufferInfo()
const {
SurfaceFrame::FramebufferInfo res;
res.supports_readback = true;
return res;
}
SkMatrix GPUSurfaceGLDelegate::GLContextSurfaceTransformation() const {
SkMatrix matrix;
matrix.setIdentity();
return matrix;
}
GPUSurfaceGLDelegate::GLProcResolver GPUSurfaceGLDelegate::GetGLProcResolver()
const {
return nullptr;
}
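// Determines whether the resolved GL implementation is OpenGL ES by checking
// whether the string returned by glGetString(GL_VERSION) starts with the
// "OpenGL ES" prefix.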
static bool IsProcResolverOpenGLES(
const GPUSurfaceGLDelegate::GLProcResolver& proc_resolver) {
// Version string prefix that identifies an OpenGL ES implementation.
#define GPU_GL_VERSION 0x1F02
constexpr char kGLESVersionPrefix[] = "OpenGL ES";
#ifdef WIN32
using GLGetStringProc = const char*(__stdcall*)(uint32_t);
#else
using GLGetStringProc = const char* (*)(uint32_t);
#endif
GLGetStringProc gl_get_string =
reinterpret_cast<GLGetStringProc>(proc_resolver("glGetString"));
FML_CHECK(gl_get_string)
<< "The GL proc resolver could not resolve glGetString";
const char* gl_version_string = gl_get_string(GPU_GL_VERSION);
FML_CHECK(gl_version_string)
<< "The GL proc resolver's glGetString(GL_VERSION) failed";
return strncmp(gl_version_string, kGLESVersionPrefix,
strlen(kGLESVersionPrefix)) == 0;
}
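// Assembles a Skia GrGLInterface either from the platform's native GL bindings
// (when no proc resolver is supplied) or from the delegate-provided resolver,
// dispatching to the GLES or desktop GL assembler based on the version string.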
static sk_sp<const GrGLInterface> CreateGLInterface(
const GPUSurfaceGLDelegate::GLProcResolver& proc_resolver) {
if (proc_resolver == nullptr) {
// If there is no custom proc resolver, ask Skia to guess the native
// interface. This often leads to interesting results on most platforms.
return GrGLMakeNativeInterface();
}
struct ProcResolverContext {
GPUSurfaceGLDelegate::GLProcResolver resolver;
};
ProcResolverContext context = {proc_resolver};
GrGLGetProc gl_get_proc = [](void* context,
const char gl_proc_name[]) -> GrGLFuncPtr {
auto proc_resolver_context =
reinterpret_cast<ProcResolverContext*>(context);
return reinterpret_cast<GrGLFuncPtr>(
proc_resolver_context->resolver(gl_proc_name));
};
// glGetString indicates an OpenGL ES interface.
if (IsProcResolverOpenGLES(proc_resolver)) {
return GrGLMakeAssembledGLESInterface(&context, gl_get_proc);
}
// Fallback to OpenGL.
if (auto interface = GrGLMakeAssembledGLInterface(&context, gl_get_proc)) {
return interface;
}
FML_LOG(ERROR) << "Could not create a valid GL interface.";
return nullptr;
}
sk_sp<const GrGLInterface> GPUSurfaceGLDelegate::GetGLInterface() const {
return CreateGLInterface(GetGLProcResolver());
}
sk_sp<const GrGLInterface>
GPUSurfaceGLDelegate::GetDefaultPlatformGLInterface() {
return CreateGLInterface(nullptr);
}
bool GPUSurfaceGLDelegate::AllowsDrawingWhenGpuDisabled() const {
return true;
}
} // namespace flutter
| engine/shell/gpu/gpu_surface_gl_delegate.cc/0 | {
"file_path": "engine/shell/gpu/gpu_surface_gl_delegate.cc",
"repo_id": "engine",
"token_count": 1158
} | 288 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_GPU_GPU_SURFACE_SOFTWARE_DELEGATE_H_
#define FLUTTER_SHELL_GPU_GPU_SURFACE_SOFTWARE_DELEGATE_H_
#include "flutter/flow/embedded_views.h"
#include "flutter/fml/macros.h"
#include "third_party/skia/include/core/SkSurface.h"
namespace flutter {
//------------------------------------------------------------------------------
/// @brief Interface implemented by all platform surfaces that can present
/// a software backing store to the "screen". The GPU surface
/// abstraction (which abstracts the client rendering API) uses this
/// delegation pattern to tell the platform surface (which abstracts
/// how backing stores fulfilled by the selected client rendering
/// API end up on the "screen" on a particular platform) when the
/// rasterizer needs to allocate and present the software backing
/// store.
///
/// @see |IOSSurfaceSoftware|, |AndroidSurfaceSoftware|,
/// |EmbedderSurfaceSoftware|.
///
class GPUSurfaceSoftwareDelegate {
public:
  virtual ~GPUSurfaceSoftwareDelegate();
//----------------------------------------------------------------------------
/// @brief Called when the GPU surface needs a new buffer to render a new
/// frame into.
///
/// @param[in] size The size of the frame.
///
/// @return A raster surface returned by the platform.
///
virtual sk_sp<SkSurface> AcquireBackingStore(const SkISize& size) = 0;
//----------------------------------------------------------------------------
/// @brief Called by the platform when a frame has been rendered into the
/// backing store and the platform must display it on-screen.
///
/// @param[in] backing_store The software backing store to present.
///
  /// @return Returns whether the platform could present the backing store onto
  /// the screen.
///
virtual bool PresentBackingStore(sk_sp<SkSurface> backing_store) = 0;
};
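// A minimal sketch of a conforming delegate is shown below. This is purely
// illustrative: the class name is hypothetical and the raster-surface factory
// call assumes a Skia version that exposes SkSurfaces::Raster. Real delegates
// additionally manage platform-specific presentation of the pixels.
//
//   class ExampleSoftwareDelegate : public GPUSurfaceSoftwareDelegate {
//    public:
//     sk_sp<SkSurface> AcquireBackingStore(const SkISize& size) override {
//       if (!surface_ || surface_->width() != size.width() ||
//           surface_->height() != size.height()) {
//         surface_ = SkSurfaces::Raster(SkImageInfo::MakeN32Premul(size));
//       }
//       return surface_;
//     }
//
//     bool PresentBackingStore(sk_sp<SkSurface> backing_store) override {
//       // Blit the backing store's pixels to the platform window here.
//       return true;
//     }
//
//    private:
//     sk_sp<SkSurface> surface_;
//   };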
} // namespace flutter
#endif // FLUTTER_SHELL_GPU_GPU_SURFACE_SOFTWARE_DELEGATE_H_
| engine/shell/gpu/gpu_surface_software_delegate.h/0 | {
"file_path": "engine/shell/gpu/gpu_surface_software_delegate.h",
"repo_id": "engine",
"token_count": 734
} | 289 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_ANDROID_ANDROID_CONTEXT_GL_SKIA_H_
#define FLUTTER_SHELL_PLATFORM_ANDROID_ANDROID_CONTEXT_GL_SKIA_H_
#include "flutter/fml/macros.h"
#include "flutter/fml/memory/ref_counted.h"
#include "flutter/fml/memory/ref_ptr.h"
#include "flutter/shell/common/platform_view.h"
#include "flutter/shell/platform/android/android_environment_gl.h"
#include "flutter/shell/platform/android/context/android_context.h"
#include "flutter/shell/platform/android/surface/android_native_window.h"
#include "third_party/skia/include/core/SkSize.h"
namespace flutter {
class AndroidEGLSurface;
//------------------------------------------------------------------------------
/// The Android context is used by `AndroidSurfaceGL` to create and manage
/// EGL surfaces.
///
/// This context binds `EGLContext` to the current rendering thread and to the
/// draw and read `EGLSurface`s.
///
class AndroidContextGLSkia : public AndroidContext {
public:
AndroidContextGLSkia(fml::RefPtr<AndroidEnvironmentGL> environment,
const TaskRunners& taskRunners,
uint8_t msaa_samples);
~AndroidContextGLSkia();
//----------------------------------------------------------------------------
  /// @brief Allocates a new EGL window surface that is used for on-screen
/// pixels.
///
/// @return The window surface.
///
std::unique_ptr<AndroidEGLSurface> CreateOnscreenSurface(
const fml::RefPtr<AndroidNativeWindow>& window) const;
//----------------------------------------------------------------------------
  /// @brief Allocates a 1x1 pbuffer surface that is used for making the
  /// offscreen context current for texture uploads.
///
/// @return The pbuffer surface.
///
std::unique_ptr<AndroidEGLSurface> CreateOffscreenSurface() const;
//----------------------------------------------------------------------------
  /// @brief Allocates a 1x1 pbuffer surface that is used for making the
/// onscreen context current for snapshotting.
///
/// @return The pbuffer surface.
///
std::unique_ptr<AndroidEGLSurface> CreatePbufferSurface() const;
//----------------------------------------------------------------------------
/// @return The Android environment that contains a reference to the
/// display.
///
fml::RefPtr<AndroidEnvironmentGL> Environment() const;
//----------------------------------------------------------------------------
/// @return Whether the current context is valid. That is, if the EGL
/// contexts were successfully created.
///
bool IsValid() const override;
//----------------------------------------------------------------------------
  /// @return Whether the current context was successfully cleared.
///
bool ClearCurrent() const;
//----------------------------------------------------------------------------
/// @brief Returns the EGLContext.
///
/// @return EGLContext.
///
EGLContext GetEGLContext() const;
//----------------------------------------------------------------------------
/// @brief Returns the EGLDisplay.
///
/// @return EGLDisplay.
///
EGLDisplay GetEGLDisplay() const;
//----------------------------------------------------------------------------
/// @brief Create a new EGLContext using the same EGLConfig.
///
/// @return The EGLContext.
///
EGLContext CreateNewContext() const;
//----------------------------------------------------------------------------
/// @brief The EGLConfig for this context.
///
EGLConfig Config() const { return config_; }
private:
fml::RefPtr<AndroidEnvironmentGL> environment_;
EGLConfig config_;
EGLContext context_;
EGLContext resource_context_;
bool valid_ = false;
TaskRunners task_runners_;
FML_DISALLOW_COPY_AND_ASSIGN(AndroidContextGLSkia);
};
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_ANDROID_ANDROID_CONTEXT_GL_SKIA_H_
| engine/shell/platform/android/android_context_gl_skia.h/0 | {
"file_path": "engine/shell/platform/android/android_context_gl_skia.h",
"repo_id": "engine",
"token_count": 1233
} | 290 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/android/android_surface_gl_impeller.h"
#include "flutter/fml/logging.h"
#include "flutter/impeller/toolkit/egl/surface.h"
#include "flutter/shell/gpu/gpu_surface_gl_impeller.h"
namespace flutter {
AndroidSurfaceGLImpeller::AndroidSurfaceGLImpeller(
const std::shared_ptr<AndroidContextGLImpeller>& android_context)
: android_context_(android_context) {
offscreen_surface_ = android_context_->CreateOffscreenSurface();
if (!offscreen_surface_) {
FML_DLOG(ERROR) << "Could not create offscreen surface.";
return;
}
// The onscreen surface will be acquired once the native window is set.
is_valid_ = true;
}
AndroidSurfaceGLImpeller::~AndroidSurfaceGLImpeller() = default;
// |AndroidSurface|
bool AndroidSurfaceGLImpeller::IsValid() const {
return is_valid_;
}
// |AndroidSurface|
std::unique_ptr<Surface> AndroidSurfaceGLImpeller::CreateGPUSurface(
GrDirectContext* gr_context) {
auto surface = std::make_unique<GPUSurfaceGLImpeller>(
this, // delegate
android_context_->GetImpellerContext(), // context
true // render to surface
);
if (!surface->IsValid()) {
return nullptr;
}
return surface;
}
// |AndroidSurface|
void AndroidSurfaceGLImpeller::TeardownOnScreenContext() {
GLContextClearCurrent();
onscreen_surface_.reset();
}
// |AndroidSurface|
bool AndroidSurfaceGLImpeller::OnScreenSurfaceResize(const SkISize& size) {
// The size is unused. It was added only for iOS where the sizes were
// necessary to re-create auxiliary buffers (stencil, depth, etc.).
return RecreateOnscreenSurfaceAndMakeOnscreenContextCurrent();
}
// |AndroidSurface|
bool AndroidSurfaceGLImpeller::ResourceContextMakeCurrent() {
if (!offscreen_surface_) {
return false;
}
return android_context_->ResourceContextMakeCurrent(offscreen_surface_.get());
}
// |AndroidSurface|
bool AndroidSurfaceGLImpeller::ResourceContextClearCurrent() {
return android_context_->ResourceContextClearCurrent();
}
// |AndroidSurface|
bool AndroidSurfaceGLImpeller::SetNativeWindow(
fml::RefPtr<AndroidNativeWindow> window) {
native_window_ = std::move(window);
return RecreateOnscreenSurfaceAndMakeOnscreenContextCurrent();
}
// |AndroidSurface|
std::unique_ptr<Surface> AndroidSurfaceGLImpeller::CreateSnapshotSurface() {
FML_UNREACHABLE();
}
// |AndroidSurface|
std::shared_ptr<impeller::Context>
AndroidSurfaceGLImpeller::GetImpellerContext() {
return android_context_->GetImpellerContext();
}
// |GPUSurfaceGLDelegate|
std::unique_ptr<GLContextResult>
AndroidSurfaceGLImpeller::GLContextMakeCurrent() {
return std::make_unique<GLContextDefaultResult>(OnGLContextMakeCurrent());
}
bool AndroidSurfaceGLImpeller::OnGLContextMakeCurrent() {
if (!onscreen_surface_) {
return false;
}
return android_context_->OnscreenContextMakeCurrent(onscreen_surface_.get());
}
// |GPUSurfaceGLDelegate|
bool AndroidSurfaceGLImpeller::GLContextClearCurrent() {
if (!onscreen_surface_) {
return false;
}
return android_context_->OnscreenContextClearCurrent();
}
// |GPUSurfaceGLDelegate|
SurfaceFrame::FramebufferInfo
AndroidSurfaceGLImpeller::GLContextFramebufferInfo() const {
auto info = SurfaceFrame::FramebufferInfo{};
info.supports_readback = true;
info.supports_partial_repaint = false;
return info;
}
// |GPUSurfaceGLDelegate|
void AndroidSurfaceGLImpeller::GLContextSetDamageRegion(
const std::optional<SkIRect>& region) {
// Not supported.
}
// |GPUSurfaceGLDelegate|
bool AndroidSurfaceGLImpeller::GLContextPresent(
const GLPresentInfo& present_info) {
// The FBO ID is superfluous and was introduced for iOS where the default
// framebuffer was not FBO0.
if (!onscreen_surface_) {
return false;
}
return onscreen_surface_->Present();
}
// |GPUSurfaceGLDelegate|
GLFBOInfo AndroidSurfaceGLImpeller::GLContextFBO(GLFrameInfo frame_info) const {
// FBO0 is the default window bound framebuffer in EGL environments.
return GLFBOInfo{
.fbo_id = 0,
};
}
// |GPUSurfaceGLDelegate|
sk_sp<const GrGLInterface> AndroidSurfaceGLImpeller::GetGLInterface() const {
return nullptr;
}
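// Tears down any current context binding, drops the previous onscreen surface,
// creates a fresh EGL window surface from the currently set native window, and
// makes the onscreen context current on it. Returns false if no native window
// has been set or if surface creation fails.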
bool AndroidSurfaceGLImpeller::
RecreateOnscreenSurfaceAndMakeOnscreenContextCurrent() {
GLContextClearCurrent();
if (!native_window_) {
return false;
}
onscreen_surface_.reset();
auto onscreen_surface =
android_context_->CreateOnscreenSurface(native_window_->handle());
if (!onscreen_surface) {
FML_DLOG(ERROR) << "Could not create onscreen surface.";
return false;
}
onscreen_surface_ = std::move(onscreen_surface);
return OnGLContextMakeCurrent();
}
} // namespace flutter
| engine/shell/platform/android/android_surface_gl_impeller.cc/0 | {
"file_path": "engine/shell/platform/android/android_surface_gl_impeller.cc",
"repo_id": "engine",
"token_count": 1671
} | 291 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/android/external_view_embedder/external_view_embedder.h"
#include "flutter/common/constants.h"
#include "flutter/fml/synchronization/waitable_event.h"
#include "flutter/fml/trace_event.h"
namespace flutter {
AndroidExternalViewEmbedder::AndroidExternalViewEmbedder(
const AndroidContext& android_context,
std::shared_ptr<PlatformViewAndroidJNI> jni_facade,
std::shared_ptr<AndroidSurfaceFactory> surface_factory,
const TaskRunners& task_runners)
: ExternalViewEmbedder(),
android_context_(android_context),
jni_facade_(std::move(jni_facade)),
surface_factory_(std::move(surface_factory)),
surface_pool_(std::make_unique<SurfacePool>()),
task_runners_(task_runners) {}
// |ExternalViewEmbedder|
void AndroidExternalViewEmbedder::PrerollCompositeEmbeddedView(
int64_t view_id,
std::unique_ptr<EmbeddedViewParams> params) {
TRACE_EVENT0("flutter",
"AndroidExternalViewEmbedder::PrerollCompositeEmbeddedView");
SkRect view_bounds = SkRect::Make(frame_size_);
std::unique_ptr<EmbedderViewSlice> view;
view = std::make_unique<DisplayListEmbedderViewSlice>(view_bounds);
slices_.insert_or_assign(view_id, std::move(view));
composition_order_.push_back(view_id);
// Update params only if they changed.
if (view_params_.count(view_id) == 1 &&
view_params_.at(view_id) == *params.get()) {
return;
}
view_params_.insert_or_assign(view_id, EmbeddedViewParams(*params.get()));
}
// |ExternalViewEmbedder|
DlCanvas* AndroidExternalViewEmbedder::CompositeEmbeddedView(int64_t view_id) {
if (slices_.count(view_id) == 1) {
return slices_.at(view_id)->canvas();
}
return nullptr;
}
SkRect AndroidExternalViewEmbedder::GetViewRect(int64_t view_id) const {
const EmbeddedViewParams& params = view_params_.at(view_id);
// TODO(egarciad): The rect should be computed from the mutator stack.
// (Clipping is missing)
// https://github.com/flutter/flutter/issues/59821
return SkRect::MakeXYWH(params.finalBoundingRect().x(), //
params.finalBoundingRect().y(), //
params.finalBoundingRect().width(), //
params.finalBoundingRect().height() //
);
}
// |ExternalViewEmbedder|
void AndroidExternalViewEmbedder::SubmitFlutterView(
GrDirectContext* context,
const std::shared_ptr<impeller::AiksContext>& aiks_context,
std::unique_ptr<SurfaceFrame> frame) {
TRACE_EVENT0("flutter", "AndroidExternalViewEmbedder::SubmitFlutterView");
if (!FrameHasPlatformLayers()) {
frame->Submit();
return;
}
std::unordered_map<int64_t, SkRect> overlay_layers;
DlCanvas* background_canvas = frame->Canvas();
auto current_frame_view_count = composition_order_.size();
// Restore the clip context after exiting this method since it's changed
// below.
DlAutoCanvasRestore save(background_canvas, /*do_save=*/true);
for (size_t i = 0; i < current_frame_view_count; i++) {
int64_t view_id = composition_order_[i];
EmbedderViewSlice* slice = slices_.at(view_id).get();
if (slice->canvas() == nullptr) {
continue;
}
slice->end_recording();
SkRect full_joined_rect = SkRect::MakeEmpty();
    // Determine whether the Flutter UI intersects with any of the previous
// platform views stacked by z position.
//
// This is done by querying the r-tree that holds the records for the
// picture recorder corresponding to the flow layers added after a platform
// view layer.
for (ssize_t j = i; j >= 0; j--) {
int64_t current_view_id = composition_order_[j];
SkRect current_view_rect = GetViewRect(current_view_id);
// The rect above the `current_view_rect`
SkRect partial_joined_rect = SkRect::MakeEmpty();
// Each rect corresponds to a native view that renders Flutter UI.
std::vector<SkIRect> intersection_rects =
slice->region(current_view_rect).getRects();
// Limit the number of native views, so it doesn't grow forever.
//
// In this case, the rects are merged into a single one that is the union
// of all the rects.
for (const SkIRect& rect : intersection_rects) {
partial_joined_rect.join(SkRect::Make(rect));
}
      // Get the intersection rect with the `current_view_rect`.
partial_joined_rect.intersect(current_view_rect);
// Join the `partial_joined_rect` into `full_joined_rect` to get the rect
// above the current `slice`
full_joined_rect.join(partial_joined_rect);
}
if (!full_joined_rect.isEmpty()) {
// Subpixels in the platform may not align with the canvas subpixels.
//
// To workaround it, round the floating point bounds and make the rect
// slightly larger.
//
// For example, {0.3, 0.5, 3.1, 4.7} becomes {0, 0, 4, 5}.
full_joined_rect.set(full_joined_rect.roundOut());
overlay_layers.insert({view_id, full_joined_rect});
// Clip the background canvas, so it doesn't contain any of the pixels
// drawn on the overlay layer.
background_canvas->ClipRect(full_joined_rect,
DlCanvas::ClipOp::kDifference);
}
slice->render_into(background_canvas);
}
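  // Illustrative example (the numbers are made up for this comment): if a
  // platform view covers the region from (0, 0) to (100, 100) and the Flutter
  // content recorded after it draws into the region from (50, 50) to
  // (150, 150), their intersection from (50, 50) to (100, 100) becomes that
  // view's overlay rect; the background canvas is clipped against it so those
  // pixels are only drawn on the overlay layer above the platform view.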
// Manually trigger the DlAutoCanvasRestore before we submit the frame
save.Restore();
// Submit the background canvas frame before switching the GL context to
// the overlay surfaces.
//
// Skip a frame if the embedding is switching surfaces, and indicate in
// `PostPrerollAction` that this frame must be resubmitted.
auto should_submit_current_frame = previous_frame_view_count_ > 0;
if (should_submit_current_frame) {
frame->Submit();
}
for (int64_t view_id : composition_order_) {
SkRect view_rect = GetViewRect(view_id);
const EmbeddedViewParams& params = view_params_.at(view_id);
// Display the platform view. If it's already displayed, then it's
// just positioned and sized.
jni_facade_->FlutterViewOnDisplayPlatformView(
view_id, //
view_rect.x(), //
view_rect.y(), //
view_rect.width(), //
view_rect.height(), //
params.sizePoints().width() * device_pixel_ratio_,
params.sizePoints().height() * device_pixel_ratio_,
params.mutatorsStack() //
);
std::unordered_map<int64_t, SkRect>::const_iterator overlay =
overlay_layers.find(view_id);
if (overlay == overlay_layers.end()) {
continue;
}
std::unique_ptr<SurfaceFrame> frame =
CreateSurfaceIfNeeded(context, //
view_id, //
slices_.at(view_id).get(), //
overlay->second //
);
if (should_submit_current_frame) {
frame->Submit();
}
}
}
// |ExternalViewEmbedder|
std::unique_ptr<SurfaceFrame>
AndroidExternalViewEmbedder::CreateSurfaceIfNeeded(GrDirectContext* context,
int64_t view_id,
EmbedderViewSlice* slice,
const SkRect& rect) {
std::shared_ptr<OverlayLayer> layer = surface_pool_->GetLayer(
context, android_context_, jni_facade_, surface_factory_);
std::unique_ptr<SurfaceFrame> frame =
layer->surface->AcquireFrame(frame_size_);
// Display the overlay surface. If it's already displayed, then it's
// just positioned and sized.
jni_facade_->FlutterViewDisplayOverlaySurface(layer->id, //
rect.x(), //
rect.y(), //
rect.width(), //
rect.height() //
);
DlCanvas* overlay_canvas = frame->Canvas();
overlay_canvas->Clear(DlColor::kTransparent());
// Offset the picture since its absolute position on the scene is determined
// by the position of the overlay view.
overlay_canvas->Translate(-rect.x(), -rect.y());
slice->render_into(overlay_canvas);
return frame;
}
// |ExternalViewEmbedder|
PostPrerollResult AndroidExternalViewEmbedder::PostPrerollAction(
const fml::RefPtr<fml::RasterThreadMerger>& raster_thread_merger) {
if (!FrameHasPlatformLayers()) {
return PostPrerollResult::kSuccess;
}
if (!raster_thread_merger->IsMerged()) {
// The raster thread merger may be disabled if the rasterizer is being
    // created or torn down.
//
// In such cases, the current frame is dropped, and a new frame is attempted
// with the same layer tree.
//
// Eventually, the frame is submitted once this method returns `kSuccess`.
// At that point, the raster tasks are handled on the platform thread.
CancelFrame();
raster_thread_merger->MergeWithLease(kDefaultMergedLeaseDuration);
return PostPrerollResult::kSkipAndRetryFrame;
}
raster_thread_merger->ExtendLeaseTo(kDefaultMergedLeaseDuration);
  // A surface switch requires the frame to be resubmitted.
// TODO(egarciad): https://github.com/flutter/flutter/issues/65652
if (previous_frame_view_count_ == 0) {
return PostPrerollResult::kResubmitFrame;
}
return PostPrerollResult::kSuccess;
}
bool AndroidExternalViewEmbedder::FrameHasPlatformLayers() {
return !composition_order_.empty();
}
// |ExternalViewEmbedder|
DlCanvas* AndroidExternalViewEmbedder::GetRootCanvas() {
// On Android, the root surface is created from the on-screen render target.
return nullptr;
}
void AndroidExternalViewEmbedder::Reset() {
previous_frame_view_count_ = composition_order_.size();
composition_order_.clear();
slices_.clear();
}
// |ExternalViewEmbedder|
void AndroidExternalViewEmbedder::BeginFrame(
GrDirectContext* context,
const fml::RefPtr<fml::RasterThreadMerger>& raster_thread_merger) {
// JNI method must be called on the platform thread.
if (raster_thread_merger->IsOnPlatformThread()) {
jni_facade_->FlutterViewBeginFrame();
}
}
// |ExternalViewEmbedder|
void AndroidExternalViewEmbedder::PrepareFlutterView(
int64_t flutter_view_id,
SkISize frame_size,
double device_pixel_ratio) {
// TODO(dkwingsmt): This class only supports rendering into the implicit view.
// Properly support multi-view in the future.
FML_DCHECK(flutter_view_id == kFlutterImplicitViewId);
Reset();
// The surface size changed. Therefore, destroy existing surfaces as
// the existing surfaces in the pool can't be recycled.
if (frame_size_ != frame_size) {
DestroySurfaces();
}
surface_pool_->SetFrameSize(frame_size);
frame_size_ = frame_size;
device_pixel_ratio_ = device_pixel_ratio;
}
// |ExternalViewEmbedder|
void AndroidExternalViewEmbedder::CancelFrame() {
Reset();
}
// |ExternalViewEmbedder|
void AndroidExternalViewEmbedder::EndFrame(
bool should_resubmit_frame,
const fml::RefPtr<fml::RasterThreadMerger>& raster_thread_merger) {
surface_pool_->RecycleLayers();
// JNI method must be called on the platform thread.
if (raster_thread_merger->IsOnPlatformThread()) {
jni_facade_->FlutterViewEndFrame();
}
}
// |ExternalViewEmbedder|
bool AndroidExternalViewEmbedder::SupportsDynamicThreadMerging() {
return true;
}
// |ExternalViewEmbedder|
void AndroidExternalViewEmbedder::Teardown() {
DestroySurfaces();
}
// |ExternalViewEmbedder|
void AndroidExternalViewEmbedder::DestroySurfaces() {
if (!surface_pool_->HasLayers()) {
return;
}
fml::AutoResetWaitableEvent latch;
fml::TaskRunner::RunNowOrPostTask(task_runners_.GetPlatformTaskRunner(),
[&]() {
surface_pool_->DestroyLayers(jni_facade_);
latch.Signal();
});
latch.Wait();
}
} // namespace flutter
| engine/shell/platform/android/external_view_embedder/external_view_embedder.cc/0 | {
"file_path": "engine/shell/platform/android/external_view_embedder/external_view_embedder.cc",
"repo_id": "engine",
"token_count": 4792
} | 292 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/android/image_lru.h"
namespace flutter {
sk_sp<flutter::DlImage> ImageLRU::FindImage(
std::optional<HardwareBufferKey> key) {
if (!key.has_value()) {
return nullptr;
}
auto key_value = key.value();
for (size_t i = 0u; i < kImageReaderSwapchainSize; i++) {
if (images_[i].key == key_value) {
auto result = images_[i].value;
UpdateKey(result, key_value);
return result;
}
}
return nullptr;
}
void ImageLRU::UpdateKey(const sk_sp<flutter::DlImage>& image,
HardwareBufferKey key) {
if (images_[0].key == key) {
return;
}
size_t i = 1u;
for (; i < kImageReaderSwapchainSize; i++) {
if (images_[i].key == key) {
break;
}
}
for (auto j = i; j > 0; j--) {
images_[j] = images_[j - 1];
}
images_[0] = Data{.key = key, .value = image};
}
HardwareBufferKey ImageLRU::AddImage(const sk_sp<flutter::DlImage>& image,
HardwareBufferKey key) {
HardwareBufferKey lru_key = images_[kImageReaderSwapchainSize - 1].key;
bool updated_image = false;
for (size_t i = 0u; i < kImageReaderSwapchainSize; i++) {
if (images_[i].key == lru_key) {
updated_image = true;
images_[i] = Data{.key = key, .value = image};
break;
}
}
if (!updated_image) {
images_[0] = Data{.key = key, .value = image};
}
UpdateKey(image, key);
return lru_key;
}
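// Illustrative walkthrough (assuming, for the example only, a swapchain size of
// 3): with entries [A, B, C] ordered most- to least-recently used,
// AddImage(imageD, D) evicts C (the key held in the last slot), stores D in
// that slot, promotes D to the front via UpdateKey, and returns C so the
// caller can release the hardware buffer associated with it, leaving
// [D, A, B]. A subsequent FindImage(B) returns imageB and reorders the
// entries to [B, D, A].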
void ImageLRU::Clear() {
for (size_t i = 0u; i < kImageReaderSwapchainSize; i++) {
images_[i] = Data{.key = 0u, .value = nullptr};
}
}
} // namespace flutter
| engine/shell/platform/android/image_lru.cc/0 | {
"file_path": "engine/shell/platform/android/image_lru.cc",
"repo_id": "engine",
"token_count": 759
} | 293 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.android;
import static io.flutter.Build.API_LEVELS;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.DART_ENTRYPOINT_META_DATA_KEY;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.DART_ENTRYPOINT_URI_META_DATA_KEY;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.DEFAULT_BACKGROUND_MODE;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.DEFAULT_DART_ENTRYPOINT;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.DEFAULT_INITIAL_ROUTE;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_BACKGROUND_MODE;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_CACHED_ENGINE_GROUP_ID;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_CACHED_ENGINE_ID;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_DART_ENTRYPOINT;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_DART_ENTRYPOINT_ARGS;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_DESTROY_ENGINE_WITH_ACTIVITY;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_ENABLE_STATE_RESTORATION;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.EXTRA_INITIAL_ROUTE;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.HANDLE_DEEPLINKING_META_DATA_KEY;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.INITIAL_ROUTE_META_DATA_KEY;
import static io.flutter.embedding.android.FlutterActivityLaunchConfigs.NORMAL_THEME_META_DATA_KEY;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Build;
import android.os.Bundle;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.window.OnBackInvokedCallback;
import android.window.OnBackInvokedDispatcher;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import androidx.lifecycle.Lifecycle;
import androidx.lifecycle.LifecycleOwner;
import androidx.lifecycle.LifecycleRegistry;
import io.flutter.Log;
import io.flutter.embedding.android.FlutterActivityLaunchConfigs.BackgroundMode;
import io.flutter.embedding.engine.FlutterEngine;
import io.flutter.embedding.engine.FlutterShellArgs;
import io.flutter.embedding.engine.plugins.activity.ActivityControlSurface;
import io.flutter.embedding.engine.plugins.util.GeneratedPluginRegister;
import io.flutter.plugin.platform.PlatformPlugin;
import java.util.ArrayList;
import java.util.List;
/**
* {@code Activity} which displays a fullscreen Flutter UI.
*
* <p>{@code FlutterActivity} is the simplest and most direct way to integrate Flutter within an
* Android app.
*
* <p><strong>FlutterActivity responsibilities</strong>
*
* <p>{@code FlutterActivity} maintains the following responsibilities:
*
* <ul>
* <li>Displays an Android launch screen.
* <li>Configures the status bar appearance.
* <li>Chooses the Dart execution app bundle path, entrypoint and entrypoint arguments.
* <li>Chooses Flutter's initial route.
* <li>Renders {@code Activity} transparently, if desired.
* <li>Offers hooks for subclasses to provide and configure a {@link
* io.flutter.embedding.engine.FlutterEngine}.
 *   <li>Saves and restores instance state, see {@code #shouldRestoreAndSaveState()}.
* </ul>
*
* <p><strong>Dart entrypoint, entrypoint arguments, initial route, and app bundle path</strong>
*
* <p>The Dart entrypoint executed within this {@code Activity} is "main()" by default. To change
* the entrypoint that a {@code FlutterActivity} executes, subclass {@code FlutterActivity} and
* override {@link #getDartEntrypointFunctionName()}. For non-main Dart entrypoints to not be
* tree-shaken away, you need to annotate those functions with {@code @pragma('vm:entry-point')} in
* Dart.
*
* <p>The Dart entrypoint arguments will be passed as a list of string to Dart's entrypoint
* function. It can be passed using a {@link NewEngineIntentBuilder} via {@link
* NewEngineIntentBuilder#dartEntrypointArgs}.
*
* <p>The Flutter route that is initially loaded within this {@code Activity} is "/". The initial
* route may be specified explicitly by passing the name of the route as a {@code String} in {@link
* FlutterActivityLaunchConfigs#EXTRA_INITIAL_ROUTE}, e.g., "my/deep/link".
*
 * <p>The initial route can also be controlled using a {@link NewEngineIntentBuilder} via {@link
* NewEngineIntentBuilder#initialRoute}.
*
* <p>The app bundle path, Dart entrypoint, Dart entrypoint arguments, and initial route can also be
* controlled in a subclass of {@code FlutterActivity} by overriding their respective methods:
*
* <ul>
* <li>{@link #getAppBundlePath()}
* <li>{@link #getDartEntrypointFunctionName()}
* <li>{@link #getDartEntrypointArgs()}
* <li>{@link #getInitialRoute()}
* </ul>
*
* <p>The Dart entrypoint and app bundle path are not supported as {@code Intent} parameters since
* your Dart library entrypoints are your private APIs and Intents are invocable by other processes.
*
* <p><strong>Using a cached FlutterEngine</strong>
*
* <p>{@code FlutterActivity} can be used with a cached {@link
* io.flutter.embedding.engine.FlutterEngine} instead of creating a new one. Use {@link
* #withCachedEngine(String)} to build a {@code FlutterActivity} {@code Intent} that is configured
* to use an existing, cached {@link io.flutter.embedding.engine.FlutterEngine}. {@link
* io.flutter.embedding.engine.FlutterEngineCache} is the cache that is used to obtain a given
* cached {@link io.flutter.embedding.engine.FlutterEngine}. You must create and put a {@link
* io.flutter.embedding.engine.FlutterEngine} into the {@link
* io.flutter.embedding.engine.FlutterEngineCache} yourself before using the {@link
* #withCachedEngine(String)} builder. An {@code IllegalStateException} will be thrown if a cached
* engine is requested but does not exist in the cache.
*
* <p>When using a cached {@link io.flutter.embedding.engine.FlutterEngine}, that {@link
* io.flutter.embedding.engine.FlutterEngine} should already be executing Dart code, which means
* that the Dart entrypoint and initial route have already been defined. Therefore, {@link
* CachedEngineIntentBuilder} does not offer configuration of these properties.
*
* <p>It is generally recommended to use a cached {@link io.flutter.embedding.engine.FlutterEngine}
* to avoid a momentary delay when initializing a new {@link
* io.flutter.embedding.engine.FlutterEngine}. The two exceptions to using a cached {@link
* FlutterEngine} are:
*
* <ul>
* <li>When {@code FlutterActivity} is the first {@code Activity} displayed by the app, because
* pre-warming a {@link io.flutter.embedding.engine.FlutterEngine} would have no impact in
* this situation.
* <li>When you are unsure when/if you will need to display a Flutter experience.
* </ul>
*
* <p>See https://flutter.dev/docs/development/add-to-app/performance for additional performance
* explorations on engine loading.
*
* <p>The following illustrates how to pre-warm and cache a {@link
* io.flutter.embedding.engine.FlutterEngine}:
*
* <pre>{@code
* // Create and pre-warm a FlutterEngine.
* FlutterEngineGroup group = new FlutterEngineGroup(context);
* FlutterEngine flutterEngine = group.createAndRunDefaultEngine(context);
* flutterEngine.getDartExecutor().executeDartEntrypoint(DartEntrypoint.createDefault());
*
* // Cache the pre-warmed FlutterEngine in the FlutterEngineCache.
* FlutterEngineCache.getInstance().put("my_engine", flutterEngine);
* }</pre>
*
* <p><strong>Alternatives to FlutterActivity</strong>
*
* <p>If Flutter is needed in a location that cannot use an {@code Activity}, consider using a
* {@link FlutterFragment}. Using a {@link FlutterFragment} requires forwarding some calls from an
* {@code Activity} to the {@link FlutterFragment}.
*
* <p>If Flutter is needed in a location that can only use a {@code View}, consider using a {@link
* FlutterView}. Using a {@link FlutterView} requires forwarding some calls from an {@code
* Activity}, as well as forwarding lifecycle calls from an {@code Activity} or a {@code Fragment}.
*
* <p><strong>Launch Screen</strong>
*
* <p>{@code FlutterActivity} supports the display of an Android "launch screen", which is displayed
* while the Android application loads. It is only applicable if {@code FlutterActivity} is the
* first {@code Activity} displayed upon loading the app.
*
* <p>Prior to Flutter 2.5, {@code FlutterActivity} supported the display of a Flutter-specific
* "splash screen" that would be displayed after the launch screen passes. This has since been
* deprecated. If a launch screen is specified, it will automatically persist for as long as it
* takes Flutter to initialize and render its first frame.
*
* <p>Use Android themes to display a launch screen. Create two themes: a launch theme and a normal
* theme. In the launch theme, set {@code windowBackground} to the desired {@code Drawable} for the
* launch screen. In the normal theme, set {@code windowBackground} to any desired background color
* that should normally appear behind your Flutter content. In most cases this background color will
* never be seen, but for possible transition edge cases it is a good idea to explicitly replace the
* launch screen window background with a neutral color.
*
* <p>Do not change aspects of system chrome between a launch theme and normal theme. Either define
* both themes to be fullscreen or not, and define both themes to display the same status bar and
* navigation bar settings. To adjust system chrome once the Flutter app renders, use platform
* channels to instruct Android to do so at the appropriate time. This will avoid any jarring visual
* changes during app startup.
*
* <p>In the AndroidManifest.xml, set the theme of {@code FlutterActivity} to the defined launch
 * theme. In the metadata section for {@code FlutterActivity}, define the following reference to
* your normal theme:
*
* <p>{@code <meta-data android:name="io.flutter.embedding.android.NormalTheme"
* android:resource="@style/YourNormalTheme" /> }
*
* <p>With themes defined, and AndroidManifest.xml updated, Flutter displays the specified launch
* screen until the Android application is initialized.
*
* <p><strong>Alternative Activity</strong> {@link FlutterFragmentActivity} is also available, which
* is similar to {@code FlutterActivity} but it extends {@code FragmentActivity}. You should use
* {@code FlutterActivity}, if possible, but if you need a {@code FragmentActivity} then you should
* use {@link FlutterFragmentActivity}.
*/
// A number of methods in this class have the same implementation as FlutterFragmentActivity. These
// methods are duplicated for readability purposes. Be sure to replicate any change in this class in
// FlutterFragmentActivity, too.
public class FlutterActivity extends Activity
implements FlutterActivityAndFragmentDelegate.Host, LifecycleOwner {
private static final String TAG = "FlutterActivity";
private boolean hasRegisteredBackCallback = false;
/**
* The ID of the {@code FlutterView} created by this activity.
*
* <p>This ID can be used to lookup {@code FlutterView} in the Android view hierarchy. For more,
* see {@link android.view.View#findViewById}.
*/
public static final int FLUTTER_VIEW_ID = View.generateViewId();
/**
* Creates an {@link Intent} that launches a {@code FlutterActivity}, which creates a {@link
* FlutterEngine} that executes a {@code main()} Dart entrypoint, and displays the "/" route as
* Flutter's initial route.
*
* <p>Consider using the {@link #withCachedEngine(String)} {@link Intent} builder to control when
* the {@link io.flutter.embedding.engine.FlutterEngine} should be created in your application.
*
* @param launchContext The launch context. e.g. An Activity.
* @return The default intent.
*/
@NonNull
public static Intent createDefaultIntent(@NonNull Context launchContext) {
return withNewEngine().build(launchContext);
}
/**
* Creates an {@link NewEngineIntentBuilder}, which can be used to configure an {@link Intent} to
* launch a {@code FlutterActivity} that internally creates a new {@link
* io.flutter.embedding.engine.FlutterEngine} using the desired Dart entrypoint, initial route,
* etc.
*
* @return The engine intent builder.
*/
@NonNull
public static NewEngineIntentBuilder withNewEngine() {
return new NewEngineIntentBuilder(FlutterActivity.class);
}
/**
* Builder to create an {@code Intent} that launches a {@code FlutterActivity} with a new {@link
* FlutterEngine} and the desired configuration.
*/
public static class NewEngineIntentBuilder {
private final Class<? extends FlutterActivity> activityClass;
private String initialRoute = DEFAULT_INITIAL_ROUTE;
private String backgroundMode = DEFAULT_BACKGROUND_MODE;
@Nullable private List<String> dartEntrypointArgs;
/**
* Constructor that allows this {@code NewEngineIntentBuilder} to be used by subclasses of
* {@code FlutterActivity}.
*
* <p>Subclasses of {@code FlutterActivity} should provide their own static version of {@link
* #withNewEngine()}, which returns an instance of {@code NewEngineIntentBuilder} constructed
* with a {@code Class} reference to the {@code FlutterActivity} subclass, e.g.:
*
* <p>{@code return new NewEngineIntentBuilder(MyFlutterActivity.class); }
*/
public NewEngineIntentBuilder(@NonNull Class<? extends FlutterActivity> activityClass) {
this.activityClass = activityClass;
}
/**
     * The initial route that a Flutter app will render in this {@link FlutterActivity}. Defaults to
* "/".
*
* @param initialRoute The route.
* @return The engine intent builder.
*/
@NonNull
public NewEngineIntentBuilder initialRoute(@NonNull String initialRoute) {
this.initialRoute = initialRoute;
return this;
}
/**
* The mode of {@code FlutterActivity}'s background, either {@link BackgroundMode#opaque} or
* {@link BackgroundMode#transparent}.
*
* <p>The default background mode is {@link BackgroundMode#opaque}.
*
* <p>Choosing a background mode of {@link BackgroundMode#transparent} will configure the inner
* {@link FlutterView} of this {@code FlutterActivity} to be configured with a {@link
* FlutterTextureView} to support transparency. This choice has a non-trivial performance
* impact. A transparent background should only be used if it is necessary for the app design
* being implemented.
*
* <p>A {@code FlutterActivity} that is configured with a background mode of {@link
* BackgroundMode#transparent} must have a theme applied to it that includes the following
* property: {@code <item name="android:windowIsTranslucent">true</item>}.
*
* @param backgroundMode The background mode.
* @return The engine intent builder.
*/
@NonNull
public NewEngineIntentBuilder backgroundMode(@NonNull BackgroundMode backgroundMode) {
this.backgroundMode = backgroundMode.name();
return this;
}
/**
* The Dart entrypoint arguments will be passed as a list of string to Dart's entrypoint
* function.
*
* <p>A value of null means do not pass any arguments to Dart's entrypoint function.
*
* @param dartEntrypointArgs The Dart entrypoint arguments.
* @return The engine intent builder.
*/
@NonNull
public NewEngineIntentBuilder dartEntrypointArgs(@Nullable List<String> dartEntrypointArgs) {
this.dartEntrypointArgs = dartEntrypointArgs;
return this;
}
/**
* Creates and returns an {@link Intent} that will launch a {@code FlutterActivity} with the
* desired configuration.
*
* @param context The context. e.g. An Activity.
* @return The intent.
*/
@NonNull
public Intent build(@NonNull Context context) {
Intent intent =
new Intent(context, activityClass)
.putExtra(EXTRA_INITIAL_ROUTE, initialRoute)
.putExtra(EXTRA_BACKGROUND_MODE, backgroundMode)
.putExtra(EXTRA_DESTROY_ENGINE_WITH_ACTIVITY, true);
if (dartEntrypointArgs != null) {
        intent.putExtra(EXTRA_DART_ENTRYPOINT_ARGS, new ArrayList<>(dartEntrypointArgs));
}
return intent;
}
}
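  // Illustrative usage sketch (the host Activity reference `hostActivity` and
  // the route below are hypothetical, for this comment only):
  //
  //   Intent intent = FlutterActivity.withNewEngine()
  //       .initialRoute("/settings")
  //       .backgroundMode(BackgroundMode.opaque)
  //       .build(hostActivity);
  //   hostActivity.startActivity(intent);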
/**
* Creates a {@link CachedEngineIntentBuilder}, which can be used to configure an {@link Intent}
* to launch a {@code FlutterActivity} that internally uses an existing {@link
* io.flutter.embedding.engine.FlutterEngine} that is cached in {@link
* io.flutter.embedding.engine.FlutterEngineCache}.
*
* @param cachedEngineId A cached engine ID.
* @return The builder.
*/
public static CachedEngineIntentBuilder withCachedEngine(@NonNull String cachedEngineId) {
return new CachedEngineIntentBuilder(FlutterActivity.class, cachedEngineId);
}
/**
* Builder to create an {@code Intent} that launches a {@code FlutterActivity} with an existing
* {@link io.flutter.embedding.engine.FlutterEngine} that is cached in {@link
* io.flutter.embedding.engine.FlutterEngineCache}.
*/
public static class CachedEngineIntentBuilder {
private final Class<? extends FlutterActivity> activityClass;
private final String cachedEngineId;
private boolean destroyEngineWithActivity = false;
private String backgroundMode = DEFAULT_BACKGROUND_MODE;
/**
* Constructor that allows this {@code CachedEngineIntentBuilder} to be used by subclasses of
* {@code FlutterActivity}.
*
* <p>Subclasses of {@code FlutterActivity} should provide their own static version of {@link
* FlutterActivity#withCachedEngine(String)}, which returns an instance of {@code
* CachedEngineIntentBuilder} constructed with a {@code Class} reference to the {@code
* FlutterActivity} subclass, e.g.:
*
* <p>{@code return new CachedEngineIntentBuilder(MyFlutterActivity.class, engineId); }
*
* @param activityClass A subclass of {@code FlutterActivity}.
* @param engineId The engine id.
*/
public CachedEngineIntentBuilder(
@NonNull Class<? extends FlutterActivity> activityClass, @NonNull String engineId) {
this.activityClass = activityClass;
this.cachedEngineId = engineId;
}
/**
* Whether the cached {@link io.flutter.embedding.engine.FlutterEngine} should be destroyed and
* removed from the cache when this {@code FlutterActivity} is destroyed.
*
* <p>The default value is {@code false}.
*
* @param destroyEngineWithActivity Whether to destroy the engine.
* @return The builder.
*/
public CachedEngineIntentBuilder destroyEngineWithActivity(boolean destroyEngineWithActivity) {
this.destroyEngineWithActivity = destroyEngineWithActivity;
return this;
}
/**
* The mode of {@code FlutterActivity}'s background, either {@link BackgroundMode#opaque} or
* {@link BackgroundMode#transparent}.
*
* <p>The default background mode is {@link BackgroundMode#opaque}.
*
* <p>Choosing a background mode of {@link BackgroundMode#transparent} will configure the inner
* {@link FlutterView} of this {@code FlutterActivity} to be configured with a {@link
* FlutterTextureView} to support transparency. This choice has a non-trivial performance
* impact. A transparent background should only be used if it is necessary for the app design
* being implemented.
*
* <p>A {@code FlutterActivity} that is configured with a background mode of {@link
* BackgroundMode#transparent} must have a theme applied to it that includes the following
* property: {@code <item name="android:windowIsTranslucent">true</item>}.
*
* @param backgroundMode The background mode
* @return The builder.
*/
@NonNull
public CachedEngineIntentBuilder backgroundMode(@NonNull BackgroundMode backgroundMode) {
this.backgroundMode = backgroundMode.name();
return this;
}
/**
* Creates and returns an {@link Intent} that will launch a {@code FlutterActivity} with the
* desired configuration.
*
* @param context The context. e.g. An Activity.
* @return The intent.
*/
@NonNull
public Intent build(@NonNull Context context) {
return new Intent(context, activityClass)
.putExtra(EXTRA_CACHED_ENGINE_ID, cachedEngineId)
.putExtra(EXTRA_DESTROY_ENGINE_WITH_ACTIVITY, destroyEngineWithActivity)
.putExtra(EXTRA_BACKGROUND_MODE, backgroundMode);
}
}
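  // Illustrative usage sketch (assumes a FlutterEngine was previously cached
  // under the hypothetical id "my_engine", e.g. via
  // FlutterEngineCache.getInstance().put("my_engine", flutterEngine)):
  //
  //   Intent intent = FlutterActivity.withCachedEngine("my_engine")
  //       .destroyEngineWithActivity(false)
  //       .build(hostActivity);
  //   hostActivity.startActivity(intent);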
/**
* Creates a {@link NewEngineInGroupIntentBuilder}, which can be used to configure an {@link
* Intent} to launch a {@code FlutterActivity} by internally creating a FlutterEngine from an
* existing {@link io.flutter.embedding.engine.FlutterEngineGroup} cached in a specified {@link
* io.flutter.embedding.engine.FlutterEngineGroupCache}.
*
* <pre>{@code
* // Create a FlutterEngineGroup, such as in the onCreate method of the Application.
* FlutterEngineGroup engineGroup = new FlutterEngineGroup(this);
* FlutterEngineGroupCache.getInstance().put("my_cached_engine_group_id", engineGroup);
*
* // Start a FlutterActivity with the FlutterEngineGroup by creating an intent with withNewEngineInGroup
* Intent intent = FlutterActivity.withNewEngineInGroup("my_cached_engine_group_id")
* .dartEntrypoint("custom_entrypoint")
* .initialRoute("/custom/route")
* .backgroundMode(BackgroundMode.transparent)
* .build(context);
* startActivity(intent);
* }</pre>
*
* @param engineGroupId A cached engine group ID.
* @return The builder.
*/
public static NewEngineInGroupIntentBuilder withNewEngineInGroup(@NonNull String engineGroupId) {
return new NewEngineInGroupIntentBuilder(FlutterActivity.class, engineGroupId);
}
/**
* Builder to create an {@code Intent} that launches a {@code FlutterActivity} with a new {@link
* FlutterEngine} created by FlutterEngineGroup#createAndRunEngine.
*/
public static class NewEngineInGroupIntentBuilder {
private final Class<? extends FlutterActivity> activityClass;
private final String cachedEngineGroupId;
private String dartEntrypoint = DEFAULT_DART_ENTRYPOINT;
private String initialRoute = DEFAULT_INITIAL_ROUTE;
private String backgroundMode = DEFAULT_BACKGROUND_MODE;
/**
* Constructor that allows this {@code NewEngineInGroupIntentBuilder} to be used by subclasses
* of {@code FlutterActivity}.
*
* <p>Subclasses of {@code FlutterActivity} should provide their own static version of {@link
* #withNewEngineInGroup}, which returns an instance of {@code NewEngineInGroupIntentBuilder}
* constructed with a {@code Class} reference to the {@code FlutterActivity} subclass, e.g.:
*
* <p>{@code return new NewEngineInGroupIntentBuilder(MyFlutterActivity.class,
     * cachedEngineGroupId); }
*
* <pre>{@code
* // Create a FlutterEngineGroup, such as in the onCreate method of the Application.
* FlutterEngineGroup engineGroup = new FlutterEngineGroup(this);
* FlutterEngineGroupCache.getInstance().put("my_cached_engine_group_id", engineGroup);
*
* // Create a NewEngineInGroupIntentBuilder that would build an intent to start my custom FlutterActivity subclass.
* FlutterActivity.NewEngineInGroupIntentBuilder intentBuilder =
* new FlutterActivity.NewEngineInGroupIntentBuilder(
* MyFlutterActivity.class,
* app.engineGroupId);
* intentBuilder.dartEntrypoint("main")
* .initialRoute("/custom/route")
* .backgroundMode(BackgroundMode.transparent);
* startActivity(intentBuilder.build(context));
* }</pre>
*
* @param activityClass A subclass of {@code FlutterActivity}.
* @param engineGroupId The engine group id.
*/
public NewEngineInGroupIntentBuilder(
@NonNull Class<? extends FlutterActivity> activityClass, @NonNull String engineGroupId) {
this.activityClass = activityClass;
this.cachedEngineGroupId = engineGroupId;
}
/**
* The Dart entrypoint that will be executed in the newly created FlutterEngine as soon as the
* Dart snapshot is loaded. Default to "main".
*
* @param dartEntrypoint The dart entrypoint's name
* @return The engine group intent builder
*/
@NonNull
public NewEngineInGroupIntentBuilder dartEntrypoint(@NonNull String dartEntrypoint) {
this.dartEntrypoint = dartEntrypoint;
return this;
}
/**
     * The initial route that a Flutter app will render in this {@link FlutterActivity}. Defaults to
* "/".
*
* @param initialRoute The route.
* @return The engine group intent builder.
*/
@NonNull
public NewEngineInGroupIntentBuilder initialRoute(@NonNull String initialRoute) {
this.initialRoute = initialRoute;
return this;
}
/**
* The mode of {@code FlutterActivity}'s background, either {@link BackgroundMode#opaque} or
* {@link BackgroundMode#transparent}.
*
* <p>The default background mode is {@link BackgroundMode#opaque}.
*
* <p>Choosing a background mode of {@link BackgroundMode#transparent} will configure the inner
* {@link FlutterView} of this {@code FlutterActivity} to be configured with a {@link
* FlutterTextureView} to support transparency. This choice has a non-trivial performance
* impact. A transparent background should only be used if it is necessary for the app design
* being implemented.
*
* <p>A {@code FlutterActivity} that is configured with a background mode of {@link
* BackgroundMode#transparent} must have a theme applied to it that includes the following
* property: {@code <item name="android:windowIsTranslucent">true</item>}.
*
* @param backgroundMode The background mode.
* @return The engine group intent builder.
*/
@NonNull
public NewEngineInGroupIntentBuilder backgroundMode(@NonNull BackgroundMode backgroundMode) {
this.backgroundMode = backgroundMode.name();
return this;
}
/**
* Creates and returns an {@link Intent} that will launch a {@code FlutterActivity} with the
* desired configuration.
*
* @param context The context. e.g. An Activity.
* @return The intent.
*/
@NonNull
public Intent build(@NonNull Context context) {
return new Intent(context, activityClass)
.putExtra(EXTRA_DART_ENTRYPOINT, dartEntrypoint)
.putExtra(EXTRA_INITIAL_ROUTE, initialRoute)
.putExtra(EXTRA_CACHED_ENGINE_GROUP_ID, cachedEngineGroupId)
.putExtra(EXTRA_BACKGROUND_MODE, backgroundMode)
.putExtra(EXTRA_DESTROY_ENGINE_WITH_ACTIVITY, true);
}
}
// Delegate that runs all lifecycle and OS hook logic that is common between
// FlutterActivity and FlutterFragment. See the FlutterActivityAndFragmentDelegate
// implementation for details about why it exists.
@VisibleForTesting protected FlutterActivityAndFragmentDelegate delegate;
@NonNull private LifecycleRegistry lifecycle;
public FlutterActivity() {
lifecycle = new LifecycleRegistry(this);
}
/**
* This method exists so that JVM tests can ensure that a delegate exists without putting this
* Activity through any lifecycle events, because JVM tests cannot handle executing any lifecycle
* methods, at the time of writing this.
*
* <p>The testing infrastructure should be upgraded to make FlutterActivity tests easy to write
* while exercising real lifecycle methods. At such a time, this method should be removed.
*
* @param delegate The delegate.
*/
// TODO(mattcarroll): remove this when tests allow for it
// (https://github.com/flutter/flutter/issues/43798)
@VisibleForTesting
/* package */ void setDelegate(@NonNull FlutterActivityAndFragmentDelegate delegate) {
this.delegate = delegate;
}
/**
* Returns the Android App Component exclusively attached to {@link
* io.flutter.embedding.engine.FlutterEngine}.
*/
@Override
public ExclusiveAppComponent<Activity> getExclusiveAppComponent() {
return delegate;
}
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
switchLaunchThemeForNormalTheme();
super.onCreate(savedInstanceState);
delegate = new FlutterActivityAndFragmentDelegate(this);
delegate.onAttach(this);
delegate.onRestoreInstanceState(savedInstanceState);
lifecycle.handleLifecycleEvent(Lifecycle.Event.ON_CREATE);
configureWindowForTransparency();
setContentView(createFlutterView());
configureStatusBarForFullscreenFlutterExperience();
}
/**
* Registers the callback with OnBackInvokedDispatcher to capture back navigation gestures and
* pass them to the framework.
*
* <p>This replaces the deprecated onBackPressed method override in order to support API 33's
* predictive back navigation feature.
*
* <p>The callback must be unregistered in order to prevent unpredictable behavior once outside
* the Flutter app.
*/
@VisibleForTesting
public void registerOnBackInvokedCallback() {
if (Build.VERSION.SDK_INT >= API_LEVELS.API_33) {
getOnBackInvokedDispatcher()
.registerOnBackInvokedCallback(
OnBackInvokedDispatcher.PRIORITY_DEFAULT, onBackInvokedCallback);
hasRegisteredBackCallback = true;
}
}
/**
* Unregisters the callback from OnBackInvokedDispatcher.
*
* <p>This should be called when the activity is no longer in use to prevent unpredictable
* behavior such as being stuck and unable to press back.
*/
@VisibleForTesting
public void unregisterOnBackInvokedCallback() {
if (Build.VERSION.SDK_INT >= API_LEVELS.API_33) {
getOnBackInvokedDispatcher().unregisterOnBackInvokedCallback(onBackInvokedCallback);
hasRegisteredBackCallback = false;
}
}
private final OnBackInvokedCallback onBackInvokedCallback =
Build.VERSION.SDK_INT >= API_LEVELS.API_33
? new OnBackInvokedCallback() {
// TODO(garyq): Remove SuppressWarnings annotation. This was added to workaround
// a google3 bug where the linter is not properly running against API 33, causing
// a failure here. See b/243609613 and https://github.com/flutter/flutter/issues/111295
@SuppressWarnings("Override")
@Override
public void onBackInvoked() {
onBackPressed();
}
}
: null;
@Override
public void setFrameworkHandlesBack(boolean frameworkHandlesBack) {
if (frameworkHandlesBack && !hasRegisteredBackCallback) {
registerOnBackInvokedCallback();
} else if (!frameworkHandlesBack && hasRegisteredBackCallback) {
unregisterOnBackInvokedCallback();
}
}
/**
* Switches themes for this {@code Activity} from the theme used to launch this {@code Activity}
* to a "normal theme" that is intended for regular {@code Activity} operation.
*
* <p>This behavior is offered so that a "launch screen" can be displayed while the application
* initially loads. To utilize this behavior in an app, do the following:
*
* <ol>
* <li>Create 2 different themes in style.xml: one theme for the launch screen and one theme for
* normal display.
* <li>In the launch screen theme, set the "windowBackground" property to a {@code Drawable} of
* your choice.
* <li>In the normal theme, customize however you'd like.
* <li>In the AndroidManifest.xml, set the theme of your {@code FlutterActivity} to your launch
* theme.
* <li>Add a {@code <meta-data>} property to your {@code FlutterActivity} with a name of
* "io.flutter.embedding.android.NormalTheme" and set the resource to your normal theme,
* e.g., {@code android:resource="@style/MyNormalTheme}.
* </ol>
*
* With the above settings, your launch theme will be used when loading the app, and then the
* theme will be switched to your normal theme once the app has initialized.
*
* <p>Do not change aspects of system chrome between a launch theme and normal theme. Either
* define both themes to be fullscreen or not, and define both themes to display the same status
* bar and navigation bar settings. If you wish to adjust system chrome once your Flutter app
* renders, use platform channels to instruct Android to do so at the appropriate time. This will
* avoid any jarring visual changes during app startup.
*/
private void switchLaunchThemeForNormalTheme() {
try {
Bundle metaData = getMetaData();
if (metaData != null) {
int normalThemeRID = metaData.getInt(NORMAL_THEME_META_DATA_KEY, -1);
if (normalThemeRID != -1) {
setTheme(normalThemeRID);
}
} else {
Log.v(TAG, "Using the launch theme as normal theme.");
}
} catch (PackageManager.NameNotFoundException exception) {
Log.e(
TAG,
"Could not read meta-data for FlutterActivity. Using the launch theme as normal theme.");
}
}
/**
* Sets this {@code Activity}'s {@code Window} background to be transparent, and hides the status
* bar, if this {@code Activity}'s desired {@link BackgroundMode} is {@link
* BackgroundMode#transparent}.
*
* <p>For {@code Activity} transparency to work as expected, the theme applied to this {@code
* Activity} must include {@code <item name="android:windowIsTranslucent">true</item>}.
*/
private void configureWindowForTransparency() {
BackgroundMode backgroundMode = getBackgroundMode();
if (backgroundMode == BackgroundMode.transparent) {
getWindow().setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
}
}
@NonNull
private View createFlutterView() {
return delegate.onCreateView(
/* inflater=*/ null,
/* container=*/ null,
/* savedInstanceState=*/ null,
/*flutterViewId=*/ FLUTTER_VIEW_ID,
/*shouldDelayFirstAndroidViewDraw=*/ getRenderMode() == RenderMode.surface);
}
private void configureStatusBarForFullscreenFlutterExperience() {
Window window = getWindow();
window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
window.setStatusBarColor(0x40000000);
window.getDecorView().setSystemUiVisibility(PlatformPlugin.DEFAULT_SYSTEM_UI);
}
@Override
protected void onStart() {
super.onStart();
lifecycle.handleLifecycleEvent(Lifecycle.Event.ON_START);
if (stillAttachedForEvent("onStart")) {
delegate.onStart();
}
}
@Override
protected void onResume() {
super.onResume();
lifecycle.handleLifecycleEvent(Lifecycle.Event.ON_RESUME);
if (stillAttachedForEvent("onResume")) {
delegate.onResume();
}
}
@Override
public void onPostResume() {
super.onPostResume();
if (stillAttachedForEvent("onPostResume")) {
delegate.onPostResume();
}
}
@Override
protected void onPause() {
super.onPause();
if (stillAttachedForEvent("onPause")) {
delegate.onPause();
}
lifecycle.handleLifecycleEvent(Lifecycle.Event.ON_PAUSE);
}
@Override
protected void onStop() {
super.onStop();
if (stillAttachedForEvent("onStop")) {
delegate.onStop();
}
lifecycle.handleLifecycleEvent(Lifecycle.Event.ON_STOP);
}
@Override
protected void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
if (stillAttachedForEvent("onSaveInstanceState")) {
delegate.onSaveInstanceState(outState);
}
}
/**
* Irreversibly release this activity's control of the {@link
* io.flutter.embedding.engine.FlutterEngine} and its subcomponents.
*
* <p>Calling will disconnect this activity's view from the Flutter renderer, disconnect this
* activity from plugins' {@link ActivityControlSurface}, and stop system channel messages from
* this activity.
*
* <p>After calling, this activity should be disposed immediately and not be re-used.
*/
@VisibleForTesting
public void release() {
unregisterOnBackInvokedCallback();
if (delegate != null) {
delegate.release();
delegate = null;
}
}
@Override
public void detachFromFlutterEngine() {
Log.w(
TAG,
"FlutterActivity "
+ this
+ " connection to the engine "
+ getFlutterEngine()
+ " evicted by another attaching activity");
if (delegate != null) {
delegate.onDestroyView();
delegate.onDetach();
}
}
@Override
protected void onDestroy() {
super.onDestroy();
if (stillAttachedForEvent("onDestroy")) {
delegate.onDestroyView();
delegate.onDetach();
}
release();
lifecycle.handleLifecycleEvent(Lifecycle.Event.ON_DESTROY);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (stillAttachedForEvent("onActivityResult")) {
delegate.onActivityResult(requestCode, resultCode, data);
}
}
@Override
protected void onNewIntent(@NonNull Intent intent) {
// TODO(mattcarroll): change G3 lint rule that forces us to call super
super.onNewIntent(intent);
if (stillAttachedForEvent("onNewIntent")) {
delegate.onNewIntent(intent);
}
}
@Override
public void onBackPressed() {
if (stillAttachedForEvent("onBackPressed")) {
delegate.onBackPressed();
}
}
@Override
public void onRequestPermissionsResult(
int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
if (stillAttachedForEvent("onRequestPermissionsResult")) {
delegate.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
@Override
public void onUserLeaveHint() {
if (stillAttachedForEvent("onUserLeaveHint")) {
delegate.onUserLeaveHint();
}
}
@Override
public void onWindowFocusChanged(boolean hasFocus) {
super.onWindowFocusChanged(hasFocus);
if (stillAttachedForEvent("onWindowFocusChanged")) {
delegate.onWindowFocusChanged(hasFocus);
}
}
@Override
public void onTrimMemory(int level) {
super.onTrimMemory(level);
if (stillAttachedForEvent("onTrimMemory")) {
delegate.onTrimMemory(level);
}
}
/**
* {@link FlutterActivityAndFragmentDelegate.Host} method that is used by {@link
* FlutterActivityAndFragmentDelegate} to obtain a {@code Context} reference as needed.
*/
@Override
@NonNull
public Context getContext() {
return this;
}
/**
* {@link FlutterActivityAndFragmentDelegate.Host} method that is used by {@link
* FlutterActivityAndFragmentDelegate} to obtain an {@code Activity} reference as needed. This
* reference is used by the delegate to instantiate a {@link FlutterView}, a {@link
* PlatformPlugin}, and to determine if the {@code Activity} is changing configurations.
*/
@Override
@NonNull
public Activity getActivity() {
return this;
}
/**
* {@link FlutterActivityAndFragmentDelegate.Host} method that is used by {@link
* FlutterActivityAndFragmentDelegate} to obtain a {@code Lifecycle} reference as needed. This
* reference is used by the delegate to provide Flutter plugins with access to lifecycle events.
*/
@Override
@NonNull
public Lifecycle getLifecycle() {
return lifecycle;
}
/**
* {@link FlutterActivityAndFragmentDelegate.Host} method that is used by {@link
* FlutterActivityAndFragmentDelegate} to obtain Flutter shell arguments when initializing
* Flutter.
*/
@NonNull
@Override
public FlutterShellArgs getFlutterShellArgs() {
return FlutterShellArgs.fromIntent(getIntent());
}
/**
* Returns the ID of a statically cached {@link io.flutter.embedding.engine.FlutterEngine} to use
* within this {@code FlutterActivity}, or {@code null} if this {@code FlutterActivity} does not
* want to use a cached {@link io.flutter.embedding.engine.FlutterEngine}.
*/
@Override
@Nullable
public String getCachedEngineId() {
return getIntent().getStringExtra(EXTRA_CACHED_ENGINE_ID);
}
/**
* Returns the ID of a statically cached {@link io.flutter.embedding.engine.FlutterEngineGroup} to
* use within this {@code FlutterActivity}, or {@code null} if this {@code FlutterActivity} does
* not want to use a cached {@link io.flutter.embedding.engine.FlutterEngineGroup}.
*/
@Override
@Nullable
public String getCachedEngineGroupId() {
return getIntent().getStringExtra(EXTRA_CACHED_ENGINE_GROUP_ID);
}
/**
* Returns false if the {@link io.flutter.embedding.engine.FlutterEngine} backing this {@code
* FlutterActivity} should outlive this {@code FlutterActivity}, or true to be destroyed when the
* {@code FlutterActivity} is destroyed.
*
* <p>The default value is {@code true} in cases where {@code FlutterActivity} created its own
* {@link io.flutter.embedding.engine.FlutterEngine}, and {@code false} in cases where a cached
* {@link io.flutter.embedding.engine.FlutterEngine} was provided.
*/
@Override
public boolean shouldDestroyEngineWithHost() {
boolean explicitDestructionRequested =
getIntent().getBooleanExtra(EXTRA_DESTROY_ENGINE_WITH_ACTIVITY, false);
if (getCachedEngineId() != null || delegate.isFlutterEngineFromHost()) {
// Only destroy a cached engine if explicitly requested by app developer.
return explicitDestructionRequested;
} else {
// If this Activity created the FlutterEngine, destroy it by default unless
// explicitly requested not to.
return getIntent().getBooleanExtra(EXTRA_DESTROY_ENGINE_WITH_ACTIVITY, true);
}
}
/**
* The Dart entrypoint that will be executed as soon as the Dart snapshot is loaded.
*
* <p>This preference can be controlled with 2 methods:
*
* <ol>
   *   <li>Pass a {@code String} as {@link FlutterActivityLaunchConfigs#EXTRA_DART_ENTRYPOINT} with the
* launching {@code Intent}, or
* <li>Set a {@code <meta-data>} called {@link
* FlutterActivityLaunchConfigs#DART_ENTRYPOINT_META_DATA_KEY} within the Android manifest
* definition for this {@code FlutterActivity}
* </ol>
*
* If both preferences are set, the {@code Intent} preference takes priority.
*
* <p>Subclasses may override this method to directly control the Dart entrypoint.
*/
@NonNull
public String getDartEntrypointFunctionName() {
if (getIntent().hasExtra(EXTRA_DART_ENTRYPOINT)) {
return getIntent().getStringExtra(EXTRA_DART_ENTRYPOINT);
}
try {
Bundle metaData = getMetaData();
String desiredDartEntrypoint =
metaData != null ? metaData.getString(DART_ENTRYPOINT_META_DATA_KEY) : null;
return desiredDartEntrypoint != null ? desiredDartEntrypoint : DEFAULT_DART_ENTRYPOINT;
} catch (PackageManager.NameNotFoundException e) {
return DEFAULT_DART_ENTRYPOINT;
}
}
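  // Illustrative sketch (a hypothetical subclass in application code, not part of this file):
  // overriding the entrypoint directly instead of using the Intent extra or manifest meta-data.
  // The entrypoint name "mainDebug" is an assumed example.
  //
  //   public class DebugFlutterActivity extends FlutterActivity {
  //     @Override
  //     @NonNull
  //     public String getDartEntrypointFunctionName() {
  //       return "mainDebug";
  //     }
  //   }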
/**
   * The Dart entrypoint arguments will be passed as a list of strings to Dart's entrypoint function.
*
* <p>A value of null means do not pass any arguments to Dart's entrypoint function.
*
* <p>Subclasses may override this method to directly control the Dart entrypoint arguments.
*/
@Nullable
public List<String> getDartEntrypointArgs() {
return (List<String>) getIntent().getSerializableExtra(EXTRA_DART_ENTRYPOINT_ARGS);
}
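  // Illustrative sketch (a hypothetical subclass in application code; the argument values are
  // assumed examples): supplying entrypoint arguments by overriding the accessor above.
  //
  //   public class StagingFlutterActivity extends FlutterActivity {
  //     @Override
  //     public List<String> getDartEntrypointArgs() {
  //       return java.util.Arrays.asList("--flavor", "staging");
  //     }
  //   }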
/**
* The Dart library URI for the entrypoint that will be executed as soon as the Dart snapshot is
* loaded.
*
* <p>Example value: "package:foo/bar.dart"
*
* <p>This preference can be controlled by setting a {@code <meta-data>} called {@link
* FlutterActivityLaunchConfigs#DART_ENTRYPOINT_URI_META_DATA_KEY} within the Android manifest
* definition for this {@code FlutterActivity}.
*
* <p>A value of null means use the default root library.
*
* <p>Subclasses may override this method to directly control the Dart entrypoint uri.
*/
@Nullable
public String getDartEntrypointLibraryUri() {
try {
Bundle metaData = getMetaData();
String desiredDartLibraryUri =
metaData != null ? metaData.getString(DART_ENTRYPOINT_URI_META_DATA_KEY) : null;
return desiredDartLibraryUri;
} catch (PackageManager.NameNotFoundException e) {
return null;
}
}
/**
* The initial route that a Flutter app will render upon loading and executing its Dart code.
*
* <p>This preference can be controlled with 2 methods:
*
* <ol>
   *   <li>Pass a String as {@link FlutterActivityLaunchConfigs#EXTRA_INITIAL_ROUTE} with the
* launching {@code Intent}, or
* <li>Set a {@code <meta-data>} called {@link
* FlutterActivityLaunchConfigs#INITIAL_ROUTE_META_DATA_KEY} for this {@code Activity} in
* the Android manifest.
* </ol>
*
* If both preferences are set, the {@code Intent} preference takes priority.
*
* <p>The reason that a {@code <meta-data>} preference is supported is because this {@code
* Activity} might be the very first {@code Activity} launched, which means the developer won't
* have control over the incoming {@code Intent}.
*
* <p>Subclasses may override this method to directly control the initial route.
*
* <p>If this method returns null and the {@code shouldHandleDeeplinking} returns true, the
* initial route is derived from the {@code Intent} through the Intent.getData() instead.
*/
public String getInitialRoute() {
if (getIntent().hasExtra(EXTRA_INITIAL_ROUTE)) {
return getIntent().getStringExtra(EXTRA_INITIAL_ROUTE);
}
try {
Bundle metaData = getMetaData();
String desiredInitialRoute =
metaData != null ? metaData.getString(INITIAL_ROUTE_META_DATA_KEY) : null;
return desiredInitialRoute;
} catch (PackageManager.NameNotFoundException e) {
return null;
}
}
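  // Illustrative sketch (the route value is an assumed example): setting the initial route
  // through the launching Intent using the new-engine intent builder this class exposes.
  //
  //   Intent intent =
  //       FlutterActivity.withNewEngine()
  //           .initialRoute("/settings")
  //           .build(context);
  //   context.startActivity(intent);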
/**
* A custom path to the bundle that contains this Flutter app's resources, e.g., Dart code
* snapshots.
*
* <p>When this {@code FlutterActivity} is run by Flutter tooling and a data String is included in
* the launching {@code Intent}, that data String is interpreted as an app bundle path.
*
* <p>When otherwise unspecified, the value is null, which defaults to the app bundle path defined
* in {@link io.flutter.embedding.engine.loader.FlutterLoader#findAppBundlePath()}.
*
* <p>Subclasses may override this method to return a custom app bundle path.
*/
@NonNull
public String getAppBundlePath() {
// If this Activity was launched from tooling, and the incoming Intent contains
// a custom app bundle path, return that path.
// TODO(mattcarroll): determine if we should have an explicit FlutterTestActivity instead of
// conflating.
if (isDebuggable() && Intent.ACTION_RUN.equals(getIntent().getAction())) {
String appBundlePath = getIntent().getDataString();
if (appBundlePath != null) {
return appBundlePath;
}
}
return null;
}
/**
* Returns true if Flutter is running in "debug mode", and false otherwise.
*
* <p>Debug mode allows Flutter to operate with hot reload and hot restart. Release mode does not.
*/
private boolean isDebuggable() {
return (getApplicationInfo().flags & ApplicationInfo.FLAG_DEBUGGABLE) != 0;
}
/**
* {@link FlutterActivityAndFragmentDelegate.Host} method that is used by {@link
* FlutterActivityAndFragmentDelegate} to obtain the desired {@link RenderMode} that should be
* used when instantiating a {@link FlutterView}.
*/
@NonNull
@Override
public RenderMode getRenderMode() {
return getBackgroundMode() == BackgroundMode.opaque ? RenderMode.surface : RenderMode.texture;
}
/**
* {@link FlutterActivityAndFragmentDelegate.Host} method that is used by {@link
* FlutterActivityAndFragmentDelegate} to obtain the desired {@link TransparencyMode} that should
* be used when instantiating a {@link FlutterView}.
*/
@NonNull
@Override
public TransparencyMode getTransparencyMode() {
return getBackgroundMode() == BackgroundMode.opaque
? TransparencyMode.opaque
: TransparencyMode.transparent;
}
/**
* The desired window background mode of this {@code Activity}, which defaults to {@link
* BackgroundMode#opaque}.
*
* @return The background mode.
*/
@NonNull
protected BackgroundMode getBackgroundMode() {
if (getIntent().hasExtra(EXTRA_BACKGROUND_MODE)) {
return BackgroundMode.valueOf(getIntent().getStringExtra(EXTRA_BACKGROUND_MODE));
} else {
return BackgroundMode.opaque;
}
}
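  // Illustrative sketch (illustrative only): requesting a transparent background via the
  // new-engine intent builder. With BackgroundMode.transparent, getRenderMode() above resolves
  // to RenderMode.texture and getTransparencyMode() to TransparencyMode.transparent.
  //
  //   Intent intent =
  //       FlutterActivity.withNewEngine()
  //           .backgroundMode(BackgroundMode.transparent)
  //           .build(context);
  //   context.startActivity(intent);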
/**
* Hook for subclasses to easily provide a custom {@link
* io.flutter.embedding.engine.FlutterEngine}.
*
* <p>This hook is where a cached {@link io.flutter.embedding.engine.FlutterEngine} should be
* provided, if a cached {@link FlutterEngine} is desired.
*/
@Nullable
@Override
public FlutterEngine provideFlutterEngine(@NonNull Context context) {
// No-op. Hook for subclasses.
return null;
}
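  // Illustrative sketch (a hypothetical subclass in application code): supplying an engine
  // directly instead of letting the delegate create one. An engine provided this way is treated
  // as host-provided, so configureFlutterEngine() below skips automatic plugin registration.
  //
  //   public class CustomEngineFlutterActivity extends FlutterActivity {
  //     @Override
  //     public FlutterEngine provideFlutterEngine(@NonNull Context context) {
  //       return new FlutterEngine(context.getApplicationContext());
  //     }
  //   }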
/**
* Hook for subclasses to obtain a reference to the {@link
* io.flutter.embedding.engine.FlutterEngine} that is owned by this {@code FlutterActivity}.
*
* @return The Flutter engine.
*/
@Nullable
protected FlutterEngine getFlutterEngine() {
return delegate.getFlutterEngine();
}
/**
* Retrieves the meta data specified in the AndroidManifest.xml.
*
* @return The meta data.
   * @throws PackageManager.NameNotFoundException if the package containing this Activity's
   *     component cannot be found on the system.
*/
@Nullable
protected Bundle getMetaData() throws PackageManager.NameNotFoundException {
ActivityInfo activityInfo =
getPackageManager().getActivityInfo(getComponentName(), PackageManager.GET_META_DATA);
return activityInfo.metaData;
}
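  // Illustrative sketch (the meta-data key is a hypothetical example): reading a custom
  // <meta-data> value from this Activity's manifest entry, following the same pattern as the
  // accessors above.
  //
  //   try {
  //     Bundle metaData = getMetaData();
  //     boolean myFlag = metaData != null && metaData.getBoolean("com.example.MY_FLAG");
  //   } catch (PackageManager.NameNotFoundException e) {
  //     // Fall back to a sensible default.
  //   }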
@Nullable
@Override
public PlatformPlugin providePlatformPlugin(
@Nullable Activity activity, @NonNull FlutterEngine flutterEngine) {
return new PlatformPlugin(getActivity(), flutterEngine.getPlatformChannel(), this);
}
/**
* Hook for subclasses to easily configure a {@code FlutterEngine}.
*
* <p>This method is called after {@link #provideFlutterEngine(Context)}.
*
* <p>All plugins listed in the app's pubspec are registered in the base implementation of this
* method unless the FlutterEngine for this activity was externally created. To avoid the
* automatic plugin registration for implicitly created FlutterEngines, override this method
* without invoking super(). To keep automatic plugin registration and further configure the
* FlutterEngine, override this method, invoke super(), and then configure the FlutterEngine as
* desired.
*/
@Override
public void configureFlutterEngine(@NonNull FlutterEngine flutterEngine) {
if (delegate.isFlutterEngineFromHost()) {
// If the FlutterEngine was explicitly built and injected into this FlutterActivity, the
// builder should explicitly decide whether to automatically register plugins via the
// FlutterEngine's construction parameter or via the AndroidManifest metadata.
return;
}
GeneratedPluginRegister.registerGeneratedPlugins(flutterEngine);
}
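  // Illustrative sketch (a hypothetical subclass; the channel name is an assumed example):
  // keeping automatic plugin registration and adding an app-level MethodChannel.
  //
  //   public class MyFlutterActivity extends FlutterActivity {
  //     @Override
  //     public void configureFlutterEngine(@NonNull FlutterEngine flutterEngine) {
  //       super.configureFlutterEngine(flutterEngine);
  //       new MethodChannel(flutterEngine.getDartExecutor().getBinaryMessenger(), "app/channel")
  //           .setMethodCallHandler((call, result) -> result.notImplemented());
  //     }
  //   }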
/**
* Hook for the host to cleanup references that were established in {@link
* #configureFlutterEngine(FlutterEngine)} before the host is destroyed or detached.
*
* <p>This method is called in {@link #onDestroy()}.
*/
@Override
public void cleanUpFlutterEngine(@NonNull FlutterEngine flutterEngine) {
// No-op. Hook for subclasses.
}
/**
* Hook for subclasses to control whether or not the {@link FlutterFragment} within this {@code
* Activity} automatically attaches its {@link io.flutter.embedding.engine.FlutterEngine} to this
* {@code Activity}.
*
* <p>This property is controlled with a protected method instead of an {@code Intent} argument
   * because the only situation in which changing this value would help is one where {@code
* FlutterActivity} is being subclassed to utilize a custom and/or cached {@link
* io.flutter.embedding.engine.FlutterEngine}.
*
* <p>Defaults to {@code true}.
*
* <p>Control surfaces are used to provide Android resources and lifecycle events to plugins that
* are attached to the {@link io.flutter.embedding.engine.FlutterEngine}. If {@code
* shouldAttachEngineToActivity} is true, then this {@code FlutterActivity} will connect its
* {@link io.flutter.embedding.engine.FlutterEngine} to itself, along with any plugins that are
* registered with that {@link io.flutter.embedding.engine.FlutterEngine}. This allows plugins to
* access the {@code Activity}, as well as receive {@code Activity}-specific calls, e.g. {@link
* Activity#onNewIntent(Intent)}. If {@code shouldAttachEngineToActivity} is false, then this
* {@code FlutterActivity} will not automatically manage the connection between its {@link
* FlutterEngine} and itself. In this case, plugins will not be offered a reference to an {@code
* Activity} or its OS hooks.
*
* <p>Returning false from this method does not preclude a {@link
   * io.flutter.embedding.engine.FlutterEngine} from being attached to a {@code FlutterActivity} -
* it just prevents the attachment from happening automatically. A developer can choose to
* subclass {@code FlutterActivity} and then invoke {@link
* ActivityControlSurface#attachToActivity(ExclusiveAppComponent, Lifecycle)} and {@link
* ActivityControlSurface#detachFromActivity()} at the desired times.
*
* <p>One reason that a developer might choose to manually manage the relationship between the
* {@code Activity} and {@link io.flutter.embedding.engine.FlutterEngine} is if the developer
* wants to move the {@link FlutterEngine} somewhere else. For example, a developer might want the
* {@link io.flutter.embedding.engine.FlutterEngine} to outlive this {@code FlutterActivity} so
* that it can be used later in a different {@code Activity}. To accomplish this, the {@link
* io.flutter.embedding.engine.FlutterEngine} may need to be disconnected from this {@code
* FlutterActivity} at an unusual time, preventing this {@code FlutterActivity} from correctly
* managing the relationship between the {@link io.flutter.embedding.engine.FlutterEngine} and
* itself.
*/
@Override
public boolean shouldAttachEngineToActivity() {
return true;
}
/**
   * Whether to handle deeplinking from the {@code Intent} automatically if {@code
   * getInitialRoute} returns null.
*
   * <p>The default implementation looks for a {@code <meta-data>} entry called {@link
* FlutterActivityLaunchConfigs#HANDLE_DEEPLINKING_META_DATA_KEY} within the Android manifest
* definition for this {@code FlutterActivity}.
*/
@Override
public boolean shouldHandleDeeplinking() {
try {
Bundle metaData = getMetaData();
boolean shouldHandleDeeplinking =
metaData != null ? metaData.getBoolean(HANDLE_DEEPLINKING_META_DATA_KEY) : false;
return shouldHandleDeeplinking;
} catch (PackageManager.NameNotFoundException e) {
return false;
}
}
@Override
public void onFlutterSurfaceViewCreated(@NonNull FlutterSurfaceView flutterSurfaceView) {
// Hook for subclasses.
}
@Override
public void onFlutterTextureViewCreated(@NonNull FlutterTextureView flutterTextureView) {
// Hook for subclasses.
}
@Override
public void onFlutterUiDisplayed() {
// Notifies Android that we're fully drawn so that performance metrics can be collected by
// Flutter performance tests. A few considerations:
// * reportFullyDrawn was supported in KitKat (API 19), but has a bug around requiring
// permissions in some Android versions.
// * reportFullyDrawn behavior isn't tested on pre-Q versions.
// See https://github.com/flutter/flutter/issues/46172, and
// https://github.com/flutter/flutter/issues/88767.
if (Build.VERSION.SDK_INT >= API_LEVELS.API_29) {
reportFullyDrawn();
}
}
@Override
public void onFlutterUiNoLongerDisplayed() {
// no-op
}
@Override
public boolean shouldRestoreAndSaveState() {
if (getIntent().hasExtra(EXTRA_ENABLE_STATE_RESTORATION)) {
return getIntent().getBooleanExtra(EXTRA_ENABLE_STATE_RESTORATION, false);
}
if (getCachedEngineId() != null) {
// Prevent overwriting the existing state in a cached engine with restoration state.
return false;
}
return true;
}
/**
* Give the host application a chance to take control of the app lifecycle events.
*
   * <p>Returning {@code false} means the host application dispatches these app lifecycle events,
   * while returning {@code true} means the engine dispatches these events.
*
* <p>Defaults to {@code true}.
*/
@Override
public boolean shouldDispatchAppLifecycleState() {
return true;
}
/**
* Whether to automatically attach the {@link FlutterView} to the engine.
*
* <p>Returning {@code false} means that the task of attaching the {@link FlutterView} to the
* engine will be taken over by the host application.
*
* <p>Defaults to {@code true}.
*/
@Override
public boolean attachToEngineAutomatically() {
return true;
}
@Override
public boolean popSystemNavigator() {
// Hook for subclass. No-op if returns false.
return false;
}
@Override
public void updateSystemUiOverlays() {
if (delegate != null) {
delegate.updateSystemUiOverlays();
}
}
private boolean stillAttachedForEvent(String event) {
if (delegate == null) {
Log.w(TAG, "FlutterActivity " + hashCode() + " " + event + " called after release.");
return false;
}
if (!delegate.isAttached()) {
Log.w(TAG, "FlutterActivity " + hashCode() + " " + event + " called after detach.");
return false;
}
return true;
}
}
| engine/shell/platform/android/io/flutter/embedding/android/FlutterActivity.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/android/FlutterActivity.java",
"repo_id": "engine",
"token_count": 19042
} | 294 |
package io.flutter.embedding.android;
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// DO NOT EDIT -- DO NOT EDIT -- DO NOT EDIT
// This file is generated by flutter/flutter@dev/tools/gen_keycodes/bin/gen_keycodes.dart and
// should not be edited directly.
//
// Edit the template dev/tools/gen_keycodes/data/android_keyboard_map_java.tmpl instead.
// See dev/tools/gen_keycodes/README.md for more information.
import android.view.KeyEvent;
import java.util.HashMap;
/** Static information used by {@link KeyEmbedderResponder}. */
public class KeyboardMap {
/** A physicalKey-logicalKey pair used to define mappings. */
public static class KeyPair {
public KeyPair(long physicalKey, long logicalKey) {
this.physicalKey = physicalKey;
this.logicalKey = logicalKey;
}
public long physicalKey;
public long logicalKey;
}
/**
* An immutable configuration item that defines how to synchronize pressing modifiers (such as
* Shift or Ctrl), so that the {@link KeyEmbedderResponder} must synthesize events until the
   * combined pressing state of {@link #keys} matches the true meta state masked by {@link #mask}.
*/
public static class PressingGoal {
public PressingGoal(int mask, KeyPair[] keys) {
this.mask = mask;
this.keys = keys;
}
public final int mask;
public final KeyPair[] keys;
}
/**
* A configuration item that defines how to synchronize toggling modifiers (such as CapsLock), so
* that the {@link KeyEmbedderResponder} must synthesize events until the enabling state of the
* key matches the true meta state masked by {@link #mask}.
*
* <p>The objects of this class are mutable. The {@link #enabled} field will be used to store the
* current enabling state.
*/
public static class TogglingGoal {
public TogglingGoal(int mask, long physicalKey, long logicalKey) {
this.mask = mask;
this.physicalKey = physicalKey;
this.logicalKey = logicalKey;
}
public final int mask;
public final long physicalKey;
public final long logicalKey;
/**
* Used by {@link KeyEmbedderResponder} to store the current enabling state of this modifier.
*
* <p>Initialized as false.
*/
public boolean enabled = false;
}
/** Maps from Android scan codes {@link KeyEvent#getScanCode()} to Flutter physical keys. */
public static final HashMap<Long, Long> scanCodeToPhysical =
new HashMap<Long, Long>() {
private static final long serialVersionUID = 1L;
{
put(0x00000001d0L, 0x0000000012L); // fn
put(0x00000000cdL, 0x0000000014L); // suspend
put(0x000000008eL, 0x0000010082L); // sleep
put(0x000000008fL, 0x0000010083L); // wakeUp
put(0x0000000100L, 0x000005ff01L); // gameButton1
put(0x0000000120L, 0x000005ff01L); // gameButton1
put(0x0000000101L, 0x000005ff02L); // gameButton2
put(0x0000000121L, 0x000005ff02L); // gameButton2
put(0x0000000102L, 0x000005ff03L); // gameButton3
put(0x0000000122L, 0x000005ff03L); // gameButton3
put(0x0000000103L, 0x000005ff04L); // gameButton4
put(0x0000000123L, 0x000005ff04L); // gameButton4
put(0x0000000104L, 0x000005ff05L); // gameButton5
put(0x0000000124L, 0x000005ff05L); // gameButton5
put(0x0000000105L, 0x000005ff06L); // gameButton6
put(0x0000000125L, 0x000005ff06L); // gameButton6
put(0x0000000106L, 0x000005ff07L); // gameButton7
put(0x0000000126L, 0x000005ff07L); // gameButton7
put(0x0000000107L, 0x000005ff08L); // gameButton8
put(0x0000000127L, 0x000005ff08L); // gameButton8
put(0x0000000108L, 0x000005ff09L); // gameButton9
put(0x0000000128L, 0x000005ff09L); // gameButton9
put(0x0000000109L, 0x000005ff0aL); // gameButton10
put(0x0000000129L, 0x000005ff0aL); // gameButton10
put(0x000000010aL, 0x000005ff0bL); // gameButton11
put(0x000000012aL, 0x000005ff0bL); // gameButton11
put(0x000000010bL, 0x000005ff0cL); // gameButton12
put(0x000000012bL, 0x000005ff0cL); // gameButton12
put(0x000000010cL, 0x000005ff0dL); // gameButton13
put(0x000000012cL, 0x000005ff0dL); // gameButton13
put(0x000000010dL, 0x000005ff0eL); // gameButton14
put(0x000000012dL, 0x000005ff0eL); // gameButton14
put(0x000000010eL, 0x000005ff0fL); // gameButton15
put(0x000000012eL, 0x000005ff0fL); // gameButton15
put(0x000000010fL, 0x000005ff10L); // gameButton16
put(0x000000012fL, 0x000005ff10L); // gameButton16
put(0x0000000130L, 0x000005ff11L); // gameButtonA
put(0x0000000131L, 0x000005ff12L); // gameButtonB
put(0x0000000132L, 0x000005ff13L); // gameButtonC
put(0x0000000136L, 0x000005ff14L); // gameButtonLeft1
put(0x0000000138L, 0x000005ff15L); // gameButtonLeft2
put(0x000000013cL, 0x000005ff16L); // gameButtonMode
put(0x0000000137L, 0x000005ff17L); // gameButtonRight1
put(0x0000000139L, 0x000005ff18L); // gameButtonRight2
put(0x000000013aL, 0x000005ff19L); // gameButtonSelect
put(0x000000013bL, 0x000005ff1aL); // gameButtonStart
put(0x000000013dL, 0x000005ff1bL); // gameButtonThumbLeft
put(0x000000013eL, 0x000005ff1cL); // gameButtonThumbRight
put(0x0000000133L, 0x000005ff1dL); // gameButtonX
put(0x0000000134L, 0x000005ff1eL); // gameButtonY
put(0x0000000135L, 0x000005ff1fL); // gameButtonZ
put(0x000000001eL, 0x0000070004L); // keyA
put(0x0000000030L, 0x0000070005L); // keyB
put(0x000000002eL, 0x0000070006L); // keyC
put(0x0000000020L, 0x0000070007L); // keyD
put(0x0000000012L, 0x0000070008L); // keyE
put(0x0000000021L, 0x0000070009L); // keyF
put(0x0000000022L, 0x000007000aL); // keyG
put(0x0000000023L, 0x000007000bL); // keyH
put(0x0000000017L, 0x000007000cL); // keyI
put(0x0000000024L, 0x000007000dL); // keyJ
put(0x0000000025L, 0x000007000eL); // keyK
put(0x0000000026L, 0x000007000fL); // keyL
put(0x0000000032L, 0x0000070010L); // keyM
put(0x0000000031L, 0x0000070011L); // keyN
put(0x0000000018L, 0x0000070012L); // keyO
put(0x0000000019L, 0x0000070013L); // keyP
put(0x0000000010L, 0x0000070014L); // keyQ
put(0x0000000013L, 0x0000070015L); // keyR
put(0x000000001fL, 0x0000070016L); // keyS
put(0x0000000014L, 0x0000070017L); // keyT
put(0x0000000016L, 0x0000070018L); // keyU
put(0x000000002fL, 0x0000070019L); // keyV
put(0x0000000011L, 0x000007001aL); // keyW
put(0x000000002dL, 0x000007001bL); // keyX
put(0x0000000015L, 0x000007001cL); // keyY
put(0x000000002cL, 0x000007001dL); // keyZ
put(0x0000000002L, 0x000007001eL); // digit1
put(0x0000000003L, 0x000007001fL); // digit2
put(0x0000000004L, 0x0000070020L); // digit3
put(0x0000000005L, 0x0000070021L); // digit4
put(0x0000000006L, 0x0000070022L); // digit5
put(0x0000000007L, 0x0000070023L); // digit6
put(0x0000000008L, 0x0000070024L); // digit7
put(0x0000000009L, 0x0000070025L); // digit8
put(0x000000000aL, 0x0000070026L); // digit9
put(0x000000000bL, 0x0000070027L); // digit0
put(0x000000001cL, 0x0000070028L); // enter
put(0x0000000001L, 0x0000070029L); // escape
put(0x000000000eL, 0x000007002aL); // backspace
put(0x000000000fL, 0x000007002bL); // tab
put(0x0000000039L, 0x000007002cL); // space
put(0x000000000cL, 0x000007002dL); // minus
put(0x000000000dL, 0x000007002eL); // equal
put(0x000000001aL, 0x000007002fL); // bracketLeft
put(0x000000001bL, 0x0000070030L); // bracketRight
put(0x000000002bL, 0x0000070031L); // backslash
put(0x0000000056L, 0x0000070031L); // backslash
put(0x0000000027L, 0x0000070033L); // semicolon
put(0x0000000028L, 0x0000070034L); // quote
put(0x0000000029L, 0x0000070035L); // backquote
put(0x0000000033L, 0x0000070036L); // comma
put(0x0000000034L, 0x0000070037L); // period
put(0x0000000035L, 0x0000070038L); // slash
put(0x000000003aL, 0x0000070039L); // capsLock
put(0x000000003bL, 0x000007003aL); // f1
put(0x000000003cL, 0x000007003bL); // f2
put(0x000000003dL, 0x000007003cL); // f3
put(0x000000003eL, 0x000007003dL); // f4
put(0x000000003fL, 0x000007003eL); // f5
put(0x0000000040L, 0x000007003fL); // f6
put(0x0000000041L, 0x0000070040L); // f7
put(0x0000000042L, 0x0000070041L); // f8
put(0x0000000043L, 0x0000070042L); // f9
put(0x0000000044L, 0x0000070043L); // f10
put(0x0000000057L, 0x0000070044L); // f11
put(0x0000000058L, 0x0000070045L); // f12
put(0x0000000063L, 0x0000070046L); // printScreen
put(0x0000000046L, 0x0000070047L); // scrollLock
put(0x0000000077L, 0x0000070048L); // pause
put(0x000000019bL, 0x0000070048L); // pause
put(0x000000006eL, 0x0000070049L); // insert
put(0x0000000066L, 0x000007004aL); // home
put(0x0000000068L, 0x000007004bL); // pageUp
put(0x00000000b1L, 0x000007004bL); // pageUp
put(0x000000006fL, 0x000007004cL); // delete
put(0x000000006bL, 0x000007004dL); // end
put(0x000000006dL, 0x000007004eL); // pageDown
put(0x00000000b2L, 0x000007004eL); // pageDown
put(0x000000006aL, 0x000007004fL); // arrowRight
put(0x0000000069L, 0x0000070050L); // arrowLeft
put(0x000000006cL, 0x0000070051L); // arrowDown
put(0x0000000067L, 0x0000070052L); // arrowUp
put(0x0000000045L, 0x0000070053L); // numLock
put(0x0000000062L, 0x0000070054L); // numpadDivide
put(0x0000000037L, 0x0000070055L); // numpadMultiply
put(0x000000004aL, 0x0000070056L); // numpadSubtract
put(0x000000004eL, 0x0000070057L); // numpadAdd
put(0x0000000060L, 0x0000070058L); // numpadEnter
put(0x000000004fL, 0x0000070059L); // numpad1
put(0x0000000050L, 0x000007005aL); // numpad2
put(0x0000000051L, 0x000007005bL); // numpad3
put(0x000000004bL, 0x000007005cL); // numpad4
put(0x000000004cL, 0x000007005dL); // numpad5
put(0x000000004dL, 0x000007005eL); // numpad6
put(0x0000000047L, 0x000007005fL); // numpad7
put(0x0000000048L, 0x0000070060L); // numpad8
put(0x0000000049L, 0x0000070061L); // numpad9
put(0x0000000052L, 0x0000070062L); // numpad0
put(0x0000000053L, 0x0000070063L); // numpadDecimal
put(0x000000007fL, 0x0000070065L); // contextMenu
put(0x000000008bL, 0x0000070065L); // contextMenu
put(0x0000000074L, 0x0000070066L); // power
put(0x0000000098L, 0x0000070066L); // power
put(0x0000000075L, 0x0000070067L); // numpadEqual
put(0x00000000b7L, 0x0000070068L); // f13
put(0x00000000b8L, 0x0000070069L); // f14
put(0x00000000b9L, 0x000007006aL); // f15
put(0x00000000baL, 0x000007006bL); // f16
put(0x00000000bbL, 0x000007006cL); // f17
put(0x00000000bcL, 0x000007006dL); // f18
put(0x00000000bdL, 0x000007006eL); // f19
put(0x00000000beL, 0x000007006fL); // f20
put(0x00000000bfL, 0x0000070070L); // f21
put(0x00000000c0L, 0x0000070071L); // f22
put(0x00000000c1L, 0x0000070072L); // f23
put(0x00000000c2L, 0x0000070073L); // f24
put(0x0000000086L, 0x0000070074L); // open
put(0x000000008aL, 0x0000070075L); // help
put(0x0000000161L, 0x0000070077L); // select
put(0x0000000081L, 0x0000070079L); // again
put(0x0000000083L, 0x000007007aL); // undo
put(0x0000000089L, 0x000007007bL); // cut
put(0x0000000085L, 0x000007007cL); // copy
put(0x0000000087L, 0x000007007dL); // paste
put(0x0000000088L, 0x000007007eL); // find
put(0x0000000071L, 0x000007007fL); // audioVolumeMute
put(0x0000000073L, 0x0000070080L); // audioVolumeUp
put(0x0000000072L, 0x0000070081L); // audioVolumeDown
put(0x000000005fL, 0x0000070085L); // numpadComma
put(0x0000000079L, 0x0000070085L); // numpadComma
put(0x0000000059L, 0x0000070087L); // intlRo
put(0x000000007cL, 0x0000070089L); // intlYen
put(0x000000005cL, 0x000007008aL); // convert
put(0x000000005eL, 0x000007008bL); // nonConvert
put(0x000000005aL, 0x0000070092L); // lang3
put(0x000000005bL, 0x0000070093L); // lang4
put(0x0000000082L, 0x00000700a3L); // props
put(0x00000000b3L, 0x00000700b6L); // numpadParenLeft
put(0x00000000b4L, 0x00000700b7L); // numpadParenRight
put(0x000000001dL, 0x00000700e0L); // controlLeft
put(0x000000002aL, 0x00000700e1L); // shiftLeft
put(0x0000000038L, 0x00000700e2L); // altLeft
put(0x000000007dL, 0x00000700e3L); // metaLeft
put(0x0000000061L, 0x00000700e4L); // controlRight
put(0x0000000036L, 0x00000700e5L); // shiftRight
put(0x0000000064L, 0x00000700e6L); // altRight
put(0x000000007eL, 0x00000700e7L); // metaRight
put(0x0000000166L, 0x00000c0060L); // info
put(0x0000000172L, 0x00000c0061L); // closedCaptionToggle
put(0x00000000e1L, 0x00000c006fL); // brightnessUp
put(0x00000000e0L, 0x00000c0070L); // brightnessDown
put(0x0000000195L, 0x00000c0083L); // mediaLast
put(0x00000000aeL, 0x00000c0094L); // exit
put(0x0000000192L, 0x00000c009cL); // channelUp
put(0x0000000193L, 0x00000c009dL); // channelDown
put(0x00000000c8L, 0x00000c00b0L); // mediaPlay
put(0x00000000cfL, 0x00000c00b0L); // mediaPlay
put(0x00000000c9L, 0x00000c00b1L); // mediaPause
put(0x00000000a7L, 0x00000c00b2L); // mediaRecord
put(0x00000000d0L, 0x00000c00b3L); // mediaFastForward
put(0x00000000a8L, 0x00000c00b4L); // mediaRewind
put(0x00000000a3L, 0x00000c00b5L); // mediaTrackNext
put(0x00000000a5L, 0x00000c00b6L); // mediaTrackPrevious
put(0x0000000080L, 0x00000c00b7L); // mediaStop
put(0x00000000a6L, 0x00000c00b7L); // mediaStop
put(0x00000000a1L, 0x00000c00b8L); // eject
put(0x00000000a2L, 0x00000c00b8L); // eject
put(0x00000000a4L, 0x00000c00cdL); // mediaPlayPause
put(0x00000000d1L, 0x00000c00e5L); // bassBoost
put(0x000000009bL, 0x00000c018aL); // launchMail
put(0x00000000d7L, 0x00000c018aL); // launchMail
put(0x00000001adL, 0x00000c018dL); // launchContacts
put(0x000000018dL, 0x00000c018eL); // launchCalendar
put(0x0000000247L, 0x00000c01cbL); // launchAssistant
put(0x00000000a0L, 0x00000c0203L); // close
put(0x00000000ceL, 0x00000c0203L); // close
put(0x00000000d2L, 0x00000c0208L); // print
put(0x00000000d9L, 0x00000c0221L); // browserSearch
put(0x000000009fL, 0x00000c0225L); // browserForward
put(0x000000009cL, 0x00000c022aL); // browserFavorites
put(0x00000000b6L, 0x00000c0279L); // redo
}
};
/** Maps from Android key codes {@link KeyEvent#getKeyCode()} to Flutter logical keys. */
public static final HashMap<Long, Long> keyCodeToLogical =
new HashMap<Long, Long>() {
private static final long serialVersionUID = 1L;
{
put(0x000000003eL, 0x0000000020L); // space
put(0x000000004bL, 0x0000000022L); // quote
put(0x0000000012L, 0x0000000023L); // numberSign
put(0x0000000011L, 0x000000002aL); // asterisk
put(0x0000000051L, 0x000000002bL); // add
put(0x0000000037L, 0x000000002cL); // comma
put(0x0000000045L, 0x000000002dL); // minus
put(0x0000000038L, 0x000000002eL); // period
put(0x000000004cL, 0x000000002fL); // slash
put(0x0000000007L, 0x0000000030L); // digit0
put(0x0000000008L, 0x0000000031L); // digit1
put(0x0000000009L, 0x0000000032L); // digit2
put(0x000000000aL, 0x0000000033L); // digit3
put(0x000000000bL, 0x0000000034L); // digit4
put(0x000000000cL, 0x0000000035L); // digit5
put(0x000000000dL, 0x0000000036L); // digit6
put(0x000000000eL, 0x0000000037L); // digit7
put(0x000000000fL, 0x0000000038L); // digit8
put(0x0000000010L, 0x0000000039L); // digit9
put(0x000000004aL, 0x000000003bL); // semicolon
put(0x0000000046L, 0x000000003dL); // equal
put(0x000000004dL, 0x0000000040L); // at
put(0x0000000047L, 0x000000005bL); // bracketLeft
put(0x0000000049L, 0x000000005cL); // backslash
put(0x0000000048L, 0x000000005dL); // bracketRight
put(0x0000000044L, 0x0000000060L); // backquote
put(0x000000001dL, 0x0000000061L); // keyA
put(0x000000001eL, 0x0000000062L); // keyB
put(0x000000001fL, 0x0000000063L); // keyC
put(0x0000000020L, 0x0000000064L); // keyD
put(0x0000000021L, 0x0000000065L); // keyE
put(0x0000000022L, 0x0000000066L); // keyF
put(0x0000000023L, 0x0000000067L); // keyG
put(0x0000000024L, 0x0000000068L); // keyH
put(0x0000000025L, 0x0000000069L); // keyI
put(0x0000000026L, 0x000000006aL); // keyJ
put(0x0000000027L, 0x000000006bL); // keyK
put(0x0000000028L, 0x000000006cL); // keyL
put(0x0000000029L, 0x000000006dL); // keyM
put(0x000000002aL, 0x000000006eL); // keyN
put(0x000000002bL, 0x000000006fL); // keyO
put(0x000000002cL, 0x0000000070L); // keyP
put(0x000000002dL, 0x0000000071L); // keyQ
put(0x000000002eL, 0x0000000072L); // keyR
put(0x000000002fL, 0x0000000073L); // keyS
put(0x0000000030L, 0x0000000074L); // keyT
put(0x0000000031L, 0x0000000075L); // keyU
put(0x0000000032L, 0x0000000076L); // keyV
put(0x0000000033L, 0x0000000077L); // keyW
put(0x0000000034L, 0x0000000078L); // keyX
put(0x0000000035L, 0x0000000079L); // keyY
put(0x0000000036L, 0x000000007aL); // keyZ
put(0x0000000043L, 0x0100000008L); // backspace
put(0x000000003dL, 0x0100000009L); // tab
put(0x0000000042L, 0x010000000dL); // enter
put(0x000000006fL, 0x010000001bL); // escape
put(0x0000000070L, 0x010000007fL); // delete
put(0x0000000073L, 0x0100000104L); // capsLock
put(0x0000000077L, 0x0100000106L); // fn
put(0x000000008fL, 0x010000010aL); // numLock
put(0x0000000074L, 0x010000010cL); // scrollLock
put(0x000000003fL, 0x010000010fL); // symbol
put(0x0000000014L, 0x0100000301L); // arrowDown
put(0x0000000015L, 0x0100000302L); // arrowLeft
put(0x0000000016L, 0x0100000303L); // arrowRight
put(0x0000000013L, 0x0100000304L); // arrowUp
put(0x000000007bL, 0x0100000305L); // end
put(0x000000007aL, 0x0100000306L); // home
put(0x000000005dL, 0x0100000307L); // pageDown
put(0x000000005cL, 0x0100000308L); // pageUp
put(0x000000001cL, 0x0100000401L); // clear
put(0x0000000116L, 0x0100000402L); // copy
put(0x0000000115L, 0x0100000404L); // cut
put(0x000000007cL, 0x0100000407L); // insert
put(0x0000000117L, 0x0100000408L); // paste
put(0x0000000052L, 0x0100000505L); // contextMenu
put(0x0000000103L, 0x0100000508L); // help
put(0x0000000079L, 0x0100000509L); // pause
put(0x0000000017L, 0x010000050cL); // select
put(0x00000000a8L, 0x010000050dL); // zoomIn
put(0x00000000a9L, 0x010000050eL); // zoomOut
put(0x00000000dcL, 0x0100000601L); // brightnessDown
put(0x00000000ddL, 0x0100000602L); // brightnessUp
put(0x000000001bL, 0x0100000603L); // camera
put(0x0000000081L, 0x0100000604L); // eject
put(0x000000001aL, 0x0100000606L); // power
put(0x0000000078L, 0x0100000608L); // printScreen
put(0x00000000e0L, 0x010000060bL); // wakeUp
put(0x00000000d6L, 0x0100000705L); // convert
put(0x00000000ccL, 0x0100000709L); // groupNext
put(0x000000005fL, 0x010000070bL); // modeChange
put(0x00000000d5L, 0x010000070dL); // nonConvert
put(0x00000000d4L, 0x0100000714L); // eisu
put(0x00000000d7L, 0x0100000717L); // hiraganaKatakana
put(0x00000000daL, 0x0100000719L); // kanjiMode
put(0x00000000d3L, 0x010000071dL); // zenkakuHankaku
put(0x0000000083L, 0x0100000801L); // f1
put(0x0000000084L, 0x0100000802L); // f2
put(0x0000000085L, 0x0100000803L); // f3
put(0x0000000086L, 0x0100000804L); // f4
put(0x0000000087L, 0x0100000805L); // f5
put(0x0000000088L, 0x0100000806L); // f6
put(0x0000000089L, 0x0100000807L); // f7
put(0x000000008aL, 0x0100000808L); // f8
put(0x000000008bL, 0x0100000809L); // f9
put(0x000000008cL, 0x010000080aL); // f10
put(0x000000008dL, 0x010000080bL); // f11
put(0x000000008eL, 0x010000080cL); // f12
put(0x0000000080L, 0x0100000a01L); // close
put(0x0000000055L, 0x0100000a05L); // mediaPlayPause
put(0x0000000056L, 0x0100000a07L); // mediaStop
put(0x0000000057L, 0x0100000a08L); // mediaTrackNext
put(0x0000000058L, 0x0100000a09L); // mediaTrackPrevious
put(0x0000000019L, 0x0100000a0fL); // audioVolumeDown
put(0x0000000018L, 0x0100000a10L); // audioVolumeUp
put(0x00000000a4L, 0x0100000a11L); // audioVolumeMute
put(0x00000000d0L, 0x0100000b02L); // launchCalendar
put(0x0000000041L, 0x0100000b03L); // launchMail
put(0x00000000d1L, 0x0100000b05L); // launchMusicPlayer
put(0x0000000040L, 0x0100000b09L); // launchWebBrowser
put(0x00000000cfL, 0x0100000b0cL); // launchContacts
put(0x00000000dbL, 0x0100000b0eL); // launchAssistant
put(0x00000000aeL, 0x0100000c02L); // browserFavorites
put(0x000000007dL, 0x0100000c03L); // browserForward
put(0x0000000054L, 0x0100000c06L); // browserSearch
put(0x00000000b6L, 0x0100000d08L); // avrInput
put(0x00000000b5L, 0x0100000d09L); // avrPower
put(0x00000000a7L, 0x0100000d0aL); // channelDown
put(0x00000000a6L, 0x0100000d0bL); // channelUp
put(0x00000000b7L, 0x0100000d0cL); // colorF0Red
put(0x00000000b8L, 0x0100000d0dL); // colorF1Green
put(0x00000000b9L, 0x0100000d0eL); // colorF2Yellow
put(0x00000000baL, 0x0100000d0fL); // colorF3Blue
put(0x00000000afL, 0x0100000d12L); // closedCaptionToggle
put(0x00000000acL, 0x0100000d22L); // guide
put(0x00000000a5L, 0x0100000d25L); // info
put(0x000000005aL, 0x0100000d2cL); // mediaFastForward
put(0x00000000e5L, 0x0100000d2dL); // mediaLast
put(0x000000007fL, 0x0100000d2eL); // mediaPause
put(0x000000007eL, 0x0100000d2fL); // mediaPlay
put(0x0000000082L, 0x0100000d30L); // mediaRecord
put(0x0000000059L, 0x0100000d31L); // mediaRewind
put(0x00000000b0L, 0x0100000d43L); // settings
put(0x00000000b4L, 0x0100000d45L); // stbInput
put(0x00000000b3L, 0x0100000d46L); // stbPower
put(0x00000000e9L, 0x0100000d48L); // teletext
put(0x00000000aaL, 0x0100000d49L); // tv
put(0x00000000b2L, 0x0100000d4aL); // tvInput
put(0x00000000b1L, 0x0100000d4bL); // tvPower
put(0x00000000ffL, 0x0100000d4eL); // zoomToggle
put(0x00000000adL, 0x0100000d4fL); // dvr
put(0x00000000deL, 0x0100000d50L); // mediaAudioTrack
put(0x0000000111L, 0x0100000d51L); // mediaSkipBackward
put(0x0000000110L, 0x0100000d52L); // mediaSkipForward
put(0x0000000113L, 0x0100000d53L); // mediaStepBackward
put(0x0000000112L, 0x0100000d54L); // mediaStepForward
put(0x00000000e2L, 0x0100000d55L); // mediaTopMenu
put(0x0000000106L, 0x0100000d56L); // navigateIn
put(0x0000000105L, 0x0100000d57L); // navigateNext
put(0x0000000107L, 0x0100000d58L); // navigateOut
put(0x0000000104L, 0x0100000d59L); // navigatePrevious
put(0x00000000e1L, 0x0100000d5aL); // pairing
put(0x000000005bL, 0x0100000e09L); // microphoneVolumeMute
put(0x00000000bbL, 0x0100001001L); // appSwitch
put(0x0000000005L, 0x0100001002L); // call
put(0x0000000050L, 0x0100001003L); // cameraFocus
put(0x0000000006L, 0x0100001004L); // endCall
put(0x0000000004L, 0x0100001005L); // goBack
put(0x0000000003L, 0x0100001006L); // goHome
put(0x000000004fL, 0x0100001007L); // headsetHook
put(0x0000000053L, 0x0100001009L); // notification
put(0x00000000cdL, 0x010000100aL); // mannerMode
put(0x00000000ceL, 0x0100001101L); // tv3DMode
put(0x00000000f2L, 0x0100001102L); // tvAntennaCable
put(0x00000000fcL, 0x0100001103L); // tvAudioDescription
put(0x00000000feL, 0x0100001104L); // tvAudioDescriptionMixDown
put(0x00000000fdL, 0x0100001105L); // tvAudioDescriptionMixUp
put(0x0000000100L, 0x0100001106L); // tvContentsMenu
put(0x00000000e6L, 0x0100001107L); // tvDataService
put(0x00000000f9L, 0x0100001108L); // tvInputComponent1
put(0x00000000faL, 0x0100001109L); // tvInputComponent2
put(0x00000000f7L, 0x010000110aL); // tvInputComposite1
put(0x00000000f8L, 0x010000110bL); // tvInputComposite2
put(0x00000000f3L, 0x010000110cL); // tvInputHDMI1
put(0x00000000f4L, 0x010000110dL); // tvInputHDMI2
put(0x00000000f5L, 0x010000110eL); // tvInputHDMI3
put(0x00000000f6L, 0x010000110fL); // tvInputHDMI4
put(0x00000000fbL, 0x0100001110L); // tvInputVGA1
put(0x00000000f1L, 0x0100001112L); // tvNetwork
put(0x00000000eaL, 0x0100001113L); // tvNumberEntry
put(0x00000000e8L, 0x0100001114L); // tvRadioService
put(0x00000000edL, 0x0100001115L); // tvSatellite
put(0x00000000eeL, 0x0100001116L); // tvSatelliteBS
put(0x00000000efL, 0x0100001117L); // tvSatelliteCS
put(0x00000000f0L, 0x0100001118L); // tvSatelliteToggle
put(0x00000000ebL, 0x0100001119L); // tvTerrestrialAnalog
put(0x00000000ecL, 0x010000111aL); // tvTerrestrialDigital
put(0x0000000102L, 0x010000111bL); // tvTimer
put(0x00000000dfL, 0x0200000002L); // sleep
put(0x00000000d9L, 0x0200000021L); // intlRo
put(0x00000000d8L, 0x0200000022L); // intlYen
put(0x0000000071L, 0x0200000100L); // controlLeft
put(0x0000000072L, 0x0200000101L); // controlRight
put(0x000000003bL, 0x0200000102L); // shiftLeft
put(0x000000003cL, 0x0200000103L); // shiftRight
put(0x0000000039L, 0x0200000104L); // altLeft
put(0x000000003aL, 0x0200000105L); // altRight
put(0x0000000075L, 0x0200000106L); // metaLeft
put(0x0000000076L, 0x0200000107L); // metaRight
put(0x00000000a0L, 0x020000020dL); // numpadEnter
put(0x00000000a2L, 0x0200000228L); // numpadParenLeft
put(0x00000000a3L, 0x0200000229L); // numpadParenRight
put(0x000000009bL, 0x020000022aL); // numpadMultiply
put(0x000000009dL, 0x020000022bL); // numpadAdd
put(0x000000009fL, 0x020000022cL); // numpadComma
put(0x000000009cL, 0x020000022dL); // numpadSubtract
put(0x000000009eL, 0x020000022eL); // numpadDecimal
put(0x000000009aL, 0x020000022fL); // numpadDivide
put(0x0000000090L, 0x0200000230L); // numpad0
put(0x0000000091L, 0x0200000231L); // numpad1
put(0x0000000092L, 0x0200000232L); // numpad2
put(0x0000000093L, 0x0200000233L); // numpad3
put(0x0000000094L, 0x0200000234L); // numpad4
put(0x0000000095L, 0x0200000235L); // numpad5
put(0x0000000096L, 0x0200000236L); // numpad6
put(0x0000000097L, 0x0200000237L); // numpad7
put(0x0000000098L, 0x0200000238L); // numpad8
put(0x0000000099L, 0x0200000239L); // numpad9
put(0x00000000a1L, 0x020000023dL); // numpadEqual
put(0x00000000bcL, 0x0200000301L); // gameButton1
put(0x00000000bdL, 0x0200000302L); // gameButton2
put(0x00000000beL, 0x0200000303L); // gameButton3
put(0x00000000bfL, 0x0200000304L); // gameButton4
put(0x00000000c0L, 0x0200000305L); // gameButton5
put(0x00000000c1L, 0x0200000306L); // gameButton6
put(0x00000000c2L, 0x0200000307L); // gameButton7
put(0x00000000c3L, 0x0200000308L); // gameButton8
put(0x00000000c4L, 0x0200000309L); // gameButton9
put(0x00000000c5L, 0x020000030aL); // gameButton10
put(0x00000000c6L, 0x020000030bL); // gameButton11
put(0x00000000c7L, 0x020000030cL); // gameButton12
put(0x00000000c8L, 0x020000030dL); // gameButton13
put(0x00000000c9L, 0x020000030eL); // gameButton14
put(0x00000000caL, 0x020000030fL); // gameButton15
put(0x00000000cbL, 0x0200000310L); // gameButton16
put(0x0000000060L, 0x0200000311L); // gameButtonA
put(0x0000000061L, 0x0200000312L); // gameButtonB
put(0x0000000062L, 0x0200000313L); // gameButtonC
put(0x0000000066L, 0x0200000314L); // gameButtonLeft1
put(0x0000000068L, 0x0200000315L); // gameButtonLeft2
put(0x000000006eL, 0x0200000316L); // gameButtonMode
put(0x0000000067L, 0x0200000317L); // gameButtonRight1
put(0x0000000069L, 0x0200000318L); // gameButtonRight2
put(0x000000006dL, 0x0200000319L); // gameButtonSelect
put(0x000000006cL, 0x020000031aL); // gameButtonStart
put(0x000000006aL, 0x020000031bL); // gameButtonThumbLeft
put(0x000000006bL, 0x020000031cL); // gameButtonThumbRight
put(0x0000000063L, 0x020000031dL); // gameButtonX
put(0x0000000064L, 0x020000031eL); // gameButtonY
put(0x0000000065L, 0x020000031fL); // gameButtonZ
}
};
public static final PressingGoal[] pressingGoals =
new PressingGoal[] {
new PressingGoal(
KeyEvent.META_CTRL_ON,
new KeyPair[] {
new KeyPair(0x000700e0L, 0x0200000100L), // ControlLeft
new KeyPair(0x000700e4L, 0x0200000101L), // ControlRight
}),
new PressingGoal(
KeyEvent.META_SHIFT_ON,
new KeyPair[] {
new KeyPair(0x000700e1L, 0x0200000102L), // ShiftLeft
new KeyPair(0x000700e5L, 0x0200000103L), // ShiftRight
}),
new PressingGoal(
KeyEvent.META_ALT_ON,
new KeyPair[] {
new KeyPair(0x000700e2L, 0x0200000104L), // AltLeft
new KeyPair(0x000700e6L, 0x0200000105L), // AltRight
}),
};
/**
* A list of toggling modifiers that must be synchronized on each key event.
*
* <p>The list is not a static variable but constructed by a function, because {@link
* TogglingGoal} is mutable.
*/
public static TogglingGoal[] getTogglingGoals() {
return new TogglingGoal[] {
new TogglingGoal(KeyEvent.META_CAPS_LOCK_ON, 0x00070039L, 0x0100000104L),
};
}
public static final long kValueMask = 0x000ffffffffL;
public static final long kUnicodePlane = 0x00000000000L;
public static final long kAndroidPlane = 0x01100000000L;
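  // Illustrative sketch (illustrative only; the fallback derivation is an assumption about how
  // unmapped codes are folded into the Android plane): resolving Flutter physical and logical
  // keys for an incoming android.view.KeyEvent named "event".
  //
  //   Long physical = KeyboardMap.scanCodeToPhysical.get((long) event.getScanCode());
  //   Long logical = KeyboardMap.keyCodeToLogical.get((long) event.getKeyCode());
  //   long logicalKey =
  //       logical != null
  //           ? logical
  //           : ((long) event.getKeyCode() & KeyboardMap.kValueMask) | KeyboardMap.kAndroidPlane;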
}
| engine/shell/platform/android/io/flutter/embedding/android/KeyboardMap.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/android/KeyboardMap.java",
"repo_id": "engine",
"token_count": 16177
} | 295 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.engine.dart;
import android.os.Handler;
import android.os.Looper;
import androidx.annotation.NonNull;
import io.flutter.util.HandlerCompat;
/** A BinaryMessenger.TaskQueue that posts to the platform thread (aka main thread). */
public class PlatformTaskQueue implements DartMessenger.DartMessengerTaskQueue {
// Use an async handler because the default is subject to vsync synchronization and can result
// in delays when dispatching tasks.
@NonNull private final Handler handler = HandlerCompat.createAsyncHandler(Looper.getMainLooper());
@Override
public void dispatch(@NonNull Runnable runnable) {
handler.post(runnable);
}
}
| engine/shell/platform/android/io/flutter/embedding/engine/dart/PlatformTaskQueue.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/engine/dart/PlatformTaskQueue.java",
"repo_id": "engine",
"token_count": 230
} | 296 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.embedding.engine.plugins.broadcastreceiver;
import android.content.BroadcastReceiver;
import androidx.annotation.NonNull;
/**
* Binding that gives {@link BroadcastReceiverAware} plugins access to an associated {@link
* BroadcastReceiver}.
*/
public interface BroadcastReceiverPluginBinding {
/**
* Returns the {@link BroadcastReceiver} that is currently attached to the {@link
* io.flutter.embedding.engine.FlutterEngine} that owns this {@code
* BroadcastReceiverAwarePluginBinding}.
*/
@NonNull
BroadcastReceiver getBroadcastReceiver();
}
| engine/shell/platform/android/io/flutter/embedding/engine/plugins/broadcastreceiver/BroadcastReceiverPluginBinding.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/engine/plugins/broadcastreceiver/BroadcastReceiverPluginBinding.java",
"repo_id": "engine",
"token_count": 211
} | 297 |
package io.flutter.embedding.engine.systemchannels;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import io.flutter.Log;
import io.flutter.embedding.engine.FlutterJNI;
import io.flutter.embedding.engine.dart.DartExecutor;
import io.flutter.plugin.common.BasicMessageChannel;
import io.flutter.plugin.common.StandardMessageCodec;
import io.flutter.view.AccessibilityBridge;
import java.util.HashMap;
/**
* System channel that sends accessibility requests and events from Flutter to Android.
*
* <p>See {@link AccessibilityMessageHandler}, which lists all accessibility requests and events
* that might be sent from Flutter to the Android platform.
*/
public class AccessibilityChannel {
private static final String TAG = "AccessibilityChannel";
@NonNull public final BasicMessageChannel<Object> channel;
@NonNull public final FlutterJNI flutterJNI;
@Nullable private AccessibilityMessageHandler handler;
public final BasicMessageChannel.MessageHandler<Object> parsingMessageHandler =
new BasicMessageChannel.MessageHandler<Object>() {
@Override
public void onMessage(
@Nullable Object message, @NonNull BasicMessageChannel.Reply<Object> reply) {
// If there is no handler to respond to this message then we don't need to
// parse it. Return.
if (handler == null) {
reply.reply(null);
return;
}
@SuppressWarnings("unchecked")
final HashMap<String, Object> annotatedEvent = (HashMap<String, Object>) message;
final String type = (String) annotatedEvent.get("type");
@SuppressWarnings("unchecked")
final HashMap<String, Object> data = (HashMap<String, Object>) annotatedEvent.get("data");
Log.v(TAG, "Received " + type + " message.");
switch (type) {
case "announce":
String announceMessage = (String) data.get("message");
if (announceMessage != null) {
handler.announce(announceMessage);
}
break;
case "tap":
{
Integer nodeId = (Integer) annotatedEvent.get("nodeId");
if (nodeId != null) {
handler.onTap(nodeId);
}
break;
}
case "longPress":
{
Integer nodeId = (Integer) annotatedEvent.get("nodeId");
if (nodeId != null) {
handler.onLongPress(nodeId);
}
break;
}
case "focus":
{
Integer nodeId = (Integer) annotatedEvent.get("nodeId");
if (nodeId != null) {
handler.onFocus(nodeId);
}
break;
}
case "tooltip":
{
String tooltipMessage = (String) data.get("message");
if (tooltipMessage != null) {
handler.onTooltip(tooltipMessage);
}
break;
}
}
reply.reply(null);
}
};
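  // Illustrative sketch of the envelope this handler parses (values are hypothetical): the
  // framework sends a map with a "type" string plus, depending on the type, a "data" map or a
  // "nodeId" entry, over the "flutter/accessibility" BasicMessageChannel using
  // StandardMessageCodec. For example, an announcement:
  //
  //   HashMap<String, Object> data = new HashMap<>();
  //   data.put("message", "Form saved");
  //   HashMap<String, Object> annotatedEvent = new HashMap<>();
  //   annotatedEvent.put("type", "announce");
  //   annotatedEvent.put("data", data);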
/**
* Constructs an {@code AccessibilityChannel} that connects Android to the Dart code running in
* {@code dartExecutor}.
*
* <p>The given {@code dartExecutor} is permitted to be idle or executing code.
*
* <p>See {@link DartExecutor}.
*/
public AccessibilityChannel(@NonNull DartExecutor dartExecutor, @NonNull FlutterJNI flutterJNI) {
channel =
new BasicMessageChannel<>(
dartExecutor, "flutter/accessibility", StandardMessageCodec.INSTANCE);
channel.setMessageHandler(parsingMessageHandler);
this.flutterJNI = flutterJNI;
}
@VisibleForTesting
public AccessibilityChannel(
@NonNull BasicMessageChannel<Object> channel, @NonNull FlutterJNI flutterJNI) {
this.channel = channel;
this.flutterJNI = flutterJNI;
}
/**
* Informs Flutter that the Android OS currently has accessibility enabled.
*
* <p>To accommodate enabled accessibility, this method instructs Flutter to activate its
* semantics tree, which forms the basis of Flutter's accessibility support.
*/
public void onAndroidAccessibilityEnabled() {
flutterJNI.setSemanticsEnabled(true);
}
/**
* Informs Flutter that the Android OS currently has accessibility disabled.
*
* <p>Given that accessibility is not required at this time, this method instructs Flutter to
* deactivate its semantics tree.
*/
public void onAndroidAccessibilityDisabled() {
flutterJNI.setSemanticsEnabled(false);
}
/**
* Instructs Flutter to activate/deactivate accessibility features corresponding to the flags
* provided by {@code accessibilityFeatureFlags}.
*/
public void setAccessibilityFeatures(int accessibilityFeatureFlags) {
flutterJNI.setAccessibilityFeatures(accessibilityFeatureFlags);
}
/**
* Instructs Flutter to perform the given {@code action} on the {@code SemanticsNode} referenced
* by the given {@code virtualViewId}.
*
* <p>One might wonder why Flutter would need to be instructed that the user wants to perform an
* action. When the user is touching the screen in accessibility mode, Android takes over the
   * touch input, categorizing input as one of many accessibility gestures. Therefore, Flutter
* does not have an opportunity to react to said touch input. Instead, Flutter must be notified by
* Android of the desired action. Additionally, some accessibility systems use other input
* methods, such as speech, to take virtual actions. Android interprets those requests and then
* instructs the app to take the appropriate action.
*/
public void dispatchSemanticsAction(
int virtualViewId, @NonNull AccessibilityBridge.Action action) {
flutterJNI.dispatchSemanticsAction(virtualViewId, action);
}
/**
* Instructs Flutter to perform the given {@code action} on the {@code SemanticsNode} referenced
* by the given {@code virtualViewId}, passing the given {@code args}.
*/
public void dispatchSemanticsAction(
int virtualViewId, @NonNull AccessibilityBridge.Action action, @Nullable Object args) {
flutterJNI.dispatchSemanticsAction(virtualViewId, action, args);
}
/**
* Sets the {@link AccessibilityMessageHandler} which receives all events and requests that are
* parsed from the underlying accessibility channel.
*/
public void setAccessibilityMessageHandler(@Nullable AccessibilityMessageHandler handler) {
this.handler = handler;
flutterJNI.setAccessibilityDelegate(handler);
}
/**
* Handler that receives accessibility messages sent from Flutter to Android through a given
* {@link AccessibilityChannel}.
*
* <p>To register an {@code AccessibilityMessageHandler} with a {@link AccessibilityChannel}, see
* {@link AccessibilityChannel#setAccessibilityMessageHandler(AccessibilityMessageHandler)}.
*/
public interface AccessibilityMessageHandler extends FlutterJNI.AccessibilityDelegate {
/** The Dart application would like the given {@code message} to be announced. */
void announce(@NonNull String message);
/** The user has tapped on the semantics node with the given {@code nodeId}. */
void onTap(int nodeId);
/** The user has long pressed on the semantics node with the given {@code nodeId}. */
void onLongPress(int nodeId);
/** The framework has requested focus on the semantics node with the given {@code nodeId}. */
void onFocus(int nodeId);
/** The user has opened a tooltip. */
void onTooltip(@NonNull String message);
}
}
| engine/shell/platform/android/io/flutter/embedding/engine/systemchannels/AccessibilityChannel.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/embedding/engine/systemchannels/AccessibilityChannel.java",
"repo_id": "engine",
"token_count": 2753
} | 298 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugin.common;
import androidx.annotation.NonNull;
import io.flutter.Log;
import io.flutter.plugin.common.StandardMessageCodec.ExposedByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/**
* A {@link MethodCodec} using the Flutter standard binary encoding.
*
* <p>This codec is guaranteed to be compatible with the corresponding <a
* href="https://api.flutter.dev/flutter/services/StandardMethodCodec-class.html">StandardMethodCodec</a>
* on the Dart side. These parts of the Flutter SDK are evolved synchronously.
*
* <p>Values supported as method arguments and result payloads are those supported by {@link
* StandardMessageCodec}.
*/
public final class StandardMethodCodec implements MethodCodec {
public static final StandardMethodCodec INSTANCE =
new StandardMethodCodec(StandardMessageCodec.INSTANCE);
private final StandardMessageCodec messageCodec;
/** Creates a new method codec based on the specified message codec. */
public StandardMethodCodec(@NonNull StandardMessageCodec messageCodec) {
this.messageCodec = messageCodec;
}
@Override
@NonNull
public ByteBuffer encodeMethodCall(@NonNull MethodCall methodCall) {
final ExposedByteArrayOutputStream stream = new ExposedByteArrayOutputStream();
messageCodec.writeValue(stream, methodCall.method);
messageCodec.writeValue(stream, methodCall.arguments);
final ByteBuffer buffer = ByteBuffer.allocateDirect(stream.size());
buffer.put(stream.buffer(), 0, stream.size());
return buffer;
}
@Override
@NonNull
public MethodCall decodeMethodCall(@NonNull ByteBuffer methodCall) {
methodCall.order(ByteOrder.nativeOrder());
final Object method = messageCodec.readValue(methodCall);
final Object arguments = messageCodec.readValue(methodCall);
if (method instanceof String && !methodCall.hasRemaining()) {
return new MethodCall((String) method, arguments);
}
throw new IllegalArgumentException("Method call corrupted");
}
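  // Illustrative round trip (method name and arguments are hypothetical): decodeMethodCall reads
  // from the buffer's current position, and encodeMethodCall returns a direct buffer positioned
  // at the end of the payload, so flip() (or rewind()) before decoding.
  //
  //   StandardMethodCodec codec = StandardMethodCodec.INSTANCE;
  //   ByteBuffer buffer = codec.encodeMethodCall(new MethodCall("getBatteryLevel", null));
  //   buffer.flip();
  //   MethodCall decoded = codec.decodeMethodCall(buffer); // decoded.method == "getBatteryLevel"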
@Override
@NonNull
public ByteBuffer encodeSuccessEnvelope(@NonNull Object result) {
final ExposedByteArrayOutputStream stream = new ExposedByteArrayOutputStream();
stream.write(0);
messageCodec.writeValue(stream, result);
final ByteBuffer buffer = ByteBuffer.allocateDirect(stream.size());
buffer.put(stream.buffer(), 0, stream.size());
return buffer;
}
@Override
@NonNull
public ByteBuffer encodeErrorEnvelope(
@NonNull String errorCode, @NonNull String errorMessage, @NonNull Object errorDetails) {
final ExposedByteArrayOutputStream stream = new ExposedByteArrayOutputStream();
stream.write(1);
messageCodec.writeValue(stream, errorCode);
messageCodec.writeValue(stream, errorMessage);
if (errorDetails instanceof Throwable) {
messageCodec.writeValue(stream, Log.getStackTraceString((Throwable) errorDetails));
} else {
messageCodec.writeValue(stream, errorDetails);
}
final ByteBuffer buffer = ByteBuffer.allocateDirect(stream.size());
buffer.put(stream.buffer(), 0, stream.size());
return buffer;
}
@Override
@NonNull
public ByteBuffer encodeErrorEnvelopeWithStacktrace(
@NonNull String errorCode,
@NonNull String errorMessage,
@NonNull Object errorDetails,
@NonNull String errorStacktrace) {
final ExposedByteArrayOutputStream stream = new ExposedByteArrayOutputStream();
stream.write(1);
messageCodec.writeValue(stream, errorCode);
messageCodec.writeValue(stream, errorMessage);
if (errorDetails instanceof Throwable) {
messageCodec.writeValue(stream, Log.getStackTraceString((Throwable) errorDetails));
} else {
messageCodec.writeValue(stream, errorDetails);
}
messageCodec.writeValue(stream, errorStacktrace);
final ByteBuffer buffer = ByteBuffer.allocateDirect(stream.size());
buffer.put(stream.buffer(), 0, stream.size());
return buffer;
}
@Override
@NonNull
public Object decodeEnvelope(@NonNull ByteBuffer envelope) {
envelope.order(ByteOrder.nativeOrder());
final byte flag = envelope.get();
switch (flag) {
case 0:
{
final Object result = messageCodec.readValue(envelope);
if (!envelope.hasRemaining()) {
return result;
}
}
// Falls through intentionally.
case 1:
{
final Object code = messageCodec.readValue(envelope);
final Object message = messageCodec.readValue(envelope);
final Object details = messageCodec.readValue(envelope);
if (code instanceof String
&& (message == null || message instanceof String)
&& !envelope.hasRemaining()) {
throw new FlutterException((String) code, (String) message, details);
}
}
}
throw new IllegalArgumentException("Envelope corrupted");
}
}
| engine/shell/platform/android/io/flutter/plugin/common/StandardMethodCodec.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/plugin/common/StandardMethodCodec.java",
"repo_id": "engine",
"token_count": 1662
} | 299 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugin.platform;
import android.content.Context;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import io.flutter.plugin.common.MessageCodec;
public abstract class PlatformViewFactory {
private final MessageCodec<Object> createArgsCodec;
/** @param createArgsCodec the codec used to decode the args parameter of {@link #create}. */
public PlatformViewFactory(@Nullable MessageCodec<Object> createArgsCodec) {
this.createArgsCodec = createArgsCodec;
}
/**
* Creates a new Android view to be embedded in the Flutter hierarchy.
*
   * @param context the context to be used when creating the view; this is different from
   *     FlutterView's context.
   * @param viewId unique identifier for the created instance; this value is known on the Dart side.
* @param args arguments sent from the Flutter app. The bytes for this value are decoded using the
* createArgsCodec argument passed to the constructor. This is null if createArgsCodec was
* null, or no arguments were sent from the Flutter app.
*/
@NonNull
public abstract PlatformView create(Context context, int viewId, @Nullable Object args);
/** Returns the codec to be used for decoding the args parameter of {@link #create}. */
@Nullable
public final MessageCodec<Object> getCreateArgsCodec() {
return createArgsCodec;
}
}
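// A minimal, hedged sketch (not part of the engine sources) of how a plugin might subclass
// PlatformViewFactory. The factory name, the use of TextView, and the expectation of a Map
// creation argument with a "text" key are illustrative assumptions only.
class ExampleTextViewFactory extends PlatformViewFactory {
  ExampleTextViewFactory() {
    // StandardMessageCodec decodes the creation params sent from the Dart side into an Object.
    super(io.flutter.plugin.common.StandardMessageCodec.INSTANCE);
  }
  @NonNull
  @Override
  public PlatformView create(Context context, int viewId, @Nullable Object args) {
    final android.widget.TextView textView = new android.widget.TextView(context);
    if (args instanceof java.util.Map) {
      // Creation params are assumed to arrive as a Map with an optional "text" entry.
      final Object text = ((java.util.Map<?, ?>) args).get("text");
      if (text != null) {
        textView.setText(text.toString());
      }
    }
    return new PlatformView() {
      @Override
      public android.view.View getView() {
        return textView;
      }
      @Override
      public void dispose() {
        // Nothing to release for this simple view.
      }
    };
  }
}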
| engine/shell/platform/android/io/flutter/plugin/platform/PlatformViewFactory.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/plugin/platform/PlatformViewFactory.java",
"repo_id": "engine",
"token_count": 436
} | 300 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.util;
import static io.flutter.Build.API_LEVELS;
import android.content.Context;
import android.os.Build;
import androidx.annotation.NonNull;
import java.io.File;
public final class PathUtils {
@NonNull
public static String getFilesDir(@NonNull Context applicationContext) {
File filesDir = applicationContext.getFilesDir();
if (filesDir == null) {
filesDir = new File(getDataDirPath(applicationContext), "files");
}
return filesDir.getPath();
}
@NonNull
public static String getDataDirectory(@NonNull Context applicationContext) {
final String name = "flutter";
File flutterDir = applicationContext.getDir(name, Context.MODE_PRIVATE);
if (flutterDir == null) {
flutterDir = new File(getDataDirPath(applicationContext), "app_" + name);
}
return flutterDir.getPath();
}
@NonNull
public static String getCacheDirectory(@NonNull Context applicationContext) {
File cacheDir;
cacheDir = applicationContext.getCodeCacheDir();
if (cacheDir == null) {
cacheDir = applicationContext.getCacheDir();
}
if (cacheDir == null) {
// This can happen if the disk is full. This code path is used to set up dart:io's
// `Directory.systemTemp`. It's unknown if the application will ever try to
// use that or not, so do not throw here. In this case, this directory does
// not exist because the disk is full, and the application will later get an
// exception when it tries to actually write.
cacheDir = new File(getDataDirPath(applicationContext), "cache");
}
return cacheDir.getPath();
}
private static String getDataDirPath(Context applicationContext) {
if (Build.VERSION.SDK_INT >= API_LEVELS.API_24) {
return applicationContext.getDataDir().getPath();
} else {
return applicationContext.getApplicationInfo().dataDir;
}
}
}
| engine/shell/platform/android/io/flutter/util/PathUtils.java/0 | {
"file_path": "engine/shell/platform/android/io/flutter/util/PathUtils.java",
"repo_id": "engine",
"token_count": 661
} | 301 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "flutter/shell/platform/android/jni/jni_mock.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
namespace flutter {
namespace testing {
TEST(JNIMock, FlutterViewHandlePlatformMessage) {
JNIMock mock;
auto message = std::make_unique<PlatformMessage>("<channel-name>", nullptr);
auto response_id = 1;
EXPECT_CALL(mock,
FlutterViewHandlePlatformMessage(
::testing::Property(&std::unique_ptr<PlatformMessage>::get,
message.get()),
response_id));
mock.FlutterViewHandlePlatformMessage(std::move(message), response_id);
}
} // namespace testing
} // namespace flutter
| engine/shell/platform/android/jni/jni_mock_unittest.cc/0 | {
"file_path": "engine/shell/platform/android/jni/jni_mock_unittest.cc",
"repo_id": "engine",
"token_count": 331
} | 302 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "shell/platform/android/flutter_main.h"
#include "third_party/googletest/googlemock/include/gmock/gmock-nice-strict.h"
namespace flutter {
namespace testing {
TEST(AndroidPlatformView, SelectsVulkanBasedOnApiLevel) {
Settings settings;
settings.enable_software_rendering = false;
settings.enable_impeller = true;
int api_level = android_get_device_api_level();
EXPECT_GT(api_level, 0);
if (api_level >= 29) {
EXPECT_EQ(FlutterMain::SelectedRenderingAPI(settings),
AndroidRenderingAPI::kImpellerVulkan);
} else {
EXPECT_EQ(FlutterMain::SelectedRenderingAPI(settings),
AndroidRenderingAPI::kSkiaOpenGLES);
}
}
TEST(AndroidPlatformView, SoftwareRenderingNotSupportedWithImpeller) {
Settings settings;
settings.enable_software_rendering = true;
settings.enable_impeller = true;
ASSERT_DEATH(FlutterMain::SelectedRenderingAPI(settings), "");
}
} // namespace testing
} // namespace flutter
| engine/shell/platform/android/platform_view_android_unittests.cc/0 | {
"file_path": "engine/shell/platform/android/platform_view_android_unittests.cc",
"repo_id": "engine",
"token_count": 418
} | 303 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import io.flutter.embedding.engine.deferredcomponents.PlayStoreDeferredComponentManager;
import io.flutter.embedding.engine.loader.FlutterLoader;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.annotation.Config;
@Config(manifest = Config.NONE)
@RunWith(AndroidJUnit4.class)
public class FlutterInjectorTest {
@Mock FlutterLoader mockFlutterLoader;
@Mock PlayStoreDeferredComponentManager mockDeferredComponentManager;
@Mock ExecutorService mockExecutorService;
@Before
public void setUp() {
    // Reset the injector since the intent is to have a convenient static class to use in production.
FlutterInjector.reset();
MockitoAnnotations.openMocks(this);
}
@After
public void tearDown() {
FlutterInjector.reset();
}
@Test
public void itHasSomeReasonableDefaults() {
// Implicitly builds when first accessed.
FlutterInjector injector = FlutterInjector.instance();
assertNotNull(injector.flutterLoader());
assertNull(injector.deferredComponentManager());
assertNotNull(injector.executorService());
}
@Test
public void executorCreatesAndNamesNewThreadsByDefault()
throws InterruptedException, ExecutionException {
// Implicitly builds when first accessed.
FlutterInjector injector = FlutterInjector.instance();
List<Callable<String>> callables =
Arrays.asList(
() -> {
return Thread.currentThread().getName();
},
() -> {
return Thread.currentThread().getName();
});
    List<Future<String>> threadNames = injector.executorService().invokeAll(callables);
assertEquals(threadNames.size(), 2);
for (Future<String> name : threadNames) {
assertTrue(name.get().startsWith("flutter-worker-"));
}
}
@Test
public void canPartiallyOverride() {
FlutterInjector.setInstance(
new FlutterInjector.Builder().setFlutterLoader(mockFlutterLoader).build());
FlutterInjector injector = FlutterInjector.instance();
assertEquals(injector.flutterLoader(), mockFlutterLoader);
}
@Test
public void canInjectDeferredComponentManager() {
FlutterInjector.setInstance(
new FlutterInjector.Builder()
.setDeferredComponentManager(mockDeferredComponentManager)
.build());
FlutterInjector injector = FlutterInjector.instance();
assertEquals(injector.deferredComponentManager(), mockDeferredComponentManager);
}
@Test
public void canInjectExecutorService() {
FlutterInjector.setInstance(
new FlutterInjector.Builder().setExecutorService(mockExecutorService).build());
FlutterInjector injector = FlutterInjector.instance();
assertEquals(injector.executorService(), mockExecutorService);
}
@Test()
public void cannotBeChangedOnceRead() {
FlutterInjector.instance();
assertThrows(
IllegalStateException.class,
() -> {
FlutterInjector.setInstance(
new FlutterInjector.Builder().setFlutterLoader(mockFlutterLoader).build());
});
}
}
| engine/shell/platform/android/test/io/flutter/FlutterInjectorTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/FlutterInjectorTest.java",
"repo_id": "engine",
"token_count": 1358
} | 304 |
package io.flutter.embedding.engine;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import androidx.test.core.app.ApplicationProvider;
import io.flutter.FlutterInjector;
import io.flutter.embedding.engine.loader.FlutterLoader;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
@Config(manifest = Config.NONE)
@RunWith(RobolectricTestRunner.class)
public class FlutterEngineGroupCacheTest {
private FlutterEngineGroup flutterEngineGroup;
@Before
public void setup() {
    // Create a mocked FlutterEngineGroup that is provided to run this test case.
FlutterInjector.reset();
FlutterLoader mockFlutterLoader = mock(FlutterLoader.class);
when(mockFlutterLoader.findAppBundlePath()).thenReturn("default_flutter_assets/path");
FlutterInjector.setInstance(
new FlutterInjector.Builder().setFlutterLoader(mockFlutterLoader).build());
flutterEngineGroup = mock(FlutterEngineGroup.class);
}
@Test
public void itHoldsFlutterEngineGroups() {
// --- Test Setup ---
FlutterEngineGroupCache cache = new FlutterEngineGroupCache();
// --- Execute Test ---
cache.put("my_flutter_engine_group", flutterEngineGroup);
// --- Verify Results ---
assertEquals(flutterEngineGroup, cache.get("my_flutter_engine_group"));
}
@Test
public void itQueriesFlutterEngineGroupExistence() {
// --- Test Setup ---
FlutterEngineGroupCache cache = new FlutterEngineGroupCache();
// --- Execute Test ---
assertFalse(cache.contains("my_flutter_engine_group"));
cache.put("my_flutter_engine_group", flutterEngineGroup);
// --- Verify Results ---
assertTrue(cache.contains("my_flutter_engine_group"));
}
@Test
public void itRemovesFlutterEngineGroups() {
// --- Test Setup ---
FlutterEngineGroupCache cache = new FlutterEngineGroupCache();
// --- Execute Test ---
cache.put("my_flutter_engine_group", flutterEngineGroup);
cache.remove("my_flutter_engine_group");
// --- Verify Results ---
assertNull(cache.get("my_flutter_engine_group"));
}
@Test
public void itRemovesAllFlutterEngineGroups() {
// --- Test Setup ---
FlutterEngineGroup flutterEngineGroup1 =
new FlutterEngineGroup(ApplicationProvider.getApplicationContext());
FlutterEngineGroup flutterEngineGroup2 =
new FlutterEngineGroup(ApplicationProvider.getApplicationContext());
FlutterEngineGroupCache cache = new FlutterEngineGroupCache();
// --- Execute Test ---
cache.put("my_flutter_engine_group", flutterEngineGroup1);
cache.put("my_flutter_engine_group_2", flutterEngineGroup2);
// --- Verify Results ---
assertEquals(flutterEngineGroup1, cache.get("my_flutter_engine_group"));
assertEquals(flutterEngineGroup2, cache.get("my_flutter_engine_group_2"));
cache.clear();
// --- Verify Results ---
assertNull(cache.get("my_flutter_engine_group"));
assertNull(cache.get("my_flutter_engine_group_2"));
}
}
| engine/shell/platform/android/test/io/flutter/embedding/engine/FlutterEngineGroupCacheTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/embedding/engine/FlutterEngineGroupCacheTest.java",
"repo_id": "engine",
"token_count": 1097
} | 305 |
package io.flutter.embedding.engine.renderer;
import static junit.framework.TestCase.*;
import static org.mockito.Mockito.*;
import android.graphics.SurfaceTexture;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)
public class SurfaceTextureWrapperTest {
@Test
public void attachToGLContext() {
final SurfaceTexture tx = mock(SurfaceTexture.class);
final SurfaceTextureWrapper wrapper = new SurfaceTextureWrapper(tx);
wrapper.attachToGLContext(0);
verify(tx, times(1)).attachToGLContext(0);
verifyNoMoreInteractions(tx);
}
@Test
public void attachToGLContext_detachesFromCurrentContext() {
final SurfaceTexture tx = mock(SurfaceTexture.class);
final SurfaceTextureWrapper wrapper = new SurfaceTextureWrapper(tx);
wrapper.attachToGLContext(0);
reset(tx);
wrapper.attachToGLContext(0);
verify(tx, times(1)).detachFromGLContext();
verify(tx, times(1)).attachToGLContext(0);
verifyNoMoreInteractions(tx);
}
@Test
  public void attachToGLContext_doesNotDetachFromCurrentContext() {
final SurfaceTexture tx = mock(SurfaceTexture.class);
final SurfaceTextureWrapper wrapper = new SurfaceTextureWrapper(tx);
wrapper.attachToGLContext(0);
wrapper.detachFromGLContext();
reset(tx);
wrapper.attachToGLContext(0);
verify(tx, times(1)).attachToGLContext(0);
verifyNoMoreInteractions(tx);
}
@Test
public void detachFromGLContext() {
final SurfaceTexture tx = mock(SurfaceTexture.class);
final SurfaceTextureWrapper wrapper = new SurfaceTextureWrapper(tx);
wrapper.attachToGLContext(0);
reset(tx);
wrapper.detachFromGLContext();
verify(tx, times(1)).detachFromGLContext();
verifyNoMoreInteractions(tx);
}
@Test
public void release() {
final SurfaceTexture tx = mock(SurfaceTexture.class);
final SurfaceTextureWrapper wrapper = new SurfaceTextureWrapper(tx);
wrapper.release();
verify(tx, times(1)).release();
reset(tx);
wrapper.detachFromGLContext();
wrapper.attachToGLContext(0);
verifyNoMoreInteractions(tx);
}
}
| engine/shell/platform/android/test/io/flutter/embedding/engine/renderer/SurfaceTextureWrapperTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/embedding/engine/renderer/SurfaceTextureWrapperTest.java",
"repo_id": "engine",
"token_count": 728
} | 306 |
package io.flutter.plugin.editing;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.isNull;
import static org.mockito.Mockito.anyString;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.content.ClipDescription;
import android.content.ClipboardManager;
import android.content.ContentResolver;
import android.content.Context;
import android.content.res.AssetManager;
import android.net.Uri;
import android.os.Bundle;
import android.text.InputType;
import android.text.Selection;
import android.text.SpannableStringBuilder;
import android.view.KeyEvent;
import android.view.View;
import android.view.inputmethod.CursorAnchorInfo;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.ExtractedText;
import android.view.inputmethod.ExtractedTextRequest;
import android.view.inputmethod.InputConnection;
import android.view.inputmethod.InputContentInfo;
import android.view.inputmethod.InputMethodManager;
import androidx.core.view.inputmethod.InputConnectionCompat;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.ibm.icu.lang.UCharacter;
import com.ibm.icu.lang.UProperty;
import io.flutter.embedding.android.KeyboardManager;
import io.flutter.embedding.engine.FlutterJNI;
import io.flutter.embedding.engine.dart.DartExecutor;
import io.flutter.embedding.engine.systemchannels.TextInputChannel;
import io.flutter.plugin.common.JSONMethodCodec;
import io.flutter.plugin.common.MethodCall;
import io.flutter.util.FakeKeyEvent;
import java.io.ByteArrayInputStream;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import org.json.JSONArray;
import org.json.JSONException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.Shadows;
import org.robolectric.annotation.Config;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import org.robolectric.shadow.api.Shadow;
import org.robolectric.shadows.ShadowContentResolver;
import org.robolectric.shadows.ShadowInputMethodManager;
@Config(
manifest = Config.NONE,
shadows = {InputConnectionAdaptorTest.TestImm.class})
@RunWith(AndroidJUnit4.class)
public class InputConnectionAdaptorTest {
private final Context ctx = ApplicationProvider.getApplicationContext();
private ContentResolver contentResolver;
private ShadowContentResolver shadowContentResolver;
@Mock KeyboardManager mockKeyboardManager;
// Verifies the method and arguments for a captured method call.
private void verifyMethodCall(ByteBuffer buffer, String methodName, String[] expectedArgs)
throws JSONException {
buffer.rewind();
MethodCall methodCall = JSONMethodCodec.INSTANCE.decodeMethodCall(buffer);
assertEquals(methodName, methodCall.method);
if (expectedArgs != null) {
JSONArray args = methodCall.arguments();
assertEquals(expectedArgs.length, args.length());
for (int i = 0; i < args.length(); i++) {
assertEquals(expectedArgs[i], args.get(i).toString());
}
}
}
@Before
public void setUp() {
MockitoAnnotations.openMocks(this);
contentResolver = ctx.getContentResolver();
shadowContentResolver = Shadows.shadowOf(contentResolver);
}
@Test
public void inputConnectionAdaptor_ReceivesEnter() throws NullPointerException {
View testView = new View(ctx);
FlutterJNI mockFlutterJni = mock(FlutterJNI.class);
DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJni, mock(AssetManager.class)));
int inputTargetId = 0;
TextInputChannel textInputChannel = new TextInputChannel(dartExecutor);
ListenableEditingState mEditable = new ListenableEditingState(null, testView);
Selection.setSelection(mEditable, 0, 0);
ListenableEditingState spyEditable = spy(mEditable);
EditorInfo outAttrs = new EditorInfo();
outAttrs.inputType = InputType.TYPE_CLASS_TEXT | InputType.TYPE_TEXT_FLAG_MULTI_LINE;
InputConnectionAdaptor inputConnectionAdaptor =
new InputConnectionAdaptor(
testView, inputTargetId, textInputChannel, mockKeyboardManager, spyEditable, outAttrs);
// Send an enter key and make sure the Editable received it.
FakeKeyEvent keyEvent = new FakeKeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_ENTER, '\n');
inputConnectionAdaptor.handleKeyEvent(keyEvent);
verify(spyEditable, times(1)).insert(eq(0), anyString());
}
@Test
public void testPerformContextMenuAction_selectAll() {
int selStart = 5;
ListenableEditingState editable = sampleEditable(selStart, selStart);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
boolean didConsume = adaptor.performContextMenuAction(android.R.id.selectAll);
assertTrue(didConsume);
assertEquals(0, Selection.getSelectionStart(editable));
assertEquals(editable.length(), Selection.getSelectionEnd(editable));
}
@SuppressWarnings("deprecation")
// ClipboardManager.hasText is deprecated.
@Test
public void testPerformContextMenuAction_cut() {
ClipboardManager clipboardManager = ctx.getSystemService(ClipboardManager.class);
int selStart = 6;
int selEnd = 11;
ListenableEditingState editable = sampleEditable(selStart, selEnd);
CharSequence textToBeCut = editable.subSequence(selStart, selEnd);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
boolean didConsume = adaptor.performContextMenuAction(android.R.id.cut);
assertTrue(didConsume);
assertTrue(clipboardManager.hasText());
assertEquals(textToBeCut, clipboardManager.getPrimaryClip().getItemAt(0).getText());
assertFalse(editable.toString().contains(textToBeCut));
}
@SuppressWarnings("deprecation")
// ClipboardManager.hasText is deprecated.
@Test
public void testPerformContextMenuAction_copy() {
ClipboardManager clipboardManager = ctx.getSystemService(ClipboardManager.class);
int selStart = 6;
int selEnd = 11;
ListenableEditingState editable = sampleEditable(selStart, selEnd);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
assertFalse(clipboardManager.hasText());
boolean didConsume = adaptor.performContextMenuAction(android.R.id.copy);
assertTrue(didConsume);
assertTrue(clipboardManager.hasText());
assertEquals(
editable.subSequence(selStart, selEnd),
clipboardManager.getPrimaryClip().getItemAt(0).getText());
}
@SuppressWarnings("deprecation")
// ClipboardManager.setText is deprecated.
@Test
public void testPerformContextMenuAction_paste() {
ClipboardManager clipboardManager = ctx.getSystemService(ClipboardManager.class);
String textToBePasted = "deadbeef";
clipboardManager.setText(textToBePasted);
ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
boolean didConsume = adaptor.performContextMenuAction(android.R.id.paste);
assertTrue(didConsume);
assertTrue(editable.toString().startsWith(textToBePasted));
}
@SuppressWarnings("deprecation")
// DartExecutor.send is deprecated.
@Test
public void testCommitContent() throws JSONException {
View testView = new View(ctx);
int client = 0;
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJNI, mock(AssetManager.class)));
TextInputChannel textInputChannel = new TextInputChannel(dartExecutor);
ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor =
new InputConnectionAdaptor(
testView,
client,
textInputChannel,
mockKeyboardManager,
editable,
null,
mockFlutterJNI);
String uri = "content://mock/uri/test/commitContent";
Charset charset = Charset.forName("UTF-8");
String fakeImageData = "fake image data";
byte[] fakeImageDataBytes = fakeImageData.getBytes(charset);
shadowContentResolver.registerInputStream(
Uri.parse(uri), new ByteArrayInputStream(fakeImageDataBytes));
boolean commitContentSuccess =
adaptor.commitContent(
new InputContentInfo(
Uri.parse(uri),
new ClipDescription("commitContent test", new String[] {"image/png"})),
InputConnectionCompat.INPUT_CONTENT_GRANT_READ_URI_PERMISSION,
null);
assertTrue(commitContentSuccess);
ArgumentCaptor<String> channelCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
verify(dartExecutor, times(1)).send(channelCaptor.capture(), bufferCaptor.capture(), isNull());
assertEquals("flutter/textinput", channelCaptor.getValue());
String fakeImageDataIntString = "";
for (int i = 0; i < fakeImageDataBytes.length; i++) {
int byteAsInt = fakeImageDataBytes[i];
fakeImageDataIntString += byteAsInt;
if (i < (fakeImageDataBytes.length - 1)) {
fakeImageDataIntString += ",";
}
}
verifyMethodCall(
bufferCaptor.getValue(),
"TextInputClient.performAction",
new String[] {
"0",
"TextInputAction.commitContent",
"{\"data\":["
+ fakeImageDataIntString
+ "],\"mimeType\":\"image\\/png\",\"uri\":\"content:\\/\\/mock\\/uri\\/test\\/commitContent\"}"
});
}
@SuppressWarnings("deprecation")
// DartExecutor.send is deprecated.
@Test
public void testPerformPrivateCommand_dataIsNull() throws JSONException {
View testView = new View(ctx);
int client = 0;
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJNI, mock(AssetManager.class)));
TextInputChannel textInputChannel = new TextInputChannel(dartExecutor);
ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor =
new InputConnectionAdaptor(
testView,
client,
textInputChannel,
mockKeyboardManager,
editable,
null,
mockFlutterJNI);
adaptor.performPrivateCommand("actionCommand", null);
ArgumentCaptor<String> channelCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
verify(dartExecutor, times(1)).send(channelCaptor.capture(), bufferCaptor.capture(), isNull());
assertEquals("flutter/textinput", channelCaptor.getValue());
verifyMethodCall(
bufferCaptor.getValue(),
"TextInputClient.performPrivateCommand",
new String[] {"0", "{\"action\":\"actionCommand\"}"});
}
@SuppressWarnings("deprecation")
// DartExecutor.send is deprecated.
@Test
public void testPerformPrivateCommand_dataIsByteArray() throws JSONException {
View testView = new View(ctx);
int client = 0;
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJNI, mock(AssetManager.class)));
TextInputChannel textInputChannel = new TextInputChannel(dartExecutor);
ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor =
new InputConnectionAdaptor(
testView,
client,
textInputChannel,
mockKeyboardManager,
editable,
null,
mockFlutterJNI);
Bundle bundle = new Bundle();
byte[] buffer = new byte[] {'a', 'b', 'c', 'd'};
bundle.putByteArray("keyboard_layout", buffer);
adaptor.performPrivateCommand("actionCommand", bundle);
ArgumentCaptor<String> channelCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
verify(dartExecutor, times(1)).send(channelCaptor.capture(), bufferCaptor.capture(), isNull());
assertEquals("flutter/textinput", channelCaptor.getValue());
verifyMethodCall(
bufferCaptor.getValue(),
"TextInputClient.performPrivateCommand",
new String[] {
"0", "{\"data\":{\"keyboard_layout\":[97,98,99,100]},\"action\":\"actionCommand\"}"
});
}
@SuppressWarnings("deprecation")
// DartExecutor.send is deprecated.
@Test
public void testPerformPrivateCommand_dataIsByte() throws JSONException {
View testView = new View(ctx);
int client = 0;
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJNI, mock(AssetManager.class)));
TextInputChannel textInputChannel = new TextInputChannel(dartExecutor);
ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor =
new InputConnectionAdaptor(
testView,
client,
textInputChannel,
mockKeyboardManager,
editable,
null,
mockFlutterJNI);
Bundle bundle = new Bundle();
byte b = 3;
bundle.putByte("keyboard_layout", b);
adaptor.performPrivateCommand("actionCommand", bundle);
ArgumentCaptor<String> channelCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
verify(dartExecutor, times(1)).send(channelCaptor.capture(), bufferCaptor.capture(), isNull());
assertEquals("flutter/textinput", channelCaptor.getValue());
verifyMethodCall(
bufferCaptor.getValue(),
"TextInputClient.performPrivateCommand",
new String[] {"0", "{\"data\":{\"keyboard_layout\":3},\"action\":\"actionCommand\"}"});
}
@SuppressWarnings("deprecation")
// DartExecutor.send is deprecated.
@Test
public void testPerformPrivateCommand_dataIsCharArray() throws JSONException {
View testView = new View(ctx);
int client = 0;
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJNI, mock(AssetManager.class)));
TextInputChannel textInputChannel = new TextInputChannel(dartExecutor);
ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor =
new InputConnectionAdaptor(
testView,
client,
textInputChannel,
mockKeyboardManager,
editable,
null,
mockFlutterJNI);
Bundle bundle = new Bundle();
char[] buffer = new char[] {'a', 'b', 'c', 'd'};
bundle.putCharArray("keyboard_layout", buffer);
adaptor.performPrivateCommand("actionCommand", bundle);
ArgumentCaptor<String> channelCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
verify(dartExecutor, times(1)).send(channelCaptor.capture(), bufferCaptor.capture(), isNull());
assertEquals("flutter/textinput", channelCaptor.getValue());
verifyMethodCall(
bufferCaptor.getValue(),
"TextInputClient.performPrivateCommand",
new String[] {
"0",
"{\"data\":{\"keyboard_layout\":[\"a\",\"b\",\"c\",\"d\"]},\"action\":\"actionCommand\"}"
});
}
@SuppressWarnings("deprecation")
// DartExecutor.send is deprecated.
@Test
public void testPerformPrivateCommand_dataIsChar() throws JSONException {
View testView = new View(ctx);
int client = 0;
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJNI, mock(AssetManager.class)));
TextInputChannel textInputChannel = new TextInputChannel(dartExecutor);
ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor =
new InputConnectionAdaptor(
testView,
client,
textInputChannel,
mockKeyboardManager,
editable,
null,
mockFlutterJNI);
Bundle bundle = new Bundle();
char b = 'a';
bundle.putChar("keyboard_layout", b);
adaptor.performPrivateCommand("actionCommand", bundle);
ArgumentCaptor<String> channelCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
verify(dartExecutor, times(1)).send(channelCaptor.capture(), bufferCaptor.capture(), isNull());
assertEquals("flutter/textinput", channelCaptor.getValue());
verifyMethodCall(
bufferCaptor.getValue(),
"TextInputClient.performPrivateCommand",
new String[] {"0", "{\"data\":{\"keyboard_layout\":\"a\"},\"action\":\"actionCommand\"}"});
}
@SuppressWarnings("deprecation")
// DartExecutor.send is deprecated.
@Test
public void testPerformPrivateCommand_dataIsCharSequenceArray() throws JSONException {
View testView = new View(ctx);
int client = 0;
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJNI, mock(AssetManager.class)));
TextInputChannel textInputChannel = new TextInputChannel(dartExecutor);
ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor =
new InputConnectionAdaptor(
testView,
client,
textInputChannel,
mockKeyboardManager,
editable,
null,
mockFlutterJNI);
Bundle bundle = new Bundle();
CharSequence charSequence1 = new StringBuffer("abc");
CharSequence charSequence2 = new StringBuffer("efg");
CharSequence[] value = {charSequence1, charSequence2};
bundle.putCharSequenceArray("keyboard_layout", value);
adaptor.performPrivateCommand("actionCommand", bundle);
ArgumentCaptor<String> channelCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
verify(dartExecutor, times(1)).send(channelCaptor.capture(), bufferCaptor.capture(), isNull());
assertEquals("flutter/textinput", channelCaptor.getValue());
verifyMethodCall(
bufferCaptor.getValue(),
"TextInputClient.performPrivateCommand",
new String[] {
"0", "{\"data\":{\"keyboard_layout\":[\"abc\",\"efg\"]},\"action\":\"actionCommand\"}"
});
}
@SuppressWarnings("deprecation")
// DartExecutor.send is deprecated.
@Test
public void testPerformPrivateCommand_dataIsCharSequence() throws JSONException {
View testView = new View(ctx);
int client = 0;
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJNI, mock(AssetManager.class)));
TextInputChannel textInputChannel = new TextInputChannel(dartExecutor);
ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor =
new InputConnectionAdaptor(
testView,
client,
textInputChannel,
mockKeyboardManager,
editable,
null,
mockFlutterJNI);
Bundle bundle = new Bundle();
CharSequence charSequence = new StringBuffer("abc");
bundle.putCharSequence("keyboard_layout", charSequence);
adaptor.performPrivateCommand("actionCommand", bundle);
ArgumentCaptor<String> channelCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
verify(dartExecutor, times(1)).send(channelCaptor.capture(), bufferCaptor.capture(), isNull());
assertEquals("flutter/textinput", channelCaptor.getValue());
verifyMethodCall(
bufferCaptor.getValue(),
"TextInputClient.performPrivateCommand",
new String[] {
"0", "{\"data\":{\"keyboard_layout\":\"abc\"},\"action\":\"actionCommand\"}"
});
}
@SuppressWarnings("deprecation")
// DartExecutor.send is deprecated.
@Test
public void testPerformPrivateCommand_dataIsFloat() throws JSONException {
View testView = new View(ctx);
int client = 0;
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJNI, mock(AssetManager.class)));
TextInputChannel textInputChannel = new TextInputChannel(dartExecutor);
ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor =
new InputConnectionAdaptor(
testView,
client,
textInputChannel,
mockKeyboardManager,
editable,
null,
mockFlutterJNI);
Bundle bundle = new Bundle();
float value = 0.5f;
bundle.putFloat("keyboard_layout", value);
adaptor.performPrivateCommand("actionCommand", bundle);
ArgumentCaptor<String> channelCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
verify(dartExecutor, times(1)).send(channelCaptor.capture(), bufferCaptor.capture(), isNull());
assertEquals("flutter/textinput", channelCaptor.getValue());
verifyMethodCall(
bufferCaptor.getValue(),
"TextInputClient.performPrivateCommand",
new String[] {"0", "{\"data\":{\"keyboard_layout\":0.5},\"action\":\"actionCommand\"}"});
}
@SuppressWarnings("deprecation")
// DartExecutor.send is deprecated.
@Test
public void testPerformPrivateCommand_dataIsFloatArray() throws JSONException {
View testView = new View(ctx);
int client = 0;
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
DartExecutor dartExecutor = spy(new DartExecutor(mockFlutterJNI, mock(AssetManager.class)));
TextInputChannel textInputChannel = new TextInputChannel(dartExecutor);
ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor =
new InputConnectionAdaptor(
testView,
client,
textInputChannel,
mockKeyboardManager,
editable,
null,
mockFlutterJNI);
Bundle bundle = new Bundle();
float[] value = {0.5f, 0.6f};
bundle.putFloatArray("keyboard_layout", value);
adaptor.performPrivateCommand("actionCommand", bundle);
ArgumentCaptor<String> channelCaptor = ArgumentCaptor.forClass(String.class);
ArgumentCaptor<ByteBuffer> bufferCaptor = ArgumentCaptor.forClass(ByteBuffer.class);
verify(dartExecutor, times(1)).send(channelCaptor.capture(), bufferCaptor.capture(), isNull());
assertEquals("flutter/textinput", channelCaptor.getValue());
verifyMethodCall(
bufferCaptor.getValue(),
"TextInputClient.performPrivateCommand",
new String[] {
"0", "{\"data\":{\"keyboard_layout\":[0.5,0.6]},\"action\":\"actionCommand\"}"
});
}
@Test
public void testSendKeyEvent_shiftKeyUpDoesNotCancelSelection() {
// Regression test for https://github.com/flutter/flutter/issues/101569.
int selStart = 5;
int selEnd = 10;
ListenableEditingState editable = sampleEditable(selStart, selEnd);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent shiftKeyUp = new KeyEvent(KeyEvent.ACTION_UP, KeyEvent.KEYCODE_SHIFT_LEFT);
boolean didConsume = adaptor.handleKeyEvent(shiftKeyUp);
assertFalse(didConsume);
assertEquals(selStart, Selection.getSelectionStart(editable));
assertEquals(selEnd, Selection.getSelectionEnd(editable));
}
@Test
public void testSendKeyEvent_leftKeyMovesCaretLeft() {
int selStart = 5;
ListenableEditingState editable = sampleEditable(selStart, selStart);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent leftKeyDown = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_LEFT);
boolean didConsume = adaptor.handleKeyEvent(leftKeyDown);
assertTrue(didConsume);
assertEquals(selStart - 1, Selection.getSelectionStart(editable));
assertEquals(selStart - 1, Selection.getSelectionEnd(editable));
}
@Test
public void testSendKeyEvent_leftKeyMovesCaretLeftComplexEmoji() {
int selStart = 75;
ListenableEditingState editable = sampleEditable(selStart, selStart, SAMPLE_EMOJI_TEXT);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent downKeyDown = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_LEFT);
boolean didConsume;
// Normal Character
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 74);
// Non-Spacing Mark
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 73);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 72);
// Keycap
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 69);
// Keycap with invalid base
adaptor.setSelection(68, 68);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 66);
adaptor.setSelection(67, 67);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 66);
// Zero Width Joiner
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 55);
// Zero Width Joiner with invalid base
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 53);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 52);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 51);
// ----- Start Emoji Tag Sequence with invalid base testing ----
// Delete base tag
adaptor.setSelection(39, 39);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 37);
// Delete the sequence
adaptor.setSelection(49, 49);
for (int i = 0; i < 6; i++) {
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
}
assertEquals(Selection.getSelectionStart(editable), 37);
// ----- End Emoji Tag Sequence with invalid base testing ----
// Emoji Tag Sequence
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 23);
// Variation Selector with invalid base
adaptor.setSelection(22, 22);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 21);
adaptor.setSelection(22, 22);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 21);
// Variation Selector
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 19);
// Emoji Modifier
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 16);
// Emoji Modifier with invalid base
adaptor.setSelection(14, 14);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 13);
adaptor.setSelection(14, 14);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 13);
// Line Feed
adaptor.setSelection(12, 12);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 11);
// Carriage Return
adaptor.setSelection(12, 12);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 11);
// Carriage Return and Line Feed
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 9);
// Regional Indicator Symbol odd
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 7);
// Regional Indicator Symbol even
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 3);
// Simple Emoji
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 1);
// First CodePoint
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 0);
}
@Test
public void testSendKeyEvent_leftKeyExtendsSelectionLeft() {
int selStart = 5;
int selEnd = 40;
ListenableEditingState editable = sampleEditable(selStart, selEnd);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent leftKeyDown = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_LEFT);
boolean didConsume = adaptor.handleKeyEvent(leftKeyDown);
assertTrue(didConsume);
assertEquals(selStart, Selection.getSelectionStart(editable));
assertEquals(selEnd - 1, Selection.getSelectionEnd(editable));
}
@Test
public void testSendKeyEvent_shiftLeftKeyStartsSelectionLeft() {
int selStart = 5;
ListenableEditingState editable = sampleEditable(selStart, selStart);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent shiftLeftKeyDown =
new KeyEvent(
0, 0, KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_LEFT, 0, KeyEvent.META_SHIFT_ON);
boolean didConsume = adaptor.handleKeyEvent(shiftLeftKeyDown);
assertTrue(didConsume);
assertEquals(selStart, Selection.getSelectionStart(editable));
assertEquals(selStart - 1, Selection.getSelectionEnd(editable));
}
@Test
public void testSendKeyEvent_rightKeyMovesCaretRight() {
int selStart = 5;
ListenableEditingState editable = sampleEditable(selStart, selStart);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent rightKeyDown = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_RIGHT);
boolean didConsume = adaptor.handleKeyEvent(rightKeyDown);
assertTrue(didConsume);
assertEquals(selStart + 1, Selection.getSelectionStart(editable));
assertEquals(selStart + 1, Selection.getSelectionEnd(editable));
}
@Test
public void testSendKeyEvent_rightKeyMovesCaretRightComplexRegion() {
int selStart = 0;
// Seven region indicator characters. The first six should be considered as
// three region indicators, and the final seventh character should be
// considered to be on its own because it has no partner.
String SAMPLE_REGION_TEXT = "🇷🇷🇷🇷🇷🇷🇷";
ListenableEditingState editable = sampleEditable(selStart, selStart, SAMPLE_REGION_TEXT);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent downKeyDown = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_RIGHT);
boolean didConsume;
// The cursor moves over two region indicators at a time.
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 4);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 8);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 12);
// When there is only one region indicator left with no pair, the cursor
// moves over that single region indicator.
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 14);
// If the cursor is placed in the middle of a region indicator pair, it
// moves over only the second half of the pair.
adaptor.setSelection(6, 6);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 8);
}
@Test
public void testSendKeyEvent_rightKeyMovesCaretRightComplexEmoji() {
int selStart = 0;
ListenableEditingState editable = sampleEditable(selStart, selStart, SAMPLE_EMOJI_TEXT);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent downKeyDown = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_RIGHT);
boolean didConsume;
// First CodePoint
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 1);
// Simple Emoji
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 3);
// Regional Indicator Symbol even
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 7);
// Regional Indicator Symbol odd
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 9);
// Carriage Return
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 10);
// Line Feed and Carriage Return
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 12);
// Line Feed
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 13);
// Modified Emoji
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 16);
// Emoji Modifier
adaptor.setSelection(14, 14);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 16);
// Emoji Modifier with invalid base
adaptor.setSelection(18, 18);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 19);
// Variation Selector
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 21);
// Variation Selector with invalid base
adaptor.setSelection(22, 22);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 23);
// Emoji Tag Sequence
for (int i = 0; i < 7; i++) {
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 25 + 2 * i);
}
assertEquals(Selection.getSelectionStart(editable), 37);
// ----- Start Emoji Tag Sequence with invalid base testing ----
// Pass the sequence
adaptor.setSelection(39, 39);
for (int i = 0; i < 6; i++) {
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 41 + 2 * i);
}
assertEquals(Selection.getSelectionStart(editable), 51);
// ----- End Emoji Tag Sequence with invalid base testing ----
// Zero Width Joiner with invalid base
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 52);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 53);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 55);
// Zero Width Joiner
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 66);
// Keycap with invalid base
adaptor.setSelection(67, 67);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 68);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 69);
// Keycap
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 72);
// Non-Spacing Mark
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 73);
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 74);
// Normal Character
didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
assertEquals(Selection.getSelectionStart(editable), 75);
}
@Test
public void testSendKeyEvent_rightKeyExtendsSelectionRight() {
int selStart = 5;
int selEnd = 40;
ListenableEditingState editable = sampleEditable(selStart, selEnd);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent rightKeyDown = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_RIGHT);
boolean didConsume = adaptor.handleKeyEvent(rightKeyDown);
assertTrue(didConsume);
assertEquals(selStart, Selection.getSelectionStart(editable));
assertEquals(selEnd + 1, Selection.getSelectionEnd(editable));
}
@Test
public void testSendKeyEvent_shiftRightKeyStartsSelectionRight() {
int selStart = 5;
ListenableEditingState editable = sampleEditable(selStart, selStart);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent shiftRightKeyDown =
new KeyEvent(
0, 0, KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_RIGHT, 0, KeyEvent.META_SHIFT_ON);
boolean didConsume = adaptor.handleKeyEvent(shiftRightKeyDown);
assertTrue(didConsume);
assertEquals(selStart, Selection.getSelectionStart(editable));
assertEquals(selStart + 1, Selection.getSelectionEnd(editable));
}
@Test
public void testSendKeyEvent_upKeyMovesCaretUp() {
int selStart = SAMPLE_TEXT.indexOf('\n') + 4;
ListenableEditingState editable = sampleEditable(selStart, selStart);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent upKeyDown = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_UP);
boolean didConsume = adaptor.handleKeyEvent(upKeyDown);
assertTrue(didConsume);
    // Checks that the caret moved left (to some previous character). Selection.moveUp() behaves
    // differently in tests than on a real device, so we can't verify the exact position.
assertTrue(Selection.getSelectionStart(editable) < selStart);
}
@Test
public void testSendKeyEvent_downKeyMovesCaretDown() {
int selStart = 4;
ListenableEditingState editable = sampleEditable(selStart, selStart);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent downKeyDown = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_DOWN);
boolean didConsume = adaptor.handleKeyEvent(downKeyDown);
assertTrue(didConsume);
    // Checks that the caret moved right (to some following character). Selection.moveDown()
    // behaves differently in tests than on a real device, so we can't verify the exact position.
assertTrue(Selection.getSelectionStart(editable) > selStart);
}
@Test
public void testSendKeyEvent_MovementKeysAreNopWhenNoSelection() {
// Regression test for https://github.com/flutter/flutter/issues/76283.
ListenableEditingState editable = sampleEditable(-1, -1);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent keyEvent = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_DOWN);
boolean didConsume = adaptor.handleKeyEvent(keyEvent);
assertFalse(didConsume);
assertEquals(Selection.getSelectionStart(editable), -1);
assertEquals(Selection.getSelectionEnd(editable), -1);
keyEvent = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_UP);
didConsume = adaptor.handleKeyEvent(keyEvent);
assertFalse(didConsume);
assertEquals(Selection.getSelectionStart(editable), -1);
assertEquals(Selection.getSelectionEnd(editable), -1);
keyEvent = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_LEFT);
didConsume = adaptor.handleKeyEvent(keyEvent);
assertFalse(didConsume);
assertEquals(Selection.getSelectionStart(editable), -1);
assertEquals(Selection.getSelectionEnd(editable), -1);
keyEvent = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DPAD_RIGHT);
didConsume = adaptor.handleKeyEvent(keyEvent);
assertFalse(didConsume);
assertEquals(Selection.getSelectionStart(editable), -1);
assertEquals(Selection.getSelectionEnd(editable), -1);
}
@Test
public void testMethod_getExtractedText() {
int selStart = 5;
ListenableEditingState editable = sampleEditable(selStart, selStart);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
ExtractedText extractedText = adaptor.getExtractedText(null, 0);
assertEquals(extractedText.text, SAMPLE_TEXT);
assertEquals(extractedText.selectionStart, selStart);
assertEquals(extractedText.selectionEnd, selStart);
}
@Test
public void testExtractedText_monitoring() {
ListenableEditingState editable = sampleEditable(5, 5);
View testView = new View(ctx);
InputConnectionAdaptor adaptor =
new InputConnectionAdaptor(
testView,
1,
mock(TextInputChannel.class),
mockKeyboardManager,
editable,
new EditorInfo());
TestImm testImm = Shadow.extract(ctx.getSystemService(Context.INPUT_METHOD_SERVICE));
testImm.resetStates();
ExtractedTextRequest request = new ExtractedTextRequest();
request.token = 123;
ExtractedText extractedText = adaptor.getExtractedText(request, 0);
assertEquals(5, extractedText.selectionStart);
assertEquals(5, extractedText.selectionEnd);
assertFalse(extractedText.text instanceof SpannableStringBuilder);
// Move the cursor. Should not report extracted text.
adaptor.setSelection(2, 3);
assertNull(testImm.lastExtractedText);
// Now request monitoring, and update the request text flag.
request.flags = InputConnection.GET_TEXT_WITH_STYLES;
extractedText = adaptor.getExtractedText(request, InputConnection.GET_EXTRACTED_TEXT_MONITOR);
assertEquals(2, extractedText.selectionStart);
assertEquals(3, extractedText.selectionEnd);
assertTrue(extractedText.text instanceof SpannableStringBuilder);
adaptor.setSelection(3, 5);
assertEquals(3, testImm.lastExtractedText.selectionStart);
assertEquals(5, testImm.lastExtractedText.selectionEnd);
assertTrue(testImm.lastExtractedText.text instanceof SpannableStringBuilder);
// Stop monitoring.
testImm.resetStates();
extractedText = adaptor.getExtractedText(request, 0);
assertEquals(3, extractedText.selectionStart);
assertEquals(5, extractedText.selectionEnd);
assertTrue(extractedText.text instanceof SpannableStringBuilder);
adaptor.setSelection(1, 3);
assertNull(testImm.lastExtractedText);
}
@Test
public void testCursorAnchorInfo() {
ListenableEditingState editable = sampleEditable(5, 5);
View testView = new View(ctx);
InputConnectionAdaptor adaptor =
new InputConnectionAdaptor(
testView,
1,
mock(TextInputChannel.class),
mockKeyboardManager,
editable,
new EditorInfo());
TestImm testImm = Shadow.extract(ctx.getSystemService(Context.INPUT_METHOD_SERVICE));
testImm.resetStates();
// Monitoring only. Does not send update immediately.
adaptor.requestCursorUpdates(InputConnection.CURSOR_UPDATE_MONITOR);
assertNull(testImm.lastCursorAnchorInfo);
// Monitor selection changes.
adaptor.setSelection(0, 1);
CursorAnchorInfo cursorAnchorInfo = testImm.lastCursorAnchorInfo;
assertEquals(0, cursorAnchorInfo.getSelectionStart());
assertEquals(1, cursorAnchorInfo.getSelectionEnd());
// Turn monitoring off.
testImm.resetStates();
assertNull(testImm.lastCursorAnchorInfo);
adaptor.requestCursorUpdates(InputConnection.CURSOR_UPDATE_IMMEDIATE);
cursorAnchorInfo = testImm.lastCursorAnchorInfo;
assertEquals(0, cursorAnchorInfo.getSelectionStart());
assertEquals(1, cursorAnchorInfo.getSelectionEnd());
// No more updates.
testImm.resetStates();
adaptor.setSelection(1, 3);
assertNull(testImm.lastCursorAnchorInfo);
}
@Test
public void testSendKeyEvent_sendSoftKeyEvents() {
ListenableEditingState editable = sampleEditable(5, 5);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable, mockKeyboardManager);
KeyEvent shiftKeyDown = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_SHIFT_LEFT);
boolean didConsume = adaptor.handleKeyEvent(shiftKeyDown);
assertFalse(didConsume);
verify(mockKeyboardManager, never()).handleEvent(shiftKeyDown);
}
@Test
public void testSendKeyEvent_sendHardwareKeyEvents() {
ListenableEditingState editable = sampleEditable(5, 5);
when(mockKeyboardManager.handleEvent(any())).thenReturn(true);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable, mockKeyboardManager);
KeyEvent shiftKeyDown = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_SHIFT_LEFT);
// Call sendKeyEvent instead of handleKeyEvent.
boolean didConsume = adaptor.sendKeyEvent(shiftKeyDown);
assertTrue(didConsume);
verify(mockKeyboardManager, times(1)).handleEvent(shiftKeyDown);
}
@Test
public void testSendKeyEvent_delKeyNotConsumed() {
ListenableEditingState editable = sampleEditable(5, 5);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
KeyEvent downKeyDown = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_DEL);
for (int i = 0; i < 4; i++) {
boolean didConsume = adaptor.handleKeyEvent(downKeyDown);
assertFalse(didConsume);
}
assertEquals(5, Selection.getSelectionStart(editable));
}
@Test
public void testDoesNotConsumeBackButton() {
ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor = sampleInputConnectionAdaptor(editable);
FakeKeyEvent keyEvent = new FakeKeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_BACK, '\b');
boolean didConsume = adaptor.handleKeyEvent(keyEvent);
assertFalse(didConsume);
}
@Test
public void testCleanUpBatchEndsOnCloseConnection() {
final ListenableEditingState editable = sampleEditable(0, 0);
InputConnectionAdaptor adaptor = spy(sampleInputConnectionAdaptor(editable));
for (int i = 0; i < 5; i++) {
adaptor.beginBatchEdit();
}
adaptor.endBatchEdit();
verify(adaptor, times(1)).endBatchEdit();
adaptor.closeConnection();
verify(adaptor, times(4)).endBatchEdit();
}
private static final String SAMPLE_TEXT =
"Lorem ipsum dolor sit amet," + "\nconsectetur adipiscing elit.";
private static final String SAMPLE_EMOJI_TEXT =
"a" // First CodePoint
+ "😂" // Simple Emoji
+ "🇮🇷" // Regional Indicator Symbol even
+ "🇷" // Regional Indicator Symbol odd
+ "\r\n" // Carriage Return and Line Feed
+ "\r\n"
+ "✋🏿" // Emoji Modifier
+ "✋🏿"
+ "⚠️" // Variant Selector
+ "⚠️"
+ "🏴" // Emoji Tag Sequence
+ "🏴"
+ "a👨" // Zero Width Joiner
+ "👨👩👧👦"
+ "5️⃣" // Keycap
+ "5️⃣"
+ "عَ" // Non-Spacing Mark
+ "a"; // Normal Character
private static final String SAMPLE_RTL_TEXT = "متن ساختگی" + "\nبرای تستfor test😊";
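  // Creates an editing state backed by a plain View, seeded with SAMPLE_TEXT (or the supplied
  // text in the overload below) and the requested selection.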
private static ListenableEditingState sampleEditable(int selStart, int selEnd) {
ListenableEditingState sample =
new ListenableEditingState(null, new View(ApplicationProvider.getApplicationContext()));
sample.replace(0, 0, SAMPLE_TEXT);
Selection.setSelection(sample, selStart, selEnd);
return sample;
}
private static ListenableEditingState sampleEditable(int selStart, int selEnd, String text) {
ListenableEditingState sample =
new ListenableEditingState(null, new View(ApplicationProvider.getApplicationContext()));
sample.replace(0, 0, text);
Selection.setSelection(sample, selStart, selEnd);
return sample;
}
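  // Builds an adaptor whose FlutterJNI emoji queries are answered locally through ICU (see the
  // Emoji helper below), so grapheme-cluster handling can be exercised without the native library.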
private static InputConnectionAdaptor sampleInputConnectionAdaptor(
ListenableEditingState editable) {
return sampleInputConnectionAdaptor(editable, mock(KeyboardManager.class));
}
private static InputConnectionAdaptor sampleInputConnectionAdaptor(
ListenableEditingState editable, KeyboardManager mockKeyboardManager) {
View testView = new View(ApplicationProvider.getApplicationContext());
int client = 0;
TextInputChannel textInputChannel = mock(TextInputChannel.class);
FlutterJNI mockFlutterJNI = mock(FlutterJNI.class);
when(mockFlutterJNI.isCodePointEmoji(anyInt()))
.thenAnswer((invocation) -> Emoji.isEmoji((int) invocation.getArguments()[0]));
when(mockFlutterJNI.isCodePointEmojiModifier(anyInt()))
.thenAnswer((invocation) -> Emoji.isEmojiModifier((int) invocation.getArguments()[0]));
when(mockFlutterJNI.isCodePointEmojiModifierBase(anyInt()))
.thenAnswer((invocation) -> Emoji.isEmojiModifierBase((int) invocation.getArguments()[0]));
when(mockFlutterJNI.isCodePointVariantSelector(anyInt()))
.thenAnswer((invocation) -> Emoji.isVariationSelector((int) invocation.getArguments()[0]));
when(mockFlutterJNI.isCodePointRegionalIndicator(anyInt()))
.thenAnswer(
(invocation) -> Emoji.isRegionalIndicatorSymbol((int) invocation.getArguments()[0]));
return new InputConnectionAdaptor(
testView, client, textInputChannel, mockKeyboardManager, editable, null, mockFlutterJNI);
}
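  // Test stand-in for the engine's emoji classification: answers code point queries using ICU's
  // UCharacter binary properties.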
private static class Emoji {
public static boolean isEmoji(int codePoint) {
return UCharacter.hasBinaryProperty(codePoint, UProperty.EMOJI);
}
public static boolean isEmojiModifier(int codePoint) {
return UCharacter.hasBinaryProperty(codePoint, UProperty.EMOJI_MODIFIER);
}
public static boolean isEmojiModifierBase(int codePoint) {
return UCharacter.hasBinaryProperty(codePoint, UProperty.EMOJI_MODIFIER_BASE);
}
public static boolean isRegionalIndicatorSymbol(int codePoint) {
return UCharacter.hasBinaryProperty(codePoint, UProperty.REGIONAL_INDICATOR);
}
public static boolean isVariationSelector(int codePoint) {
return UCharacter.hasBinaryProperty(codePoint, UProperty.VARIATION_SELECTOR);
}
}
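  // TextInputChannel fake that records the arguments of the most recent updateEditingState call
  // and counts how many times it was invoked.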
private class TestTextInputChannel extends TextInputChannel {
public TestTextInputChannel(DartExecutor dartExecutor) {
super(dartExecutor);
}
public int inputClientId;
public String text;
public int selectionStart;
public int selectionEnd;
public int composingStart;
public int composingEnd;
public int updateEditingStateInvocations = 0;
@Override
public void updateEditingState(
int inputClientId,
String text,
int selectionStart,
int selectionEnd,
int composingStart,
int composingEnd) {
this.inputClientId = inputClientId;
this.text = text;
this.selectionStart = selectionStart;
this.selectionEnd = selectionEnd;
this.composingStart = composingStart;
this.composingEnd = composingEnd;
updateEditingStateInvocations++;
}
}
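  // Robolectric shadow of InputMethodManager that captures the latest updateSelection,
  // updateExtractedText, and updateCursorAnchorInfo arguments so tests can assert on them.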
@Implements(InputMethodManager.class)
public static class TestImm extends ShadowInputMethodManager {
public static int empty = -999;
CursorAnchorInfo lastCursorAnchorInfo;
int lastExtractedTextToken = empty;
ExtractedText lastExtractedText;
int lastSelectionStart = empty;
int lastSelectionEnd = empty;
int lastCandidatesStart = empty;
int lastCandidatesEnd = empty;
public TestImm() {}
@Implementation
public void updateCursorAnchorInfo(View view, CursorAnchorInfo cursorAnchorInfo) {
lastCursorAnchorInfo = cursorAnchorInfo;
}
@Implementation
public void updateExtractedText(View view, int token, ExtractedText text) {
lastExtractedTextToken = token;
lastExtractedText = text;
}
@Implementation
public void updateSelection(
View view, int selStart, int selEnd, int candidatesStart, int candidatesEnd) {
lastSelectionStart = selStart;
lastSelectionEnd = selEnd;
lastCandidatesStart = candidatesStart;
lastCandidatesEnd = candidatesEnd;
}
public void resetStates() {
lastExtractedText = null;
lastExtractedTextToken = empty;
lastSelectionStart = empty;
lastSelectionEnd = empty;
lastCandidatesStart = empty;
lastCandidatesEnd = empty;
lastCursorAnchorInfo = null;
}
}
}
| engine/shell/platform/android/test/io/flutter/plugin/editing/InputConnectionAdaptorTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/plugin/editing/InputConnectionAdaptorTest.java",
"repo_id": "engine",
"token_count": 19691
} | 307 |
package io.flutter.plugin.text;
import static io.flutter.Build.API_LEVELS;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyInt;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.ActivityInfo;
import android.content.pm.PackageItemInfo;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import androidx.annotation.RequiresApi;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import io.flutter.embedding.engine.dart.DartExecutor;
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding;
import io.flutter.embedding.engine.systemchannels.ProcessTextChannel;
import io.flutter.plugin.common.BinaryMessenger;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.StandardMethodCodec;
import java.lang.reflect.Field;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
@RunWith(AndroidJUnit4.class)
@TargetApi(API_LEVELS.API_24)
@RequiresApi(API_LEVELS.API_24)
public class ProcessTextPluginTest {
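  // Encodes a MethodCall with the standard codec and delivers it to the given handler, simulating
  // a message arriving over the platform channel.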
private static void sendToBinaryMessageHandler(
BinaryMessenger.BinaryMessageHandler binaryMessageHandler, String method, Object args) {
MethodCall methodCall = new MethodCall(method, args);
ByteBuffer encodedMethodCall = StandardMethodCodec.INSTANCE.encodeMethodCall(methodCall);
binaryMessageHandler.onMessage(
(ByteBuffer) encodedMethodCall.flip(), mock(BinaryMessenger.BinaryReply.class));
}
@SuppressWarnings("deprecation")
// setMessageHandler is deprecated.
@Test
public void respondsToProcessTextChannelMessage() {
ArgumentCaptor<BinaryMessenger.BinaryMessageHandler> binaryMessageHandlerCaptor =
ArgumentCaptor.forClass(BinaryMessenger.BinaryMessageHandler.class);
DartExecutor mockBinaryMessenger = mock(DartExecutor.class);
ProcessTextChannel.ProcessTextMethodHandler mockHandler =
mock(ProcessTextChannel.ProcessTextMethodHandler.class);
PackageManager mockPackageManager = mock(PackageManager.class);
ProcessTextChannel processTextChannel =
new ProcessTextChannel(mockBinaryMessenger, mockPackageManager);
processTextChannel.setMethodHandler(mockHandler);
verify(mockBinaryMessenger, times(1))
.setMessageHandler(any(String.class), binaryMessageHandlerCaptor.capture());
BinaryMessenger.BinaryMessageHandler binaryMessageHandler =
binaryMessageHandlerCaptor.getValue();
sendToBinaryMessageHandler(binaryMessageHandler, "ProcessText.queryTextActions", null);
verify(mockHandler).queryTextActions();
}
@SuppressWarnings("deprecation")
// setMessageHandler is deprecated.
@Test
public void performQueryTextActions() {
DartExecutor mockBinaryMessenger = mock(DartExecutor.class);
PackageManager mockPackageManager = mock(PackageManager.class);
ProcessTextChannel processTextChannel =
new ProcessTextChannel(mockBinaryMessenger, mockPackageManager);
// Set up mocked result for PackageManager.queryIntentActivities.
ResolveInfo action1 = createFakeResolveInfo("Action1", mockPackageManager);
ResolveInfo action2 = createFakeResolveInfo("Action2", mockPackageManager);
List<ResolveInfo> infos = new ArrayList<ResolveInfo>(Arrays.asList(action1, action2));
Intent intent = new Intent().setAction(Intent.ACTION_PROCESS_TEXT).setType("text/plain");
when(mockPackageManager.queryIntentActivities(
any(Intent.class), any(PackageManager.ResolveInfoFlags.class)))
.thenReturn(infos);
// ProcessTextPlugin should retrieve the mocked text actions.
ProcessTextPlugin processTextPlugin = new ProcessTextPlugin(processTextChannel);
Map<String, String> textActions = processTextPlugin.queryTextActions();
final String action1Id = "mockActivityName.Action1";
final String action2Id = "mockActivityName.Action2";
assertEquals(textActions, Map.of(action1Id, "Action1", action2Id, "Action2"));
}
@SuppressWarnings("deprecation")
// setMessageHandler is deprecated.
@Test
public void performProcessTextActionWithNoReturnedValue() {
DartExecutor mockBinaryMessenger = mock(DartExecutor.class);
PackageManager mockPackageManager = mock(PackageManager.class);
ProcessTextChannel processTextChannel =
new ProcessTextChannel(mockBinaryMessenger, mockPackageManager);
// Set up mocked result for PackageManager.queryIntentActivities.
ResolveInfo action1 = createFakeResolveInfo("Action1", mockPackageManager);
ResolveInfo action2 = createFakeResolveInfo("Action2", mockPackageManager);
List<ResolveInfo> infos = new ArrayList<ResolveInfo>(Arrays.asList(action1, action2));
when(mockPackageManager.queryIntentActivities(
any(Intent.class), any(PackageManager.ResolveInfoFlags.class)))
.thenReturn(infos);
// ProcessTextPlugin should retrieve the mocked text actions.
ProcessTextPlugin processTextPlugin = new ProcessTextPlugin(processTextChannel);
Map<String, String> textActions = processTextPlugin.queryTextActions();
final String action1Id = "mockActivityName.Action1";
final String action2Id = "mockActivityName.Action2";
assertEquals(textActions, Map.of(action1Id, "Action1", action2Id, "Action2"));
// Set up the activity binding.
ActivityPluginBinding mockActivityPluginBinding = mock(ActivityPluginBinding.class);
Activity mockActivity = mock(Activity.class);
when(mockActivityPluginBinding.getActivity()).thenReturn(mockActivity);
processTextPlugin.onAttachedToActivity(mockActivityPluginBinding);
    // Execute the first action.
String textToBeProcessed = "Flutter!";
MethodChannel.Result result = mock(MethodChannel.Result.class);
processTextPlugin.processTextAction(action1Id, textToBeProcessed, false, result);
// Activity.startActivityForResult should have been called.
ArgumentCaptor<Intent> intentCaptor = ArgumentCaptor.forClass(Intent.class);
verify(mockActivity, times(1)).startActivityForResult(intentCaptor.capture(), anyInt());
Intent intent = intentCaptor.getValue();
assertEquals(intent.getStringExtra(Intent.EXTRA_PROCESS_TEXT), textToBeProcessed);
// Simulate an Android activity answer which does not return a value.
Intent resultIntent = new Intent();
processTextPlugin.onActivityResult(result.hashCode(), Activity.RESULT_OK, resultIntent);
// Success with no returned value is expected.
verify(result).success(null);
}
@SuppressWarnings("deprecation")
// setMessageHandler is deprecated.
@Test
public void performProcessTextActionWithReturnedValue() {
DartExecutor mockBinaryMessenger = mock(DartExecutor.class);
PackageManager mockPackageManager = mock(PackageManager.class);
ProcessTextChannel processTextChannel =
new ProcessTextChannel(mockBinaryMessenger, mockPackageManager);
// Set up mocked result for PackageManager.queryIntentActivities.
ResolveInfo action1 = createFakeResolveInfo("Action1", mockPackageManager);
ResolveInfo action2 = createFakeResolveInfo("Action2", mockPackageManager);
List<ResolveInfo> infos = new ArrayList<ResolveInfo>(Arrays.asList(action1, action2));
when(mockPackageManager.queryIntentActivities(
any(Intent.class), any(PackageManager.ResolveInfoFlags.class)))
.thenReturn(infos);
// ProcessTextPlugin should retrieve the mocked text actions.
ProcessTextPlugin processTextPlugin = new ProcessTextPlugin(processTextChannel);
Map<String, String> textActions = processTextPlugin.queryTextActions();
final String action1Id = "mockActivityName.Action1";
final String action2Id = "mockActivityName.Action2";
assertEquals(textActions, Map.of(action1Id, "Action1", action2Id, "Action2"));
// Set up the activity binding.
ActivityPluginBinding mockActivityPluginBinding = mock(ActivityPluginBinding.class);
Activity mockActivity = mock(Activity.class);
when(mockActivityPluginBinding.getActivity()).thenReturn(mockActivity);
processTextPlugin.onAttachedToActivity(mockActivityPluginBinding);
// Execute the first action.
String textToBeProcessed = "Flutter!";
MethodChannel.Result result = mock(MethodChannel.Result.class);
processTextPlugin.processTextAction(action1Id, textToBeProcessed, false, result);
// Activity.startActivityForResult should have been called.
ArgumentCaptor<Intent> intentCaptor = ArgumentCaptor.forClass(Intent.class);
verify(mockActivity, times(1)).startActivityForResult(intentCaptor.capture(), anyInt());
Intent intent = intentCaptor.getValue();
assertEquals(intent.getStringExtra(Intent.EXTRA_PROCESS_TEXT), textToBeProcessed);
// Simulate an Android activity answer which returns a transformed text.
String processedText = "Flutter!!!";
Intent resultIntent = new Intent();
resultIntent.putExtra(Intent.EXTRA_PROCESS_TEXT, processedText);
processTextPlugin.onActivityResult(result.hashCode(), Activity.RESULT_OK, resultIntent);
// Success with the transformed text is expected.
verify(result).success(processedText);
}
@SuppressWarnings("deprecation")
// setMessageHandler is deprecated.
@Test
public void doNotCrashOnNonRelatedActivityResult() {
DartExecutor mockBinaryMessenger = mock(DartExecutor.class);
PackageManager mockPackageManager = mock(PackageManager.class);
ProcessTextChannel processTextChannel =
new ProcessTextChannel(mockBinaryMessenger, mockPackageManager);
// Set up mocked result for PackageManager.queryIntentActivities.
ResolveInfo action1 = createFakeResolveInfo("Action1", mockPackageManager);
ResolveInfo action2 = createFakeResolveInfo("Action2", mockPackageManager);
List<ResolveInfo> infos = new ArrayList<ResolveInfo>(Arrays.asList(action1, action2));
when(mockPackageManager.queryIntentActivities(
any(Intent.class), any(PackageManager.ResolveInfoFlags.class)))
.thenReturn(infos);
// ProcessTextPlugin should retrieve the mocked text actions.
ProcessTextPlugin processTextPlugin = new ProcessTextPlugin(processTextChannel);
Map<String, String> textActions = processTextPlugin.queryTextActions();
final String action1Id = "mockActivityName.Action1";
final String action2Id = "mockActivityName.Action2";
assertEquals(textActions, Map.of(action1Id, "Action1", action2Id, "Action2"));
// Set up the activity binding.
ActivityPluginBinding mockActivityPluginBinding = mock(ActivityPluginBinding.class);
Activity mockActivity = mock(Activity.class);
when(mockActivityPluginBinding.getActivity()).thenReturn(mockActivity);
processTextPlugin.onAttachedToActivity(mockActivityPluginBinding);
// Execute the first action.
String textToBeProcessed = "Flutter!";
MethodChannel.Result result = mock(MethodChannel.Result.class);
processTextPlugin.processTextAction(action1Id, textToBeProcessed, false, result);
// Activity.startActivityForResult should have been called.
ArgumentCaptor<Intent> intentCaptor = ArgumentCaptor.forClass(Intent.class);
verify(mockActivity, times(1)).startActivityForResult(intentCaptor.capture(), anyInt());
Intent intent = intentCaptor.getValue();
assertEquals(intent.getStringExtra(Intent.EXTRA_PROCESS_TEXT), textToBeProcessed);
    // A result for a request that was not sent by this plugin should be ignored.
final int externalRequestCode = 42;
processTextPlugin.onActivityResult(externalRequestCode, Activity.RESULT_OK, new Intent());
// Simulate an Android activity answer which returns a transformed text.
String processedText = "Flutter!!!";
Intent resultIntent = new Intent();
resultIntent.putExtra(Intent.EXTRA_PROCESS_TEXT, processedText);
processTextPlugin.onActivityResult(result.hashCode(), Activity.RESULT_OK, resultIntent);
// Success with the transformed text is expected.
verify(result).success(processedText);
}
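  // Builds a fake ResolveInfo whose label is the given string and whose activity is named
  // "mockActivityName.<label>", matching the action ids the tests expect.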
private ResolveInfo createFakeResolveInfo(String label, PackageManager mockPackageManager) {
ResolveInfo resolveInfo = mock(ResolveInfo.class);
ActivityInfo activityInfo = new ActivityInfo();
when(resolveInfo.loadLabel(mockPackageManager)).thenReturn(label);
// Use Java reflection to set required member variables.
try {
Field activityField = ResolveInfo.class.getDeclaredField("activityInfo");
activityField.setAccessible(true);
activityField.set(resolveInfo, activityInfo);
Field packageNameField = PackageItemInfo.class.getDeclaredField("packageName");
packageNameField.setAccessible(true);
packageNameField.set(activityInfo, "mockActivityPackageName");
Field nameField = PackageItemInfo.class.getDeclaredField("name");
nameField.setAccessible(true);
nameField.set(activityInfo, "mockActivityName." + label);
} catch (Exception ex) {
      // The test will fail if the reflection APIs throw.
}
return resolveInfo;
}
}
| engine/shell/platform/android/test/io/flutter/plugin/text/ProcessTextPluginTest.java/0 | {
"file_path": "engine/shell/platform/android/test/io/flutter/plugin/text/ProcessTextPluginTest.java",
"repo_id": "engine",
"token_count": 4180
} | 308 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_ANDROID_VSYNC_WAITER_ANDROID_H_
#define FLUTTER_SHELL_PLATFORM_ANDROID_VSYNC_WAITER_ANDROID_H_
#include <jni.h>
#include <memory>
#include "flutter/fml/macros.h"
#include "flutter/shell/common/vsync_waiter.h"
namespace flutter {
class AndroidChoreographer;
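// Android implementation of |VsyncWaiter| that awaits vsync signals delivered
// either by the NDK choreographer (OnVsyncFromNDK) or by the Java embedding
// (OnVsyncFromJava).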
class VsyncWaiterAndroid final : public VsyncWaiter {
public:
static bool Register(JNIEnv* env);
explicit VsyncWaiterAndroid(const flutter::TaskRunners& task_runners);
~VsyncWaiterAndroid() override;
private:
// |VsyncWaiter|
void AwaitVSync() override;
static void OnVsyncFromNDK(int64_t frame_nanos, void* data);
static void OnVsyncFromJava(JNIEnv* env,
jclass jcaller,
jlong frameDelayNanos,
jlong refreshPeriodNanos,
jlong java_baton);
static void ConsumePendingCallback(std::weak_ptr<VsyncWaiter>* weak_this,
fml::TimePoint frame_start_time,
fml::TimePoint frame_target_time);
static void OnUpdateRefreshRate(JNIEnv* env,
jclass jcaller,
jfloat refresh_rate);
FML_DISALLOW_COPY_AND_ASSIGN(VsyncWaiterAndroid);
};
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_ANDROID_VSYNC_WAITER_ANDROID_H_
| engine/shell/platform/android/vsync_waiter_android.h/0 | {
"file_path": "engine/shell/platform/android/vsync_waiter_android.h",
"repo_id": "engine",
"token_count": 737
} | 309 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file is deprecated in favor of core_implementations.cc. This is a
// temporary forwarding implementation so that the switch to
// core_implementations.cc isn't an immediate breaking change, allowing for the
// template to be updated to include it and update the template version before
// removing this file.
#include "core_implementations.cc"
| engine/shell/platform/common/client_wrapper/engine_method_result.cc/0 | {
"file_path": "engine/shell/platform/common/client_wrapper/engine_method_result.cc",
"repo_id": "engine",
"token_count": 125
} | 310 |
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_FUNCTIONS_H_
#define FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_FUNCTIONS_H_
#include <functional>
#include <string>
#include <utility>
#include "method_result.h"
namespace flutter {
class EncodableValue;
// Handler types for each of the MethodResult outcomes.
template <typename T>
using ResultHandlerSuccess = std::function<void(const T* result)>;
template <typename T>
using ResultHandlerError = std::function<void(const std::string& error_code,
const std::string& error_message,
const T* error_details)>;
template <typename T>
using ResultHandlerNotImplemented = std::function<void()>;
// An implementation of MethodResult that pass calls through to provided
// function objects, for ease of constructing one-off result handlers.
template <typename T = EncodableValue>
class MethodResultFunctions : public MethodResult<T> {
public:
// Creates a result object that calls the provided functions for the
// corresponding MethodResult outcomes.
MethodResultFunctions(ResultHandlerSuccess<T> on_success,
ResultHandlerError<T> on_error,
ResultHandlerNotImplemented<T> on_not_implemented)
      : on_success_(std::move(on_success)),
        on_error_(std::move(on_error)),
        on_not_implemented_(std::move(on_not_implemented)) {}
virtual ~MethodResultFunctions() = default;
// Prevent copying.
MethodResultFunctions(MethodResultFunctions const&) = delete;
MethodResultFunctions& operator=(MethodResultFunctions const&) = delete;
protected:
// |flutter::MethodResult|
void SuccessInternal(const T* result) override {
if (on_success_) {
on_success_(result);
}
}
// |flutter::MethodResult|
void ErrorInternal(const std::string& error_code,
const std::string& error_message,
const T* error_details) override {
if (on_error_) {
on_error_(error_code, error_message, error_details);
}
}
// |flutter::MethodResult|
void NotImplementedInternal() override {
if (on_not_implemented_) {
on_not_implemented_();
}
}
private:
ResultHandlerSuccess<T> on_success_;
ResultHandlerError<T> on_error_;
ResultHandlerNotImplemented<T> on_not_implemented_;
};
} // namespace flutter
#endif // FLUTTER_SHELL_PLATFORM_COMMON_CLIENT_WRAPPER_INCLUDE_FLUTTER_METHOD_RESULT_FUNCTIONS_H_
| engine/shell/platform/common/client_wrapper/include/flutter/method_result_functions.h/0 | {
"file_path": "engine/shell/platform/common/client_wrapper/include/flutter/method_result_functions.h",
"repo_id": "engine",
"token_count": 1021
} | 311 |